Commit 3a550c4d authored by Alexandre Delanoë

[REFACT] hash functions

parent 3af51fde
@@ -2,9 +2,10 @@ module Gargantext.API.HashedResponse where
 import Data.Aeson
 import Data.Swagger
-import qualified Data.Digest.Pure.MD5 as DPMD5
+import Data.Text (Text)
+import Gargantext.Prelude
+import Gargantext.Prelude.Utils (hash)
 import GHC.Generics (Generic)
-import Protolude

 data HashedResponse a = HashedResponse { md5 :: Text, value :: a }
   deriving (Generic)
@@ -16,4 +17,4 @@ instance ToJSON a => ToJSON (HashedResponse a) where
 constructHashedResponse :: ToJSON a => a -> HashedResponse a
 constructHashedResponse v = HashedResponse { md5 = md5', value = v }
   where
-    md5' = show $ DPMD5.md5 $ encode v
+    md5' = hash $ encode v
\ No newline at end of file
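Editor's note, a hedged sketch: Data.Aeson's encode returns a lazy ByteString, so the new polymorphic hash above picks the ByteString instance added later in this commit, and the md5 field now holds a SHA256 hex digest while keeping its old name. A standalone approximation with the hashing inlined (module name and wiring are illustrative only, not part of the commit):

{-# LANGUAGE DeriveGeneric #-}
-- Illustrative module name; the real code lives in Gargantext.API.HashedResponse
-- and imports hash from Gargantext.Prelude.Utils instead of inlining it.
module HashedResponseSketch where

import Data.Aeson (ToJSON, encode)
import Data.Text (Text)
import qualified Data.Digest.Pure.SHA as SHA
import qualified Data.Text as Text
import GHC.Generics (Generic)

data HashedResponse a = HashedResponse { md5 :: Text, value :: a }
  deriving (Generic)

-- Same shape as the code in the diff, with the SHA256 step written out:
-- encode yields a lazy ByteString, which is hashed and rendered as hex Text.
constructHashedResponse :: ToJSON a => a -> HashedResponse a
constructHashedResponse v = HashedResponse { md5 = digest, value = v }
  where
    digest = Text.pack . SHA.showDigest . SHA.sha256 $ encode v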
@@ -48,7 +48,7 @@ import Gargantext.Database.Query.Table.NodeNode (selectDocNodes)
 import Gargantext.Database.Schema.Node (_node_id, _node_hyperdata)
 import Gargantext.Database.Schema.Ngrams (NgramsType(..))
 import Gargantext.Prelude
-import Gargantext.Prelude.Utils (hashFromSet, hashFromList)
+import Gargantext.Prelude.Utils (hash)

 -- Corpus Export
@@ -115,13 +115,13 @@ getCorpus cId lId nt' = do
   repo <- getRepo
   ngs  <- getNodeNgrams cId lId nt repo
   let  -- uniqId is hash computed already for each document imported in database
-    r = Map.intersectionWith (\a b -> Document a (Ngrams (Set.toList b) (hashFromSet b)) (d_hash a b)
+    r = Map.intersectionWith (\a b -> Document a (Ngrams (Set.toList b) (hash b)) (d_hash a b)
                              ) ns ngs
        where
-          d_hash a b = hashFromList [ fromMaybe "" (_hd_uniqId $ _node_hyperdata a)
-                                    , hashFromSet b
-                                    ]
-  pure $ Corpus (Map.elems r) (hashFromList $ List.map _d_hash
+          d_hash a b = hash [ fromMaybe "" (_hd_uniqId $ _node_hyperdata a)
+                            , hash b
+                            ]
+  pure $ Corpus (Map.elems r) (hash $ List.map _d_hash
                               $ Map.elems r
                               )
...
@@ -30,7 +30,7 @@ import Gargantext.API.Ngrams (TODO)
 import Gargantext.Database.Admin.Types.Node
 import Gargantext.Database.Prelude -- (Cmd, CmdM)
 import Gargantext.Prelude
-import Gargantext.Prelude.Utils (sha)
+import Gargantext.Prelude.Utils (hash)
 import Servant
 import Servant.Multipart
 import Servant.Swagger (HasSwagger(toSwagger))
@@ -107,6 +107,6 @@ postUpload _ (Just fileType) multipartData = do
     --pure $ cs content
   -- is <- inputs multipartData

-  pure $ map (sha . cs) is
+  pure $ map hash is

-------------------------------------------------------------------
@@ -28,7 +28,7 @@ import Gargantext.Database.Query.Table.Node
 import Gargantext.Database.Query.Table.Node.Error
 import Gargantext.Database.Query.Table.Node.UpdateOpaleye (updateHyperdata)
 import Gargantext.Prelude
-import Gargantext.Prelude.Utils (sha)
+import Gargantext.Prelude.Utils (hash)
 import Gargantext.Database.Prelude
 import Control.Lens (view)
 import Gargantext.Config (GargConfig(..))
@@ -99,7 +99,7 @@ mkNodeWithParent_ConfigureHyperdata' nt (Just i) uId name = do
       _ -> nodeError NeedsConfiguration
     let
       s = _gc_secretkey config
-      hd = HyperdataFrame u (sha $ s <> (cs $ show n))
+      hd = HyperdataFrame u (hash $ s <> (cs $ show n))
     _ <- updateHyperdata n hd
     pure [n]
   (_:_:_) -> nodeError MkNode
...
@@ -72,10 +72,8 @@ import Gargantext.Database.Admin.Types.Hyperdata
 import Gargantext.Database.Admin.Types.Node
 import Gargantext.Database.Prelude (Cmd, runPGSQuery)
 import Gargantext.Prelude
-import Gargantext.Prelude.Utils (sha)
-import qualified Data.ByteString.Lazy.Char8 as DC (pack)
-import qualified Data.Digest.Pure.SHA as SHA (sha256, showDigest)
-import qualified Data.Text as DT (pack, unpack, concat, take)
+import Gargantext.Prelude.Utils (hash)
+import qualified Data.Text as DT (pack, concat, take)

 -- TODO : the import of Document constructor below does not work
 -- import Gargantext.Database.Types.Node (Document)
@@ -203,8 +201,8 @@ instance AddUniqId HyperdataDocument
   addUniqIdsDoc doc = set hd_uniqIdBdd (Just shaBdd)
                     $ set hd_uniqId    (Just shaUni) doc
     where
-      shaUni = sha $ DT.concat $ map ($ doc) shaParametersDoc
-      shaBdd = sha $ DT.concat $ map ($ doc) ([(\d -> maybeText (_hd_bdd d))] <> shaParametersDoc)
+      shaUni = hash $ DT.concat $ map ($ doc) shaParametersDoc
+      shaBdd = hash $ DT.concat $ map ($ doc) ([(\d -> maybeText (_hd_bdd d))] <> shaParametersDoc)

      shaParametersDoc :: [(HyperdataDocument -> Text)]
      shaParametersDoc = [ \d -> maybeText (_hd_title d)
@@ -225,11 +223,8 @@ addUniqIdsContact :: HyperdataContact -> HyperdataContact
 addUniqIdsContact hc = set (hc_uniqIdBdd) (Just shaBdd)
                      $ set (hc_uniqId   ) (Just shaUni) hc
   where
-    shaUni = uniqId $ DT.concat $ map ($ hc) shaParametersContact
-    shaBdd = uniqId $ DT.concat $ map ($ hc) ([\d -> maybeText (view hc_bdd d)] <> shaParametersContact)
-
-    uniqId :: Text -> Text
-    uniqId = DT.pack . SHA.showDigest . SHA.sha256 . DC.pack . DT.unpack
+    shaUni = hash $ DT.concat $ map ($ hc) shaParametersContact
+    shaBdd = hash $ DT.concat $ map ($ hc) ([\d -> maybeText (view hc_bdd d)] <> shaParametersContact)

     -- | TODO add more shaparameters
     shaParametersContact :: [(HyperdataContact -> Text)]
...
@@ -9,10 +9,12 @@ Portability : POSIX
 -}

+{-# OPTIONS_GHC -fno-warn-orphans #-}
+
 module Gargantext.Prelude.Utils
   where

+import Prelude (String)
 import Data.Set (Set)
 import Data.List (foldl)
 import Control.Lens (view)
@@ -41,24 +43,28 @@ shuffle ns = SRS.shuffleM ns
 -- TODO use newtype
 type Hash = Text

--- | API to hash text
+-- | Class to make hashes
+class IsHashable a where
+  hash :: a -> Hash
+
+-- | Main API to hash text
 -- using sha256 for now
-hash :: Text -> Hash
-hash = sha
-
--- | Sugar fun to sha256 Text
-sha :: Text -> Hash
-sha = Text.pack
-    . SHA.showDigest
-    . SHA.sha256
-    . Char.pack
-    . Text.unpack
-
-hashFromList :: [Hash] -> Hash
-hashFromList = hashFromSet . Set.fromList
-
-hashFromSet :: Set Hash -> Hash
-hashFromSet = sha . foldl (<>) "" . Set.toList
+instance IsHashable Char.ByteString where
+  hash = Text.pack
+       . SHA.showDigest
+       . SHA.sha256
+
+instance IsHashable String where
+  hash = hash . Char.pack
+
+instance IsHashable Text where
+  hash = hash . Text.unpack
+
+instance IsHashable (Set Hash) where
+  hash = hash . foldl (<>) "" . Set.toList
+
+instance IsHashable [Hash] where
+  hash = hash . Set.fromList

 --------------------------------------------------------------------------
 data NodeToHash = NodeToHash { nodeType :: NodeType
@@ -85,14 +91,14 @@ writeFile :: (MonadReader env m, MonadBase IO m, HasSettings env, SaveFile a)
           => a -> m FilePath
 writeFile a = do
   dataPath <- view (settings . fileFolder) <$> ask

-  (fp,fn) <- liftBase $ (toPath 3) . sha . Text.pack . show <$> newStdGen
+  (fp,fn) <- liftBase $ (toPath 3) . hash . show <$> newStdGen

   let foldPath = dataPath <> "/" <> fp
       filePath = foldPath <> "/" <> fn

   _ <- liftBase $ createDirectoryIfMissing True foldPath
   _ <- liftBase $ saveFile' filePath a

   pure filePath
...
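Editor's note, a summary sketch of the refactor above: the commit replaces the sha, hashFromSet and hashFromList helpers with one overloaded hash. The standalone module below restates the new class and instances and adds two illustrative call sites; the module name, the example values, and the explicit FlexibleInstances pragma are assumptions (the real Gargantext.Prelude.Utils presumably gets the extension from its build configuration).

{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
-- Standalone sketch of the IsHashable API introduced in this commit;
-- module name and example values are illustrative, not part of the commit.
module IsHashableSketch where

import Data.Set (Set)
import Data.Text (Text)
import qualified Data.ByteString.Lazy.Char8 as Char
import qualified Data.Digest.Pure.SHA as SHA
import qualified Data.Set as Set
import qualified Data.Text as Text

type Hash = Text

-- One overloaded entry point instead of sha / hashFromSet / hashFromList.
class IsHashable a where
  hash :: a -> Hash

-- SHA256 of a lazy ByteString, rendered as hex Text.
instance IsHashable Char.ByteString where
  hash = Text.pack . SHA.showDigest . SHA.sha256

instance IsHashable String where
  hash = hash . Char.pack

instance IsHashable Text where
  hash = hash . Text.unpack

-- Sets are flattened by concatenation in Set order, so the digest does not
-- depend on insertion order.
instance IsHashable (Set Hash) where
  hash = hash . foldl (<>) "" . Set.toList

-- Lists go through a Set, so duplicates and ordering do not affect the digest.
instance IsHashable [Hash] where
  hash = hash . Set.fromList

-- Call sites analogous to the ones rewritten in this commit (values made up):
exampleFrameHash :: Hash
exampleFrameHash = hash ("secret-key" <> Text.pack (show (42 :: Int)))

exampleCorpusHash :: Hash
exampleCorpusHash = hash [hash ("doc-1" :: Text), hash ("doc-2" :: Text)]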