Commit 640a4549 authored by Alexandre Delanoë

[READ] Code clean

parent ccf15753
@@ -70,7 +70,6 @@ module Gargantext.Core.NodeStory
   , nse_archive_saver_immediate
   , nse_var
   , unNodeStory
-  , getNodeArchiveHistory
   , getNodesArchiveHistory
   , Archive(..)
   , initArchive
@@ -94,42 +93,42 @@ module Gargantext.Core.NodeStory
   where
 -- import Debug.Trace (traceShow)
-import Control.Debounce (mkDebounce, defaultDebounceSettings, debounceFreq, debounceAction)
 import Codec.Serialise.Class
 import Control.Concurrent (MVar(), newMVar, modifyMVar_)
+import Control.Debounce (mkDebounce, defaultDebounceSettings, debounceFreq, debounceAction)
 import Control.Exception (catch, throw, SomeException(..))
 import Control.Lens (makeLenses, Getter, (^.), (.~), (%~), _Just, at, traverse, view)
 import Control.Monad.Except
 import Control.Monad.Reader
 import Data.Aeson hiding ((.=), decode)
 import Data.ByteString.Char8 (hPutStrLn)
+import Data.HashMap.Strict (HashMap)
 import Data.Map.Strict (Map)
 import Data.Maybe (catMaybes)
 import Data.Monoid
 import Data.Pool (Pool, withResource)
+import Data.Profunctor.Product.TH (makeAdaptorAndInstance)
 import Data.Semigroup
-import Database.PostgreSQL.Simple.SqlQQ (sql)
 import Database.PostgreSQL.Simple.FromField (FromField(fromField), fromJSONField)
-import Data.Profunctor.Product.TH (makeAdaptorAndInstance)
+import Database.PostgreSQL.Simple.SqlQQ (sql)
+import Database.PostgreSQL.Simple.Types (Values(..), QualifiedIdentifier(..))
 import GHC.Generics (Generic)
-import Data.HashMap.Strict (HashMap)
-import qualified Data.HashMap.Strict as HashMap
-import Gargantext.Database.Schema.Ngrams (NgramsType)
 import Gargantext.API.Ngrams.Types
 import Gargantext.Core.Types (ListId, NodeId(..), NodeType)
 import Gargantext.Core.Utils.Prefix (unPrefix)
 import Gargantext.Database.Admin.Config (nodeTypeId)
 import Gargantext.Database.Prelude (CmdM', HasConnectionPool(..), HasConfig)
 import Gargantext.Database.Query.Table.Node.Error (HasNodeError())
+import Gargantext.Database.Schema.Ngrams (NgramsType)
 import Gargantext.Prelude
 import Opaleye (DefaultFromField(..), SqlJsonb, fromPGSFromField)
-import Database.PostgreSQL.Simple.Types (Values(..), QualifiedIdentifier(..))
 import System.IO (stderr)
+import qualified Data.HashMap.Strict as HashMap
 import qualified Data.Map.Strict as Map
 import qualified Data.Map.Strict.Patch as PM
 import qualified Data.Set as Set
 import qualified Data.Text as Text
 import qualified Database.PostgreSQL.Simple as PGS
 import qualified Gargantext.Database.Query.Table.Ngrams as TableNgrams
 ------------------------------------------------------------------------
@@ -258,8 +257,6 @@ initNodeListStoryMock = NodeStory $ Map.singleton nodeListId archive
   ]
 ------------------------------------------------------------------------
 ------------------------------------------------------------------------
 -- | Lenses at the bottom of the file because Template Haskell would reorder order of execution in others cases
 makeLenses ''NodeStoryEnv
@@ -293,7 +290,8 @@ type ArchiveList = Archive NgramsState' NgramsStatePatch'
 -- DB stuff
-runPGSExecute :: (PGS.ToRow q) => PGS.Connection -> PGS.Query -> q -> IO Int64
+runPGSExecute :: (PGS.ToRow q)
+              => PGS.Connection -> PGS.Query -> q -> IO Int64
 runPGSExecute c qs a = catch (PGS.execute c qs a) printError
   where
     printError (SomeException e) = do
@@ -301,7 +299,8 @@ runPGSExecute c qs a = catch (PGS.execute c qs a) printError
       _ <- panic $ Text.pack $ show e
       throw (SomeException e)
 
-runPGSExecuteMany :: (PGS.ToRow q) => PGS.Connection -> PGS.Query -> [q] -> IO Int64
+runPGSExecuteMany :: (PGS.ToRow q)
+                  => PGS.Connection -> PGS.Query -> [q] -> IO Int64
 runPGSExecuteMany c qs a = catch (PGS.executeMany c qs a) printError
   where
     printError (SomeException e) = do
@@ -309,7 +308,8 @@ runPGSExecuteMany c qs a = catch (PGS.executeMany c qs a) printError
       _ <- panic $ Text.pack $ show e
       throw (SomeException e)
 
-runPGSQuery :: (PGS.FromRow r, PGS.ToRow q) => PGS.Connection -> PGS.Query -> q -> IO [r]
+runPGSQuery :: (PGS.FromRow r, PGS.ToRow q)
+            => PGS.Connection -> PGS.Query -> q -> IO [r]
 runPGSQuery c q a = catch (PGS.query c q a) printError
   where
     printError (SomeException e) = do
@@ -319,22 +319,26 @@ runPGSQuery c q a = catch (PGS.query c q a) printError
 runPGSAdvisoryLock :: PGS.Connection -> Int -> IO ()
 runPGSAdvisoryLock c id = do
-  _ <- runPGSQuery c [sql| SELECT pg_advisory_lock(?) |] (PGS.Only id) :: IO [PGS.Only ()]
+  _ <- runPGSQuery c [sql| SELECT pg_advisory_lock(?) |]
+                     (PGS.Only id) :: IO [PGS.Only ()]
   pure ()
 
 runPGSAdvisoryUnlock :: PGS.Connection -> Int -> IO ()
 runPGSAdvisoryUnlock c id = do
-  _ <- runPGSQuery c [sql| SELECT pg_advisory_unlock(?) |] (PGS.Only id) :: IO [PGS.Only Bool]
+  _ <- runPGSQuery c [sql| SELECT pg_advisory_unlock(?) |]
+                     (PGS.Only id) :: IO [PGS.Only Bool]
   pure ()
 
 runPGSAdvisoryXactLock :: PGS.Connection -> Int -> IO ()
 runPGSAdvisoryXactLock c id = do
-  _ <- runPGSQuery c [sql| SELECT pg_advisory_xact_lock(?) |] (PGS.Only id) :: IO [PGS.Only ()]
+  _ <- runPGSQuery c [sql| SELECT pg_advisory_xact_lock(?) |]
+                     (PGS.Only id) :: IO [PGS.Only ()]
   pure ()
 
 nodeExists :: PGS.Connection -> NodeId -> IO Bool
 nodeExists c nId = (== [PGS.Only True])
-  <$> runPGSQuery c [sql| SELECT true FROM nodes WHERE id = ? LIMIT 1 |] (PGS.Only nId)
+  <$> runPGSQuery c [sql| SELECT true FROM nodes WHERE id = ? LIMIT 1 |]
+                    (PGS.Only nId)
 
 getNodesIdWithType :: PGS.Connection -> NodeType -> IO [NodeId]
 getNodesIdWithType c nt = do
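
The hunks above only re-wrap long signatures and call sites; the pg_advisory_* helpers they touch wrap PostgreSQL advisory locks, which are cooperative locks keyed by an integer. As a minimal sketch of how the session-level pair composes, assuming an open PGS.Connection and Control.Exception's bracket_ (the wrapper name is made up for illustration and is not part of this commit):

-- Illustrative sketch only, not code from this commit.
-- Assumes the runPGSAdvisoryLock / runPGSAdvisoryUnlock helpers shown above.
import Control.Exception (bracket_)
import qualified Database.PostgreSQL.Simple as PGS

-- Hold a session-level advisory lock around an IO action and release it
-- even if the action throws.
withAdvisoryLock :: PGS.Connection -> Int -> IO a -> IO a
withAdvisoryLock c key = bracket_ (runPGSAdvisoryLock c key)
                                  (runPGSAdvisoryUnlock c key)

runPGSAdvisoryXactLock has no unlock counterpart because a transaction-level advisory lock is released automatically when the transaction ends.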
@@ -345,44 +349,22 @@ getNodesIdWithType c nt = do
     query = [sql| SELECT id FROM nodes WHERE typename = ? |]
 
--- nodeStoryTable :: Table NodeStoryRead NodeStoryWrite
--- nodeStoryTable =
---   Table "node_stories"
---     ( pNodeStory NodeStoryDB { node_id = tableField "node_id"
---                              , version = tableField "version"
---                              , ngrams_type_id = tableField "ngrams_type_id"
---                              , ngrams_id = tableField "ngrams_id"
---                              , ngrams_repo_element = tableField "ngrams_repo_element"
---                              } )
-
--- nodeStoryArchiveTable :: Table NodeStoryArchiveRead NodeStoryArchiveWrite
--- nodeStoryArchiveTable =
---   Table "node_story_archive_history"
---     ( pNodeArchiveStory NodeStoryArchiveDB { a_node_id = tableField "node_id"
---                                            , archive = tableField "archive" } )
-
--- nodeStorySelect :: Select NodeStoryRead
--- nodeStorySelect = selectTable nodeStoryTable
-
--- NOTE "first patch in the _a_history list is the most recent"
-getNodeArchiveHistory :: PGS.Connection -> NodeId -> IO [NgramsStatePatch']
-getNodeArchiveHistory c nodeId = do
-  as <- runPGSQuery c query (PGS.Only nodeId) :: IO [(TableNgrams.NgramsType, NgramsTerm, NgramsPatch)]
-  pure $ (\(ngramsType, terms, patch) -> fst $ PM.singleton ngramsType (NgramsTablePatch $ fst $ PM.singleton terms patch)) <$> as
-  where
-    query :: PGS.Query
-    query = [sql| SELECT ngrams_type_id, terms, patch
-                  FROM node_story_archive_history
-                  JOIN ngrams ON ngrams.id = ngrams_id
-                  WHERE node_id = ?
-                  ORDER BY (version, node_story_archive_history.id) DESC |]
-
--- getNodesArchiveHistory :: PGS.Connection -> [NodeId] -> IO [(Int, NgramsStatePatch')]
-getNodesArchiveHistory :: PGS.Connection -> [NodeId] -> IO [(NodeId, (Map NgramsType [HashMap NgramsTerm NgramsPatch]))]
+-- /!\ This function is using an hard coded parameter
+-- which depends on the Ngrams List Flow
+-- Version > 5 is hard coded because by default
+-- first version of history of manual change is 6
+getNodesArchiveHistory :: PGS.Connection
+                       -> [NodeId]
+                       -> IO [(NodeId, (Map NgramsType [HashMap NgramsTerm NgramsPatch]))]
 getNodesArchiveHistory c nodesId = do
-  as <- runPGSQuery c query (PGS.Only $ Values fields nodesId) :: IO [(Int, TableNgrams.NgramsType, NgramsTerm, NgramsPatch)]
-  pure $ map (\(nId, ngramsType, terms, patch) -> (NodeId nId, Map.singleton ngramsType [HashMap.singleton terms patch])) as
+  as <- runPGSQuery c query (PGS.Only $ Values fields nodesId)
+          :: IO [(Int, TableNgrams.NgramsType, NgramsTerm, NgramsPatch)]
+  pure $ map (\(nId, ngramsType, terms, patch)
+               -> ( NodeId nId
+                  , Map.singleton ngramsType [HashMap.singleton terms patch]
+                  )
+             ) as
   where
     fields = [QualifiedIdentifier Nothing "int4"]
@@ -396,9 +378,6 @@ getNodesArchiveHistory c nodesId = do
                   ORDER BY (version, node_story_archive_history.id) DESC
             |]
 
--- Version > 5 is hard coded because by default
--- first version of history of manual change is 6
-
 ngramsIdQuery :: PGS.Query
 ngramsIdQuery = [sql| SELECT id FROM ngrams WHERE terms = ? |]
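
Net effect in this module: the per-node getNodeArchiveHistory is removed, and the batched getNodesArchiveHistory, which fetches the history of several nodes in one query and tags every patch with its NodeId, remains the only entry point. A minimal sketch of recovering the single-node view from the batched result, assuming an open PGS.Connection; the helper name and the merging step are illustrative and not part of the module:

-- Illustrative sketch only, not code from this commit.
import qualified Data.Map.Strict as Map

-- getNodesArchiveHistory returns one (NodeId, Map ...) pair per stored patch
-- row, so the rows of the node of interest are merged by concatenating the
-- patch lists per NgramsType.
singleNodeHistory :: PGS.Connection
                  -> NodeId
                  -> IO (Map NgramsType [HashMap NgramsTerm NgramsPatch])
singleNodeHistory c nodeId = do
  rows <- getNodesArchiveHistory c [nodeId]
  pure $ Map.unionsWith (<>) [ m | (nId, m) <- rows, nId == nodeId ]

The hunks below are from the module where getHistory calls this function.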
@@ -35,7 +35,7 @@ import Gargantext.Database.Query.Tree (NodeMode(Private), HasTreeError)
 import Gargantext.Database.Schema.Ngrams (NgramsType)
 import Gargantext.Prelude
 import Web.Internal.HttpApiData (ToHttpApiData, FromHttpApiData, parseUrlPiece, toUrlPiece)
 import qualified Data.List as List
 import qualified Data.Map as Map
 import qualified Data.Scientific as Scientific
 import qualified Data.Text as T
@@ -50,8 +50,8 @@ import qualified Prelude
 -- This parameter depends on the user choice
 data FlowSocialListWith = FlowSocialListWithPriority { fslw_priority :: FlowSocialListPriority }
                         | FlowSocialListWithLists    { fslw_lists :: [ListId] }
                         | NoList { makeList :: Bool }
                         deriving (Eq, Show, Generic)
 
 instance FromJSON FlowSocialListWith where
@@ -61,7 +61,7 @@ instance FromJSON FlowSocialListWith where
     case typ of
       "MyListsFirst"    -> pure $ FlowSocialListWithPriority { fslw_priority = MySelfFirst }
       "OtherListsFirst" -> pure $ FlowSocialListWithPriority { fslw_priority = OthersFirst }
       "SelectedLists"   -> pure $ FlowSocialListWithLists { fslw_lists = value }
       "NoList"          -> pure $ NoList True
       _                 -> pure $ FlowSocialListWithPriority { fslw_priority = MySelfFirst }
   parseJSON _ = mzero
@@ -113,9 +113,9 @@ flowSocialList :: ( HasNodeStory env err m
                -> FlowCont NgramsTerm FlowListScores
                -> m (FlowCont NgramsTerm FlowListScores)
 flowSocialList Nothing u = flowSocialList' MySelfFirst u
 flowSocialList (Just (FlowSocialListWithPriority p)) u = flowSocialList' p u
 flowSocialList (Just (FlowSocialListWithLists ls))   _ = getHistoryScores ls
 flowSocialList (Just (NoList _)) _u = panic "[G.C.T.L.Social] Should not be executed"
 
 flowSocialList' :: ( HasNodeStory env err m
                    , CmdM env err m
@@ -182,7 +182,7 @@ getHistory :: ( HasNodeStory env err m
            -> m (Map ListId (Map NgramsType [HashMap NgramsTerm NgramsPatch]))
 getHistory types listsId = do
   pool <- view connPool
   nsp <- liftBase $ withResource pool $ \c -> getNodesArchiveHistory c listsId
   pure $ Map.map (Map.filterWithKey (\k _ -> List.elem k types))
        $ Map.filterWithKey (\k _ -> List.elem k listsId)
        $ Map.fromListWith (Map.unionWith (<>)) nsp
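
getHistory above is where the batched query pays off: a single withResource round trip fetches the history for all requested lists, and the result is then filtered in memory by ngrams type and by list id. A rough sketch of a caller, with the constraint list shortened to the one visible above; NgramsTerms is assumed to be a constructor of NgramsType, and the function name and argument are placeholders:

-- Illustrative sketch only, not code from this commit.
fetchTermsHistory :: HasNodeStory env err m
                  => [ListId]
                  -> m (Map ListId (Map NgramsType [HashMap NgramsTerm NgramsPatch]))
fetchTermsHistory listIds = getHistory [NgramsTerms] listIds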