Commit 48cd6f84 authored by Karen Konou

Merge branch 'dev' into 706-dev-graph-parameters-display

parents c60b0372 4ceffa69
Pipeline #7630 passed with stages in 68 minutes
......@@ -29,12 +29,13 @@ import Gargantext.Database.Action.Flow (getOrMkRoot, getOrMkRootWithCorpus)
import Gargantext.Database.Admin.Trigger.Init (initFirstTriggers, initLastTriggers)
import Gargantext.Database.Admin.Types.Hyperdata (HyperdataCorpus)
import Gargantext.Database.Admin.Types.Node
import Gargantext.Database.Prelude (DBCmd, DBCmdWithEnv)
import Gargantext.Database.Prelude
import Gargantext.Database.Query.Table.Node (getOrMkList)
import Gargantext.Database.Query.Table.User (insertNewUsers, )
import Gargantext.Database.Query.Tree.Root (MkCorpusUser(MkCorpusUserMaster))
import Gargantext.Prelude
import Options.Applicative
import Gargantext.Core.Types.Individu (toUserHash)
initCLI :: InitArgs -> IO ()
......@@ -45,34 +46,36 @@ initCLI (InitArgs settingsPath) = do
putStrLn ("Enter master user (gargantua) _email_ :" :: Text)
email <- getLine
hashedUsers <- NE.fromList <$> mapM toUserHash (NewUser "gargantua" (cs email) (GargPassword $ cs password) : arbitraryNewUsers)
cfg <- readConfig settingsPath
let secret = _s_secret_key $ _gc_secrets cfg
let createUsers :: forall env. DBCmdWithEnv env BackendInternalError Int64
createUsers = insertNewUsers (NewUser "gargantua" (cs email) (GargPassword $ cs password)
NE.:| arbitraryNewUsers
)
let createUsers :: DBUpdate BackendInternalError Int64
createUsers = insertNewUsers hashedUsers
let
mkRoots :: forall env. DBCmdWithEnv env BackendInternalError [(UserId, RootId)]
mkRoots = mapM getOrMkRoot $ map UserName ("gargantua" : arbitraryUsername)
mkRoots :: DBUpdate BackendInternalError [(UserId, RootId)]
mkRoots = mapM (getOrMkRoot cfg) $ map UserName ("gargantua" : arbitraryUsername)
-- TODO create all users roots
let
initMaster :: forall env. DBCmdWithEnv env BackendInternalError (UserId, RootId, CorpusId, ListId)
initMaster :: DBUpdate BackendInternalError (UserId, RootId, CorpusId, ListId)
initMaster = do
(masterUserId, masterRootId, masterCorpusId)
<- getOrMkRootWithCorpus MkCorpusUserMaster
<- getOrMkRootWithCorpus cfg MkCorpusUserMaster
(Nothing :: Maybe HyperdataCorpus)
masterListId <- getOrMkList masterCorpusId masterUserId
_triggers <- initLastTriggers masterListId
pure (masterUserId, masterRootId, masterCorpusId, masterListId)
withDevEnv settingsPath $ \env -> do
_ <- runCmdDev env (initFirstTriggers secret :: DBCmd BackendInternalError [Int64])
_ <- runCmdDev env createUsers
x <- runCmdDev env initMaster
_ <- runCmdDev env mkRoots
x <- runCmdDev env $ runDBTx $ do
_ <- initFirstTriggers secret
_ <- createUsers
x' <- initMaster
_ <- mkRoots
pure x'
putStrLn (show x :: Text)
initCmd :: HasCallStack => Mod CommandFields CLI
......
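The hunk above is the first instance of a pattern repeated throughout this merge: the separate `runCmdDev` calls are collapsed into a single `runDBTx` transaction, so trigger initialisation, user creation and root/corpus creation either all commit or all roll back. A minimal sketch of the composed write action, reusing the local bindings from `initCLI` shown in the diff (`initAllSteps` is a hypothetical name, so this is an illustration rather than a standalone definition):

-- Hedged sketch: every step runs inside one transaction; a failure in any
-- of them rolls the whole initialisation back.
initAllSteps :: DBUpdate BackendInternalError (UserId, RootId, CorpusId, ListId)
initAllSteps = do
  _ <- initFirstTriggers secret   -- secret key read from the settings file
  _ <- createUsers                -- insertNewUsers hashedUsers
  x <- initMaster                 -- master user, root, corpus and list
  _ <- mkRoots
  pure x

-- Run as in the diff:
--   withDevEnv settingsPath $ \env -> runCmdDev env (runDBTx initAllSteps)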
{-# LANGUAGE TemplateHaskell #-}
{-|
Module : CLI.Server
Description : Gargantext Server
......@@ -15,16 +16,18 @@ module CLI.Server where
import CLI.Parsers (settings_p)
import CLI.Types
import CLI.Worker (runAllWorkers)
import Control.Concurrent.Async qualified as Async
import Control.Monad.IO.Class
import Data.Text qualified as T
import Data.Version (showVersion)
import Gargantext.API.Admin.EnvTypes (Mode(..))
import GHC.IO.Encoding (setLocaleEncoding, utf8)
import Gargantext.API (startGargantext)
import Gargantext.API.Admin.EnvTypes (Mode(..))
import Gargantext.Core.Config
import Gargantext.Core.Config.Types (_SettingsFile)
import Gargantext.Core.Config.Utils (readConfig)
import Gargantext.Prelude
import Gargantext.System.Logging
import GHC.IO.Encoding (setLocaleEncoding, utf8)
import Options.Applicative
import Paths_gargantext qualified as PG -- cabal magic build module
......@@ -41,8 +44,15 @@ serverCLI (CLIS_start serverArgs) = withServerCLILogger serverArgs $ \ioLogger -
serverCLI (CLIS_startAll serverArgs@(ServerArgs { .. })) = withServerCLILogger serverArgs $ \ioLogger -> do
withAsync (startServerCLI ioLogger serverArgs) $ \aServer -> do
runAllWorkers ioLogger server_toml
wait aServer
res <- Async.race (runAllWorkers ioLogger server_toml) (waitCatch aServer)
case res of
Left () -> pure ()
Right (Left ex)
-> do
$(logLoc) ioLogger ERROR $ "Exception raised when running the server:\n\n" <> T.pack (displayException ex)
exitFailure
Right (Right ())
-> pure ()
serverCLI (CLIS_version) = withLogger (LogConfig Nothing DEBUG) $ \ioLogger -> do
-- Sets the locale to avoid encoding issues like in #284.
......
......@@ -42,31 +42,33 @@ workerCLI (CLIW_run (WorkerArgs { .. })) = do
$ List.take 72
$ List.cycle ["_"]) :: Prelude.String)
___
putText "GarganText worker"
putText $ "worker_name: " <> worker_name
putText $ "worker toml: " <> T.pack (_SettingsFile worker_toml)
___
withWorkerEnv worker_toml $ \env -> do
let ws = env ^. hasConfig . gc_worker
case findDefinitionByName ws worker_name of
Nothing -> do
let workerNames = _wdName <$> (_wsDefinitions ws)
let availableWorkers = T.intercalate ", " workerNames
putText $ "Worker definition not found! Available workers: " <> availableWorkers
Just wd -> do
putText $ "Starting worker '" <> worker_name <> "'"
putText $ "gc config: " <> show (env ^. hasConfig)
putText $ "Worker settings: " <> show ws
___
if worker_run_single then
withPGMQWorkerSingleCtrlC env wd $ \a _state -> do
wait a
else
withPGMQWorkerCtrlC env wd $ \a _state -> do
-- _ <- runReaderT (sendJob Ping) env
wait a
let log_cfg = env ^. hasConfig . gc_logging
withLogger log_cfg $ \ioLogger -> do
___
logMsg ioLogger INFO "GarganText worker"
logMsg ioLogger INFO $ "worker_name: " <> T.unpack worker_name
logMsg ioLogger INFO $ "worker toml: " <> _SettingsFile worker_toml
___
let ws = env ^. hasConfig . gc_worker
case findDefinitionByName ws worker_name of
Nothing -> do
let workerNames = _wdName <$> (_wsDefinitions ws)
let availableWorkers = T.intercalate ", " workerNames
putText $ "Worker definition not found! Available workers: " <> availableWorkers
Just wd -> do
logMsg ioLogger INFO $ "Starting worker '" <> T.unpack worker_name <> "'"
logMsg ioLogger DEBUG $ "gc config: " <> show (env ^. hasConfig)
logMsg ioLogger DEBUG $ "Worker settings: " <> show ws
___
if worker_run_single then
withPGMQWorkerSingleCtrlC env wd $ \a _state -> do
wait a
else
withPGMQWorkerCtrlC env wd $ \a _state -> do
-- _ <- runReaderT (sendJob Ping) env
wait a
workerCLI (CLIW_runAll (WorkerAllArgs { .. })) = withWorkerEnv worker_toml $ \env -> do
let log_cfg = env ^. hasConfig . gc_logging
withLogger log_cfg $ \ioLogger -> runAllWorkers ioLogger worker_toml
......@@ -124,6 +126,15 @@ stats_p = fmap CLIW_stats $ WorkerStatsArgs
<$> settings_p
-- | Runs all the workers concurrently.
-- /NOTE/: Be very careful, this IS a BLOCKING operation, despite its use
-- of 'forConcurrently_' under the hood. 'forConcurrently_' does run the inner
-- actions in parallel and discards their results, but those actions still
-- have to terminate!
-- That is /NOT/ the case for this function, which is meant to start the
-- infinite worker loops, so make sure the calling code uses it properly
-- (for example together with 'race' or a similar function from async).
runAllWorkers :: Logger IO -> SettingsFile -> IO ()
runAllWorkers ioLogger worker_toml = do
cfg <- readConfig worker_toml
......
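Given the warning above, a hedged usage sketch mirroring the `serverCLI (CLIS_startAll ...)` change earlier in this diff (assuming the same `Control.Concurrent.Async` import as in CLI/Server.hs; `startAll` is a hypothetical name): pair `runAllWorkers` with the other long-running action via `race`, so whichever side finishes or fails first tears the other one down.

startAll :: Logger IO -> ServerArgs -> IO ()
startAll ioLogger serverArgs@(ServerArgs { .. }) =
  Async.withAsync (startServerCLI ioLogger serverArgs) $ \aServer -> do
    -- Left: the worker loops returned (they normally never do);
    -- Right: the server terminated, possibly with an exception.
    res <- Async.race (runAllWorkers ioLogger server_toml) (Async.waitCatch aServer)
    case res of
      Left ()          -> pure ()
      Right (Left ex)  -> logMsg ioLogger ERROR (displayException ex) >> exitFailure
      Right (Right ()) -> pure ()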
......@@ -16,8 +16,8 @@ fi
# with the `sha256sum` result calculated on the `cabal.project` and
# `cabal.project.freeze`. This ensures the files stay deterministic so that CI
# cache can kick in.
expected_cabal_project_hash="963418e37a17d4bb67d4b885613144b36d290f612eea80355e82abc7e76b450c"
expected_cabal_project_freeze_hash="cd52143d3a9d285360b59c6371d3e258552c1bc115bd612024db3de1f7593ff7"
expected_cabal_project_hash="7d021a8e3d0b68421e26bdfe4e1da82f6ea26b6c420fc984b3c30c14bc5fea98"
expected_cabal_project_freeze_hash="553b98aadb35506a305bd740cdd71f5fadc1e6d55d10f91cf39daa6735a63d78"
cabal --store-dir=$STORE_DIR v2-build --dry-run
......
......@@ -87,7 +87,7 @@ source-repository-package
source-repository-package
type: git
location: https://gitlab.iscpif.fr/gargantext/gargantext-graph.git
tag: a08ceed71b297a811f90cb86c3c61dc0b153036b
tag: 316d48b6a89593faaf1f2102e9714cea7e416e56
subdir: gargantext-graph-core
-- Support for GHC 9.6.x
......@@ -99,7 +99,7 @@ source-repository-package
source-repository-package
type: git
location: https://gitlab.iscpif.fr/gargantext/haskell-igraph.git
tag: 9f8a2f4a014539826a4eab3215cc70c0813f20cb
tag: 05e62da3aa466b7d0608d4918b030dc024119b32
source-repository-package
type: git
......
......@@ -25,8 +25,6 @@ constraints: any.Boolean ==0.2.4,
any.StateVar ==1.2.2,
any.accelerate ==1.3.0.0,
accelerate +bounds-checks -debug -internal-checks -nofib -unsafe-checks,
any.accelerate-arithmetic ==1.0.0.1,
any.accelerate-utility ==1.0.0.1,
any.adjunctions ==4.4.2,
any.aeson ==2.1.2.1,
aeson -cffi +ordered-keymap,
......@@ -79,6 +77,8 @@ constraints: any.Boolean ==0.2.4,
any.blaze-builder ==0.4.2.3,
any.blaze-html ==0.9.2.0,
any.blaze-markup ==0.8.3.0,
any.blaze-textual ==0.2.3.1,
blaze-textual -developer -integer-simple +native,
any.boolexpr ==0.3,
any.boring ==0.2.2,
boring +tagged,
......@@ -103,6 +103,7 @@ constraints: any.Boolean ==0.2.4,
cassava-conduit +small_base,
any.cborg ==0.2.10.0,
cborg +optimize-gmp,
any.cborg-json ==0.2.6.0,
any.cereal ==0.5.8.3,
cereal -bytestring-builder,
any.charset ==0.3.11,
......@@ -161,6 +162,8 @@ constraints: any.Boolean ==0.2.4,
any.dense-linear-algebra ==0.1.0.0,
any.digest ==0.0.2.1,
digest -have_arm64_crc32c -have_builtin_prefetch -have_mm_prefetch -have_sse42 -have_strong_getauxval -have_weak_getauxval +pkg-config,
any.direct-sqlite ==2.3.29,
direct-sqlite +dbstat +fulltextsearch +haveusleep +json1 -mathfunctions -systemlib +urifilenames,
any.directory ==1.3.8.5,
any.discrimination ==0.5,
any.distributive ==0.6.2.1,
......@@ -170,7 +173,6 @@ constraints: any.Boolean ==0.2.4,
any.double-conversion ==2.0.5.0,
double-conversion -developer +embedded_double_conversion,
any.easy-file ==0.2.5,
any.eigen ==3.3.7.0,
any.either ==5.0.2,
any.ekg-core ==0.1.1.8,
any.ekg-json ==0.1.1.1,
......@@ -223,7 +225,7 @@ constraints: any.Boolean ==0.2.4,
any.haskell-bee ==0.1.0.0,
any.haskell-bee-pgmq ==0.1.0.0,
any.haskell-bee-tests ==0.1.0.0,
any.haskell-igraph ==0.10.4,
any.haskell-igraph ==0.10.4.1,
any.haskell-lexer ==1.1.2,
any.haskell-pgmq ==0.1.0.0,
any.haskell-src-exts ==1.23.1,
......@@ -300,7 +302,6 @@ constraints: any.Boolean ==0.2.4,
any.linear ==1.23,
linear -herbie +template-haskell,
any.list-t ==1.0.5.7,
any.lockfree-queue ==0.2.4,
any.logict ==0.8.2.0,
any.loop ==0.3.0,
any.lzma ==0.0.1.1,
......@@ -460,6 +461,7 @@ constraints: any.Boolean ==0.2.4,
any.servant-blaze ==0.9.1,
any.servant-client ==0.20.2,
any.servant-client-core ==0.20.2,
any.servant-conduit ==0.16.1,
any.servant-ekg ==0.3.1,
any.servant-openapi3 ==2.0.1.6,
any.servant-routes ==0.1.0.0,
......@@ -487,6 +489,7 @@ constraints: any.Boolean ==0.2.4,
any.split ==0.2.5,
any.splitmix ==0.1.1,
splitmix -optimised-mixer,
any.sqlite-simple ==0.4.19.0,
any.statistics ==0.16.3.0,
statistics -benchpapi,
any.stemmer ==0.5.2,
......@@ -571,7 +574,6 @@ constraints: any.Boolean ==0.2.4,
any.uri-encode ==1.5.0.7,
uri-encode +network-uri -tools,
any.utf8-string ==1.0.2,
any.utility-ht ==0.0.17.2,
any.uuid ==1.3.16,
any.uuid-types ==1.0.6,
any.validity ==0.12.1.0,
......
......@@ -56,6 +56,8 @@ data-files:
test-data/test_config.toml
test-data/issue-381/Termes_A_Ajouter_T4SC_Intellixir.tsv
test-data/issue-381/Termes_A_Ajouter_T4SC_Intellixir12.csv
test-data/issue-380/corpus.tsv
test-data/issue-380/malformed_row.tsv
.clippy.dhall
-- common options
......@@ -136,6 +138,8 @@ library
Gargantext.API.Node
Gargantext.API.Node.Contact.Types
Gargantext.API.Node.Corpus.Annuaire
Gargantext.API.Node.Corpus.Export.Types
Gargantext.API.Node.Corpus.Export.Utils
Gargantext.API.Node.Corpus.New
Gargantext.API.Node.Corpus.New.Types
Gargantext.API.Node.Corpus.Types
......@@ -224,6 +228,7 @@ library
Gargantext.Core.Text.Corpus.API.OpenAlex
Gargantext.Core.Text.Corpus.API.Pubmed
Gargantext.Core.Text.Corpus.Parsers
Gargantext.Core.Text.Corpus.Parsers.Types
Gargantext.Core.Text.Corpus.Parsers.Date
Gargantext.Core.Text.Corpus.Parsers.TSV
Gargantext.Core.Text.Corpus.Query
......@@ -290,21 +295,26 @@ library
Gargantext.Database.Admin.Types.Hyperdata.Document
Gargantext.Database.Admin.Types.Hyperdata.Folder
Gargantext.Database.Admin.Types.Node
Gargantext.Database.Class
Gargantext.Database.Prelude
Gargantext.Database.Query.Facet
Gargantext.Database.Query.Table.Ngrams
Gargantext.Database.Query.Table.NgramsPostag
Gargantext.Database.Query.Table.Node
Gargantext.Database.Query.Table.Node.Error
Gargantext.Database.Query.Table.NodeNode
Gargantext.Database.Query.Table.Node.UpdateOpaleye
Gargantext.Database.Query.Table.Node.User
Gargantext.Database.Query.Table.NodeContext
Gargantext.Database.Query.Table.NodeNode
Gargantext.Database.Query.Table.User
Gargantext.Database.Query.Tree.Root
Gargantext.Database.Schema.Context
Gargantext.Database.Schema.Ngrams
Gargantext.Database.Schema.Node
Gargantext.Database.Schema.Prelude
Gargantext.Database.Schema.User
Gargantext.Database.Transactional
Gargantext.Database.Transactional.Example
Gargantext.Defaults
Gargantext.MicroServices.ReverseProxy
Gargantext.Orphans
......@@ -347,7 +357,6 @@ library
Gargantext.API.Ngrams.NgramsTree
Gargantext.API.Node.Contact
Gargantext.API.Node.Corpus.Export
Gargantext.API.Node.Corpus.Export.Types
Gargantext.API.Node.Corpus.Searx
Gargantext.API.Node.Corpus.Subcorpus
Gargantext.API.Node.Document.Export
......@@ -473,7 +482,6 @@ library
Gargantext.Database.Query.Table.Node.Document.Insert
Gargantext.Database.Query.Table.Node.Select
Gargantext.Database.Query.Table.Node.Update
Gargantext.Database.Query.Table.NodeContext
Gargantext.Database.Query.Table.NodeContext_NodeContext
Gargantext.Database.Query.Table.NodeNgrams
Gargantext.Database.Query.Tree
......@@ -484,7 +492,6 @@ library
Gargantext.Database.Schema.NodeContext_NodeContext
Gargantext.Database.Schema.NodeNgrams
Gargantext.Database.Schema.NodeNode
Gargantext.Database.Schema.Prelude
Gargantext.Database.Types
Gargantext.Utils.Aeson
Gargantext.Utils.Servant
......@@ -538,14 +545,14 @@ library
, fgl ^>= 5.8.0.0
, filepath ^>= 1.4.2.2
, fmt
, formatting ^>= 7.2.0
, free >= 0.5.0
, fullstop ^>= 0.1.4
, gargantext-graph-core >= 0.2.0.0
, gargantext-prelude
, graphviz ^>= 2999.20.1.0
, haskell-bee
, haskell-bee-pgmq
, haskell-igraph ^>= 0.10.4
, haskell-igraph ^>= 0.10.4.1
, haskell-pgmq >= 0.1.0.0 && < 0.2
, haskell-throttle
, hlcm ^>= 0.2.2
......@@ -557,8 +564,8 @@ library
, http-client-tls >= 0.3.6.1 && < 0.4
, http-conduit >= 2.3.8 && < 2.3.9
, http-media ^>= 0.8.0.0
, http-types ^>= 0.12.3
, http-reverse-proxy >= 0.6.1.0
, http-types ^>= 0.12.3
, ini ^>= 0.4.1
, insert-ordered-containers ^>= 0.2.5.1
, iso639 ^>= 0.1.0.3
......@@ -611,8 +618,8 @@ library
, servant-client-core >= 0.20 && < 0.21
, servant-conduit >= 0.15 && < 0.17
, servant-ekg ^>= 0.3.1
, servant-routes < 0.2
, servant-openapi3 >= 2.0.1.6
, servant-routes < 0.2
, servant-server >= 0.18.3 && < 0.21
, servant-swagger ^>= 1.2
, servant-swagger-ui ^>= 0.3.5.3.5.0
......@@ -620,10 +627,10 @@ library
, servant-websockets >= 2.0.0 && < 2.1
, servant-xml-conduit ^>= 0.1.0.4
, shelly >= 1.9 && < 2
, singletons ^>= 3.0.2
, singletons-th >= 3.1 && < 3.3
, smtp-mail >= 0.3.0.0
, split >= 0.2.3.4
, sqlite-simple >= 0.4.19 && < 0.5
, stemmer == 0.5.2
, stm >= 2.5.1.0 && < 2.6
, stm-containers >= 1.2.0.3 && < 1.3
......@@ -635,13 +642,13 @@ library
, text ^>= 2.0.2
, text-metrics ^>= 0.3.2
, time ^>= 1.12.2
, toml-parser >= 2.0.1.0 && < 3
, transformers
, transformers-base ^>= 0.4.6
, tree-diff
, toml-parser >= 2.0.1.0 && < 3
, tuple ^>= 0.3.0.2
, unordered-containers ^>= 0.2.16.0
, unicode-collation >= 0.1.3.5
, unordered-containers ^>= 0.2.16.0
-- needed for Worker / System.Posix.Signals
, unix >= 2.7.3 && < 2.9
, uri-encode ^>= 1.5.0.7
......@@ -699,6 +706,7 @@ executable gargantext
, gargantext
, gargantext-prelude
, haskell-bee
, lens >= 5.2.2 && < 5.3
, MonadRandom ^>= 0.6
, optparse-applicative
, postgresql-simple >= 0.6.4 && <= 0.7.0.0
......@@ -749,12 +757,15 @@ common commonTestDependencies
, monad-control >= 1.0.3 && < 1.1
, mtl >= 2.2.2 && < 2.4
, network-uri
, opaleye
, parsec ^>= 3.1.16.1
, patches-class ^>= 0.1.0.1
, patches-map ^>= 0.1.0.1
, postgres-options >= 0.2 && < 0.3
, postgresql-simple >= 0.6.4 && <= 0.7.0.0
, product-profunctors
, quickcheck-instances ^>= 0.3.25.2
, random
, raw-strings-qq
, resource-pool >= 0.4.0.0 && < 0.5
, safe-exceptions >= 0.1.7.4 && < 0.2
......@@ -762,6 +773,7 @@ common commonTestDependencies
, servant-client >= 0.20 && < 0.21
, servant-client-core >= 0.20 && < 0.21
, servant-conduit >= 0.15 && < 0.17
, servant-server >= 0.20.1 && < 0.21
, shelly >= 1.9 && < 2
, stm >= 2.5.1.0 && < 2.6
, streaming-commons
......@@ -832,6 +844,7 @@ test-suite garg-test-tasty
Test.Database.Operations.NodeStory
Test.Database.Operations.PublishNode
Test.Database.Setup
Test.Database.Transactions
Test.Database.Types
Test.Graph.Clustering
Test.Graph.Distance
......@@ -847,6 +860,7 @@ test-suite garg-test-tasty
Test.Ngrams.Terms
Test.Offline.Errors
Test.Offline.JSON
Test.Offline.Ngrams
Test.Offline.Phylo
Test.Offline.Stemming.Lancaster
Test.Parsers.Date
......@@ -872,12 +886,15 @@ test-suite garg-test-hspec
main-is: drivers/hspec/Main.hs
build-depends:
process ^>= 1.6.18.0
, servant >= 0.20.1 && < 0.21
, sqlite-simple >= 0.4.19 && < 0.5
, unix >= 2.7.3 && < 2.9
other-modules:
Paths_gargantext
Test.API
Test.API.Authentication
Test.API.Errors
Test.API.Export
Test.API.GraphQL
Test.API.Notifications
Test.API.Private
......@@ -896,6 +913,7 @@ test-suite garg-test-hspec
Test.Database.Operations.NodeStory
Test.Database.Operations.PublishNode
Test.Database.Setup
Test.Database.Transactions
Test.Database.Types
Test.Instances
Test.Server.ReverseProxy
......
......@@ -61,12 +61,10 @@ import Network.Wai (Middleware, Request, requestHeaders)
import Network.Wai.Handler.Warp hiding (defaultSettings)
import Network.Wai.Middleware.Cors
import Network.Wai.Middleware.RequestLogger (logStdout)
-- import Paths_gargantext (getDataDir)
import Servant hiding (Header)
import Servant.Client.Core.BaseUrl (showBaseUrl)
import System.Clock qualified as Clock
import System.Cron.Schedule qualified as Cron
-- import System.FilePath
-- | startGargantext takes as parameters port number and Toml file.
startGargantext :: Mode -> PortNumber -> SettingsFile -> IO ()
......
......@@ -24,7 +24,6 @@ And you have the main viz
-}
{-# LANGUAGE MonoLocalBinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
......@@ -62,7 +61,7 @@ import Gargantext.Core.Types.Individu (User(..), Username, GargPassword(..))
import Gargantext.Core.Worker.Jobs.Types qualified as Jobs
import Gargantext.Database.Action.User.New (guessUserName)
import Gargantext.Database.Admin.Types.Node (NodeId(..), UserId)
import Gargantext.Database.Prelude (Cmd, IsDBEnvExtra, IsDBCmd)
import Gargantext.Database.Prelude
import Gargantext.Database.Query.Table.User
import Gargantext.Database.Query.Tree (isDescendantOf, isIn)
import Gargantext.Database.Query.Tree.Root (getRoot)
......@@ -100,14 +99,14 @@ checkAuthRequest couldBeEmail (GargPassword p) = do
Nothing -> couldBeEmail -- we are sure this is not an email
Just (u,_) -> u -- this was an email in fact
candidate <- head <$> getUsersWith usrname
candidate <- head <$> runDBQuery (getUsersWith usrname)
case candidate of
Nothing -> pure InvalidUser
Just (UserLight { userLight_password = GargPassword h, .. }) ->
case Auth.checkPassword (Auth.mkPassword p) (Auth.PasswordHash h) of
Auth.PasswordCheckFail -> pure InvalidPassword
Auth.PasswordCheckSuccess -> do
muId <- head <$> getRoot (UserName usrname)
muId <- head <$> runDBQuery (getRoot (UserName usrname))
case _node_id <$> muId of
Nothing -> pure InvalidUser
Just nodeId -> do
......@@ -144,12 +143,13 @@ withAccessM :: ( IsDBCmd env err m )
-> m a
-> m a
withAccessM (AuthenticatedUser nodeId _userId) (PathNode id) m = do
d <- id `isDescendantOf` nodeId
d <- runDBQuery (id `isDescendantOf` nodeId)
if d then m else m -- serverError err401
withAccessM (AuthenticatedUser nodeId _userId) (PathNodeNode cId docId) m = do
_a <- isIn cId docId -- TODO use one query for all ?
_d <- cId `isDescendantOf` nodeId
runDBQuery $ do
void $ isIn cId docId -- TODO use one query for all ?
void $ (cId `isDescendantOf` nodeId)
if True -- a && d
then m
else m -- serverError err401
......@@ -249,7 +249,7 @@ forgotPasswordGet (Just uuid) = do
Nothing -> throwError $ _ServerError # err404 { errBody = "Not found" }
Just uuid' -> do
-- fetch user
us <- getUsersWithForgotPasswordUUID uuid'
us <- runDBQuery $ getUsersWithForgotPasswordUUID uuid'
case us of
[u] -> forgotPasswordGetUser u
_ -> throwError $ _ServerError # err404 { errBody = "Not found" }
......@@ -266,12 +266,10 @@ forgotPasswordGetUser (UserLight { .. }) = do
hashed <- liftBase $ Auth.hashPassword $ Auth.mkPassword password
let hashed' = Auth.unPasswordHash hashed
let userPassword = UserLight { userLight_password = GargPassword hashed', .. }
_ <- updateUserPassword userPassword
-- display this briefly in the html
-- clear the uuid so that the page can't be refreshed
_ <- updateUserForgotPasswordUUID $ UserLight { userLight_forgot_password_uuid = Nothing, .. }
runDBTx $ do
void $ updateUserPassword userPassword
void $ updateUserForgotPasswordUUID $ UserLight { userLight_forgot_password_uuid = Nothing, .. }
pure $ ForgotPasswordGet password
......@@ -286,7 +284,7 @@ forgotUserPassword (UserLight { .. }) = do
let userUUID = UserLight { userLight_forgot_password_uuid = Just $ toText uuid, .. }
-- save user with that uuid
_ <- updateUserForgotPasswordUUID userUUID
_ <- runDBTx $ updateUserForgotPasswordUUID userUUID
-- send email with uuid link
cfg <- view $ mailSettings
......@@ -304,7 +302,7 @@ generateForgotPasswordUUID :: (IsDBEnvExtra env)
=> Cmd env err UUID
generateForgotPasswordUUID = do
uuid <- liftBase $ nextRandom
us <- getUsersWithForgotPasswordUUID uuid
us <- runDBQuery $ getUsersWithForgotPasswordUUID uuid
case us of
[] -> pure uuid
_ -> generateForgotPasswordUUID
......
......@@ -80,7 +80,7 @@ modeToLoggingLevels = \case
data Env = Env
{ _env_logger :: ~(Logger (GargM Env BackendInternalError))
, _env_pool :: ~(Pool Connection)
, _env_nodeStory :: ~NodeStoryEnv
, _env_nodeStory :: ~(NodeStoryEnv BackendInternalError)
, _env_manager :: ~Manager
, _env_config :: ~GargConfig
, _env_dispatcher :: ~Dispatcher
......@@ -96,15 +96,9 @@ instance HasConfig Env where
instance HasConnectionPool Env where
connPool = env_pool
instance HasNodeStoryEnv Env where
instance HasNodeStoryEnv Env BackendInternalError where
hasNodeStory = env_nodeStory
instance HasNodeStoryImmediateSaver Env where
hasNodeStoryImmediateSaver = hasNodeStory . nse_saver_immediate
instance HasNodeArchiveStoryImmediateSaver Env where
hasNodeArchiveStoryImmediateSaver = hasNodeStory . nse_archive_saver_immediate
instance HasJWTSettings Env where
jwtSettings = env_jwt_settings
......@@ -152,7 +146,7 @@ data DevEnv = DevEnv
, _dev_env_manager :: ~Manager
, _dev_env_logger :: !(Logger (GargM DevEnv BackendInternalError))
, _dev_env_pool :: !(Pool Connection)
, _dev_env_nodeStory :: !NodeStoryEnv
, _dev_env_nodeStory :: !(NodeStoryEnv BackendInternalError)
}
makeLenses ''DevEnv
......@@ -187,6 +181,8 @@ instance MonadJobStatus (GargM DevEnv err) where
markFailed _ _ = pure ()
emitWarning _ _ = pure ()
addMoreSteps _ _ = pure ()
instance HasConfig DevEnv where
......@@ -196,15 +192,9 @@ instance HasConnectionPool DevEnv where
connPool = dev_env_pool
instance HasNodeStoryEnv DevEnv where
instance HasNodeStoryEnv DevEnv BackendInternalError where
hasNodeStory = dev_env_nodeStory
instance HasNodeStoryImmediateSaver DevEnv where
hasNodeStoryImmediateSaver = hasNodeStory . nse_saver_immediate
instance HasNodeArchiveStoryImmediateSaver DevEnv where
hasNodeArchiveStoryImmediateSaver = hasNodeStory . nse_archive_saver_immediate
instance HasMail DevEnv where
mailSettings = dev_env_config . gc_mail_config
......
......@@ -27,10 +27,10 @@ import Database.PostgreSQL.Simple (Connection, connect, close, ConnectInfo)
import Gargantext.API.Admin.EnvTypes (Env(..))
import Gargantext.API.Errors.Types (BackendInternalError)
import Gargantext.API.Prelude (GargM)
import Gargantext.Core.Notifications.Dispatcher qualified as D
import Gargantext.Core.Config (GargConfig(..))
import Gargantext.Core.Config.Types (jwtSettings)
import Gargantext.Core.NodeStory (fromDBNodeStoryEnv)
import Gargantext.Core.NodeStory (mkNodeStoryEnv)
import Gargantext.Core.Notifications.Dispatcher qualified as D
import Gargantext.Prelude
import Gargantext.System.Logging (Logger)
import Network.HTTP.Client.TLS (newTlsManager)
......@@ -150,7 +150,7 @@ newEnv logger config dispatcher = do
-- putStrLn ("Overrides: " <> show prios :: Text)
-- putStrLn ("New priorities: " <> show prios' :: Text)
!pool <- newPool $ _gc_database_config config
!nodeStory_env <- fromDBNodeStoryEnv pool
let !nodeStory_env = mkNodeStoryEnv
-- secret <- Jobs.genSecret
-- let jobs_settings = (Jobs.defaultJobSettings 1 secret)
......
......@@ -47,25 +47,25 @@ import Gargantext.API.Errors (BackendInternalError)
import Gargantext.API.Errors.Types (AccessPolicyErrorReason(..))
import Gargantext.Core.Config (GargConfig(..), HasConfig(hasConfig))
import Gargantext.Core.Config.Types (SecretsConfig(..))
import Gargantext.Core.Types.Individu (User(UserName))
import Gargantext.Core.Types (NodeId, UserId)
import Gargantext.Database.Prelude (DBCmd)
import Gargantext.Database.Query.Table.Node.Error (HasNodeError)
import Gargantext.Core.Types.Individu (User(UserName))
import Gargantext.Database.Prelude
import Gargantext.Database.Query.Table.Node (getNode)
import Gargantext.Database.Query.Table.Node.Error (HasNodeError)
import Gargantext.Database.Query.Table.NodeNode
import Gargantext.Database.Query.Tree (isDescendantOf, isOwnedBy, isSharedWith, lookupPublishPolicy)
import Gargantext.Database.Query.Tree.Root (getRootId)
import Gargantext.Database.Schema.Node (node_user_id)
import Gargantext.Prelude
import Servant (HasServer(..), ServerT)
import Servant.API.Routes (HasRoutes(getRoutes))
import Servant.Auth.Server.Internal.AddSetCookie (AddSetCookieApi, AddSetCookies(..), Nat(S))
import Servant.Client.Core (HasClient(..), Client)
import Servant.Ekg (HasEndpoint(..))
import Servant (HasServer(..), ServerT)
import Servant.OpenApi qualified as OpenAPI
import Servant.Server.Internal.Delayed (addParameterCheck)
import Servant.Server.Internal.DelayedIO (DelayedIO(..))
import Servant.Swagger qualified as Swagger
import Servant.OpenApi qualified as OpenAPI
-------------------------------------------------------------------------------
-- Types
......@@ -156,41 +156,43 @@ accessPolicyManager = AccessPolicyManager (\ur ac -> interpretPolicy ur ac)
check' :: HasNodeError err => AuthenticatedUser -> AccessCheck -> DBCmd err AccessResult
check' (AuthenticatedUser loggedUserNodeId loggedUserUserId) = \case
AC_always_deny
-> pure $ Deny invalidUserPermissions
AC_always_allow
-> pure Allow
AC_user_node requestedNodeId
-> do ownedByMe <- requestedNodeId `isOwnedBy` loggedUserUserId
enforce invalidUserPermissions $ (loggedUserNodeId == requestedNodeId || ownedByMe)
AC_user requestedUserId
-> enforce invalidUserPermissions $ (loggedUserUserId == requestedUserId)
AC_master_user _requestedNodeId
-> do
masterUsername <- _s_master_user . _gc_secrets <$> view hasConfig
masterNodeId <- getRootId (UserName masterUsername)
enforce invalidUserPermissions $ masterNodeId == loggedUserNodeId
AC_node_descendant nodeId
-> enforce nodeNotDescendant =<< nodeId `isDescendantOf` loggedUserNodeId
AC_node_shared nodeId
-> enforce nodeNotShared =<< nodeId `isSharedWith` loggedUserNodeId
AC_node_published_read nodeId
-> enforce nodeNotShared =<< isNodeReadOnly nodeId
AC_node_published_edit nodeId
-> do
mb_pp <- lookupPublishPolicy nodeId
targetNode <- getNode nodeId
let allowedOrNot = do
case mb_pp of
Nothing -> pure Allow
Just NPP_publish_no_edits_allowed
-> throwError not_editable
Just NPP_publish_edits_only_owner_or_super
-> enforce (nodeNotShared' not_editable) (targetNode ^. node_user_id == loggedUserUserId)
case allowedOrNot of
Left err -> enforce (nodeNotShared' err) False
Right _ -> pure Allow
check' (AuthenticatedUser loggedUserNodeId loggedUserUserId) c = do
cfg <- view hasConfig
runDBQuery $ case c of
AC_always_deny
-> pure $ Deny invalidUserPermissions
AC_always_allow
-> pure Allow
AC_user_node requestedNodeId
-> do ownedByMe <- requestedNodeId `isOwnedBy` loggedUserUserId
enforce invalidUserPermissions $ (loggedUserNodeId == requestedNodeId || ownedByMe)
AC_user requestedUserId
-> enforce invalidUserPermissions $ (loggedUserUserId == requestedUserId)
AC_master_user _requestedNodeId
-> do
let masterUsername = _s_master_user . _gc_secrets $ cfg
masterNodeId <- getRootId (UserName masterUsername)
enforce invalidUserPermissions $ masterNodeId == loggedUserNodeId
AC_node_descendant nodeId
-> enforce nodeNotDescendant =<< nodeId `isDescendantOf` loggedUserNodeId
AC_node_shared nodeId
-> enforce nodeNotShared =<< nodeId `isSharedWith` loggedUserNodeId
AC_node_published_read nodeId
-> enforce nodeNotShared =<< isNodeReadOnly nodeId
AC_node_published_edit nodeId
-> do
mb_pp <- lookupPublishPolicy nodeId
targetNode <- getNode nodeId
let allowedOrNot = do
case mb_pp of
Nothing -> pure Allow
Just NPP_publish_no_edits_allowed
-> throwError not_editable
Just NPP_publish_edits_only_owner_or_super
-> enforce (nodeNotShared' not_editable) (targetNode ^. node_user_id == loggedUserUserId)
case allowedOrNot of
Left err -> enforce (nodeNotShared' err) False
Right _ -> pure Allow
-------------------------------------------------------------------------------
-- Errors
......
......@@ -21,9 +21,9 @@ import Gargantext.API.Admin.Auth.Types (PathId(..), AuthenticatedUser)
import Gargantext.API.Prelude (IsGargServer)
import Gargantext.API.Routes.Named.Context qualified as Named
import Gargantext.Database.Admin.Types.Node (ContextId, contextId2NodeId)
import Gargantext.Database.Prelude (JSONB)
import Gargantext.Database.Prelude
import Gargantext.Database.Query.Table.Context (getContextWith)
import Gargantext.Prelude
import Servant.Server.Generic (AsServerT)
-------------------------------------------------------------------
......@@ -40,4 +40,4 @@ contextAPI :: ( IsGargServer env err m
contextAPI p uId id' =
withNamedAccess uId (PathNode $ contextId2NodeId id') contextAPI'
where
contextAPI' = Named.ContextAPI $ getContextWith id' p
contextAPI' = Named.ContextAPI $ runDBQuery (getContextWith id' p)
......@@ -23,7 +23,7 @@ import Gargantext.API.Prelude ( GargM )
import Gargantext.Core.Config (_gc_database_config, gc_logging)
import Gargantext.Core.Config.Types (SettingsFile(..))
import Gargantext.Core.Config.Utils (readConfig)
import Gargantext.Core.NodeStory (fromDBNodeStoryEnv)
import Gargantext.Core.NodeStory (mkNodeStoryEnv)
import Gargantext.Database.Prelude (Cmd, CmdRandom, connPool, runCmd)
import Gargantext.Prelude
import Gargantext.System.Logging ( withLoggerIO )
......@@ -41,7 +41,7 @@ withDevEnv settingsFile k = do
where
newDevEnv logger cfg = do
pool <- newPool (_gc_database_config cfg)
nodeStory_env <- fromDBNodeStoryEnv pool
let nodeStory_env = mkNodeStoryEnv
manager <- newTlsManager
pure $ DevEnv
{ _dev_env_pool = pool
......
......@@ -79,6 +79,11 @@ backendErrorToFrontendError = \case
$ FE_validation_error $ case prettyValidation validationError of
Nothing -> "unknown_validation_error"
Just v -> T.pack v
-- Worker errors might contain sensitive information, so we don't
-- want to expose that to the frontend.
InternalWorkerError _workerError
-> let msg = T.pack $ "An unexpected error occurred in one of the async worker tasks. Please check your server logs."
in mkFrontendErr' msg $ FE_internal_server_error msg
AccessPolicyError accessPolicyError
-> case accessPolicyError of
AccessPolicyNodeError nodeError
......
......@@ -118,6 +118,7 @@ data BackendInternalError
| InternalTreeError !TreeError
| InternalUnexpectedError !SomeException
| InternalValidationError !Validation
| InternalWorkerError !IOException
| AccessPolicyError !AccessPolicyErrorReason
deriving (Show, Typeable)
......
......@@ -23,7 +23,7 @@ import Gargantext.Database.Admin.Types.Hyperdata.Contact
, cw_lastName
, hc_who, ContactWhere, hc_where, cw_organization, cw_labTeamDepts, cw_role, cw_office, cw_country, cw_city, cw_touch, ct_mail, ct_phone, ct_url, hc_title, hc_source)
import Gargantext.Database.Admin.Types.Node (ContextId (..))
import Gargantext.Database.Prelude (IsDBEnvExtra)
import Gargantext.Database.Prelude
import Gargantext.Database.Query.Table.Context (getContextWith)
import Gargantext.Database.Schema.Node (node_hyperdata)
import Gargantext.Prelude
......@@ -71,7 +71,7 @@ dbAnnuaireContacts contact_id = do
-- FIXME(adinapoli) This function seems a bit iffy, unless a 'contact_id'
-- is just a synonym for a 'ContextId'.
c <- lift $ getContextWith (UnsafeMkContextId contact_id) (Proxy :: Proxy HyperdataContact)
c <- lift $ runDBQuery $ getContextWith (UnsafeMkContextId contact_id) (Proxy :: Proxy HyperdataContact)
pure [toAnnuaireContact (contact_id, c ^. node_hyperdata)]
toAnnuaireContact :: (Int, HyperdataContact) -> AnnuaireContact
......
......@@ -33,7 +33,7 @@ import Gargantext.API.Prelude (GargM)
import Gargantext.Core.Types.Search (HyperdataRow(..), toHyperdataRow)
import Gargantext.Database.Admin.Types.Hyperdata.Document ( HyperdataDocument )
import Gargantext.Database.Admin.Types.Node (ContextTitle, NodeId(..), NodeTypeId, UserId, unNodeId, ContextId (..))
import Gargantext.Database.Prelude (IsDBEnvExtra)
import Gargantext.Database.Prelude
import Gargantext.Database.Query.Table.NodeContext (getNodeContext, getContextsForNgramsTerms, ContextForNgramsTerms(..), {- getContextNgrams, -} getContextNgramsMatchingFTS)
import Gargantext.Database.Query.Table.NodeContext qualified as DNC
import Gargantext.Database.Schema.NodeContext (NodeContext, NodeContextPoly(..))
......@@ -147,7 +147,7 @@ dbNodeContext context_id node_id = do
-- user <- getUsersWithId user_id
-- hyperdata <- getUserHyperdata user_id
-- lift (map toUser <$> zip user hyperdata)
c <- lift $ getNodeContext (UnsafeMkContextId context_id) (UnsafeMkNodeId node_id)
c <- lift $ runDBQuery $ getNodeContext (UnsafeMkContextId context_id) (UnsafeMkNodeId node_id)
pure $ toNodeContextGQL <$> [c]
-- | Returns list of `ContextGQL` for given ngrams in given corpus id.
......@@ -155,7 +155,7 @@ dbContextForNgrams
:: (IsDBEnvExtra env)
=> Int -> [Text] -> Text -> GqlM e env [ContextGQL]
dbContextForNgrams node_id ngrams_terms and_logic = do
contextsForNgramsTerms <- lift $ getContextsForNgramsTerms (UnsafeMkNodeId node_id) ngrams_terms ( readMaybe $ unpack $ Text.toTitle and_logic )
contextsForNgramsTerms <- lift $ runDBQuery $ getContextsForNgramsTerms (UnsafeMkNodeId node_id) ngrams_terms ( readMaybe $ unpack $ Text.toTitle and_logic )
--lift $ printDebug "[dbContextForNgrams] contextsForNgramsTerms" contextsForNgramsTerms
pure $ toContextGQL <$> contextsForNgramsTerms
......@@ -164,7 +164,7 @@ dbContextNgrams
:: (IsDBEnvExtra env)
=> Int -> Int -> GqlM e env [Text]
dbContextNgrams context_id list_id = do
lift $ getContextNgramsMatchingFTS (UnsafeMkContextId context_id) (UnsafeMkNodeId list_id)
lift $ runDBQuery $ getContextNgramsMatchingFTS (UnsafeMkContextId context_id) (UnsafeMkNodeId list_id)
-- Conversion functions
......@@ -228,5 +228,5 @@ updateNodeContextCategory :: (IsDBEnvExtra env)
-> GqlM' e env [Int]
updateNodeContextCategory autUser mgr NodeContextCategoryMArgs { context_id, node_id, category } =
withPolicy autUser mgr (nodeWriteChecks $ UnsafeMkNodeId node_id) $ do
void $ lift $ DNC.updateNodeContextCategory (UnsafeMkContextId context_id) (UnsafeMkNodeId node_id) category
void $ lift $ runDBTx $ DNC.updateNodeContextCategory (UnsafeMkContextId context_id) (UnsafeMkNodeId node_id) category
pure [1]
......@@ -24,7 +24,7 @@ import Gargantext.API.GraphQL.Types ( GqlM )
import Gargantext.Core ( HasDBid(lookupDBid) )
import Gargantext.Database.Admin.Types.Node (NodeType)
import Gargantext.Database.Admin.Types.Node qualified as NN
import Gargantext.Database.Prelude (IsDBEnvExtra) -- , JSONB)
import Gargantext.Database.Prelude
import Gargantext.Database.Query.Table.Node (getClosestChildrenByType, getClosestParentIdByType, getNode)
import Gargantext.Database.Schema.Node qualified as N
import Gargantext.Prelude
......@@ -74,14 +74,14 @@ dbNodes
:: (IsDBEnvExtra env)
=> Int -> GqlM e env [Node]
dbNodes node_id = do
node <- lift $ getNode $ NN.UnsafeMkNodeId node_id
node <- lift $ runDBQuery $ getNode $ NN.UnsafeMkNodeId node_id
pure [toNode node]
dbNodesCorpus
:: (IsDBEnvExtra env)
=> Int -> GqlM e env [Corpus]
dbNodesCorpus corpus_id = do
corpus <- lift $ getNode $ NN.UnsafeMkNodeId corpus_id
corpus <- lift $ runDBQuery $ getNode $ NN.UnsafeMkNodeId corpus_id
pure [toCorpus corpus]
data NodeParentArgs
......@@ -116,19 +116,21 @@ dbParentNodes node_id parentType = do
-- lift $ printDebug "[dbParentNodes] error reading parent type" (T.pack err)
-- pure []
-- Right parentType -> do
mNodeId <- lift $ getClosestParentIdByType (NN.UnsafeMkNodeId node_id) parentType -- (fromNodeTypeId parent_type_id)
lift $ runDBQuery $ do
mNodeId <- getClosestParentIdByType (NN.UnsafeMkNodeId node_id) parentType -- (fromNodeTypeId parent_type_id)
case mNodeId of
Nothing -> pure []
Just id -> do
node <- lift $ getNode id
node <- getNode id
pure [toNode node]
dbChildNodes :: (IsDBEnvExtra env)
=> Int -> NodeType -> GqlM e env [Node]
dbChildNodes node_id childType = do
childIds <- lift $ getClosestChildrenByType (NN.UnsafeMkNodeId node_id) childType -- (fromNodeTypeId parent_type_id)
children <- lift $ mapM getNode childIds
pure $ toNode <$> children
lift $ runDBQuery $ do
childIds <- getClosestChildrenByType (NN.UnsafeMkNodeId node_id) childType -- (fromNodeTypeId parent_type_id)
children <- mapM getNode childIds
pure $ toNode <$> children
toNode :: NN.Node json -> Node
toNode N.Node { .. } = Node { id = nid
......
......@@ -25,7 +25,7 @@ import Gargantext.Core.Config (HasJWTSettings)
import Gargantext.Core.Types (NodeId(..), unNodeId)
import Gargantext.Core.Types.Individu qualified as Individu
import Gargantext.Database.Action.Share (membersOf, deleteMemberShip)
import Gargantext.Database.Prelude (IsDBEnvExtra)
import Gargantext.Database.Prelude
import Gargantext.Database.Query.Table.Node (getNode)
import Gargantext.Database.Query.Table.User (getUsersWithNodeHyperdata)
import Gargantext.Database.Schema.Node (NodePoly(Node, _node_id), _node_user_id)
......@@ -60,13 +60,14 @@ dbTeam :: (IsDBEnvExtra env) =>
Int -> GqlM e env Team
dbTeam nodeId = do
let nId = UnsafeMkNodeId nodeId
res <- lift $ membersOf nId
teamNode <- lift $ getNode nId
userNodes <- lift $ getUsersWithNodeHyperdata $ Individu.UserDBId $ uId teamNode
let username = getUsername userNodes
pure $ Team { team_owner_username = username
, team_members = map toTeamMember res
}
lift $ runDBQuery $ do
res <- membersOf nId
teamNode <- getNode nId
userNodes <- getUsersWithNodeHyperdata $ Individu.UserDBId $ uId teamNode
let username = getUsername userNodes
pure $ Team { team_owner_username = username
, team_members = map toTeamMember res
}
where
toTeamMember :: (Text, NodeId) -> TeamMember
toTeamMember (username, fId)= TeamMember {
......@@ -81,18 +82,19 @@ dbTeam nodeId = do
deleteTeamMembership :: (IsDBEnvExtra env, HasJWTSettings env) =>
TeamDeleteMArgs -> GqlM' e env [Int]
deleteTeamMembership TeamDeleteMArgs { token, shared_folder_id, team_node_id } = do
teamNode <- lift $ getNode $ UnsafeMkNodeId team_node_id
userNodes <- lift (getUsersWithNodeHyperdata $ Individu.UserDBId $ uId teamNode)
userNodes <- lift $ runDBTx $ do
teamNode <- getNode $ UnsafeMkNodeId team_node_id
getUsersWithNodeHyperdata $ Individu.UserDBId $ uId teamNode
case userNodes of
[] -> panicTrace $ "[deleteTeamMembership] User with id " <> T.pack (show $ uId teamNode) <> " doesn't exist."
[] -> panicTrace $ "[deleteTeamMembership] User with id " <> T.pack (show $ team_node_id) <> " doesn't exist."
(( _, node_u):_) -> do
testAuthUser <- lift $ authUser (nId node_u) token
lift $ case testAuthUser of
case testAuthUser of
-- Invalid -> panicTrace "[deleteTeamMembership] failed to validate user"
Invalid -> do
throwError $ InternalAuthenticationError $ UserNotAuthorized (uId node_u) "This user is not team owner"
lift $ throwError $ InternalAuthenticationError $ UserNotAuthorized (uId node_u) "This user is not team owner"
Valid -> do
deleteMemberShip [(UnsafeMkNodeId shared_folder_id, UnsafeMkNodeId team_node_id)]
lift $ runDBTx $ deleteMemberShip [(UnsafeMkNodeId shared_folder_id, UnsafeMkNodeId team_node_id)]
where
uId Node { _node_user_id } = _node_user_id
nId Node { _node_id } = _node_id
......@@ -25,7 +25,7 @@ import Gargantext.Core.Types.Main ( Tree(..), _tn_node, _tn_children, NodeTree(.
import Gargantext.Database.Admin.Config ()
import Gargantext.Database.Admin.Types.Node (allNodeTypes, NodeId(..), NodeType)
import Gargantext.Database.Admin.Types.Node qualified as NN
import Gargantext.Database.Prelude (IsDBEnvExtra)
import Gargantext.Database.Prelude
import Gargantext.Database.Query.Table.Node (getNode)
import Gargantext.Database.Query.Tree qualified as T
import Gargantext.Database.Schema.Node (NodePoly(_node_parent_id))
......@@ -77,10 +77,11 @@ dbTree :: (IsDBEnvExtra env) =>
NN.UserId -> Int -> GqlM e env (TreeFirstLevel (GqlM e env))
dbTree loggedInUserId root_id = do
let rId = UnsafeMkNodeId root_id
t <- lift $ T.tree loggedInUserId T.TreeFirstLevel rId allNodeTypes
n <- lift $ getNode $ UnsafeMkNodeId root_id
let pId = toParentId n
pure $ toTree rId pId t
lift $ runDBQuery $ do
t <- T.tree loggedInUserId T.TreeFirstLevel rId allNodeTypes
n <- getNode $ UnsafeMkNodeId root_id
let pId = toParentId n
pure $ toTree rId pId t
where
toParentId N.Node { _node_parent_id } = _node_parent_id
......@@ -100,7 +101,7 @@ childrenToTreeNodes (TreeN {_tn_node}, rId) = toTreeNode (Just rId) _tn_node
resolveParent :: (IsDBEnvExtra env) => Maybe NodeId -> GqlM e env (Maybe TreeNode)
resolveParent (Just pId) = do
node <- lift $ getNode pId
node <- lift $ runDBQuery $ getNode pId
pure $ nodeToTreeNode node
resolveParent Nothing = pure Nothing
......@@ -133,6 +134,6 @@ convertDbTreeToTreeNode T.DbTreeNode { _dt_name, _dt_nodeId, _dt_typeId, _dt_par
dbRecursiveParents :: (IsDBEnvExtra env) => Int -> GqlM e env BreadcrumbInfo
dbRecursiveParents nodeId = do
let nId = UnsafeMkNodeId nodeId
dbParents <- lift $ T.recursiveParents nId allNodeTypes
dbParents <- lift $ runDBQuery $ T.recursiveParents nId allNodeTypes
let treeNodes = map convertDbTreeToTreeNode dbParents
pure $ BreadcrumbInfo { parents = treeNodes }
......@@ -22,7 +22,7 @@ import Gargantext.API.GraphQL.Types (GqlM, GqlM')
import Gargantext.Core.Types (NodeId(..), UserId)
import Gargantext.Core.Types.Individu qualified as Individu
import Gargantext.Database.Admin.Types.Hyperdata (HyperdataUser(..))
import Gargantext.Database.Prelude (IsDBEnvExtra)
import Gargantext.Database.Prelude
import Gargantext.Database.Query.Table.User qualified as DBUser
import Gargantext.Database.Schema.User (UserLight(..))
import Gargantext.Prelude
......@@ -72,7 +72,7 @@ resolveUsers autUser mgr UserArgs { user_id } = do
-- | Inner function to fetch the user from DB.
dbUsers :: (IsDBEnvExtra env)
=> Int -> GqlM e env [User (GqlM e env)]
dbUsers user_id = lift (map toUser <$> DBUser.getUsersWithId (Individu.RootId $ UnsafeMkNodeId user_id))
dbUsers user_id = lift (map toUser <$> runDBQuery (DBUser.getUsersWithId (Individu.RootId $ UnsafeMkNodeId user_id)))
toUser
:: (IsDBEnvExtra env)
......@@ -85,25 +85,25 @@ toUser (UserLight { .. }) = User { u_email = userLight_email
resolveHyperdata
:: (IsDBEnvExtra env)
=> UserId -> GqlM e env (Maybe HyperdataUser)
resolveHyperdata userid = lift (listToMaybe <$> DBUser.getUserHyperdata (Individu.UserDBId userid))
resolveHyperdata userid = lift (listToMaybe <$> runDBQuery (DBUser.getUserHyperdata (Individu.UserDBId userid)))
updateUserPubmedAPIKey :: ( IsDBEnvExtra env ) =>
UserPubmedAPIKeyMArgs -> GqlM' e env Int
updateUserPubmedAPIKey UserPubmedAPIKeyMArgs { user_id, api_key } = do
_ <- lift $ DBUser.updateUserPubmedAPIKey (Individu.RootId $ UnsafeMkNodeId user_id) api_key
_ <- lift $ runDBTx $ DBUser.updateUserPubmedAPIKey (Individu.RootId $ UnsafeMkNodeId user_id) api_key
pure 1
updateUserEPOAPIUser :: ( IsDBEnvExtra env ) =>
UserEPOAPIUserMArgs -> GqlM' e env Int
updateUserEPOAPIUser UserEPOAPIUserMArgs { user_id, api_user } = do
_ <- lift $ DBUser.updateUserEPOAPIUser (Individu.RootId $ UnsafeMkNodeId user_id) api_user
_ <- lift $ runDBTx $ DBUser.updateUserEPOAPIUser (Individu.RootId $ UnsafeMkNodeId user_id) api_user
pure 1
updateUserEPOAPIToken :: ( IsDBEnvExtra env ) =>
UserEPOAPITokenMArgs -> GqlM' e env Int
updateUserEPOAPIToken UserEPOAPITokenMArgs { user_id, api_token } = do
_ <- lift $ DBUser.updateUserEPOAPIToken (Individu.RootId $ UnsafeMkNodeId user_id) api_token
_ <- lift $ runDBTx $ DBUser.updateUserEPOAPIToken (Individu.RootId $ UnsafeMkNodeId user_id) api_token
pure 1
......@@ -49,7 +49,7 @@ import Gargantext.API.GraphQL.Utils (AuthStatus(Invalid, Valid), authUser)
import Gargantext.Core.Config (HasJWTSettings)
import Gargantext.Core.Types (UserId(..))
import Gargantext.Core.Types.Individu qualified as Individu
import Gargantext.Database.Prelude (IsDBEnvExtra)
import Gargantext.Database.Prelude
import Gargantext.Database.Query.Table.Node.UpdateOpaleye (updateHyperdata)
import Gargantext.Database.Query.Table.User (getUsersWithHyperdata, getUsersWithNodeHyperdata, updateUserEmail)
import Gargantext.Database.Schema.Node (node_id, node_hyperdata, NodePoly (Node, _node_id))
......@@ -124,7 +124,7 @@ updateUserInfo
=> UserInfoMArgs -> GqlM' e env Int
updateUserInfo (UserInfoMArgs { ui_id, .. }) = do
-- lift $ printDebug "[updateUserInfo] ui_id" ui_id
users <- lift (getUsersWithNodeHyperdata (Individu.UserDBId $ UnsafeMkUserId ui_id))
users <- lift $ runDBQuery $ getUsersWithNodeHyperdata (Individu.UserDBId $ UnsafeMkUserId ui_id)
case users of
[] -> panicTrace $ "[updateUserInfo] User with id " <> (T.pack $ show ui_id) <> " doesn't exist."
((UserLight { .. }, node_u):_) -> do
......@@ -155,10 +155,11 @@ updateUserInfo (UserInfoMArgs { ui_id, .. }) = do
let u' = UserLight { userLight_email = fromMaybe userLight_email $ view ui_cwTouchMailL u_hyperdata'
, .. }
-- lift $ printDebug "[updateUserInfo] with firstName" u_hyperdata'
_ <- lift $ updateHyperdata (node_u ^. node_id) u_hyperdata'
_ <- lift $ updateUserEmail u'
--let _newUser = toUser (u, u_hyperdata')
pure 1
lift $ runDBTx $ do
_ <- updateHyperdata (node_u ^. node_id) u_hyperdata'
_ <- updateUserEmail u'
--let _newUser = toUser (u, u_hyperdata')
pure 1
where
uh _ Nothing u_hyperdata = u_hyperdata
uh lens' (Just val) u_hyperdata = u_hyperdata & lens' ?~ val
......@@ -175,7 +176,7 @@ dbUsers user_id = do
-- user <- getUsersWithId user_id
-- hyperdata <- getUserHyperdata user_id
-- lift (map toUser <$> zip user hyperdata)
lift (map toUser <$> getUsersWithHyperdata (Individu.UserDBId user_id))
lift (map toUser <$> runDBQuery (getUsersWithHyperdata (Individu.UserDBId user_id)))
toUser :: (UserLight, HyperdataUser) -> UserInfo
toUser (UserLight { .. }, u_hyperdata) =
......
......@@ -20,6 +20,7 @@ module Gargantext.API.Job (
, jobLogFailTotalWithMessage
, RemainingSteps(..)
, addErrorEvent
, addWarningEvent
) where
import Control.Lens (over, _Just)
......@@ -49,6 +50,9 @@ addEvent level message (JobLog { _scst_events = mEvts, .. }) = JobLog { _scst_ev
addErrorEvent :: ToHumanFriendlyError e => e -> JobLog -> JobLog
addErrorEvent message = addEvent "ERROR" (mkHumanFriendly message)
addWarningEvent :: ToHumanFriendlyError e => e -> JobLog -> JobLog
addWarningEvent message = addEvent "WARNING" (mkHumanFriendly message)
jobLogProgress :: Int -> JobLog -> JobLog
jobLogProgress n jl = over (scst_succeeded . _Just) (+ n) $
over (scst_remaining . _Just) (\x -> max 0 (x - n)) jl
......
......@@ -15,6 +15,7 @@ import Gargantext.API.Routes.Named.Private qualified as Named
import Gargantext.Database.Action.Share (membersOf)
import Gargantext.Database.Admin.Types.Node (NodeType(NodeTeam))
import Gargantext.Database.Query.Table.Node (getNodesIdWithType)
import Gargantext.Database.Prelude
import Gargantext.Prelude
import Servant.Server.Generic (AsServerT)
......@@ -22,7 +23,7 @@ members :: IsGargServer err env m => Named.MembersAPI (AsServerT m)
members = Named.MembersAPI getMembers
getMembers :: IsGargServer err env m => m [Text]
getMembers = do
getMembers = runDBQuery $ do
teamNodeIds <- getNodesIdWithType NodeTeam
m <- concatMapM membersOf teamNodeIds
pure $ map fst m
......@@ -17,6 +17,8 @@ Portability : POSIX
module Gargantext.API.Ngrams.List
where
import Control.Lens (view)
import Data.Aeson qualified as Aeson
import Data.ByteString.Lazy qualified as BSL
import Data.Csv qualified as Tsv
import Data.HashMap.Strict (HashMap)
......@@ -25,8 +27,10 @@ import Data.Map.Strict (toList)
import Data.Map.Strict qualified as Map
import Data.Set qualified as Set
import Data.Text (concat, pack, splitOn)
import Data.Text.Encoding qualified as TE
import Data.Vector (Vector)
import Data.Vector qualified as Vec
import Database.PostgreSQL.Simple.LargeObjects qualified as PSQL
import Gargantext.API.Admin.EnvTypes (Env)
import Gargantext.API.Errors.Types (BackendInternalError(InternalServerError))
import Gargantext.API.Ngrams (setListNgrams)
......@@ -35,19 +39,20 @@ import Gargantext.API.Ngrams.Prelude (getNgramsList)
import Gargantext.API.Ngrams.Types
import Gargantext.API.Prelude (GargM, serverError, HasServerError)
import Gargantext.API.Routes.Named.List qualified as Named
import Gargantext.API.Worker (serveWorkerAPI, serveWorkerAPIM)
import Gargantext.Core.NodeStory.Types ( HasNodeStory )
import Gargantext.API.Worker (serveWorkerAPIM)
import Gargantext.Core.NodeStory.Types ( HasNodeStory, hasNodeStory, NodeStoryEnv )
import Gargantext.Core.Text.Ngrams (Ngrams, NgramsType(NgramsTerms))
import Gargantext.Core.Types.Main (ListType(..))
import Gargantext.Core.Worker.Jobs.Types qualified as Jobs
import Gargantext.Database.Action.Flow (reIndexWith)
import Gargantext.Database.Admin.Types.Node ( NodeId(_NodeId), ListId )
import Gargantext.Database.Prelude
import Gargantext.Database.Query.Table.Node (getNode)
import Gargantext.Database.Schema.Ngrams ( text2ngrams, NgramsId )
import Gargantext.Database.Schema.Node (_node_parent_id)
import Gargantext.Database.Types (Indexed(..))
import Gargantext.Prelude hiding (concat, toList)
import Gargantext.System.Logging
import Gargantext.System.Logging (logLocM, LogLevel(..), MonadLogger)
import Gargantext.Utils.Jobs.Monad (MonadJobStatus(..))
import Prelude qualified
import Protolude qualified as P
......@@ -74,7 +79,8 @@ getJson :: HasNodeStory env err m
=> ListId
-> m (Headers '[Header "Content-Disposition" Text] NgramsList)
getJson lId = do
lst <- getNgramsList lId
env <- view hasNodeStory
lst <- runDBQuery $ getNgramsList env lId
pure $ addHeader (concat [ "attachment; filename=GarganText_NgramsList-"
, pack $ show (_NodeId lId)
, ".json"
......@@ -85,7 +91,8 @@ getJsonZip :: HasNodeStory env err m
=> ListId
-> m (Headers '[Header "Content-Disposition" Text] NgramsListZIP)
getJsonZip lId = do
lst <- getNgramsList lId
env <- view hasNodeStory
lst <- runDBQuery $ getNgramsList env lId
let nlz = NgramsListZIP { _nlz_nl = lst, _nlz_list_id = lId}
pure $ addHeader (concat [ "attachment; filename="
, nlzFileName nlz
......@@ -97,7 +104,8 @@ getTsv :: HasNodeStory env err m
=> ListId
-> m (Headers '[Header "Content-Disposition" Text] NgramsTableMap)
getTsv lId = do
lst <- getNgramsList lId
env <- view hasNodeStory
lst <- runDBQuery $ getNgramsList env lId
pure $ case Map.lookup NgramsTerms lst of
Nothing -> noHeader Map.empty
Just (Versioned { _v_data }) ->
......@@ -110,12 +118,15 @@ getTsv lId = do
------------------------------------------------------------------------
jsonPostAsync :: Named.JSONAPI (AsServerT (GargM Env BackendInternalError))
jsonPostAsync = Named.JSONAPI {
updateListJSONEp = \lId -> serveWorkerAPI $ \p ->
Jobs.JSONPost { _jp_list_id = lId
, _jp_ngrams_list = _wjf_data p }
updateListJSONEp = \lId -> serveWorkerAPIM $ \p -> do
(PSQL.Oid oId) <- createLargeObject $ TE.encodeUtf8 $ _wjf_data p
pure $ Jobs.JSONPost { _jp_list_id = lId
, _jp_ngrams_oid = fromIntegral oId }
-- , _jp_ngrams_list = _wjf_data p }
}
------------------------------------------------------------------------
-- NOTE(adn) Make it DB-transactional.
postAsyncJSON :: (HasNodeStory env err m, MonadJobStatus m, MonadLogger m)
=> ListId
-> NgramsList
......@@ -123,27 +134,28 @@ postAsyncJSON :: (HasNodeStory env err m, MonadJobStatus m, MonadLogger m)
-> m ()
postAsyncJSON l ngramsList jobHandle = do
env <- view hasNodeStory
markStarted 2 jobHandle
$(logLocM) DEBUG "[postAsyncJSON] Setting the Ngrams list ..."
setList
setList env
$(logLocM) DEBUG "[postAsyncJSON] Done."
markProgress 1 jobHandle
corpus_node <- getNode l -- (Proxy :: Proxy HyperdataList)
corpus_node <- runDBQuery $ getNode l -- (Proxy :: Proxy HyperdataList)
let corpus_id = fromMaybe (panicTrace "no parent_id") (_node_parent_id corpus_node)
$(logLocM) DEBUG "[postAsyncJSON] Executing re-indexing..."
_ <- reIndexWith corpus_id l NgramsTerms (Set.fromList [MapTerm, CandidateTerm])
_ <- runDBTx $ reIndexWith env corpus_id l NgramsTerms (Set.fromList [MapTerm, CandidateTerm])
$(logLocM) DEBUG "[postAsyncJSON] Re-indexing done."
markComplete jobHandle
where
setList :: HasNodeStory env err m => m ()
setList = do
setList :: IsDBCmd env err m => NodeStoryEnv err -> m ()
setList env = do
-- TODO check with Version for optim
mapM_ (\(nt, Versioned _v ns) -> setListNgrams l nt ns) $ toList ngramsList
runDBTx $ mapM_ (\(nt, Versioned _v ns) -> setListNgrams env l nt ns) $ toList ngramsList
-- TODO reindex
......@@ -162,8 +174,10 @@ tsvPostAsync =
$(logLocM) DEBUG $ "Started to upload " <> (_wtf_name p)
case ngramsListFromTSVData (_wtf_data p) of
Left err -> throwError $ InternalServerError $ err500 { errReasonPhrase = err }
Right ngramsList -> pure $ Jobs.JSONPost { _jp_list_id = lId
, _jp_ngrams_list = ngramsList }
Right ngramsList -> do
(PSQL.Oid oId) <- createLargeObject $ BSL.toStrict $ Aeson.encode ngramsList
pure $ Jobs.JSONPost { _jp_list_id = lId
, _jp_ngrams_oid = fromIntegral oId }
}
-- | Tries converting a text file into an 'NgramList', so that we can reuse the
......
......@@ -9,15 +9,13 @@ Portability : POSIX
-}
{-# LANGUAGE TypeOperators #-}
module Gargantext.API.Ngrams.List.Types where
import Data.Aeson
import Data.ByteString.Lazy qualified as BSL
-- import Data.ByteString.Lazy qualified as BSL
import Data.Swagger (ToSchema, declareNamedSchema, genericDeclareNamedSchema)
import Data.Text
import Data.Text.Encoding qualified as E
-- import Data.Text.Encoding qualified as E
import Gargantext.API.Ngrams.Types (NgramsList)
import Gargantext.API.Node.Corpus.New.Types (FileType(..))
import Gargantext.Core.Utils.Aeson (jsonOptions)
......@@ -46,16 +44,18 @@ instance ToSchema WithFile where
------------------------------------------------------------------------
data WithJsonFile = WithJsonFile
{ _wjf_data :: !NgramsList
{ -- _wjf_data :: !NgramsList
_wjf_data :: !Text
, _wjf_name :: !Text
} deriving (Eq, Show, Generic)
instance FromForm WithJsonFile where
fromForm f = do
d' <- parseUnique "_wjf_data" f
d <- case eitherDecode' (BSL.fromStrict $ E.encodeUtf8 d') of
Left s -> Left $ pack s
Right v -> Right v
d <- parseUnique "_wjf_data" f
-- d' <- parseUnique "_wjf_data" f
-- d <- case eitherDecode' (BSL.fromStrict $ E.encodeUtf8 d') of
-- Left s -> Left $ pack s
-- Right v -> Right v
n <- parseUnique "_wjf_name" f
pure $ WithJsonFile { _wjf_data = d
, _wjf_name = n }
......
......@@ -34,7 +34,7 @@ data NgramsTree = NgramsTree { mt_label :: Text
, mt_value :: Double
, mt_children :: [NgramsTree]
}
deriving (Generic, Show)
deriving (Generic, Show, Eq)
toNgramsTree :: Tree (NgramsTerm,Double) -> NgramsTree
toNgramsTree (Node (NgramsTerm l,v) xs) = NgramsTree l v (map toNgramsTree xs)
......
......@@ -22,26 +22,27 @@ import Data.Map.Strict qualified as Map
import Data.Text qualified as Text
import Gargantext.API.Ngrams (getNgramsTableMap)
import Gargantext.API.Ngrams.Types
import Gargantext.Core.NodeStory.Types ( HasNodeStory )
import Gargantext.Core.NodeStory.Types ( NodeStoryEnv )
import Gargantext.Core.Text.Context (TermList)
import Gargantext.Core.Text.List.Social.Prelude ( unPatchMapToHashMap )
import Gargantext.Core.Text.Ngrams (NgramsType, ngramsTypes)
import Gargantext.Core.Types.Main ( ListType )
import Gargantext.Database.Admin.Types.Node (ListId)
import Gargantext.Prelude
import Gargantext.Database.Prelude
------------------------------------------------------------------------
getNgramsList :: HasNodeStory env err m
=> ListId -> m NgramsList
getNgramsList lId = fromList
getNgramsList :: NodeStoryEnv err
-> ListId -> DBQuery err x NgramsList
getNgramsList env lId = fromList
<$> zip ngramsTypes
<$> mapM (getNgramsTableMap lId) ngramsTypes
<$> mapM (getNgramsTableMap env lId) ngramsTypes
getTermList :: HasNodeStory env err m
=> ListId -> ListType -> NgramsType -> m (Maybe TermList)
getTermList lId listType ngramsType = do
ngramsList <- getNgramsList lId
getTermList :: NodeStoryEnv err
-> ListId -> ListType -> NgramsType -> DBQuery err x (Maybe TermList)
getTermList env lId listType ngramsType = do
  ngramsList <- getNgramsList env lId
pure $ toTermList listType ngramsType ngramsList
......
......@@ -29,6 +29,7 @@ import Gargantext.Core.NodeStory.Types
import Gargantext.Core.Text.Ngrams (NgramsType)
import Gargantext.Core.Types.Main ( ListType(..) )
import Gargantext.Database.Admin.Types.Node ( NodeId, ListId )
import Gargantext.Database.Prelude
import Gargantext.Prelude
......@@ -38,14 +39,8 @@ mergeNgramsElement _neOld neNew = neNew
type RootTerm = NgramsTerm
getRepo :: HasNodeStory env err m
=> [ListId] -> m NodeListStory
getRepo listIds = do
f <- getNodeListStoryMulti
liftBase $ f listIds
-- v <- liftBase $ f listIds
-- v' <- liftBase $ atomically $ readTVar v
-- pure $ v'
getRepo :: NodeStoryEnv err -> [ListId] -> DBQuery err x NodeListStory
getRepo env listIds = getNodeListStoryMulti env listIds
repoSize :: Ord k1 => NodeStory (Map.Map k1 (Map.Map k2 a)) p
......@@ -58,28 +53,19 @@ repoSize repo node_id = Map.map Map.size state'
. a_state
getNodeStory :: HasNodeStory env err m
=> ListId -> m ArchiveList
getNodeStory l = do
f <- getNodeListStory
liftBase $ f l
-- v <- liftBase $ f l
-- pure v
getNodeStory :: NodeStoryEnv err -> ListId -> DBQuery err x ArchiveList
getNodeStory env l = getNodeListStory env l
getNodeListStory :: NodeStoryEnv err
-> NodeId
-> DBQuery err x ArchiveList
getNodeListStory env = view nse_getter env
getNodeListStory :: HasNodeStory env err m
=> m (NodeId -> IO ArchiveList)
getNodeListStory = do
env <- view hasNodeStory
pure $ view nse_getter env
getNodeListStoryMulti :: HasNodeStory env err m
=> m ([NodeId] -> IO NodeListStory)
getNodeListStoryMulti = do
env <- view hasNodeStory
pure $ view nse_getter_multi env
getNodeListStoryMulti :: NodeStoryEnv err
-> [NodeId]
-> DBQuery err x NodeListStory
getNodeListStoryMulti = view nse_getter_multi
listNgramsFromRepo :: [ListId]
......@@ -102,25 +88,27 @@ listNgramsFromRepo nodeIds ngramsType repo =
-- Adding a static capability parameter would be nice.
-- Ideally this is the access to `repoVar`, which needs to
-- be properly guarded.
getListNgrams :: HasNodeStory env err m
=> [ListId] -> NgramsType
-> m (HashMap NgramsTerm NgramsRepoElement)
getListNgrams nodeIds ngramsType = listNgramsFromRepo nodeIds ngramsType
<$> getRepo nodeIds
getListNgrams :: NodeStoryEnv err
-> [ListId]
-> NgramsType
-> DBQuery err x (HashMap NgramsTerm NgramsRepoElement)
getListNgrams env nodeIds ngramsType = listNgramsFromRepo nodeIds ngramsType
<$> getRepo env nodeIds
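-- A minimal usage sketch (illustrative, not part of this commit) of the new
-- explicitly-parameterised getters from a handler monad: the 'NodeStoryEnv' is
-- read once from the application environment and the read runs inside a single
-- 'DBQuery'. Assumes 'view', 'hasNodeStory' / 'HasNodeStoryEnv'
-- (Gargantext.Core.NodeStory.Types) and 'IsDBCmd' / 'runDBQuery'
-- (Gargantext.Database.Prelude) are in scope.
exampleListNgrams :: (IsDBCmd env err m, HasNodeStoryEnv env err)
                  => [ListId]
                  -> NgramsType
                  -> m (HashMap NgramsTerm NgramsRepoElement)
exampleListNgrams listIds ngramsType = do
  env <- view hasNodeStory
  runDBQuery $ getListNgrams env listIds ngramsType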
-- | Fetch terms from repo, gathering terms under the same root (parent).
getTermsWith :: forall a env err m.
(HasNodeStory env err m, Eq a, Hashable a)
=> (NgramsTerm -> a) -> [ListId]
-> NgramsType -> Set ListType
-> m (HashMap a [a])
getTermsWith f ls ngt lts = HM.fromListWith (<>)
<$> map toTreeWith
<$> HM.toList
<$> HM.filter (\f' -> Set.member (fst f') lts)
<$> mapTermListRoot ls ngt
<$> getRepo ls
getTermsWith :: forall a err x. Hashable a
=> NodeStoryEnv err
-> (NgramsTerm -> a) -> [ListId]
-> NgramsType -> Set ListType
-> DBQuery err x (HashMap a [a])
getTermsWith env f ls ngt lts =
let func = HM.fromListWith (<>)
. map toTreeWith
. HM.toList
. HM.filter (\f' -> Set.member (fst f') lts)
. mapTermListRoot ls ngt
in func <$> getRepo env ls
where
toTreeWith :: (NgramsTerm, (b, Maybe NgramsTerm)) -> (a, [a])
toTreeWith (t, (_lt, maybeRoot)) = case maybeRoot of
......
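-- Illustrative sketch (not from this commit) of the 'HM.fromListWith (<>)'
-- step used in 'getTermsWith' above: pairs that share a key have their lists
-- concatenated, so every child term ends up grouped under a single root entry.
groupUnderKey :: (Eq k, Hashable k) => [(k, [v])] -> HM.HashMap k [v]
groupUnderKey = HM.fromListWith (<>)
-- e.g. groupUnderKey [("root", ["a"]), ("root", ["b"]), ("solo", [])]
--        == HM.fromList [("root", ["b", "a"]), ("solo", [])]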
{-# LANGUAGE TypeOperators #-}
{-|
Module : Gargantext.API.Node.Corpus.Export
Description : Corpus export
......@@ -17,27 +16,25 @@ Main exports of Gargantext:
module Gargantext.API.Node.Corpus.Export
where
import Data.HashMap.Strict qualified as HashMap
import Control.Exception.Safe qualified as CES
import Control.Lens (view)
import Data.List qualified as List
import Data.Map.Strict qualified as Map
import Data.Set qualified as Set
import Data.Text (pack)
import Gargantext.API.Ngrams.Tools (filterListWithRoot, mapTermListRoot, getRepo)
import Gargantext.API.Ngrams.Tools (getRepo)
import Gargantext.API.Ngrams.Types ( NgramsTerm(unNgramsTerm) )
import Gargantext.API.Node.Corpus.Export.Types ( Corpus(..) )
import Gargantext.API.Node.Corpus.Export.Types ( Corpus(..), CorpusSQLite(..) )
import Gargantext.API.Node.Corpus.Export.Utils (getContextNgrams, mkCorpusSQLite, mkCorpusSQLiteData)
import Gargantext.API.Node.Document.Export.Types qualified as DocumentExport
import Gargantext.API.Prelude (IsGargServer)
import Gargantext.Core.NodeStory.Types ( NodeListStory )
import Gargantext.Core.NodeStory
import Gargantext.Core.Text.Ngrams (NgramsType(..))
import Gargantext.Core.Types.Main ( ListType(MapTerm) )
import Gargantext.Database.Admin.Types.Node
import Gargantext.Database.Action.Metrics.NgramsByContext (getNgramsByContextOnlyUser)
import Gargantext.Database.Admin.Config (userMaster)
import Gargantext.Database.Admin.Types.Hyperdata.Document ( HyperdataDocument(..) )
import Gargantext.Database.Prelude (DBCmdExtra)
import Gargantext.Database.Admin.Types.Node
import Gargantext.Database.Prelude
import Gargantext.Database.Query.Table.Node ( defaultList )
import Gargantext.Database.Query.Table.Node.Error (HasNodeError)
import Gargantext.Database.Query.Table.Node.Select (selectNodesWithUsername)
import Gargantext.Database.Query.Table.NodeContext (selectDocNodes)
import Gargantext.Database.Schema.Context (_context_id)
import Gargantext.Prelude hiding (hash)
......@@ -48,10 +45,13 @@ import qualified Gargantext.API.Routes.Named.Corpus as Named
--------------------------------------------------
-- | Hashes are ordered by Set
getCorpus :: forall env err m. IsGargServer env err m
getCorpus :: (CES.MonadMask m, IsGargServer env err m)
=> CorpusId
-> Named.CorpusExportAPI (AsServerT m)
getCorpus cId = Named.CorpusExportAPI $ \lId nt' -> get_corpus lId nt'
getCorpus cId = Named.CorpusExportAPI {
Named.corpusExportEp = get_corpus
, Named.corpusSQLiteEp = getCorpusSQLite cId
}
where
get_corpus :: IsGargServer env err m
......@@ -59,53 +59,49 @@ getCorpus cId = Named.CorpusExportAPI $ \lId nt' -> get_corpus lId nt'
-> Maybe NgramsType
-> m (Headers '[Header "Content-Disposition" Text] Corpus)
get_corpus lId nt' = do
let
nt = fromMaybe NgramsTerms nt'
env <- view hasNodeStory
runDBQuery $ do
let
nt = fromMaybe NgramsTerms nt'
listId <- case lId of
Nothing -> defaultList cId
Just l -> pure l
listId <- case lId of
Nothing -> defaultList cId
Just l -> pure l
-- FIXME(adn) Audit the usage of this, we are converting from a node
-- to a context id.
ns <- Map.fromList
<$> map (\n -> (nodeId2ContextId $ _context_id n, n))
<$> selectDocNodes cId
-- FIXME(adn) Audit the usage of this, we are converting from a node
-- to a context id.
ns <- Map.fromList
<$> map (\n -> (nodeId2ContextId $ _context_id n, n))
<$> selectDocNodes cId
repo <- getRepo [listId]
ngs <- getContextNgrams cId listId MapTerm nt repo
let -- uniqId is the hash already computed for each document imported into the database
r = Map.intersectionWith
(\a b -> DocumentExport.Document { _d_document = context2node a
, _d_ngrams = DocumentExport.Ngrams (Set.toList b) (hash b)
, _d_hash = d_hash a b }
) ns (Map.map (Set.map unNgramsTerm) ngs)
where
d_hash :: Context HyperdataDocument -> Set Text -> Text
d_hash _a b = hash [ -- fromMaybe "" (_hd_uniqId $ _context_hyperdata a),
hash b
]
pure $ addHeader ("attachment; filename=GarganText_corpus-" <> pack (show cId) <> ".json")
$ Corpus { _c_corpus = Map.elems r
, _c_hash = hash $ List.map DocumentExport._d_hash $ Map.elems r }
repo <- getRepo env [listId]
ngs <- getContextNgrams cId listId MapTerm nt repo
let -- uniqId is the hash already computed for each document imported into the database
r = Map.intersectionWith
(\a b -> DocumentExport.Document { _d_document = context2node a
, _d_ngrams = DocumentExport.Ngrams (Set.toList b) (hash b)
, _d_hash = d_hash a b }
) ns (Map.map (Set.map unNgramsTerm) ngs)
where
d_hash :: Context HyperdataDocument -> Set Text -> Text
d_hash _a b = hash [ -- fromMaybe "" (_hd_uniqId $ _context_hyperdata a),
hash b
]
pure $ addHeader ("attachment; filename=GarganText_corpus-" <> pack (show cId) <> ".json")
$ Corpus { _c_corpus = Map.elems r
, _c_hash = hash $ List.map DocumentExport._d_hash $ Map.elems r }
getContextNgrams :: HasNodeError err
=> CorpusId
-> ListId
-> ListType
-> NgramsType
-> NodeListStory
-> DBCmdExtra err (Map ContextId (Set NgramsTerm))
getContextNgrams cId lId listType nt repo = do
-- lId <- case lId' of
-- Nothing -> defaultList cId
-- Just l -> pure l
lIds <- selectNodesWithUsername NodeList userMaster
let ngs = filterListWithRoot [listType] $ mapTermListRoot [lId] nt repo
-- TODO HashMap
r <- getNgramsByContextOnlyUser cId (lIds <> [lId]) nt (HashMap.keys ngs)
pure r
getCorpusSQLite :: ( CES.MonadMask m
, IsGargServer env err m)
=> CorpusId
-> Maybe ListId
-> m (Headers '[Header "Content-Disposition" Text] CorpusSQLite)
getCorpusSQLite cId lId = do
corpusSQLiteData <- mkCorpusSQLiteData cId lId
corpusSQLite <- mkCorpusSQLite corpusSQLiteData
pure $ addHeader ("attachment; filename=GarganText_corpus-" <> pack (show cId) <> ".sqlite")
$ corpusSQLite
-- TODO
-- Exports List
......
......@@ -13,14 +13,21 @@ Portability : POSIX
module Gargantext.API.Node.Corpus.Export.Types where
import Data.Aeson.TH (deriveJSON)
import Data.Swagger ( ToSchema(..), genericDeclareNamedSchema, ToParamSchema(..) )
import Data.Text (Text)
import GHC.Generics (Generic)
import Data.ByteString.Lazy qualified as BSL
import Data.Swagger ( ToSchema(..), genericDeclareNamedSchema, ToParamSchema(..), NamedSchema(..), binarySchema )
import Data.Time.Clock (UTCTime)
import Data.Version (Version)
import Gargantext.API.Node.Document.Export.Types qualified as DocumentExport
import Gargantext.Core.Types ( TODO )
import Gargantext.API.Ngrams.Types (NgramsTerm)
import Gargantext.Core.Types ( CorpusId, ListId, TODO )
import Gargantext.Core.Utils.Prefix (unPrefix, unPrefixSwagger)
import Servant
import Gargantext.Database.Admin.Types.Hyperdata.Corpus (HyperdataCorpus)
import Gargantext.Database.Admin.Types.Hyperdata.Document (HyperdataDocument)
import Gargantext.Database.Admin.Types.Hyperdata.List (HyperdataList)
import Gargantext.Database.Admin.Types.Node (ContextId, NodeId)
import Gargantext.Prelude
import Gargantext.Prelude.Crypto.Hash qualified as H
import Servant (Accept(..), MimeRender(mimeRender), MimeUnrender(mimeUnrender), OctetStream)
-- Corpus Export
......@@ -37,3 +44,46 @@ instance ToSchema Corpus where
instance ToParamSchema Corpus where
toParamSchema _ = toParamSchema (Proxy :: Proxy TODO)
$(deriveJSON (unPrefix "_c_") ''Corpus)
-- | Wrapper around 'ByteString', used to return an SQLite DB
-- containing the corpus
newtype CorpusSQLite =
CorpusSQLite { _cs_bs :: BSL.ByteString }
deriving (Generic, NFData)
instance Accept CorpusSQLite where
contentType _ = contentType (Proxy :: Proxy OctetStream)
instance MimeRender OctetStream CorpusSQLite where
mimeRender _ (CorpusSQLite bs) = bs
-- | Needed for tests
instance MimeUnrender OctetStream CorpusSQLite where
mimeUnrender _ bs = Right $ CorpusSQLite { _cs_bs = bs }
instance ToSchema CorpusSQLite where
declareNamedSchema _ = pure $ NamedSchema (Just "CorpusSQLite") binarySchema
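-- A minimal check (illustrative, not part of this commit): the two
-- OctetStream instances above are inverses on the raw bytes, which is what
-- the test-only MimeUnrender instance relies on.
corpusSQLiteRoundTrips :: CorpusSQLite -> Bool
corpusSQLiteRoundTrips cs =
  fmap _cs_bs (mimeUnrender (Proxy :: Proxy OctetStream) (mimeRender (Proxy :: Proxy OctetStream) cs))
    == Right (_cs_bs cs)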
-- | Contents of the SQLite export DB
-- (having such a datatype makes it easier to implement import/export coherently)
data CorpusSQLiteData =
CorpusSQLiteData { _csd_version :: Version
, _csd_cId :: CorpusId
, _csd_lId :: ListId
, _csd_created :: UTCTime
, _csd_corpus_name :: Text
, _csd_corpus_hash :: Maybe H.Hash
, _csd_corpus_parent :: Maybe NodeId
, _csd_corpus_hyperdata :: HyperdataCorpus
, _csd_list_name :: Text
, _csd_list_parent :: Maybe NodeId
, _csd_list_hyperdata :: HyperdataList
, _csd_contexts :: [(NodeId, Text, UTCTime, HyperdataDocument)]
, _csd_map_context_ngrams :: Map ContextId (Set NgramsTerm)
, _csd_stop_context_ngrams :: Map ContextId (Set NgramsTerm)
, _csd_candidate_context_ngrams :: Map ContextId (Set NgramsTerm)
} deriving (Show, Eq, Generic)
......@@ -23,7 +23,7 @@ import Data.Tuple.Select (sel1, sel2, sel3)
import Gargantext.Core (Lang(..))
import Gargantext.Core.Config (GargConfig(..), hasConfig)
import Gargantext.Core.Config.Types (FramesConfig(..))
import Gargantext.Core.NLP (HasNLPServer, nlpServerGet)
import Gargantext.Core.NLP (HasNLPServer)
import Gargantext.Core.NodeStory.Types ( HasNodeStory )
import Gargantext.Core.Text.Corpus.Query qualified as Query
import Gargantext.Core.Text.Terms (TermType(..))
......@@ -35,6 +35,7 @@ import Gargantext.Database.Action.User (getUserId)
import Gargantext.Database.Admin.Types.Hyperdata.Corpus (HyperdataCorpus)
import Gargantext.Database.Admin.Types.Hyperdata.Document (HyperdataDocument(..))
import Gargantext.Database.Admin.Types.Node (CorpusId, ListId, NodeType(NodeTexts))
import Gargantext.Database.Prelude
import Gargantext.Database.Query.Table.Node (getOrMkList, insertDefaultNodeIfNotExists)
import Gargantext.Database.Query.Table.Node.Error (HasNodeError)
import Gargantext.Database.Query.Tree.Error (HasTreeError)
......@@ -126,7 +127,6 @@ insertSearxResponse :: ( MonadBase IO m
-> m ()
insertSearxResponse _ _ _ _ (Left _) = pure ()
insertSearxResponse user cId listId l (Right (SearxResponse { _srs_results })) = do
server <- view (nlpServerGet l)
-- docs :: [Either Text HyperdataDocument]
let docs = hyperdataDocumentFromSearxResult l <$> _srs_results
--printDebug "[triggerSearxSearch] docs" docs
......@@ -141,13 +141,12 @@ insertSearxResponse user cId listId l (Right (SearxResponse { _srs_results })) =
-}
--_ <- flowDataText user (DataNew [docs']) (Multi l) cId Nothing logStatus
let mCorpus = Nothing :: Maybe HyperdataCorpus
void $ addDocumentsToHyperCorpus server mCorpus (Multi l) cId docs'
void $ addDocumentsToHyperCorpus mCorpus (Multi l) cId docs'
_ <- buildSocialList l user cId listId mCorpus Nothing
pure ()
-- TODO Make an async task out of this?
triggerSearxSearch :: ( MonadBase IO m
, HasNodeStory env err m
......@@ -164,9 +163,9 @@ triggerSearxSearch :: ( MonadBase IO m
-> JobHandle m
-> m ()
triggerSearxSearch user cId q l jobHandle = do
userId <- getUserId user
_tId <- insertDefaultNodeIfNotExists NodeTexts cId userId
runDBTx $ do
userId <- getUserId user
void $ insertDefaultNodeIfNotExists NodeTexts cId userId
let numPages = 100
markStarted numPages jobHandle
......@@ -175,10 +174,12 @@ triggerSearxSearch user cId q l jobHandle = do
-- printDebug "[triggerSearxSearch] q" q
-- printDebug "[triggerSearxSearch] l" l
cfg <- view hasConfig
uId <- getUserId user
let surl = _f_searx_url $ _gc_frames cfg
-- printDebug "[triggerSearxSearch] surl" surl
listId <- getOrMkList cId uId
listId <- runDBTx $ do
uId <- getUserId user
-- printDebug "[triggerSearxSearch] surl" surl
getOrMkList cId uId
-- printDebug "[triggerSearxSearch] listId" listId
......
......@@ -14,7 +14,7 @@ import Gargantext.Database.Prelude (IsDBCmd)
import Servant.Server.Generic (AsServerT)
makeSubcorpus :: ( HasNodeStoryEnv env
makeSubcorpus :: ( HasNodeStoryEnv env BackendInternalError
, HasNLPServer env
, IsDBCmd env BackendInternalError m
)
......
......@@ -19,20 +19,19 @@ import Control.Lens (over)
import Gargantext.Core (Lang)
import Gargantext.Database.Admin.Types.Hyperdata.Corpus (HyperdataCorpus, _hc_lang)
import Gargantext.Database.Admin.Types.Node (CorpusId)
import Gargantext.Database.Prelude (IsDBCmd)
import Gargantext.Database.Prelude
import Gargantext.Database.Query.Table.Node (getNodeWith)
import Gargantext.Database.Query.Table.Node.Error (HasNodeError)
import Gargantext.Database.Query.Table.Node.UpdateOpaleye (updateHyperdata)
import Gargantext.Database.Schema.Node (node_hyperdata)
import Gargantext.Prelude
import Gargantext.Utils.Jobs.Monad (MonadJobStatus)
-- | Updates the 'HyperdataCorpus' with the input 'Lang'.
addLanguageToCorpus :: (HasNodeError err, IsDBCmd env err m, MonadJobStatus m)
addLanguageToCorpus :: HasNodeError err
=> CorpusId
-> Lang
-> m ()
-> DBUpdate err ()
addLanguageToCorpus cId lang = do
hyperNode <- getNodeWith cId (Proxy @HyperdataCorpus)
let hyperNode' = hyperNode & over node_hyperdata (\corpus -> corpus { _hc_lang = Just lang })
......
......@@ -28,6 +28,7 @@ import Gargantext.API.Prelude (IsGargServer)
import Gargantext.API.Routes.Named.Document qualified as Named
import Gargantext.Core (toDBid)
import Gargantext.Database.Admin.Types.Node (DocId, NodeId, NodeType(..))
import Gargantext.Database.Prelude
import Gargantext.Database.Query.Facet (runViewDocuments, Facet(..))
import Gargantext.Database.Query.Table.Node (getClosestParentIdByType)
import Gargantext.Database.Query.Table.Node.User ( getNodeUser )
......@@ -64,12 +65,13 @@ getDocumentsJSON nodeUserId pId = do
get_document_json :: IsGargServer err env m => NodeId -> DocId -> m DocumentExport
get_document_json nodeUserId pId = do
uId <- view node_user_id <$> getNodeUser nodeUserId
mcId <- getClosestParentIdByType pId NodeCorpus
let cId = maybe (panicTrace "[G.A.N.D.Export] Node has no parent") identity mcId
docs <- runViewDocuments cId False Nothing Nothing Nothing Nothing Nothing
pure DocumentExport { _de_documents = mapFacetDoc uId <$> docs
, _de_garg_version = T.pack $ showVersion PG.version }
runDBQuery $ do
uId <- view node_user_id <$> getNodeUser nodeUserId
mcId <- getClosestParentIdByType pId NodeCorpus
let cId = maybe (panicTrace "[G.A.N.D.Export] Node has no parent") identity mcId
docs <- runViewDocuments cId False Nothing Nothing Nothing Nothing Nothing
pure DocumentExport { _de_documents = mapFacetDoc uId <$> docs
, _de_garg_version = T.pack $ showVersion PG.version }
where
mapFacetDoc uId (FacetDoc { .. }) =
Document { _d_document =
......
......@@ -27,8 +27,8 @@ import Gargantext.API.Prelude ( GargM )
import Gargantext.API.Routes.Named.Document qualified as Named
import Gargantext.API.Worker (serveWorkerAPI)
import Gargantext.Core (Lang(..))
import Gargantext.Core.NLP (nlpServerGet, HasNLPServer)
import Gargantext.Core.NodeStory.Types (HasNodeStoryEnv, HasNodeArchiveStoryImmediateSaver)
import Gargantext.Core.NLP (HasNLPServer)
import Gargantext.Core.NodeStory.Types (HasNodeStoryEnv)
import Gargantext.Core.Text.Corpus.Parsers.Date (mDateSplit)
import Gargantext.Core.Text.Terms (TermType(..))
import Gargantext.Core.Types.Individu (User(..))
......@@ -39,7 +39,7 @@ import Gargantext.Database.Action.Flow.Types ( FlowCmdM )
import Gargantext.Database.Admin.Types.Hyperdata.Corpus ( HyperdataCorpus )
import Gargantext.Database.Admin.Types.Hyperdata.Document (HyperdataDocument(..))
import Gargantext.Database.Admin.Types.Node ( DocId, NodeId, NodeType(NodeCorpus), ParentId )
import Gargantext.Database.Prelude (IsDBCmd)
import Gargantext.Database.Prelude
import Gargantext.Database.Query.Table.Node.Error (HasNodeError)
import Gargantext.Database.Query.Table.Node (getClosestParentIdByType')
import Gargantext.Database.Schema.Node (_node_hyperdata)
......@@ -71,7 +71,7 @@ documentUpload :: (FlowCmdM env err m)
-> DocumentUpload
-> m [DocId]
documentUpload nId doc = do
mcId <- getClosestParentIdByType' nId NodeCorpus
mcId <- runDBQuery $ getClosestParentIdByType' nId NodeCorpus
let cId = case mcId of
Just c -> c
Nothing -> panicTrace $ T.pack $ "[G.A.N.DU] Node has no corpus parent: " <> show nId
......@@ -99,8 +99,7 @@ documentUpload nId doc = do
, _hd_institutes_tree = Nothing }
let lang = EN
ncs <- view $ nlpServerGet lang
addDocumentsToHyperCorpus ncs (Nothing :: Maybe HyperdataCorpus) (Multi lang) cId [hd]
addDocumentsToHyperCorpus (Nothing :: Maybe HyperdataCorpus) (Multi lang) cId [hd]
-- | Imports the documents contained into this 'DocumentExport' into this (local) version
-- of the running node.
......@@ -108,8 +107,7 @@ documentUpload nId doc = do
-- only compatible versions.
remoteImportDocuments :: ( HasNodeError err
, HasNLPServer env
, HasNodeArchiveStoryImmediateSaver env
, HasNodeStoryEnv env
, HasNodeStoryEnv env err
, IsDBCmd env err m
, MonadLogger m
, MonadIO m)
......@@ -122,9 +120,8 @@ remoteImportDocuments :: ( HasNodeError err
-> m [NodeId]
remoteImportDocuments loggedInUser corpusId nodeId WorkSplit{..} documents = do
let la = Multi EN
nlpServerConfig <- view $ nlpServerGet (_tt_lang la)
$(logLocM) INFO $ "Importing " <> T.pack (show _ws_current) <> "/" <> T.pack (show _ws_total) <> " documents for corpus node " <> T.pack (show nodeId)
docs <- addDocumentsToHyperCorpus nlpServerConfig (Nothing :: Maybe HyperdataCorpus) la corpusId (map (_node_hyperdata . _d_document) documents)
docs <- addDocumentsToHyperCorpus (Nothing :: Maybe HyperdataCorpus) la corpusId (map (_node_hyperdata . _d_document) documents)
_versioned <- commitCorpus corpusId (RootId $ _auth_node_id loggedInUser)
$(logLocM) INFO $ "Done importing " <> T.pack (show _ws_current) <> "/" <> T.pack (show _ws_total) <> " documents for corpus node " <> T.pack (show nodeId)
pure docs
......@@ -28,7 +28,7 @@ import Gargantext.API.Prelude (GargM)
import Gargantext.API.Routes.Named.Document qualified as Named
import Gargantext.API.Worker (serveWorkerAPI)
import Gargantext.Core (Lang(..))
import Gargantext.Core.NodeStory (HasNodeStoryImmediateSaver, HasNodeArchiveStoryImmediateSaver, currentVersion)
import Gargantext.Core.NodeStory (currentVersion, hasNodeStory)
import Gargantext.Core.Text.Corpus.Parsers.Date (split')
import Gargantext.Core.Text.Corpus.Parsers.FrameWrite (Author(..), Parsed(..), parseLines, text2titleParagraphs)
import Gargantext.Core.Text.Terms (TermType(..))
......@@ -41,11 +41,13 @@ import Gargantext.Database.Admin.Types.Hyperdata.Frame ( HyperdataFrame(..), get
import Gargantext.Database.Admin.Types.Node ( NodeId, Node, NodeType(..) )
import Gargantext.Database.Query.Table.Node (getChildrenByType, getClosestParentIdByType', getNodeWith, getOrMkList)
import Gargantext.Database.Schema.Node (node_hyperdata, node_name, node_date)
import Gargantext.Database.Prelude
import Gargantext.Prelude
import Gargantext.System.Logging (logLocM, LogLevel(..))
import Gargantext.Utils.Jobs.Monad (MonadJobStatus(..))
import Gargantext.Utils.Jobs.Error (HumanFriendlyErrorText(..))
import Servant.Server.Generic (AsServerT)
import Control.Lens (view)
api :: AuthenticatedUser
-- ^ The logged-in user
......@@ -61,8 +63,7 @@ api authenticatedUser nId =
documentsFromWriteNodes :: ( FlowCmdM env err m
, MonadJobStatus m
, HasNodeStoryImmediateSaver env
, HasNodeArchiveStoryImmediateSaver env )
)
=> AuthenticatedUser
-- ^ The logged-in user
-> NodeId
......@@ -73,7 +74,7 @@ documentsFromWriteNodes authenticatedUser nId Params { selection, lang, paragrap
markStarted 2 jobHandle
markProgress 1 jobHandle
mcId <- getClosestParentIdByType' nId NodeCorpus
mcId <- runDBQuery $ getClosestParentIdByType' nId NodeCorpus
cId <- case mcId of
Just cId -> pure cId
Nothing -> do
......@@ -82,10 +83,10 @@ documentsFromWriteNodes authenticatedUser nId Params { selection, lang, paragrap
markFailed (Just $ UnsafeMkHumanFriendlyErrorText "The requested node has no corpus parent.") jobHandle
panicTrace msg
frameWriteIds <- getChildrenByType nId Notes
frameWriteIds <- runDBQuery $ getChildrenByType nId Notes
-- https://write.frame.gargantext.org/<frame_id>/download
frameWrites <- mapM (\id -> getNodeWith id (Proxy :: Proxy HyperdataFrame)) frameWriteIds
frameWrites <- mapM (\id -> runDBQuery $ getNodeWith id (Proxy :: Proxy HyperdataFrame)) frameWriteIds
frameWritesWithContents <- liftBase $
mapM (\node -> do
......@@ -107,9 +108,11 @@ documentsFromWriteNodes authenticatedUser nId Params { selection, lang, paragrap
-- FIXME(adn) If we were to store the UserID inside an 'AuthenticatedUser', we wouldn't need this.
listId <- getOrMkList cId userId
v <- currentVersion listId
_ <- commitStatePatch listId (Versioned v mempty)
env <- view hasNodeStory
runDBTx $ do
listId <- getOrMkList cId userId
v <- currentVersion listId
void $ commitStatePatch env listId (Versioned v mempty)
markProgress 1 jobHandle
where
......
......@@ -11,7 +11,6 @@ Portability : POSIX
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE IncoherentInstances #-}
module Gargantext.API.Node.File where
......@@ -35,10 +34,13 @@ import Gargantext.Database.GargDB qualified as GargDB
import Gargantext.Database.Query.Table.Node (getNodeWith)
import Gargantext.Database.Query.Table.Node.UpdateOpaleye (updateHyperdata)
import Gargantext.Database.Schema.Node (node_hyperdata)
import Gargantext.Database.Prelude
import Gargantext.Prelude
import Gargantext.Utils.Jobs.Monad (MonadJobStatus(..))
import Servant
import Servant.Server.Generic (AsServerT)
import Control.Lens (view)
import Gargantext.Core.Config (hasConfig)
fileApi :: (FlowCmdM env err m)
......@@ -53,16 +55,14 @@ fileDownload nId = do
-- printDebug "[fileDownload] uId" uId
-- printDebug "[fileDownload] nId" nId
node <- getNodeWith nId (Proxy :: Proxy HyperdataFile)
node <- runDBQuery $ getNodeWith nId (Proxy :: Proxy HyperdataFile)
let (HyperdataFile { _hff_name = name'
, _hff_path = path }) = node ^. node_hyperdata
Contents c <- GargDB.readGargFile $ T.unpack path
let (mMime, _) = DMT.guessType DMT.defaultmtd False $ T.unpack name'
mime = case mMime of
Just m -> m
Nothing -> "text/plain"
mime = fromMaybe "text/plain" mMime
pure $ addHeader (T.pack mime) $ BSResponse c
......@@ -95,26 +95,29 @@ addWithFile :: (FlowCmdM env err m, MonadJobStatus m)
-> JobHandle m
-> m ()
addWithFile authenticatedUser nId nwf@(NewWithFile _d _l fName) jobHandle = do
cfg <- view hasConfig
-- printDebug "[addWithFile] Uploading file: " nId
markStarted 1 jobHandle
fPath <- GargDB.writeFile nwf
-- printDebug "[addWithFile] File saved as: " fPath
nIds <- mkNodeWithParent NodeFile (Just nId) userId fName
_ <- case nIds of
[nId'] -> do
runDBTx $ do
nIds <- mkNodeWithParent cfg NodeFile (Just nId) userId fName
case nIds of
[nId'] -> do
node <- getNodeWith nId' (Proxy :: Proxy HyperdataFile)
let hl = node ^. node_hyperdata
_ <- updateHyperdata nId' $ hl { _hff_name = fName
, _hff_path = T.pack fPath }
void $ updateHyperdata nId' $ hl { _hff_name = fName
, _hff_path = T.pack fPath }
-- printDebug "[addWithFile] Created node with id: " nId'
pure ()
_ -> pure ()
_ -> pure ()
-- printDebug "[addWithFile] File upload finished: " nId
markComplete jobHandle
where
userId = authenticatedUser ^. auth_user_id
{-|
Module : Gargantext.API.Node.File.Types
Description :
Copyright : (c) CNRS, 2017
License : AGPL + CECILL v3
Maintainer : team@gargantext.org
Stability : experimental
Portability : POSIX
-}
module Gargantext.API.Node.File.Types where
......@@ -10,27 +19,34 @@ import Gargantext.Prelude
import Network.HTTP.Media qualified as M
import Servant
data RESPONSE deriving Typeable
instance Accept RESPONSE where
contentType _ = "text" M.// "*"
instance MimeRender RESPONSE BSResponse where
mimeRender _ (BSResponse val) = BSL.fromStrict $ val
instance MimeUnrender RESPONSE BSResponse where
mimeUnrender _ lbs = Right $ BSResponse (BSL.toStrict lbs)
newtype Contents = Contents BS.ByteString
instance GargDB.ReadFile Contents where
readFile' fp = do
c <- BS.readFile fp
pure $ Contents c
instance ToSchema Contents where
declareNamedSchema _ = declareNamedSchema (Proxy :: Proxy TODO)
instance MimeUnrender OctetStream Contents where
mimeUnrender _ lbs = Right $ Contents (BSL.toStrict lbs)
newtype BSResponse = BSResponse BS.ByteString
deriving (Generic)
instance ToSchema BSResponse where
declareNamedSchema _ = declareNamedSchema (Proxy :: Proxy TODO)
-- | Temporary file, held in the database; its OID is returned
newtype DBTempFile = DBTempFile Int
deriving (Generic, ToJSON)
instance ToSchema DBTempFile where
declareNamedSchema _ = declareNamedSchema (Proxy :: Proxy TODO)
......@@ -14,27 +14,28 @@ Portability : POSIX
module Gargantext.API.Node.FrameCalcUpload where
import Control.Exception.Safe (MonadMask)
import Data.ByteString.Lazy qualified as BSL
import Data.ByteString.UTF8 qualified as BSU8
import Data.Text qualified as T
import Database.PostgreSQL.Simple.LargeObjects qualified as PSQL
import Gargantext.API.Admin.Auth.Types ( auth_node_id, AuthenticatedUser )
import Gargantext.API.Admin.EnvTypes (Env)
import Gargantext.API.Errors.Types ( BackendInternalError )
import Gargantext.API.Node.Corpus.New (addToCorpusWithForm)
import Gargantext.API.Node.Corpus.New (addToCorpusWithTempFile)
import Gargantext.API.Node.Corpus.New.Types (FileFormat(..), FileType(..))
import Gargantext.API.Node.FrameCalcUpload.Types
import Gargantext.API.Node.Types (NewWithForm(..))
import Gargantext.API.Node.Types (NewWithTempFile(..))
import Gargantext.API.Prelude ( GargM )
import Gargantext.API.Routes.Named.FrameCalc qualified as Named
import Gargantext.API.Worker (serveWorkerAPI)
import Gargantext.Core.Config (HasConfig)
import Gargantext.Core.NodeStory.Types ( HasNodeArchiveStoryImmediateSaver )
import Gargantext.Core.Types.Individu (User(..))
import Gargantext.Core.Worker.Jobs.Types qualified as Jobs
import Gargantext.Database.Action.Flow.Types ( FlowCmdM )
import Gargantext.Database.Admin.Types.Hyperdata.Frame ( HyperdataFrame(..) )
import Gargantext.Database.Admin.Types.Node ( NodeId, NodeType(NodeCorpus) )
import Gargantext.Database.Query.Table.Node (getClosestParentIdByType, getNodeWith)
import Gargantext.Database.Prelude
import Gargantext.Database.Schema.Node (node_hyperdata)
import Gargantext.Prelude
import Gargantext.Utils.Jobs.Monad (MonadJobStatus(..), markFailureNoErr)
......@@ -54,10 +55,10 @@ api authenticatedUser nId =
frameCalcUploadAsync :: ( HasConfig env
frameCalcUploadAsync :: ( MonadMask m
, HasConfig env
, FlowCmdM env err m
, MonadJobStatus m
, HasNodeArchiveStoryImmediateSaver env
)
=> AuthenticatedUser
-- ^ The logged-in user
......@@ -65,13 +66,13 @@ frameCalcUploadAsync :: ( HasConfig env
-> FrameCalcUpload
-> JobHandle m
-> m ()
frameCalcUploadAsync authenticatedUser nId (FrameCalcUpload _wf_lang _wf_selection) jobHandle = do
frameCalcUploadAsync authenticatedUser nId (FrameCalcUpload _wtf_lang _wtf_selection) jobHandle = do
markStarted 5 jobHandle
-- printDebug "[frameCalcUploadAsync] uId" uId
-- printDebug "[frameCalcUploadAsync] nId" nId
node <- getNodeWith nId (Proxy :: Proxy HyperdataFrame)
node <- runDBQuery $ getNodeWith nId (Proxy :: Proxy HyperdataFrame)
let (HyperdataFrame { _hf_base = base
, _hf_frame_id = frame_id }) = node ^. node_hyperdata
......@@ -82,24 +83,25 @@ frameCalcUploadAsync authenticatedUser nId (FrameCalcUpload _wf_lang _wf_selecti
manager <- newManager tlsManagerSettings
req <- parseRequest $ T.unpack csvUrl
httpLbs req manager
let body = T.pack $ BSU8.toString $ BSL.toStrict $ responseBody res
let body = BSL.toStrict $ responseBody res
PSQL.Oid oId <- createLargeObject body
-- printDebug "body" body
mCId <- getClosestParentIdByType nId NodeCorpus
mCId <- runDBQuery $ getClosestParentIdByType nId NodeCorpus
-- printDebug "[frameCalcUploadAsync] mCId" mCId
case mCId of
Nothing -> markFailureNoErr 1 jobHandle
Just cId ->
-- FIXME(adn) Audit this conversion.
addToCorpusWithForm (RootId userNodeId)
cId
(NewWithForm { _wf_filetype = TSV
, _wf_fileformat = Plain
, _wf_data = body
, _wf_lang
, _wf_name = "calc-upload.csv"
, _wf_selection }) jobHandle
addToCorpusWithTempFile (RootId userNodeId)
cId
(NewWithTempFile { _wtf_filetype = TSV
, _wtf_fileformat = Plain
, _wtf_file_oid = fromIntegral oId
, _wtf_lang
, _wtf_name = "calc-upload.csv"
, _wtf_selection }) jobHandle
markComplete jobHandle
where
......
......@@ -12,8 +12,6 @@ Async new node feature
-}
{-# LANGUAGE IncoherentInstances #-}
module Gargantext.API.Node.New
where
......@@ -25,13 +23,14 @@ import Gargantext.API.Node.New.Types (PostNode(..))
import Gargantext.API.Prelude (GargM)
import Gargantext.API.Routes.Named.Node qualified as Named
import Gargantext.API.Worker (serveWorkerAPI)
import Gargantext.Core.Config (hasConfig)
import Gargantext.Core.Mail.Types (HasMail)
import Gargantext.Core.Notifications.CentralExchange.Types qualified as CE
import Gargantext.Core.NLP (HasNLPServer)
import Gargantext.Core.Notifications.CentralExchange.Types qualified as CE
import Gargantext.Core.Worker.Jobs.Types qualified as Jobs
import Gargantext.Database.Action.Node (mkNodeWithParent)
import Gargantext.Database.Admin.Types.Node
import Gargantext.Database.Prelude (IsDBCmdExtra, DBCmdWithEnv)
import Gargantext.Database.Prelude
import Gargantext.Database.Query.Table.Node.Error (HasNodeError(..))
import Gargantext.Prelude
import Servant.Server.Generic (AsServerT)
......@@ -85,7 +84,8 @@ postNode' :: ( IsDBCmdExtra env err m
postNode' authenticatedUser nId (PostNode nodeName tn) = do
let userId = authenticatedUser ^. auth_user_id
nodeIds <- mkNodeWithParent tn (Just nId) userId nodeName
cfg <- view hasConfig
nodeIds <- runDBTx $ mkNodeWithParent cfg tn (Just nId) userId nodeName
-- mapM_ (CE.ce_notify . CE.UpdateTreeFirstLevel) nodeIds
CE.ce_notify $ CE.UpdateTreeFirstLevel nId
......
......@@ -18,6 +18,7 @@ import Gargantext.API.Routes.Named.Viz qualified as Named
import Gargantext.Core.Viz.Phylo.API.Tools (getPhyloData, phylo2dot, phylo2dot2json)
import Gargantext.Core.Viz.Phylo.Example (phyloCleopatre)
import Gargantext.Database.Admin.Types.Node (PhyloId, NodeId)
import Gargantext.Database.Prelude
import Gargantext.Prelude
import Servant
import Servant.Server.Generic (AsServerT)
......@@ -37,7 +38,7 @@ getPhyloJson :: NodeId
-> PhyloId
-> GargNoServer (Headers '[Header "Content-Disposition" T.Text] Value)
getPhyloJson _ pId = do
maybePhyloData <- getPhyloData pId
maybePhyloData <- runDBQuery $ getPhyloData pId
let phyloData = fromMaybe phyloCleopatre maybePhyloData
phyloJson <- liftBase $ phylo2dot2json phyloData
pure $ addHeader (T.concat [ "attachment; filename="
......@@ -51,7 +52,7 @@ getPhyloDot :: NodeId
-> PhyloId
-> GargNoServer (Headers '[Header "Content-Disposition" T.Text] T.Text)
getPhyloDot _ pId = do
maybePhyloData <- getPhyloData pId
maybePhyloData <- runDBQuery $ getPhyloData pId
let phyloData = fromMaybe phyloCleopatre maybePhyloData
phyloDot <- liftBase $ phylo2dot phyloData
pure $ addHeader (T.concat [ "attachment; filename="
......
......@@ -19,17 +19,19 @@ import Data.Text qualified as Text
import Gargantext.API.Node.Share.Types (ShareNodeParams(..))
import Gargantext.API.Prelude (IsGargServer)
import Gargantext.API.Routes.Named.Share qualified as Named
import Gargantext.Core.Notifications.CentralExchange.Types (CEMessage)
import Gargantext.Core.Types.Individu (User(..), arbitraryUsername)
import Gargantext.Database.Action.Share (ShareNodeWith(..))
import Gargantext.Database.Action.Share as DB (shareNodeWith, unshare)
import Gargantext.Database.Action.User (getUserId', getUsername)
import Gargantext.Database.Action.User.New (guessUserName, newUser)
import Gargantext.Database.Admin.Types.Node (NodeId, NodeType(..), UserId(..))
import Gargantext.Database.Prelude (IsDBCmdExtra)
import Gargantext.Database.Prelude
import Gargantext.Database.Query.Table.Node.Error (HasNodeError(..))
import Gargantext.Database.Query.Tree (findNodesWithType)
import Gargantext.Prelude
import Servant.Server.Generic (AsServerT)
import qualified Gargantext.Core.Notifications.CentralExchange.Types as CE
------------------------------------------------------------------------
-- TODO permission
......@@ -48,20 +50,20 @@ api userInviting nId (ShareTeamParams user') = do
user <- case guessUserName user'' of
Nothing -> pure user''
Just (u, _) -> do
isRegistered <- getUserId' (UserName u)
isRegistered <- runDBQuery $ getUserId' (UserName u)
case isRegistered of
Right _ -> do
-- printDebug "[G.A.N.Share.api]" ("Team shared with " <> u)
pure u
Left _err -> do
username' <- getUsername userInviting
username' <- runDBQuery (getUsername userInviting)
unless (username' `List.elem` arbitraryUsername) $ do
-- TODO better analysis of the composition of what is shared
children <- findNodesWithType nId [NodeList] [ NodeFolderShared
, NodeTeam
, NodeFolder
, NodeCorpus
]
children <- runDBQuery $ findNodesWithType nId [NodeList] [ NodeFolderShared
, NodeTeam
, NodeFolder
, NodeCorpus
]
_ <- if List.null children
then do
-- printDebug "[G.A.N.Share.api]" ("Invitation is enabled if you share a corpus at least" :: Text)
......@@ -72,11 +74,22 @@ api userInviting nId (ShareTeamParams user') = do
pure ()
pure u
fromIntegral <$> DB.shareNodeWith (ShareNodeWith_User NodeFolderShared (UserName user)) nId
fromIntegral <$> shareNodeAndNotify (shareNodeWith (ShareNodeWith_User NodeFolderShared (UserName user)) nId)
api _uId nId2 (SharePublicParams nId1) =
fromIntegral <$> DB.shareNodeWith (ShareNodeWith_Node NodeFolderPublic nId1) nId2
fromIntegral <$> shareNodeAndNotify (shareNodeWith (ShareNodeWith_Node NodeFolderPublic nId1) nId2)
shareNodeAndNotify :: ( HasNodeError err
, IsDBCmdExtra env err m
, MonadRandom m
)
=> DBUpdate err (Int, [CEMessage])
-> m Int
shareNodeAndNotify dbTx = do
(res, msgs) <- runDBTx dbTx
forM_ msgs CE.ce_notify
pure res
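-- Note: the 'CEMessage's are produced inside the 'DBUpdate' but only sent via
-- 'CE.ce_notify' after 'runDBTx' has returned, i.e. notifications are emitted
-- outside the database transaction rather than from within it.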
-- | Unshare a previously shared node via the /share endpoint.
unShare :: IsGargServer env err m => NodeId -> Named.UnshareNode (AsServerT m)
unShare = Named.UnshareNode . DB.unshare
unShare p = Named.UnshareNode (\n -> runDBTx $ DB.unshare p n)
......@@ -31,6 +31,9 @@ import Gargantext.Prelude
import Web.FormUrlEncoded (FromForm, ToForm)
-------------------------------------------------------
-- | A file is uploaded with this type. Then, for the internal worker
-- job, 'NewWithTempFile' is used, carrying a large-object OID instead
-- of the raw data.
data NewWithForm = NewWithForm
{ _wf_filetype :: !FileType
, _wf_fileformat :: !FileFormat
......@@ -50,6 +53,26 @@ instance ToJSON NewWithForm where
instance ToSchema NewWithForm where
declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_wf_")
-------------------------------------------------------
data NewWithTempFile = NewWithTempFile
{ _wtf_filetype :: !FileType
, _wtf_fileformat :: !FileFormat
, _wtf_file_oid :: !Int
, _wtf_lang :: !(Maybe Lang)
, _wtf_name :: !Text
, _wtf_selection :: !FlowSocialListWith
} deriving (Eq, Show, Generic)
makeLenses ''NewWithTempFile
instance FromForm NewWithTempFile
instance ToForm NewWithTempFile
instance FromJSON NewWithTempFile where
parseJSON = genericParseJSON $ jsonOptions "_wtf_"
instance ToJSON NewWithTempFile where
toJSON = genericToJSON $ jsonOptions "_wtf_"
instance ToSchema NewWithTempFile where
declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_wtf_")
-------------------------------------------------------
data NewWithFile = NewWithFile
......
......@@ -24,7 +24,7 @@ import Gargantext.API.Node.Update.Types (Method(..), UpdateNodeParams(..), Updat
import Gargantext.API.Prelude (GargM, simuLogs)
import Gargantext.API.Routes.Named.Node qualified as Named
import Gargantext.API.Worker (serveWorkerAPI)
import Gargantext.Core.NodeStory.Types (HasNodeStory)
import Gargantext.Core.NodeStory.Types (HasNodeStory, hasNodeStory)
import Gargantext.Core.Text.Ngrams (NgramsType(NgramsTerms))
import Gargantext.Core.Types.Main (ListType(..))
import Gargantext.Core.Viz.Graph.API (recomputeGraph)
......@@ -39,6 +39,7 @@ import Gargantext.Database.Admin.Types.Node ( NodeId, NodeType(NodeCorpus, NodeA
import Gargantext.Database.Query.Table.Node (defaultList, getNode, getChildrenByType, getNodeWith)
import Gargantext.Database.Query.Table.Node.UpdateOpaleye (updateHyperdata)
import Gargantext.Database.Schema.Node (node_parent_id, node_hyperdata)
import Gargantext.Database.Prelude
import Gargantext.Prelude
import Gargantext.System.Logging ( MonadLogger )
import Gargantext.Utils.Jobs.Monad (MonadJobStatus(..))
......@@ -54,6 +55,7 @@ api nId =
, _un_args = p }
}
-- TODO(adn) Make DB-transactional.
updateNode :: (HasNodeStory env err m
, MonadJobStatus m
, MonadLogger m
......@@ -73,11 +75,12 @@ updateNode nId (UpdateNodeParamsGraph
markComplete jobHandle
updateNode nid1 (LinkNodeReq nt nid2) jobHandle = do
env <- view hasNodeStory
markStarted 2 jobHandle
markProgress 1 jobHandle
_ <- case nt of
NodeAnnuaire -> pairing nid2 nid1 Nothing -- defaultList
NodeCorpus -> pairing nid1 nid2 Nothing -- defaultList
NodeAnnuaire -> runDBTx $ pairing env nid2 nid1 Nothing -- defaultList
NodeCorpus -> runDBTx $ pairing env nid1 nid2 Nothing -- defaultList
_ -> panicTrace $ "[G.API.N.Update.updateNode] NodeType not implemented"
<> show nt <> " nid1: " <> show nid1 <> " nid2: " <> show nid2
......@@ -86,7 +89,7 @@ updateNode nid1 (LinkNodeReq nt nid2) jobHandle = do
-- | `Advanced` to update graphs
updateNode lId (UpdateNodeParamsList Advanced) jobHandle = do
markStarted 4 jobHandle
corpusId <- view node_parent_id <$> getNode lId
corpusId <- view node_parent_id <$> (runDBQuery $ getNode lId)
markProgress 1 jobHandle
......@@ -103,16 +106,17 @@ updateNode lId (UpdateNodeParamsList Advanced) jobHandle = do
markComplete jobHandle
updateNode lId (UpdateNodeParamsList _mode) jobHandle = do
env <- view hasNodeStory
markStarted 3 jobHandle
corpusId <- view node_parent_id <$> getNode lId
corpusId <- view node_parent_id <$> runDBQuery (getNode lId)
markProgress 1 jobHandle
_ <- case corpusId of
Just cId -> do
_ <- reIndexWith cId lId NgramsTerms (Set.singleton MapTerm)
_ <- runDBTx $ reIndexWith env cId lId NgramsTerms (Set.singleton MapTerm)
markProgress 1 jobHandle
_ <- updateNgramsOccurrences cId lId
_ <- runDBTx $ updateNgramsOccurrences env cId lId
pure ()
Nothing -> pure ()
......@@ -120,7 +124,7 @@ updateNode lId (UpdateNodeParamsList _mode) jobHandle = do
updateNode phyloId (UpdateNodePhylo config) jobHandle = do
markStarted 3 jobHandle
oldPhylo <- getNodeWith phyloId (Proxy @HyperdataPhylo)
oldPhylo <- runDBQuery $ getNodeWith phyloId (Proxy @HyperdataPhylo)
let corpusId' = view node_parent_id oldPhylo
let mbComputeHistory = oldPhylo ^? node_hyperdata . hp_data . traverse . phylo_computeTime . _Just
markProgress 1 jobHandle
......@@ -137,7 +141,7 @@ updateNode phyloId (UpdateNodePhylo config) jobHandle = do
, _scst_events = Just []
}
-}
_ <- timeMeasured "updateNode.updateHyperdataPhylo" $ updateHyperdata phyloId (HyperdataPhylo Nothing (Just phy))
_ <- timeMeasured "updateNode.updateHyperdataPhylo" $ runDBTx $ updateHyperdata phyloId (HyperdataPhylo Nothing (Just phy))
-- TODO: catch the error of sendMail if userId is not found, then debug
-- sendMail (UserDBId userId)
......@@ -145,7 +149,7 @@ updateNode phyloId (UpdateNodePhylo config) jobHandle = do
updateNode tId (UpdateNodeParamsTexts _mode) jobHandle = do
markStarted 2 jobHandle
corpusId <- view node_parent_id <$> getNode tId
corpusId <- view node_parent_id <$> (runDBQuery $ getNode tId)
markProgress 1 jobHandle
_ <- case corpusId of
......@@ -162,11 +166,11 @@ updateNode tId
markStarted 5 jobHandle
markProgress 1 jobHandle
_ <- getNode tId
childTexts <- getChildrenByType tId NodeTexts
childGraphs <- getChildrenByType tId NodeGraph
childPhylos <- getChildrenByType tId NodePhylo
childNodeLists <- getChildrenByType tId NodeList
_ <- runDBQuery (getNode tId)
childTexts <- runDBQuery $ getChildrenByType tId NodeTexts
childGraphs <- runDBQuery $ getChildrenByType tId NodeGraph
childPhylos <- runDBQuery $ getChildrenByType tId NodePhylo
childNodeLists <- runDBQuery $ getChildrenByType tId NodeList
mapM_ (\cId -> updateNode cId (UpdateNodeParamsTexts methodTexts) jobHandle) childTexts
markProgress 1 jobHandle
......@@ -189,15 +193,16 @@ updateDocs :: ( HasNodeStory env err m
-> JobHandle m
-> m ()
updateDocs cId jobHandle = do
env <- view hasNodeStory
markStarted 4 jobHandle
lId <- defaultList cId
_ <- reIndexWith cId lId NgramsTerms (Set.singleton MapTerm)
lId <- runDBQuery $ defaultList cId
_ <- runDBTx $ reIndexWith env cId lId NgramsTerms (Set.singleton MapTerm)
markProgress 1 jobHandle
_ <- updateNgramsOccurrences cId lId
_ <- runDBTx $ updateNgramsOccurrences env cId lId
markProgress 1 jobHandle
_ <- updateContextScore cId lId
_ <- runDBTx $ updateContextScore env cId lId
markProgress 1 jobHandle
_ <- Metrics.updateChart' cId lId NgramsTypes.Docs Nothing
_ <- runDBTx $ Metrics.updateChart' cId lId NgramsTypes.Docs Nothing
markProgress 1 jobHandle
-- printDebug "updateContextsScore" (cId, lId, u)
pure ()
......
......@@ -40,10 +40,10 @@ import Servant
authenticationError :: (MonadError e m, HasAuthenticationError e) => AuthenticationError -> m a
authenticationError = throwError . (_AuthenticationError #)
type EnvC env =
type EnvC env err =
( HasConnectionPool env
, HasConfig env
, HasNodeStoryEnv env
, HasNodeStoryEnv env err
, HasMail env
, HasNLPServer env
, HasManager env
......@@ -66,7 +66,7 @@ type GargServerC env err m =
, HasMail env
, MonadRandom m
, Safe.MonadCatch m
, EnvC env
, EnvC env err
, ErrC err
, ToJSON err
)
......@@ -82,7 +82,7 @@ class (MonadLogger m, GargServerC env err m) => IsGargServer env err m
type GargM env err = ReaderT env (ExceptT err IO)
-- This is the server type using GargM. It needs to be used as little as possible.
-- Instead, prefer GargServer, GargServerT.
type GargServerM env err api = (EnvC env, ErrC err) => ServerT api (GargM env err)
type GargServerM env err api = (EnvC env err, ErrC err) => ServerT api (GargM env err)
-------------------------------------------------------------------
-- | This Type is needed to prepare the function before the GargServer
......
......@@ -11,21 +11,30 @@ Portability : POSIX
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
module Gargantext.API.Routes
where
module Gargantext.API.Routes where
import Data.ByteString.Base64 qualified as BSB64
import Data.Text qualified as T
import Data.Text.Encoding qualified as TE
import Database.PostgreSQL.Simple.LargeObjects qualified as PSQL
import Gargantext.API.Admin.Auth.Types (AuthenticatedUser, auth_user_id)
import Gargantext.API.Admin.EnvTypes (Env)
import Gargantext.API.Errors.Types (BackendInternalError)
import Gargantext.API.Node.Corpus.New.Types ( FileFormat(..) )
import Gargantext.API.Node.Types (NewWithForm(..), NewWithTempFile(..))
import Gargantext.API.Prelude (GargServer, GargM)
import Gargantext.API.Routes.Named.Annuaire qualified as Named
import Gargantext.API.Routes.Named.Corpus qualified as Named
import Gargantext.API.Worker (serveWorkerAPI)
import Gargantext.API.Worker (serveWorkerAPI, serveWorkerAPIM)
import Gargantext.Core.Types.Individu (User(..))
import Gargantext.Core.Worker.Jobs.Types qualified as Jobs
import Gargantext.Database.Prelude (createLargeObject)
import Gargantext.Prelude
import Gargantext.System.Logging ( logLocM, LogLevel(..) )
import Servant (Get, JSON)
import Servant.Server.Generic (AsServerT)
......@@ -63,19 +72,34 @@ addCorpusWithQuery user =
, Jobs._acq_cid = cId }
}
addCorpusWithForm :: User -> Named.AddWithForm (AsServerT (GargM Env BackendInternalError))
addCorpusWithForm user =
Named.AddWithForm {
addWithFormEp = \cId -> serveWorkerAPI $ \p ->
-- /NOTE(adinapoli)/ Track the initial steps outside 'addToCorpusWithForm', because it's
-- called in a few places, and the job status might be different between invocations.
-- markStarted 3 jHandle
-- New.addToCorpusWithForm user cid i jHandle
Jobs.AddCorpusFormAsync { Jobs._acf_args = p
, Jobs._acf_user = user
, Jobs._acf_cid = cId }
-- | Uses a temporary file stored in Postgres to add that file to a corpus
addWithTempFileApi :: AuthenticatedUser
-> Named.AddWithTempFile (AsServerT (GargM Env BackendInternalError))
addWithTempFileApi authenticatedUser =
Named.AddWithTempFile {
addWithTempFileEp = \cId ->
serveWorkerAPIM $ \(NewWithForm { .. }) -> do
let bs = case _wf_fileformat of
Plain -> cs _wf_data
ZIP -> case BSB64.decode $ TE.encodeUtf8 _wf_data of
Left err -> panicTrace $ T.pack "[addWithTempFileApi] error decoding base64: " <> T.pack err
Right decoded -> decoded
(PSQL.Oid oId) <- createLargeObject bs
$(logLocM) DEBUG $ "[addWithTempFileApi] oId': " <> show oId
let args = NewWithTempFile { _wtf_filetype = _wf_filetype
, _wtf_fileformat = _wf_fileformat
, _wtf_file_oid = fromIntegral oId
, _wtf_lang = _wf_lang
, _wtf_name = _wf_name
, _wtf_selection = _wf_selection }
pure $ Jobs.AddCorpusTempFileAsync { _actf_args = args
, _actf_cid = cId
, _actf_user = userId }
}
where
userId = UserDBId $ authenticatedUser ^. auth_user_id
addAnnuaireWithForm :: Named.AddAnnuaireWithForm (AsServerT (GargM Env BackendInternalError))
addAnnuaireWithForm =
Named.AddAnnuaireWithForm {
......
......@@ -14,7 +14,7 @@ Portability : POSIX
module Gargantext.API.Routes.Named.Corpus (
-- * Routes types
CorpusExportAPI(..)
, AddWithForm(..)
, AddWithTempFile(..)
, AddWithQuery(..)
, MakeSubcorpusAPI(..)
-- * Others
......@@ -25,7 +25,7 @@ import Data.Aeson.TH (deriveJSON)
import Data.Swagger (ToSchema(..), genericDeclareNamedSchema)
import Data.Text (Text)
import GHC.Generics
import Gargantext.API.Node.Corpus.Export.Types (Corpus)
import Gargantext.API.Node.Corpus.Export.Types (Corpus, CorpusSQLite)
import Gargantext.API.Node.Types (NewWithForm, WithQuery)
import Gargantext.API.Worker (WorkerAPI)
import Gargantext.Core.Text.Ngrams (NgramsType(..))
......@@ -35,22 +35,27 @@ import Gargantext.Prelude (Bool)
import Servant
--------------------------------------------------
newtype CorpusExportAPI mode = CorpusExportAPI
data CorpusExportAPI mode = CorpusExportAPI
{ corpusExportEp :: mode :- Summary "Corpus Export"
:> "export"
:> QueryParam "listId" ListId
:> QueryParam "ngramsType" NgramsType
:> Get '[JSON] (Headers '[Servant.Header "Content-Disposition" Text] Corpus)
, corpusSQLiteEp :: mode :- Summary "Corpus SQLite export"
:> "sqlite"
:> QueryParam "listId" ListId
:> Get '[OctetStream] (Headers '[Servant.Header "Content-Disposition" Text] CorpusSQLite)
} deriving Generic
newtype AddWithForm mode = AddWithForm
{ addWithFormEp :: mode :- Summary "Add with FormUrlEncoded to corpus endpoint"
:> "corpus"
:> Capture "corpus_id" CorpusId
:> "add"
:> "form"
:> "async"
:> NamedRoutes (WorkerAPI '[FormUrlEncoded] NewWithForm)
data AddWithTempFile mode = AddWithTempFile
{ addWithTempFileEp :: mode :- Summary "Add with form via temp file"
:> "corpus"
:> Capture "corpus_id" CorpusId
:> "add"
:> "form"
:> "async"
:> NamedRoutes (WorkerAPI '[FormUrlEncoded] NewWithForm)
} deriving Generic
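-- Note: only the record and type names change here; the endpoint path is the
-- same as the former 'AddWithForm' (corpus/<corpus_id>/add/form/async, served
-- through the worker API).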
newtype AddWithQuery mode = AddWithQuery
......
{-|
Module : Gargantext.API.Routes.Named.File
Description :
Copyright : (c) CNRS, 2017
License : AGPL + CECILL v3
Maintainer : team@gargantext.org
Stability : experimental
Portability : POSIX
-}
{-# LANGUAGE TypeOperators #-}
module Gargantext.API.Routes.Named.File (
......@@ -6,6 +16,7 @@ module Gargantext.API.Routes.Named.File (
, FileAsyncAPI(..)
) where
import Data.Text (Text)
import GHC.Generics (Generic)
import Gargantext.API.Node.File.Types (BSResponse, RESPONSE)
......@@ -13,6 +24,7 @@ import Gargantext.API.Node.Types (NewWithFile)
import Gargantext.API.Worker (WorkerAPI)
import Servant
data FileAPI mode = FileAPI
{ fileDownloadEp :: mode :- Summary "File download"
:> "download"
......@@ -26,4 +38,3 @@ data FileAsyncAPI mode = FileAsyncAPI
:> "add"
:> NamedRoutes (WorkerAPI '[FormUrlEncoded] NewWithFile)
} deriving Generic
{-|
Module : Gargantext.API.Routes.Named.Node
Description :
Copyright : (c) CNRS, 2017
License : AGPL + CECILL v3
Maintainer : team@gargantext.org
Stability : experimental
Portability : POSIX
-}
{-# LANGUAGE TypeOperators #-}
module Gargantext.API.Routes.Named.Node (
......@@ -84,7 +94,7 @@ data NodeAPI a mode = NodeAPI
, searchAPI :: mode :- "search" :> NamedRoutes (SearchAPI SearchResult)
, shareAPI :: mode :- "share" :> NamedRoutes ShareNode
, unshareEp :: mode :- "unshare" :> NamedRoutes UnshareNode
, publishAPI :: mode :- "publish" :> (PolicyChecked (NamedRoutes PublishAPI))
, publishAPI :: mode :- "publish" :> PolicyChecked (NamedRoutes PublishAPI)
---- Pairing utilities
, pairWithEp :: mode :- "pairwith" :> NamedRoutes PairWith
, pairsEp :: mode :- "pairs" :> NamedRoutes Pairs
......
......@@ -8,9 +8,8 @@ Stability : experimental
Portability : POSIX
-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
module Gargantext.API.Routes.Named.Private (
-- * Routes types
......@@ -26,13 +25,13 @@ module Gargantext.API.Routes.Named.Private (
import Data.Text (Text)
import Gargantext.API.Admin.Auth.Types
import Gargantext.API.Auth.PolicyCheck
import Gargantext.API.Routes.Named.Contact
import Gargantext.API.Routes.Named.Context
import Gargantext.API.Routes.Named.Corpus
import Gargantext.API.Routes.Named.Count
import Gargantext.API.Routes.Named.Document
import Gargantext.API.Admin.Auth.Types (AuthenticatedUser)
import Gargantext.API.Auth.PolicyCheck (PolicyChecked)
import Gargantext.API.Routes.Named.Contact (ContactAPI)
import Gargantext.API.Routes.Named.Context (ContextAPI)
import Gargantext.API.Routes.Named.Corpus (AddWithTempFile, AddWithQuery, CorpusExportAPI, MakeSubcorpusAPI)
import Gargantext.API.Routes.Named.Count (CountAPI, Query)
import Gargantext.API.Routes.Named.Document (DocumentExportAPI)
import Gargantext.API.Routes.Named.List (GETAPI, JSONAPI, TSVAPI)
import Gargantext.API.Routes.Named.Node
import Gargantext.API.Routes.Named.Remote
......@@ -95,7 +94,7 @@ data GargPrivateAPI' mode = GargPrivateAPI'
:> Capture "tree_id" NodeId
:> NamedRoutes TreeFlatAPI
, membersAPI :: mode :- "members" :> Summary "Team node members" :> NamedRoutes MembersAPI
, addWithFormAPI :: mode :- NamedRoutes AddWithForm
, addWithTempFile :: mode :- NamedRoutes AddWithTempFile
, addWithQueryEp :: mode :- NamedRoutes AddWithQuery
, makeSubcorpusAPI :: mode :- NamedRoutes MakeSubcorpusAPI
, listGetAPI :: mode :- NamedRoutes GETAPI
......
......@@ -28,6 +28,7 @@ import Gargantext.Core.Types.Search (toRow)
import Gargantext.Database.Action.Flow.Pairing (isPairedWith)
import Gargantext.Database.Action.Search (searchInCorpus, searchInCorpusWithContacts)
import Gargantext.Database.Admin.Types.Node (NodeId, NodeType(..))
import Gargantext.Database.Prelude
import Gargantext.Prelude
import Gargantext.System.Logging (logLocM, LogLevel(..))
import Servant.Server.Generic (AsServerT)
......@@ -44,10 +45,10 @@ api nId = Named.SearchAPI $ \query o l order -> case query of
$(logLocM) DEBUG $ T.pack "New search started with query = " <> (getRawQuery rawQuery)
SearchResult <$> SearchResultDoc
<$> map (toRow nId)
<$> searchInCorpus nId False q o l order
<$> runDBQuery (searchInCorpus nId False q o l order)
(SearchQuery rawQuery SearchContact) -> case parseQuery rawQuery of
Left err -> pure $ SearchResult $ SearchNoResult (T.pack err)
Right q -> do
Right q -> runDBQuery $ do
-- printDebug "isPairedWith" nId
aIds <- isPairedWith nId NodeAnnuaire
-- TODO if paired with several corpus
......
......@@ -6,7 +6,7 @@ module Gargantext.API.Server.Named.Ngrams (
, tableNgramsPostChartsAsync
) where
import Control.Lens ((%%~))
import Control.Lens ((%%~), view)
import Data.Map.Strict qualified as Map
import Data.Set qualified as Set
import Gargantext.API.Admin.Auth.Types (AuthenticatedUser, PathId (..))
......@@ -19,27 +19,26 @@ import Gargantext.API.Ngrams.Types
import Gargantext.API.Prelude (GargM)
import Gargantext.API.Routes.Named.Table qualified as Named
import Gargantext.API.Worker (serveWorkerAPI)
import Gargantext.Core.NodeStory.Types (HasNodeStory)
import Gargantext.Core.NodeStory.Types (HasNodeStory, NodeStoryEnv, hasNodeStory, HasNodeStoryEnv)
import Gargantext.Core.Types (DocId, ListId, ListType(..), NodeId, NodeType(..))
import Gargantext.Core.Types.Query (Limit(..), Offset(..))
import Gargantext.Core.Worker.Jobs.Types qualified as Jobs
import Gargantext.Database.Admin.Config (userMaster)
import Gargantext.Database.Query.Table.Ngrams ( selectNgramsByDoc )
import Gargantext.Database.Query.Table.Node.Error (HasNodeError)
import Gargantext.Database.Query.Table.Node (getNode)
import Gargantext.Database.Query.Table.Node.Select ( selectNodesWithUsername )
import Gargantext.Database.Schema.Node (node_id, node_parent_id, node_user_id)
import Gargantext.Database.Prelude
import Gargantext.Prelude
import Gargantext.System.Logging
import Gargantext.Utils.Jobs.Monad (JobHandle, MonadJobStatus(..), markFailedNoErr)
import Servant.Server.Generic (AsServerT)
apiNgramsTableCorpus :: NodeId -> Named.TableNgramsAPI (AsServerT (GargM Env BackendInternalError))
apiNgramsTableCorpus cId = Named.TableNgramsAPI
{ tableNgramsGetAPI = Named.TableNgramsApiGet $ getTableNgramsCorpus cId
{ tableNgramsGetAPI = Named.TableNgramsApiGet $ getTableNgramsCorpusHandler cId
, tableNgramsPutAPI = Named.TableNgramsApiPut $ tableNgramsPut
, recomputeScoresEp = Named.RecomputeScoresNgramsApiGet $ scoresRecomputeTableNgrams cId
, recomputeScoresEp = Named.RecomputeScoresNgramsApiGet $ scoresRecomputeTableNgramsHandler cId
, tableNgramsGetVersionEp = Named.TableNgramsApiGetVersion $ getTableNgramsVersion cId
, tableNgramsAsyncAPI = apiNgramsAsync cId
}
......@@ -49,21 +48,20 @@ apiNgramsTableDoc :: AuthenticatedUser
-> DocId
-> Named.TableNgramsAPI (AsServerT (GargM Env BackendInternalError))
apiNgramsTableDoc uid dId = withNamedAccess uid (PathNode dId) $ Named.TableNgramsAPI
{ tableNgramsGetAPI = Named.TableNgramsApiGet $ getTableNgramsDoc dId
{ tableNgramsGetAPI = Named.TableNgramsApiGet $ getTableNgramsDocHandler dId
, tableNgramsPutAPI = Named.TableNgramsApiPut tableNgramsPut
, recomputeScoresEp = Named.RecomputeScoresNgramsApiGet $ scoresRecomputeTableNgrams dId
, recomputeScoresEp = Named.RecomputeScoresNgramsApiGet $ scoresRecomputeTableNgramsHandler dId
, tableNgramsGetVersionEp = Named.TableNgramsApiGetVersion $ getTableNgramsVersion dId
, tableNgramsAsyncAPI = apiNgramsAsync dId
}
getTableNgramsVersion :: ( HasNodeStory env err m
, HasNodeError err )
getTableNgramsVersion :: (IsDBCmd env err m, HasNodeStoryEnv env err)
=> NodeId
-> TabType
-> ListId
-> m Version
getTableNgramsVersion _nId _tabType listId = currentVersion listId
getTableNgramsVersion _nId _tabType listId = runDBQuery $ currentVersion listId
apiNgramsAsync :: NodeId -> Named.TableNgramsAsyncAPI (AsServerT (GargM Env BackendInternalError))
......@@ -83,7 +81,7 @@ tableNgramsPostChartsAsync utn jobHandle = do
let tabType = utn ^. utn_tab_type
let listId = utn ^. utn_list_id
node <- getNode listId
node <- runDBQuery $ getNode listId
let _nId = node ^. node_id
_uId = node ^. node_user_id
mCId = node ^. node_parent_id
......@@ -150,11 +148,15 @@ tableNgramsPostChartsAsync utn jobHandle = do
}
-}
scoresRecomputeTableNgrams :: forall env err m.
( HasNodeStory env err m, HasNodeError err, MonadLogger m )
=> NodeId -> TabType -> ListId -> m Int
scoresRecomputeTableNgrams nId tabType listId = do
tableMap <- getNgramsTableMap listId ngramsType
scoresRecomputeTableNgramsHandler :: (IsDBCmd env err m, HasNodeStoryEnv env err)
=> NodeId -> TabType -> ListId -> m Int
scoresRecomputeTableNgramsHandler nId tabType listId = do
env <- view hasNodeStory
runDBQuery $ scoresRecomputeTableNgrams env nId tabType listId
scoresRecomputeTableNgrams :: NodeStoryEnv err -> NodeId -> TabType -> ListId -> DBQuery err x Int
scoresRecomputeTableNgrams env nId tabType listId = do
tableMap <- getNgramsTableMap env listId ngramsType
_ <- tableMap & v_data %%~ (setNgramsTableScores nId listId ngramsType)
. Map.mapWithKey ngramsElementFromRepo
......@@ -162,19 +164,28 @@ scoresRecomputeTableNgrams nId tabType listId = do
where
ngramsType = ngramsTypeFromTabType tabType
getTableNgramsDocHandler :: (IsDBCmd env err m, HasNodeStoryEnv env err)
=> DocId -> TabType
-> ListId -> Limit -> Maybe Offset
-> Maybe ListType
-> Maybe MinSize -> Maybe MaxSize
-> Maybe OrderBy
-> Maybe Text -- full text search
-> m (VersionedWithCount NgramsTable)
getTableNgramsDocHandler dId tabType listId limit_ offset listType minSize maxSize orderBy _mt = do
env <- view hasNodeStory
runDBQuery $ getTableNgramsDoc env dId tabType listId limit_ offset listType minSize maxSize orderBy _mt
-- | Text search is deactivated for now, for ngrams-by-doc only
getTableNgramsDoc :: ( HasNodeStory env err m
, HasNodeError err
, MonadLogger m
)
=> DocId -> TabType
getTableNgramsDoc :: NodeStoryEnv err
-> DocId -> TabType
-> ListId -> Limit -> Maybe Offset
-> Maybe ListType
-> Maybe MinSize -> Maybe MaxSize
-> Maybe OrderBy
-> Maybe Text -- full text search
-> m (VersionedWithCount NgramsTable)
getTableNgramsDoc dId tabType listId limit_ offset listType minSize maxSize orderBy _mt = do
-> DBQuery err x (VersionedWithCount NgramsTable)
getTableNgramsDoc env dId tabType listId limit_ offset listType minSize maxSize orderBy _mt = do
ns <- selectNodesWithUsername NodeList userMaster
let ngramsType = ngramsTypeFromTabType tabType
ngs <- selectNgramsByDoc (ns <> [listId]) dId ngramsType
......@@ -188,6 +199,6 @@ getTableNgramsDoc dId tabType listId limit_ offset listType minSize maxSize orde
, _nsq_orderBy = orderBy
, _nsq_searchQuery = searchQueryFn
}
getTableNgrams dId listId tabType searchQuery
getTableNgrams env dId listId tabType searchQuery
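The hunks above all apply the same recipe: each Servant endpoint keeps a thin handler in the server monad that pulls the NodeStoryEnv out of the reader environment, then runs a pure, read-only DBQuery to which that env is passed explicitly. A minimal sketch of the recipe, with hypothetical names (myVersionHandler, myVersionQuery) and assuming the IsDBCmd/DBQuery API introduced by this change:
-- Sketch only: these two names are illustrative, not functions added by this commit.
myVersionHandler :: (IsDBCmd env err m, HasNodeStoryEnv env err)
                 => ListId -> m Version
myVersionHandler listId = do
  env <- view hasNodeStory               -- NodeStoryEnv err, read from the server env
  runDBQuery (myVersionQuery env listId) -- run the read-only transaction
-- The database part stays a plain DBQuery value, easy to compose and test.
myVersionQuery :: NodeStoryEnv err -> ListId -> DBQuery err x Version
myVersionQuery _env listId = currentVersion listId  -- as in getTableNgramsVersion above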
......@@ -18,7 +18,7 @@ import Gargantext.API.Node.Phylo.Export qualified as PhyloExport
import Gargantext.API.Node qualified as Tree
import Gargantext.API.Node.ShareURL ( shareURL )
import Gargantext.API.Prelude (GargM)
import Gargantext.API.Routes (addCorpusWithForm, addCorpusWithQuery)
import Gargantext.API.Routes (addWithTempFileApi, addCorpusWithQuery)
import Gargantext.API.Routes.Named.Private qualified as Named
import Gargantext.API.Server.Named.Ngrams
import Gargantext.API.Server.Named.Viz qualified as Viz
......@@ -59,7 +59,7 @@ serverPrivateGargAPI' authenticatedUser@(AuthenticatedUser userNodeId userId)
, treeAPI = Tree.treeAPI authenticatedUser
, treeFlatAPI = Tree.treeFlatAPI authenticatedUser
, membersAPI = members
, addWithFormAPI = addCorpusWithForm (RootId userNodeId)
, addWithTempFile = addWithTempFileApi authenticatedUser
, addWithQueryEp = addCorpusWithQuery (RootId userNodeId)
, makeSubcorpusAPI = Subcorpus.makeSubcorpus userId
, listGetAPI = List.getAPI
......
......@@ -17,7 +17,7 @@ import Gargantext.Database.Admin.Types.Hyperdata.Corpus ( hc_fields )
import Gargantext.Database.Admin.Types.Hyperdata.CorpusField
import Gargantext.Database.Admin.Types.Hyperdata.Folder ( HyperdataFolder )
import Gargantext.Database.Admin.Types.Node ( NodeId(..), Node, unNodeId )
import Gargantext.Database.Prelude (DBCmd, DBCmdExtra)
import Gargantext.Database.Prelude
import Gargantext.Database.Query.Table.Node.Error (HasNodeError(..))
import Gargantext.Database.Query.Table.NodeNode (selectPublicNodes)
import Gargantext.Database.Schema.Node ( NodePoly(..), node_date, node_hyperdata ) -- (NodePoly(..))
......@@ -33,14 +33,14 @@ serverPublicGargAPI baseUrl = Named.GargPublicAPI $
}
api_home :: IsGargServer env err m => Text -> Named.HomeAPI (AsServerT m)
api_home baseUrl = Named.HomeAPI $ catMaybes
api_home baseUrl = Named.HomeAPI $ runDBQuery $ catMaybes
<$> map (toPublicData baseUrl)
<$> filterPublicDatas
<$> selectPublic
api_node :: IsGargServer env err m => NodeId -> Named.FileAPI (AsServerT m)
api_node nId = Named.FileAPI $ do
pubNodes <- publicNodes
pubNodes <- runDBQuery publicNodes
-- TODO optimize with SQL
case Set.member nId pubNodes of
False -> serverError $ err405 { errBody = "Not allowed" }
......@@ -50,7 +50,7 @@ api_node nId = Named.FileAPI $ do
selectPublic :: HasNodeError err
=> DBCmd err [( Node HyperdataFolder, Maybe Int)]
=> DBQuery err x [( Node HyperdataFolder, Maybe Int)]
selectPublic = selectPublicNodes
-- For tests only
......@@ -68,7 +68,7 @@ filterPublicDatas datas =
& Map.elems
publicNodes :: HasNodeError err
=> DBCmdExtra err (Set NodeId)
=> DBQuery err x (Set NodeId)
publicNodes = do
candidates <- filterPublicDatas <$> selectPublicNodes
pure $ Set.fromList
......
......@@ -16,7 +16,7 @@ import Conduit
import Control.Exception.Safe qualified as Safe
import Control.Exception (toException)
import Control.Lens (view, (#), (^.))
import Control.Monad.Except (throwError, MonadError)
import Control.Monad.Except (throwError)
import Control.Monad (void, forM_)
import Data.Aeson qualified as JSON
import Data.Aeson.Types qualified as JS
......@@ -46,14 +46,14 @@ import Gargantext.Core.Config
import Gargantext.Core.Config.Types (f_write_url)
import Gargantext.Core (lookupDBid)
import Gargantext.Core.NLP (HasNLPServer)
import Gargantext.Core.NodeStory.Types (HasNodeStoryEnv, HasNodeArchiveStoryImmediateSaver)
import Gargantext.Core.NodeStory.Types (HasNodeStoryEnv (..))
import Gargantext.Core.Types.Main
import Gargantext.Core.Worker.Jobs (sendJob)
import Gargantext.Core.Worker.Jobs.Types qualified as Jobs
import Gargantext.Database.Admin.Types.Hyperdata.Default (DefaultHyperdata(..))
import Gargantext.Database.Admin.Types.Hyperdata.Frame (HyperdataFrame(..))
import Gargantext.Database.Admin.Types.Node hiding (ERROR, WARNING, INFO)
import Gargantext.Database.Prelude (IsDBCmd)
import Gargantext.Database.Prelude
import Gargantext.Database.Query.Table.Node.Error (HasNodeError, nodeError, NodeError (..))
import Gargantext.Database.Query.Table.Node (insertNodeWithHyperdata, getNodes, getUserRootPrivateNode)
import Gargantext.Database.Query.Table.Node qualified as DB
......@@ -95,7 +95,7 @@ renderExportableNode = \case
instance Serialise ExportableNode where
remoteExportAPI :: (MonadIO m, IsGargServer env BackendInternalError m, HasNodeArchiveStoryImmediateSaver env)
remoteExportAPI :: (MonadIO m, IsGargServer env BackendInternalError m)
=> NodeId
-> AuthenticatedUser
-> Named.RemoteExportAPI (AsServerT m)
......@@ -105,7 +105,7 @@ remoteExportAPI nodeId authenticatedUser =
withPolicy authenticatedUser (remoteExportChecks nodeId) (remoteExportHandler nodeId authenticatedUser payload) mgr
}
remoteImportAPI :: (MonadIO m, IsGargServer env BackendInternalError m, HasNodeArchiveStoryImmediateSaver env)
remoteImportAPI :: (MonadIO m, IsGargServer env BackendInternalError m)
=> AuthenticatedUser
-> Named.RemoteImportAPI (AsServerT m)
remoteImportAPI authenticatedUser =
......@@ -115,10 +115,9 @@ remoteImportAPI authenticatedUser =
type ExpectedPayload = Tree ExportableNode
remoteImportHandler :: forall err env m.
( HasNodeStoryEnv env
( HasNodeStoryEnv env err
, HasNodeError err
, HasBackendInternalError err
, HasNodeArchiveStoryImmediateSaver env
, IsDBCmd env err m
, HasNLPServer env
, MonadLogger m
......@@ -138,7 +137,7 @@ remoteImportHandler loggedInUser c = do
$(logLocM) INFO $ "Importing " <> renderExportableNode x
-- NOTE(adn) By default, we append the imported node(s) to the user's
-- private folder.
privateFolderId <- _node_id <$> getUserRootPrivateNode (_auth_user_id loggedInUser)
privateFolderId <- _node_id <$> runDBQuery (getUserRootPrivateNode (_auth_user_id loggedInUser))
$(logLocM) INFO $ "Attaching " <> renderExportableNode x <> " to private folder " <> T.pack (show privateFolderId)
-- Attempts to insert nodes as we go along.
rootNode <- insertNode (Just privateFolderId) x
......@@ -168,14 +167,14 @@ remoteImportHandler loggedInUser c = do
cfg <- view hasConfig
newHyperdataFrame <- importNote mgr noteAsMarkdown cfg
-- TODO(adn) Import with the valid name.
new_node <- DB.insertNode Notes (Just "Imported note")
(Just $ DefaultFrameCode newHyperdataFrame) parentId (_auth_user_id loggedInUser)
new_node <- runDBTx $ DB.insertNode Notes (Just "Imported note")
(Just $ DefaultFrameCode newHyperdataFrame) parentId (_auth_user_id loggedInUser)
pure new_node
EN_document x docsList -> case lookupDBid $ _node_typename x of
Nothing -> throwError $ _BackendInternalError # InternalUnexpectedError (toException $ userError $ "remoteImportHandler: impossible, node with invalid type.")
Just ty -> do
new_node <- insertNodeWithHyperdata ty (_node_name x) (_node_hyperdata x) mb_parent (_auth_user_id loggedInUser)
new_node <- runDBTx $ insertNodeWithHyperdata ty (_node_name x) (_node_hyperdata x) mb_parent (_auth_user_id loggedInUser)
$(logLocM) INFO $ "Created a new node " <> T.pack (show $ new_node) <> " of type " <> T.pack (show ty)
for_ mb_parent $ \parentId -> do
$(logLocM) INFO $ "Found document list to import..."
......@@ -192,7 +191,7 @@ remoteImportHandler loggedInUser c = do
EN_terms x ngramsList -> case lookupDBid $ _node_typename x of
Nothing -> throwError $ _BackendInternalError # InternalUnexpectedError (toException $ userError $ "remoteImportHandler: impossible, node with invalid type.")
Just ty -> do
new_node <- insertNodeWithHyperdata ty (_node_name x) (_node_hyperdata x) mb_parent (_auth_user_id loggedInUser)
new_node <- runDBTx $ insertNodeWithHyperdata ty (_node_name x) (_node_hyperdata x) mb_parent (_auth_user_id loggedInUser)
$(logLocM) INFO $ "Created a new node " <> T.pack (show $ new_node) <> " of type " <> T.pack (show ty)
$(logLocM) INFO $ "Found ngrams list to import..."
void $ sendJob $ Jobs.ImportRemoteTerms $ Jobs.ImportRemoteTermsPayload new_node ngramsList
......@@ -202,7 +201,7 @@ remoteImportHandler loggedInUser c = do
insertSimple mb_parent x = case lookupDBid $ _node_typename x of
Nothing -> throwError $ _BackendInternalError # InternalUnexpectedError (toException $ userError $ "remoteImportHandler: impossible, node with invalid type.")
Just ty -> do
new_node <- insertNodeWithHyperdata ty (_node_name x) (_node_hyperdata x) mb_parent (_auth_user_id loggedInUser)
new_node <- runDBTx $ insertNodeWithHyperdata ty (_node_name x) (_node_hyperdata x) mb_parent (_auth_user_id loggedInUser)
$(logLocM) INFO $ "Created a new node " <> T.pack (show $ new_node) <> " of type " <> T.pack (show ty)
pure new_node
......@@ -220,8 +219,10 @@ remoteExportHandler :: ( MonadIO m, Safe.MonadCatch m
-> m [NodeId]
remoteExportHandler _rer_node_id loggedInUser Named.RemoteExportRequest{..} = do
mgr <- view gargHttpManager
nodes <- getNodes _rer_node_id
checkNodesTypeAllowed nodes
nodes <- runDBQuery $ do
ns <- getNodes _rer_node_id
checkNodesTypeAllowed ns
pure ns
exportable <- makeExportable (_auth_node_id loggedInUser) nodes
liftIO (withClientM (remoteImportClient _rer_instance_auth (streamEncoder exportable)) (mkClientEnv mgr _rer_instance_url) streamDecode)
`Safe.catch` \(e :: BackendInternalError) -> throwError $ _BackendInternalError # e
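Reads that previously ran as separate DBCmd actions are now grouped under a single runDBQuery, so the fetch and its validation observe the same database snapshot. The shape, mirrored from remoteExportHandler above with a hypothetical wrapper name:
-- Sketch only: 'fetchCheckedTree' is an illustrative name.
fetchCheckedTree nodeId = runDBQuery $ do
  ns <- getNodes nodeId          -- fetch the node tree
  checkNodesTypeAllowed ns       -- validate inside the same read transaction
  pure ns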
......@@ -233,12 +234,13 @@ makeExportable :: (MonadIO m, IsGargServer err env m)
makeExportable userNodeId (TreeN x xs)
| Just nty <- lookupDBid (_node_typename x)
= do
env <- view hasNodeStory
exportableRoot <- case nty of
NodeCorpus -> EN_corpus <$> pure x
NodeGraph -> EN_graph <$> pure x
NodePhylo -> EN_phylo <$> pure x
NodeTexts -> EN_document <$> pure x <*> get_document_json userNodeId (_node_id x)
NodeList -> EN_terms <$> pure x <*> getNgramsList (_node_id x)
NodeList -> EN_terms <$> pure x <*> runDBQuery (getNgramsList env (_node_id x))
Notes -> case JS.parseMaybe JS.parseJSON (_node_hyperdata x) of
Nothing
-> mk_err " invalid HyperdataFrame inside."
......@@ -315,12 +317,12 @@ appendPath t r = case List.last t of
'/' -> t <> List.tail r
_ -> t <> r
checkNodesTypeAllowed :: (MonadError e m, HasNodeError e) => Tree (Node a) -> m ()
checkNodesTypeAllowed :: HasNodeError e => Tree (Node a) -> DBQuery e x ()
checkNodesTypeAllowed (TreeN r xs) = do
checkNodeTypeAllowed r
mapM_ checkNodesTypeAllowed xs
checkNodeTypeAllowed :: (MonadError e m, HasNodeError e) => Node a -> m ()
checkNodeTypeAllowed :: HasNodeError e => Node a -> DBQuery e x ()
checkNodeTypeAllowed n
| Just nty <- lookupDBid (_node_typename n)
, nty `elem` exportableNodeTypes
......
......@@ -3,15 +3,18 @@ module Gargantext.API.Server.Named.Viz (
graphAPI
) where
import Control.Lens (view)
import Gargantext.API.Admin.Auth (withNamedAccess)
import Gargantext.API.Admin.Auth.Types (AuthenticatedUser, PathId(..))
import Gargantext.API.Admin.EnvTypes (Env)
import Gargantext.API.Errors.Types ( BackendInternalError )
import Gargantext.API.Prelude (GargM)
import Gargantext.API.Routes.Named.Viz qualified as Named
import Gargantext.Core.Config (hasConfig)
import Gargantext.Core.NodeStory.Types (hasNodeStory)
import Gargantext.Core.Viz.Graph.API
-- (cooc2graph)
import Gargantext.Database.Admin.Types.Node (NodeId, UserId)
import Gargantext.Database.Prelude
import Gargantext.Prelude
import Servant.Server.Generic (AsServerT)
......@@ -20,15 +23,19 @@ graphAPI :: AuthenticatedUser -> UserId -> NodeId -> Named.GraphAPI (AsServerT (
graphAPI authenticatedUser userId n = withNamedAccess authenticatedUser (PathNode n) $ Named.GraphAPI
{ getGraphEp = getGraph n
, getGraphAsyncEp = graphAsync n
, cloneGraphEp = graphClone userId n
, cloneGraphEp = \grAPI -> do
cfg <- view hasConfig
runDBTx $ graphClone cfg userId n grAPI
, gexfEp = getGraphGexf n
, graphVersionsAPI = graphVersionsAPI userId n
, updateGraphLegendEp = updateGraphLegend n
, updateGraphLegendEp = runDBTx . updateGraphLegend n
}
graphVersionsAPI :: UserId -> NodeId -> Named.GraphVersionsAPI (AsServerT (GargM Env BackendInternalError))
graphVersionsAPI u n = Named.GraphVersionsAPI
{ getGraphVersionsEp = graphVersions u n
{ getGraphVersionsEp = do
env <- view hasNodeStory
runDBTx $ graphVersions env u n
, recomputeGraphVersionEp = recomputeVersions n
}
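Write paths go through runDBTx instead: the endpoint gathers what it needs from the reader environment (the config for cloneGraphEp, the NodeStoryEnv for getGraphVersionsEp) and then commits one DBUpdate transaction. A hedged sketch, assuming the HasConfig class behind the hasConfig getter and using a placeholder update:
-- Sketch only: 'cloneEp' and 'cloneUpdate' are illustrative names.
cloneEp :: (IsDBCmd env err m, HasConfig env) => UserId -> NodeId -> m NodeId
cloneEp userId n = do
  cfg <- view hasConfig               -- GargConfig, read outside the transaction
  runDBTx (cloneUpdate cfg userId n)  -- one write transaction per request
cloneUpdate :: GargConfig -> UserId -> NodeId -> DBUpdate err NodeId
cloneUpdate _cfg _userId n = pure n   -- placeholder body; a real update would copy rows here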
......@@ -41,7 +41,7 @@ import Gargantext.Core.Types.Query (Offset, Limit)
import Gargantext.Database.Action.Learn (FavOrTrash(..), moreLike)
import Gargantext.Database.Action.Search (searchCountInCorpus, searchInCorpus)
import Gargantext.Database.Admin.Types.Node (ContactId, CorpusId, NodeId)
import Gargantext.Database.Prelude (IsDBCmdExtra, IsDBCmd, DBCmd)
import Gargantext.Database.Prelude
import Gargantext.Database.Query.Facet (FacetDoc , runViewDocuments, runCountDocuments, OrderBy(..), runViewAuthorsDoc)
import Gargantext.Database.Query.Table.Node.Error (HasNodeError)
import Gargantext.Prelude
......@@ -81,7 +81,7 @@ getTableApi cId tabType mLimit mOffset mOrderBy mQuery mYear =
where
get_table = do
$(logLocM) DEBUG $ "getTable cId = " <> T.pack (show cId)
t <- getTable cId tabType mOffset mLimit mOrderBy mQuery mYear
t <- runDBQuery $ getTable cId tabType mOffset mLimit mOrderBy mQuery mYear
pure $ constructHashedResponse t
postTableApi :: (IsDBCmdExtra env err m, MonadLogger m, HasNodeError err)
......@@ -91,7 +91,7 @@ postTableApi :: (IsDBCmdExtra env err m, MonadLogger m, HasNodeError err)
postTableApi cId tq = case tq of
TableQuery o l order ft "" -> do
$(logLocM) DEBUG $ "New search with no query"
getTable cId (Just ft) (Just o) (Just l) (Just order) Nothing Nothing
runDBQuery $ getTable cId (Just ft) (Just o) (Just l) (Just order) Nothing Nothing
TableQuery o l order ft q -> case ft of
Docs -> do
$(logLocM) DEBUG $ "New search with query " <> getRawQuery q
......@@ -121,7 +121,7 @@ searchInCorpus' cId t q o l order = do
Left noParseErr -> do
$(logLocM) ERROR $ "Invalid input query " <> (getRawQuery q) <> " , error = " <> (T.pack noParseErr)
pure $ TableResult 0 []
Right boolQuery -> do
Right boolQuery -> runDBQuery $ do
docs <- searchInCorpus cId t boolQuery o l order
countAllDocs <- searchCountInCorpus cId t (Just boolQuery)
pure $ TableResult { tr_docs = docs
......@@ -136,7 +136,7 @@ getTable :: HasNodeError err
-> Maybe OrderBy
-> Maybe RawQuery
-> Maybe Text
-> DBCmd err FacetTableResult
-> DBQuery err x FacetTableResult
getTable cId ft o l order raw_query year = do
docs <- getTable' cId ft o l order query year
docsCount <- runCountDocuments cId (ft == Just Trash) query year
......@@ -152,7 +152,7 @@ getTable' :: HasNodeError err
-> Maybe OrderBy
-> Maybe Text
-> Maybe Text
-> DBCmd err [FacetDoc]
-> DBQuery err x [FacetDoc]
getTable' cId ft o l order query year =
case ft of
(Just Docs) -> runViewDocuments cId False o l order query year
......@@ -162,10 +162,12 @@ getTable' cId ft o l order query year =
x -> panicTrace $ "not implemented in getTable: " <> (show x)
getPair :: ContactId -> Maybe TabType
-> Maybe Offset -> Maybe Limit
-> Maybe OrderBy -> DBCmd err [FacetDoc]
getPair cId ft o l order =
getPair :: IsDBCmd env err m
=> ContactId
-> Maybe TabType
-> Maybe Offset -> Maybe Limit
-> Maybe OrderBy -> m [FacetDoc]
getPair cId ft o l order = runDBQuery $ do
case ft of
(Just Docs) -> runViewAuthorsDoc cId False o l order
(Just Trash) -> runViewAuthorsDoc cId True o l order
......
......@@ -13,6 +13,9 @@ Portability : POSIX
module Gargantext.API.Worker where
import Data.Aeson qualified as JSON
import Data.ByteString.Lazy qualified as BL
import Data.Text.Encoding qualified as TE
import Gargantext.API.Prelude (IsGargServer)
import Gargantext.Core.Worker.Jobs (sendJob)
import Gargantext.Core.Worker.Jobs.Types (Job(..), getWorkerMNodeId)
......@@ -41,7 +44,7 @@ serveWorkerAPIM mkJob = WorkerAPI { workerAPIPost }
where
workerAPIPost i = do
job <- mkJob i
logM DEBUG $ "[serveWorkerAPI] sending job " <> show job
logM DEBUG $ "[serveWorkerAPI] sending job " <> TE.decodeUtf8 (BL.toStrict $ JSON.encode job)
mId <- sendJob job
pure $ JobInfo { _ji_message_id = mId
, _ji_mNode_id = getWorkerMNodeId job }
......@@ -59,4 +62,3 @@ serveWorkerAPIEJob f = WorkerAPI { workerAPIPost }
mId <- sendJob job
pure $ JobInfo { _ji_message_id = mId
, _ji_mNode_id = getWorkerMNodeId job }
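The logging tweak above swaps show for an aeson round-trip, so the logged payload is byte-for-byte what the worker queue receives. The three-step conversion can be captured once in a small helper (hypothetical name):
-- Sketch only: encode to a lazy ByteString, make it strict, decode it as UTF-8 Text.
jsonForLog :: JSON.ToJSON a => a -> Text
jsonForLog = TE.decodeUtf8 . BL.toStrict . JSON.encode
-- usage:  logM DEBUG $ "[serveWorkerAPI] sending job " <> jsonForLog job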
......@@ -20,7 +20,7 @@ import Data.Bimap (Bimap)
import Data.Bimap qualified as Bimap
import Data.LanguageCodes qualified as ISO639
import Data.Morpheus.Types (GQLType)
import Data.Swagger (ToSchema(..), defaultSchemaOptions, genericDeclareNamedSchemaUnrestricted)
import Data.Swagger (ToParamSchema, ToSchema(..), defaultSchemaOptions, genericDeclareNamedSchemaUnrestricted)
import Data.Text (pack)
import Gargantext.Prelude hiding (All)
import Prelude (userError)
......@@ -70,6 +70,7 @@ defaultLanguage = EN
instance ToJSON Lang
instance FromJSON Lang
instance ToParamSchema Lang
instance ToSchema Lang where
declareNamedSchema = genericDeclareNamedSchemaUnrestricted defaultSchemaOptions
instance FromHttpApiData Lang
......
......@@ -17,10 +17,6 @@ module Gargantext.Core.NodeStory.Types
( HasNodeStory
, HasNodeStoryEnv
, hasNodeStory
, HasNodeStoryImmediateSaver
, hasNodeStoryImmediateSaver
, HasNodeArchiveStoryImmediateSaver
, hasNodeArchiveStoryImmediateSaver
, NodeStory(..)
, NgramsState'
, NgramsStatePatch'
......@@ -30,8 +26,10 @@ module Gargantext.Core.NodeStory.Types
, initNodeStory
, nse_getter
, nse_getter_multi
, nse_saver_immediate
, nse_archive_saver_immediate
, nse_saver
, nse_archive_saver
, hasNodeStoryImmediateSaver
, hasNodeArchiveStoryImmediateSaver
-- , nse_var
, unNodeStory
, Archive(..)
......@@ -42,12 +40,13 @@ module Gargantext.Core.NodeStory.Types
, a_state
, a_version
, combineState
, ArchiveState
, ArchiveStateSet
, ArchiveStateList )
where
import Codec.Serialise.Class ( Serialise )
import Control.Lens (Getter)
import Control.Lens (Getter, Lens')
import Data.Aeson hiding ((.=), decode)
import Data.Map.Strict qualified as Map
import Data.Profunctor.Product.TH (makeAdaptorAndInstance)
......@@ -58,7 +57,7 @@ import Gargantext.Database.Admin.Types.Node ( NodeId(..) )
import Gargantext.Core.Text.Ngrams qualified as Ngrams
import Gargantext.Core.Utils.Prefix (unPrefix)
import Gargantext.Database.Admin.Config ()
import Gargantext.Database.Prelude (IsDBCmd)
import Gargantext.Database.Prelude
import Gargantext.Database.Query.Table.Node.Error (HasNodeError())
import Gargantext.Prelude hiding (to)
import Opaleye (DefaultFromField(..), SqlJsonb, fromPGSFromField)
......@@ -187,37 +186,22 @@ type ArchiveList = Archive NgramsState' NgramsStatePatch'
------------------------------------------------------------------------
data NodeStoryEnv = NodeStoryEnv
{ _nse_saver_immediate :: !(NodeId -> ArchiveList -> IO ())
, _nse_archive_saver_immediate :: !(NodeId -> ArchiveList -> IO ArchiveList)
, _nse_getter :: !(NodeId -> IO ArchiveList)
, _nse_getter_multi :: !([NodeId] -> IO NodeListStory)
data NodeStoryEnv err = NodeStoryEnv
{ _nse_saver :: !(NodeId -> ArchiveList -> DBUpdate err ())
, _nse_archive_saver :: !(NodeId -> ArchiveList -> DBUpdate err ArchiveList)
, _nse_getter :: !(forall x. NodeId -> DBQuery err x ArchiveList)
, _nse_getter_multi :: !(forall x. [NodeId] -> DBQuery err x NodeListStory)
--, _nse_cleaner :: !(IO ()) -- every 12 hours: cleans the repos of unused NodeStories
-- , _nse_lock :: !FileLock -- TODO (it depends on the option: if with database or file only)
}
deriving (Generic)
type HasNodeStory env err m = ( IsDBCmd env err m
, MonadReader env m
, MonadError err m
, HasNodeStoryEnv env
, HasNodeError err
)
class (HasNodeStoryImmediateSaver env)
=> HasNodeStoryEnv env where
hasNodeStory :: Getter env NodeStoryEnv
class HasNodeStoryImmediateSaver env where
hasNodeStoryImmediateSaver :: Getter env (NodeId -> ArchiveList -> IO ())
type HasNodeStory env err m = ( IsDBCmd env err m, HasNodeStoryEnv env err, HasNodeError err)
class HasNodeArchiveStoryImmediateSaver env where
hasNodeArchiveStoryImmediateSaver :: Getter env (NodeId -> ArchiveList -> IO ArchiveList)
class HasNodeStoryEnv env err where
hasNodeStory :: Getter env (NodeStoryEnv err)
type ArchiveStateList = [(Ngrams.NgramsType, NgramsTerm, NgramsRepoElement)]
type ArchiveState = (Ngrams.NgramsType, NgramsTerm, NgramsRepoElement)
type ArchiveStateList = [ArchiveState]
type ArchiveStateSet = Set.Set (Ngrams.NgramsType, NgramsTerm)
------------------------------------------------------------------------
......@@ -226,3 +210,9 @@ type ArchiveStateSet = Set.Set (Ngrams.NgramsType, NgramsTerm)
makeLenses ''NodeStoryEnv
makeLenses ''NodeStory
makeLenses ''Archive
hasNodeStoryImmediateSaver :: Lens' (NodeStoryEnv err) (NodeId -> ArchiveList -> DBUpdate err ())
hasNodeStoryImmediateSaver = nse_saver
hasNodeArchiveStoryImmediateSaver :: Lens' (NodeStoryEnv err) (NodeId -> ArchiveList -> DBUpdate err ArchiveList)
hasNodeArchiveStoryImmediateSaver = nse_archive_saver
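The getter fields of the reworked NodeStoryEnv are rank-2 (forall x.), so a single env value can be used inside any DBQuery whatever its phantom parameter, while the savers become ordinary DBUpdate actions. A speculative sketch of assembling such an env, e.g. as a test stub, assuming DBUpdate has an Applicative instance as the runDBTx do-blocks above suggest; the two getter functions are placeholders for the project's DB-backed implementations:
-- Sketch only: 'getNodeArchive' and 'getNodeArchives' are placeholder names.
stubEnv :: NodeStoryEnv err
stubEnv = NodeStoryEnv
  { _nse_saver         = \_nId _archive -> pure ()       -- no-op writer
  , _nse_archive_saver = \_nId archive  -> pure archive  -- echo the archive back
  , _nse_getter        = getNodeArchive    -- placeholder: forall x. NodeId -> DBQuery err x ArchiveList
  , _nse_getter_multi  = getNodeArchives   -- placeholder: forall x. [NodeId] -> DBQuery err x NodeListStory
  }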