Christian Merten / haskell-gargantext / Commits / 030dede0

Commit 030dede0 (Verified), authored Feb 13, 2024 by Przemyslaw Kaminski

[nodestory] large nodestory refactoring

Get rid of that TVar completely.

parent ad928574

Showing 10 changed files with 242 additions and 475 deletions (+242 -475)
gargantext.cabal                                 +1    -33
src/Gargantext/API.hs                            +3    -6
src/Gargantext/API/Admin/EnvTypes.hs             +0    -6
src/Gargantext/API/Dev.hs                        +1    -4
src/Gargantext/API/Ngrams.hs                     +42   -53
src/Gargantext/API/Ngrams/Tools.hs               +20   -11
src/Gargantext/Core/NodeStory.hs                 +109  -295
src/Gargantext/Database/Action/Flow/List.hs      +14   -12
test/Test/Database/Operations/NodeStory.hs       +52   -51
test/Test/Database/Types.hs                      +0    -4
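Taken together, the diff removes the shared TVar NodeListStory cache: reads and writes now go through per-node functions on NodeStoryEnv instead of a mutable in-memory map. A rough standalone sketch of that shape change, with gargantext's types reduced to placeholders (only the field names _nse_saver_immediate, _nse_getter and _nse_getter_multi are taken from the diff below; everything else here is simplified for illustration):

import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map

-- Stand-ins for the gargantext types (simplified for this sketch).
type NodeId        = Int
type ArchiveList   = [String]               -- really Archive NgramsState' NgramsStatePatch'
type NodeListStory = Map NodeId ArchiveList

-- Before (roughly): everything funnelled through one shared TVar.
--   data NodeStoryEnv = NodeStoryEnv
--     { _nse_var             :: TVar NodeListStory
--     , _nse_saver_immediate :: IO ()                        -- flushed the whole TVar
--     , _nse_getter          :: [NodeId] -> IO (TVar NodeListStory)
--     }

-- After: no TVar; callers fetch and persist one node's archive at a time.
data NodeStoryEnv = NodeStoryEnv
  { nseSaverImmediate :: NodeId -> ArchiveList -> IO ()      -- _nse_saver_immediate
  , nseGetter         :: NodeId -> IO ArchiveList            -- _nse_getter
  , nseGetterMulti    :: [NodeId] -> IO NodeListStory        -- _nse_getter_multi
  }

main :: IO ()
main = do
  -- A trivial in-memory env, standing in for the Postgres-backed one.
  let env = NodeStoryEnv
        { nseSaverImmediate = \nId a -> putStrLn ("save " <> show nId <> ": " <> show a)
        , nseGetter         = \_nId  -> pure []
        , nseGetterMulti    = \nIds  -> pure (Map.fromList [ (n, []) | n <- nIds ])
        }
  a <- nseGetter env 42
  nseSaverImmediate env 42 ("new-term" : a)

The old env handed out the whole story behind a TVar; the new one hands out one node's archive at a time, so every save goes straight to the database.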
gargantext.cabal (view file @ 030dede0)

@@ -89,6 +89,7 @@ library
       Gargantext.Core.Methods.Similarities
       Gargantext.Core.NLP
       Gargantext.Core.NodeStory
+      Gargantext.Core.NodeStory.Types
       Gargantext.Core.Text
       Gargantext.Core.Text.Context
       Gargantext.Core.Text.Corpus.API
...
@@ -630,39 +631,6 @@ executable gargantext-admin
     , text
   default-language: Haskell2010

-executable gargantext-cbor2json
-  main-is: Main.hs
-  other-modules:
-      Paths_gargantext
-  hs-source-dirs:
-      bin/gargantext-cbor2json
-  default-extensions:
-    DataKinds
-    DeriveGeneric
-    FlexibleContexts
-    FlexibleInstances
-    GADTs
-    GeneralizedNewtypeDeriving
-    ImportQualifiedPost
-    MultiParamTypeClasses
-    NamedFieldPuns
-    NoImplicitPrelude
-    OverloadedStrings
-    RankNTypes
-    RecordWildCards
-    StrictData
-  ghc-options: -Wall -threaded -rtsopts -with-rtsopts=-N -O2 -Wmissing-signatures
-  build-depends:
-      aeson ^>= 1.5.6.0
-    , base ^>= 4.14.3
-    , bytestring ^>= 0.10.12.0
-    , extra ^>= 1.7.9
-    , gargantext
-    , gargantext-prelude
-    , serialise ^>= 0.2.4.0
-    , text ^>= 1.2.4.1
-  default-language: Haskell2010
-
 executable gargantext-cli
   main-is: Main.hs
   other-modules:
...
src/Gargantext/API.hs (view file @ 030dede0)

@@ -48,10 +48,8 @@ import Gargantext.API.Admin.Settings.CORS
 import Gargantext.API.Admin.Types (FireWall(..), PortNumber, cookieSettings, jwtSettings, settings, corsSettings)
 import Gargantext.API.EKG
 import Gargantext.API.Middleware (logStdoutDevSanitised)
-import Gargantext.API.Ngrams (saveNodeStoryImmediate)
 import Gargantext.API.Routes
 import Gargantext.API.Server (server)
-import Gargantext.Core.NodeStory
 import Gargantext.Database.Prelude qualified as DB
 import Gargantext.Prelude hiding (putStrLn)
 import Gargantext.System.Logging
...
@@ -74,7 +72,7 @@ startGargantext mode port file = withLoggerHoisted mode $ \logger -> do
   app <- makeApp env
   mid <- makeGargMiddleware (env ^. settings . corsSettings) mode
   periodicActions <- schedulePeriodicActions env
-  run port (mid app) `finally` stopGargantext env periodicActions
+  run port (mid app) `finally` stopGargantext periodicActions
   where
     runDbCheck env = do
       r <- runExceptT (runReaderT DB.dbCheck env) `catch`
...
@@ -94,11 +92,10 @@ portRouteInfo port = do
 -- | Stops the gargantext server and cancels all the periodic actions
 -- scheduled to run up to that point.
 -- TODO clean this Monad condition (more generic) ?
-stopGargantext :: HasNodeStoryImmediateSaver env => env -> [ThreadId] -> IO ()
-stopGargantext env scheduledPeriodicActions = do
+stopGargantext :: [ThreadId] -> IO ()
+stopGargantext scheduledPeriodicActions = do
   forM_ scheduledPeriodicActions killThread
   putStrLn "----- Stopping gargantext -----"
-  runReaderT saveNodeStoryImmediate env

 -- | Schedules all sorts of useful periodic actions to be run while
 -- the server is alive accepting requests.
...
src/Gargantext/API/Admin/EnvTypes.hs (view file @ 030dede0)

@@ -136,9 +136,6 @@ instance HasConnectionPool Env where
 instance HasNodeStoryEnv Env where
   hasNodeStory = env_nodeStory

-instance HasNodeStoryVar Env where
-  hasNodeStoryVar = hasNodeStory . nse_getter
-
 instance HasNodeStoryImmediateSaver Env where
   hasNodeStoryImmediateSaver = hasNodeStory . nse_saver_immediate
...
@@ -314,9 +311,6 @@ instance HasSettings DevEnv where
 instance HasNodeStoryEnv DevEnv where
   hasNodeStory = dev_env_nodeStory

-instance HasNodeStoryVar DevEnv where
-  hasNodeStoryVar = hasNodeStory . nse_getter
-
 instance HasNodeStoryImmediateSaver DevEnv where
   hasNodeStoryImmediateSaver = hasNodeStory . nse_saver_immediate
...
src/Gargantext/API/Dev.hs (view file @ 030dede0)

@@ -16,7 +16,6 @@ import Control.Monad (fail)
 import Gargantext.API.Admin.EnvTypes
 import Gargantext.API.Admin.Settings
 import Gargantext.API.Errors.Types
-import Gargantext.API.Ngrams (saveNodeStoryImmediate)
 import Gargantext.API.Prelude
 import Gargantext.Core.NLP (nlpServerMap)
 import Gargantext.Core.NodeStory
...
@@ -72,9 +71,7 @@ runCmdDev env f =
 runCmdGargDev :: DevEnv -> GargM DevEnv BackendInternalError a -> IO a
 runCmdGargDev env cmd =
-  (either (fail . show) pure =<< runExceptT (runReaderT cmd env))
-    `finally`
-  runReaderT saveNodeStoryImmediate env
+  either (fail . show) pure =<< runExceptT (runReaderT cmd env)

 runCmdDevNoErr :: DevEnv -> Cmd' DevEnv () a -> IO a
 runCmdDevNoErr = runCmdDev
...
src/Gargantext/API/Ngrams.hs (view file @ 030dede0)

@@ -60,7 +60,6 @@ module Gargantext.API.Ngrams
   , r_history
   , NgramsRepoElement(..)
   , saveNodeStory
-  , saveNodeStoryImmediate
   , initRepo
   , TabType(..)
...
@@ -87,7 +86,7 @@ module Gargantext.API.Ngrams
   )
   where

-import Control.Lens ((.~), view, (^.), (^..), (+~), (%~), (.~), msumOf, at, _Just, Each(..), (%%~), mapped, non, ifolded, to, withIndex, over)
+import Control.Lens ((.~), view, (^.), (^..), (+~), (%~), (.~), msumOf, at, _Just, Each(..), (%%~), mapped, ifolded, to, withIndex, over)
 import Control.Monad.Reader
 import Data.Aeson.Text qualified as DAT
 import Data.Foldable
...
@@ -105,10 +104,10 @@ import Gargantext.API.Admin.Orchestrator.Types (JobLog(..), AsyncJobs)
 import Gargantext.API.Admin.Types (HasSettings)
 import Gargantext.API.Errors.Types
 import Gargantext.API.Metrics qualified as Metrics
-import Gargantext.API.Ngrams.Tools
+import Gargantext.API.Ngrams.Tools (getNodeStory)
 import Gargantext.API.Ngrams.Types
 import Gargantext.API.Prelude
-import Gargantext.Core.NodeStory
+import Gargantext.Core.NodeStory (ArchiveList, HasNodeStory, HasNodeArchiveStoryImmediateSaver(..), HasNodeStoryImmediateSaver(..), NgramsStatePatch', a_history, a_state, a_version, currentVersion)
 import Gargantext.Core.Types (ListType(..), NodeId, ListId, DocId, TODO, assertValid, HasValidationError, ContextId)
 import Gargantext.Core.Types.Query (Limit(..), Offset(..), MinSize(..), MaxSize(..))
 import Gargantext.Database.Action.Metrics.NgramsByContext (getOccByNgramsOnlyFast)
...
@@ -123,7 +122,6 @@ import Gargantext.Database.Schema.Node (node_id, node_parent_id, node_user_id)
 import Gargantext.Prelude hiding (log, to, toLower, (%), isInfixOf)
 import Gargantext.Prelude.Clock (hasTime, getTime)
 import Gargantext.Utils.Jobs (serveJobsAPI, MonadJobStatus(..))
-import GHC.Conc (readTVar, writeTVar)
 import Servant hiding (Patch)

 {-
...
@@ -174,23 +172,10 @@ mkChildrenGroups addOrRem nt patches =
 ------------------------------------------------------------------------

 saveNodeStory :: ( MonadReader env m
                  , MonadBase IO m
                  , HasNodeStoryImmediateSaver env )
-              => m ()
-saveNodeStory = do
+              => NodeId -> ArchiveList -> m ()
+saveNodeStory nId a = do
   saver <- view hasNodeStoryImmediateSaver
-  liftBase $ do
-    --Gargantext.Prelude.putStrLn "---- Running node story saver ----"
-    saver
-    --Gargantext.Prelude.putStrLn "---- Node story saver finished ----"
-
-saveNodeStoryImmediate :: ( MonadReader env m
-                          , MonadBase IO m
-                          , HasNodeStoryImmediateSaver env )
-                       => m ()
-saveNodeStoryImmediate = do
-  saver <- view hasNodeStoryImmediateSaver
-  liftBase $ do
-    --Gargantext.Prelude.putStrLn "---- Running node story immediate saver ----"
-    saver
-    --Gargantext.Prelude.putStrLn "---- Node story immediate saver finished ----"
+  liftBase $ saver nId a

 listTypeConflictResolution :: ListType -> ListType -> ListType
 listTypeConflictResolution _ _ = undefined -- TODO Use Map User ListType
...
@@ -256,19 +241,23 @@ setListNgrams :: HasNodeStory env err m
               -> m ()
 setListNgrams listId ngramsType ns = do
   -- printDebug "[setListNgrams]" (listId, ngramsType)
-  var <- getNodeStoryVar [listId]
-  liftBase $ atomically $ do
-    nls <- readTVar var
-    writeTVar var $
-      ( unNodeStory
-      . at listId . _Just
-      . a_state
-      . at ngramsType
-      %~ (\mns' -> case mns' of
-             Nothing -> Just ns
-             Just ns' -> Just $ ns <> ns')
-      ) nls
-  saveNodeStory
+  a <- getNodeStory listId
+  let a' = a & a_state
+             . at ngramsType
+             %~ (\mns' -> case mns' of
+                    Nothing -> Just ns
+                    Just ns' -> Just $ ns <> ns')
+  saveNodeStory listId a'
+  -- liftBase $ atomically $ do
+  --   nls <- readTVar var
+  --   writeTVar var $
+  --     ( unNodeStory
+  --     . at listId . _Just
+  --     . a_state
+  --     . at ngramsType
+  --     %~ (\mns' -> case mns' of
+  --            Nothing -> Just ns
+  --            Just ns' -> Just $ ns <> ns')
+  --     ) nls
+  -- saveNodeStory

 newNgramsFromNgramsStatePatch :: NgramsStatePatch' -> [Ngrams]
...
@@ -292,11 +281,11 @@ commitStatePatch :: ( HasNodeStory env err m
                  -> m (Versioned NgramsStatePatch')
 commitStatePatch listId (Versioned _p_version p) = do
   -- printDebug "[commitStatePatch]" listId
-  var <- getNodeStoryVar [listId]
+  a <- getNodeStory listId
   archiveSaver <- view hasNodeArchiveStoryImmediateSaver
-  ns <- liftBase $ atomically $ readTVar var
+  -- ns <- liftBase $ atomically $ readTVar var
-  let a = ns ^. unNodeStory . at listId . non initArchive
+  --     a = ns ^. unNodeStory . at listId . non initArchive
   -- apply patches from version p_version to a ^. a_version
   -- TODO Check this
   --q = mconcat $ take (a ^. a_version - p_version) (a ^. a_history)
...
@@ -327,10 +316,12 @@ commitStatePatch listId (Versioned _p_version p) = do
   -}
   -- printDebug "[commitStatePatch] a version" (a ^. a_version)
   -- printDebug "[commitStatePatch] a' version" (a' ^. a_version)
-  let newNs = ( ns & unNodeStory . at listId .~ (Just a')
-              , Versioned (a' ^. a_version) q'
-              )
+  -- let newNs = ( ns & unNodeStory . at listId .~ (Just a')
+  --             , Versioned (a' ^. a_version) q'
+  --             )
+  let newA = Versioned (a' ^. a_version) q'

   -- NOTE Now is the only good time to save the archive history. We
   -- have the handle to the MVar and we need to save its exact
   -- snapshot. Node Story archive is a linear table, so it's only
...
@@ -353,16 +344,15 @@ commitStatePatch listId (Versioned _p_version p) = do
   -- archive was saved and applied)
   -- newNs' <- archiveSaver $ fst newNs
-  liftBase $ do
-    newNs' <- archiveSaver $ fst newNs
-    atomically $ writeTVar var newNs'
+  -- newNs' <- archiveSaver $ fst newNs
+  -- atomically $ writeTVar var newNs'
+  void $ archiveSaver listId a'

   -- Save new ngrams
   _ <- insertNgrams (newNgramsFromNgramsStatePatch p)

   -- NOTE State (i.e. `NodeStory` can be saved asynchronously, i.e. with debounce)
   -- saveNodeStory
-  saveNodeStoryImmediate
+  saveNodeStory listId a'

-  pure $ snd newNs
+  pure newA
...
@@ -374,11 +364,11 @@ tableNgramsPull :: HasNodeStory env err m
                 -> m (Versioned NgramsTablePatch)
 tableNgramsPull listId ngramsType p_version = do
   -- printDebug "[tableNgramsPull]" (listId, ngramsType)
-  var <- getNodeStoryVar [listId]
-  r <- liftBase $ atomically $ readTVar var
+  a <- getNodeStory listId
+  -- r <- liftBase $ atomically $ readTVar var

-  let a = r ^. unNodeStory . at listId . non initArchive
+  let -- a = r ^. unNodeStory . at listId . non initArchive
       q = mconcat $ take (a ^. a_version - p_version) (a ^. a_history)
       q_table = q ^. _PatchMap . at ngramsType . _Just
...
@@ -502,10 +492,9 @@ getNgramsTableMap :: HasNodeStory env err m
                   -> TableNgrams.NgramsType
                   -> m (Versioned NgramsTableMap)
 getNgramsTableMap nodeId ngramsType = do
-  v <- getNodeStoryVar [nodeId]
-  repo <- liftBase $ atomically $ readTVar v
-  pure $ Versioned (repo ^. unNodeStory . at nodeId . _Just . a_version)
-                   (repo ^. unNodeStory . at nodeId . _Just . a_state . at ngramsType . _Just)
+  a <- getNodeStory nodeId
+  pure $ Versioned (a ^. a_version)
+                   (a ^. a_state . at ngramsType . _Just)

 dumpJsonTableMap :: HasNodeStory env err m
...
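With the TVar gone, setListNgrams, commitStatePatch and tableNgramsPull above all follow the same read-modify-write shape: fetch the node's archive with getNodeStory, edit it, and hand it back to saveNodeStory. A self-contained sketch of that pattern, with the two helpers simulated by an IORef standing in for the node_stories table (toy names and types, not the real gargantext API):

import Data.IORef
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map

-- Toy archive, mirroring Archive { _a_version, _a_state, _a_history }.
data Archive = Archive
  { aVersion :: Int
  , aState   :: Map String [String]   -- roughly: NgramsType -> terms
  , aHistory :: [String]
  } deriving Show

-- An IORef stands in for the node_stories table.
type Store = IORef (Map Int Archive)

-- Simulated getNodeStory: fetch one node's archive (empty if missing).
getNodeStory :: Store -> Int -> IO Archive
getNodeStory store nId = do
  m <- readIORef store
  pure (Map.findWithDefault (Archive 0 Map.empty []) nId m)

-- Simulated saveNodeStory: upsert one node's archive.
saveNodeStory :: Store -> Int -> Archive -> IO ()
saveNodeStory store nId a = modifyIORef' store (Map.insert nId a)

-- The shape of the new setListNgrams: read, merge the new terms in, save.
setListNgrams :: Store -> Int -> String -> [String] -> IO ()
setListNgrams store listId ngramsType ns = do
  a <- getNodeStory store listId
  let a' = a { aState = Map.insertWith (<>) ngramsType ns (aState a) }
  saveNodeStory store listId a'

main :: IO ()
main = do
  store <- newIORef Map.empty
  setListNgrams store 1 "NgramsTerms" ["hello"]
  setListNgrams store 1 "NgramsTerms" ["world"]
  getNodeStory store 1 >>= print

Because each call persists its archive immediately, there is no separate flush step left, which is why saveNodeStoryImmediate disappears from the export list above.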
src/Gargantext/API/Ngrams/Tools.hs (view file @ 030dede0)

@@ -23,7 +23,7 @@ import Data.HashMap.Strict qualified as HM
 import Data.Map.Strict qualified as Map
 import Data.Set qualified as Set
 import Data.Validity
-import GHC.Conc (TVar, readTVar)
+-- import GHC.Conc (TVar, readTVar)
 import Gargantext.API.Ngrams.Types
 import Gargantext.Core.NodeStory
 import Gargantext.Core.Types (ListType(..), NodeId, ListId)
...
@@ -40,10 +40,11 @@ type RootTerm = NgramsTerm
 getRepo :: HasNodeStory env err m
         => [ListId] -> m NodeListStory
 getRepo listIds = do
-  f <- getNodeListStory
-  v <- liftBase $ f listIds
-  v' <- liftBase $ atomically $ readTVar v
-  pure $ v'
+  f <- getNodeListStoryMulti
+  liftBase $ f listIds
+  -- v <- liftBase $ f listIds
+  -- v' <- liftBase $ atomically $ readTVar v
+  -- pure $ v'

 repoSize :: Ord k1 => NodeStory (Map.Map k1 (Map.Map k2 a)) p
...
@@ -56,21 +57,29 @@ repoSize repo node_id = Map.map Map.size state'
            . a_state

-getNodeStoryVar :: HasNodeStory env err m
-                => [ListId] -> m (TVar NodeListStory)
-getNodeStoryVar l = do
+getNodeStory :: HasNodeStory env err m
+             => ListId -> m ArchiveList
+getNodeStory l = do
   f <- getNodeListStory
-  v <- liftBase $ f l
-  pure v
+  liftBase $ f l
+  -- v <- liftBase $ f l
+  -- pure v

 getNodeListStory :: HasNodeStory env err m
-                 => m ([NodeId] -> IO (TVar NodeListStory))
+                 => m (NodeId -> IO ArchiveList)
 getNodeListStory = do
   env <- view hasNodeStory
   pure $ view nse_getter env

+getNodeListStoryMulti :: HasNodeStory env err m
+                      => m ([NodeId] -> IO NodeListStory)
+getNodeListStoryMulti = do
+  env <- view hasNodeStory
+  pure $ view nse_getter_multi env
+
 listNgramsFromRepo :: [ListId] -> NgramsType
...
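getNodeStory now hands back a single ArchiveList, and the new getNodeListStoryMulti rebuilds a NodeListStory map by fetching the requested nodes one by one (on the env side this is the foldM over nodeStoryInc visible in NodeStory.hs below). A small sketch of that fold with the per-node fetch simulated (hypothetical names, not the real API):

import Control.Monad (foldM)
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map

type NodeId        = Int
type ArchiveList   = [String]           -- placeholder for the real archive type
type NodeListStory = Map NodeId ArchiveList

-- Simulated per-node fetch (the real one queries Postgres via getNodeStory').
fetchOne :: NodeId -> IO ArchiveList
fetchOne nId = pure ["term-" <> show nId]

-- Multi-node getter: fold the per-node results into one NodeListStory map,
-- much like _nse_getter_multi folds nodeStoryInc over the requested ids.
fetchMany :: [NodeId] -> IO NodeListStory
fetchMany = foldM step Map.empty
  where
    step acc nId = do
      a <- fetchOne nId
      pure (Map.insert nId a acc)

main :: IO ()
main = fetchMany [1, 2, 3] >>= print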
src/Gargantext/Core/NodeStory.hs (view file @ 030dede0)

@@ -40,42 +40,15 @@ TODO:
 - charger les listes
 -}

 {-# OPTIONS_GHC -fno-warn-orphans #-}
 {-# LANGUAGE Arrows #-}
 {-# LANGUAGE ConstraintKinds #-}
 {-# LANGUAGE QuasiQuotes #-}
 {-# LANGUAGE TemplateHaskell #-}

 module Gargantext.Core.NodeStory
-  ( HasNodeStory
-  , HasNodeStoryEnv
-  , hasNodeStory
-  , HasNodeStoryVar
-  , hasNodeStoryVar
-  , HasNodeStoryImmediateSaver
-  , hasNodeStoryImmediateSaver
-  , HasNodeArchiveStoryImmediateSaver
-  , hasNodeArchiveStoryImmediateSaver
-  , NodeStory(..)
-  , NgramsStatePatch'
-  , NodeListStory
-  , ArchiveList
-  , initNodeListStoryMock
-  , NodeStoryEnv(..)
-  , initNodeStory
-  , nse_getter
-  , nse_saver_immediate
-  , nse_archive_saver_immediate
-  , nse_var
-  , unNodeStory
+  ( module Gargantext.Core.NodeStory.Types
   , getNodesArchiveHistory
   , Archive(..)
   , initArchive
   , archiveAdvance
   , unionArchives
   , a_history
   , a_state
   , a_version
   , nodeExists
   , runPGSQuery
   , runPGSAdvisoryLock
...
@@ -84,7 +57,7 @@ module Gargantext.Core.NodeStory
   , getNodesIdWithType
   , fromDBNodeStoryEnv
   , upsertNodeStories
-  , getNodeStory
+  -- , getNodeStory
   , nodeStoriesQuery
   , currentVersion
   , archiveStateFromList
...
@@ -92,208 +65,29 @@ module Gargantext.Core.NodeStory
   , fixNodeStoryVersions
   ) where

 import Codec.Serialise.Class
-import Control.Lens (makeLenses, Getter, (^.), (.~), (%~), non, _Just, at, view)
+import Control.Lens ((^.), (.~), (%~), non, _Just, at, view)
 import Control.Monad.Except
 import Control.Monad.Reader
 import Data.Aeson hiding ((.=), decode)
 import Data.HashMap.Strict (HashMap)
 import Data.HashMap.Strict qualified as HashMap
 import Data.Map.Strict qualified as Map
 import Data.Map.Strict.Patch qualified as PM
 import Data.Monoid
 import Data.Pool (Pool, withResource)
 import Data.Profunctor.Product.TH (makeAdaptorAndInstance)
 import Data.Semigroup
 import Data.Set qualified as Set
 import Database.PostgreSQL.Simple qualified as PGS
 import Database.PostgreSQL.Simple.FromField (FromField(fromField), fromJSONField)
 import Database.PostgreSQL.Simple.SqlQQ (sql)
 import Database.PostgreSQL.Simple.ToField qualified as PGS
 import Database.PostgreSQL.Simple.Types (Values(..), QualifiedIdentifier(..))
-import GHC.Conc (TVar, newTVar, readTVar, writeTVar)
 import Gargantext.API.Ngrams.Types
 import Gargantext.Core (toDBid)
+import Gargantext.Core.NodeStory.Types
 import Gargantext.Core.Types (ListId, NodeId(..), NodeType)
 import Gargantext.Core.Utils.Prefix (unPrefix)
 import Gargantext.Database.Admin.Config ()
-import Gargantext.Database.Prelude (DbCmd', HasConnectionPool(..))
+import Gargantext.Database.Prelude (HasConnectionPool(..))
 import Gargantext.Database.Query.Table.Ngrams qualified as TableNgrams
 import Gargantext.Database.Query.Table.Node.Error (HasNodeError())
 import Gargantext.Database.Schema.Ngrams (NgramsType)
 import Gargantext.Prelude hiding (to)
 import Gargantext.Prelude.Database
 import Opaleye (DefaultFromField(..), SqlJsonb, fromPGSFromField)

 ------------------------------------------------------------------------
-data NodeStoryEnv = NodeStoryEnv
-  { _nse_var                     :: !(TVar NodeListStory)
-  , _nse_saver_immediate         :: !(IO ())
-  , _nse_archive_saver_immediate :: !(NodeListStory -> IO NodeListStory)
-  , _nse_getter                  :: !([NodeId] -> IO (TVar NodeListStory))
-  --, _nse_cleaner :: !(IO ()) -- every 12 hours: cleans the repos of unused NodeStories
-  -- , _nse_lock  :: !FileLock -- TODO (it depends on the option: if with database or file only)
-  }
-  deriving (Generic)
-
-type HasNodeStory env err m = ( DbCmd' env err m
-                              , MonadReader env m
-                              , MonadError err m
-                              , HasNodeStoryEnv env
-                              , HasNodeError err
-                              )
-
-class (HasNodeStoryVar env, HasNodeStoryImmediateSaver env)
-  => HasNodeStoryEnv env where
-    hasNodeStory :: Getter env NodeStoryEnv
-
-class HasNodeStoryVar env where
-  hasNodeStoryVar :: Getter env ([NodeId] -> IO (TVar NodeListStory))
-
-class HasNodeStoryImmediateSaver env where
-  hasNodeStoryImmediateSaver :: Getter env (IO ())
-
-class HasNodeArchiveStoryImmediateSaver env where
-  hasNodeArchiveStoryImmediateSaver :: Getter env (NodeListStory -> IO NodeListStory)
-
-------------------------------------------------------------------------
-
-{- | Node Story for each NodeType where the Key of the Map is NodeId
-  TODO : generalize for any NodeType, let's start with NodeList which
-  is implemented already
--}
-newtype NodeStory s p = NodeStory { _unNodeStory :: Map NodeId (Archive s p) }
-  deriving (Generic, Show, Eq)
-
-instance (FromJSON s, FromJSON p) => FromJSON (NodeStory s p)
-instance (ToJSON s, ToJSON p) => ToJSON (NodeStory s p)
-instance (Serialise s, Serialise p) => Serialise (NodeStory s p)
-
-data Archive s p = Archive
-  { _a_version :: !Version
-  , _a_state   :: !s
-  , _a_history :: ![p]
-    -- first patch in the list is the most recent
-    -- We use `take` in `commitStatePatch`, that's why.
-    -- History is immutable, we just insert things on top of existing
-    -- list.
-    -- We don't need to store the whole history in memory, this
-    -- structure holds only recent history, the one that will be
-    -- inserted to the DB.
-  }
-  deriving (Generic, Show, Eq)
-
-instance (Serialise s, Serialise p) => Serialise (Archive s p)
-
-type NodeListStory = NodeStory NgramsState' NgramsStatePatch'
-
--- NOTE: 'type NgramsTableMap = Map NgramsTerm NgramsRepoElement'
-type NgramsState'      = Map TableNgrams.NgramsType NgramsTableMap
-type NgramsStatePatch' = PatchMap TableNgrams.NgramsType NgramsTablePatch
-
-instance Serialise NgramsStatePatch'
-
-instance FromField (Archive NgramsState' NgramsStatePatch') where
-  fromField = fromJSONField
-
-instance DefaultFromField SqlJsonb (Archive NgramsState' NgramsStatePatch') where
-  defaultFromField = fromPGSFromField
-
--- | Combine `NgramsState'`. This is because the structure is (Map
--- NgramsType (Map ...)) and the default `(<>)` operator is
--- left-biased
--- (https://hackage.haskell.org/package/containers-0.6.6/docs/Data-Map-Internal.html#v:union)
-combineState :: NgramsState' -> NgramsState' -> NgramsState'
-combineState = Map.unionWith (<>)
-
--- This is not a typical Semigroup instance. The state is not
--- appended, instead it is replaced with the second entry. This is
--- because state changes with each version. We have to take into
--- account the removal of terms as well.
--- instance (Semigroup s, Semigroup p) => Semigroup (Archive s p) where
---   (<>) (Archive { _a_history = p }) (Archive { _a_version = v'
---                                              , _a_state = s'
---                                              , _a_history = p' }) =
---     Archive { _a_version = v'
---             , _a_state = s'
---             , _a_history = p' <> p }
--- instance (Monoid s, Semigroup p) => Monoid (Archive s p) where
---   mempty = Archive { _a_version = 0
---                    , _a_state = mempty
---                    , _a_history = [] }
-
-instance (FromJSON s, FromJSON p) => FromJSON (Archive s p) where
-  parseJSON = genericParseJSON $ unPrefix "_a_"
-
-instance (ToJSON s, ToJSON p) => ToJSON (Archive s p) where
-  toJSON     = genericToJSON     $ unPrefix "_a_"
-  toEncoding = genericToEncoding $ unPrefix "_a_"
-
--- | This is the normal way to update archive state, bumping the
--- version and history. Resulting state is taken directly from new
--- archive, omitting old archive completely.
-archiveAdvance :: (Semigroup s, Semigroup p) => Archive s p -> Archive s p -> Archive s p
-archiveAdvance aOld aNew = aNew { _a_history = _a_history aNew <> _a_history aOld }
-
--- | This is to merge archive states.
-unionArchives :: (Semigroup s, Semigroup p) => Archive s p -> Archive s p -> Archive s p
-unionArchives aOld aNew = aNew { _a_state   = _a_state aOld   <> _a_state aNew
-                               , _a_history = _a_history aNew <> _a_history aOld }
-
-------------------------------------------------------------------------
-initNodeStory :: (Monoid s, Semigroup p) => NodeId -> NodeStory s p
-initNodeStory ni = NodeStory $ Map.singleton ni initArchive
-
-initArchive :: (Monoid s, Semigroup p) => Archive s p
-initArchive = Archive { _a_version = 0
-                      , _a_state   = mempty
-                      , _a_history = [] }
-
-initNodeListStoryMock :: NodeListStory
-initNodeListStoryMock = NodeStory $ Map.singleton nodeListId archive
-  where
-    nodeListId = 0
-    archive = Archive { _a_version = 0
-                      , _a_state   = ngramsTableMap
-                      , _a_history = [] }
-    ngramsTableMap = Map.singleton TableNgrams.NgramsTerms
-                   $ Map.fromList
-                   [ (n ^. ne_ngrams, ngramsElementToRepo n)
-                   | n <- mockTable ^. _NgramsTable ]
-
-------------------------------------------------------------------------
-------------------------------------------------------------------------
--- | Lenses at the bottom of the file because Template Haskell would reorder order of execution in others cases
-makeLenses ''NodeStoryEnv
-makeLenses ''NodeStory
-makeLenses ''Archive
-
-----------------------------------------------------------------------
-data NodeStoryPoly nid v ngtid ngid nre =
-  NodeStoryDB { node_id             :: !nid
-              , version             :: !v
-              , ngrams_type_id      :: !ngtid
-              , ngrams_id           :: !ngid
-              , ngrams_repo_element :: !nre }
-  deriving (Eq)
-
-data NodeStoryArchivePoly nid a =
-  NodeStoryArchiveDB { a_node_id :: !nid
-                     , archive   :: !a }
-  deriving (Eq)
-
-$(makeAdaptorAndInstance "pNodeStory" ''NodeStoryPoly)
-$(makeAdaptorAndInstance "pNodeArchiveStory" ''NodeStoryArchivePoly)
-
--- type NodeStoryWrite = NodeStoryPoly (Column SqlInt4) (Column SqlInt4) (Column SqlInt4) (Column SqlInt4) (Column SqlJsonb)
--- type NodeStoryRead = NodeStoryPoly (Column SqlInt4) (Column SqlInt4) (Column SqlInt4) (Column SqlInt4) (Column SqlJsonb)
--- type NodeStoryArchiveWrite = NodeStoryArchivePoly (Column SqlInt4) (Column SqlJsonb)
--- type NodeStoryArchiveRead = NodeStoryArchivePoly (Column SqlInt4) (Column SqlJsonb)
-
-type ArchiveList = Archive NgramsState' NgramsStatePatch'

 -- DB stuff
...
@@ -366,8 +160,8 @@ insertNodeArchiveHistory c nodeId version (h:hs) = do
               VALUES (?, ?, ?, ?, ?)
             |]

-getNodeStory :: PGS.Connection -> NodeId -> IO NodeListStory
-getNodeStory c nId = do
+getNodeStory' :: PGS.Connection -> NodeId -> IO ArchiveList
+getNodeStory' c nId = do
   --res <- withResource pool $ \c -> runSelect c query :: IO [NodeStoryPoly NodeId Version Int Int NgramsRepoElement]
   res <- runPGSQuery c nodeStoriesQuery (PGS.Only $ PGS.toField nId) :: IO [(Version, TableNgrams.NgramsType, NgramsTerm, NgramsRepoElement)]
   -- We have multiple rows with same node_id and different (ngrams_type_id, ngrams_id).
...
@@ -390,12 +184,19 @@ getNodeStory c nId = do
      pure ()
   -}

-  pure $ NodeStory $ Map.singleton nId $ foldl combine initArchive dbData
+  pure $ foldl combine initArchive dbData
   where
     -- NOTE (<>) for Archive doesn't concatenate states, so we have to use `combine`
     combine a1 a2 = a1 & a_state %~ combineState (a2 ^. a_state)
                        & a_version .~ (a2 ^. a_version)  -- version should be updated from list, not taken from the empty Archive

+getNodeStory :: PGS.Connection -> NodeId -> IO NodeListStory
+getNodeStory c nId = do
+  a <- getNodeStory' c nId
+  pure $ NodeStory $ Map.singleton nId a
+
 nodeStoriesQuery :: PGS.Query
 nodeStoriesQuery = [sql| SELECT version, ngrams_type_id, terms, ngrams_repo_element
                          FROM node_stories
...
@@ -403,9 +204,6 @@ nodeStoriesQuery = [sql| SELECT version, ngrams_type_id, terms, ngrams_repo_elem
                          WHERE node_id = ?
                          |]

-type ArchiveStateList = [(TableNgrams.NgramsType, NgramsTerm, NgramsRepoElement)]
-type ArchiveStateSet  = Set.Set (TableNgrams.NgramsType, NgramsTerm)
-
 -- |Functions to convert archive state (which is a `Map NgramsType
 -- (Map NgramsTerm NgramsRepoElement`)) to/from a flat list
 archiveStateToList :: NgramsState' -> ArchiveStateList
...
@@ -523,13 +321,6 @@ updateNodeStory c nodeId currentArchive newArchive = do
 --                             , uWhere = (\row -> node_id row .== sqlInt4 nId)
 --                             , uReturning = rCount }

--- nodeStoryRemove :: Pool PGS.Connection -> NodeId -> IO Int64
--- nodeStoryRemove pool (NodeId nId) = withResource pool $ \c -> runDelete c delete
---   where
---     delete = Delete { dTable = nodeStoryTable
---                     , dWhere = (\row -> node_id row .== sqlInt4 nId)
---                     , dReturning = rCount }
-
 upsertNodeStories :: PGS.Connection -> NodeId -> ArchiveList -> IO ()
 upsertNodeStories c nodeId newArchive = do
   -- printDebug "[upsertNodeStories] START nId" nId
...
@@ -562,10 +353,6 @@ updateNodeStoryVersion c nodeId newArchive = do
                 WHERE node_id = ?
                 AND ngrams_type_id = ?
              |]

-writeNodeStories :: PGS.Connection -> NodeListStory -> IO ()
-writeNodeStories c (NodeStory nls) = do
-  mapM_ (\(nId, a) -> upsertNodeStories c nId a) $ Map.toList nls
-
 -- | Returns a `NodeListStory`, updating the given one for given `NodeId`
 nodeStoryInc :: PGS.Connection -> NodeListStory -> NodeId -> IO NodeListStory
 nodeStoryInc c ns@(NodeStory nls) nId = do
...
@@ -575,34 +362,10 @@ nodeStoryInc c ns@(NodeStory nls) nId = do
       pure $ NodeStory $ Map.unionWith archiveAdvance nls' nls
     Just _ -> pure ns

-nodeStoryIncrementalRead :: PGS.Connection -> Maybe NodeListStory -> [NodeId] -> IO NodeListStory
-nodeStoryIncrementalRead _ Nothing [] = pure $ NodeStory $ Map.empty
-nodeStoryIncrementalRead c Nothing (ni:ns) = do
-  m <- getNodeStory c ni
-  nodeStoryIncrementalRead c (Just m) ns
-nodeStoryIncrementalRead c (Just nls) ns = foldM (\m n -> nodeStoryInc c m n) nls ns
-
--- nodeStoryDec :: Pool PGS.Connection -> NodeListStory -> NodeId -> IO NodeListStory
--- nodeStoryDec pool ns@(NodeStory nls) ni = do
---   case Map.lookup ni nls of
---     Nothing -> do
---       _ <- nodeStoryRemove pool ni
---       pure ns
---     Just _ -> do
---       let ns' = Map.filterWithKey (\k _v -> k /= ni) nls
---       _ <- nodeStoryRemove pool ni
---       pure $ NodeStory ns'
-
 ------------------------------------

 -- | NgramsRepoElement contains, in particular, `nre_list`,
 -- `nre_parent` and `nre_children`. We want to make sure that all
 -- children entries (i.e. ones that have `nre_parent`) have the same
 -- `list` as their parent entry.
-fixChildrenTermTypes :: NodeListStory -> NodeListStory
-fixChildrenTermTypes (NodeStory nls) =
-  NodeStory $ Map.fromList [ (nId, a & a_state %~ fixChildrenInNgramsStatePatch)
-                           | (nId, a) <- Map.toList nls ]
-
 fixChildrenInNgramsStatePatch :: NgramsState' -> NgramsState'
 fixChildrenInNgramsStatePatch ns = archiveStateFromList $ nsParents <> nsChildrenFixed
   where
...
@@ -623,11 +386,6 @@ fixChildrenInNgramsStatePatch ns = archiveStateFromList $ nsParents <> nsChildre
 -- | Sometimes, when we upload a new list, a child can be left without
 -- a parent. Find such ngrams and set their 'root' and 'parent' to
 -- 'Nothing'.
-fixChildrenWithNoParent :: NodeListStory -> NodeListStory
-fixChildrenWithNoParent (NodeStory nls) =
-  NodeStory $ Map.fromList [ (nId, a & a_state %~ fixChildrenWithNoParentStatePatch)
-                           | (nId, a) <- Map.toList nls ]
-
 fixChildrenWithNoParentStatePatch :: NgramsState' -> NgramsState'
 fixChildrenWithNoParentStatePatch ns = archiveStateFromList $ nsParents <> nsChildrenFixed
   where
...
@@ -654,50 +412,37 @@ fixChildrenWithNoParentStatePatch ns = archiveStateFromList $ nsParents <> nsChi
 fromDBNodeStoryEnv :: Pool PGS.Connection -> IO NodeStoryEnv
 fromDBNodeStoryEnv pool = do
-  tvar <- nodeStoryVar pool Nothing []
-  let saver_immediate = do
-        ns <- atomically $
-          readTVar tvar
-            -- fix children so their 'list' is the same as their parents'
-            >>= pure . fixChildrenTermTypes
-            -- fix children that don't have a parent anymore
-            >>= pure . fixChildrenWithNoParent
-            >>= writeTVar tvar
-            >> readTVar tvar
+  -- tvar <- nodeStoryVar pool Nothing []
+  let saver_immediate nId a = do
+        -- ns <- atomically $
+        --   readTVar tvar
+        --     -- fix children so their 'list' is the same as their parents'
+        --     >>= pure . fixChildrenTermTypes
+        --     -- fix children that don't have a parent anymore
+        --     >>= pure . fixChildrenWithNoParent
+        --     >>= writeTVar tvar
+        --     >> readTVar tvar
         withResource pool $ \c -> do
           --printDebug "[mkNodeStorySaver] will call writeNodeStories, ns" ns
-          writeNodeStories c ns
-  let archive_saver_immediate ns@(NodeStory nls) = withResource pool $ \c -> do
-        mapM_ (\(nId, a) -> do
-                  insertNodeArchiveHistory c nId (a ^. a_version) $ reverse $ a ^. a_history
-              ) $ Map.toList nls
-        pure $ clearHistory ns
+          -- writeNodeStories c $ fixChildrenWithNoParent $ fixChildrenTermTypes ns
+          upsertNodeStories c nId $ a & a_state %~ (fixChildrenInNgramsStatePatch . fixChildrenWithNoParentStatePatch)
+  let archive_saver_immediate nId a = withResource pool $ \c -> do
+        insertNodeArchiveHistory c nId (a ^. a_version) $ reverse $ a ^. a_history
+        pure $ a & a_history .~ []
+        -- mapM_ (\(nId, a) -> do
+        --           insertNodeArchiveHistory c nId (a ^. a_version) $ reverse $ a ^. a_history
+        --       ) $ Map.toList nls
+        -- pure $ clearHistory ns

-  pure $ NodeStoryEnv { _nse_var = tvar
-                      , _nse_saver_immediate = saver_immediate
+  pure $ NodeStoryEnv { _nse_saver_immediate = saver_immediate
                       , _nse_archive_saver_immediate = archive_saver_immediate
-                      , _nse_getter = nodeStoryVar pool (Just tvar) }
+                      , _nse_getter = \nId -> withResource pool $ \c -> getNodeStory' c nId
+                      , _nse_getter_multi = \nIds -> withResource pool $ \c ->
+                          foldM (\m nId -> nodeStoryInc c m nId) (NodeStory Map.empty) nIds
+                      }

-nodeStoryVar :: Pool PGS.Connection
-             -> Maybe (TVar NodeListStory)
-             -> [NodeId]
-             -> IO (TVar NodeListStory)
-nodeStoryVar pool Nothing nIds = do
-  state' <- withResource pool $ \c -> nodeStoryIncrementalRead c Nothing nIds
-  atomically $ newTVar state'
-nodeStoryVar pool (Just tv) nIds = do
-  nls <- atomically $ readTVar tv
-  nls' <- withResource pool
-       $ \c -> nodeStoryIncrementalRead c (Just nls) nIds
-  _ <- atomically $ writeTVar tv nls'
-  pure tv
-
-clearHistory :: NodeListStory -> NodeListStory
-clearHistory (NodeStory ns) = NodeStory $ ns & (traverse . a_history) .~ emptyHistory
-  where
-    emptyHistory = [] :: [NgramsStatePatch']

 currentVersion :: (HasNodeStory env err m) => ListId -> m Version
 currentVersion listId = do
   pool <- view connPool
...
@@ -748,3 +493,72 @@ fixNodeStoryVersions = do
           _ <- runPGSExecute c updateVerQuery (maxVersion, nId, ngramsType)
           pure ()
         _ -> panicTrace "Should get only 1 result!"
+
+-----------------------------------------
+-- DEPRECATED
+
+-- nodeStoryVar :: Pool PGS.Connection
+--              -> Maybe (TVar NodeListStory)
+--              -> [NodeId]
+--              -> IO (TVar NodeListStory)
+-- nodeStoryVar pool Nothing nIds = do
+--   state' <- withResource pool $ \c -> nodeStoryIncrementalRead c Nothing nIds
+--   atomically $ newTVar state'
+-- nodeStoryVar pool (Just tv) nIds = do
+--   nls <- atomically $ readTVar tv
+--   nls' <- withResource pool
+--        $ \c -> nodeStoryIncrementalRead c (Just nls) nIds
+--   _ <- atomically $ writeTVar tv nls'
+--   pure tv
+
+-- clearHistory :: NodeListStory -> NodeListStory
+-- clearHistory (NodeStory ns) = NodeStory $ ns & (traverse . a_history) .~ emptyHistory
+--   where
+--     emptyHistory = [] :: [NgramsStatePatch']
+
+-- fixChildrenWithNoParent :: NodeListStory -> NodeListStory
+-- fixChildrenWithNoParent (NodeStory nls) =
+--   NodeStory $ Map.fromList [ (nId, a & a_state %~ fixChildrenWithNoParentStatePatch)
+--                            | (nId, a) <- Map.toList nls ]
+
+-- fixChildrenTermTypes :: NodeListStory -> NodeListStory
+-- fixChildrenTermTypes (NodeStory nls) =
+--   NodeStory $ Map.fromList [ (nId, a & a_state %~ fixChildrenInNgramsStatePatch)
+--                            | (nId, a) <- Map.toList nls ]
+
+-- nodeStoryIncrementalRead :: PGS.Connection -> Maybe NodeListStory -> [NodeId] -> IO NodeListStory
+-- nodeStoryIncrementalRead _ Nothing [] = pure $ NodeStory Map.empty
+-- nodeStoryIncrementalRead c Nothing (ni:ns) = do
+--   m <- getNodeStory c ni
+--   nodeStoryIncrementalRead c (Just m) ns
+-- nodeStoryIncrementalRead c (Just nls) ns = foldM (\m n -> nodeStoryInc c m n) nls ns
+
+-- nodeStoryDec :: Pool PGS.Connection -> NodeListStory -> NodeId -> IO NodeListStory
+-- nodeStoryDec pool ns@(NodeStory nls) ni = do
+--   case Map.lookup ni nls of
+--     Nothing -> do
+--       _ <- nodeStoryRemove pool ni
+--       pure ns
+--     Just _ -> do
+--       let ns' = Map.filterWithKey (\k _v -> k /= ni) nls
+--       _ <- nodeStoryRemove pool ni
+--       pure $ NodeStory ns'
+------------------------------------
+
+-- writeNodeStories :: PGS.Connection -> NodeListStory -> IO ()
+-- writeNodeStories c (NodeStory nls) = do
+--   mapM_ (\(nId, a) -> upsertNodeStories c nId a) $ Map.toList nls
+
+-- nodeStoryRemove :: Pool PGS.Connection -> NodeId -> IO Int64
+-- nodeStoryRemove pool (NodeId nId) = withResource pool $ \c -> runDelete c delete
+--   where
+--     delete = Delete { dTable = nodeStoryTable
+--                     , dWhere = (\row -> node_id row .== sqlInt4 nId)
+--                     , dReturning = rCount }
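The new fromDBNodeStoryEnv above builds the env out of closures over the connection pool: the getter runs a query per call, the saver upserts one node's archive, and the archive saver persists the pending history and returns the archive with a_history emptied. A self-contained sketch of that wiring with the database replaced by IORefs (all names here are simplified stand-ins, not the real gargantext functions):

import Data.IORef
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map

type NodeId = Int

-- Toy archive with the same three fields as the real Archive.
data Archive = Archive
  { aVersion :: Int
  , aState   :: [String]
  , aHistory :: [String]
  } deriving Show

data NodeStoryEnv = NodeStoryEnv
  { nseGetter                :: NodeId -> IO Archive
  , nseSaverImmediate        :: NodeId -> Archive -> IO ()
  , nseArchiveSaverImmediate :: NodeId -> Archive -> IO Archive
  }

emptyArchive :: Archive
emptyArchive = Archive 0 [] []

-- Build the env from a "storage" handle; the real version takes a
-- Pool PGS.Connection and runs SQL inside withResource.
mkEnv :: IORef (Map NodeId Archive) -> IORef [(NodeId, String)] -> NodeStoryEnv
mkEnv store historyLog = NodeStoryEnv
  { nseGetter = \nId ->
      Map.findWithDefault emptyArchive nId <$> readIORef store
  , nseSaverImmediate = \nId a ->
      modifyIORef' store (Map.insert nId a)             -- upsertNodeStories analogue
  , nseArchiveSaverImmediate = \nId a -> do
      -- insertNodeArchiveHistory analogue: persist pending patches, oldest first...
      modifyIORef' historyLog (++ [ (nId, p) | p <- reverse (aHistory a) ])
      -- ...then hand back the archive with its in-memory history cleared.
      pure a { aHistory = [] }
  }

main :: IO ()
main = do
  store <- newIORef Map.empty
  hist  <- newIORef []
  let env = mkEnv store hist
  nseSaverImmediate env 7 (Archive 1 ["hello"] ["patch-1"])
  a  <- nseGetter env 7
  a' <- nseArchiveSaverImmediate env 7 a
  print a'                  -- history now empty
  readIORef hist >>= print  -- patches persisted to the log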
src/Gargantext/Database/Action/Flow/List.hs (view file @ 030dede0)

@@ -17,16 +17,16 @@ Portability : POSIX
 module Gargantext.Database.Action.Flow.List
   where

-import Control.Lens ((^.), (+~), (%~), at, (.~), _Just)
+import Control.Lens ((^.), (+~), (%~), at, (.~))
 import Control.Monad.Reader
 import Data.List qualified as List
 import Data.Map.Strict (toList)
 import Data.Map.Strict qualified as Map
 import Data.Map.Strict.Patch qualified as PM
 import Gargantext.API.Ngrams (saveNodeStory)
-import Gargantext.API.Ngrams.Tools (getNodeStoryVar)
+import Gargantext.API.Ngrams.Tools (getNodeStory)
 import Gargantext.API.Ngrams.Types
-import Gargantext.Core.NodeStory
+import Gargantext.Core.NodeStory (HasNodeStory, a_history, a_state, a_version)
 import Gargantext.Core.Types (HasValidationError(..), assertValid)
 import Gargantext.Core.Types.Main (ListType(CandidateTerm))
 import Gargantext.Database.Admin.Types.Node
...
@@ -34,7 +34,6 @@ import Gargantext.Database.Query.Table.Ngrams qualified as TableNgrams
 import Gargantext.Database.Query.Table.NodeNgrams (NodeNgramsPoly(..), NodeNgramsW, listInsertDb,{- getCgramsId -})
 import Gargantext.Database.Schema.Ngrams (NgramsType(..))
 import Gargantext.Prelude hiding (toList)
-import GHC.Conc (readTVar, writeTVar)

 -- FLOW LIST
 -- 1. select specific terms of the corpus when compared with others langs
...
@@ -201,11 +200,14 @@ putListNgrams nodeId ngramsType nes = putListNgrams' nodeId ngramsType m
   -- The modifyMVar_ would test the patch with applicable first.
   -- If valid the rest would be atomic and no merge is required.
   -}
-  var <- getNodeStoryVar [listId]
-  liftBase $ atomically $ do
-    r <- readTVar var
-    writeTVar var $
-      r & unNodeStory . at listId . _Just . a_version +~ 1
-        & unNodeStory . at listId . _Just . a_history %~ (p :)
-        & unNodeStory . at listId . _Just . a_state . at ngramsType' .~ Just ns
-  saveNodeStory
+  a <- getNodeStory listId
+  let a' = a & a_version +~ 1
+             & a_history %~ (p :)
+             & a_state . at ngramsType' .~ Just ns
+  -- liftBase $ atomically $ do
+  --   r <- readTVar var
+  --   writeTVar var $
+  --     r & unNodeStory . at listId . _Just . a_version +~ 1
+  --       & unNodeStory . at listId . _Just . a_history %~ (p :)
+  --       & unNodeStory . at listId . _Just . a_state . at ngramsType' .~ Just ns
+  saveNodeStory listId a'
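The list-flow code above now does its bookkeeping on the fetched archive itself before saving: bump a_version, push the patch onto a_history (most recent first), and overwrite the state entry for that ngrams type. The same bookkeeping as a tiny pure function over a toy archive, using plain record updates instead of the lens chain in the diff (illustrative only):

import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map

-- Toy archive; the real one is Archive NgramsState' NgramsStatePatch'.
data Archive p = Archive
  { aVersion :: Int
  , aHistory :: [p]
  , aState   :: Map String [String]   -- NgramsType -> table, roughly
  } deriving Show

-- Mirrors: a & a_version +~ 1
--            & a_history %~ (p :)
--            & a_state . at ngramsType' .~ Just ns
applyPatch :: p -> String -> [String] -> Archive p -> Archive p
applyPatch p ngramsType ns a = a
  { aVersion = aVersion a + 1                        -- bump the version
  , aHistory = p : aHistory a                        -- most recent patch first
  , aState   = Map.insert ngramsType ns (aState a)   -- replace that type's table
  }

main :: IO ()
main = print $ applyPatch "add WORLD" "NgramsTerms" ["HELLO", "WORLD"]
                          (Archive 0 [] (Map.singleton "NgramsTerms" ["HELLO"]))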
test/Test/Database/Operations/NodeStory.hs (view file @ 030dede0)

@@ -20,9 +20,9 @@ import Data.Map.Strict.Patch qualified as PM
 import Data.Set qualified as Set
 import Database.PostgreSQL.Simple qualified as PSQL
 import Database.PostgreSQL.Simple.SqlQQ
-import Gargantext.API.Ngrams (commitStatePatch, mSetFromList, setListNgrams, saveNodeStoryImmediate)
+import Gargantext.API.Ngrams (commitStatePatch, mSetFromList, setListNgrams, saveNodeStory)
 import Gargantext.API.Ngrams.Types (MSet(..), NgramsPatch(..), NgramsRepoElement(..), NgramsTerm(..), Versioned(..), mkNgramsTablePatch, nre_children, nre_list, nre_parent, nre_root)
-import Gargantext.API.Ngrams.Tools (getNodeStoryVar)
+import Gargantext.API.Ngrams.Tools (getNodeStory)
 import Gargantext.Core.NodeStory hiding (runPGSQuery)
 import Gargantext.Core.Types.Individu
 import Gargantext.Core.Types (ListType(..), ListId, NodeId, UserId)
...
@@ -35,13 +35,12 @@ import Gargantext.Database.Query.Tree.Root
 import Gargantext.Database.Schema.Ngrams (NgramsType(..))
 import Gargantext.Database.Schema.Node (NodePoly(..))
 import Gargantext.Prelude
-import GHC.Conc (TVar, readTVar)
 import Test.Database.Types
 import Test.Hspec.Expectations
 import Test.Tasty.HUnit


-commonInitialization :: TestMonad (UserId, NodeId, ListId, TVar NodeListStory)
+commonInitialization :: TestMonad (UserId, NodeId, ListId, ArchiveList)
 commonInitialization = do
   let user = UserName userMaster
   parentId <- getRootId user
...
@@ -52,9 +51,9 @@ commonInitialization = do
   listId <- getOrMkList corpusId userId

-  v <- getNodeStoryVar [listId]
+  a <- getNodeStory listId

-  pure $ (userId, corpusId, listId, v)
+  pure $ (userId, corpusId, listId, a)


 initArchiveList :: ArchiveList
...
@@ -90,7 +89,7 @@ simpleChildTerm = ( simpleChildTerm'
 createListTest :: TestEnv -> Assertion
 createListTest env = do
   flip runReaderT env $ runTestMonad $ do
-    (userId, corpusId, listId, _v) <- commonInitialization
+    (userId, corpusId, listId, _a) <- commonInitialization

     listId' <- getOrMkList corpusId userId
...
@@ -100,28 +99,32 @@ createListTest env = do
 queryNodeStoryTest :: TestEnv -> Assertion
 queryNodeStoryTest env = do
   flip runReaderT env $ runTestMonad $ do
-    (_userId, _corpusId, listId, v) <- commonInitialization
+    (_userId, _corpusId, listId, a) <- commonInitialization
+
+    liftIO $ do
+      a `shouldBe` initArchiveList
+
+    saveNodeStory listId a
+
+    a' <- getNodeStory listId

-    saveNodeStoryImmediate

     liftIO $ do
-      ns <- atomically $ readTVar v
-      ns `shouldBe` (NodeStory $ Map.singleton listId initArchiveList)
+      a' `shouldBe` a


 insertNewTermsToNodeStoryTest :: TestEnv -> Assertion
 insertNewTermsToNodeStoryTest env = do
   flip runReaderT env $ runTestMonad $ do
-    (_userId, _corpusId, listId, v) <- commonInitialization
+    (_userId, _corpusId, listId, _a) <- commonInitialization

     let (terms, nre) = simpleTerm
     let nls = Map.singleton terms nre
     setListNgrams listId NgramsTerms nls
+
+    a <- getNodeStory listId
+
     liftIO $ do
-      ns <- atomically $ readTVar v
-      ns `shouldBe` (NodeStory $ Map.singleton listId (initArchiveList { _a_state = Map.singleton NgramsTerms nls }))
+      a `shouldBe` (initArchiveList { _a_state = Map.singleton NgramsTerms nls })

     -- check that the ngrams are in the DB as well
     ngramsMap <- selectNgramsId [unNgramsTerm terms]
     liftIO $ (snd <$> Map.toList ngramsMap) `shouldBe` [unNgramsTerm terms]
...
@@ -139,7 +142,7 @@ insertNewTermsToNodeStoryTest env = do
 insertNewTermsWithChildrenToNodeStoryTest :: TestEnv -> Assertion
 insertNewTermsWithChildrenToNodeStoryTest env = do
   flip runReaderT env $ runTestMonad $ do
-    (_userId, _corpusId, listId, v) <- commonInitialization
+    (_userId, _corpusId, listId, _a) <- commonInitialization

     let (tParent, nreParent) = simpleParentTerm
     let (tChild, nreChild) = simpleChildTerm
...
@@ -148,10 +151,10 @@ insertNewTermsWithChildrenToNodeStoryTest env = do
     let nls = Map.fromList [(tParent, nreParent), (tChild, nreChild)]
     setListNgrams listId NgramsTerms nls

+    a <- getNodeStory listId
+
     liftIO $ do
-      ns <- atomically $ readTVar v
-      ns `shouldBe` (NodeStory $ Map.singleton listId (initArchiveList { _a_state = Map.singleton NgramsTerms nls }))
+      a `shouldBe` (initArchiveList { _a_state = Map.singleton NgramsTerms nls })

     -- `setListNgrams` calls saveNodeStory already so we should have
     -- the terms in the DB by now
...
@@ -178,7 +181,7 @@ insertNewTermsWithChildrenToNodeStoryTest env = do
 insertNodeStoryChildrenWithDifferentNgramsTypeThanParentTest :: TestEnv -> Assertion
 insertNodeStoryChildrenWithDifferentNgramsTypeThanParentTest env = do
   flip runReaderT env $ runTestMonad $ do
-    (_userId, _corpusId, listId, v) <- commonInitialization
+    (_userId, _corpusId, listId, _a) <- commonInitialization

     let (tParent, nreParent) = simpleParentTerm
     let (tChild, nreChildGoodType) = simpleChildTerm
...
@@ -190,10 +193,10 @@ insertNodeStoryChildrenWithDifferentNgramsTypeThanParentTest env = do
     setListNgrams listId NgramsTerms nls

+    a <- getNodeStory listId
+
     liftIO $ do
-      ns <- atomically $ readTVar v
-      ns `shouldBe` (NodeStory $ Map.singleton listId (initArchiveList { _a_state = Map.singleton NgramsTerms nlsWithChildFixed }))
+      a `shouldBe` (initArchiveList { _a_state = Map.singleton NgramsTerms nlsWithChildFixed })

     ngramsMap <- selectNgramsId terms
     liftIO $ (snd <$> Map.toList ngramsMap) `shouldBe` terms
...
@@ -216,16 +219,16 @@ insertNodeStoryChildrenWithDifferentNgramsTypeThanParentTest env = do
 setListNgramsUpdatesNodeStoryTest :: TestEnv -> Assertion
 setListNgramsUpdatesNodeStoryTest env = do
   flip runReaderT env $ runTestMonad $ do
-    (_userId, _corpusId, listId, v) <- commonInitialization
+    (_userId, _corpusId, listId, _a) <- commonInitialization

     let (terms, nre) = simpleTerm
     let nls = Map.singleton terms nre
     setListNgrams listId NgramsTerms nls

+    a <- getNodeStory listId
+
     liftIO $ do
-      ns <- atomically $ readTVar v
-      ns `shouldBe` (NodeStory $ Map.singleton listId (initArchiveList { _a_state = Map.singleton NgramsTerms nls }))
+      a `shouldBe` (initArchiveList { _a_state = Map.singleton NgramsTerms nls })

     -- check that the ngrams are in the DB as well
     ngramsMap <- selectNgramsId [unNgramsTerm terms]
     liftIO $ (snd <$> Map.toList ngramsMap) `shouldBe` [unNgramsTerm terms]
...
@@ -238,27 +241,27 @@ setListNgramsUpdatesNodeStoryTest env = do
     let terms2 = "WORLD"
     let nls2 = Map.singleton (NgramsTerm terms2) nre2
     setListNgrams listId NgramsTerms nls2

+    a' <- getNodeStory listId
+
     liftIO $ do
-      ns <- atomically $ readTVar v
-      ns `shouldBe` (NodeStory $ Map.singleton listId (initArchiveList { _a_state = Map.singleton NgramsTerms $ nls <> nls2 }))
+      a' `shouldBe` (initArchiveList { _a_state = Map.singleton NgramsTerms $ nls <> nls2 })

 setListNgramsUpdatesNodeStoryWithChildrenTest :: TestEnv -> Assertion
 setListNgramsUpdatesNodeStoryWithChildrenTest env = do
   flip runReaderT env $ runTestMonad $ do
-    (_userId, _corpusId, listId, v) <- commonInitialization
+    (_userId, _corpusId, listId, _a) <- commonInitialization

     let (tChild, nreChild) = simpleChildTerm
     let (tParent, nreParent) = simpleParentTerm
     let nls = Map.fromList [(tParent, nreParent), (tChild, nreChild)]

     setListNgrams listId NgramsTerms nls
+
+    a <- getNodeStory listId
+
     liftIO $ do
-      ns <- atomically $ readTVar v
-      ns `shouldBe` (NodeStory $ Map.singleton listId (initArchiveList { _a_state = Map.singleton NgramsTerms nls }))
+      a `shouldBe` (initArchiveList { _a_state = Map.singleton NgramsTerms nls })

     -- OK, now we substitute parent with no children, the parent of
     -- 'nreChild' should become Nothing
...
@@ -270,22 +273,20 @@ setListNgramsUpdatesNodeStoryWithChildrenTest env = do
                                  , _nre_root = Nothing }
     let nlsNew = Map.fromList [(tParent, nreParentNew), (tChild, nreChildNew)]

+    a' <- getNodeStory listId
+
     liftIO $ do
-      ns <- atomically $ readTVar v
-      ns `shouldBe` (NodeStory $ Map.singleton listId (initArchiveList { _a_state = Map.singleton NgramsTerms nlsNew }))
+      a' `shouldBe` (initArchiveList { _a_state = Map.singleton NgramsTerms nlsNew })

 commitPatchSimpleTest :: TestEnv -> Assertion
 commitPatchSimpleTest env = do
   flip runReaderT env $ runTestMonad $ do
-    (_userId, _corpusId, listId, v) <- commonInitialization
+    (_userId, _corpusId, listId, a) <- commonInitialization

     -- initially, the node story table is empty
     liftIO $ do
-      ns <- atomically $ readTVar v
-      ns `shouldBe` (NodeStory $ Map.singleton listId (initArchiveList { _a_state = Map.empty }))
+      a `shouldBe` (initArchiveList { _a_state = Map.empty })

     let (term, nre) = simpleTerm
     let tPatch = NgramsReplace { _patch_old = Nothing
...
@@ -298,9 +299,9 @@ commitPatchSimpleTest env = do
     _patchApplied <- commitStatePatch listId patch

     let nls = Map.fromList [(term, nre)]

+    a' <- getNodeStory listId
+
     liftIO $ do
-      ns <- atomically $ readTVar v
-      ns `shouldBe` (NodeStory $ Map.singleton listId (initArchiveList { _a_state = Map.singleton NgramsTerms nls
-                                                                       , _a_version = ver + 1 }))
+      a' `shouldBe` (initArchiveList { _a_state = Map.singleton NgramsTerms nls
+                                     , _a_version = ver + 1 })
test/Test/Database/Types.hs (view file @ 030dede0)

@@ -115,10 +115,6 @@ instance HasMail TestEnv where
 instance HasNodeStoryEnv TestEnv where
   hasNodeStory = to test_nodeStory

-instance HasNodeStoryVar TestEnv where
-  hasNodeStoryVar = hasNodeStory . nse_getter
-
 instance HasNodeStoryImmediateSaver TestEnv where
   hasNodeStoryImmediateSaver = hasNodeStory . nse_saver_immediate
...