gargantext / haskell-gargantext

Commit 7e20e0fb, authored Mar 14, 2024 by Alexandre Delanoë
Merge remote-tracking branch 'origin/313-dev-json-csv-import-of-older-ngrams' into dev

Parents: 1798aab8, 7576128b
Showing 27 changed files with 552 additions and 351 deletions
.envrc  +1 -0
bin/update-project-dependencies  +1 -1
cabal.project  +1 -1
gargantext.cabal  +7 -0
src/Gargantext/API/Dev.hs  +18 -9
src/Gargantext/API/Ngrams.hs  +21 -23
src/Gargantext/API/Ngrams/List.hs  +1 -1
src/Gargantext/API/Ngrams/Tools.hs  +8 -7
src/Gargantext/API/Ngrams/Types.hs  +12 -1
src/Gargantext/API/Node.hs  +1 -1
src/Gargantext/API/Node/Contact.hs  +12 -15
src/Gargantext/API/Node/FrameCalcUpload.hs  +14 -14
src/Gargantext/Core/NodeStory.hs  +50 -25
src/Gargantext/Core/NodeStory/Types.hs  +3 -7
src/Gargantext/Core/Text/List/Group/WithStem.hs  +0 -11
src/Gargantext/Utils/Jobs.hs  +7 -7
stack.yaml  +2 -2
test/Test/API/Authentication.hs  +3 -5
test/Test/API/Errors.hs  +4 -3
test/Test/API/GraphQL.hs  +1 -2
test/Test/API/Private.hs  +3 -96
test/Test/API/Routes.hs  +62 -0
test/Test/API/Setup.hs  +0 -10
test/Test/API/UpdateList.hs  +118 -71
test/Test/Database/Operations/NodeStory.hs  +20 -20
test/Test/Types.hs  +29 -0
test/Test/Utils.hs  +153 -19
.envrc

 use_nix
+#use_flake
 export LANG=C.UTF-8

bin/update-project-dependencies

@@ -18,7 +18,7 @@ fi
 # with the `sha256sum` result calculated on the `cabal.project` and
 # `cabal.project.freeze`. This ensures the files stay deterministic so that CI
 # cache can kick in.
-expected_cabal_project_hash="c9fe39301e8b60bfd183e60e7e25a14cd1c9c66d8739bf9041ca3f4db89db7c6"
+expected_cabal_project_hash="1cbb47fd3f929a01b3b968cc2e148dcbf5ef4e662e14ed9832d32471a68f6766"
 expected_cabal_project_freeze_hash="2c8960ffcf1b94aa11a3543e3b5facd2db5af19569fecaec4bc0ab4c1edd22a5"

 cabal --store-dir=$STORE_DIR v2-build --dry-run

cabal.project

@@ -121,7 +121,7 @@ source-repository-package
 source-repository-package
   type: git
   location: https://gitlab.iscpif.fr/gargantext/crawlers/openalex.git
-  tag: cd179f6dda15d77a085c0176284c921b7bc50c46
+  tag: ceb8f2cebd4890b6d9d151ab01ee14e925bc0499

 source-repository-package
   type: git

gargantext.cabal

@@ -841,7 +841,9 @@ test-suite garg-test-tasty
   type: exitcode-stdio-1.0
   main-is: drivers/tasty/Main.hs
   other-modules:
+    Test.API.Routes
     Test.API.Setup
+    Test.Core.Similarity
     Test.Core.Text
     Test.Core.Text.Corpus.Query
     Test.Core.Text.Examples
@@ -869,6 +871,7 @@ test-suite garg-test-tasty
     Test.Parsers.Date
     Test.Parsers.Types
     Test.Parsers.WOS
+    Test.Types
     Test.Utils
     Test.Utils.Crypto
     Test.Utils.Jobs
@@ -921,6 +924,7 @@ test-suite garg-test-tasty
    , servant-auth
    , servant-auth-client
    , servant-client
+   , servant-client-core
    , servant-job
    , servant-server
    , shelly >= 1.9 && < 2
@@ -953,6 +957,7 @@ test-suite garg-test-hspec
     Test.API.Errors
     Test.API.GraphQL
     Test.API.Private
+    Test.API.Routes
     Test.API.Setup
     Test.API.UpdateList
     Test.Database.Operations
@@ -961,6 +966,7 @@ test-suite garg-test-hspec
     Test.Database.Setup
     Test.Database.Types
     Test.Utils
+    Test.Types
     Paths_gargantext
   hs-source-dirs:
     test
@@ -1008,6 +1014,7 @@ test-suite garg-test-hspec
    , servant-auth
    , servant-auth-client
    , servant-client
+   , servant-client-core
    , servant-job
    , servant-server
    , shelly >= 1.9 && < 2

src/Gargantext/API/Dev.hs

@@ -12,20 +12,23 @@ Portability : POSIX
 -- Use only for dev/repl
 module Gargantext.API.Dev where

+import Control.Lens ( view )
 import Control.Monad ( fail )
-import Gargantext.API.Admin.EnvTypes
-import Gargantext.API.Admin.Settings
-import Gargantext.API.Errors.Types
-import Gargantext.API.Prelude
+import Data.Pool ( withResource )
+import Database.PostgreSQL.Simple qualified as PGS
+import Gargantext.API.Admin.EnvTypes ( DevEnv(..), Mode(Dev) )
+import Gargantext.API.Admin.Settings ( devJwkFile, devSettings, newPool )
+import Gargantext.API.Errors.Types ( BackendInternalError )
+import Gargantext.API.Prelude ( GargM )
 import Gargantext.Core.NLP ( nlpServerMap )
-import Gargantext.Core.NodeStory
-import Gargantext.Database.Prelude ( Cmd', Cmd'', databaseParameters, runCmd )
+import Gargantext.Core.NodeStory ( fromDBNodeStoryEnv )
+import Gargantext.Database.Prelude ( Cmd', Cmd'', connPool, databaseParameters, runCmd )
 import Gargantext.Prelude
 import Gargantext.Prelude.Config ( readConfig )
 import Gargantext.Prelude.Mail qualified as Mail
 import Gargantext.Prelude.NLP qualified as NLP
-import Gargantext.System.Logging
-import Servant
+import Gargantext.System.Logging ( withLoggerHoisted )
+import Servant ( ServerError )

 type IniPath = FilePath
 -------------------------------------------------------------------
@@ -67,7 +70,7 @@ runCmdReplServantErr = runCmdRepl
 -- using HasConnectionPool and HasRepoVar.
 runCmdDev :: Show err => DevEnv -> Cmd'' DevEnv err a -> IO a
 runCmdDev env f =
-  (either (fail . show) pure =<< runCmd env f)
+  either (fail . show) pure =<< runCmd env f

 runCmdGargDev :: DevEnv -> GargM DevEnv BackendInternalError a -> IO a
 runCmdGargDev env cmd =
@@ -81,3 +84,9 @@ runCmdDevServantErr = runCmdDev
 runCmdReplEasy :: Cmd'' DevEnv BackendInternalError a -> IO a
 runCmdReplEasy f = withDevEnv "gargantext.ini" $ \env -> runCmdDev env f
+
+-- | Execute a function that takes PSQL.Connection from the DB pool as
+-- first parameter.
+-- e.g.: runCmdReplEasyDB $ \c -> getNodeStory' c
+runCmdReplEasyDB :: (PGS.Connection -> IO a) -> IO a
+runCmdReplEasyDB f = runCmdReplEasy $ view connPool >>= (\p -> liftBase $ withResource p f)

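The new runCmdReplEasyDB helper borrows a PostgreSQL connection from the pool and hands it to an IO action, which makes the REPL workflow shorter. A minimal usage sketch (not part of the commit; it assumes a gargantext.ini in the working directory and an existing list node id):

    import Gargantext.API.Dev (runCmdReplEasyDB)
    import Gargantext.Core.NodeStory (ArchiveList, getNodeStory')
    import Gargantext.Database.Admin.Types.Node (NodeId)

    -- Fetch the current ngrams archive of a list node straight from the DB pool.
    fetchArchive :: NodeId -> IO ArchiveList
    fetchArchive listId = runCmdReplEasyDB $ \c -> getNodeStory' c listId
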
src/Gargantext/API/Ngrams.hs

@@ -25,13 +25,14 @@ add get
 {-# LANGUAGE IncoherentInstances #-}
 module Gargantext.API.Ngrams
   ( TableNgramsApi
   , TableNgramsApiGet
   , TableNgramsApiPut
   , commitStatePatch
   , searchTableNgrams
   , getTableNgrams
   , getTableNgramsCorpus
@@ -86,38 +87,35 @@ module Gargantext.API.Ngrams
   )
   where

-import Control.Lens ((.~), view, (^.), (^..), (+~), (%~), (.~), msumOf, at, _Just, Each(..), (%%~), mapped, ifolded, to, withIndex, over)
-import Control.Monad.Reader
+import Control.Lens ((.~), view, (^.), (^..), (+~), (%~), (.~), msumOf, at, ix, _Just, Each(..), (%%~), ifolded, to, withIndex, over)
 import Data.Aeson.Text qualified as DAT
-import Data.Foldable
 import Data.List qualified as List
 import Data.Map.Strict qualified as Map
 import Data.Map.Strict.Patch qualified as PM
-import Data.Monoid
 import Data.Patch.Class (Action(act), Transformable(..), ours)
 import Data.Set qualified as Set
 import Data.Text (isInfixOf, toLower, unpack)
-import Data.Text.Lazy.IO as DTL
+import Data.Text.Lazy.IO as DTL ( writeFile )
 import Formatting (hprint, int, (%))
 import Gargantext.API.Admin.EnvTypes (Env, GargJob(..))
 import Gargantext.API.Admin.Orchestrator.Types (JobLog(..), AsyncJobs)
 import Gargantext.API.Admin.Types (HasSettings)
-import Gargantext.API.Errors.Types
+import Gargantext.API.Errors.Types ( BackendInternalError )
 import Gargantext.API.Metrics qualified as Metrics
 import Gargantext.API.Ngrams.Tools (getNodeStory)
 import Gargantext.API.Ngrams.Types
-import Gargantext.API.Prelude
+import Gargantext.API.Prelude ( GargM )
 import Gargantext.Core.NodeStory (ArchiveList, HasNodeStory, HasNodeArchiveStoryImmediateSaver(..), HasNodeStoryImmediateSaver(..), NgramsStatePatch', a_history, a_state, a_version, currentVersion)
 import Gargantext.Core.Types (ListType(..), NodeId, ListId, DocId, TODO, assertValid, HasValidationError, ContextId)
 import Gargantext.Core.Types.Query (Limit(..), Offset(..), MinSize(..), MaxSize(..))
 import Gargantext.Database.Action.Metrics.NgramsByContext (getOccByNgramsOnlyFast)
 import Gargantext.Database.Admin.Config (userMaster)
 import Gargantext.Database.Admin.Types.Node (NodeType(..))
-import Gargantext.Database.Query.Table.Ngrams hiding (NgramsType(..), ngramsType, ngrams_terms)
-import Gargantext.Database.Query.Table.Ngrams qualified as TableNgrams
+import Gargantext.Database.Query.Table.Ngrams ( text2ngrams, Ngrams, insertNgrams, selectNgramsByDoc )
+import Gargantext.Database.Schema.Ngrams qualified as TableNgrams
 import Gargantext.Database.Query.Table.Node (getNode)
 import Gargantext.Database.Query.Table.Node.Error (HasNodeError)
-import Gargantext.Database.Query.Table.Node.Select
+import Gargantext.Database.Query.Table.Node.Select ( selectNodesWithUsername )
 import Gargantext.Database.Schema.Node (node_id, node_parent_id, node_user_id)
 import Gargantext.Prelude hiding (log, to, toLower, (%), isInfixOf)
 import Gargantext.Prelude.Clock (hasTime, getTime)
@@ -321,7 +319,7 @@ commitStatePatch listId (Versioned _p_version p) = do
       --  )
       let newA = Versioned (a' ^. a_version) q'
       -- NOTE Now is the only good time to save the archive history. We
       -- have the handle to the MVar and we need to save its exact
       -- snapshot. Node Story archive is a linear table, so it's only
@@ -370,7 +368,7 @@ tableNgramsPull listId ngramsType p_version = do
   let
     -- a = r ^. unNodeStory . at listId . non initArchive
     q = mconcat $ take (a ^. a_version - p_version) (a ^. a_history)
-    q_table = q ^. _PatchMap . at ngramsType . _Just
+    q_table = q ^. _PatchMap . ix ngramsType

   pure (Versioned (a ^. a_version) q_table)
@@ -404,7 +402,7 @@ tableNgramsPut tabType listId (Versioned p_version p_table)
       assertValid p_validity

       ret <- commitStatePatch listId (Versioned p_version p)
-        <&> v_data %~ (view (_PatchMap . at ngramsType . _Just))
+        <&> v_data %~ view (_PatchMap . ix ngramsType)

       pure ret
@@ -474,7 +472,7 @@ tableNgramsPostChartsAsync utn jobHandle = do
         _ <- Metrics.updateTree cId (Just listId) tabType MapTerm
         -}
         markComplete jobHandle
-      _ -> do
+      _otherTabType -> do
         -- printDebug "[tableNgramsPostChartsAsync] no update for tabType = " tabType
         markStarted 1 jobHandle
         markFailed Nothing jobHandle
@@ -494,7 +492,7 @@ getNgramsTableMap :: HasNodeStory env err m
 getNgramsTableMap nodeId ngramsType = do
   a <- getNodeStory nodeId
   pure $ Versioned (a ^. a_version)
-                   (a ^. a_state . at ngramsType . _Just)
+                   (a ^. a_state . ix ngramsType)

 dumpJsonTableMap :: HasNodeStory env err m
@@ -551,8 +549,8 @@ searchTableNgrams versionedTableMap NgramsSearchQuery{..} =
       matchingNode inputNode =
         let nodeSize        = inputNode ^. ne_size
             matchesListType = maybe (const True) (==) _nsq_listType
-            respectsMinSize = maybe (const True) (<=) (getMinSize <$> _nsq_minSize)
-            respectsMaxSize = maybe (const True) (>=) (getMaxSize <$> _nsq_maxSize)
+            respectsMinSize = maybe (const True) ((<=) . getMinSize) _nsq_minSize
+            respectsMaxSize = maybe (const True) ((>=) . getMaxSize) _nsq_maxSize

         in respectsMinSize nodeSize
            && respectsMaxSize nodeSize
@@ -623,7 +621,7 @@ getNgramsTable' :: forall env err m.
                 -> m (Versioned (Map.Map NgramsTerm NgramsElement))
 getNgramsTable' nId listId ngramsType = do
   tableMap <- getNgramsTableMap listId ngramsType
-  tableMap & v_data %%~ (setNgramsTableScores nId listId ngramsType)
+  tableMap & v_data %%~ setNgramsTableScores nId listId ngramsType
                       . Map.mapWithKey ngramsElementFromRepo

 -- | Helper function to set scores on an `NgramsTable`.
@@ -648,7 +646,7 @@ setNgramsTableScores nId listId ngramsType table = do
             ("getTableNgrams/setScores #ngrams=" % int % " time=" % hasTime % "\n")
             (length ngrams_terms) t1 t2
   let
-    setOcc ne = ne & ne_occurrences .~ Set.fromList (msumOf (at (ne ^. ne_ngrams) . _Just) occurrences)
+    setOcc ne = ne & ne_occurrences .~ Set.fromList (msumOf (ix (ne ^. ne_ngrams)) occurrences)

   --printDebug "[setNgramsTableScores] with occurences" $ table & each %~ setOcc
@@ -734,7 +732,7 @@ getTableNgramsCorpus :: ( HasNodeStory env err m
 getTableNgramsCorpus nId tabType listId limit_ offset listType minSize maxSize orderBy mt =
   getTableNgrams nId listId tabType searchQuery
   where
-    searchQueryFn (NgramsTerm nt) = maybe (const True) isInfixOf (toLower <$> mt) (toLower nt)
+    searchQueryFn (NgramsTerm nt) = maybe (const True) (isInfixOf . toLower) mt (toLower nt)
     searchQuery = NgramsSearchQuery {
         _nsq_limit       = limit_
       , _nsq_offset      = offset
@@ -778,7 +776,7 @@ getTableNgramsDoc dId tabType listId limit_ offset listType minSize maxSize orde
   ns <- selectNodesWithUsername NodeList userMaster
   let ngramsType = ngramsTypeFromTabType tabType
   ngs <- selectNgramsByDoc (ns <> [listId]) dId ngramsType
-  let searchQueryFn (NgramsTerm nt) = flip Set.member (Set.fromList ngs) nt
+  let searchQueryFn (NgramsTerm nt) = Set.member nt (Set.fromList ngs)
      searchQuery = NgramsSearchQuery {
          _nsq_limit       = limit_
        , _nsq_offset      = offset
@@ -828,4 +826,4 @@ listNgramsChangedSince listId ngramsType version
   | version < 0 =
       Versioned <$> currentVersion listId <*> pure True
   | otherwise   =
-      tableNgramsPull listId ngramsType version & mapped . v_data %~ (== mempty)
+      tableNgramsPull listId ngramsType version <&> v_data %~ (== mempty)

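A recurring change in this module replaces the `at k . _Just` traversal with the equivalent `ix k`. A standalone sketch of that equivalence (illustrative only, not code from the commit):

    import Control.Lens (at, ix, (^?), _Just)
    import qualified Data.Map.Strict as Map

    -- Both traversals focus the value stored under a key, if it exists.
    demo :: (Maybe Int, Maybe Int)
    demo = (m ^? at "a" . _Just, m ^? ix "a")  -- (Just 1, Just 1)
      where
        m = Map.fromList [("a", 1 :: Int)]
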
src/Gargantext/API/Ngrams/List.hs

@@ -152,7 +152,7 @@ postAsyncJSON l ngramsList jobHandle = do
     setList :: HasNodeStory env err m => m ()
     setList = do
       -- TODO check with Version for optim
-      mapM_ (\(nt, Versioned _v ns) -> (setListNgrams l nt ns)) $ toList ngramsList
+      mapM_ (\(nt, Versioned _v ns) -> setListNgrams l nt ns) $ toList ngramsList
       -- TODO reindex

src/Gargantext/API/Ngrams/Tools.hs

@@ -11,22 +11,23 @@ Portability : POSIX
 {-# LANGUAGE TypeFamilies #-}
 {-# LANGUAGE TypeOperators #-}
+{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-}
+{-# HLINT ignore "Use infix" #-}

 module Gargantext.API.Ngrams.Tools
   where

 -- import Gargantext.Core.NodeStoryFile qualified as NSF
-import Control.Lens ( _Just, (^.), at, view, At, Index, IxValue )
-import Control.Monad.Reader
+import Control.Lens ( _Just, (^.), at, ix, view, At, Index, IxValue )
 import Data.HashMap.Strict (HashMap)
 import Data.HashMap.Strict qualified as HM
 import Data.Map.Strict qualified as Map
 import Data.Set qualified as Set
-import Data.Validity
 -- import GHC.Conc (TVar, readTVar)
 import Gargantext.API.Ngrams.Types
 import Gargantext.Core.NodeStory
-import Gargantext.Core.Types ( ListType(..), NodeId, ListId )
+import Gargantext.Core.Types.Main ( ListType(..) )
+import Gargantext.Database.Admin.Types.Node ( NodeId, ListId )
 import Gargantext.Database.Schema.Ngrams ( NgramsType )
 import Gargantext.Prelude
@@ -93,7 +94,7 @@ listNgramsFromRepo nodeIds ngramsType repo =
               ^. unNodeStory
               . at nodeId . _Just
               . a_state
-              . at ngramsType . _Just
+              . ix ngramsType
             | nodeId <- nodeIds
             ]
@@ -153,7 +154,7 @@ filterListWithRoot :: [ListType]
 filterListWithRoot lt m = snd <$> HM.filter isMapTerm m
   where
     isMapTerm (l, maybeRoot) = case maybeRoot of
-      Nothing -> elem l lt
+      Nothing -> l `elem` lt
       Just r  -> case HM.lookup r m of
         Nothing -> panicTrace $ "[Garg.API.Ngrams.Tools] filterListWithRoot, unknown key: " <> unNgramsTerm r
         Just (l', _) -> elem l' lt
@@ -175,7 +176,7 @@ groupNodesByNgrams syn occs = HM.fromListWith (<>) occs'
         Nothing -> (t, ns)
         Just r' -> (r', ns)

-data Diagonal = Diagonal Bool
+newtype Diagonal = Diagonal Bool

 getCoocByNgrams :: Diagonal
                 -> HashMap NgramsTerm (Set NodeId)

src/Gargantext/API/Ngrams/Types.hs

@@ -25,9 +25,10 @@ import Data.Aeson.TH (deriveJSON)
 import Data.Csv ( defaultEncodeOptions, encodeByNameWith, header, namedRecord, EncodeOptions(..), NamedRecord, Quoting(QuoteNone) )
 import Data.Csv qualified as Csv
 import Data.HashMap.Strict.InsOrd qualified as InsOrdHashMap
+import Data.List qualified as List
 import Data.Map.Strict qualified as Map
 import Data.Map.Strict.Patch qualified as PM
-import Data.Patch.Class ( Replace, replace, Action(act), Group, Applicable(..), Composable(..), Transformable(..), PairPatch(..), Patched, ConflictResolution, ConflictResolutionReplace, MaybePatch(Mod), unMod, old, new )
+import Data.Patch.Class ( Replace(Keep), replace, Action(act), Group, Applicable(..), Composable(..), Transformable(..), PairPatch(..), Patched, ConflictResolution, ConflictResolutionReplace, MaybePatch(Mod), unMod, old, new )
 import Data.Set qualified as Set
 import Data.String ( IsString(..) )
 import Data.Swagger ( NamedSchema(NamedSchema), declareSchemaRef, genericDeclareNamedSchema, SwaggerType(SwaggerObject), ToParamSchema, ToSchema(..), HasProperties(properties), HasRequired(required), HasType(type_) )
@@ -848,3 +849,13 @@ instance Arbitrary NgramsRepoElement where
   arbitrary = elements $ map ngramsElementToRepo ns
     where
       NgramsTable ns = mockTable
+
+toNgramsPatch :: [NgramsTerm] -> NgramsPatch
+toNgramsPatch children = NgramsPatch children' Keep
+  where
+    children' :: PatchMSet NgramsTerm
+    children' = PatchMSet $ fst $ PM.fromList $ List.zip children (List.cycle [addPatch])

src/Gargantext/API/Node.hs

@@ -134,7 +134,7 @@ type NodeAPI a = PolicyChecked (NodeNodeAPI a)
              :<|> "category"   :> CatApi
              :<|> "score"      :> ScoreApi
-             :<|> "search"     :> (Search.API Search.SearchResult)
+             :<|> "search"     :> Search.API Search.SearchResult
              :<|> "share"      :> Share.API

              -- Pairing utilities

src/Gargantext/API/Node/Contact.hs

@@ -12,9 +12,7 @@ Portability : POSIX
 {-# OPTIONS_GHC -fno-warn-orphans #-}
 {-# LANGUAGE ConstraintKinds #-}
-{-# LANGUAGE KindSignatures #-}
 {-# LANGUAGE ScopedTypeVariables #-}
-{-# LANGUAGE TemplateHaskell #-}
 {-# LANGUAGE TypeFamilies #-}
 {-# LANGUAGE TypeOperators #-}
@@ -22,35 +20,34 @@ Portability : POSIX
 module Gargantext.API.Node.Contact
   where

-import Conduit
+import Conduit ( yield )
 import Data.Aeson
 import Data.Either ( Either(Right) )
 import Data.Maybe ( Maybe(..) )
-import Data.Swagger
+import Data.Swagger ( ToSchema )
 import Data.Text ( Text )
 import GHC.Generics ( Generic )
-import Servant
-import Test.QuickCheck ( elements )
-import Test.QuickCheck.Arbitrary
-import Gargantext.API.Admin.Auth.Types
+import Gargantext.API.Admin.Auth.Types ( AuthenticatedUser(AuthenticatedUser) )
 import Gargantext.API.Admin.EnvTypes ( Env, GargJob(..) )
 import Gargantext.API.Admin.Orchestrator.Types ( JobLog(..), AsyncJobs )
 import Gargantext.API.Admin.Types ( HasSettings )
-import Gargantext.API.Errors.Types
-import Gargantext.API.Node
+import Gargantext.API.Errors.Types ( BackendInternalError )
+import Gargantext.API.Node ( nodeNodeAPI, NodeNodeAPI )
 import Gargantext.API.Prelude ( GargM, simuLogs )
 import Gargantext.Core ( Lang(..) )
 import Gargantext.Core.Text.Terms ( TermType(..) )
 import Gargantext.Core.Types.Individu ( User(..) )
 import Gargantext.Database.Action.Flow ( flow )
 import Gargantext.Database.Action.Flow.Types ( FlowCmdM )
-import Gargantext.Database.Admin.Types.Hyperdata ( HyperdataAnnuaire(..), HyperdataContact )
-import Gargantext.Database.Admin.Types.Hyperdata.Contact ( hyperdataContact )
+import Gargantext.Database.Admin.Types.Hyperdata.Contact ( HyperdataContact, hyperdataContact )
+import Gargantext.Database.Admin.Types.Hyperdata.Corpus ( HyperdataAnnuaire(..) )
-import Gargantext.Database.Admin.Types.Node
+import Gargantext.Database.Admin.Types.Node ( CorpusId, NodeId )
 import Gargantext.Prelude (($), {-printDebug,-})
+import Gargantext.Utils.Aeson qualified as GUA
 import Gargantext.Utils.Jobs ( serveJobsAPI, MonadJobStatus(..) )
-import qualified Gargantext.Utils.Aeson as GUA
+import Servant
+import Test.QuickCheck ( elements )
+import Test.QuickCheck.Arbitrary ( Arbitrary(arbitrary) )

 ------------------------------------------------------------------------
 type API = "contact" :> Summary "Contact endpoint"

src/Gargantext/API/Node/FrameCalcUpload.hs

@@ -16,36 +16,36 @@ Portability : POSIX
 module Gargantext.API.Node.FrameCalcUpload where

 import Control.Lens ((^.))
-import Data.Aeson
+import Data.Aeson ( FromJSON, ToJSON )
 import Data.ByteString.Lazy qualified as BSL
 import Data.ByteString.UTF8 qualified as BSU8
-import Data.Swagger
+import Data.Swagger ( ToSchema )
 import Data.Text qualified as T
-import Network.HTTP.Client ( newManager, httpLbs, parseRequest, responseBody )
-import Network.HTTP.Client.TLS ( tlsManagerSettings )
-import Servant
-import Web.FormUrlEncoded ( FromForm )
-import Gargantext.API.Admin.Auth.Types
+import Gargantext.API.Admin.Auth.Types ( auth_node_id, AuthenticatedUser )
 import Gargantext.API.Admin.EnvTypes ( GargJob(..), Env )
 import Gargantext.API.Admin.Orchestrator.Types ( JobLog(..), AsyncJobs )
-import Gargantext.API.Errors.Types
+import Gargantext.API.Errors.Types ( BackendInternalError )
 import Gargantext.API.Node.Corpus.New ( addToCorpusWithForm )
 import Gargantext.API.Node.Corpus.New.Types ( FileFormat(..), FileType(..) )
 import Gargantext.API.Node.Types ( NewWithForm(..) )
-import Gargantext.API.Prelude
+import Gargantext.API.Prelude ( GargM )
 import Gargantext.Core ( Lang )
-import Gargantext.Core.NodeStory ( HasNodeArchiveStoryImmediateSaver )
+import Gargantext.Core.NodeStory.Types ( HasNodeArchiveStoryImmediateSaver )
 import Gargantext.Core.Text.List.Social ( FlowSocialListWith(..) )
 import Gargantext.Core.Types.Individu ( User(..) )
-import Gargantext.Database.Action.Flow.Types
-import Gargantext.Database.Admin.Types.Hyperdata.Frame
-import Gargantext.Database.Admin.Types.Node
+import Gargantext.Database.Action.Flow.Types ( FlowCmdM )
+import Gargantext.Database.Admin.Types.Hyperdata.Frame ( HyperdataFrame(..) )
+import Gargantext.Database.Admin.Types.Node ( NodeId, NodeType(NodeCorpus) )
 import Gargantext.Database.Prelude ( HasConfig )
 import Gargantext.Database.Query.Table.Node ( getClosestParentIdByType, getNodeWith )
 import Gargantext.Database.Schema.Node ( node_hyperdata )
 import Gargantext.Prelude
 import Gargantext.Utils.Jobs ( serveJobsAPI, MonadJobStatus(..) )
+import Network.HTTP.Client ( newManager, httpLbs, parseRequest, responseBody )
+import Network.HTTP.Client.TLS ( tlsManagerSettings )
+import Servant ( type (:>), JSON, Summary, HasServer(ServerT) )
+import Web.FormUrlEncoded ( FromForm )

 data FrameCalcUpload = FrameCalcUpload {
   _wf_lang :: !(Maybe Lang)

src/Gargantext/Core/NodeStory.hs

@@ -43,7 +43,6 @@ TODO:
 {-# LANGUAGE Arrows #-}
 {-# LANGUAGE ConstraintKinds #-}
 {-# LANGUAGE QuasiQuotes #-}
-{-# LANGUAGE TemplateHaskell #-}

 module Gargantext.Core.NodeStory
   ( module Gargantext.Core.NodeStory.Types
@@ -54,17 +53,18 @@ module Gargantext.Core.NodeStory
   , fromDBNodeStoryEnv
   , upsertNodeStories
   -- , getNodeStory
+  , getNodeStory'
   , nodeStoriesQuery
   , currentVersion
   , archiveStateFromList
   , archiveStateToList
-  , fixNodeStoryVersions )
+  , fixNodeStoryVersions
+  , fixChildrenDuplicatedAsParents
+  , getParentsChildren )
   where

-import Control.Lens ((^.), (.~), (%~), non, _Just, at, view)
-import Control.Monad.Except
+import Control.Lens ((^.), (.~), (%~), non, _Just, at, over, view)
 import Data.Map.Strict qualified as Map
-import Data.Monoid
 import Data.Pool ( Pool, withResource )
 import Data.Set qualified as Set
 import Database.PostgreSQL.Simple qualified as PGS
@@ -73,12 +73,12 @@ import Database.PostgreSQL.Simple.ToField qualified as PGS
 import Gargantext.API.Ngrams.Types
 import Gargantext.Core.NodeStory.DB
 import Gargantext.Core.NodeStory.Types
-import Gargantext.Core.Types ( ListId, NodeId(..) )
+import Gargantext.Database.Admin.Types.Node ( ListId, NodeId(..) )
 import Gargantext.Database.Admin.Config ()
 import Gargantext.Database.Prelude ( HasConnectionPool(..) )
 import Gargantext.Database.Query.Table.Ngrams qualified as TableNgrams
 import Gargantext.Prelude hiding ( to )
-import Gargantext.Prelude.Database
+import Gargantext.Prelude.Database ( runPGSAdvisoryXactLock, runPGSExecute, runPGSQuery )

 getNodeStory' :: PGS.Connection -> NodeId -> IO ArchiveList
@@ -105,7 +105,7 @@ getNodeStory' c nId = do
     pure ()
   -}

-  pure $ foldl combine initArchive dbData
+  pure $ foldl' combine initArchive dbData
   where
     -- NOTE (<>) for Archive doesn't concatenate states, so we have to use `combine`
     combine a1 a2 = a1 & a_state %~ combineState (a2 ^. a_state)
@@ -221,15 +221,12 @@ nodeStoryInc c ns@(NodeStory nls) nId = do
 -- `nre_parent` and `nre_children`. We want to make sure that all
 -- children entries (i.e. ones that have `nre_parent`) have the same
 -- `list` as their parent entry.
-fixChildrenInNgramsStatePatch :: NgramsState' -> NgramsState'
-fixChildrenInNgramsStatePatch ns = archiveStateFromList $ nsParents <> nsChildrenFixed
+fixChildrenInNgrams :: NgramsState' -> NgramsState'
+fixChildrenInNgrams ns = archiveStateFromList $ nsParents <> nsChildrenFixed
   where
-    nls = archiveStateToList ns
-    nsParents = filter (\(_nt, _t, nre) -> isNothing $ nre ^. nre_parent) nls
+    (nsParents, nsChildren) = getParentsChildren ns
     parentNtMap = Map.fromList $ (\(_nt, t, nre) -> (t, nre ^. nre_list)) <$> nsParents
-    nsChildren = filter (\(_nt, _t, nre) -> isJust $ nre ^. nre_parent) nls
     nsChildrenFixed = (\(nt, t, nre) ->
                          ( nt
                          , t
@@ -241,15 +238,12 @@ fixChildrenInNgramsStatePatch ns = archiveStateFromList $ nsParents <> nsChildre
 -- | Sometimes, when we upload a new list, a child can be left without
 -- a parent. Find such ngrams and set their 'root' and 'parent' to
 -- 'Nothing'.
-fixChildrenWithNoParentStatePatch :: NgramsState' -> NgramsState'
-fixChildrenWithNoParentStatePatch ns = archiveStateFromList $ nsParents <> nsChildrenFixed
+fixChildrenWithNoParent :: NgramsState' -> NgramsState'
+fixChildrenWithNoParent ns = archiveStateFromList $ nsParents <> nsChildrenFixed
   where
-    nls = archiveStateToList ns
-    nsParents = filter (\(_nt, _t, nre) -> isNothing $ nre ^. nre_parent) nls
+    (nsParents, nsChildren) = getParentsChildren ns
     parentNtMap = Map.fromList $ (\(_nt, t, nre) -> (t, nre ^. nre_children & mSetToSet)) <$> nsParents
-    nsChildren = filter (\(_nt, _t, nre) -> isJust $ nre ^. nre_parent) nls
     nsChildrenFixFunc (nt, t, nre) =
       ( nt
       , t
@@ -263,6 +257,30 @@ fixChildrenWithNoParentStatePatch ns = archiveStateFromList $ nsParents <> nsChi
     nsChildrenFixed = nsChildrenFixFunc <$> nsChildren

+-- | Sometimes children can also become parents (e.g. #313). Find such
+-- children and remove them from the list.
+fixChildrenDuplicatedAsParents :: NgramsState' -> NgramsState'
+fixChildrenDuplicatedAsParents ns = archiveStateFromList $ nsChildren <> nsParentsFixed
+  where
+    (nsParents, nsChildren) = getParentsChildren ns
+    parentNtMap = Map.fromList $ (\(_nt, t, nre) -> (t, nre ^. nre_children & mSetToSet)) <$> nsParents
+    parentsSet = Set.fromList $ Map.keys parentNtMap
+    nsParentsFixed = (\(nt, t, nre) ->
+                        ( nt
+                        , t
+                        , over nre_children
+                               (\c -> mSetFromSet $ Set.difference (mSetToSet c) parentsSet)
+                               nre )) <$> nsParents
+
+getParentsChildren :: NgramsState' -> (ArchiveStateList, ArchiveStateList)
+getParentsChildren ns = (nsParents, nsChildren)
+  where
+    nls = archiveStateToList ns
+    nsParents = filter (\(_nt, _t, nre) -> isNothing $ nre ^. nre_parent) nls
+    nsChildren = filter (\(_nt, _t, nre) -> isJust $ nre ^. nre_parent) nls
+
 ------------------------------------
 fromDBNodeStoryEnv :: Pool PGS.Connection -> IO NodeStoryEnv
@@ -280,8 +298,15 @@ fromDBNodeStoryEnv pool = do
         withResource pool $ \c -> do
           --printDebug "[mkNodeStorySaver] will call writeNodeStories, ns" ns
           -- writeNodeStories c $ fixChildrenWithNoParent $ fixChildrenTermTypes ns
+          -- |NOTE Fixing a_state is kinda a hack. We shouldn't land
+          -- |with bad state in the first place.
           upsertNodeStories c nId $
-            a & a_state %~ (fixChildrenInNgramsStatePatch . fixChildrenWithNoParentStatePatch)
+            a & a_state %~ (fixChildrenDuplicatedAsParents . fixChildrenInNgrams . fixChildrenWithNoParent)
       let archive_saver_immediate nId a = withResource pool $ \c -> do
             insertNodeArchiveHistory c nId (a ^. a_version) $ reverse $ a ^. a_history
             pure $ a & a_history .~ []
@@ -289,13 +314,13 @@ fromDBNodeStoryEnv pool = do
       --     insertNodeArchiveHistory c nId (a ^. a_version) $ reverse $ a ^. a_history
       --   ) $ Map.toList nls
       -- pure $ clearHistory ns
       pure $ NodeStoryEnv { _nse_saver_immediate = saver_immediate
                           , _nse_archive_saver_immediate = archive_saver_immediate
                           , _nse_getter = \nId -> withResource pool $ \c ->
                               getNodeStory' c nId
                           , _nse_getter_multi = \nIds -> withResource pool $ \c ->
-                              foldM (\m nId -> nodeStoryInc c m nId) (NodeStory Map.empty) nIds
+                              foldM (nodeStoryInc c) (NodeStory Map.empty) nIds
                           }

 currentVersion :: (HasNodeStory env err m) => ListId -> m Version

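The new fixChildrenDuplicatedAsParents boils down to a Set.difference: any term that itself appears as a parent is stripped from every parent's children set. A tiny standalone sketch of that step (illustrative only, not the commit's code):

    import qualified Data.Set as Set

    -- Remove terms that are themselves parents from a children set.
    pruneChildren :: Ord a => Set.Set a -> Set.Set a -> Set.Set a
    pruneChildren parentsSet children = Set.difference children parentsSet

    -- pruneChildren (Set.fromList ["cat", "feline"]) (Set.fromList ["cats", "feline"])
    --   == Set.fromList ["cats"]
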
src/Gargantext/Core/NodeStory/Types.hs

@@ -47,23 +47,19 @@ module Gargantext.Core.NodeStory.Types
   , ArchiveStateList )
   where

-import Codec.Serialise.Class
+import Codec.Serialise.Class ( Serialise )
 import Control.Lens ( makeLenses, Getter, (^.) )
-import Control.Monad.Except
-import Control.Monad.Reader
 import Data.Aeson hiding ((.=), decode)
 import Data.Map.Strict qualified as Map
-import Data.Monoid
 import Data.Profunctor.Product.TH ( makeAdaptorAndInstance )
-import Data.Semigroup
 import Data.Set qualified as Set
 import Database.PostgreSQL.Simple.FromField ( FromField(fromField), fromJSONField )
 import Gargantext.API.Ngrams.Types
-import Gargantext.Core.Types ( NodeId(..) )
+import Gargantext.Database.Admin.Types.Node ( NodeId(..) )
 import Gargantext.Core.Utils.Prefix ( unPrefix )
 import Gargantext.Database.Admin.Config ()
 import Gargantext.Database.Prelude ( DbCmd' )
-import Gargantext.Database.Query.Table.Ngrams qualified as TableNgrams
+import Gargantext.Database.Schema.Ngrams qualified as TableNgrams
 import Gargantext.Database.Query.Table.Node.Error ( HasNodeError() )
 import Gargantext.Prelude hiding ( to )
 import Opaleye ( DefaultFromField(..), SqlJsonb, fromPGSFromField )

src/Gargantext/Core/Text/List/Group/WithStem.hs

@@ -24,8 +24,6 @@ import Data.HashSet (HashSet)
 import Data.HashSet qualified as Set
 import Data.List qualified as List
 import Data.Map.Strict qualified as Map
-import Data.Map.Strict.Patch qualified as PatchMap
-import Data.Patch.Class qualified as Patch (Replace(..))
 import Data.Text qualified as Text
 import Gargantext.API.Ngrams.Types
 import Gargantext.Core ( Lang(..), Form, Lem, NLPServerConfig )
@@ -120,14 +118,5 @@ patch s = case Set.size s > 1 of
       let children = List.tail ngrams
       pure (parent, toNgramsPatch children)

-toNgramsPatch :: [NgramsTerm] -> NgramsPatch
-toNgramsPatch children = NgramsPatch children' Patch.Keep
-  where
-    children' :: PatchMSet NgramsTerm
-    children' = PatchMSet $ fst $ PatchMap.fromList $ List.zip children (List.cycle [addPatch])
-
 -- | Instances
 makeLenses ''GroupParams

src/Gargantext/Utils/Jobs.hs

@@ -20,19 +20,19 @@ module Gargantext.Utils.Jobs (
   , MonadJobStatus(..)
   ) where

-import Control.Monad.Except
-import Control.Monad.Reader
+import Control.Monad.Except ( runExceptT )
+import Control.Monad.Reader ( MonadReader(ask), ReaderT(runReaderT) )
 import Data.Aeson ( ToJSON )
 import Prelude
 import System.Directory ( doesFileExist )
 import Text.Read ( readMaybe )
 import qualified Data.Text as T
-import Gargantext.API.Admin.EnvTypes
-import Gargantext.API.Errors.Types
-import Gargantext.API.Prelude
+import Gargantext.API.Admin.EnvTypes ( mkJobHandle, Env, GargJob(..) )
+import Gargantext.API.Errors.Types ( BackendInternalError(InternalJobError) )
+import Gargantext.API.Prelude ( GargM )
 import qualified Gargantext.Utils.Jobs.Internal as Internal
-import Gargantext.Utils.Jobs.Monad
+import Gargantext.Utils.Jobs.Monad ( JobError, MonadJobStatus(..) )
 import Gargantext.System.Logging
 import qualified Servant.Job.Async as SJ
@@ -49,7 +49,7 @@ serveJobsAPI
   , ToJSON (JobEventType m)
   , ToJSON (JobOutputType m)
   , MonadJobStatus m
-  , m ~ (GargM Env BackendInternalError)
+  , m ~ GargM Env BackendInternalError
   , JobEventType m ~ JobOutputType m
   , MonadLogger m
   )

stack.yaml

@@ -147,7 +147,7 @@
   git: "https://gitlab.iscpif.fr/gargantext/crawlers/istex.git"
   subdirs:
   - .
-- commit: cd179f6dda15d77a085c0176284c921b7bc50c46
+- commit: ceb8f2cebd4890b6d9d151ab01ee14e925bc0499
   git: "https://gitlab.iscpif.fr/gargantext/crawlers/openalex.git"
   subdirs:
   - .
@@ -323,7 +323,7 @@ flags:
   "full-text-search":
     "build-search-demo": false
   gargantext:
-    "disable-db-obfuscation-executable": true
+    "disable-db-obfuscation-executable": false
     "no-phylo-debug-logs": false
     "test-crypto": false
   "generic-deriving":

test/Test/API/Authentication.hs

-{-# LANGUAGE BangPatterns #-}
 {-# LANGUAGE TypeApplications #-}
 {-# LANGUAGE TypeFamilies #-}
 {-# OPTIONS_GHC -Wno-orphans #-}
+{-# LANGUAGE BangPatterns #-}

 module Test.API.Authentication (
     tests
@@ -21,13 +21,11 @@ import Network.HTTP.Client hiding (Proxy)
 import Prelude qualified
 import Servant.Auth.Client ()
 import Servant.Client
+import Test.API.Routes (auth_api)
 import Test.API.Setup (withTestDBAndPort, setupEnvironment)
 import Test.Database.Types
 import Test.Hspec

-auth_api :: AuthRequest -> ClientM AuthResponse
-auth_api = client (Proxy :: Proxy (MkGargAPI (GargAPIVersion AuthAPI)))
-
 cannedToken :: T.Text
 cannedToken = "eyJhbGciOiJIUzUxMiJ9.eyJkYXQiOnsiaWQiOjF9fQ.t49zZSqkPAulEkYEh4pW17H2uwrkyPTdZKwHyG3KUJ0hzU2UUoPBNj8vdv087RCVBJ4tXgxNbP4j0RBv3gxdqg"
@@ -66,7 +64,7 @@ tests = sequential $ aroundAll withTestDBAndPort $ do
             , _authRes_user_id = fromMaybe (UnsafeMkUserId 1) $ listToMaybe $ result0 ^.. _Right . authRes_user_id
             }
-      result `shouldBe` (Right expected)
+      result `shouldBe` Right expected

     it "denies login for user 'alice' if password is invalid" $ \((_testEnv, port), _) -> do
       let authPayload = AuthRequest "alice" (GargPassword "wrong")

test/Test/API/Errors.hs

@@ -9,13 +9,14 @@ import Network.HTTP.Types
 import Network.Wai.Test
 import Servant
 import Servant.Auth.Client ()
+import Servant.Auth.Client qualified as SA
 import Servant.Client
-import Test.API.Private (protected, withValidLogin, protectedNewError)
-import Test.API.Setup (withTestDBAndPort, setupEnvironment, mkUrl, createAliceAndBob)
+import Test.API.Routes (mkUrl)
+import Test.API.Setup (withTestDBAndPort, setupEnvironment, createAliceAndBob)
 import Test.Hspec
 import Test.Hspec.Wai.Internal (withApplication)
+import Test.Utils (protected, withValidLogin, protectedNewError)
 import Text.RawString.QQ (r)
-import qualified Servant.Auth.Client as SA

 tests :: Spec
 tests = sequential $ aroundAll withTestDBAndPort $ do

test/Test/API/GraphQL.hs

@@ -10,12 +10,11 @@ module Test.API.GraphQL (
 import Gargantext.Core.Types.Individu
 import Prelude
 import Servant.Auth.Client ()
-import Test.API.Private (withValidLogin, protected, protectedNewError)
 import Test.API.Setup (withTestDBAndPort, setupEnvironment, createAliceAndBob)
 import Test.Hspec
 import Test.Hspec.Wai.Internal (withApplication)
 import Test.Hspec.Wai.JSON (json)
-import Test.Utils
+import Test.Utils (protected, protectedNewError, shouldRespondWithFragment, shouldRespondWithFragmentCustomStatus, withValidLogin)
 import Text.RawString.QQ (r)

 tests :: Spec

test/Test/API/Private.hs

@@ -5,117 +5,24 @@
 module Test.API.Private (
     tests
-  -- * Utility functions
-  , withValidLogin
-  , getJSON
-  , protected
-  , protectedJSON
-  , postJSONUrlEncoded
-  , protectedNewError
-  , protectedWith
   ) where

-import Data.Aeson qualified as JSON
-import Data.ByteString.Lazy qualified as L
-import Data.ByteString.Lazy.Char8 qualified as C8L
-import Data.CaseInsensitive qualified as CI
-import Data.Map.Strict qualified as Map
 import Data.Text.Encoding qualified as TE
-import Gargantext.API.Admin.Auth.Types
 import Gargantext.API.Routes
 import Gargantext.Core.Types.Individu
 import Gargantext.Prelude hiding (get)
 import Network.HTTP.Client hiding (Proxy)
-import Network.HTTP.Types
-import Network.Wai.Handler.Warp qualified as Wai
-import Network.Wai.Test (SResponse(..))
-import Prelude qualified
 import Servant
 import Servant.Auth.Client ()
 import Servant.Auth.Client qualified as SA
 import Servant.Client
-import Test.API.Authentication (auth_api)
-import Test.API.Setup (withTestDBAndPort, setupEnvironment, mkUrl, createAliceAndBob)
+import Test.API.Routes (mkUrl)
+import Test.API.Setup (withTestDBAndPort, setupEnvironment, createAliceAndBob)
 import Test.Hspec
 import Test.Hspec.Wai hiding (pendingWith)
 import Test.Hspec.Wai.Internal (withApplication)
 import Test.Hspec.Wai.JSON (json)
-import Test.Utils (shouldRespondWithFragment)
-
--- | Issue a request with a valid 'Authorization: Bearer' inside.
-protected :: HasCallStack => Token -> Method -> ByteString -> L.ByteString -> WaiSession () SResponse
-protected tkn mth url = protectedWith mempty tkn mth url
-
-protectedJSON :: forall a. (JSON.FromJSON a, Typeable a, HasCallStack) => Token -> Method -> ByteString -> JSON.Value -> WaiSession () a
-protectedJSON tkn mth url = protectedJSONWith mempty tkn mth url
-
-protectedJSONWith :: forall a. (JSON.FromJSON a, Typeable a, HasCallStack) => [Network.HTTP.Types.Header] -> Token -> Method -> ByteString -> JSON.Value -> WaiSession () a
-protectedJSONWith hdrs tkn mth url jsonV = do
-  SResponse{..} <- protectedWith hdrs tkn mth url (JSON.encode jsonV)
-  case JSON.eitherDecode simpleBody of
-    Left err -> Prelude.fail $ "protectedJSON failed when parsing " <> show (typeRep $ Proxy @a) <> ": " <> err
-    Right x  -> pure x
-
-protectedWith :: HasCallStack => [Network.HTTP.Types.Header] -> Token -> Method -> ByteString -> L.ByteString -> WaiSession () SResponse
-protectedWith extraHeaders tkn mth url payload =
-  -- Using a map means that if any of the extra headers contains a clashing header name,
-  -- the extra headers will take precedence.
-  let defaultHeaders = [ (hAccept, "application/json;charset=utf-8")
-                       , (hContentType, "application/json")
-                       , (hAuthorization, "Bearer " <> TE.encodeUtf8 tkn)
-                       ]
-      hdrs = Map.toList $ Map.fromList $ defaultHeaders <> extraHeaders
-  in request mth url hdrs payload
-
-protectedNewError :: HasCallStack => Token -> Method -> ByteString -> L.ByteString -> WaiSession () SResponse
-protectedNewError tkn mth url = protectedWith newErrorFormat tkn mth url
-  where
-    newErrorFormat = [(CI.mk "X-Garg-Error-Scheme", "new")]
-
-getJSON :: Token -> ByteString -> WaiSession () SResponse
-getJSON tkn url = protectedWith mempty tkn "GET" url ""
-
-postJSONUrlEncoded :: forall a. (JSON.FromJSON a, Typeable a, HasCallStack) => Token -> ByteString -> L.ByteString -> WaiSession () a
-postJSONUrlEncoded tkn url queryPaths = do
-  SResponse{..} <- protectedWith [(hContentType, "application/x-www-form-urlencoded")] tkn "POST" url queryPaths
-  case JSON.eitherDecode simpleBody of
-    Left err -> Prelude.fail $ "postJSONUrlEncoded failed when parsing " <> show (typeRep $ Proxy @a) <> ": " <> err <> "\nPayload was: " <> (C8L.unpack simpleBody)
-    Right x  -> pure x
-
-withValidLogin :: (MonadFail m, MonadIO m) => Wai.Port -> Username -> GargPassword -> (Token -> m a) -> m a
-withValidLogin port ur pwd act = do
-  baseUrl <- liftIO $ parseBaseUrl "http://localhost"
-  manager <- liftIO $ newManager defaultManagerSettings
-  let clientEnv = mkClientEnv manager (baseUrl { baseUrlPort = port })
-  let authPayload = AuthRequest ur pwd
-  result <- liftIO $ runClientM (auth_api authPayload) clientEnv
-  case result of
-    Left err  -> liftIO $ throwIO $ Prelude.userError (show err)
-    Right res -> act $ _authRes_token res
+import Test.Utils (protected, shouldRespondWithFragment, withValidLogin)

 tests :: Spec

test/Test/API/Routes.hs  0 → 100644

{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeOperators #-}

module Test.API.Routes where

import Fmt (Builder, (+|), (|+))
import Gargantext.API.Admin.Auth.Types (AuthRequest, AuthResponse)
import Gargantext.API.Ngrams (TableNgramsApiGet, TableNgramsApiPut)
import Gargantext.API.Ngrams.Types ( NgramsTable, NgramsTablePatch, OrderBy, TabType
                                   , Versioned, VersionedWithCount )
import Gargantext.API.Routes (AuthAPI, GargAPIVersion, MkGargAPI)
import Gargantext.Database.Admin.Types.Node (NodeId)
import Gargantext.Core.Types.Main (ListType)
import Gargantext.Core.Types.Query (Limit, MaxSize, MinSize, Offset)
import Gargantext.Prelude
import Network.Wai.Handler.Warp (Port)
import Servant ((:>), Capture)
import Servant.Client (ClientM, client)

-- This is for requests made by http.client directly to hand-crafted URLs
curApi :: Builder
curApi = "v1.0"

mkUrl :: Port -> Builder -> ByteString
mkUrl _port urlPiece = "/api/" +| curApi |+ urlPiece

-- This is for Servant.Client requests
auth_api :: AuthRequest -> ClientM AuthResponse
auth_api = client (Proxy :: Proxy (MkGargAPI (GargAPIVersion AuthAPI)))

-- | Shortcut for TableNgramsApiGet full path
type APITableNgramsGet = MkGargAPI (GargAPIVersion ("node" :> Capture "node_id" NodeId :> "ngrams" :> TableNgramsApiGet))

table_ngrams_get_api :: NodeId
                     -> TabType
                     -> NodeId
                     -> Limit
                     -> Maybe Offset
                     -> Maybe ListType
                     -> Maybe MinSize
                     -> Maybe MaxSize
                     -> Maybe OrderBy
                     -> Maybe Text
                     -> ClientM (VersionedWithCount NgramsTable)
table_ngrams_get_api = client (Proxy :: Proxy APITableNgramsGet)

type APITableNgramsPut = MkGargAPI (GargAPIVersion ("node" :> Capture "node_id" NodeId :> "ngrams" :> TableNgramsApiPut))

table_ngrams_put_api :: NodeId
                     -> TabType
                     -> NodeId
                     -> Versioned NgramsTablePatch
                     -> ClientM (Versioned NgramsTablePatch)
table_ngrams_put_api = client (Proxy :: Proxy APITableNgramsPut)
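A minimal sketch, not part of the commit, of how these Servant client helpers are typically wired to a running test server. It assumes a server already listening on `port` and a pre-built `AuthRequest`; the module name and `loginSketch` function are hypothetical, while `parseBaseUrl`, `mkClientEnv` and `runClientM` are standard `servant-client` calls:

module RoutesSketch where  -- hypothetical module, for illustration only

import Gargantext.API.Admin.Auth.Types (AuthRequest)
import Network.HTTP.Client (defaultManagerSettings, newManager)
import Prelude
import Servant.Client (baseUrlPort, mkClientEnv, parseBaseUrl, runClientM)
import Test.API.Routes (auth_api)

-- Log in against a test server already running on the given port,
-- using the same Servant client that the specs use.
loginSketch :: Int -> AuthRequest -> IO ()
loginSketch port credentials = do
  baseUrl <- parseBaseUrl "http://localhost"
  manager <- newManager defaultManagerSettings
  let env = mkClientEnv manager (baseUrl { baseUrlPort = port })
  eAuth <- runClientM (auth_api credentials) env
  case eAuth of
    Left err -> putStrLn ("login failed: " <> show err)
    Right _  -> putStrLn "login succeeded"

-- mkUrl, by contrast, only builds a raw path for hspec-wai requests:
-- given curApi = "v1.0" above, mkUrl port "/node/1" evaluates to
-- "/api/v1.0/node/1" (the port argument is ignored).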
test/Test/API/Setup.hs
...
@@ -5,8 +5,6 @@ module Test.API.Setup where
 import Control.Lens
 import Control.Monad.Reader
-import Data.ByteString (ByteString)
-import Fmt (Builder, (+|), (|+))
 import Gargantext.API (makeApp)
 import Gargantext.API.Admin.EnvTypes (Mode(Mock), Env(..))
 import Gargantext.API.Admin.Settings
...
@@ -40,7 +38,6 @@ import qualified Gargantext.Utils.Jobs.Monad as Jobs
 import qualified Gargantext.Utils.Jobs.Queue as Jobs
 import qualified Gargantext.Utils.Jobs.Settings as Jobs
 import qualified Network.Wai.Handler.Warp as Warp
-import qualified Network.Wai.Handler.Warp as Wai
 import qualified Servant.Job.Async as ServantAsync
...
@@ -117,10 +114,3 @@ createAliceAndBob testEnv = do
   void $ new_user nur1
   void $ new_user nur2
-
-curApi :: Builder
-curApi = "v1.0"
-
-mkUrl :: Wai.Port -> Builder -> ByteString
-mkUrl _port urlPiece = "/api/" +| curApi |+ urlPiece
test/Test/API/UpdateList.hs

+{-# LANGUAGE NumericUnderscores #-}
 {-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE ScopedTypeVariables #-}
 {-# LANGUAGE TypeApplications #-}
 {-# LANGUAGE TypeFamilies #-}
-{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE TypeOperators #-}
-{-# LANGUAGE NumericUnderscores #-}
 {-# LANGUAGE ViewPatterns #-}

 module Test.API.UpdateList (
...
@@ -12,19 +13,22 @@ module Test.API.UpdateList (
   , pollUntilFinished
   ) where

-import Data.Aeson qualified as JSON
+import Control.Lens ((^.), mapped, over, view)
+import Control.Monad.Fail (fail)
 import Data.Aeson.QQ
+import Data.ByteString.Lazy qualified as BL
+import Data.Map.Strict qualified as Map
+import Data.Map.Strict.Patch qualified as PM
+import Data.Set qualified as Set
 import Data.String (fromString)
 import Data.Text qualified as T
-import Data.Text.Encoding qualified as T
 import Data.Text.IO qualified as TIO
 import Fmt
 import Gargantext.API.Admin.Auth.Types (Token)
-import Gargantext.API.Admin.Orchestrator.Types
+import Gargantext.API.Ngrams qualified as APINgrams
 import Gargantext.API.Ngrams.List (ngramsListFromCSVData)
-import Gargantext.API.Ngrams.Types
+import Gargantext.API.Ngrams.Types ( MSet(..), NgramsPatch(..), NgramsRepoElement(..), NgramsTablePatch(..)
+                                   , NgramsTerm(..), Versioned(..), mSetToList, toNgramsPatch
+                                   , ne_children, ne_ngrams, vc_data, _NgramsTable )
-import Gargantext.Core.Types
+import Gargantext.Core.NodeStory (hasNodeStory, nse_getter, HasNodeArchiveStoryImmediateSaver(..))
+import Gargantext.Core.Types (CorpusId, ListId, ListType(..), NodeId, _NodeId)
 import Gargantext.Core.Types.Individu
 import Gargantext.Database.Action.User
 import Gargantext.Database.Admin.Types.Hyperdata.Corpus
...
@@ -34,38 +38,18 @@ import Gargantext.Database.Schema.Ngrams
 import Gargantext.Prelude hiding (get)
 import Network.Wai.Handler.Warp qualified as Wai
 import Paths_gargantext (getDataFileName)
-import Test.API.Private (withValidLogin, protectedJSON, postJSONUrlEncoded, getJSON)
+import Servant.Client (runClientM)
-import Test.API.Setup (withTestDBAndPort, setupEnvironment, mkUrl, createAliceAndBob)
+import Test.API.Routes (mkUrl, table_ngrams_get_api, table_ngrams_put_api)
+import Test.API.Setup (withTestDBAndPort, setupEnvironment, createAliceAndBob)
 import Test.Database.Types
 import Test.Hspec
+import Test.Hspec.Wai (shouldRespondWith)
 import Test.Hspec.Wai.Internal (withApplication, WaiSession)
 import Test.Hspec.Wai.JSON (json)
-import Test.Hspec.Wai (shouldRespondWith)
+import Test.Types (JobPollHandle(..))
+import Test.Utils ( authenticatedServantClient, getJSON, pollUntilFinished, postJSONUrlEncoded
+                  , protectedJSON, withValidLogin )
 import Web.FormUrlEncoded
-import qualified Data.Map.Strict as Map

-data JobPollHandle = JobPollHandle {
-    _jph_id     :: !Text
-  , _jph_log    :: [JobLog]
-  , _jph_status :: !Text
-  , _jph_error  :: !(Maybe Text)
-  } deriving Show
-
-instance JSON.FromJSON JobPollHandle where
-  parseJSON = JSON.withObject "JobPollHandle" $ \o -> do
-    _jph_id     <- o JSON..: "id"
-    _jph_log    <- o JSON..: "log"
-    _jph_status <- o JSON..: "status"
-    _jph_error  <- o JSON..:? "error"
-    pure JobPollHandle{..}
-
-instance JSON.ToJSON JobPollHandle where
-  toJSON JobPollHandle{..} = JSON.object [
-      "id"     JSON..= JSON.toJSON _jph_id
-    , "log"    JSON..= JSON.toJSON _jph_log
-    , "status" JSON..= JSON.toJSON _jph_status
-    , "error"  JSON..= JSON.toJSON _jph_error
-    ]

 newCorpusForUser :: TestEnv -> T.Text -> IO NodeId
 newCorpusForUser env uname = flip runReaderT env $ runTestMonad $ do
...
@@ -75,28 +59,38 @@ newCorpusForUser env uname = flip runReaderT env $ runTestMonad $ do
   (corpusId:_) <- mk (Just corpusName) (Nothing :: Maybe HyperdataCorpus) parentId uid
   pure corpusId

--- | Poll the given URL every second until it finishes.
--- Retries up to 60 times (i.e. for 1 minute, before giving up)
-pollUntilFinished :: HasCallStack
-                  => Token
-                  -> Wai.Port
-                  -> (JobPollHandle -> Builder)
-                  -> JobPollHandle
-                  -> WaiSession () JobPollHandle
-pollUntilFinished tkn port mkUrlPiece = go 60
-  where
-    go :: Int -> JobPollHandle -> WaiSession () JobPollHandle
-    go 0 h = panicTrace $ "pollUntilFinished exhausted attempts. Last found JobPollHandle: " <> T.decodeUtf8 (BL.toStrict $ JSON.encode h)
-    go n h = case _jph_status h == "IsPending" || _jph_status h == "IsRunning" of
-      True -> do
-        liftIO $ threadDelay 1_000_000
-        h' <- protectedJSON tkn "GET" (mkUrl port $ mkUrlPiece h) ""
-        go (n - 1) h'
-      False
-        | _jph_status h == "IsFailure"
-        -> panicTrace $ "JobPollHandle contains a failure: " <> T.decodeUtf8 (BL.toStrict $ JSON.encode h)
-        | otherwise
-        -> pure h
+uploadJSONList :: Wai.Port
+               -> Token
+               -> CorpusId
+               -> WaiSession () ListId
+uploadJSONList port token cId = do
+  ([listId] :: [NodeId]) <- protectedJSON token "POST" (mkUrl port ("/node/" <> build cId)) [aesonQQ|{"pn_typename":"NodeList","pn_name":"Testing"}|]
+  -- Upload the JSON doc
+  simpleNgrams <- liftIO (TIO.readFile =<< getDataFileName "test-data/ngrams/simple.json")
+  let jsonFileFormData = [ (T.pack "_wjf_data", simpleNgrams)
+                         , ("_wjf_filetype", "JSON")
+                         , ("_wjf_name", "simple_ngrams.json")
+                         ]
+  let url = "/lists/" +| listId |+ "/add/form/async"
+  let mkPollUrl j = "/corpus/" +| listId |+ "/add/form/async/" +| _jph_id j |+ "/poll?limit=1"
+  (j :: JobPollHandle) <- postJSONUrlEncoded token (mkUrl port url) (urlEncodeFormStable $ toForm jsonFileFormData)
+  j' <- pollUntilFinished token port mkPollUrl j
+  liftIO (_jph_status j' `shouldBe` "IsFinished")
+  pure listId

+-- uploadListPatch :: Wai.Port
+--                 -> Token
+--                 -> CorpusId
+--                 -> ListId
+--                 -> APINgrams.Version
+--                 -> PM.PatchMap NgramsTerm NgramsPatch
+--                 -> WaiSession () ()
+-- uploadListPatch port token cId listId version patch = do
+--   let js = JSON.toJSON (Versioned version $ NgramsTablePatch patch)
+--   -- panicTrace $ "[uploadListPatch] js: " <> show js
+--   -- APINgrams.tableNgramsPut Terms listId (Versioned 0 $ NgramsTablePatch $ fst patch)
+--   (_res :: Versioned NgramsTablePatch) <- protectedJSON token "PUT" (mkUrl port ("/node/" <> build cId <> "/ngrams?ngramsType=Terms&list=" <> build listId)) js
+--   -- panicTrace $ "[uploadListPatch] res: " <> show res
+--   pure ()

 tests :: Spec
 tests = sequential $ aroundAll withTestDBAndPort $ do
...
@@ -111,18 +105,7 @@ tests = sequential $ aroundAll withTestDBAndPort $ do
       cId <- newCorpusForUser testEnv "alice"
       withApplication app $ do
         withValidLogin port "alice" (GargPassword "alice") $ \token -> do
-          ([listId] :: [NodeId]) <- protectedJSON token "POST" (mkUrl port ("/node/" <> build cId)) [aesonQQ|{"pn_typename":"NodeList","pn_name":"Testing"}|]
-          -- Upload the JSON doc
-          simpleNgrams <- liftIO (TIO.readFile =<< getDataFileName "test-data/ngrams/simple.json")
-          let jsonFileFormData = [ (T.pack "_wjf_data", simpleNgrams)
-                                 , ("_wjf_filetype", "JSON")
-                                 , ("_wjf_name", "simple_ngrams.json")
-                                 ]
-          let url = "/lists/" +| listId |+ "/add/form/async"
-          let mkPollUrl j = "/corpus/" +| listId |+ "/add/form/async/" +| _jph_id j |+ "/poll?limit=1"
-          (j :: JobPollHandle) <- postJSONUrlEncoded token (mkUrl port url) (urlEncodeFormStable $ toForm jsonFileFormData)
-          j' <- pollUntilFinished token port mkPollUrl j
-          liftIO (_jph_status j' `shouldBe` "IsFinished")
+          listId <- uploadJSONList port token cId

           -- Now check that we can retrieve the ngrams
           let getUrl = "/node/" +| listId |+ "/ngrams?ngramsType=Terms&listType=MapTerm&list=" +| listId |+ "&limit=50"
...
@@ -140,6 +123,70 @@ tests = sequential $ aroundAll withTestDBAndPort $ do
                 ]
               } |]

+    it "does not create duplicates when uploading JSON (#313)" $ \((testEnv, port), app) -> do
+      cId <- newCorpusForUser testEnv "alice"
+      withApplication app $ do
+        withValidLogin port "alice" (GargPassword "alice") $ \token -> do
+          -- this term is imported from the .json file
+          let importedTerm = NgramsTerm "abelian group"
+          -- this is the new term, under which importedTerm will be grouped
+          let newTerm = NgramsTerm "new abelian group"
+          clientEnv <- liftIO $ authenticatedServantClient port token
+          listId <- uploadJSONList port token cId
+
+          let checkNgrams expected = do
+                eng <- liftIO $ runClientM (table_ngrams_get_api cId APINgrams.Terms listId 10 Nothing (Just MapTerm) Nothing Nothing Nothing Nothing) clientEnv
+                case eng of
+                  Left err -> fail (show err)
+                  Right r ->
+                    let real = over mapped (\nt -> ( nt ^. ne_ngrams
+                                                   , mSetToList $ nt ^. ne_children ))
+                                           (r ^. vc_data . _NgramsTable)
+                    in liftIO $ Set.fromList real `shouldBe` Set.fromList expected
+
+          -- The #313 error is about importedTerm being duplicated
+          -- in a specific case
+          checkNgrams [ (importedTerm, []) ]
+
+          let nre = NgramsRepoElement 1 MapTerm Nothing Nothing (MSet mempty)
+          let patch = PM.fromList [
+                ( newTerm
+                , NgramsReplace { _patch_old = Nothing
+                                , _patch_new = Just nre } )
+                ]
+          _ <- liftIO $ runClientM (table_ngrams_put_api cId APINgrams.Terms listId (Versioned 1 $ NgramsTablePatch $ fst patch)) clientEnv
+
+          -- check that new term is added (with no parent)
+          checkNgrams [ (newTerm, []), (importedTerm, []) ]
+
+          -- now patch it so that we have a group
+          let patchChildren = PM.fromList [
+                ( newTerm
+                , toNgramsPatch [importedTerm] )
+                ]
+          _ <- liftIO $ runClientM (table_ngrams_put_api cId APINgrams.Terms listId (Versioned 32 $ NgramsTablePatch $ fst patchChildren)) clientEnv
+
+          -- check that new term is parent of old one
+          checkNgrams [ (newTerm, [importedTerm]) ]
+
+          -- finally, upload the list again, the group should be as
+          -- it was before (the bug in #313 was that "abelian group"
+          -- was created again as a term with no parent)
+          _ <- uploadJSONList port token cId
+
+          -- old (imported) term shouldn't become parentless
+          -- (#313 error was that we had [newTerm, importedTerm] instead)
+          -- NOTE: Unfortunately, I'm not able to reproduce this
+          -- error here, though I tried. Something is missing, maybe
+          -- some nodestory integration with tests?
+          checkNgrams [ (newTerm, [importedTerm]) ]
+
+          pure ()

   describe "POST /api/v1.0/lists/:id/csv/add/form/async (CSV)" $ do
     it "parses CSV via ngramsListFromCSVData" $ \((_testEnv, _port), _app) -> do
...
@@ -161,8 +208,8 @@ tests = sequential $ aroundAll withTestDBAndPort $ do
                             , ("_wtf_filetype", "CSV")
                             , ("_wtf_name", "simple.csv")
                             ]
-      let url = "/lists/" <> (fromString $ show $ _NodeId listId) <> "/csv/add/form/async"
+      let url = "/lists/" <> fromString (show $ _NodeId listId) <> "/csv/add/form/async"
-      let mkPollUrl j = "/corpus/" <> (fromString $ show $ _NodeId listId) <> "/add/form/async/" +| _jph_id j |+ "/poll?limit=1"
+      let mkPollUrl j = "/corpus/" <> fromString (show $ _NodeId listId) <> "/add/form/async/" +| _jph_id j |+ "/poll?limit=1"
       (j :: JobPollHandle) <- postJSONUrlEncoded token (mkUrl port url) (urlEncodeFormStable $ toForm tsvFileFormData)
       j' <- pollUntilFinished token port mkPollUrl j
       liftIO (_jph_status j' `shouldBe` "IsFinished")
...
test/Test/Database/Operations/NodeStory.hs
...
@@ -13,7 +13,7 @@ Portability : POSIX
 module Test.Database.Operations.NodeStory where

-import Control.Lens ((^.), (.~), _2)
+import Control.Lens ((^.), (.~), (?~), _2)
 import Control.Monad.Reader
 import Data.Map.Strict qualified as Map
 import Data.Map.Strict.Patch qualified as PM
...
@@ -23,7 +23,7 @@ import Database.PostgreSQL.Simple.SqlQQ
 import Gargantext.API.Ngrams (commitStatePatch, mSetFromList, setListNgrams, saveNodeStory)
 import Gargantext.API.Ngrams.Types (MSet(..), NgramsPatch(..), NgramsRepoElement(..), NgramsTerm(..), Versioned(..), mkNgramsTablePatch, nre_children, nre_list, nre_parent, nre_root)
 import Gargantext.API.Ngrams.Tools (getNodeStory)
-import Gargantext.Core.NodeStory hiding (runPGSQuery)
+import Gargantext.Core.NodeStory
 import Gargantext.Core.Types.Individu
 import Gargantext.Core.Types (ListType(..), ListId, NodeId, UserId)
 import Gargantext.Database.Action.User (getUserId)
...
@@ -73,15 +73,15 @@ simpleParentTerm' = fst simpleTerm
 simpleParentTerm :: (NgramsTerm, NgramsRepoElement)
 simpleParentTerm = ( simpleParentTerm'
                    , simpleTerm ^. _2
-                      & nre_children .~ (mSetFromList [simpleChildTerm']) )
+                      & nre_children .~ mSetFromList [simpleChildTerm'] )

 simpleChildTerm' :: NgramsTerm
 simpleChildTerm' = NgramsTerm "world"

 simpleChildTerm :: (NgramsTerm, NgramsRepoElement)
 simpleChildTerm = ( simpleChildTerm'
                   , simpleTerm ^. _2
-                     & nre_parent .~ Just simpleParentTerm'
+                     & nre_parent ?~ simpleParentTerm'
-                     & nre_root .~ Just simpleParentTerm' )
+                     & nre_root ?~ simpleParentTerm' )

 -- tests start here
...
@@ -92,7 +92,7 @@ createListTest env = do
   (userId, corpusId, listId, _a) <- commonInitialization
   listId' <- getOrMkList corpusId userId
   liftIO $ listId `shouldBe` listId'
...
@@ -110,7 +110,7 @@ queryNodeStoryTest env = do
   liftIO $ do
     a' `shouldBe` a

 insertNewTermsToNodeStoryTest :: TestEnv -> Assertion
 insertNewTermsToNodeStoryTest env = do
...
@@ -128,7 +128,7 @@ insertNewTermsToNodeStoryTest env = do
   -- check that the ngrams are in the DB as well
   ngramsMap <- selectNgramsId [unNgramsTerm terms]
   liftIO $ (snd <$> Map.toList ngramsMap) `shouldBe` [unNgramsTerm terms]

   -- Finally, check that node stories are inserted correctly
   dbTerms <- runPGSQuery [sql|
     SELECT terms
...
@@ -137,7 +137,7 @@ insertNewTermsToNodeStoryTest env = do
     WHERE node_id = ? |] (PSQL.Only listId)
   liftIO $ dbTerms `shouldBe` [PSQL.Only $ unNgramsTerm terms]

 insertNewTermsWithChildrenToNodeStoryTest :: TestEnv -> Assertion
 insertNewTermsWithChildrenToNodeStoryTest env = do
...
@@ -147,7 +147,7 @@ insertNewTermsWithChildrenToNodeStoryTest env = do
   let (tParent, nreParent) = simpleParentTerm
   let (tChild, nreChild) = simpleChildTerm
   let terms = unNgramsTerm <$> [tParent, tChild]
   let nls = Map.fromList [(tParent, nreParent), (tChild, nreChild)]
   setListNgrams listId NgramsTerms nls
...
@@ -160,7 +160,7 @@ insertNewTermsWithChildrenToNodeStoryTest env = do
   -- the terms in the DB by now
   ngramsMap <- selectNgramsId terms
   liftIO $ (snd <$> Map.toList ngramsMap) `shouldBe` terms

   dbTerms <- runPGSQuery [sql|
     SELECT terms
     FROM ngrams
...
@@ -171,13 +171,13 @@ insertNewTermsWithChildrenToNodeStoryTest env = do
   -- let (Just (tParentId, _)) = head $ filter ((==) (unNgramsTerm tParent) . snd) $ Map.toList ngramsMap2
   -- let (Just (tChildId, _)) = head $ filter ((==) (unNgramsTerm tChild) . snd) $ Map.toList ngramsMap2
   -- [PSQL.Only tParentId'] <-
   --   runPGSQuery [sql|SELECT parent_id FROM ngrams WHERE terms = ?|] (PSQL.Only tChild)
   -- liftIO $ tParentId `shouldBe` tParentId'

 insertNodeStoryChildrenWithDifferentNgramsTypeThanParentTest :: TestEnv -> Assertion
 insertNodeStoryChildrenWithDifferentNgramsTypeThanParentTest env = do
   flip runReaderT env $ runTestMonad $ do
...
@@ -187,10 +187,10 @@ insertNodeStoryChildrenWithDifferentNgramsTypeThanParentTest env = do
   let (tChild, nreChildGoodType) = simpleChildTerm
   let nreChildBrokenType = nreChildGoodType & nre_list .~ MapTerm
   let terms = unNgramsTerm <$> [tParent, tChild]
   let nls = Map.fromList [(tParent, nreParent), (tChild, nreChildBrokenType)]
   let nlsWithChildFixed = Map.fromList [(tParent, nreParent), (tChild, nreChildGoodType)]
   setListNgrams listId NgramsTerms nls
   a <- getNodeStory listId
...
@@ -200,7 +200,7 @@ insertNodeStoryChildrenWithDifferentNgramsTypeThanParentTest env = do
   ngramsMap <- selectNgramsId terms
   liftIO $ (snd <$> Map.toList ngramsMap) `shouldBe` terms

   dbTerms <- runPGSQuery [sql|
     SELECT terms
     FROM ngrams
...
@@ -210,12 +210,12 @@ insertNodeStoryChildrenWithDifferentNgramsTypeThanParentTest env = do
   liftIO $ (Set.fromList $ (\(PSQL.Only t) -> t) <$> dbTerms) `shouldBe` (Set.fromList terms)

   let (Just (tChildId, _)) = head $ filter ((==) (unNgramsTerm tChild) . snd) $ Map.toList ngramsMap
   [PSQL.Only childType] <- runPGSQuery [sql|SELECT ngrams_repo_element->>'list'
                                             FROM node_stories
                                             WHERE ngrams_id = ?|] (PSQL.Only tChildId)
   liftIO $ childType `shouldBe` ("MapTerm" :: Text)

 setListNgramsUpdatesNodeStoryTest :: TestEnv -> Assertion
 setListNgramsUpdatesNodeStoryTest env = do
   flip runReaderT env $ runTestMonad $ do
...
@@ -232,7 +232,7 @@ setListNgramsUpdatesNodeStoryTest env = do
   -- check that the ngrams are in the DB as well
   ngramsMap <- selectNgramsId [unNgramsTerm terms]
   liftIO $ (snd <$> Map.toList ngramsMap) `shouldBe` [unNgramsTerm terms]

   let nre2 = NgramsRepoElement { _nre_size = 1
                                , _nre_list = MapTerm
                                , _nre_root = Nothing
...
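The behavioural content of these hunks is the switch from `.~ Just x` to `?~ x`. A small standalone sketch (assuming plain `lens` and a hypothetical record, not anything from the codebase) showing the two spellings build the same value:

{-# LANGUAGE TemplateHaskell #-}
module QuestionMarkTildeSketch where  -- hypothetical module, for illustration only

import Control.Lens ((&), (.~), (?~), makeLenses)

data Node = Node { _parent :: Maybe String } deriving (Eq, Show)
makeLenses ''Node

-- (?~) sets a Maybe-valued field to 'Just' its argument,
-- so these two expressions are equal:
withDotTilde, withQuestionTilde :: Node
withDotTilde      = Node Nothing & parent .~ Just "root"
withQuestionTilde = Node Nothing & parent ?~ "root"

-- >>> withDotTilde == withQuestionTilde
-- True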
test/Test/Types.hs  0 → 100644

module Test.Types where

import Data.Aeson ((.:), (.:?), (.=), FromJSON(..), ToJSON(..), object, withObject)
import Gargantext.API.Admin.Orchestrator.Types (JobLog(..))
import Gargantext.Prelude

data JobPollHandle = JobPollHandle {
    _jph_id     :: !Text
  , _jph_log    :: [JobLog]
  , _jph_status :: !Text
  , _jph_error  :: !(Maybe Text)
  } deriving Show

instance FromJSON JobPollHandle where
  parseJSON = withObject "JobPollHandle" $ \o -> do
    _jph_id     <- o .: "id"
    _jph_log    <- o .: "log"
    _jph_status <- o .: "status"
    _jph_error  <- o .:? "error"
    pure JobPollHandle{..}

instance ToJSON JobPollHandle where
  toJSON JobPollHandle{..} = object [
      "id"     .= toJSON _jph_id
    , "log"    .= toJSON _jph_log
    , "status" .= toJSON _jph_status
    , "error"  .= toJSON _jph_error
    ]
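A hedged sketch of the wire format this type accepts: the key names ("id", "log", "status", and an optional "error", since it is parsed with `.:?`) come from the instances above, while the concrete payload and module name below are invented for illustration:

{-# LANGUAGE OverloadedStrings #-}
module JobPollHandleSketch where  -- hypothetical module, for illustration only

import Data.Aeson (decode)
import Data.ByteString.Lazy (ByteString)
import Test.Types (JobPollHandle(..))

-- An invented poll response; "error" is omitted on purpose.
samplePayload :: ByteString
samplePayload = "{\"id\":\"42\",\"log\":[],\"status\":\"IsPending\"}"

-- Decoding succeeds with _jph_id = "42", _jph_log = [],
-- _jph_status = "IsPending" and _jph_error = Nothing.
decodedSample :: Maybe JobPollHandle
decodedSample = decode samplePayload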
test/Test/Utils.hs

+{-# LANGUAGE NumericUnderscores #-}
 {-# LANGUAGE OverloadedStrings #-}
 {-# LANGUAGE QuasiQuotes #-}
 {-# LANGUAGE ScopedTypeVariables #-}
 {-# LANGUAGE TemplateHaskell #-}
+{-# LANGUAGE TypeApplications #-}

 module Test.Utils where

-import Control.Exception
+import Control.Exception ()
-import Control.Monad
+import Control.Lens ((^.))
-import Data.Aeson
+import Control.Monad ()
 import Data.Aeson qualified as JSON
 import Data.Aeson.KeyMap qualified as KM
 import Data.ByteString.Char8 qualified as B
-import Data.Char (isSpace)
+import Data.ByteString.Lazy qualified as L
-import Network.HTTP.Types
+import Data.CaseInsensitive qualified as CI
-import Network.Wai.Test
+import Data.Map.Strict qualified as Map
-import Prelude
+import Data.Text qualified as T
+import Data.Text.Encoding qualified as TE
+import Data.Text.Lazy qualified as TL
+import Data.Text.Lazy.Encoding qualified as TLE
+import Fmt (Builder)
+import Gargantext.API.Admin.Auth.Types (AuthRequest(..), Token, authRes_token)
+import Gargantext.Core.Types.Individu (Username, GargPassword)
+import Gargantext.Prelude
+import Network.HTTP.Client (defaultManagerSettings, newManager)
+import Network.HTTP.Types (Header, Method, status200)
+import Network.HTTP.Types.Header (hAccept, hAuthorization, hContentType)
+import Network.Wai.Handler.Warp (Port)
+import Network.Wai.Test (SResponse(..))
+import Prelude qualified
+import Servant.Client (ClientEnv, baseUrlPort, defaultMakeClientRequest, makeClientRequest, mkClientEnv, parseBaseUrl, runClientM)
+import Servant.Client.Core.Request (addHeader)
+import Test.API.Routes (auth_api, mkUrl)
 import Test.Hspec.Expectations
-import Test.Hspec.Wai
+import Test.Hspec.Wai (MatchBody(..), WaiExpectation, WaiSession, request)
 import Test.Hspec.Wai.JSON (FromValue(..))
-import Test.Hspec.Wai.Matcher
+import Test.Hspec.Wai.Matcher (MatchHeader(..), ResponseMatcher(..), bodyEquals, formatHeader, match)
-import Test.Tasty.HUnit
+import Test.Tasty.HUnit (Assertion)
+import Test.Types

 -- | Marks the input 'Assertion' as pending, by ignoring any exception
 -- thrown by it.
-pending :: String -> Assertion -> Assertion
+pending :: Prelude.String -> Assertion -> Assertion
 pending reason act = act `catch` (\(e :: SomeException) -> do
   putStrLn $ "PENDING: " <> reason
   putStrLn (displayException e))
...
@@ -56,9 +77,9 @@ instance FromValue JsonFragmentResponseMatcher where
   fromValue = JsonFragmentResponseMatcher . ResponseMatcher 200 [matchHeader] . containsJSON
     where
       matchHeader = MatchHeader $ \headers _body ->
-        case lookup "Content-Type" headers of
+        case Prelude.lookup "Content-Type" headers of
          Just h | isJSON h -> Nothing
-         _ -> Just $ unlines [
+         _ -> Just $ Prelude.unlines [
             "missing header:"
           , formatHeader ("Content-Type", "application/json")
           ]
...
@@ -70,7 +91,7 @@ instance FromValue JsonFragmentResponseMatcher where
     breakAt c = fmap (B.drop 1) . B.break (== c)
     strip = B.reverse . B.dropWhile isSpace . B.reverse . B.dropWhile isSpace

-shouldRespondWithJSON :: (FromJSON a, ToJSON a, HasCallStack)
+shouldRespondWithJSON :: (JSON.FromJSON a, JSON.ToJSON a, HasCallStack)
                       => WaiSession st a
                       -> JsonFragmentResponseMatcher
                       -> WaiExpectation st
...
@@ -78,14 +99,127 @@ shouldRespondWithJSON action matcher = do
   r <- action
   forM_ (match (SResponse status200 mempty (JSON.encode r)) (getJsonMatcher matcher)) (liftIO . expectationFailure)

-containsJSON :: Value -> MatchBody
+containsJSON :: JSON.Value -> MatchBody
 containsJSON expected = MatchBody matcher
   where
-    matcher headers actualBody = case decode actualBody of
+    matcher headers actualBody = case JSON.decode actualBody of
       Just actual | expected `isSubsetOf` actual -> Nothing
-      _ -> let MatchBody m = bodyEquals (encode expected) in m headers actualBody
+      _ -> let MatchBody m = bodyEquals (JSON.encode expected) in m headers actualBody

-    isSubsetOf :: Value -> Value -> Bool
+    isSubsetOf :: JSON.Value -> JSON.Value -> Bool
-    isSubsetOf (Object sub) (Object sup) =
+    isSubsetOf (JSON.Object sub) (JSON.Object sup) =
       all (\(key, value) -> KM.lookup key sup == Just value) (KM.toList sub)
     isSubsetOf x y = x == y

+authenticatedServantClient :: Int -> T.Text -> IO ClientEnv
+authenticatedServantClient port token = do
+  baseUrl <- parseBaseUrl "http://localhost"
+  manager <- newManager defaultManagerSettings
+
+  let requestAddToken url req =
+        defaultMakeClientRequest url $
+          addHeader hAuthorization ("Bearer " <> token) $
+          addHeader hContentType (T.pack "application/json") req
+
+  pure $ (mkClientEnv manager (baseUrl { baseUrlPort = port })) { makeClientRequest = requestAddToken }
+
+-- | Issue a request with a valid 'Authorization: Bearer' inside.
+protected :: HasCallStack => Token -> Method -> ByteString -> L.ByteString -> WaiSession () SResponse
+protected tkn mth url = protectedWith mempty tkn mth url
+
+protectedJSON :: forall a. (JSON.FromJSON a, Typeable a, HasCallStack)
+              => Token -> Method -> ByteString -> JSON.Value -> WaiSession () a
+protectedJSON tkn mth url = protectedJSONWith mempty tkn mth url
+
+protectedJSONWith :: forall a. (JSON.FromJSON a, Typeable a, HasCallStack)
+                  => [Header] -> Token -> Method -> ByteString -> JSON.Value -> WaiSession () a
+protectedJSONWith hdrs tkn mth url jsonV = do
+  SResponse{..} <- protectedWith hdrs tkn mth url (JSON.encode jsonV)
+  case JSON.eitherDecode simpleBody of
+    Left err -> Prelude.fail $ "protectedJSON failed when parsing " <> show (typeRep $ Proxy @a) <> ": " <> err
+    Right x  -> pure x
+
+protectedWith :: HasCallStack
+              => [Header] -> Token -> Method -> ByteString -> L.ByteString -> WaiSession () SResponse
+protectedWith extraHeaders tkn mth url payload =
+  -- Using a map means that if any of the extra headers contains a clashing header name,
+  -- the extra headers will take precedence.
+  let defaultHeaders = [ (hAccept, "application/json;charset=utf-8")
+                       , (hContentType, "application/json")
+                       , (hAuthorization, "Bearer " <> TE.encodeUtf8 tkn)
+                       ]
+      hdrs = Map.toList $ Map.fromList $ defaultHeaders <> extraHeaders
+  in request mth url hdrs payload
+
+protectedNewError :: HasCallStack => Token -> Method -> ByteString -> L.ByteString -> WaiSession () SResponse
+protectedNewError tkn mth url = protectedWith newErrorFormat tkn mth url
+  where
+    newErrorFormat = [(CI.mk "X-Garg-Error-Scheme", "new")]
+
+getJSON :: Token -> ByteString -> WaiSession () SResponse
+getJSON tkn url = protectedWith mempty tkn "GET" url ""
+
+postJSONUrlEncoded :: forall a. (JSON.FromJSON a, Typeable a, HasCallStack)
+                   => Token -> ByteString -> L.ByteString -> WaiSession () a
+postJSONUrlEncoded tkn url queryPaths = do
+  SResponse{..} <- protectedWith [(hContentType, "application/x-www-form-urlencoded")] tkn "POST" url queryPaths
+  case JSON.eitherDecode simpleBody of
+    Left err -> Prelude.fail $ "postJSONUrlEncoded failed when parsing " <> show (typeRep $ Proxy @a) <> ": " <> err <> "\nPayload was: " <> (T.unpack . TL.toStrict . TLE.decodeUtf8 $ simpleBody)
+    Right x  -> pure x
+
+withValidLogin :: (MonadFail m, MonadIO m) => Port -> Username -> GargPassword -> (Token -> m a) -> m a
+withValidLogin port ur pwd act = do
+  baseUrl <- liftIO $ parseBaseUrl "http://localhost"
+  manager <- liftIO $ newManager defaultManagerSettings
+  let clientEnv = mkClientEnv manager (baseUrl { baseUrlPort = port })
+  let authPayload = AuthRequest ur pwd
+  result <- liftIO $ runClientM (auth_api authPayload) clientEnv
+  case result of
+    Left err  -> liftIO $ throwIO $ Prelude.userError (show err)
+    Right res -> act $ res ^. authRes_token
+
+-- | Poll the given job URL every second until it finishes.
+-- Retries up to 60 times (i.e. for 1 minute, before giving up)
+pollUntilFinished :: HasCallStack
+                  => Token
+                  -> Port
+                  -> (JobPollHandle -> Builder)
+                  -> JobPollHandle
+                  -> WaiSession () JobPollHandle
+pollUntilFinished tkn port mkUrlPiece = go 60
+  where
+    go :: Int -> JobPollHandle -> WaiSession () JobPollHandle
+    go 0 h = panicTrace $ "pollUntilFinished exhausted attempts. Last found JobPollHandle: " <> TE.decodeUtf8 (L.toStrict $ JSON.encode h)
+    go n h = case _jph_status h == "IsPending" || _jph_status h == "IsRunning" of
+      True -> do
+        liftIO $ threadDelay 1_000_000
+        h' <- protectedJSON tkn "GET" (mkUrl port $ mkUrlPiece h) ""
+        go (n - 1) h'
+      False
+        | _jph_status h == "IsFailure"
+        -> panicTrace $ "JobPollHandle contains a failure: " <> TE.decodeUtf8 (L.toStrict $ JSON.encode h)
+        | otherwise
+        -> pure h
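A short, hedged sketch of how these relocated helpers compose inside a spec item. The "/user" endpoint, the expected status, and the exact fixture type written here as ((TestEnv, Int), Application) are assumptions made for illustration; the scaffolding merely mirrors the withValidLogin / withApplication pattern used by the API tests above:

{-# LANGUAGE OverloadedStrings #-}
module UtilsSketch where  -- hypothetical module, for illustration only

import Control.Monad.IO.Class (liftIO)
import Gargantext.Core.Types.Individu (GargPassword(..))
import Network.HTTP.Types (status200)
import Network.Wai (Application)
import Network.Wai.Test (SResponse(..))
import Prelude
import Test.API.Routes (mkUrl)
import Test.Database.Types (TestEnv)
import Test.Hspec
import Test.Hspec.Wai.Internal (withApplication)
import Test.Utils (protected, withValidLogin)

exampleSpecItem :: SpecWith ((TestEnv, Int), Application)
exampleSpecItem =
  it "answers an authenticated GET" $ \((_testEnv, port), app) ->
    withApplication app $
      withValidLogin port "alice" (GargPassword "alice") $ \token -> do
        -- 'protected' adds the Authorization / Accept / Content-Type headers.
        resp <- protected token "GET" (mkUrl port "/user") ""
        liftIO $ simpleStatus resp `shouldBe` status200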