Commit 1781ba63, authored Sep 03, 2022 by Karen Konou

    Merge branch 'dev' into 428-dev-profile-img-upload

Parents: 79f858de, 91b97fbd

Showing 40 changed files, with 1020 additions and 302 deletions (+1020 −302).
Changed files:

    CHANGELOG.md                                              +14    -0
    bin/gargantext-cli/CleanCsvCorpus.hs                       +1    -1
    bin/gargantext-cli/Main.hs                                 +2    -2
    bin/gargantext-client/Auth.hs                              +0    -1
    bin/gargantext-phylo/Main.hs                               +1    -1
    bin/install                                                +1    -1
    cabal.project                                             +54    -6
    devops/postgres/schema.sql                                +27    -0
    devops/postgres/upgrade/0.0.6.sql                         +39    -0
    gargantext.cabal                                           +7    -2
    nix/pkgs.nix                                               +2    -3
    package.yaml                                               +3    -1
    server                                                     +2    -2
    shell.nix                                                  +2    -2
    src-test/Graph/Clustering.hs                               +2    -2
    src/Gargantext/API.hs                                      +4    -4
    src/Gargantext/API/Admin/Auth.hs                           +5   -17
    src/Gargantext/API/Admin/Auth/Types.hs                     +7    -0
    src/Gargantext/API/Admin/Settings.hs                       +4    -3
    src/Gargantext/API/Client.hs                               +1    -1
    src/Gargantext/API/Dev.hs                                  +5    -4
    src/Gargantext/API/GraphQL.hs                              +9    -4
    src/Gargantext/API/GraphQL/Team.hs                        +74    -0
    src/Gargantext/API/Ngrams.hs                              +10    -7
    src/Gargantext/API/Ngrams/Tools.hs                        +22    -1
    src/Gargantext/API/Ngrams/Types.hs                        +15   -11
    src/Gargantext/API/Node/File.hs                            +1    -1
    src/Gargantext/Core/Mail.hs                                +2    -1
    src/Gargantext/Core/NodeStory.hs                         +338  -194
    src/Gargantext/Core/NodeStoryFile.hs                     +225    -0
    src/Gargantext/Core/Text/Corpus/Parsers/CSV.hs             +4    -4
    src/Gargantext/Core/Text/List/Social/History.hs            +0    -2
    src/Gargantext/Core/Viz/Graph/Tools.hs                     +4    -7
    src/Gargantext/Core/Viz/Phylo/Legacy/LegacyMain.hs         +1    -0
    src/Gargantext/Database/Action/Flow.hs                     +1    -1
    src/Gargantext/Database/Action/Flow/List.hs                +0    -1
    src/Gargantext/Database/GargDB.hs                          +2    -2
    src/Gargantext/Database/Query/Table/Node.hs                +3    -0
    src/Gargantext/Database/Query/Table/NodeNode.hs          +123   -10
    stack.yaml                                                 +3    -3
CHANGELOG.md  (view file @ 1781ba63)

## Version 0.0.6

* [OPTIM] Ngrams Table optimization. To upgrade:
  1. `./bin/psql gargantext.ini < devops/postgres/upgrade/0.0.6.sql`
  2. In `stack --nix repl`, run `runCmdReplEasy $ migrateFromDirToDb`
* [FIX] Ngrams Table next button: loads only once instead of twice, as previously
* [FRONT][FIX] Resize handler on Write Node
* [FRONT][FIX] Do not highlight ngrams if maximum abstract length > 4500 characters

## Version 0.0.5.9.6

* [BACK][FIX] Nix build ok
* [BACK][OPTIM] Confluence optimization
* [FRONT][BACK][FEAT] Team management
* [FRONT][FEAT] Legend for graph

## Version 0.0.5.9.5

* [FRONT][FIX] View Document List fix CSS
* [FRONT][FIX] Node Modal fix
...
...
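An aside on the two upgrade steps above: step 1 is plain `psql`, while step 2 runs inside the Haskell repl. A minimal sketch of that session (illustrative only; it assumes `runCmdReplEasy` is the repl helper from the dev tooling, and `migrateFromDirToDb` is the function added to `Gargantext.API.Ngrams.Tools` in this merge):

```haskell
-- Inside `stack --nix repl`:
-- λ> import Gargantext.API.Ngrams.Tools (migrateFromDirToDb)
-- λ> runCmdReplEasy $ migrateFromDirToDb
-- Reads each list's legacy .cbor repo file and upserts it into the new
-- node_stories / node_story_archive_history tables (see the diffs below).
```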
bin/gargantext-cli/CleanCsvCorpus.hs  (view file @ 1781ba63)
...
...
@@ -40,7 +40,7 @@ main = do
   --let q = ["water", "scarcity", "morocco", "shortage","flood"]
   let q = ["gratuit", "gratuité", "culture", "culturel"]

-  eDocs <- CSV.readFile rPath
+  eDocs <- CSV.readCSVFile rPath
   case eDocs of
     Right (h, csvDocs) -> do
       putStrLn $ "Number of documents before:" <> show (V.length csvDocs)
...
...
bin/gargantext-cli/Main.hs  (view file @ 1781ba63)
...
...
@@ -42,7 +42,7 @@ import Gargantext.Core.Types
 import Gargantext.Core.Text.Terms
 import Gargantext.Core.Text.Context
 import Gargantext.Core.Text.Terms.WithList
-import Gargantext.Core.Text.Corpus.Parsers.CSV (readFile, csv_title, csv_abstract, csv_publication_year, unIntOrDec, fromMIntOrDec, defaultYear)
+import Gargantext.Core.Text.Corpus.Parsers.CSV (readCSVFile, csv_title, csv_abstract, csv_publication_year, unIntOrDec, fromMIntOrDec, defaultYear)
 import Gargantext.Core.Text.List.Formats.CSV (csvMapTermList)
 import Gargantext.Core.Text.Terms (terms)
 import Gargantext.Core.Text.Metrics.Count (coocOnContexts, Coocs)
...
...
@@ -86,7 +86,7 @@ main = do
   [corpusFile, termListFile, outputFile] <- getArgs
   --corpus :: IO (DM.IntMap [[Text]])
-  eCorpusFile <- readFile corpusFile
+  eCorpusFile <- readCSVFile corpusFile
   case eCorpusFile of
     Right cf -> do
       let corpus = DM.fromListWith (<>)
...
...
bin/gargantext-client/Auth.hs  (view file @ 1781ba63)

 module Auth where

 import Prelude
-import Data.Maybe
 import Core
 import Options
...
...
bin/gargantext-phylo/Main.hs  (view file @ 1781ba63)
...
...
@@ -109,7 +109,7 @@ csvToDocs parser patterns time path =
               (termsInText patterns $ (csv_title row) <> " " <> (csv_abstract row))
               Nothing
               []
-      ) <$> snd <$> either (\err -> panic $ cs $ "CSV error" <> (show err)) identity <$> Csv.readFile path
+      ) <$> snd <$> either (\err -> panic $ cs $ "CSV error" <> (show err)) identity <$> Csv.readCSVFile path
     Csv' limit -> Vector.toList <$> Vector.take limit <$> Vector.map (\row -> Document (toPhyloDate (csv'_publication_year row) (csv'_publication_month row) (csv'_publication_day row) time)
...
...
bin/install  (view file @ 1781ba63)
...
...
@@ -2,4 +2,4 @@
 #stack install --nix --profile --test --fast --no-install-ghc --skip-ghc-check
-env LANG=C.UTF-8 stack install --nix --test --no-install-ghc --skip-ghc-check
+env LANG=C.UTF-8 stack install --haddock --nix --test --no-install-ghc --skip-ghc-check --no-haddock-deps
cabal.project  (view file @ 1781ba63)
 packages: .

 allow-newer: base, accelerate, servant, time, classy-prelude
+allow-newer: binary, primitive, vector

 -- Patches
 source-repository-package
...
...
@@ -61,12 +62,27 @@ source-repository-package
   location: https://gitlab.iscpif.fr/gargantext/crawlers/hal.git
   tag: 020f5f9b308f5c23c925aedf5fb11f8b4728fb19

+source-repository-package
+  type: git
+  location: https://gitlab.iscpif.fr/gargantext/crawlers/arxiv-api.git
+  tag: f3e517cc40d92e282c5245b23d253d2ca3f802e5
+
 -- Graphs
 source-repository-package
   type: git
   location: https://github.com/alpmestan/haskell-igraph.git
   tag: 9f55eb36639c8e0965c8bc539a57738869f33e9a

+source-repository-package
+  type: git
+  location: https://gitlab.iscpif.fr/gargantext/haskell-infomap.git
+  tag: 6d1d60b952b9b2b272b58fc5539700fd8890ac88
+
+source-repository-package
+  type: git
+  location: https://gitlab.iscpif.fr/gargantext/gargantext-graph.git
+  tag: f41ee8b53c3264e5aa5adc06b2e5b293d2a8c474
+
 -- Data mining
 source-repository-package
   type: git
...
...
@@ -116,16 +132,33 @@ source-repository-package
   tag: fc24987d3af348a677748f226e48d64779a694e9

--- Accelerate
+-- numerical computing
 source-repository-package
   type: git
   location: https://github.com/alpmestan/accelerate.git
   tag: 640b5af87cea94b61c7737d878e6f7f2fca5c015

+source-repository-package
+  type: git
+  location: https://gitlab.iscpif.fr/amestanogullari/accelerate-utility.git
+  tag: a3875fe652d3bb5acb522674c22c6c814c1b4ad0
+
 source-repository-package
   type: git
-  location: https://gitlab.iscpif.fr/anoe/accelerate.git
-  tag: f5c0e0071ec7b6532f9a9cd3eb33d14f340fbcc9
+  location: https://github.com/alpmestan/accelerate-arithmetic.git
+  tag: a110807651036ca2228a76507ee35bbf7aedf87a

 source-repository-package
   type: git
-  location: https://gitlab.iscpif.fr/anoe/accelerate-utility.git
-  tag: 83ada76e78ac10d9559af8ed6bd4064ec81308e4
+  location: https://github.com/alpmestan/accelerate-llvm.git
+  tag: 944f5a4aea35ee6aedb81ea754bf46b131fce9e3
+  subdir: accelerate-llvm/
+          accelerate-llvm-native/

+source-repository-package
+  type: git
+  location: https://github.com/alpmestan/hmatrix.git
+  tag: b9fca8beee0f23c17a6b2001ec834d071709e6e7
+  subdir: packages/base/
+
 -- Wikidata
...
...
@@ -135,7 +168,22 @@ source-repository-package
   tag: 9637a82344bb70f7fa8f02e75db3c081ccd434ce

+-- numerical computing
+source-repository-package
+  type: git
+  location: https://github.com/alpmestan/sparse-linear.git
+  tag: bc6ca8058077b0b5702ea4b88bd4189cfcad267a
+  subdir: sparse-linear/
+
 constraints: unordered-containers==0.2.14.*,
              servant-ekg==0.3.1,
-             time==1.9.3, stm==2.5.0.1
+             time==1.9.3,
+             stm==2.5.0.1,
+             vector==0.12.3.0,
+             eigen==3.3.7.0,
+             cborg==0.2.6.0,
+             primitive==0.7.3.0
+
+package accelerate
+  flags: +debug
\ No newline at end of file
devops/postgres/schema.sql  (view file @ 1781ba63)
...
...
@@ -219,6 +219,33 @@ CREATE TABLE public.rights (
 ALTER TABLE public.rights OWNER TO gargantua;

+------------------------------------------------------------
+-- Node Story
+create table public.node_stories (
+    id SERIAL,
+    node_id INTEGER NOT NULL,
+    archive jsonb DEFAULT '{}'::jsonb NOT NULL,
+    PRIMARY KEY (id),
+    FOREIGN KEY (node_id) REFERENCES public.nodes(id) ON DELETE CASCADE
+);
+ALTER TABLE public.node_stories OWNER TO gargantua;
+
+CREATE UNIQUE INDEX ON public.node_stories USING btree (node_id);
+
+create table public.node_story_archive_history (
+    id SERIAL,
+    node_id INTEGER NOT NULL,
+    ngrams_type_id INTEGER NOT NULL,
+    ngrams_id INTEGER NOT NULL,
+    patch jsonb DEFAULT '{}'::jsonb NOT NULL,
+    PRIMARY KEY (id),
+    FOREIGN KEY (node_id) REFERENCES public.nodes(id) ON DELETE CASCADE,
+    FOREIGN KEY (ngrams_id) REFERENCES public.ngrams(id) ON DELETE CASCADE
+);
+ALTER TABLE public.node_story_archive_history OWNER TO gargantua;
+
 ------------------------------------------------------------
 -- INDEXES
 CREATE INDEX ON public.auth_user USING btree (username varchar_pattern_ops);
...
...
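To make the new storage concrete, here is a minimal, self-contained sketch of reading an archive row with postgresql-simple. The helper is hypothetical (the merge itself goes through Opaleye and decodes into `Archive NgramsState' NgramsStatePatch'`; see `Gargantext.Core.NodeStory` below):

```haskell
{-# LANGUAGE OverloadedStrings #-}

import Data.Aeson (Value)
import Database.PostgreSQL.Simple (Connection, Only (..), query)

-- Hypothetical helper: fetch the raw jsonb `archive` for one node.
getArchiveJson :: Connection -> Int -> IO [Value]
getArchiveJson conn nodeId = do
  rows <- query conn "SELECT archive FROM node_stories WHERE node_id = ?" (Only nodeId)
  pure [v | Only v <- rows]
```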
devops/postgres/upgrade/0.0.6.sql  (new file, 0 → 100644) (view file @ 1781ba63)
create table public.node_stories (
    id SERIAL,
    node_id INTEGER NOT NULL,
    archive jsonb DEFAULT '{}'::jsonb NOT NULL,
    PRIMARY KEY (id),
    FOREIGN KEY (node_id) REFERENCES public.nodes(id) ON DELETE CASCADE
);
ALTER TABLE public.node_stories OWNER TO gargantua;

CREATE UNIQUE INDEX ON public.node_stories USING btree (node_id);

create table public.node_story_archive_history (
    id SERIAL,
    node_id INTEGER NOT NULL,
    ngrams_type_id INTEGER NOT NULL,
    ngrams_id INTEGER NOT NULL,
    patch jsonb DEFAULT '{}'::jsonb NOT NULL,
    PRIMARY KEY (id),
    FOREIGN KEY (node_id) REFERENCES public.nodes(id) ON DELETE CASCADE,
    FOREIGN KEY (ngrams_id) REFERENCES public.ngrams(id) ON DELETE CASCADE
);
ALTER TABLE public.node_story_archive_history OWNER TO gargantua;
-- INSERT INTO node_story_archive_history (node_id, ngrams_type_id, patch) SELECT t.node_id, t.ngrams_type_id, t.patch FROM
-- (
-- WITH q AS (SELECT node_id, history.*, row_number() over (ORDER BY node_id) AS sid
-- FROM node_stories,
-- jsonb_to_recordset(archive->'history') AS history("Authors" jsonb, "Institutes" jsonb, "NgramsTerms" jsonb, "Sources" jsonb))
-- (SELECT node_id, sid, 1 AS ngrams_type_id, "Authors" AS patch FROM q WHERE "Authors" IS NOT NULL)
-- UNION (SELECT node_id, sid, 2 AS ngrams_type_id, "Institutes" AS patch FROM q WHERE "Institutes" IS NOT NULL)
-- UNION (SELECT node_id, sid, 4 AS ngrams_type_id, "NgramsTerms" AS patch FROM q WHERE "NgramsTerms" IS NOT NULL)
-- UNION (SELECT node_id, sid, 3 AS ngrams_type_id, "Sources" AS patch FROM q WHERE "Sources" IS NOT NULL)
-- ORDER BY node_id, ngrams_type_id, sid
-- ) AS t;
gargantext.cabal  (view file @ 1781ba63)

 cabal-version: 1.12

--- This file has been generated from package.yaml by hpack version 0.34.4.
+-- This file has been generated from package.yaml by hpack version 0.34.7.
 --
 -- see: https://github.com/sol/hpack

 name:           gargantext
-version:        0.0.5.9.5
+version:        0.0.6
 synopsis:       Search, map, share
 description:    Please see README.md
 category:       Data
...
...
@@ -96,6 +96,7 @@ library
       Gargantext.Core.Viz.Phylo.SynchronicClustering
       Gargantext.Core.Viz.Types
   other-modules:
+      -- ConcurrentTest
       Gargantext.API.Admin.Auth
       Gargantext.API.Admin.FrontEnd
       Gargantext.API.Admin.Orchestrator
...
...
@@ -111,6 +112,7 @@ library
      Gargantext.API.GraphQL.AsyncTask
      Gargantext.API.GraphQL.IMT
      Gargantext.API.GraphQL.Node
+      Gargantext.API.GraphQL.Team
      Gargantext.API.GraphQL.TreeFirstLevel
      Gargantext.API.GraphQL.User
      Gargantext.API.GraphQL.UserInfo
...
...
@@ -162,6 +164,7 @@ library
      Gargantext.Core.Methods.Graph.BAC.Proxemy
      Gargantext.Core.Methods.Graph.MaxClique
      Gargantext.Core.Methods.Matrix.Accelerate.Utils
+      Gargantext.Core.NodeStoryFile
      Gargantext.Core.Statistics
      Gargantext.Core.Text.Convert
      Gargantext.Core.Text.Corpus.API.Arxiv
...
...
@@ -410,6 +413,7 @@ library
     , jose
     , json-stream
     , lens
+    , lifted-base
     , listsafe
     , located-base
     , logging-effect
...
...
@@ -488,6 +492,7 @@ library
     , transformers-base
     , tuple
     , unordered-containers
+    , uri-encode
    , utf8-string
    , uuid
    , validity
...
...
nix/pkgs.nix  (view file @ 1781ba63)
...
...
@@ -6,7 +6,6 @@ rec {
  hsBuildInputs = [
    ghc
    pkgs.cabal-install
-    pkgs.haskellPackages.llvm-hs
  ];
  nonhsBuildInputs = with pkgs; [
    bzip2
...
...
@@ -18,6 +17,7 @@ rec {
    #haskell-language-server
    hlint
    igraph
+    libffi
    liblapack
    lzma
    pcre
...
...
@@ -31,8 +31,7 @@ rec {
    expat
    icu
    graphviz
-    libffi
-    llvmPackages_9.llvm
+    llvm_9
  ];
  libPaths = pkgs.lib.makeLibraryPath nonhsBuildInputs;
  shellHook = ''
...
...
package.yaml  (view file @ 1781ba63)
...
...
@@ -6,7 +6,7 @@ name: gargantext
 # |  |  |  +----- Layers * : New versions with API additions
 # |  |  |  |  +--- Layers * : New versions without API breaking changes
 # |  |  |  |  |
-version: '0.0.5.9.5'
+version: '0.0.6'
 synopsis: Search, map, share
 description: Please see README.md
 category: Data
...
...
@@ -195,6 +195,7 @@ library:
  - jose
  - json-stream
  - lens
+  - lifted-base
  - listsafe
  - located-base
  - logging-effect
...
...
@@ -274,6 +275,7 @@ library:
  - unordered-containers
  - utf8-string
  - uuid
+  - uri-encode
  - validity
  - vector
  - wai
...
...
server  (view file @ 1781ba63)
...
...
@@ -9,5 +9,5 @@ LOGFILE=$FOLDER"/"$FILE

 mkdir -p $FOLDER

-# env LANG=en_US.UTF-8 ~/.local/bin/gargantext-server --ini gargantext.ini --run Dev +RTS > $LOGFILE 2>&1 & tail -F $LOGFILE # -p
-env LANG=en_US.UTF-8 stack --docker exec gargantext-server -- --ini gargantext.ini --run Dev +RTS > $LOGFILE 2>&1 & tail -F $LOGFILE # -p
+env LANG=en_US.UTF-8 ~/.local/bin/gargantext-server --ini gargantext.ini --run Dev +RTS > $LOGFILE 2>&1 & tail -F $LOGFILE # -p
+# env LANG=en_US.UTF-8 stack --docker exec gargantext-server -- --ini gargantext.ini --run Dev +RTS > $LOGFILE 2>&1 & tail -F $LOGFILE # -p
shell.nix  (view file @ 1781ba63)
src-test/Graph/Clustering.hs  (view file @ 1781ba63)
...
...
@@ -14,7 +14,7 @@ Portability : POSIX
 module Graph.Clustering where

 import Gargantext.API.Ngrams.Types (NgramsTerm(..))
-import Gargantext.Core.Viz.Graph (Graph(..))
+import Gargantext.Core.Viz.Graph (Graph(..), Strength(..))
 import Gargantext.Core.Viz.Graph.Tools (doDistanceMap)
 import Gargantext.Core.Viz.Graph.Tools.IGraph (spinglass)
 import Gargantext.Prelude
...
...
@@ -35,7 +35,7 @@ test :: IO ()
 test = hspec $ do
   describe "Cross" $ do
-    let (distanceMap, _, _) = doDistanceMap Conditional 0 myCooc
+    let (distanceMap, _, _) = doDistanceMap Conditional 0 Weak myCooc
     it "Partition test" $ do
       partitions <- spinglass 1 distanceMap
       let
...
...
src/Gargantext/API.hs  (view file @ 1781ba63)
src/Gargantext/API/Admin/Auth.hs  (view file @ 1781ba63)
...
...
@@ -48,8 +48,6 @@ import GHC.Generics (Generic)
 import Servant
 import Servant.Auth.Server
 import Servant.Job.Async (JobFunction(..), serveJobsAPI)
-import qualified Text.Blaze.Html.Renderer.Text as H
-import qualified Text.Blaze.Html5 as H
 --import qualified Text.Blaze.Html5.Attributes as HA
 import qualified Gargantext.Prelude.Crypto.Auth as Auth
...
...
@@ -59,7 +57,6 @@ import Gargantext.API.Admin.Orchestrator.Types (JobLog(..), AsyncJobs)
 import Gargantext.API.Admin.Types
 import Gargantext.API.Job (jobLogSuccess)
 import Gargantext.API.Prelude (HasJoseError(..), joseError, HasServerError, GargServerC, GargServer, _ServerError)
-import Gargantext.API.Types
 import Gargantext.Core.Mail (MailModel(..), mail)
 import Gargantext.Core.Mail.Types (HasMail, mailSettings)
 import Gargantext.Core.Types.Individu (User(..), Username, GargPassword(..))
...
...
@@ -173,7 +170,7 @@ type ForgotPasswordAPI = Summary "Forgot password POST API"
   :> Post '[JSON] ForgotPasswordResponse
   :<|> Summary "Forgot password GET API"
   :> QueryParam "uuid" Text
-  :> Get '[HTML] Text
+  :> Get '[JSON] ForgotPasswordGet

 forgotPassword :: GargServer ForgotPasswordAPI
...
...
@@ -193,8 +190,8 @@ forgotPasswordPost (ForgotPasswordRequest email) = do
   pure $ ForgotPasswordResponse "ok"

 forgotPasswordGet :: (HasSettings env, HasConnectionPool env, HasJoseError err, HasConfig env, HasMail env, HasServerError err)
-  => Maybe Text -> Cmd' env err Text
-forgotPasswordGet Nothing = pure ""
+  => Maybe Text -> Cmd' env err ForgotPasswordGet
+forgotPasswordGet Nothing = pure $ ForgotPasswordGet ""
 forgotPasswordGet (Just uuid) = do
   let mUuid = fromText uuid
   case mUuid of
...
...
@@ -209,7 +206,7 @@ forgotPasswordGet (Just uuid) = do
 ---------------------

 forgotPasswordGetUser :: (HasSettings env, HasConnectionPool env, HasJoseError err, HasConfig env, HasMail env, HasServerError err)
-  => UserLight -> Cmd' env err Text
+  => UserLight -> Cmd' env err ForgotPasswordGet
 forgotPasswordGetUser (UserLight { .. }) = do
   -- pick some random password
   password <- liftBase gargPass
...
@@ -225,16 +222,7 @@ forgotPasswordGetUser (UserLight { .. }) = do
-- clear the uuid so that the page can't be refreshed
_
<-
updateUserForgotPasswordUUID
$
UserLight
{
userLight_forgot_password_uuid
=
Nothing
,
..
}
pure
$
toStrict
$
H
.
renderHtml
$
H
.
docTypeHtml
$
do
H
.
html
$
do
H
.
head
$
do
H
.
title
"Gargantext - forgot password"
H
.
body
$
do
H
.
h1
"Forgot password"
H
.
p
$
do
H
.
span
"Here is your password (will be shown only once): "
H
.
b
$
H
.
toHtml
password
pure
$
ForgotPasswordGet
password
forgotUserPassword
::
(
HasConnectionPool
env
,
HasConfig
env
,
HasMail
env
)
=>
UserLight
->
Cmd'
env
err
()
...
...
src/Gargantext/API/Admin/Auth/Types.hs  (view file @ 1781ba63)
...
...
@@ -112,6 +112,7 @@ data PathId = PathNode NodeId | PathNodeNode ListId DocId
 ---------------------------

 type Email = Text
+type Password = Text

 data ForgotPasswordRequest = ForgotPasswordRequest { _fpReq_email :: Email }
   deriving (Generic)
...
...
@@ -124,3 +125,9 @@ data ForgotPasswordResponse = ForgotPasswordResponse { _fpRes_status :: Text }
 $(deriveJSON (unPrefix "_fpRes_") ''ForgotPasswordResponse)
 instance ToSchema ForgotPasswordResponse where
   declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_fpRes_")
+
+data ForgotPasswordGet = ForgotPasswordGet { _fpGet_password :: Password }
+  deriving (Generic)
+$(deriveJSON (unPrefix "_fpGet_") ''ForgotPasswordGet)
+instance ToSchema ForgotPasswordGet where
+  declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_fpGet_")
\ No newline at end of file
src/Gargantext/API/Admin/Settings.hs  (view file @ 1781ba63)
...
...
@@ -27,7 +27,7 @@ import Data.Maybe (fromMaybe)
 import Data.Pool (Pool, createPool)
 import Database.PostgreSQL.Simple (Connection, connect, close, ConnectInfo)
 import Gargantext.Core.NodeStory
-import Gargantext.Prelude.Config (GargConfig(..), {-gc_repofilepath,-} readConfig)
+import Gargantext.Prelude.Config ({-GargConfig(..),-} {-gc_repofilepath,-} readConfig)
 import Network.HTTP.Client.TLS (newTlsManager)
 import Servant.Auth.Server (defaultJWTSettings, CookieSettings(..), XsrfCookieSettings(..), defaultCookieSettings, defaultXsrfCookieSettings, readKey, writeKey)
 import Servant.Client (parseBaseUrl)
...
...
@@ -180,7 +180,8 @@ newEnv port file = do
   self_url_env <- parseBaseUrl $ "http://0.0.0.0:" <> show port
   dbParam <- databaseParameters file
   pool <- newPool dbParam
-  nodeStory_env <- readNodeStoryEnv (_gc_repofilepath config_env)
+  --nodeStory_env <- readNodeStoryEnv (_gc_repofilepath config_env)
+  nodeStory_env <- readNodeStoryEnv pool
   scrapers_env <- newJobEnv defaultSettings manager_env
   logger <- newStderrLoggerSet defaultBufSize
   config_mail <- Mail.readConfig file
...
...
src/Gargantext/API/Client.hs  (view file @ 1781ba63)
...
...
@@ -68,7 +68,7 @@ getBackendVersion :: ClientM Text
 -- * auth API
 postAuth :: AuthRequest -> ClientM AuthResponse
 forgotPasswordPost :: ForgotPasswordRequest -> ClientM ForgotPasswordResponse
-forgotPasswordGet :: Maybe Text -> ClientM Text
+forgotPasswordGet :: Maybe Text -> ClientM ForgotPasswordGet
 postForgotPasswordAsync :: ClientM (JobStatus 'Safe JobLog)
 postForgotPasswordAsyncJob :: JobInput Maybe ForgotPasswordAsyncParams -> ClientM (JobStatus 'Safe JobLog)
 killForgotPasswordAsyncJob :: JobID 'Unsafe -> Maybe Limit -> Maybe Offset -> ClientM (JobStatus 'Safe JobLog)
...
...
src/Gargantext/API/Dev.hs  (view file @ 1781ba63)
...
...
@@ -22,7 +22,7 @@ import Gargantext.API.Prelude
 import Gargantext.Core.NodeStory
 import Gargantext.Database.Prelude
 import Gargantext.Prelude
-import Gargantext.Prelude.Config (GargConfig(..), readConfig)
+import Gargantext.Prelude.Config (readConfig)
 import qualified Gargantext.Prelude.Mail as Mail
 import Servant
 import System.IO (FilePath)
...
...
@@ -38,8 +38,9 @@ withDevEnv iniPath k = do
     newDevEnv = do
       cfg     <- readConfig         iniPath
       dbParam <- databaseParameters iniPath
-      nodeStory_env <- readNodeStoryEnv (_gc_repofilepath cfg)
+      --nodeStory_env <- readNodeStoryEnv (_gc_repofilepath cfg)
       pool    <- newPool dbParam
+      nodeStory_env <- readNodeStoryEnv pool
       setts   <- devSettings devJwkFile
       mail    <- Mail.readConfig iniPath
       pure $ DevEnv
...
...
@@ -61,7 +62,7 @@ runCmdReplServantErr = runCmdRepl
 -- the command.
 -- This function is constrained to the DevEnv rather than
 -- using HasConnectionPool and HasRepoVar.
-runCmdDev :: (Show err) => DevEnv -> Cmd'' DevEnv err a -> IO a
+runCmdDev :: Show err => DevEnv -> Cmd'' DevEnv err a -> IO a
 runCmdDev env f =
   (either (fail . show) pure =<< runCmd env f)
     `finally`
...
...
src/Gargantext/API/GraphQL.hs  (view file @ 1781ba63)
...
...
@@ -41,6 +41,7 @@ import qualified Gargantext.API.GraphQL.Node as GQLNode
 import qualified Gargantext.API.GraphQL.User as GQLUser
 import qualified Gargantext.API.GraphQL.UserInfo as GQLUserInfo
 import qualified Gargantext.API.GraphQL.TreeFirstLevel as GQLTree
+import qualified Gargantext.API.GraphQL.Team as GQLTeam
 import Gargantext.API.Prelude (GargM, GargError)
 import Gargantext.API.Types
 import Gargantext.Core.Mail.Types (HasMail)
...
...
@@ -72,12 +73,14 @@ data Query m
  , user_infos :: GQLUserInfo.UserInfoArgs -> m [GQLUserInfo.UserInfo]
  , users      :: GQLUser.UserArgs -> m [GQLUser.User m]
  , tree       :: GQLTree.TreeArgs -> m (GQLTree.TreeFirstLevel m)
+  , team       :: GQLTeam.TeamArgs -> m [GQLTeam.TeamMember]
  } deriving (Generic, GQLType)

 data Mutation m = Mutation
-  { update_user_info :: GQLUserInfo.UserInfoMArgs -> m Int }
-  deriving (Generic, GQLType)
+  { update_user_info       :: GQLUserInfo.UserInfoMArgs -> m Int
+  , delete_team_membership :: GQLTeam.TeamDeleteMArgs -> m [Int]
+  } deriving (Generic, GQLType)

 -- | Possible GraphQL Events, i.e. here we describe how we will
 -- manipulate the data.
...
...
@@ -108,8 +111,10 @@ rootResolver =
                             , node_parent      = GQLNode.resolveNodeParent
                             , user_infos       = GQLUserInfo.resolveUserInfos
                             , users            = GQLUser.resolveUsers
-                             , tree             = GQLTree.resolveTree }
-    , mutationResolver = Mutation { update_user_info = GQLUserInfo.updateUserInfo }
+                             , tree             = GQLTree.resolveTree
+                             , team             = GQLTeam.resolveTeam }
+    , mutationResolver = Mutation { update_user_info       = GQLUserInfo.updateUserInfo
+                                  , delete_team_membership = GQLTeam.deleteTeamMembership }
    , subscriptionResolver = Undefined }

 -- | Main GraphQL "app".
...
...
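With the new `team` query and `delete_team_membership` mutation wired into the root resolver, clients can issue requests shaped as below. A sketch only: the documents are written out as Haskell strings, and the field names follow the `TeamArgs`/`TeamDeleteMArgs` records defined in the new Team module that follows (Morpheus derives GraphQL argument names from record fields):

```haskell
-- Illustrative GraphQL documents matching the new resolvers.
teamQuery :: String
teamQuery = unlines
  [ "query {"
  , "  team(team_node_id: 42) {"
  , "    username"
  , "    shared_folder_id"
  , "  }"
  , "}" ]

deleteMutation :: String
deleteMutation = unlines
  [ "mutation {"
  , "  delete_team_membership(token: \"<jwt>\", shared_folder_id: 7, team_node_id: 42)"
  , "}" ]
```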
src/Gargantext/API/GraphQL/Team.hs  (new file, 0 → 100644) (view file @ 1781ba63)
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DuplicateRecordFields #-}

module Gargantext.API.GraphQL.Team where

import Gargantext.Prelude
import GHC.Generics (Generic)
import Data.Morpheus.Types (GQLType, Resolver, QUERY, ResolverM, lift)
import Data.Text (Text)
import Gargantext.API.Prelude (GargM, GargError)
import Gargantext.Database.Action.Share (membersOf, deleteMemberShip)
import Gargantext.Core.Types (NodeId(..), unNodeId)
import Gargantext.Database.Prelude (HasConnectionPool)
import Gargantext.Database (HasConfig)
import Gargantext.Core.Mail.Types (HasMail)
import Gargantext.Database.Query.Table.Node (getNode)
import Gargantext.API.GraphQL.Utils (authUser, AuthStatus (Invalid, Valid))
import Gargantext.Database.Schema.Node (NodePoly(Node, _node_id), _node_user_id)
import Gargantext.API.Admin.Types (HasSettings)
import Gargantext.Database.Query.Table.User (getUsersWithNodeHyperdata)
import qualified Data.Text as T

data TeamArgs = TeamArgs
  { team_node_id :: Int }
  deriving (Generic, GQLType)

data TeamMember = TeamMember
  { username         :: Text
  , shared_folder_id :: Int }
  deriving (Generic, GQLType)

data TeamDeleteMArgs = TeamDeleteMArgs
  { token            :: Text
  , shared_folder_id :: Int
  , team_node_id     :: Int }
  deriving (Generic, GQLType)

type GqlM e env = Resolver QUERY e (GargM env GargError)
type GqlM' e env a = ResolverM e (GargM env GargError) a

todo :: a
todo = undefined

resolveTeam :: (HasConnectionPool env, HasConfig env, HasMail env)
            => TeamArgs -> GqlM e env [TeamMember]
resolveTeam TeamArgs { team_node_id } = dbTeam team_node_id

dbTeam :: (HasConnectionPool env, HasConfig env, HasMail env)
       => Int -> GqlM e env [TeamMember]
dbTeam nodeId = do
  let nId = NodeId nodeId
  res <- lift $ membersOf nId
  pure $ map toTeamMember res
  where
    toTeamMember :: (Text, NodeId) -> TeamMember
    toTeamMember (username, fId) =
      TeamMember { username
                 , shared_folder_id = unNodeId fId }

-- TODO: list as argument
deleteTeamMembership :: (HasConnectionPool env, HasConfig env, HasMail env, HasSettings env)
                     => TeamDeleteMArgs -> GqlM' e env [Int]
deleteTeamMembership TeamDeleteMArgs { token, shared_folder_id, team_node_id } = do
  teamNode <- lift $ getNode $ NodeId team_node_id
  userNodes <- lift (getUsersWithNodeHyperdata $ uId teamNode)
  case userNodes of
    [] -> panic $ "[deleteTeamMembership] User with id " <> T.pack (show $ uId teamNode) <> " doesn't exist."
    ((_, node_u):_) -> do
      testAuthUser <- lift $ authUser (nId node_u) token
      case testAuthUser of
        Invalid -> panic "[deleteTeamMembership] failed to validate user"
        Valid -> do
          lift $ deleteMemberShip [(NodeId shared_folder_id, NodeId team_node_id)]
  where
    uId Node { _node_user_id } = _node_user_id
    nId Node { _node_id } = _node_id
src/Gargantext/API/Ngrams.hs  (view file @ 1781ba63)
...
...
@@ -106,7 +106,7 @@ import Gargantext.Database.Action.Flow.Types
 import Gargantext.Database.Action.Metrics.NgramsByContext (getOccByNgramsOnlyFast')
 import Gargantext.Database.Admin.Config (userMaster)
 import Gargantext.Database.Admin.Types.Node (NodeType(..))
-import Gargantext.Database.Prelude (HasConnectionPool, HasConfig)
+import Gargantext.Database.Prelude (HasConnectionPool(..), HasConfig)
 import Gargantext.Database.Query.Table.Ngrams hiding (NgramsType(..), ngramsType, ngrams_terms)
 import Gargantext.Database.Query.Table.Node (getNode)
 import Gargantext.Database.Query.Table.Node.Error (HasNodeError)
...
...
@@ -261,7 +261,9 @@ setListNgrams listId ngramsType ns = do
 currentVersion :: HasNodeStory env err m
                => ListId -> m Version
 currentVersion listId = do
-  nls <- getRepo [listId]
+  --nls <- getRepo [listId]
+  pool <- view connPool
+  nls <- liftBase $ getNodeStory pool listId
   pure $ nls ^. unNodeStory . at listId . _Just . a_version
...
...
@@ -282,13 +284,16 @@ commitStatePatch :: (HasNodeStory env err m, HasMail env)
                  => ListId
                  -> Versioned NgramsStatePatch'
                  -> m (Versioned NgramsStatePatch')
-commitStatePatch listId (Versioned p_version p) = do
+commitStatePatch listId (Versioned _p_version p) = do
   -- printDebug "[commitStatePatch]" listId
   var <- getNodeStoryVar [listId]
   vq' <- liftBase $ modifyMVar var $ \ns -> do
     let
       a = ns ^. unNodeStory . at listId . _Just
-      q = mconcat $ take (a ^. a_version - p_version) (a ^. a_history)
+      -- apply patches from version p_version to a ^. a_version
+      -- TODO Check this
+      --q = mconcat $ take (a ^. a_version - p_version) (a ^. a_history)
+      q = mconcat $ a ^. a_history
       (p', q') = transformWith ngramsStatePatchConflictResolution p q
       a' = a & a_version +~ 1
              & a_state %~ act p'
...
@@ -808,5 +813,3 @@ listNgramsChangedSince listId ngramsType version
Versioned
<$>
currentVersion
listId
<*>
pure
True
|
otherwise
=
tableNgramsPull
listId
ngramsType
version
&
mapped
.
v_data
%~
(
==
mempty
)
src/Gargantext/API/Ngrams/Tools.hs  (view file @ 1781ba63)
...
...
@@ -22,13 +22,16 @@ import Data.Hashable (Hashable)
 import Data.Set (Set)
 import Data.Validity
 import Gargantext.API.Ngrams.Types
-import Gargantext.Core.Types (ListType(..), NodeId, ListId)
+import Gargantext.Core.Types (ListType(..), NodeId, NodeType(..), ListId)
+import Gargantext.Database.Prelude (CmdM, HasConnectionPool(..))
 import Gargantext.Database.Schema.Ngrams (NgramsType)
 import Gargantext.Prelude
 import qualified Data.HashMap.Strict as HM
 import qualified Data.Map.Strict as Map
 import qualified Data.Set as Set
 import Gargantext.Core.NodeStory
+import qualified Gargantext.Core.NodeStoryFile as NSF

 mergeNgramsElement :: NgramsRepoElement -> NgramsRepoElement -> NgramsRepoElement
 mergeNgramsElement _neOld neNew = neNew
...
...
@@ -193,3 +196,21 @@ getCoocByNgrams' f (Diagonal diag) m =
  where
    ks = HM.keys m

 ------------------------------------------
+
+migrateFromDirToDb :: (CmdM env err m, HasNodeStory env err m)
+                   => m ()
+migrateFromDirToDb = do
+  pool <- view connPool
+  listIds <- liftBase $ getNodesIdWithType pool NodeList
+  printDebug "[migrateFromDirToDb] listIds" listIds
+  (NodeStory nls) <- NSF.getRepoReadConfig listIds
+  printDebug "[migrateFromDirToDb] nls" nls
+  _ <- mapM (\(nId, a) -> do
+                n <- liftBase $ nodeExists pool nId
+                case n of
+                  False -> pure 0
+                  True  -> liftBase $ upsertNodeArchive pool nId a
+            ) $ Map.toList nls
+  --_ <- nodeStoryIncs (Just $ NodeStory nls) listIds
+  pure ()
src/Gargantext/API/Ngrams/Types.hs  (view file @ 1781ba63)
...
...
@@ -28,7 +28,8 @@ import Data.String (IsString, fromString)
 import Data.Swagger hiding (version, patch)
 import Data.Text (Text, pack, strip)
 import Data.Validity
-import Database.PostgreSQL.Simple.FromField (FromField, fromField, ResultError(ConversionFailed), returnError)
+import Database.PostgreSQL.Simple.FromField (FromField, fromField, fromJSONField, ResultError(ConversionFailed), returnError)
+import Database.PostgreSQL.Simple.ToField (ToField, toJSONField, toField)
 import GHC.Generics (Generic)
 import Gargantext.Core.Text (size)
 import Gargantext.Core.Types (ListType(..), ListId, NodeId, TODO)
...
@@ -124,19 +125,14 @@ instance (ToJSONKey a, ToSchema a) => ToSchema (MSet a) where
 ------------------------------------------------------------------------
 newtype NgramsTerm = NgramsTerm { unNgramsTerm :: Text }
   deriving (Ord, Eq, Show, Generic, ToJSONKey, ToJSON, FromJSON, Semigroup, Arbitrary, Serialise, ToSchema, Hashable, NFData)

 instance IsHashable NgramsTerm where
   hash (NgramsTerm t) = hash t
 instance Monoid NgramsTerm where
   mempty = NgramsTerm ""
 instance FromJSONKey NgramsTerm where
   fromJSONKey = FromJSONKeyTextParser $ \t -> pure $ NgramsTerm $ strip t
 instance IsString NgramsTerm where
   fromString s = NgramsTerm $ pack s
 instance FromField NgramsTerm
   where
     fromField field mb = do
...
...
@@ -147,6 +143,9 @@ instance FromField NgramsTerm
           $ List.intercalate " " [ "cannot parse hyperdata for JSON: ", show v ]

+instance ToField NgramsTerm where
+  toField (NgramsTerm n) = toField n
+
 data RootParent = RootParent
   { _rp_root :: NgramsTerm
...
...
@@ -448,13 +447,16 @@ instance ToSchema NgramsPatch where
          , ("old", nreSch)
          , ("new", nreSch)
          ]

 instance Arbitrary NgramsPatch where
   arbitrary = frequency [ (9, NgramsPatch <$> arbitrary <*> (replace <$> arbitrary <*> arbitrary))
                         , (1, NgramsReplace <$> arbitrary <*> arbitrary)
                         ]

 instance Serialise NgramsPatch
+instance FromField NgramsPatch where
+  fromField = fromJSONField
+instance ToField NgramsPatch where
+  toField = toJSONField

 instance Serialise (Replace ListType)
 instance Serialise ListType
...
...
@@ -512,7 +514,6 @@ instance Action (PairPatch (PatchMSet NgramsTerm) (Replace ListType)) NgramsRepo
 instance Applicable NgramsPatch (Maybe NgramsRepoElement) where
   applicable p = applicable (p ^. _NgramsPatch)

 instance Action NgramsPatch (Maybe NgramsRepoElement) where
   act p = act (p ^. _NgramsPatch)
...
...
@@ -524,7 +525,11 @@ instance Serialise (PatchMap NgramsTerm NgramsPatch)

 instance FromField NgramsTablePatch
   where
-    fromField = fromField'
+    fromField = fromJSONField
+    --fromField = fromField'
+
+instance ToField NgramsTablePatch where
+  toField = toJSONField

 instance FromField (PatchMap TableNgrams.NgramsType (PatchMap NodeId NgramsTablePatch))
   where
...
@@ -751,4 +756,3 @@ instance ToSchema UpdateTableNgramsCharts where
 ------------------------------------------------------------------------
 type NgramsList = (Map TableNgrams.NgramsType (Versioned NgramsTableMap))
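The `fromJSONField`/`toJSONField` pattern above is what lets the patch types ride their existing Aeson instances straight into `jsonb` columns. A minimal standalone sketch of the same pattern, with a hypothetical payload type standing in for `NgramsTablePatch`:

```haskell
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric  #-}

import Data.Aeson (FromJSON, ToJSON)
import Database.PostgreSQL.Simple.FromField (FromField (..), fromJSONField)
import Database.PostgreSQL.Simple.ToField (ToField (..), toJSONField)
import GHC.Generics (Generic)

-- Hypothetical payload type; it round-trips through a jsonb column
-- using nothing but its Aeson instances.
data Payload = Payload { label :: String, weight :: Int }
  deriving (Generic, FromJSON, ToJSON)

instance FromField Payload where fromField = fromJSONField
instance ToField   Payload where toField   = toJSONField
```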
src/Gargantext/API/Node/File.hs  (view file @ 1781ba63)
...
...
@@ -76,7 +76,7 @@ fileDownload uId nId = do
   let (HyperdataFile { _hff_name = name'
                      , _hff_path = path }) = node ^. node_hyperdata

-  Contents c <- GargDB.readFile $ unpack path
+  Contents c <- GargDB.readGargFile $ unpack path

   let (mMime, _) = DMT.guessType DMT.defaultmtd False $ unpack name'
       mime = case mMime of
...
...
src/Gargantext/Core/Mail.hs  (view file @ 1781ba63)
...
...
@@ -12,6 +12,7 @@ Portability : POSIX
 module Gargantext.Core.Mail where

 import Control.Lens (view)
+import Network.URI.Encode (encodeText)
 import Data.Text (Text, unlines, splitOn)
 import Gargantext.Core.Types.Individu
 import Gargantext.Database.Schema.User (UserLight(..))
...
...
@@ -90,7 +91,7 @@ bodyWith server (ForgotPassword { user = UserLight { userLight_forgot_password_u
          , forgot_password_link server uuid ]

 forgot_password_link :: ServerAddress -> Text -> Text
-forgot_password_link server uuid = server <> "/api/v1.0/forgot-password?uuid=" <> uuid
+forgot_password_link server uuid = server <> "/#/forgotPassword?uuid=" <> uuid <> "&server=" <> encodeText server

 ------------------------------------------------------------------------
 email_subject :: MailModel -> Text
...
...
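The reworked link drops the backend API route in favour of the frontend's `#/forgotPassword` route, and since the server address now travels as a query parameter it has to be percent-encoded; that is what `Network.URI.Encode.encodeText` is for. A quick standalone sketch of the construction:

```haskell
{-# LANGUAGE OverloadedStrings #-}

import Data.Text (Text)
import qualified Data.Text.IO as T
import Network.URI.Encode (encodeText)

link :: Text -> Text -> Text
link server uuid =
  server <> "/#/forgotPassword?uuid=" <> uuid <> "&server=" <> encodeText server

main :: IO ()
main = T.putStrLn $ link "https://demo.example.org" "abc-123"
-- https://demo.example.org/#/forgotPassword?uuid=abc-123&server=https%3A%2F%2Fdemo.example.org
```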
src/Gargantext/Core/NodeStory.hs  (view file @ 1781ba63)
...
...
@@ -10,6 +10,30 @@ Portability : POSIX
 A Node Story is a Map between NodeId and an Archive (with state,
 version and history) for that node.

+A couple of words on how this is implemented.
+
+The first version used files, storing the Archive for each NodeId in a
+separate .cbor file.
+
+For performance reasons, this was rewritten to use the DB: the table
+`node_stories` contains two columns, `node_id` and `archive`.
+
+Next, it was observed that `a_history` in `Archive` takes up a lot of
+space, so a new table, `node_story_archive_history`, was created with
+columns `node_id`, `ngrams_type_id` and `patch`. This is because each
+history item is in fact a map from `NgramsType` to `NgramsTablePatch`
+(see the `NgramsStatePatch'` type).
+
+Moreover, since ~G.A.Ngrams.commitStatePatch~ uses only the current
+state with only recent history items, it is not necessary to load the
+whole history into memory: it is kept in the DB (history is immutable)
+and only recent changes are added to `a_history`. Please note that this
+record is cleared whenever the `Archive` is saved.
+
 TODO:
 - remove
 - filter
...
...
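To make the hand-off between `a_history` and `node_story_archive_history` concrete: each in-memory history item is a map from ngrams type to per-term patches, and flattening it yields one DB row per (type, term) pair. A toy sketch with simplified stand-in types (the real ones, `NgramsStatePatch'` and friends, are defined further down in this module):

```haskell
import qualified Data.Map.Strict as Map

type NgramsTypeId = Int
type Term         = String
type PatchJson    = String  -- stands in for the jsonb `patch` column

-- One history item fans out into one row per (ngrams type, term) pair.
flattenHistoryItem :: Int  -- node_id
                   -> Map.Map NgramsTypeId (Map.Map Term PatchJson)
                   -> [(Int, NgramsTypeId, Term, PatchJson)]
flattenHistoryItem nodeId item =
  [ (nodeId, ntId, term, patch)
  | (ntId, termPatches) <- Map.toList item
  , (term, patch)       <- Map.toList termPatches ]
```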
@@ -17,36 +41,75 @@ TODO:
 -}

 {-# OPTIONS_GHC -fno-warn-orphans #-}
-{-# LANGUAGE TemplateHaskell #-}
 {-# LANGUAGE Arrows #-}
+{-# LANGUAGE ConstraintKinds #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE TemplateHaskell #-}

-module Gargantext.Core.NodeStory where
+module Gargantext.Core.NodeStory
+  ( HasNodeStory
+  , HasNodeStoryEnv
+  , hasNodeStory
+  , HasNodeStoryVar
+  , hasNodeStoryVar
+  , HasNodeStorySaver
+  , hasNodeStorySaver
+  , NodeStory(..)
+  , NgramsStatePatch'
+  , NodeListStory
+  , initNodeListStoryMock
+  , NodeStoryEnv(..)
+  , initNodeStory
+  , nse_getter
+  , nse_saver
+  , nse_var
+  , unNodeStory
+  , getNodeArchiveHistory
+  , Archive(..)
+  , initArchive
+  , a_history
+  , a_state
+  , a_version
+  , nodeExists
+  , getNodesIdWithType
+  , readNodeStoryEnv
+  , upsertNodeArchive
+  , getNodeStory )
+  where

 -- import Debug.Trace (traceShow)
-import Codec.Serialise (serialise, deserialise)
 import Codec.Serialise.Class
+import Control.Arrow (returnA)
 import Control.Concurrent (MVar(), withMVar, newMVar, modifyMVar_)
 import Control.Debounce (mkDebounce, defaultDebounceSettings, debounceFreq, debounceAction)
-import Control.Lens (makeLenses, Getter, (^.))
+import Control.Exception (catch, throw, SomeException(..))
+import Control.Lens (makeLenses, Getter, (^.), (.~), traverse)
 import Control.Monad.Except
 import Control.Monad.Reader
 import Data.Aeson hiding ((.=), decode)
+import Data.ByteString.Char8 (hPutStrLn)
 import Data.Map.Strict (Map)
+import Data.Maybe (catMaybes, mapMaybe)
 import Data.Monoid
+import Data.Pool (Pool, withResource)
 import Data.Semigroup
+import qualified Database.PostgreSQL.Simple as PGS
+import Database.PostgreSQL.Simple.SqlQQ (sql)
+import Database.PostgreSQL.Simple.FromField (FromField(fromField), fromJSONField)
+import Data.Profunctor.Product.TH (makeAdaptorAndInstance)
 import GHC.Generics (Generic)
 import Gargantext.API.Ngrams.Types
-import Gargantext.Core.Types (NodeId)
+import Gargantext.Core.Types (NodeId(..), NodeType)
 import Gargantext.Core.Utils.Prefix (unPrefix)
+import Gargantext.Database.Admin.Config (nodeTypeId)
+import Gargantext.Database.Prelude (CmdM', HasConnectionPool, HasConfig)
 import Gargantext.Database.Query.Table.Node.Error (HasNodeError())
 import Gargantext.Prelude
-import System.Directory (renameFile, createDirectoryIfMissing, doesFileExist, removeFile)
-import System.IO (FilePath, hClose)
-import System.IO.Temp (withTempFile)
-import qualified Data.ByteString.Lazy as DBL
-import qualified Data.List as List
+import Opaleye (Column, DefaultFromField(..), Insert(..), Select, SqlInt4, SqlJsonb, Table, Update(..), (.==), fromPGSFromField, rCount, restrict, runInsert, runSelect, runUpdate, selectTable, sqlInt4, sqlValueJSONB, tableField, updateEasy)
+import Opaleye.Internal.Table (Table(..))
+import System.IO (stderr)
 import qualified Data.Map.Strict as Map
+import qualified Data.Map.Strict.Patch as PM
 import qualified Gargantext.Database.Query.Table.Ngrams as TableNgrams

 ------------------------------------------------------------------------
...
...
@@ -79,183 +142,12 @@ class HasNodeStorySaver env where
   hasNodeStorySaver :: Getter env (IO ())

 ------------------------------------------------------------------------
-readNodeStoryEnv :: NodeStoryDir -> IO NodeStoryEnv
-readNodeStoryEnv nsd = do
-  mvar <- nodeStoryVar nsd Nothing [0]
-  saver <- mkNodeStorySaver nsd mvar
-  pure $ NodeStoryEnv { _nse_var    = mvar
-                      , _nse_saver  = saver
-                      , _nse_getter = nodeStoryVar nsd (Just mvar) }
-
-------------------------------------------------------------------------
-mkNodeStorySaver :: NodeStoryDir -> MVar NodeListStory -> IO (IO ())
-mkNodeStorySaver nsd mvns = mkDebounce settings
-  where
-    settings = defaultDebounceSettings
-                 { debounceAction = withMVar mvns (writeNodeStories nsd)
-                 , debounceFreq = 1 * minute
---                 , debounceEdge = trailingEdge -- Trigger on the trailing edge
-                 }
-    minute = 60 * second
-    second = 10^(6 :: Int)
-
-nodeStoryVar :: NodeStoryDir -> Maybe (MVar NodeListStory) -> [NodeId] -> IO (MVar NodeListStory)
-nodeStoryVar nsd Nothing ni = nodeStoryIncs nsd Nothing ni >>= newMVar
-nodeStoryVar nsd (Just mv) ni = do
-  _ <- modifyMVar_ mv $ \mv' -> (nodeStoryIncs nsd (Just mv') ni)
-  pure mv
-
-nodeStoryInc :: NodeStoryDir -> Maybe NodeListStory -> NodeId -> IO NodeListStory
-nodeStoryInc nsd (Just ns@(NodeStory nls)) ni = do
-  case Map.lookup ni nls of
-    Nothing -> do
-      (NodeStory nls') <- nodeStoryRead nsd ni
-      pure $ NodeStory $ Map.union nls nls'
-    Just _ -> pure ns
-nodeStoryInc nsd Nothing ni = nodeStoryRead nsd ni
-
-nodeStoryIncs :: NodeStoryDir -> Maybe NodeListStory -> [NodeId] -> IO NodeListStory
-nodeStoryIncs _ Nothing [] = panic "nodeStoryIncs: Empty"
-nodeStoryIncs nsd (Just nls) ns = foldM (\m n -> nodeStoryInc nsd (Just m) n) nls ns
-nodeStoryIncs nsd Nothing (ni:ns) = do
-  m <- nodeStoryRead nsd ni
-  nodeStoryIncs nsd (Just m) ns
-
-nodeStoryDec :: NodeStoryDir -> NodeListStory -> NodeId -> IO NodeListStory
-nodeStoryDec nsd ns@(NodeStory nls) ni = do
-  case Map.lookup ni nls of
-    Nothing -> do
-      -- we make sure the corresponding file repo is really removed
-      _ <- nodeStoryRemove nsd ni
-      pure ns
-    Just _ -> do
-      let ns' = Map.filterWithKey (\k _v -> k /= ni) nls
-      _ <- nodeStoryRemove nsd ni
-      pure $ NodeStory ns'
-
--- | TODO lock
-nodeStoryRead :: NodeStoryDir -> NodeId -> IO NodeListStory
-nodeStoryRead nsd ni = do
-  _repoDir <- createDirectoryIfMissing True nsd
-  let nsp = nodeStoryPath nsd ni
-  exists <- doesFileExist nsp
-  if exists
-    then deserialise <$> DBL.readFile nsp
-    else pure (initNodeStory ni)
-
-nodeStoryRemove :: NodeStoryDir -> NodeId -> IO ()
-nodeStoryRemove nsd ni = do
-  let nsp = nodeStoryPath nsd ni
-  exists <- doesFileExist nsp
-  if exists
-    then removeFile nsp
-    else pure ()
-
-nodeStoryRead_test :: NodeStoryDir -> NodeId -> IO (Maybe [TableNgrams.NgramsType])
-nodeStoryRead_test nsd ni = nodeStoryRead nsd ni >>= \n -> pure
-                          $ fmap Map.keys
-                          $ fmap _a_state
-                          $ Map.lookup ni
-                          $ _unNodeStory n
-
-------------------------------------------------------------------------
-type NodeStoryDir = FilePath
-
-writeNodeStories :: NodeStoryDir -> NodeListStory -> IO ()
-writeNodeStories fp nls = do
-  _done <- mapM (writeNodeStory fp) $ splitByNode nls
-  -- printDebug "[writeNodeStories]" done
-  pure ()
-
-writeNodeStory :: NodeStoryDir -> (NodeId, NodeListStory) -> IO ()
-writeNodeStory rdfp (n, ns) = saverAction' rdfp n ns
-
-splitByNode :: NodeListStory -> [(NodeId, NodeListStory)]
-splitByNode (NodeStory m) =
-  List.map (\(n,a) -> (n, NodeStory $ Map.singleton n a)) $ Map.toList m
-
-saverAction' :: Serialise a => NodeStoryDir -> NodeId -> a -> IO ()
-saverAction' repoDir nId a = do
-  withTempFile repoDir ((cs $ show nId) <> "-tmp-repo.cbor") $ \fp h -> do
-    -- printDebug "[repoSaverAction]" fp
-    DBL.hPut h $ serialise a
-    hClose h
-    renameFile fp (nodeStoryPath repoDir nId)
-
-nodeStoryPath :: NodeStoryDir -> NodeId -> FilePath
-nodeStoryPath repoDir nId = repoDir <> "/" <> filename
-  where
-    filename = "repo" <> "-" <> (cs $ show nId) <> ".cbor"
-
-------------------------------------------------------------------------
--- TODO : repo Migration TODO TESTS
-{-
-repoMigration :: NodeStoryDir -> NgramsRepo -> IO ()
-repoMigration fp r = writeNodeStories fp (repoToNodeListStory r)
-
-repoToNodeListStory :: NgramsRepo -> NodeListStory
-repoToNodeListStory (Repo _v s h) = NodeStory $ Map.fromList ns
-  where
-    s' = ngramsState_migration s
-    h' = ngramsStatePatch_migration h
-    ns = List.map (\(n,ns')
-                    -> (n, let hs = fromMaybe [] (Map.lookup n h') in
-                               Archive { _a_version = List.length hs
-                                       , _a_state = ns'
-                                       , _a_history = hs }
-                       )
-                  ) $ Map.toList s'
-
-ngramsState_migration :: NgramsState
-                      -> Map NodeId NgramsState'
-ngramsState_migration ns =
-    Map.fromListWith (Map.union) $
-    List.concat $
-      map (\(nt, nTable)
-            -> map (\(nid, table)
-                     -> (nid, Map.singleton nt table)
-                   ) $ Map.toList nTable
-          ) $ Map.toList ns
-
-ngramsStatePatch_migration :: [NgramsStatePatch]
-                           -> Map NodeId [NgramsStatePatch']
-ngramsStatePatch_migration np' = Map.fromListWith (<>)
-                               $ List.concat
-                               $ map toPatch np'
-  where
-    toPatch :: NgramsStatePatch -> [(NodeId, [NgramsStatePatch'])]
-    toPatch p =
-      List.concat $
-        map (\(nt, nTable)
-              -> map (\(nid, table)
-                       -> (nid, [fst $ Patch.singleton nt table])
-                     ) $ Patch.toList nTable
-            ) $ Patch.toList p
--}
-
 ------------------------------------------------------------------------
 {- | Node Story for each NodeType where the Key of the Map is NodeId
   TODO : generalize for any NodeType, let's start with NodeList which
   is implemented already
 -}
-data NodeStory s p = NodeStory { _unNodeStory :: Map NodeId (Archive s p) }
+newtype NodeStory s p = NodeStory { _unNodeStory :: Map NodeId (Archive s p) }
   deriving (Generic, Show)

 instance (FromJSON s, FromJSON p) => FromJSON (NodeStory s p)
...
...
@@ -267,6 +159,14 @@ data Archive s p = Archive
   , _a_state   :: !s
   , _a_history :: ![p]
+    -- first patch in the list is the most recent
+    -- We use `take` in `commitStatePatch`, that's why.
+    -- History is immutable, we just insert things on top of existing
+    -- list.
+    -- We don't need to store the whole history in memory, this
+    -- structure holds only recent history, the one that will be
+    -- inserted to the DB.
   }
   deriving (Generic, Show)
...
...
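Since the newest patch sits at the head of `a_history`, catching a client up from version `v` to the server's version means taking the first `serverVersion - v` items, which is exactly the `take` that `commitStatePatch` performs. A toy illustration (hypothetical names, not this module's API):

```haskell
-- History is newest-first: a client at version clientV is missing
-- exactly the first (serverV - clientV) patches.
missingPatches :: Int -> Int -> [p] -> [p]
missingPatches serverV clientV history = take (serverV - clientV) history

-- missingPatches 5 3 ["p5","p4","p3","p2","p1"] == ["p5","p4"]
```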
@@ -278,18 +178,24 @@ type NodeListStory = NodeStory NgramsState' NgramsStatePatch'
 type NgramsState' = Map TableNgrams.NgramsType NgramsTableMap
 type NgramsStatePatch' = PatchMap TableNgrams.NgramsType NgramsTablePatch
 instance Serialise NgramsStatePatch'
+instance FromField (Archive NgramsState' NgramsStatePatch') where
+  fromField = fromJSONField
+instance DefaultFromField SqlJsonb (Archive NgramsState' NgramsStatePatch') where
+  defaultFromField = fromPGSFromField

 -- TODO Semigroup instance for unions
 -- TODO check this
 instance (Semigroup s, Semigroup p) => Semigroup (Archive s p) where
   (<>) (Archive { _a_history = p }) (Archive { _a_version = v'
                                              , _a_state = s'
                                              , _a_history = p' }) =
     Archive { _a_version = v'
             , _a_state = s'
             , _a_history = p' <> p }

-instance Monoid (Archive NgramsState' NgramsStatePatch') where
+instance (Monoid s, Semigroup p) => Monoid (Archive s p) where
   mempty = Archive { _a_version = 0
                    , _a_state = mempty
                    , _a_history = [] }
...
...
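The Semigroup instance above is right-biased for version and state but keeps both histories, appending the older (left) history behind the newer one. A toy check of that direction (hypothetical mini-`Archive`, not the real type):

```haskell
data Arch = Arch { version :: Int, history :: [String] }
  deriving Show

-- Mirrors the instance above: the right operand wins on version and
-- state; histories concatenate newest-first.
instance Semigroup Arch where
  Arch _ h <> Arch v' h' = Arch v' (h' <> h)

-- Arch 1 ["old"] <> Arch 2 ["new"]  ==>  Arch 2 ["new","old"]
```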
@@ -302,13 +208,11 @@ instance (ToJSON s, ToJSON p) => ToJSON (Archive s p) where
   toEncoding = genericToEncoding $ unPrefix "_a_"

 ------------------------------------------------------------------------
-initNodeStory :: Monoid s => NodeId -> NodeStory s p
+initNodeStory :: (Monoid s, Semigroup p) => NodeId -> NodeStory s p
 initNodeStory ni = NodeStory $ Map.singleton ni initArchive

-initArchive :: Monoid s => Archive s p
-initArchive = Archive { _a_version = 0
-                      , _a_state = mempty
-                      , _a_history = [] }
+initArchive :: (Monoid s, Semigroup p) => Archive s p
+initArchive = mempty

 initNodeListStoryMock :: NodeListStory
 initNodeListStoryMock = NodeStory $ Map.singleton nodeListId archive
...
...
@@ -331,3 +235,243 @@ initNodeListStoryMock = NodeStory $ Map.singleton nodeListId archive
 makeLenses ''NodeStoryEnv
 makeLenses ''NodeStory
 makeLenses ''Archive
+
+-----------------------------------------
+data NodeStoryPoly a b = NodeStoryDB { node_id :: a
+                                     , archive :: b }
+  deriving (Eq)
+
+type ArchiveQ = Archive NgramsState' NgramsStatePatch'
+
+type NodeStoryWrite = NodeStoryPoly (Column SqlInt4) (Column SqlJsonb)
+type NodeStoryRead = NodeStoryPoly (Column SqlInt4) (Column SqlJsonb)
+
+$(makeAdaptorAndInstance "pNodeStory" ''NodeStoryPoly)
+
+runPGSExecuteMany :: (PGS.ToRow q) => Pool PGS.Connection -> PGS.Query -> [q] -> IO Int64
+runPGSExecuteMany pool qs a = withResource pool $ \c -> catch (PGS.executeMany c qs a) (printError c)
+  where
+    printError _c (SomeException e) = do
+      --q' <- PGS.formatQuery c qs a
+      --hPutStrLn stderr q'
+      throw (SomeException e)
+
+runPGSQuery :: (PGS.FromRow r, PGS.ToRow q) => Pool PGS.Connection -> PGS.Query -> q -> IO [r]
+runPGSQuery pool q a = withResource pool $ \c -> catch (PGS.query c q a) (printError c)
+  where
+    printError c (SomeException e) = do
+      q' <- PGS.formatQuery c q a
+      hPutStrLn stderr q'
+      throw (SomeException e)
+
+nodeExists :: Pool PGS.Connection -> NodeId -> IO Bool
+nodeExists pool nId = (== [PGS.Only True])
+  <$> runPGSQuery pool [sql| SELECT true FROM nodes WHERE id = ? AND ? |] (nId, True)
+
+getNodesIdWithType :: Pool PGS.Connection -> NodeType -> IO [NodeId]
+getNodesIdWithType pool nt = do
+  ns <- runPGSQuery pool query (nodeTypeId nt, True)
+  pure $ map (\(PGS.Only nId) -> NodeId nId) ns
+  where
+    query :: PGS.Query
+    query = [sql| SELECT id FROM nodes WHERE typename = ? AND ? |]
+
+nodeStoryTable :: Table NodeStoryRead NodeStoryWrite
+nodeStoryTable =
+  Table "node_stories"
+    (pNodeStory NodeStoryDB { node_id = tableField "node_id"
+                            , archive = tableField "archive" })
+
+nodeStorySelect :: Select NodeStoryRead
+nodeStorySelect = selectTable nodeStoryTable
+
+-- TODO Check ordering, "first patch in the _a_history list is the most recent"
+getNodeArchiveHistory :: Pool PGS.Connection -> NodeId -> IO [NgramsStatePatch']
+getNodeArchiveHistory pool nodeId = do
+  as <- runPGSQuery pool query (nodeId, True)
+  let asTuples = mapMaybe (\(ngrams_type_id, ngrams, patch)
+                             -> (\ntId -> (ntId, ngrams, patch))
+                                <$> (TableNgrams.fromNgramsTypeId ngrams_type_id)) as
+  pure $ (\(ntId, terms, patch)
+            -> fst $ PM.singleton ntId (NgramsTablePatch $ fst $ PM.singleton terms patch))
+         <$> asTuples
+  where
+    query :: PGS.Query
+    query = [sql| SELECT ngrams_type_id, terms, patch
+                  FROM node_story_archive_history
+                  JOIN ngrams ON ngrams.id = ngrams_id
+                  WHERE node_id = ? AND ? |]
+
+insertNodeArchiveHistory :: Pool PGS.Connection -> NodeId -> [NgramsStatePatch'] -> IO ()
+insertNodeArchiveHistory _ _ [] = pure ()
+insertNodeArchiveHistory pool nodeId (h:hs) = do
+  let tuples = mconcat $ (\(nType, (NgramsTablePatch patch))
+                            -> (\(term, p) -> (nodeId, TableNgrams.ngramsTypeId nType, term, p))
+                               <$> PM.toList patch)
+                         <$> PM.toList h :: [(NodeId, TableNgrams.NgramsTypeId, NgramsTerm, NgramsPatch)]
+  tuplesM <- mapM (\(nId, nTypeId, term, patch) -> do
+                      ngrams <- runPGSQuery pool ngramsQuery (term, True)
+                      pure $ (\(PGS.Only termId) -> (nId, nTypeId, termId, term, patch))
+                             <$> (headMay ngrams)
+                  ) tuples :: IO [Maybe (NodeId, TableNgrams.NgramsTypeId, Int, NgramsTerm, NgramsPatch)]
+  _ <- runPGSExecuteMany pool query $ ((\(nId, nTypeId, termId, _term, patch)
+                                          -> (nId, nTypeId, termId, patch))
+                                       <$> (catMaybes tuplesM))
+  _ <- insertNodeArchiveHistory pool nodeId hs
+  pure ()
+  where
+    ngramsQuery :: PGS.Query
+    ngramsQuery = [sql| SELECT id FROM ngrams WHERE terms = ? AND ? |]
+    query :: PGS.Query
+    query = [sql| INSERT INTO node_story_archive_history(node_id, ngrams_type_id, ngrams_id, patch)
+                  VALUES (?, ?, ?, ?) |]
+
+getNodeStory :: Pool PGS.Connection -> NodeId -> IO NodeListStory
+getNodeStory pool (NodeId nodeId) = do
+  res <- withResource pool $ \c -> runSelect c query :: IO [NodeStoryPoly NodeId ArchiveQ]
+  withArchive <- mapM (\(NodeStoryDB { node_id = nId, archive = Archive { .. } }) -> do
+                          --a <- getNodeArchiveHistory pool nId
+                          let a = [] :: [NgramsStatePatch']
+                          -- Don't read whole history. Only state is needed and most recent changes.
+                          pure (nId, Archive { _a_history = a, .. })) res
+  pure $ NodeStory $ Map.fromListWith (<>) withArchive
+  --pure $ NodeStory $ Map.fromListWith (<>) $ (\(NodeStoryDB nId a) -> (nId, a)) <$> res
+  where
+    query :: Select NodeStoryRead
+    query = proc () -> do
+      row@(NodeStoryDB node_id _) <- nodeStorySelect -< ()
+      restrict -< node_id .== sqlInt4 nodeId
+      returnA -< row
+
+insertNodeArchive :: Pool PGS.Connection -> NodeId -> ArchiveQ -> IO Int64
+insertNodeArchive pool nodeId@(NodeId nId) (Archive { .. }) = do
+  ret <- withResource pool $ \c -> runInsert c insert
+  -- NOTE: It is assumed that the most recent change is the first in the
+  -- list, so we save these in reverse order
+  insertNodeArchiveHistory pool nodeId $ reverse _a_history
+  pure ret
+  where
+    emptyHistory = [] :: [NgramsStatePatch']
+    insert = Insert { iTable = nodeStoryTable
+                    , iRows  = [NodeStoryDB { node_id = sqlInt4 nId
+                                            , archive = sqlValueJSONB $ Archive { _a_history = emptyHistory, .. } }]
+                    , iReturning = rCount
+                    , iOnConflict = Nothing }
+
+updateNodeArchive :: Pool PGS.Connection -> NodeId -> ArchiveQ -> IO Int64
+updateNodeArchive pool nodeId@(NodeId nId) (Archive { .. }) = do
+  ret <- withResource pool $ \c -> runUpdate c update
+  -- NOTE: It is assumed that the most recent change is the first in the
+  -- list, so we save these in reverse order
+  insertNodeArchiveHistory pool nodeId $ reverse _a_history
+  pure ret
+  where
+    emptyHistory = [] :: [NgramsStatePatch']
+    update = Update { uTable = nodeStoryTable
+                    , uUpdateWith = updateEasy (\(NodeStoryDB { node_id }) ->
+                        NodeStoryDB { archive = sqlValueJSONB $ Archive { _a_history = emptyHistory, .. }
+                                    , .. })
+                    , uWhere = (\row -> node_id row .== sqlInt4 nId)
+                    , uReturning = rCount }
+
+-- nodeStoryRemove :: Pool PGS.Connection -> NodeId -> IO Int64
+-- nodeStoryRemove pool (NodeId nId) = withResource pool $ \c -> runDelete c delete
+--   where
+--     delete = Delete { dTable = nodeStoryTable
+--                     , dWhere = (\row -> node_id row .== sqlInt4 nId)
+--                     , dReturning = rCount }
+
+upsertNodeArchive :: Pool PGS.Connection -> NodeId -> ArchiveQ -> IO Int64
+upsertNodeArchive pool nId a = do
+  (NodeStory m) <- getNodeStory pool nId
+  case Map.lookup nId m of
+    Nothing -> insertNodeArchive pool nId a
+    Just _  -> updateNodeArchive pool nId a
+
+writeNodeStories :: Pool PGS.Connection -> NodeListStory -> IO ()
+writeNodeStories pool (NodeStory nls) = do
+  _ <- mapM (\(nId, a) -> upsertNodeArchive pool nId a) $ Map.toList nls
+  pure ()
+
+-- | Returns a `NodeListStory`, updating the given one for given `NodeId`
+nodeStoryInc :: Pool PGS.Connection -> Maybe NodeListStory -> NodeId -> IO NodeListStory
+nodeStoryInc pool Nothing nId = getNodeStory pool nId
+nodeStoryInc pool (Just ns@(NodeStory nls)) nId = do
+  case Map.lookup nId nls of
+    Nothing -> do
+      (NodeStory nls') <- getNodeStory pool nId
+      pure $ NodeStory $ Map.union nls nls'
+    Just _ -> pure ns
+
+nodeStoryIncs :: Pool PGS.Connection -> Maybe NodeListStory -> [NodeId] -> IO NodeListStory
+nodeStoryIncs _ Nothing [] = pure $ NodeStory $ Map.empty
+nodeStoryIncs pool (Just nls) ns = foldM (\m n -> nodeStoryInc pool (Just m) n) nls ns
+nodeStoryIncs pool Nothing (ni:ns) = do
+  m <- getNodeStory pool ni
+  nodeStoryIncs pool (Just m) ns
+
+-- nodeStoryDec :: Pool PGS.Connection -> NodeListStory -> NodeId -> IO NodeListStory
+-- nodeStoryDec pool ns@(NodeStory nls) ni = do
+--   case Map.lookup ni nls of
+--     Nothing -> do
+--       _ <- nodeStoryRemove pool ni
+--       pure ns
+--     Just _ -> do
+--       let ns' = Map.filterWithKey (\k _v -> k /= ni) nls
+--       _ <- nodeStoryRemove pool ni
+--       pure $ NodeStory ns'
+
+------------------------------------
+readNodeStoryEnv :: Pool PGS.Connection -> IO NodeStoryEnv
+readNodeStoryEnv pool = do
+  mvar <- nodeStoryVar pool Nothing []
+  saver <- mkNodeStorySaver pool mvar
+  -- let saver = modifyMVar_ mvar $ \mv -> do
+  --       writeNodeStories pool mv
+  --       printDebug "[readNodeStoryEnv] saver" mv
+  --       let mv' = clearHistory mv
+  --       printDebug "[readNodeStoryEnv] saver, cleared" mv'
+  --       return mv'
+  pure $ NodeStoryEnv { _nse_var    = mvar
+                      , _nse_saver  = saver
+                      , _nse_getter = nodeStoryVar pool (Just mvar) }
+
+nodeStoryVar :: Pool PGS.Connection -> Maybe (MVar NodeListStory) -> [NodeId] -> IO (MVar NodeListStory)
+nodeStoryVar pool Nothing nIds = do
+  state <- nodeStoryIncs pool Nothing nIds
+  newMVar state
+nodeStoryVar pool (Just mv) nIds = do
+  _ <- modifyMVar_ mv $ \nsl -> (nodeStoryIncs pool (Just nsl) nIds)
+  pure mv
+
+-- Debounce is useful since it could delay the saving to some later
+-- time, asynchronously and we keep operating on memory only.
+mkNodeStorySaver :: Pool PGS.Connection -> MVar NodeListStory -> IO (IO ())
+mkNodeStorySaver pool mvns = mkDebounce settings
+  where
+    settings = defaultDebounceSettings
+                 { debounceAction = do
+                     withMVar mvns (\ns -> writeNodeStories pool ns)
+                     withMVar mvns (\ns -> printDebug "[mkNodeStorySaver] debounce nodestory" ns)
+                     modifyMVar_ mvns $ \ns -> pure $ clearHistory ns
+                 , debounceFreq = 1 * minute }
+    minute = 60 * second
+    second = 10^(6 :: Int)
+
+clearHistory :: NodeListStory -> NodeListStory
+clearHistory (NodeStory ns) = NodeStory $ ns & (traverse . a_history) .~ emptyHistory
+  where
+    emptyHistory = [] :: [NgramsStatePatch']
+
+-- mkNodeStorySaver :: MVar NodeListStory -> Cmd err (Cmd err ())
+-- mkNodeStorySaver mvns = mkDebounce settings
+--   where
+--     settings = defaultDebounceSettings
+--                  { debounceAction = withMVar mvns (\ns -> writeNodeStories ns)
+--                  , debounceFreq = 1 * minute
+--                  --  , debounceEdge = trailingEdge -- Trigger on the trailing edge
+--                  }
+--     minute = 60 * second
+--     second = 10^(6 :: Int)
+-----------------------------------------
src/Gargantext/Core/NodeStoryFile.hs 0 → 100644
View file @ 1781ba63
{- NOTE This is legacy code. It keeps node stories in a directory
repo. We now have migrated to the DB. However this code is needed to
make the migration (see Gargantext.API.Ngrams.Tools) -}
module Gargantext.Core.NodeStoryFile
  where

import Control.Lens (view)
import Control.Monad (foldM)
import Codec.Serialise (serialise, deserialise)
import Codec.Serialise.Class
import Control.Concurrent (MVar(), modifyMVar_, newMVar, readMVar, withMVar)
import Control.Debounce (mkDebounce, defaultDebounceSettings, debounceFreq, debounceAction)
import Gargantext.Core.NodeStory hiding (readNodeStoryEnv)
import Gargantext.Core.Types (ListId, NodeId(..))
import Gargantext.Database.Prelude (CmdM, hasConfig)
import Gargantext.Prelude
import Gargantext.Prelude.Config (gc_repofilepath)
import System.Directory (renameFile, createDirectoryIfMissing, doesFileExist, removeFile)
import System.IO (FilePath, hClose)
import System.IO.Temp (withTempFile)
import qualified Data.ByteString.Lazy as DBL
import qualified Data.List as List
import qualified Data.Map.Strict as Map
import qualified Gargantext.Database.Query.Table.Ngrams as TableNgrams
getRepo :: HasNodeStory env err m
        => [ListId] -> m NodeListStory
getRepo listIds = do
  g <- getNodeListStory
  liftBase $ do
    v <- g listIds
    readMVar v
  -- v <- liftBase $ f listIds
  -- v' <- liftBase $ readMVar v
  -- pure $ v'
getRepoReadConfig :: (CmdM env err m)
                  => [ListId] -> m NodeListStory
getRepoReadConfig listIds = do
  repoFP <- view $ hasConfig . gc_repofilepath
  env <- liftBase $ readNodeStoryEnv repoFP
  let g = view nse_getter env
  liftBase $ do
    v <- g listIds
    readMVar v
getNodeListStory :: HasNodeStory env err m
                 => m ([NodeId] -> IO (MVar NodeListStory))
getNodeListStory = do
  env <- view hasNodeStory
  pure $ view nse_getter env
readNodeStoryEnv :: NodeStoryDir -> IO NodeStoryEnv
readNodeStoryEnv nsd = do
  mvar <- nodeStoryVar nsd Nothing []
  saver <- mkNodeStorySaver nsd mvar
  pure $ NodeStoryEnv { _nse_var    = mvar
                      , _nse_saver  = saver
                      , _nse_getter = nodeStoryVar nsd (Just mvar) }
------------------------------------------------------------------------
mkNodeStorySaver :: NodeStoryDir -> MVar NodeListStory -> IO (IO ())
mkNodeStorySaver nsd mvns = mkDebounce settings
  where
    settings = defaultDebounceSettings
                 { debounceAction = withMVar mvns (writeNodeStories nsd)
                 , debounceFreq = 1 * minute
                 -- , debounceEdge = trailingEdge -- Trigger on the trailing edge
                 }
    minute = 60 * second
    second = 10^(6 :: Int)
nodeStoryVar :: NodeStoryDir
             -> Maybe (MVar NodeListStory)
             -> [NodeId]
             -> IO (MVar NodeListStory)
nodeStoryVar nsd Nothing ni = nodeStoryIncs nsd Nothing ni >>= newMVar
nodeStoryVar nsd (Just mv) ni = do
  _ <- modifyMVar_ mv $ \mv' -> (nodeStoryIncs nsd (Just mv') ni)
  pure mv
nodeStoryInc :: NodeStoryDir -> Maybe NodeListStory -> NodeId -> IO NodeListStory
nodeStoryInc nsd (Just ns@(NodeStory nls)) ni = do
  case Map.lookup ni nls of
    Nothing -> do
      (NodeStory nls') <- nodeStoryRead nsd ni
      pure $ NodeStory $ Map.union nls nls'
    Just _ -> pure ns
nodeStoryInc nsd Nothing ni = nodeStoryRead nsd ni
nodeStoryIncs :: NodeStoryDir -> Maybe NodeListStory -> [NodeId] -> IO NodeListStory
nodeStoryIncs _ Nothing [] = pure $ NodeStory $ Map.empty
nodeStoryIncs nsd (Just nls) ns = foldM (\m n -> nodeStoryInc nsd (Just m) n) nls ns
nodeStoryIncs nsd Nothing (ni:ns) = do
  m <- nodeStoryRead nsd ni
  nodeStoryIncs nsd (Just m) ns
nodeStoryDec :: NodeStoryDir -> NodeListStory -> NodeId -> IO NodeListStory
nodeStoryDec nsd ns@(NodeStory nls) ni = do
  case Map.lookup ni nls of
    Nothing -> do
      -- we make sure the corresponding file repo is really removed
      _ <- nodeStoryRemove nsd ni
      pure ns
    Just _ -> do
      let ns' = Map.filterWithKey (\k _v -> k /= ni) nls
      _ <- nodeStoryRemove nsd ni
      pure $ NodeStory ns'
-- | TODO lock
nodeStoryRead :: NodeStoryDir -> NodeId -> IO NodeListStory
nodeStoryRead nsd ni = do
  _repoDir <- createDirectoryIfMissing True nsd
  let nsp = nodeStoryPath nsd ni
  exists <- doesFileExist nsp
  if exists
    then deserialise <$> DBL.readFile nsp
    else pure (initNodeStory ni)
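-- Round-trip note (editorial): `nodeStoryRead` inverts what `saverAction'`
-- below writes, via Codec.Serialise's CBOR encoding, i.e.
--
--   deserialise (serialise x) == x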
nodeStoryRemove :: NodeStoryDir -> NodeId -> IO ()
nodeStoryRemove nsd ni = do
  let nsp = nodeStoryPath nsd ni
  exists <- doesFileExist nsp
  if exists
    then removeFile nsp
    else pure ()
nodeStoryRead_test :: NodeStoryDir -> NodeId -> IO (Maybe [TableNgrams.NgramsType])
nodeStoryRead_test nsd ni = nodeStoryRead nsd ni >>= \n -> pure
                          $ fmap Map.keys
                          $ fmap _a_state
                          $ Map.lookup ni
                          $ _unNodeStory n
------------------------------------------------------------------------
type NodeStoryDir = FilePath
writeNodeStories :: NodeStoryDir -> NodeListStory -> IO ()
writeNodeStories fp nls = do
  _done <- mapM (writeNodeStory fp) $ splitByNode nls
  -- printDebug "[writeNodeStories]" done
  pure ()
writeNodeStory :: NodeStoryDir -> (NodeId, NodeListStory) -> IO ()
writeNodeStory rdfp (n, ns) = saverAction' rdfp n ns
splitByNode :: NodeListStory -> [(NodeId, NodeListStory)]
splitByNode (NodeStory m) =
  List.map (\(n, a) -> (n, NodeStory $ Map.singleton n a)) $ Map.toList m
saverAction' :: Serialise a => NodeStoryDir -> NodeId -> a -> IO ()
saverAction' repoDir nId a = do
  withTempFile repoDir ((cs $ show nId) <> "-tmp-repo.cbor") $ \fp h -> do
    -- printDebug "[repoSaverAction]" fp
    DBL.hPut h $ serialise a
    hClose h
    renameFile fp (nodeStoryPath repoDir nId)
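-- Sketch (editorial, not part of this commit): the write-to-temp-then-rename
-- idiom that `saverAction'` relies on, in isolation. `renameFile` is atomic
-- on POSIX filesystems (same device), so a concurrent `nodeStoryRead` never
-- observes a half-written repo file. The helper name is illustrative; it
-- reuses only imports already present in this module.
_atomicWriteSketch :: FilePath -> FilePath -> DBL.ByteString -> IO ()
_atomicWriteSketch dir target bytes =
  withTempFile dir "tmp-write.cbor" $ \fp h -> do
    DBL.hPut h bytes  -- write everything to the temporary file first
    hClose h          -- flush and close before the rename makes it visible
    renameFile fp target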
nodeStoryPath :: NodeStoryDir -> NodeId -> FilePath
nodeStoryPath repoDir nId = repoDir <> "/" <> filename
  where
    filename = "repo" <> "-" <> (cs $ show nId) <> ".cbor"
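-- That is, one CBOR file per node, named "repo-<show nId>.cbor" inside the
-- repo directory.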
------------------------------------------------------------------------
-- TODO : repo Migration TODO TESTS
{-
repoMigration :: NodeStoryDir -> NgramsRepo -> IO ()
repoMigration fp r = writeNodeStories fp (repoToNodeListStory r)
repoToNodeListStory :: NgramsRepo -> NodeListStory
repoToNodeListStory (Repo _v s h) = NodeStory $ Map.fromList ns
where
s' = ngramsState_migration s
h' = ngramsStatePatch_migration h
ns = List.map (\(n,ns')
-> (n, let hs = fromMaybe [] (Map.lookup n h') in
Archive { _a_version = List.length hs
, _a_state = ns'
, _a_history = hs }
)
) $ Map.toList s'
ngramsState_migration :: NgramsState
-> Map NodeId NgramsState'
ngramsState_migration ns =
Map.fromListWith (Map.union) $
List.concat $
map (\(nt, nTable)
-> map (\(nid, table)
-> (nid, Map.singleton nt table)
) $ Map.toList nTable
) $ Map.toList ns
ngramsStatePatch_migration :: [NgramsStatePatch]
-> Map NodeId [NgramsStatePatch']
ngramsStatePatch_migration np' = Map.fromListWith (<>)
$ List.concat
$ map toPatch np'
where
toPatch :: NgramsStatePatch -> [(NodeId, [NgramsStatePatch'])]
toPatch p =
List.concat $
map (\(nt, nTable)
-> map (\(nid, table)
-> (nid, [fst $ Patch.singleton nt table])
) $ Patch.toList nTable
) $ Patch.toList p
-}
src/Gargantext/Core/Text/Corpus/Parsers/CSV.hs
View file @ 1781ba63
...
...
@@ -234,7 +234,7 @@ delimiter Comma = fromIntegral $ ord ','
------------------------------------------------------------------------
 readCsvOn' :: [CsvDoc -> Text] -> FilePath -> IO (Either Prelude.String [Text])
 readCsvOn' fields fp = do
-  r <- readFile fp
+  r <- readCSVFile fp
   pure $ ( V.toList
          . V.map (\l -> intercalate (pack " ") $ map (\field -> field l) fields)
          . snd ) <$> r
...
...
@@ -267,8 +267,8 @@ readByteStringStrict d ff = (readByteStringLazy d ff) . BL.fromStrict
------------------------------------------------------------------------
-- | TODO use readFileLazy
-readFile :: FilePath -> IO (Either Prelude.String (Header, Vector CsvDoc))
-readFile fp = do
+readCSVFile :: FilePath -> IO (Either Prelude.String (Header, Vector CsvDoc))
+readCSVFile fp = do
   result <- fmap (readCsvLazyBS Comma) $ BL.readFile fp
   case result of
     Left _err -> fmap (readCsvLazyBS Tab) $ BL.readFile fp
...
...
@@ -448,7 +448,7 @@ parseHal' bs = (V.toList . V.map csvHal2doc . snd) <$> readCsvHalLazyBS bs
------------------------------------------------------------------------
 parseCsv :: FilePath -> IO (Either Prelude.String [HyperdataDocument])
-parseCsv fp = fmap (V.toList . V.map csv2doc . snd) <$> readFile fp
+parseCsv fp = fmap (V.toList . V.map csv2doc . snd) <$> readCSVFile fp
{-
parseCsv' :: BL.ByteString -> Either Prelude.String [HyperdataDocument]
...
...
src/Gargantext/Core/Text/List/Social/History.hs
View file @ 1781ba63
...
...
@@ -68,5 +68,3 @@ history' types lists = (Map.map (Map.unionsWith (<>)))
          -> Map NgramsType [HashMap NgramsTerm NgramsPatch]
    toMap m = Map.map (cons . unNgramsTablePatch) $ unPatchMapToMap m
src/Gargantext/Core/Viz/Graph/Tools.hs
View file @ 1781ba63
...
...
@@ -132,14 +132,11 @@ cooc2graphWith' doPartitions distance threshold strength myCooc = do
     (as, bs)    = List.unzip $ Map.keys distanceMap
     n'          = Set.size $ Set.fromList $ as <> bs
     bridgeness' = bridgeness (fromIntegral nodesApprox) partitions distanceMap
-    confluence' = Map.empty -- confluence (Map.keys bridgeness') 3 True False
+    confluence' = Map.empty -- BAC.computeConfluences 3 (Map.keys bridgeness') True
+                            -- confluence (Map.keys bridgeness') 3 True False
   seq bridgeness' $ printDebug "bridgeness OK" ()
-  saveAsFileDebug "/tmp/bridgeness" bridgeness'
-  --seq confluence' $ printDebug "confluence OK" ()
-  --saveAsFileDebug "/tmp/confluence" confluence'
-  let g = data2graph ti diag bridgeness' confluence' partitions
-  --saveAsFileDebug "/tmp/graph" g
-  pure g
+  seq confluence' $ printDebug "confluence OK" ()
+  pure $ data2graph ti diag bridgeness' confluence' partitions

 type Reverse = Bool
...
...
src/Gargantext/Core/Viz/Phylo/Legacy/LegacyMain.hs
View file @ 1781ba63
...
...
@@ -9,6 +9,7 @@ Portability : POSIX
-}
{-# LANGUAGE MonoLocalBinds #-}
{-# LANGUAGE ViewPatterns #-}
module Gargantext.Core.Viz.Phylo.Legacy.LegacyMain
...
...
src/Gargantext/Database/Action/Flow.hs
View file @ 1781ba63
...
...
@@ -135,7 +135,7 @@ allDataOrigins = map InternalOrigin API.externalAPIs
---------------
 data DataText = DataOld ![NodeId]
               | DataNew !(Maybe Integer, ConduitT () HyperdataDocument IO ())
-              -- | DataNew ![[HyperdataDocument]]
+              --- | DataNew ![[HyperdataDocument]]
               -- Show instance is not possible because of IO

 printDataText :: DataText -> IO ()
...
...
src/Gargantext/Database/Action/Flow/List.hs
View file @ 1781ba63
...
...
@@ -210,4 +210,3 @@ putListNgrams nodeId ngramsType nes = putListNgrams' nodeId ngramsType m
        & unNodeStory . at listId . _Just . a_history %~ (p :)
        & unNodeStory . at listId . _Just . a_state . at ngramsType' .~ Just ns
  saveNodeStory
src/Gargantext/Database/GargDB.hs
View file @ 1781ba63
...
...
@@ -140,13 +140,13 @@ writeFile a = do
---
-- | Example to read a file with Type
-readFile :: (MonadReader env m
+readGargFile :: (MonadReader env m
             , HasConfig env
             , MonadBase IO m
             , ReadFile a)
           => FilePath -> m a
-readFile fp = do
+readGargFile fp = do
   dataPath <- view $ hasConfig . gc_datafilepath
   liftBase $ readFile' $ toFilePath dataPath fp
...
...
src/Gargantext/Database/Query/Table/Node.hs
View file @ 1781ba63
...
...
@@ -230,6 +230,9 @@ selectNodesIdWithType nt = proc () -> do
------------------------------------------------------------------------
nodeExists :: (HasNodeError err) => NodeId -> Cmd err Bool
nodeExists nId = (== [DPS.Only True])
  <$> runPGSQuery [sql| SELECT true FROM nodes WHERE id = ? AND ? |] (nId, True)

getNode :: HasNodeError err => NodeId -> Cmd err (Node Value)
getNode nId = do
...
...
src/Gargantext/Database/Query/Table/NodeNode.hs
View file @ 1781ba63
...
...
@@ -19,27 +19,35 @@ commentary with @some markup@.
module Gargantext.Database.Query.Table.NodeNode
  ( module Gargantext.Database.Schema.NodeNode
  , getNodeNode
  , insertNodeNode
  , deleteNodeNode
  , nodeNodesCategory
  , nodeNodesScore
  , queryNodeNodeTable
  , selectDocNodes
  , selectDocs
  , selectDocsDates
  , selectPublicNodes
  )
  where

import Control.Arrow (returnA)
import Control.Lens ((^.), view)
import Data.Text (Text, splitOn)
import Data.Maybe (catMaybes)
import Database.PostgreSQL.Simple.SqlQQ (sql)
import Database.PostgreSQL.Simple.Types (Values(..), QualifiedIdentifier(..))
import Gargantext.Core
import Gargantext.Core.Types
import Gargantext.Database.Admin.Types.Hyperdata
import Gargantext.Database.Prelude
import Gargantext.Database.Schema.Node
import Gargantext.Database.Schema.NodeNode
import Gargantext.Prelude
import Opaleye
import qualified Database.PostgreSQL.Simple as PGS
import qualified Opaleye as O

queryNodeNodeTable :: Select NodeNodeRead
queryNodeNodeTable = selectTable nodeNodeTable
...
...
@@ -113,8 +121,113 @@ deleteNodeNode n1 n2 = mkCmd $ \conn ->
  )
------------------------------------------------------------------------
-- | Favorite management
_nodeNodeCategory :: CorpusId -> DocId -> Int -> Cmd err [Int]
_nodeNodeCategory cId dId c = map (\(PGS.Only a) -> a) <$> runPGSQuery favQuery (c, cId, dId)
  where
    favQuery :: PGS.Query
    favQuery = [sql| UPDATE nodes_nodes SET category = ?
                     WHERE node1_id = ? AND node2_id = ?
                     RETURNING node2_id;
               |]

nodeNodesCategory :: [(CorpusId, DocId, Int)] -> Cmd err [Int]
nodeNodesCategory inputData = map (\(PGS.Only a) -> a)
                          <$> runPGSQuery catQuery (PGS.Only $ Values fields inputData)
  where
    fields   = map (\t -> QualifiedIdentifier Nothing t) ["int4", "int4", "int4"]
    catQuery :: PGS.Query
    catQuery = [sql| UPDATE nodes_nodes as nn0
                     SET category = nn1.category
                     FROM (?) as nn1(node1_id, node2_id, category)
                     WHERE nn0.node1_id = nn1.node1_id
                       AND nn0.node2_id = nn1.node2_id
                     RETURNING nn1.node2_id
               |]

------------------------------------------------------------------------
-- | Score management
_nodeNodeScore :: CorpusId -> DocId -> Int -> Cmd err [Int]
_nodeNodeScore cId dId c = map (\(PGS.Only a) -> a) <$> runPGSQuery scoreQuery (c, cId, dId)
  where
    scoreQuery :: PGS.Query
    scoreQuery = [sql| UPDATE nodes_nodes SET score = ?
                       WHERE node1_id = ? AND node2_id = ?
                       RETURNING node2_id;
                 |]

nodeNodesScore :: [(CorpusId, DocId, Int)] -> Cmd err [Int]
nodeNodesScore inputData = map (\(PGS.Only a) -> a)
                       <$> runPGSQuery catScore (PGS.Only $ Values fields inputData)
  where
    fields   = map (\t -> QualifiedIdentifier Nothing t) ["int4", "int4", "int4"]
    catScore :: PGS.Query
    catScore = [sql| UPDATE nodes_nodes as nn0
                     SET score = nn1.score
                     FROM (?) as nn1(node1_id, node2_id, score)
                     WHERE nn0.node1_id = nn1.node1_id
                       AND nn0.node2_id = nn1.node2_id
                     RETURNING nn1.node2_id
               |]

------------------------------------------------------------------------
_selectCountDocs :: HasDBid NodeType => CorpusId -> Cmd err Int
_selectCountDocs cId = runCountOpaQuery (queryCountDocs cId)
  where
    queryCountDocs cId' = proc () -> do
      (n, nn) <- joinInCorpus -< ()
      restrict -< nn ^. nn_node1_id   .== (toNullable $ pgNodeId cId')
      restrict -< nn ^. nn_category   .>= (toNullable $ sqlInt4 1)
      restrict -< n  ^. node_typename .== (sqlInt4 $ toDBid NodeDocument)
      returnA  -< n

-- | TODO use UTCTime fast
selectDocsDates :: HasDBid NodeType => CorpusId -> Cmd err [Text]
selectDocsDates cId = map (head' "selectDocsDates" . splitOn "-")
                  <$> catMaybes
                  <$> map (view hd_publication_date)
                  <$> selectDocs cId

selectDocs :: HasDBid NodeType => CorpusId -> Cmd err [HyperdataDocument]
selectDocs cId = runOpaQuery (queryDocs cId)

queryDocs :: HasDBid NodeType => CorpusId -> O.Select (Column SqlJsonb)
queryDocs cId = proc () -> do
  (n, nn) <- joinInCorpus -< ()
  restrict -< nn ^. nn_node1_id   .== (toNullable $ pgNodeId cId)
  restrict -< nn ^. nn_category   .>= (toNullable $ sqlInt4 1)
  restrict -< n  ^. node_typename .== (sqlInt4 $ toDBid NodeDocument)
  returnA  -< view (node_hyperdata) n

selectDocNodes :: HasDBid NodeType => CorpusId -> Cmd err [Node HyperdataDocument]
selectDocNodes cId = runOpaQuery (queryDocNodes cId)

queryDocNodes :: HasDBid NodeType => CorpusId -> O.Select NodeRead
queryDocNodes cId = proc () -> do
  (n, nn) <- joinInCorpus -< ()
  restrict -< nn ^. nn_node1_id   .== (toNullable $ pgNodeId cId)
  restrict -< nn ^. nn_category   .>= (toNullable $ sqlInt4 1)
  restrict -< n  ^. node_typename .== (sqlInt4 $ toDBid NodeDocument)
  returnA  -< n

joinInCorpus :: O.Select (NodeRead, NodeNodeReadNull)
joinInCorpus = leftJoin queryNodeTable queryNodeNodeTable cond
  where
    cond :: (NodeRead, NodeNodeRead) -> Column SqlBool
    cond (n, nn) = nn ^. nn_node2_id .== (view node_id n)

_joinOn1 :: O.Select (NodeRead, NodeNodeReadNull)
_joinOn1 = leftJoin queryNodeTable queryNodeNodeTable cond
  where
    cond :: (NodeRead, NodeNodeRead) -> Column SqlBool
    cond (n, nn) = nn ^. nn_node1_id .== n ^. node_id

------------------------------------------------------------------------
selectPublicNodes :: HasDBid NodeType => (Hyperdata a, DefaultFromField SqlJsonb a)
                  => Cmd err [(Node a, Maybe Int)]
selectPublicNodes = runOpaQuery (queryWithType NodeFolderPublic)
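-- Note (editorial sketch, not part of this commit): nodeNodesCategory and
-- nodeNodesScore rely on postgresql-simple's `Values`, which splices a
-- literal VALUES table into the `?` placeholder, so a single round trip
-- updates many (corpus, doc) pairs at once. A hypothetical call
--
--   nodeNodesCategory [(corpusId, docId1, 1), (corpusId, docId2, 0)]
--
-- is thus executed roughly as
--
--   UPDATE nodes_nodes as nn0 SET category = nn1.category
--   FROM (VALUES (<corpusId>, <docId1>, 1), (<corpusId>, <docId2>, 0))
--        as nn1(node1_id, node2_id, category)
--   WHERE nn0.node1_id = nn1.node1_id AND nn0.node2_id = nn1.node2_id
--   RETURNING nn1.node2_id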
...
...
stack.yaml
View file @ 1781ba63
...
...
@@ -35,7 +35,7 @@ extra-deps:
 - git: https://gitlab.iscpif.fr/gargantext/haskell-gargantext-prelude.git
   commit: 08096a4913572cf22762fa77613340207ec6d9fd
 - git: https://gitlab.iscpif.fr/gargantext/gargantext-graph.git
-  commit: 13131f5173e2e2ab35b968e53f0feaeee13ad8ac
+  commit: f41ee8b53c3264e5aa5adc06b2e5b293d2a8c474
 # Data Mining Libs
 - git: https://github.com/delanoe/data-time-segment.git
   commit: 10a416b9f6c443866b36479c3441ebb3bcdeb7ef
...
...
@@ -100,7 +100,7 @@ extra-deps:
 - git: https://github.com/alpmestan/haskell-igraph.git
   commit: 9f55eb36639c8e0965c8bc539a57738869f33e9a
 - git: https://gitlab.iscpif.fr/gargantext/haskell-infomap.git
-  commit: 76b795c1eaca37f43418d07da9fbdf5f4e7d8f5c
+  commit: 6d1d60b952b9b2b272b58fc5539700fd8890ac88
 # Accelerate Linear Algebra and specific instances
 - git: https://github.com/alpmestan/accelerate.git
...
...
@@ -110,7 +110,7 @@ extra-deps:
 - git: https://gitlab.iscpif.fr/amestanogullari/accelerate-utility.git
   commit: a3875fe652d3bb5acb522674c22c6c814c1b4ad0
 - git: https://github.com/alpmestan/accelerate-llvm.git
-  commit: 08eaa8ee771dde88b3dcf37a89b31777f1ca4910
+  commit: 944f5a4aea35ee6aedb81ea754bf46b131fce9e3
   subdirs:
     - accelerate-llvm/
     - accelerate-llvm-native/
...
...