Commit 640a4549 in gargantext / haskell-gargantext

[READ] Code clean

Authored Jan 20, 2023 by Alexandre Delanoë
Parent commit: ccf15753
Pipeline #3598 failed in 74 minutes

Showing 2 changed files with 46 additions and 67 deletions:
  src/Gargantext/Core/NodeStory.hs          +39 -60
  src/Gargantext/Core/Text/List/Social.hs    +7  -7
src/Gargantext/Core/NodeStory.hs

@@ -70,7 +70,6 @@ module Gargantext.Core.NodeStory
  , nse_archive_saver_immediate
  , nse_var
  , unNodeStory
  , getNodeArchiveHistory
  , getNodesArchiveHistory
  , Archive(..)
  , initArchive
@@ -94,42 +93,42 @@ module Gargantext.Core.NodeStory
  where

-- import Debug.Trace (traceShow)
import Control.Debounce (mkDebounce, defaultDebounceSettings, debounceFreq, debounceAction)
import Codec.Serialise.Class
import Control.Concurrent (MVar(), newMVar, modifyMVar_)
import Control.Exception (catch, throw, SomeException(..))
import Control.Lens (makeLenses, Getter, (^.), (.~), (%~), _Just, at, traverse, view)
import Control.Monad.Except
import Control.Monad.Reader
import Data.Aeson hiding ((.=), decode)
import Data.ByteString.Char8 (hPutStrLn)
import Data.HashMap.Strict (HashMap)
import Data.Map.Strict (Map)
import Data.Maybe (catMaybes)
import Data.Monoid
import Data.Pool (Pool, withResource)
import Data.Profunctor.Product.TH (makeAdaptorAndInstance)
import Data.Semigroup
import Database.PostgreSQL.Simple.SqlQQ (sql)
import Database.PostgreSQL.Simple.FromField (FromField(fromField), fromJSONField)
import Database.PostgreSQL.Simple.Types (Values(..), QualifiedIdentifier(..))
import GHC.Generics (Generic)
import qualified Data.HashMap.Strict as HashMap
import Gargantext.Database.Schema.Ngrams (NgramsType)
import Gargantext.API.Ngrams.Types
import Gargantext.Core.Types (ListId, NodeId(..), NodeType)
import Gargantext.Core.Utils.Prefix (unPrefix)
import Gargantext.Database.Admin.Config (nodeTypeId)
import Gargantext.Database.Prelude (CmdM', HasConnectionPool(..), HasConfig)
import Gargantext.Database.Query.Table.Node.Error (HasNodeError())
import Gargantext.Prelude
import Opaleye (DefaultFromField(..), SqlJsonb, fromPGSFromField)
import System.IO (stderr)
import qualified Data.Map.Strict as Map
import qualified Data.Map.Strict.Patch as PM
import qualified Data.Set as Set
import qualified Data.Text as Text
import qualified Database.PostgreSQL.Simple as PGS
import qualified Gargantext.Database.Query.Table.Ngrams as TableNgrams
------------------------------------------------------------------------
@@ -258,8 +257,6 @@ initNodeListStoryMock = NodeStory $ Map.singleton nodeListId archive
    ]

------------------------------------------------------------------------
------------------------------------------------------------------------
-- | Lenses at the bottom of the file because Template Haskell would reorder order of execution in others cases
makeLenses ''NodeStoryEnv
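
The comment above points at Template Haskell's declaration-group rule: a top-level splice such as makeLenses splits the module, code written before the splice cannot refer to the names the splice generates, and the splice itself can only see declarations above it. Keeping the splice at the very bottom means every declaration stays visible to it. A minimal sketch of that pattern, with a made-up record standing in for NodeStoryEnv, is below; it is an illustration, not part of this commit.

{-# LANGUAGE TemplateHaskell #-}
-- Minimal sketch of the "splice at the bottom" pattern; Env is a made-up
-- record standing in for NodeStoryEnv.
module LensesAtBottomSketch (Env(..), envName, envSize) where

import Control.Lens (makeLenses)

data Env = Env { _envName :: String, _envSize :: Int }

-- All other declarations of the module go above this point. A top-level
-- splice cuts the file into declaration groups: code above it cannot use
-- the generated lenses, and the splice can only reify what comes before it,
-- so placing it last keeps both restrictions out of the way. The generated
-- envName / envSize lenses are still exportable for other modules.
makeLenses ''Env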
@@ -293,7 +290,8 @@ type ArchiveList = Archive NgramsState' NgramsStatePatch'

-- DB stuff
runPGSExecute :: (PGS.ToRow q) => PGS.Connection -> PGS.Query -> q -> IO Int64
runPGSExecute c qs a = catch (PGS.execute c qs a) printError
  where
    printError (SomeException e) = do

@@ -301,7 +299,8 @@ runPGSExecute c qs a = catch (PGS.execute c qs a) printError
      _ <- panic $ Text.pack $ show e
      throw (SomeException e)

runPGSExecuteMany :: (PGS.ToRow q) => PGS.Connection -> PGS.Query -> [q] -> IO Int64
runPGSExecuteMany c qs a = catch (PGS.executeMany c qs a) printError
  where
    printError (SomeException e) = do

@@ -309,7 +308,8 @@ runPGSExecuteMany c qs a = catch (PGS.executeMany c qs a) printError
      _ <- panic $ Text.pack $ show e
      throw (SomeException e)

runPGSQuery :: (PGS.FromRow r, PGS.ToRow q) => PGS.Connection -> PGS.Query -> q -> IO [r]
runPGSQuery c q a = catch (PGS.query c q a) printError
  where
    printError (SomeException e) = do
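
The three helpers above (runPGSExecute, runPGSExecuteMany, runPGSQuery) share one pattern: run the postgresql-simple action and, if it throws, log the exception before rethrowing it. A minimal self-contained sketch of that pattern follows; it writes to stderr instead of calling Gargantext's panic, and the module and function names are made up for illustration.

-- Minimal sketch (not part of this commit) of the log-and-rethrow wrapper.
module QueryLogSketch where

import Control.Exception (SomeException(..), catch, throwIO)
import Data.Int (Int64)
import System.IO (hPutStrLn, stderr)
import qualified Database.PostgreSQL.Simple as PGS

-- Run an INSERT/UPDATE/DELETE, report any exception on stderr, then rethrow
-- it so the caller still sees the failure.
execLogged :: PGS.ToRow q => PGS.Connection -> PGS.Query -> q -> IO Int64
execLogged c qs a = PGS.execute c qs a `catch` reportAndRethrow
  where
    reportAndRethrow (SomeException e) = do
      hPutStrLn stderr ("query failed: " <> show e)
      throwIO e

The diff's own helpers call panic with the rendered exception instead of the hPutStrLn used here; the catch-and-rethrow shape is the same.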
@@ -319,22 +319,26 @@ runPGSQuery c q a = catch (PGS.query c q a) printError

runPGSAdvisoryLock :: PGS.Connection -> Int -> IO ()
runPGSAdvisoryLock c id = do
  _ <- runPGSQuery c [sql| SELECT pg_advisory_lock(?) |] (PGS.Only id) :: IO [PGS.Only ()]
  pure ()

runPGSAdvisoryUnlock :: PGS.Connection -> Int -> IO ()
runPGSAdvisoryUnlock c id = do
  _ <- runPGSQuery c [sql| SELECT pg_advisory_unlock(?) |] (PGS.Only id) :: IO [PGS.Only Bool]
  pure ()

runPGSAdvisoryXactLock :: PGS.Connection -> Int -> IO ()
runPGSAdvisoryXactLock c id = do
  _ <- runPGSQuery c [sql| SELECT pg_advisory_xact_lock(?) |] (PGS.Only id) :: IO [PGS.Only ()]
  pure ()

nodeExists :: PGS.Connection -> NodeId -> IO Bool
nodeExists c nId = (== [PGS.Only True])
  <$> runPGSQuery c [sql| SELECT true FROM nodes WHERE id = ? LIMIT 1 |] (PGS.Only nId)

getNodesIdWithType :: PGS.Connection -> NodeType -> IO [NodeId]
getNodesIdWithType c nt = do
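
runPGSAdvisoryLock and runPGSAdvisoryUnlock above take and release session-level PostgreSQL advisory locks, so a caller has to pair them itself. A hedged sketch of such a pairing, written directly against postgresql-simple so it stands alone, is below; withAdvisoryLock is a hypothetical helper, not part of the commit.

{-# LANGUAGE QuasiQuotes #-}
-- Hedged sketch: take a session-level advisory lock around an action and
-- always release it, even if the action throws.
module AdvisoryLockSketch where

import Control.Exception (bracket_)
import Database.PostgreSQL.Simple.SqlQQ (sql)
import qualified Database.PostgreSQL.Simple as PGS

withAdvisoryLock :: PGS.Connection -> Int -> IO a -> IO a
withAdvisoryLock c key = bracket_ lock unlock
  where
    lock   = () <$ (PGS.query c [sql| SELECT pg_advisory_lock(?)   |] (PGS.Only key) :: IO [PGS.Only ()])
    unlock = () <$ (PGS.query c [sql| SELECT pg_advisory_unlock(?) |] (PGS.Only key) :: IO [PGS.Only Bool])

runPGSAdvisoryXactLock needs no matching unlock call because pg_advisory_xact_lock is released automatically when the enclosing transaction ends.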
@@ -345,44 +349,22 @@ getNodesIdWithType c nt = do
    query = [sql| SELECT id FROM nodes WHERE typename = ? |]

-- nodeStoryTable :: Table NodeStoryRead NodeStoryWrite
-- nodeStoryTable =
--   Table "node_stories"
--     ( pNodeStory NodeStoryDB { node_id             = tableField "node_id"
--                              , version             = tableField "version"
--                              , ngrams_type_id      = tableField "ngrams_type_id"
--                              , ngrams_id           = tableField "ngrams_id"
--                              , ngrams_repo_element = tableField "ngrams_repo_element"
--                              } )

-- nodeStoryArchiveTable :: Table NodeStoryArchiveRead NodeStoryArchiveWrite
-- nodeStoryArchiveTable =
--   Table "node_story_archive_history"
--     ( pNodeArchiveStory NodeStoryArchiveDB { a_node_id = tableField "node_id"
--                                            , archive   = tableField "archive" } )

-- nodeStorySelect :: Select NodeStoryRead
-- nodeStorySelect = selectTable nodeStoryTable

-- NOTE "first patch in the _a_history list is the most recent"
getNodeArchiveHistory :: PGS.Connection -> NodeId -> IO [NgramsStatePatch']
getNodeArchiveHistory c nodeId = do
  as <- runPGSQuery c query (PGS.Only nodeId)
          :: IO [(TableNgrams.NgramsType, NgramsTerm, NgramsPatch)]
  pure $ (\(ngramsType, terms, patch) ->
            fst $ PM.singleton ngramsType (NgramsTablePatch $ fst $ PM.singleton terms patch)) <$> as
  where
    query :: PGS.Query
    query = [sql| SELECT ngrams_type_id, terms, patch
                  FROM node_story_archive_history
                  JOIN ngrams ON ngrams.id = ngrams_id
                  WHERE node_id = ?
                  ORDER BY (version, node_story_archive_history.id) DESC |]

-- getNodesArchiveHistory :: PGS.Connection -> [NodeId] -> IO [(Int, NgramsStatePatch')]
-- /!\ This function is using an hard coded parameter
-- which depends on the Ngrams List Flow
-- Version > 5 is hard coded because by default
-- first version of history of manual change is 6
getNodesArchiveHistory :: PGS.Connection
                       -> [NodeId]
                       -> IO [(NodeId, (Map NgramsType [HashMap NgramsTerm NgramsPatch]))]
getNodesArchiveHistory c nodesId = do
  as <- runPGSQuery c query (PGS.Only $ Values fields nodesId)
          :: IO [(Int, TableNgrams.NgramsType, NgramsTerm, NgramsPatch)]
  pure $ map (\(nId, ngramsType, terms, patch)
                -> (NodeId nId, Map.singleton ngramsType [HashMap.singleton terms patch])) as
  where
    fields = [QualifiedIdentifier Nothing "int4"]
@@ -396,9 +378,6 @@ getNodesArchiveHistory c nodesId = do
                  ORDER BY (version, node_story_archive_history.id) DESC |]

-- Version > 5 is hard coded because by default
-- first version of history of manual change is 6
ngramsIdQuery :: PGS.Query
ngramsIdQuery = [sql| SELECT id FROM ngrams WHERE terms = ? |]
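
getNodesArchiveHistory above passes its list of node ids through postgresql-simple's Values type, which expands into a VALUES table in the SQL, with QualifiedIdentifier Nothing "int4" supplying the column's SQL type. A self-contained sketch of that idiom follows; the table, columns and query are made up for illustration and are not the commit's query.

{-# LANGUAGE QuasiQuotes #-}
-- Hedged sketch of the Values / QualifiedIdentifier idiom used by
-- getNodesArchiveHistory.
module ValuesSketch where

import Database.PostgreSQL.Simple.SqlQQ (sql)
import Database.PostgreSQL.Simple.Types (Values(..), QualifiedIdentifier(..))
import qualified Database.PostgreSQL.Simple as PGS

-- Fetch rows whose id is in the given list by joining against a VALUES
-- table; the "int4" identifier types the VALUES column explicitly.
selectByIds :: PGS.Connection -> [Int] -> IO [(Int, String)]
selectByIds c ids =
  PGS.query c
            [sql| SELECT n.id, n.name
                  FROM nodes n
                  JOIN (?) AS ids(id) ON ids.id = n.id |]
            (PGS.Only $ Values [QualifiedIdentifier Nothing "int4"] (map PGS.Only ids))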
src/Gargantext/Core/Text/List/Social.hs

@@ -35,7 +35,7 @@ import Gargantext.Database.Query.Tree (NodeMode(Private), HasTreeError)
import Gargantext.Database.Schema.Ngrams (NgramsType)
import Gargantext.Prelude
import Web.Internal.HttpApiData (ToHttpApiData, FromHttpApiData, parseUrlPiece, toUrlPiece)
import qualified Data.List as List
import qualified Data.Map as Map
import qualified Data.Scientific as Scientific
import qualified Data.Text as T
@@ -50,8 +50,8 @@ import qualified Prelude
-- This parameter depends on the user choice
data FlowSocialListWith = FlowSocialListWithPriority { fslw_priority :: FlowSocialListPriority }
                        | FlowSocialListWithLists    { fslw_lists :: [ListId] }
                        | NoList                     { makeList :: Bool }
  deriving (Eq, Show, Generic)

instance FromJSON FlowSocialListWith where

@@ -61,7 +61,7 @@ instance FromJSON FlowSocialListWith where
    case typ of
      "MyListsFirst"    -> pure $ FlowSocialListWithPriority { fslw_priority = MySelfFirst }
      "OtherListsFirst" -> pure $ FlowSocialListWithPriority { fslw_priority = OthersFirst }
      "SelectedLists"   -> pure $ FlowSocialListWithLists { fslw_lists = value }
      "NoList"          -> pure $ NoList True
      _                 -> pure $ FlowSocialListWithPriority { fslw_priority = MySelfFirst }
  parseJSON _ = mzero
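
The start of parseJSON is collapsed in this hunk, so the exact object keys are not visible; only the case on typ and the value binding are. Under the assumption that the payload carries "type" and "value" fields, a self-contained parser with the same case analysis could look like the sketch below; every name here is a stand-in, not the Gargantext type.

{-# LANGUAGE OverloadedStrings #-}
-- Hedged sketch: the "type" and "value" keys are assumptions, and Priority /
-- Selection stand in for FlowSocialListPriority / FlowSocialListWith.
module FlowSocialListSketch where

import Data.Aeson
import Data.Text (Text)

data Priority = MySelfFirst | OthersFirst deriving (Eq, Show)

data Selection = WithPriority Priority
               | WithLists [Int]
               | NoList Bool
               deriving (Eq, Show)

instance FromJSON Selection where
  parseJSON = withObject "Selection" $ \o -> do
    typ   <- o .:  "type"            -- assumed field name
    value <- o .:? "value" .!= []    -- assumed field name, optional
    case (typ :: Text) of
      "MyListsFirst"    -> pure $ WithPriority MySelfFirst
      "OtherListsFirst" -> pure $ WithPriority OthersFirst
      "SelectedLists"   -> pure $ WithLists value
      "NoList"          -> pure $ NoList True
      _                 -> pure $ WithPriority MySelfFirst

-- e.g. decode "{\"type\":\"SelectedLists\",\"value\":[1,2]}" == Just (WithLists [1,2])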
@@ -113,9 +113,9 @@ flowSocialList :: ( HasNodeStory env err m
               -> FlowCont NgramsTerm FlowListScores
               -> m (FlowCont NgramsTerm FlowListScores)
flowSocialList Nothing u = flowSocialList' MySelfFirst u
flowSocialList (Just (FlowSocialListWithPriority p)) u = flowSocialList' p u
flowSocialList (Just (FlowSocialListWithLists ls)) _ = getHistoryScores ls
flowSocialList (Just (NoList _)) _u = panic "[G.C.T.L.Social] Should not be executed"

flowSocialList' :: ( HasNodeStory env err m
                   , CmdM env err m
@@ -182,7 +182,7 @@ getHistory :: ( HasNodeStory env err m
           -> m (Map ListId (Map NgramsType [HashMap NgramsTerm NgramsPatch]))
getHistory types listsId = do
  pool <- view connPool
  nsp <- liftBase $ withResource pool $ \c -> getNodesArchiveHistory c listsId
  pure $ Map.map (Map.filterWithKey (\k _ -> List.elem k types))
       $ Map.filterWithKey (\k _ -> List.elem k listsId)
       $ Map.fromListWith (Map.unionWith (<>)) nsp
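
getHistory merges the per-row singletons returned by getNodesArchiveHistory and then keeps only the requested list ids and ngrams types. The same merge-then-filter pipeline is sketched below on plain Int and String stand-ins, so it can be read and run independently of the Gargantext types.

-- Toy sketch of getHistory's merge-then-filter pipeline; Int and String
-- stand in for ListId, NgramsType and the patch values.
module HistorySketch where

import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map

groupAndFilter :: [String]                       -- wanted ngrams types
               -> [Int]                          -- wanted list ids
               -> [(Int, Map String [String])]   -- one singleton per DB row
               -> Map Int (Map String [String])
groupAndFilter types listsId rows =
    Map.map (Map.filterWithKey (\k _ -> k `elem` types))   -- keep wanted types
  $ Map.filterWithKey (\k _ -> k `elem` listsId)           -- keep wanted lists
  $ Map.fromListWith (Map.unionWith (<>)) rows              -- merge per-list singletons

-- e.g. groupAndFilter ["Terms"] [1]
--        [(1, Map.singleton "Terms" ["p1"]), (1, Map.singleton "Terms" ["p2"])]
-- groups both patches under list 1 and type "Terms".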