Przemyslaw Kaminski / haskell-gargantext

Commit 89ee8ad1
authored Oct 18, 2018 by Alexandre Delanoë
[DBFLOW] Add Node to Corpus/Annuaire, function without duplicates and with transactional force.
parent 670baca2
Showing 6 changed files with 149 additions and 54 deletions (+149 -54):

  src/Gargantext/API/Node.hs                        +2   -2
  src/Gargantext/Database/Facet.hs                  +7   -7
  src/Gargantext/Database/Flow.hs                   +5   -23
  src/Gargantext/Database/Node/Document/Add.hs      +96  -0
  src/Gargantext/Database/Node/Document/Insert.hs   +6   -6
  src/Gargantext/Database/NodeNode.hs               +33  -16
src/Gargantext/API/Node.hs

@@ -55,7 +55,7 @@ import Gargantext.Database.Node ( runCmd
                                 , getNode
                                 , getNodesWith
                                 , deleteNode, deleteNodes, mk, JSONB)
 import qualified Gargantext.Database.Node.Update as U (update, Update(..))
-import Gargantext.Database.Facet (FacetDoc, getDocFacet
+import Gargantext.Database.Facet (FacetDoc {-,getDocFacet-}
                                 , FacetChart)
 import Gargantext.Database.Tree (treeDB, HasTreeError(..), TreeError(..))

@@ -214,7 +214,7 @@ getNodesWith' conn id p nodeType offset limit = liftIO (getNodesWith conn id p
 getFacet :: Connection -> NodeId -> Maybe Int -> Maybe Int -> Handler [FacetDoc]
-getFacet conn id offset limit = liftIO (putStrLn ("/facet" :: Text)) >> liftIO (getDocFacet conn NodeCorpus id (Just NodeDocument) offset limit)
+getFacet conn id offset limit = undefined -- liftIO (putStrLn ( "/facet" :: Text)) >> liftIO (getDocFacet conn NodeCorpus id (Just NodeDocument) offset limit)

 getChart :: Connection -> NodeId -> Maybe UTCTime -> Maybe UTCTime -> Handler [FacetChart]
src/Gargantext/Database/Facet.hs

@@ -125,7 +125,7 @@ instance Arbitrary FacetChart where
 -----------------------------------------------------------------------
 {-
 getDocFacet :: Connection -> NodeType -> Int -> Maybe NodeType
             -> Maybe Offset -> Maybe Limit
             -> IO [FacetDoc]

@@ -145,14 +145,14 @@ selectDocFacet pType parentId maybeNodeType maybeOffset maybeLimit =
 nodeNodeLeftJoin :: Query (NodeRead, NodeNodeReadNull)
 nodeNodeLeftJoin = leftJoin queryNodeTable queryNodeNodeTable (eqNode)
   where
-    eqNode (Node n1 _ _ _ _ _ _, NodeNode _ n2 _) = ((.==) n1 n2)
+    eqNode (Node n1 _ _ _ _ _ _, NodeNode _ n2 _ _ _) = ((.==) n1 n2)

 nodeNodeLeftJoin' :: (Column (Nullable PGInt4))
                   -> Query (NodeRead, NodeNodeReadNull)
 nodeNodeLeftJoin' nId = leftJoin queryNodeTable queryNodeNodeTable (eqNode nId)
   where
-    eqNode n (Node n1 _ _ _ _ _ _, NodeNode n1' n2 _)
+    eqNode n (Node n1 _ _ _ _ _ _, NodeNode n1' n2 _ _ _)
         = foldl (.&&) (pgBool True) [ ((.==) n1 n2)
                                     , ((.==) n1' n)
                                     ]

@@ -160,7 +160,7 @@ nodeNodeLeftJoin' nId = leftJoin queryNodeTable queryNodeNodeTable (eqNode nId)
 nodeNodeLeftJoin'' :: Query (NodeRead, NodeRead, NodeNodeRead)
 nodeNodeLeftJoin'' = join3 queryNodeTable queryNodeTable queryNodeNodeTable eqNode
   where
-    eqNode (Node n1 _ _ _ _ _ _, Node n2 _ _ _ _ _ _, NodeNode n1' n2' _)
+    eqNode (Node n1 _ _ _ _ _ _, Node n2 _ _ _ _ _ _, NodeNode n1' n2' _ _ _)
         = foldl (.&&) (pgBool True) [ ((.==) n2 n2')
                                     , ((.==) (toNullable n1) n1')
                                     ]

@@ -208,11 +208,11 @@ leftJoin3' = leftJoin3 queryNodeTable queryNodeNodeNgramTable queryNodeTable co
 leftJoin3''' :: Query (NodeRead, (NodeNodeReadNull, NodeReadNull))
 leftJoin3''' = leftJoin3 queryNodeNodeTable queryNodeTable queryNodeTable cond12 cond23
   where
-    cond12 (NodeNode favId _ _, Node favId' _ _ _ _ _ _)
+    cond12 (NodeNode favId _ _ _ _, Node favId' _ _ _ _ _ _)
         = (.||) ((.==) favId (toNullable favId')) (isNull $ toNullable favId)

     cond23 :: (NodeRead, (NodeNodeRead, NodeReadNull)) -> Column PGBool
-    cond23 (Node nId _ _ _ _ _ _, (NodeNode _ nId' _, Node _ _ _ _ _ _ _ ))
+    cond23 (Node nId _ _ _ _ _ _, (NodeNode _ nId' _ _ _, Node _ _ _ _ _ _ _ ))
         = ((.==) (nId) (nId'))

@@ -233,4 +233,4 @@ selectDocFacet' _ pId _ = proc () -> do
   returnA -< FacetDoc (_node_id n1) (_node_date n1) (_node_hyperdata n1) (isFav) (pgInt4 1)
 -}
src/Gargantext/Database/Flow.hs

@@ -8,18 +8,10 @@ Stability   : experimental
 Portability : POSIX

-add :: Corpus -> [Documents] -> IO Int
-if new id -> extractNgrams + extract Authors + extract Sources
-Map (Ngrams, NodeId)
-insert Ngrams -> NgramsId
-Map (NgramsId, NodeId) -> insert
-
-data NgramsType = Sources | Authors | Terms
-nodes_ngrams : column type, column list

 documents
 sources
 authors

@@ -39,7 +31,8 @@ import Gargantext.Prelude
 import Gargantext.Database.Bashql (runCmd', del)
 import Gargantext.Database.Node (Cmd(..), getRoot, mkRoot, mkCorpus, defaultCorpus)
 import Gargantext.Database.User (getUser, UserLight(..), Username)
-import Gargantext.Database.Node.Document.Import (insertDocuments)
+import Gargantext.Database.Node.Document.Insert (insertDocuments, ReturnId(reId))
+import Gargantext.Database.Node.Document.Add (add)
 import Gargantext.Text.Parsers (parseDocs, FileFormat(WOS))

 type UserId = Int

@@ -76,7 +69,6 @@ flow fp = do
   (masterUserId, _, corpusId) <- subFlow "gargantua"

   docs <- parseDocs WOS fp
   ids  <- runCmd' $ insertDocuments masterUserId corpusId docs
   printDebug "Docs IDs : " ids

@@ -86,21 +78,12 @@ flow fp = do
   (userId, rootId, corpusId2) <- subFlow "alexandre"

-  runCmd' (del [corpusId])
+  inserted <- runCmd' $ add corpusId2 (map reId ids)
+  printDebug "Inserted : " inserted
+  -- runCmd' (del [corpusId2, corpusId])
 {-
 --folderId <- mk Folder parentId (Name "Data") (Descr "All corpora DATA here")
 folderId <- mk Folder rootId "Data"
 corpusId <- mk Corpus folderId (Name "WOS") (Descr "WOS database description")
 -}
 {-
 docs <- parseDocuments WOS "doc/.."
 ids <- add (Documents corpusId) docs
 user_id <- runCmd' (get RootUser "alexandre")

@@ -108,4 +91,3 @@ flow fp = do
 corpusId <- mk Corpus
 -}
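The updated flow is the first end-to-end use of the new Add module: documents are parsed once, inserted under the master user's corpus, and the ids returned by insertDocuments are then linked to a second corpus with add instead of the corpus being deleted. A minimal sketch of that call shape, with hypothetical user/corpus ids and assuming runCmd' runs a Cmd in IO as it does in flow:

import Gargantext.Database.Bashql (runCmd')
import Gargantext.Database.Node.Document.Insert (insertDocuments, ReturnId(reId))
import Gargantext.Database.Node.Document.Add (add)
import Gargantext.Text.Parsers (parseDocs, FileFormat(WOS))

-- Hypothetical ids standing in for the values subFlow returns in flow.
masterUserId, masterCorpusId, userCorpusId :: Int
masterUserId   = 1
masterCorpusId = 2
userCorpusId   = 3

-- Parse a WOS export, insert the documents once (duplicates are skipped by
-- the insert policy), then link the returned ids to the user's corpus.
indexFile :: FilePath -> IO ()
indexFile fp = do
  docs <- parseDocs WOS fp
  ids  <- runCmd' $ insertDocuments masterUserId masterCorpusId docs
  _    <- runCmd' $ add userCorpusId (map reId ids)
  pure ()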
src/Gargantext/Database/Node/Document/Add.hs (new file, mode 100644)

{-|
Module      : Gargantext.Database.Node.Document.Add
Description : Importing context of texts (documents)
Copyright   : (c) CNRS, 2017-Present
License     : AGPL + CECILL v3
Maintainer  : team@gargantext.org
Stability   : experimental
Portability : POSIX

Add Documents/Contact to a Corpus/Annuaire.

-}
------------------------------------------------------------------------
{-# LANGUAGE DeriveGeneric        #-}
{-# LANGUAGE NoImplicitPrelude    #-}
{-# LANGUAGE QuasiQuotes          #-}
{-# LANGUAGE DeriveDataTypeable   #-}
{-# LANGUAGE FlexibleInstances    #-}
{-# LANGUAGE TypeSynonymInstances #-}
------------------------------------------------------------------------
module Gargantext.Database.Node.Document.Add where

import Control.Lens (set)
import Data.Aeson (toJSON, Value)
import Data.ByteString.Internal (ByteString)
import Data.Maybe (maybe)
import Data.Typeable (Typeable)
import Database.PostgreSQL.Simple (Connection, FromRow, Query, formatQuery, query, Only(..))
import Database.PostgreSQL.Simple.FromRow (fromRow, field)
import Database.PostgreSQL.Simple.SqlQQ
import Database.PostgreSQL.Simple.ToField (toField)
import Database.PostgreSQL.Simple.ToRow (ToRow(..))
import Database.PostgreSQL.Simple.Types (Values(..), QualifiedIdentifier(..))

import Data.Text (Text)
import qualified Data.Text as DT (pack, unpack, concat)
import qualified Data.Digest.Pure.SHA as SHA (sha256, showDigest)
import qualified Data.ByteString.Lazy.Char8 as DC (pack)

import Gargantext.Database.Config (nodeTypeId)
import Gargantext.Database.Node (mkCmd, Cmd(..))
import Gargantext.Database.Types.Node
import Gargantext.Prelude

import GHC.Generics (Generic)
---------------------------------------------------------------------------

type ParentId = Int

add :: ParentId -> [NodeId] -> Cmd [Only Int]
add pId ns = mkCmd $ \c -> query c queryAdd (Only $ Values fields inputData)
  where
    fields    = map (\t -> QualifiedIdentifier Nothing t) inputSqlTypes
    inputData = prepare pId ns

add_debug :: ParentId -> [NodeId] -> Cmd ByteString
add_debug pId ns = mkCmd $ \c -> formatQuery c queryAdd (Only $ Values fields inputData)
  where
    fields    = map (\t -> QualifiedIdentifier Nothing t) inputSqlTypes
    inputData = prepare pId ns

-- | Input Tables: types of the tables
inputSqlTypes :: [Text]
inputSqlTypes = map DT.pack ["int4", "int4"]

-- | SQL query to add documents
-- TODO return id of added documents only
queryAdd :: Query
queryAdd = [sql|
       WITH input_rows(node1_id, node2_id) AS (?)
       INSERT INTO nodes_nodes (node1_id, node2_id)
       SELECT * FROM input_rows
       ON CONFLICT (node1_id, node2_id) DO NOTHING -- on unique index
       RETURNING 1
       ;
           |]

prepare :: ParentId -> [NodeId] -> [InputData]
prepare pId ns = map (\nId -> InputData pId nId) ns

------------------------------------------------------------------------
-- * Main Types used

data InputData = InputData { inNode1_id :: NodeId
                           , inNode2_id :: NodeId
                           } deriving (Show, Generic, Typeable)

instance ToRow InputData where
  toRow inputData = [ toField (inNode1_id inputData)
                    , toField (inNode2_id inputData)
                    ]
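queryAdd leans on PostgreSQL's ON CONFLICT (node1_id, node2_id) DO NOTHING over the table's unique index, so calling add twice with the same pairs creates no duplicate links, and the whole operation is a single statement, which keeps it transaction-friendly. A small usage sketch, with hypothetical node ids and assuming runCmd' from Gargantext.Database.Bashql runs a Cmd in IO (as Flow.hs does above):

import Gargantext.Database.Bashql (runCmd')
import Gargantext.Database.Node.Document.Add (add, add_debug)

-- Link document nodes 101 and 102 under parent node 42 (hypothetical ids),
-- printing the rendered SQL first for inspection.
linkDocs :: IO ()
linkDocs = do
  rendered <- runCmd' $ add_debug 42 [101, 102]  -- rendered query only, not executed
  print rendered
  rows     <- runCmd' $ add 42 [101, 102]        -- one "1" row per newly created link
  print (length rows)                            -- 0 on a second, identical call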
src/Gargantext/Database/Node/Document/Import.hs → src/Gargantext/Database/Node/Document/Insert.hs

 {-|
-Module      : Gargantext.Database.Node.Document.Import
+Module      : Gargantext.Database.Node.Document.Insert
 Description : Importing context of texts (documents)
 Copyright   : (c) CNRS, 2017-Present
 License     : AGPL + CECILL v3

@@ -22,8 +22,8 @@ to be unique, then shared, but how to respect privacy if needed ?
 * Methodology to get uniqueness and privacy by design

-As a consequence, when importing a new document in Gargantext, a policy
-for the uniqueness of the inserted docuemnts has to be defined.
+As a consequence, when importing/inserting a new document in Gargantext,
+a policy for the uniqueness of the inserted docuemnts has to be defined.

 That is the purpose of this module which defines its main concepts.

@@ -56,7 +56,7 @@ the concatenation of the parameters defined by @hashParameters@.
 {-# LANGUAGE FlexibleInstances    #-}
 {-# LANGUAGE TypeSynonymInstances #-}
 ------------------------------------------------------------------------
-module Gargantext.Database.Node.Document.Import where
+module Gargantext.Database.Node.Document.Insert where

 import Control.Lens (set)

@@ -113,8 +113,8 @@ insertDocuments uId pId hs = mkCmd $ \c -> query c queryInsert (Only $ Values fi
 -- | Debug SQL function
 --
 -- to print rendered query (Debug purpose) use @formatQuery@ function.
-insertDocuments_Debug :: Connection -> UserId -> ParentId -> [HyperdataDocument] -> IO ByteString
-insertDocuments_Debug conn uId pId hs = formatQuery conn queryInsert (Only $ Values fields inputData)
+insertDocuments_Debug :: UserId -> ParentId -> [HyperdataDocument] -> Cmd ByteString
+insertDocuments_Debug uId pId hs = mkCmd $ \conn -> formatQuery conn queryInsert (Only $ Values fields inputData)
   where
     fields    = map (\t -> QualifiedIdentifier Nothing t) inputSqlTypes
     inputData = prepare uId pId hs
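The hunks above keep the module's key idea: a document's identity is the hash of the concatenation of the parameters listed by @hashParameters@, which is what lets inserts skip duplicates. A minimal sketch of that hash, assuming the caller supplies the selected field values (the actual field list lives in Insert.hs) and using the same SHA-256 helpers these modules import:

import Data.Text (Text)
import qualified Data.Text as DT (pack, unpack, concat)
import qualified Data.Digest.Pure.SHA as SHA (sha256, showDigest)
import qualified Data.ByteString.Lazy.Char8 as DC (pack)

-- Identity of a document: SHA-256 of the concatenated parameter values.
uniqId :: [Text] -> Text
uniqId = DT.pack . SHA.showDigest . SHA.sha256 . DC.pack . DT.unpack . DT.concat

-- e.g. uniqId ["Some title", "2018", "WOS"] always yields the same digest,
-- so re-importing the same document maps to the same identifier.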
src/Gargantext/Database/NodeNode.hs

@@ -22,7 +22,7 @@ commentary with @some markup@.
 module Gargantext.Database.NodeNode where

-import Prelude
+import Gargantext.Prelude
 import Data.Maybe (Maybe)
 import Data.Profunctor.Product.TH (makeAdaptorAndInstance)
 import Control.Lens.TH (makeLensesWith, abbreviatedFields)

@@ -31,33 +31,44 @@ import qualified Database.PostgreSQL.Simple as PGS
 import Opaleye

-data NodeNodePoly node1_id node2_id score
+data NodeNodePoly node1_id node2_id score fav del
                   = NodeNode { nodeNode_node1_id :: node1_id
                              , nodeNode_node2_id :: node2_id
                              , nodeNode_score    :: score
+                             , nodeNode_favorite :: fav
+                             , nodeNode_delete   :: del
                              } deriving (Show)

-type NodeNodeWrite = NodeNodePoly (Column (Nullable PGInt4)) (Column (PGInt4))
+type NodeNodeWrite = NodeNodePoly (Column (PGInt4))
+                                  (Column (PGInt4))
+                                  (Maybe (Column (PGFloat8)))
+                                  (Maybe (Column (PGBool)))
+                                  (Maybe (Column (PGBool)))

-type NodeNodeRead = NodeNodePoly (Column (Nullable PGInt4)) (Column (PGInt4)) (Column (Nullable PGFloat8))
+type NodeNodeRead = NodeNodePoly (Column (PGInt4))
+                                 (Column (PGInt4))
+                                 (Column (PGFloat8))
+                                 (Column (PGBool))
+                                 (Column (PGBool))

-type NodeNodeReadNull = NodeNodePoly (Column (Nullable PGInt4)) (Column (Nullable PGInt4)) (Column (Nullable PGFloat8))
+type NodeNodeReadNull = NodeNodePoly (Column (Nullable PGInt4))
+                                     (Column (Nullable PGInt4))
+                                     (Column (Nullable PGFloat8))
+                                     (Column (Nullable PGBool))
+                                     (Column (Nullable PGBool))

-type NodeNode = NodeNodePoly Int Int (Maybe Double)
+type NodeNode = NodeNodePoly Int Int (Maybe Double) (Maybe Bool) (Maybe Bool)

 $(makeAdaptorAndInstance "pNodeNode" ''NodeNodePoly)
 $(makeLensesWith abbreviatedFields  ''NodeNodePoly)

 nodeNodeTable :: Table NodeNodeWrite NodeNodeRead
 nodeNodeTable = Table "nodes_nodes" (pNodeNode NodeNode { nodeNode_node1_id = required "node1_id"
                                                         , nodeNode_node2_id = required "node2_id"
-                                                        , nodeNode_score    = required "score"
+                                                        , nodeNode_score    = optional "score"
+                                                        , nodeNode_favorite = optional "favorite"
+                                                        , nodeNode_delete   = optional "delete"
                                                         }
                                     )

@@ -75,4 +86,10 @@ instance QueryRunnerColumnDefault (Nullable PGInt4) Int where
+instance QueryRunnerColumnDefault PGFloat8 (Maybe Double) where
+    queryRunnerColumnDefault = fieldQueryRunnerColumn
+
+instance QueryRunnerColumnDefault PGBool (Maybe Bool) where
+    queryRunnerColumnDefault = fieldQueryRunnerColumn
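With the two new columns and their QueryRunnerColumnDefault instances in place, queries over nodes_nodes can filter on favorite/delete directly. A sketch of such a query, assuming the Opaleye API of that era (queryTable, restrict, arrow notation) and the read-side types as reconstructed above:

{-# LANGUAGE Arrows #-}

import Control.Arrow (returnA)
import Opaleye
import Gargantext.Database.NodeNode

-- Ids of the nodes linked under a given parent and flagged as favorite.
favoritesOf :: Column PGInt4 -> Query (Column PGInt4)
favoritesOf parentId = proc () -> do
  nn <- queryTable nodeNodeTable -< ()
  restrict -< nodeNode_node1_id nn .== parentId
  restrict -< nodeNode_favorite nn .== pgBool True
  returnA  -< nodeNode_node2_id nn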