gargantext / haskell-gargantext · Commits

Commit e08e94f9
authored Nov 29, 2019 by Alexandre Delanoë
[PAIR][WIP] NodeNgrams -> NodeNodeNgrams, needs tests.
parent 8a83ba4e
Showing 18 changed files with 400 additions and 253 deletions.
  src/Gargantext/API.hs                               +2    -1
  src/Gargantext/API/Node.hs                          +2    -2
  src/Gargantext/API/Search.hs                        +7    -4
  src/Gargantext/API/Table.hs                         +3    -2
  src/Gargantext/Database/Facet.hs                    +29   -24
  src/Gargantext/Database/Flow.hs                     +83   -41
  src/Gargantext/Database/Flow/Pairing.hs             +38   -32
  src/Gargantext/Database/Flow/Utils.hs               +15   -18
  src/Gargantext/Database/Ngrams.hs                   +6    -5
  src/Gargantext/Database/Node/Children.hs            +15   -5
  src/Gargantext/Database/Node/Document/Insert.hs     +14   -14
  src/Gargantext/Database/Schema/Ngrams.hs            +8    -7
  src/Gargantext/Database/Schema/Node.hs              +1    -4
  src/Gargantext/Database/Schema/NodeNode.hs          +19   -19
  src/Gargantext/Database/Schema/NodeNodeNgrams.hs    +15   -15
  src/Gargantext/Database/TextSearch.hs               +141  -58
  src/Gargantext/Database/Types/Node.hs               +1    -1
  src/Gargantext/Prelude/Utils.hs                     +1    -1
src/Gargantext/API.hs
@@ -266,7 +266,8 @@ type GargPrivateAPI' =
    :> ReqBody '[JSON] Query :> CountAPI
  -- Corpus endpoint --> TODO rename s/search/filter/g
  :<|> "search" :> Capture "corpus" NodeId :> SearchPairsAPI
  -- TODO move to NodeAPI?
  :<|> "graph" :> Summary "Graph endpoint"
src/Gargantext/API/Node.hs
@@ -131,7 +131,7 @@ type NodeAPI a = Get '[JSON] (Node a)
  -- TODO gather it
  :<|> "table"    :> TableApi
  :<|> "ngrams"   :> TableNgramsApi
- :<|> "pairing"  :> PairingApi
+ -- :<|> "pairing" :> PairingApi
  :<|> "category" :> CatApi
  :<|> "search"   :> SearchDocsAPI

@@ -187,7 +187,7 @@ nodeAPI p uId id = withAccess (Proxy :: Proxy (NodeAPI a)) Proxy uId (PathNode i
  -- TODO gather it
  :<|> tableApi             id
  :<|> apiNgramsTableCorpus id
- :<|> getPairing           id
+ -- :<|> getPairing        id
  -- :<|> getTableNgramsDoc id
  :<|> catApi               id
src/Gargantext/API/Search.hs
@@ -65,7 +65,8 @@ instance Arbitrary SearchDocResults where
  instance ToSchema SearchDocResults where
    declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "sdr_")

  data SearchPairedResults = SearchPairedResults { spr_results :: [FacetPaired Int UTCTime HyperdataDocument Int [Pair Int Text]] }
    deriving (Generic)
  $(deriveJSON (unPrefix "spr_") ''SearchPairedResults)

@@ -87,12 +88,14 @@ type SearchAPI results
    :> Post '[JSON] results

  type SearchDocsAPI  = SearchAPI SearchDocResults
- type SearchPairsAPI = SearchAPI SearchPairedResults
+ type SearchPairsAPI = Summary "" :> "list" :> Capture "list" ListId :> SearchAPI SearchPairedResults
  -----------------------------------------------------------------------

  searchPairs :: NodeId -> GargServer SearchPairsAPI
- searchPairs pId (SearchQuery q) o l order =
-   SearchPairedResults <$> searchInCorpusWithContacts pId q o l order
+ searchPairs pId lId (SearchQuery q) o l order =
+   SearchPairedResults <$> searchInCorpusWithContacts pId lId q o l order

  searchDocs :: NodeId -> GargServer SearchDocsAPI
  searchDocs nId (SearchQuery q) o l order =
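The visible change above threads a list through the pairing search: SearchPairsAPI gains a "list" path segment with a Capture "list" ListId, and searchPairs correspondingly receives an extra lId argument. A minimal, self-contained servant sketch of that pattern (the route and handler names here are made up for illustration and are not the project's real API):

    {-# LANGUAGE DataKinds, TypeOperators #-}
    module CaptureSketch where

    import Servant

    type ListId = Int   -- stand-in for the project's ListId

    -- Before: no capture, so the handler takes no extra argument.
    type OldSearch = "search" :> Get '[JSON] [String]

    -- After: a Capture in the route becomes one more handler parameter.
    type NewSearch = "search" :> "list" :> Capture "list" ListId :> Get '[JSON] [String]

    oldServer :: Server OldSearch
    oldServer = pure []

    newServer :: Server NewSearch
    newServer lId = pure ["searching with list " <> show lId]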
src/Gargantext/API/Table.hs
@@ -46,7 +46,7 @@ import GHC.Generics (Generic)
  import Gargantext.API.Ngrams (TabType(..))
  import Gargantext.Core.Types (Offset, Limit)
  import Gargantext.Core.Utils.Prefix (unPrefix, unPrefixSwagger)
- import Gargantext.Database.Facet (FacetDoc, runViewDocuments, OrderBy(..), runViewAuthorsDoc)
+ import Gargantext.Database.Facet (FacetDoc, runViewDocuments, OrderBy(..))
  import Gargantext.Database.Learn (FavOrTrash(..), moreLike)
  import Gargantext.Database.TextSearch
  import Gargantext.Database.Types.Node

@@ -97,6 +97,7 @@ getTable cId ft o l order =
    (Just MoreTrash) -> moreLike cId o l order IsTrash
    x                -> panic $ "not implemented in getTable: " <> (cs $ show x)

+ {-
  getPairing :: ContactId -> Maybe TabType
             -> Maybe Offset -> Maybe Limit
             -> Maybe OrderBy -> Cmd err [FacetDoc]

@@ -106,4 +107,4 @@ getPairing cId ft o l order =
    (Just Trash) -> runViewAuthorsDoc cId True o l order
    _            -> panic $ "not implemented: get Pairing" <> (cs $ show ft)
  -}
src/Gargantext/Database/Facet.hs
@@ -26,8 +26,8 @@ Portability : POSIX
  {-# LANGUAGE TypeFamilies #-}
  ------------------------------------------------------------------------
  module Gargantext.Database.Facet
-   ( runViewAuthorsDoc
-   , runViewDocuments
+   ( -- runViewAuthorsDoc
+     runViewDocuments
    , filterWith
    , Pair(..)

@@ -41,6 +41,7 @@ module Gargantext.Database.Facet
    where
  ------------------------------------------------------------------------
  import Control.Arrow (returnA)
+ import Control.Lens ((^.))
  -- import Control.Lens.TH (makeLensesWith, abbreviatedFields)
  import Data.Aeson (FromJSON, ToJSON)
  import Data.Aeson.TH (deriveJSON)

@@ -57,7 +58,6 @@ import Gargantext.Core.Utils.Prefix (unPrefix, unPrefixSwagger)
  import Gargantext.Database.Config (nodeTypeId)
  import Gargantext.Database.Schema.Ngrams
  import Gargantext.Database.Schema.Node
- import Gargantext.Database.Schema.NodeNgram
  import Gargantext.Database.Schema.NodeNode
  import Gargantext.Database.Utils
  import Gargantext.Database.Queries.Join

@@ -115,36 +115,39 @@ instance (ToSchema i, ToSchema l) => ToSchema (Pair i l) where
  instance (Arbitrary i, Arbitrary l) => Arbitrary (Pair i l) where
    arbitrary = Pair <$> arbitrary <*> arbitrary

- data FacetPaired id date hyperdata score pairs =
+ data FacetPaired id date hyperdata score pair =
    FacetPaired { _fp_id        :: id
                , _fp_date      :: date
                , _fp_hyperdata :: hyperdata
                , _fp_score     :: score
-               , _fp_pairs     :: pairs
+               , _fp_pair      :: pair
                } deriving (Show, Generic)
  $(deriveJSON (unPrefix "_fp_") ''FacetPaired)
  $(makeAdaptorAndInstance "pFacetPaired" ''FacetPaired)

- instance (ToSchema id, ToSchema date, ToSchema hyperdata, ToSchema pairs, ToSchema score) => ToSchema (FacetPaired id date hyperdata score pairs) where
+ instance (ToSchema id, ToSchema date, ToSchema hyperdata, ToSchema score, ToSchema pair) => ToSchema (FacetPaired id date hyperdata score pair) where
    declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_fp_")

  instance ( Arbitrary id
           , Arbitrary date
           , Arbitrary hyperdata
           , Arbitrary score
-          , Arbitrary pairs
-          ) => Arbitrary (FacetPaired id date hyperdata score pairs) where
+          , Arbitrary pair
+          ) => Arbitrary (FacetPaired id date hyperdata score pair) where
    arbitrary = FacetPaired <$> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary

  --{-
  type FacetPairedRead = FacetPaired (Column PGInt4)
                                     (Column PGTimestamptz)
                                     (Column PGJsonb)
                                     (Column PGInt4)
-                                    (Pair (Column (Nullable PGInt4)) (Column (Nullable PGText)))
+                                    ( Column (Nullable PGInt4)
+                                    , Column (Nullable PGText)
+                                    )
  --}

  -- | JSON instance
  $(deriveJSON (unPrefix "facetDoc_") ''Facet)

@@ -206,6 +209,8 @@ instance Arbitrary OrderBy
  -- TODO-SECURITY check
+ {-
  runViewAuthorsDoc :: ContactId -> IsTrash -> Maybe Offset -> Maybe Limit -> Maybe OrderBy -> Cmd err [FacetDoc]
  runViewAuthorsDoc cId t o l order = runOpaQuery $ filterWith o l order $ viewAuthorsDoc cId t ntId
    where

@@ -234,16 +239,16 @@ queryAuthorsDoc = leftJoin5 queryNodeTable queryNodeNgramTable queryNgramsTable
        .== nng_node_id nodeNgram
    cond23 :: (NgramsRead, (NodeNgramRead, NodeReadNull)) -> Column PGBool
-   cond23 (ngrams, (nodeNgram, _)) = ngrams_id ngrams
+   cond23 (ngrams, (nodeNgram, _)) = ngrams ^. ngrams_id
        .== nng_ngrams_id nodeNgram
    cond34 :: (NodeNgramRead, (NgramsRead, (NodeNgramReadNull, NodeReadNull))) -> Column PGBool
-   cond34 (nodeNgram2, (ngrams, (_,_))) = ngrams_id ngrams      .== nng_ngrams_id nodeNgram2
+   cond34 (nodeNgram2, (ngrams, (_,_))) = ngrams ^. ngrams_id   .== nng_ngrams_id nodeNgram2
    cond45 :: (NodeRead, (NodeNgramRead, (NgramsReadNull, (NodeNgramReadNull, NodeReadNull)))) -> Column PGBool
    cond45 (contact, (nodeNgram2, (_, (_,_)))) = _node_id contact .== nng_node_id nodeNgram2
+ -}
  ------------------------------------------------------------------------
  -- TODO-SECURITY check

@@ -257,12 +262,12 @@ viewDocuments :: CorpusId -> IsTrash -> NodeTypeId -> Query FacetDocRead
  viewDocuments cId t ntId = proc () -> do
    n  <- queryNodeTable     -< ()
    nn <- queryNodeNodeTable -< ()
-   restrict -< _node_id n        .== nn_node2_id nn
+   restrict -< n ^. node_id      .== nn ^. nn_node2_id
-   restrict -< nn_node1_id nn    .== (pgNodeId cId)
+   restrict -< nn ^. nn_node1_id .== (pgNodeId cId)
-   restrict -< _node_typename n  .== (pgInt4 ntId)
+   restrict -< n ^. node_typename .== (pgInt4 ntId)
-   restrict -< if t then nn_category nn    .== (pgInt4 0)
-                    else nn_category nn    .>= (pgInt4 1)
+   restrict -< if t then nn ^. nn_category .== (pgInt4 0)
+                    else nn ^. nn_category .>= (pgInt4 1)
-   returnA  -< FacetDoc (_node_id n) (_node_date n) (_node_name n) (_node_hyperdata n) (toNullable $ nn_category nn) (toNullable $ nn_score nn)
+   returnA  -< FacetDoc (_node_id n) (_node_date n) (_node_name n) (_node_hyperdata n) (toNullable $ nn ^. nn_category) (toNullable $ nn ^. nn_score)
  ------------------------------------------------------------------------
src/Gargantext/Database/Flow.hs
(diff collapsed on the original page; changes not shown)
src/Gargantext/Database/Flow/Pairing.hs
@@ -33,21 +33,22 @@ import Data.Text (Text, toLower)
  import qualified Data.Text as DT
  import Gargantext.Prelude hiding (sum)
  import Gargantext.Database.Schema.Ngrams -- (NgramsType(..))
  --import Gargantext.Database.Node.Contact -- (HyperdataContact(..))
  import Gargantext.Database.Node.Contact -- (HyperdataContact(..))
  --import Gargantext.Database.Types.Node -- (Hyperdata(..))
  import Gargantext.Database.Node.Contact
  import Gargantext.Database.Flow.Utils
  import Gargantext.Database.Utils (Cmd, runPGSQuery)
- import Gargantext.Database.Types.Node (AnnuaireId, CorpusId)
+ import Gargantext.Database.Types.Node (AnnuaireId, CorpusId, ListId)
- import Gargantext.Database.Node.Children
+ import Gargantext.Database.Node.Children (getContacts)
  import Gargantext.Core.Types (NodeType(..))

  -- TODO mv this type in Types Main
  type Terms = Text

  -- | TODO : add paring policy as parameter
- pairing :: AnnuaireId -> CorpusId -> Cmd err Int
- pairing aId cId = do
+ pairing :: AnnuaireId
+         -> CorpusId
+         -> ListId
+         -> Cmd err Int
+ pairing aId cId lId = do
    contacts' <- getContacts aId (Just NodeContact)
    let contactsMap = pairingPolicyToMap toLower $ toMaps extractNgramsT contacts'

@@ -56,31 +57,34 @@ pairing aId cId = do
    let indexedNgrams = pairMaps contactsMap ngramsMap
-   insertToNodeNgrams indexedNgrams
+   insertDocNgrams lId indexedNgrams
+   -- TODO add List

  lastName :: Terms -> Terms
  lastName texte = DT.toLower $ maybe texte (\x -> if DT.length x > 3 then x else texte) (lastName' texte)
    where
      lastName' = lastMay . DT.splitOn " "

- -- TODO: this methods is dangerous (maybe equalities of the result are not taken into account
+ -- TODO: this method is dangerous (maybe equalities of the result are not taken into account
  -- emergency demo plan...
  )
  pairingPolicyToMap :: (Terms -> Terms)
                     -> Map (NgramsT Ngrams) a
                     -> Map (NgramsT Ngrams) a
  pairingPolicyToMap f = DM.mapKeys (pairingPolicy f)

  pairingPolicy :: (Terms -> Terms) -> NgramsT Ngrams -> NgramsT Ngrams
  pairingPolicy f (NgramsT nt (Ngrams ng _)) = (NgramsT nt (Ngrams (f ng) 1))

  -- | TODO : use Occurrences in place of Int
  extractNgramsT :: HyperdataContact -> Map (NgramsT Ngrams) Int
  extractNgramsT contact = fromList [(NgramsT Authors a', 1) | a' <- authors]
    where
      authors = map text2ngrams $ catMaybes [view (hc_who . _Just . cw_lastName) contact]
  --}

  -- NP: notice how this function is no longer specific to the ContactId type
  pairMaps :: Map (NgramsT Ngrams) a
           -> Map (NgramsT Ngrams) NgramsId
           -> Map NgramsIndexed (Map NgramsType a)

@@ -92,23 +96,27 @@ pairMaps m1 m2 =
    ]
  -----------------------------------------------------------------------
  getNgramsTindexed :: CorpusId -> NgramsType -> Cmd err (Map (NgramsT Ngrams) NgramsId)
  getNgramsTindexed corpusId ngramsType' = fromList
    <$> map (\(ngramsId',t,n) -> (NgramsT ngramsType' (Ngrams t n), ngramsId'))
    <$> selectNgramsTindexed corpusId ngramsType'

- selectNgramsTindexed :: CorpusId -> NgramsType -> Cmd err [(NgramsId, Terms, Int)]
- selectNgramsTindexed corpusId ngramsType'' = runPGSQuery selectQuery (corpusId, ngramsTypeId ngramsType'')
-   where
-     selectQuery = [sql| SELECT n.id,n.terms,n.n from ngrams n
-                         JOIN nodes_ngrams occ ON occ.ngram_id = n.id
-                         JOIN nodes_nodes nn ON nn.node2_id = occ.node_id
-                         WHERE nn.node1_id = ?
-                           AND occ.ngrams_type = ?
-                           AND occ.node_id = nn.node2_id
-                         GROUP BY n.id;
-                    |]
+ selectNgramsTindexed :: CorpusId
+                      -> NgramsType
+                      -> Cmd err [(NgramsId, Terms, Int)]
+ selectNgramsTindexed corpusId' ngramsType'' = runPGSQuery selectQuery (corpusId', ngramsTypeId ngramsType'')
+   where
+     selectQuery = [sql| SELECT n.id,n.terms,n.n from ngrams n
+                         JOIN node_node_ngrams occ ON occ.ngrams_id = n.id
+                         JOIN nodes_nodes nn ON nn.node2_id = occ.node2_id
+                         WHERE nn.node1_id = ?
+                           AND occ.ngrams_type = ?
+                           AND occ.node2_id = nn.node2_id
+                         GROUP BY n.id;
+                    |]

  {- | TODO more typed SQL queries
  selectNgramsTindexed :: CorpusId -> NgramsType -> Query NgramsRead

@@ -124,5 +132,3 @@ selectNgramsTindexed corpusId ngramsType = proc () -> do
    result <- aggregate groupBy (ngrams_id ngrams)
    returnA -< result
  --}
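The rewritten selectNgramsTindexed query keeps two positional "?" placeholders, filled in order from the (corpusId', ngramsTypeId ngramsType'') tuple. A standalone sketch of how such a query runs with postgresql-simple (illustrative only; the project's real code goes through its runPGSQuery wrapper inside the Cmd monad):

    {-# LANGUAGE QuasiQuotes #-}
    module SelectSketch where

    import Database.PostgreSQL.Simple (Connection, query)
    import Database.PostgreSQL.Simple.SqlQQ (sql)

    -- Each "?" is substituted, in order, from the parameter tuple.
    selectNgramsIndexed :: Connection -> Int -> Int -> IO [(Int, String, Int)]
    selectNgramsIndexed conn corpusId ngramsTypeId =
      query conn
        [sql| SELECT n.id, n.terms, n.n
              FROM ngrams n
              JOIN node_node_ngrams occ ON occ.ngrams_id = n.id
              JOIN nodes_nodes nn ON nn.node2_id = occ.node2_id
              WHERE nn.node1_id = ?
                AND occ.ngrams_type = ?
                AND occ.node2_id = nn.node2_id
              GROUP BY n.id |]
        (corpusId, ngramsTypeId)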
src/Gargantext/Database/Flow/Utils.hs
@@ -22,10 +22,8 @@ import Gargantext.Prelude
  import Gargantext.Database.Schema.Ngrams
  import Gargantext.Database.Types.Node (NodeId, Node, NodePoly(..), Hyperdata)
  import Gargantext.Database.Utils (Cmd)
- import Gargantext.Database.Schema.NodeNgram
  import Gargantext.Database.Schema.NodeNodeNgrams
  import Gargantext.Database.Types.Node
- import Gargantext.Core.Types.Main (ListType(..), listTypeId)

  toMaps :: Hyperdata a => (a -> Map (NgramsT Ngrams) Int) -> [Node a] -> Map (NgramsT Ngrams) (Map NodeId Int)
  toMaps fun ns = mapNodeIdNgrams $ documentIdWithNgrams fun ns'

@@ -39,8 +37,10 @@ mapNodeIdNgrams ds = DM.map (DM.fromListWith (+)) $ DM.fromListWith (<>) xs
    n2i = map (\d -> ((documentId . documentWithId) d, document_ngrams d))

- documentIdWithNgrams :: Hyperdata a => (a -> Map (NgramsT Ngrams) Int)
-                      -> [DocumentWithId a] -> [DocumentIdWithNgrams a]
+ documentIdWithNgrams :: Hyperdata a
+                      => (a -> Map (NgramsT Ngrams) Int)
+                      -> [DocumentWithId a] -> [DocumentIdWithNgrams a]
  documentIdWithNgrams f = map (\d -> DocumentIdWithNgrams d ((f . documentData) d))

@@ -56,19 +56,12 @@ data DocumentIdWithNgrams a =
    , document_ngrams :: Map (NgramsT Ngrams) Int
    } deriving (Show)

- -- | TODO for now, list Type is CandidateTerm because Graph Terms
- -- have to be detected in next step in the flow
- -- TODO remvoe this
- insertToNodeNgrams :: Map NgramsIndexed (Map NgramsType (Map NodeId Int)) -> Cmd err Int
- insertToNodeNgrams m = insertNodeNgrams [ NodeNgram n (_ngramsId ng) Nothing (ngramsTypeId t) (listTypeId CandidateTerm) (fromIntegral i)
-                                         | (ng, t2n2i) <- DM.toList m
-                                         , (t,  n2i)   <- DM.toList t2n2i
-                                         , (n,  i)     <- DM.toList n2i
-                                         ]

- docNgrams2nodeNodeNgrams :: CorpusId -> DocNgrams -> NodeNodeNgrams
+ docNgrams2nodeNodeNgrams :: CorpusId
+                          -> DocNgrams
+                          -> NodeNodeNgrams
  docNgrams2nodeNodeNgrams cId (DocNgrams d n nt w) = NodeNodeNgrams Nothing cId d n nt w

  data DocNgrams = DocNgrams { dn_doc_id    :: DocId
                             , dn_ngrams_id :: Int

@@ -76,10 +69,14 @@ data DocNgrams = DocNgrams { dn_doc_id :: DocId
                             , dn_weight    :: Double
                             }

  insertDocNgramsOn :: CorpusId -> [DocNgrams] -> Cmd err Int
  insertDocNgramsOn cId dn = insertNodeNodeNgrams $ (map (docNgrams2nodeNodeNgrams cId) dn)

  insertDocNgrams :: CorpusId -> Map NgramsIndexed (Map NgramsType (Map NodeId Int)) -> Cmd err Int
  insertDocNgrams cId m = insertDocNgramsOn cId [ DocNgrams n (_ngramsId ng) (ngramsTypeId t) (fromIntegral i)
                                                | (ng, t2n2i) <- DM.toList m
                                                , (t,  n2i)   <- DM.toList t2n2i
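The list comprehension in insertDocNgrams flattens a doubly nested Map into one DocNgrams row per innermost entry, which insertDocNgramsOn then converts and writes to node_node_ngrams. A small standalone sketch of just that flattening step (generic names, not the project's types):

    module FlattenSketch where

    import qualified Data.Map as DM

    -- One output tuple per (outer key, middle key, inner key, value) path
    -- through the nested maps; mirrors the comprehension in insertDocNgrams.
    flattenNested :: DM.Map k1 (DM.Map k2 (DM.Map k3 v)) -> [(k1, k2, k3, v)]
    flattenNested m =
      [ (a, b, c, v)
      | (a, mb) <- DM.toList m
      , (b, mc) <- DM.toList mb
      , (c, v)  <- DM.toList mc
      ]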
src/Gargantext/Database/Ngrams.hs
@@ -18,6 +18,7 @@ module Gargantext.Database.Ngrams
    where

  import Data.Text (Text)
+ import Control.Lens ((^.))
  import Gargantext.Core.Types
  import Gargantext.Database.Utils (runOpaQuery, Cmd)
  import Gargantext.Database.Schema.Ngrams

@@ -34,14 +35,14 @@ selectNgramsByDoc cIds dId nt = runOpaQuery (query cIds dId nt)
    join :: Query (NgramsRead, NodeNodeNgramsReadNull)
    join = leftJoin queryNgramsTable queryNodeNodeNgramsTable on1
      where
-       on1 (ng,nnng) = ngrams_id ng .== nnng_ngrams_id nnng
+       on1 (ng,nnng) = ng ^. ngrams_id .== nnng ^. nnng_ngrams_id

    query cIds' dId' nt' = proc () -> do
      (ng,nnng) <- join -< ()
-     restrict -< foldl (\b cId -> ((toNullable $ pgNodeId cId) .== nnng_node1_id nnng) .|| b) (pgBool True) cIds'
+     restrict -< foldl (\b cId -> ((toNullable $ pgNodeId cId) .== nnng ^. nnng_node1_id) .|| b) (pgBool True) cIds'
-     restrict -< (toNullable $ pgNodeId dId')    .== nnng_node2_id nnng
+     restrict -< (toNullable $ pgNodeId dId')    .== nnng ^. nnng_node2_id
-     restrict -< (toNullable $ pgNgramsType nt') .== nnng_ngramsType nnng
+     restrict -< (toNullable $ pgNgramsType nt') .== nnng ^. nnng_ngramsType
-     returnA  -< ngrams_terms ng
+     returnA  -< ng ^. ngrams_terms

  postNgrams :: CorpusId -> DocId -> [Text] -> Cmd err Int
src/Gargantext/Database/Node/Children.hs
@@ -30,24 +30,34 @@ import Gargantext.Database.Schema.Node (pgNodeId)
  import Control.Arrow (returnA)

  -- | TODO: use getChildren with Proxy ?
  getContacts :: ParentId -> Maybe NodeType -> Cmd err [Node HyperdataContact]
  getContacts pId maybeNodeType = runOpaQuery $ selectChildren pId maybeNodeType

  getChildren :: JSONB a => ParentId -> proxy a -> Maybe NodeType -> Maybe Offset -> Maybe Limit -> Cmd err [Node a]
  getChildren pId _ maybeNodeType maybeOffset maybeLimit = runOpaQuery
    $ limit' maybeLimit $ offset' maybeOffset
    $ orderBy (asc _node_id)
    $ selectChildren pId maybeNodeType

  selectChildren :: ParentId -> Maybe NodeType -> Query NodeRead
  selectChildren parentId maybeNodeType = proc () -> do
      row@(Node nId typeName _ parent_id _ _ _) <- queryNodeTable -< ()
      (NodeNode n1id n2id _ _) <- queryNodeNodeTable -< ()

      let nodeType = maybe 0 nodeTypeId maybeNodeType
      restrict -< typeName .== pgInt4 nodeType

      restrict -< (.||) (parent_id .== (pgNodeId parentId))
                        ((.&&) (n1id .== pgNodeId parentId)
                               (n2id .== nId))
src/Gargantext/Database/Node/Document/Insert.hs
@@ -186,7 +186,7 @@ queryInsert = [sql|
  data ReturnId = ReturnId { reInserted :: Bool   -- if the document is inserted (True: is new, False: is not new)
                           , reId       :: NodeId -- always return the id of the document (even new or not new)
                                                  -- this is the uniq id in the database
-                          , reUniqId   :: Text   -- Hash Id with concatenation of hash parameters
+                          , reUniqId   :: Text   -- Hash Id with concatenation of sha parameters
                           } deriving (Show, Generic)

  instance FromRow ReturnId where

@@ -204,14 +204,14 @@ instance AddUniqId HyperdataDocument
    addUniqId = addUniqIdsDoc
      where
        addUniqIdsDoc :: HyperdataDocument -> HyperdataDocument
-       addUniqIdsDoc doc = set hyperdataDocument_uniqIdBdd (Just hashBdd)
-                         $ set hyperdataDocument_uniqId    (Just hashUni) doc
+       addUniqIdsDoc doc = set hyperdataDocument_uniqIdBdd (Just shaBdd)
+                         $ set hyperdataDocument_uniqId    (Just shaUni) doc
          where
-           hashUni = hash $ DT.concat $ map ($ doc) hashParametersDoc
-           hashBdd = hash $ DT.concat $ map ($ doc) ([(\d -> maybeText (_hyperdataDocument_bdd d))] <> hashParametersDoc)
+           shaUni = sha $ DT.concat $ map ($ doc) shaParametersDoc
+           shaBdd = sha $ DT.concat $ map ($ doc) ([(\d -> maybeText (_hyperdataDocument_bdd d))] <> shaParametersDoc)

-       hashParametersDoc :: [(HyperdataDocument -> Text)]
-       hashParametersDoc = [ \d -> maybeText (_hyperdataDocument_title d)
+       shaParametersDoc :: [(HyperdataDocument -> Text)]
+       shaParametersDoc = [ \d -> maybeText (_hyperdataDocument_title d)
                            , \d -> maybeText (_hyperdataDocument_abstract d)
                            , \d -> maybeText (_hyperdataDocument_source d)
                            , \d -> maybeText (_hyperdataDocument_publication_date d)

@@ -226,18 +226,18 @@ instance AddUniqId HyperdataContact
    addUniqId = addUniqIdsContact

  addUniqIdsContact :: HyperdataContact -> HyperdataContact
- addUniqIdsContact hc = set (hc_uniqIdBdd) (Just hashBdd)
-                      $ set (hc_uniqId   ) (Just hashUni) hc
+ addUniqIdsContact hc = set (hc_uniqIdBdd) (Just shaBdd)
+                      $ set (hc_uniqId   ) (Just shaUni) hc
    where
-     hashUni = uniqId $ DT.concat $ map ($ hc) hashParametersContact
-     hashBdd = uniqId $ DT.concat $ map ($ hc) ([\d -> maybeText (view hc_bdd d)] <> hashParametersContact)
+     shaUni = uniqId $ DT.concat $ map ($ hc) shaParametersContact
+     shaBdd = uniqId $ DT.concat $ map ($ hc) ([\d -> maybeText (view hc_bdd d)] <> shaParametersContact)

      uniqId :: Text -> Text
      uniqId = DT.pack . SHA.showDigest . SHA.sha256 . DC.pack . DT.unpack

- -- | TODO add more hash parameters
- hashParametersContact :: [(HyperdataContact -> Text)]
- hashParametersContact = [ \d -> maybeText $ view (hc_who . _Just . cw_firstName) d
+ -- | TODO add more sha parameters
+ shaParametersContact :: [(HyperdataContact -> Text)]
+ shaParametersContact = [ \d -> maybeText $ view (hc_who . _Just . cw_firstName) d
                          , \d -> maybeText $ view (hc_who . _Just . cw_lastName ) d
                          , \d -> maybeText $ view (hc_where . _head . cw_touch . _Just . ct_mail) d
                          ]
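For reference, the uniqId pipeline above builds a hex SHA-256 digest of a Text value. A minimal standalone version, assuming the SHA and DC qualified names stand for Data.Digest.Pure.SHA and Data.ByteString.Lazy.Char8 (those imports sit outside the hunks shown here):

    module ShaSketch where

    import qualified Data.Text as DT
    import qualified Data.ByteString.Lazy.Char8 as DC
    import qualified Data.Digest.Pure.SHA as SHA

    -- Text -> String -> lazy ByteString -> SHA-256 digest -> hex String -> Text
    uniqId :: DT.Text -> DT.Text
    uniqId = DT.pack . SHA.showDigest . SHA.sha256 . DC.pack . DT.unpack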
src/Gargantext/Database/Schema/Ngrams.hs
@@ -51,9 +51,9 @@ type NgramsId = Int
  type NgramsTerms = Text
  type Size        = Int

- data NgramsPoly id terms n = NgramsDb { ngrams_id     :: id
-                                       , ngrams_terms  :: terms
-                                       , ngrams_n      :: n
+ data NgramsPoly id terms n = NgramsDb { _ngrams_id    :: id
+                                       , _ngrams_terms :: terms
+                                       , _ngrams_n     :: n
                                        } deriving (Show)

  type NgramsWrite = NgramsPoly (Maybe (Column PGInt4))

@@ -71,12 +71,13 @@ type NgramsReadNull = NgramsPoly (Column (Nullable PGInt4))
  type NgramsDb = NgramsPoly Int Text Int

  $(makeAdaptorAndInstance "pNgramsDb" ''NgramsPoly)
- -- $(makeLensesWith abbreviatedFields ''NgramsPoly)
+ makeLenses ''NgramsPoly

  ngramsTable :: Table NgramsWrite NgramsRead
- ngramsTable = Table "ngrams" (pNgramsDb NgramsDb { ngrams_id     = optional "id"
-                                                  , ngrams_terms  = required "terms"
-                                                  , ngrams_n      = required "n"
+ ngramsTable = Table "ngrams" (pNgramsDb NgramsDb { _ngrams_id    = optional "id"
+                                                  , _ngrams_terms = required "terms"
+                                                  , _ngrams_n     = required "n"
                                                   }
                               )
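The same refactor runs through the schema modules that follow: record fields gain an underscore prefix, makeLenses derives a lens of the same name without the underscore, and call sites switch from plain accessors (e.g. ngrams_id ng) to the view operator (ng ^. ngrams_id). A minimal sketch of what the splice provides, using a made-up record for illustration:

    {-# LANGUAGE TemplateHaskell #-}
    module LensSketch where

    import Control.Lens (makeLenses, (^.))

    data Point = Point { _pointX :: Int, _pointY :: Int } deriving Show

    -- Generates lenses pointX and pointY from the underscore-prefixed fields.
    makeLenses ''Point

    getX :: Point -> Int
    getX p = p ^. pointX   -- equivalent to the plain accessor _pointX p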
src/Gargantext/Database/Schema/Node.hs
@@ -699,6 +699,7 @@ mkNode nt p u = insertNodesR [nodeDefault nt p u]
  mkGraph :: ParentId -> UserId -> Cmd err [GraphId]
  mkGraph p u = insertNodesR [nodeGraphW Nothing Nothing p u]

  mkDashboard :: ParentId -> UserId -> Cmd err [NodeId]
  mkDashboard p u = insertNodesR [nodeDashboardW Nothing Nothing p u]
    where

@@ -709,7 +710,6 @@ mkDashboard p u = insertNodesR [nodeDashboardW Nothing Nothing p u]
      dashboard = maybe arbitraryDashboard identity maybeDashboard

  mkPhylo :: ParentId -> UserId -> Cmd err [NodeId]
  mkPhylo p u = insertNodesR [nodePhyloW Nothing Nothing p u]

@@ -718,8 +718,5 @@ mkPhylo p u = insertNodesR [nodePhyloW Nothing Nothing p u]
  pgNodeId :: NodeId -> Column PGInt4
  pgNodeId = pgInt4 . id2int

  getListsWithParentId :: NodeId -> Cmd err [Node HyperdataList]
  getListsWithParentId n = runOpaQuery $ selectNodesWith' n (Just NodeList)
src/Gargantext/Database/Schema/NodeNode.hs
@@ -25,11 +25,11 @@ commentary with @some markup@.
  module Gargantext.Database.Schema.NodeNode where

- import Control.Lens (view)
+ import Control.Lens (view, (^.))
  import qualified Database.PostgreSQL.Simple as PGS (Query, Only(..))
  import Database.PostgreSQL.Simple.Types (Values(..), QualifiedIdentifier(..))
  import Database.PostgreSQL.Simple.SqlQQ (sql)
- import Control.Lens.TH (makeLensesWith, abbreviatedFields)
+ import Control.Lens.TH (makeLenses, makeLensesWith, abbreviatedFields)
  import Data.Maybe (Maybe, catMaybes)
  import Data.Text (Text, splitOn)
  import Data.Profunctor.Product.TH (makeAdaptorAndInstance)

@@ -44,10 +44,10 @@ import Control.Arrow (returnA)
  import qualified Opaleye as O

  data NodeNodePoly node1_id node2_id score cat
-   = NodeNode { nn_node1_id  :: node1_id
-              , nn_node2_id  :: node2_id
-              , nn_score     :: score
-              , nn_category  :: cat
+   = NodeNode { _nn_node1_id :: node1_id
+              , _nn_node2_id :: node2_id
+              , _nn_score    :: score
+              , _nn_category :: cat
               } deriving (Show)

  type NodeNodeWrite = NodeNodePoly (Column (PGInt4))

@@ -59,7 +59,7 @@ type NodeNodeRead = NodeNodePoly (Column (PGInt4))
                                   (Column (PGInt4))
                                   (Column (PGFloat8))
                                   (Column (PGInt4))

  type NodeNodeReadNull = NodeNodePoly (Column (Nullable PGInt4))
                                       (Column (Nullable PGInt4))
                                       (Column (Nullable PGFloat8))

@@ -68,14 +68,14 @@ type NodeNodeReadNull = NodeNodePoly (Column (Nullable PGInt4))
  type NodeNode = NodeNodePoly Int Int (Maybe Double) (Maybe Int)

  $(makeAdaptorAndInstance "pNodeNode" ''NodeNodePoly)
- $(makeLensesWith abbreviatedFields ''NodeNodePoly)
+ makeLenses ''NodeNodePoly

  nodeNodeTable :: Table NodeNodeWrite NodeNodeRead
  nodeNodeTable = Table "nodes_nodes" (pNodeNode
-                   NodeNode { nn_node1_id  = required "node1_id"
-                            , nn_node2_id  = required "node2_id"
-                            , nn_score     = optional "score"
-                            , nn_category  = optional "category"
+                   NodeNode { _nn_node1_id = required "node1_id"
+                            , _nn_node2_id = required "node2_id"
+                            , _nn_score    = optional "score"
+                            , _nn_category = optional "category"
                             }
                  )

@@ -144,9 +144,9 @@ selectDocs cId = runOpaQuery (queryDocs cId)
  queryDocs :: CorpusId -> O.Query (Column PGJsonb)
  queryDocs cId = proc () -> do
    (n, nn) <- joinInCorpus -< ()
-   restrict -< (nn_node1_id nn)   .== (toNullable $ pgNodeId cId)
+   restrict -< nn ^. nn_node1_id  .== (toNullable $ pgNodeId cId)
-   restrict -< (nn_category nn)   .>= (toNullable $ pgInt4 1)
+   restrict -< nn ^. nn_category  .>= (toNullable $ pgInt4 1)
-   restrict -< (_node_typename n) .== (pgInt4 $ nodeTypeId NodeDocument)
+   restrict -< n ^. node_typename .== (pgInt4 $ nodeTypeId NodeDocument)
    returnA  -< view (node_hyperdata) n

@@ -156,9 +156,9 @@ selectDocNodes cId = runOpaQuery (queryDocNodes cId)
  queryDocNodes :: CorpusId -> O.Query NodeRead
  queryDocNodes cId = proc () -> do
    (n, nn) <- joinInCorpus -< ()
-   restrict -< (nn_node1_id nn)   .== (toNullable $ pgNodeId cId)
+   restrict -< nn ^. nn_node1_id  .== (toNullable $ pgNodeId cId)
-   restrict -< (nn_category nn)   .>= (toNullable $ pgInt4 1)
+   restrict -< nn ^. nn_category  .>= (toNullable $ pgInt4 1)
-   restrict -< (_node_typename n) .== (pgInt4 $ nodeTypeId NodeDocument)
+   restrict -< n ^. node_typename .== (pgInt4 $ nodeTypeId NodeDocument)
    returnA  -< n

@@ -166,7 +166,7 @@ joinInCorpus :: O.Query (NodeRead, NodeNodeReadNull)
  joinInCorpus = leftJoin queryNodeTable queryNodeNodeTable cond
    where
      cond :: (NodeRead, NodeNodeRead) -> Column PGBool
-     cond (n, nn) = nn_node2_id nn    .== (view node_id n)
+     cond (n, nn) = nn ^. nn_node2_id .== (view node_id n)
  ------------------------------------------------------------------------
src/Gargantext/Database/Schema/NodeNodeNgrams.hs
@@ -25,7 +25,7 @@ module Gargantext.Database.Schema.NodeNodeNgrams
  import Prelude
  import Data.Maybe (Maybe)
  import Data.Profunctor.Product.TH (makeAdaptorAndInstance)
- --import Control.Lens.TH (makeLensesWith, abbreviatedFields)
+ import Control.Lens.TH (makeLenses)
  import Gargantext.Database.Utils (Cmd, mkCmd)
  import Gargantext.Database.Schema.Ngrams (NgramsTypeId, pgNgramsTypeId, NgramsId)
  import Gargantext.Database.Schema.Node (pgNodeId)

@@ -35,12 +35,12 @@ import Opaleye
  data NodeNodeNgramsPoly id' n1 n2 ngrams_id ngt w
-   = NodeNodeNgrams { nnng_id          :: id'
-                    , nnng_node1_id    :: n1
-                    , nnng_node2_id    :: n2
-                    , nnng_ngrams_id   :: ngrams_id
-                    , nnng_ngramsType  :: ngt
-                    , nnng_weight      :: w
+   = NodeNodeNgrams { _nnng_id         :: id'
+                    , _nnng_node1_id   :: n1
+                    , _nnng_node2_id   :: n2
+                    , _nnng_ngrams_id  :: ngrams_id
+                    , _nnng_ngramsType :: ngt
+                    , _nnng_weight     :: w
                     } deriving (Show)

@@ -71,19 +71,19 @@ type NodeNodeNgramsReadNull =
  type NodeNodeNgrams =
    NodeNodeNgramsPoly (Maybe Int) CorpusId DocId NgramsId NgramsTypeId Double

  --{-
  $(makeAdaptorAndInstance "pNodeNodeNgrams" ''NodeNodeNgramsPoly)
- -- $(makeLensesWith abbreviatedFields ''NodeNodeNgramsPoly)
+ makeLenses ''NodeNodeNgramsPoly

  nodeNodeNgramsTable :: Table NodeNodeNgramsWrite NodeNodeNgramsRead
  nodeNodeNgramsTable = Table "node_node_ngrams"
                        (pNodeNodeNgrams NodeNodeNgrams
-                         { nnng_id          = optional "id"
-                         , nnng_node1_id    = required "node1_id"
-                         , nnng_node2_id    = required "node2_id"
-                         , nnng_ngrams_id   = required "ngrams_id"
-                         , nnng_ngramsType  = required "ngrams_type"
-                         , nnng_weight      = required "weight"
+                         { _nnng_id         = optional "id"
+                         , _nnng_node1_id   = required "node1_id"
+                         , _nnng_node2_id   = required "node2_id"
+                         , _nnng_ngrams_id  = required "ngrams_id"
+                         , _nnng_ngramsType = required "ngrams_type"
+                         , _nnng_weight     = required "weight"
                          }
                        )
src/Gargantext/Database/TextSearch.hs
(diff collapsed on the original page; changes not shown)
src/Gargantext/Database/Types/Node.hs
@@ -479,7 +479,7 @@ data NodePolySearch id typename userId
                     hyperdata search =
    NodeSearch { _ns_id       :: id
               , _ns_typename :: typename
               , _ns_userId   :: userId
-              -- , nodeUniqId :: hashId
+              -- , nodeUniqId :: shaId
               , _ns_parentId :: parentId
               , _ns_name     :: name
               , _ns_date     :: date
src/Gargantext/Prelude/Utils.hs
@@ -85,7 +85,7 @@ writeFile :: (MonadReader env m, MonadIO m, HasSettings env, SaveFile a)
            => a -> m FilePath
  writeFile a = do
    dataPath <- view (settings . fileFolder) <$> ask
-   (fp,fn)  <- liftIO $ (toPath 3) . hash . Text.pack . show <$> newStdGen
+   (fp,fn)  <- liftIO $ (toPath 3) . sha . Text.pack . show <$> newStdGen

    let foldPath = dataPath <> "/" <> fp
        filePath = foldPath <> "/" <> fn