Julien Moutinho / haskell-gargantext · Commits

Commit c0c8fcf8, authored Mar 03, 2023 by Alexandre Delanoë
[FIX] removing debug prints
Parent: 0312960a
Showing 35 changed files with 130 additions and 122 deletions.
gargantext.cabal  +1 -1
src/Gargantext/API/Admin/Auth.hs  +1 -1
src/Gargantext/API/GraphQL/AsyncTask.hs  +3 -3
src/Gargantext/API/Metrics.hs  +4 -4
src/Gargantext/API/Ngrams.hs  +6 -6
src/Gargantext/API/Ngrams/List.hs  +2 -2
src/Gargantext/API/Ngrams/Tools.hs  +2 -2
src/Gargantext/API/Node/Contact.hs  +2 -2
src/Gargantext/API/Node/Corpus/Annuaire.hs  +1 -1
src/Gargantext/API/Node/Corpus/New.hs  +16 -16
src/Gargantext/API/Node/Corpus/New/File.hs  +12 -11
src/Gargantext/API/Node/Corpus/Searx.hs  +9 -7
src/Gargantext/API/Node/DocumentUpload.hs  +2 -2
src/Gargantext/API/Node/DocumentsFromWriteNodes.hs  +7 -3
src/Gargantext/API/Node/File.hs  +8 -7
src/Gargantext/API/Node/New.hs  +1 -1
src/Gargantext/API/Node/Share.hs  +4 -4
src/Gargantext/API/Node/Update.hs  +2 -2
src/Gargantext/API/Prelude.hs  +1 -1
src/Gargantext/API/Routes.hs  +1 -1
src/Gargantext/API/Search.hs  +1 -1
src/Gargantext/API/Table.hs  +2 -2
src/Gargantext/Core/NodeStory.hs  +8 -8
src/Gargantext/Core/Text/Corpus/API/Istex.hs  +1 -1
src/Gargantext/Core/Text/Corpus/Parsers.hs  +2 -2
src/Gargantext/Core/Text/Corpus/Parsers/Date.hs  +3 -3
src/Gargantext/Core/Text/List.hs  +14 -14
src/Gargantext/Core/Text/List/Learn.hs  +2 -2
src/Gargantext/Core/Viz/Phylo/API/Tools.hs  +1 -1
src/Gargantext/Database/Action/Flow.hs  +3 -3
src/Gargantext/Database/Action/Search.hs  +2 -2
src/Gargantext/Database/Action/User/New.hs  +1 -1
src/Gargantext/Database/Query/Facet.hs  +2 -2
src/Gargantext/Database/Query/Table/Node/Children.hs  +2 -2
src/Gargantext/Utils/JohnSnowNLP.hs  +1 -1
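Every hunk below applies the same mechanical edit: the printDebug call is kept but commented out, and any binding that was read only by that call is either dropped or prefixed with an underscore so GHC's unused-binding warnings stay quiet. A minimal, self-contained sketch of that shape (hypothetical module and handler names, and a stand-in for Gargantext.Prelude.printDebug; not code from this repository):

module DebugPrintPattern where

import Control.Monad.IO.Class (MonadIO, liftIO)

-- Stand-in for Gargantext.Prelude.printDebug (assumed signature, illustration only).
printDebug :: (MonadIO m, Show a) => String -> a -> m ()
printDebug msg a = liftIO $ putStrLn (msg <> " " <> show a)

-- Before the commit: the handler logs its argument on every call.
handlerBefore :: MonadIO m => Int -> m ()
handlerBefore nId = do
  printDebug "[handler] nId" nId
  pure ()

-- After the commit: the call survives only as a comment, and the now-unused
-- argument is underscore-prefixed to silence -Wunused-matches.
handlerAfter :: MonadIO m => Int -> m ()
handlerAfter _nId = do
  -- printDebug "[handler] nId" nId
  pure ()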
gargantext.cabal
src/Gargantext/API/Admin/Auth.hs
@@ -283,7 +283,7 @@ forgotPasswordAsync' (ForgotPasswordAsyncParams { email }) logStatus = do
         }
   logStatus jobLog
-  printDebug "[forgotPasswordAsync'] email" email
+  -- printDebug "[forgotPasswordAsync'] email" email
   _ <- forgotPasswordPost $ ForgotPasswordRequest { _fpReq_email = email }
src/Gargantext/API/GraphQL/AsyncTask.hs
@@ -41,15 +41,15 @@ resolveJobLogs JobLogArgs { job_log_id } = dbJobLogs job_log_id
 dbJobLogs
   :: (HasConnectionPool env, HasConfig env, HasJobEnv' env)
   => Int
   -> GqlM e env (Map Int JobLog)
-dbJobLogs job_log_id = do
+dbJobLogs _job_log_id = do
   --getJobLogs job_log_id
   lift $ do
     env <- ask
     --val <- liftBase $ readMVar $ env ^. job_env . jenv_jobs . env_state_mvar
     var <- liftIO $ readMVar (env ^. job_env . jenv_jobs . env_state_mvar)
     let envItems = var ^. env_map
-    printDebug "[dbJobLogs] env ^. job_env ^. jenv_jobs" $ length $ IntMap.keys envItems
-    printDebug "[dbJobLogs] job_log_id" job_log_id
+    -- printDebug "[dbJobLogs] env ^. job_env ^. jenv_jobs" $ length $ IntMap.keys envItems
+    -- printDebug "[dbJobLogs] job_log_id" job_log_id
     --pure $ IntMap.elems val
     liftIO $ do
       let jobsList = IntMap.toList $ IntMap.map (\e -> e ^. env_item . job_async) envItems
src/Gargantext/API/Metrics.hs
@@ -94,10 +94,10 @@ updateScatter :: FlowCmdM env err m =>
   -> Maybe Limit
   -> m ()
 updateScatter cId maybeListId tabType maybeLimit = do
-  printDebug "[updateScatter] cId" cId
-  printDebug "[updateScatter] maybeListId" maybeListId
-  printDebug "[updateScatter] tabType" tabType
-  printDebug "[updateScatter] maybeLimit" maybeLimit
+  -- printDebug "[updateScatter] cId" cId
+  -- printDebug "[updateScatter] maybeListId" maybeListId
+  -- printDebug "[updateScatter] tabType" tabType
+  -- printDebug "[updateScatter] maybeLimit" maybeLimit
   _ <- updateScatter' cId maybeListId tabType maybeLimit
   pure ()
src/Gargantext/API/Ngrams.hs
@@ -372,7 +372,7 @@ tableNgramsPull :: HasNodeStory env err m
   -> Version
   -> m (Versioned NgramsTablePatch)
 tableNgramsPull listId ngramsType p_version = do
-  printDebug "[tableNgramsPull]" (listId, ngramsType)
+  -- printDebug "[tableNgramsPull]" (listId, ngramsType)
   var <- getNodeStoryVar [listId]
   r <- liftBase $ readMVar var
@@ -403,12 +403,12 @@ tableNgramsPut :: ( HasNodeStory env err m
   -> m (Versioned NgramsTablePatch)
 tableNgramsPut tabType listId (Versioned p_version p_table)
   | p_table == mempty = do
-      printDebug "[tableNgramsPut]" ("TableEmpty" :: Text)
+      -- printDebug "[tableNgramsPut]" ("TableEmpty" :: Text)
       let ngramsType = ngramsTypeFromTabType tabType
       tableNgramsPull listId ngramsType p_version
   | otherwise = do
-      printDebug "[tableNgramsPut]" ("TableNonEmpty" :: Text)
+      -- printDebug "[tableNgramsPut]" ("TableNonEmpty" :: Text)
       let ngramsType = ngramsTypeFromTabType tabType
           (p, p_validity) = PM.singleton ngramsType p_table
@@ -434,7 +434,7 @@ tableNgramsPostChartsAsync utn logStatus = do
       let listId = utn ^. utn_list_id
       node <- getNode listId
-      let nId = node ^. node_id
+      let _nId = node ^. node_id
           _uId = node ^. node_user_id
           mCId = node ^. node_parent_id
@@ -443,7 +443,7 @@ tableNgramsPostChartsAsync utn logStatus = do
       case mCId of
         Nothing -> do
-          printDebug "[tableNgramsPostChartsAsync] can't update charts, no parent, nId" nId
+          -- printDebug "[tableNgramsPostChartsAsync] can't update charts, no parent, nId" nId
          pure $ jobLogFail $ jobLogInit 1
         Just cId -> do
           case tabType of
@@ -499,7 +499,7 @@ tableNgramsPostChartsAsync utn logStatus = do
               getRef
             _ -> do
-              printDebug "[tableNgramsPostChartsAsync] no update for tabType = " tabType
+              -- printDebug "[tableNgramsPostChartsAsync] no update for tabType = " tabType
              pure $ jobLogFail $ jobLogInit 1
 {-
src/Gargantext/API/Ngrams/List.hs
@@ -135,7 +135,7 @@ setList :: FlowCmdM env err m
   -> m Bool
 setList l m = do
   -- TODO check with Version for optim
-  printDebug "New list as file" l
+  -- printDebug "New list as file" l
   _ <- mapM (\(nt, Versioned _v ns) -> setListNgrams l nt ns) $ toList m
   -- TODO reindex
   pure True
@@ -151,7 +151,7 @@ reIndexWith :: ( HasNodeStory env err m
   -> Set ListType
   -> m ()
 reIndexWith cId lId nt lts = do
-  printDebug "(cId,lId,nt,lts)" (cId, lId, nt, lts)
+  -- printDebug "(cId,lId,nt,lts)" (cId, lId, nt, lts)
   -- Getting [NgramsTerm]
   ts <- List.concat
src/Gargantext/API/Ngrams/Tools.hs
@@ -235,9 +235,9 @@ migrateFromDirToDb = do
   pool <- view connPool
   withResource pool $ \c -> do
     listIds <- liftBase $ getNodesIdWithType c NodeList
-    printDebug "[migrateFromDirToDb] listIds" listIds
+    -- printDebug "[migrateFromDirToDb] listIds" listIds
     (NodeStory nls) <- NSF.getRepoReadConfig listIds
-    printDebug "[migrateFromDirToDb] nls" nls
+    -- printDebug "[migrateFromDirToDb] nls" nls
     _ <- mapM (\(nId, a) -> do
                   n <- liftBase $ nodeExists c nId
                   case n of
src/Gargantext/API/Node/Contact.hs
@@ -46,7 +46,7 @@ import Gargantext.Database.Action.Flow.Types (FlowCmdM)
 import Gargantext.Database.Admin.Types.Hyperdata (HyperdataAnnuaire(..), HyperdataContact)
 import Gargantext.Database.Admin.Types.Hyperdata.Contact (hyperdataContact)
 import Gargantext.Database.Admin.Types.Node
-import Gargantext.Prelude (($), liftBase, (.), printDebug, pure)
+import Gargantext.Prelude (($), liftBase, (.), {-printDebug,-} pure)
 import qualified Gargantext.Utils.Aeson as GUA
 import Gargantext.Utils.Jobs (serveJobsAPI)
@@ -76,7 +76,7 @@ api_async u nId =
   serveJobsAPI AddContactJob $ \p log ->
     let
       log' x = do
-        printDebug "addContact" x
+        -- printDebug "addContact" x
        liftBase $ log x
     in addContact u nId p (liftBase . log')
src/Gargantext/API/Node/Corpus/Annuaire.hs
@@ -76,7 +76,7 @@ addToAnnuaireWithForm :: FlowCmdM env err m
   -> m JobLog
 addToAnnuaireWithForm _cid (AnnuaireWithForm { _wf_filetype }) logStatus = do
-  printDebug "ft" _wf_filetype
+  -- printDebug "ft" _wf_filetype
   logStatus JobLog { _scst_succeeded = Just 1
                    , _scst_failed    = Just 0
src/Gargantext/API/Node/Corpus/New.hs
@@ -198,13 +198,13 @@ addToCorpusWithQuery user cid (WithQuery { _wq_query = q
                    , _scst_remaining = Just 3
                    , _scst_events    = Just []
                    }
-  printDebug "[addToCorpusWithQuery] (cid, dbs)" (cid, dbs)
-  printDebug "[addToCorpusWithQuery] datafield" datafield
-  printDebug "[addToCorpusWithQuery] flowListWith" flw
+  -- printDebug "[addToCorpusWithQuery] (cid, dbs)" (cid, dbs)
+  -- printDebug "[addToCorpusWithQuery] datafield" datafield
+  -- printDebug "[addToCorpusWithQuery] flowListWith" flw
   case datafield of
     Just Web -> do
-      printDebug "[addToCorpusWithQuery] processing web request" datafield
+      -- printDebug "[addToCorpusWithQuery] processing web request" datafield
       _ <- triggerSearxSearch user cid q l logStatus
@@ -219,12 +219,12 @@ addToCorpusWithQuery user cid (WithQuery { _wq_query = q
      -- TODO if cid is folder -> create Corpus
      --      if cid is corpus -> add to corpus
      --      if cid is root   -> create corpus in Private
-      printDebug "[G.A.N.C.New] getDataText with query" q
+      -- printDebug "[G.A.N.C.New] getDataText with query" q
       databaseOrigin <- database2origin dbs
       eTxts <- mapM (\db -> getDataText db (Multi l) q maybeLimit) [databaseOrigin]
       let lTxts = lefts eTxts
-      printDebug "[G.A.N.C.New] lTxts" lTxts
+      -- printDebug "[G.A.N.C.New] lTxts" lTxts
       case lTxts of
         [] -> do
           let txts = rights eTxts
@@ -235,10 +235,10 @@ addToCorpusWithQuery user cid (WithQuery { _wq_query = q
                             , _scst_events    = Just []
                             }
-          cids <- mapM (\txt -> do
+          _cids <- mapM (\txt -> do
                            flowDataText user txt (Multi l) cid (Just flw) logStatus) txts
-          printDebug "corpus id" cids
-          printDebug "sending email" ("xxxxxxxxxxxxxxxxxxxxx" :: Text)
+          -- printDebug "corpus id" cids
+          -- printDebug "sending email" ("xxxxxxxxxxxxxxxxxxxxx" :: Text)
           sendMail user
           -- TODO ...
           pure JobLog { _scst_succeeded = Just 3
@@ -248,7 +248,7 @@ addToCorpusWithQuery user cid (WithQuery { _wq_query = q
                       }
         (err:_) -> do
-          printDebug "Error: " err
+          -- printDebug "Error: " err
           let jl = addEvent "ERROR" (T.pack $ show err) $
                      JobLog { _scst_succeeded = Just 2
                             , _scst_failed    = Just 1
@@ -275,9 +275,9 @@ addToCorpusWithForm :: (FlowCmdM env err m)
   -> JobLog
   -> m JobLog
 addToCorpusWithForm user cid (NewWithForm ft ff d l _n sel) logStatus jobLog = do
-  printDebug "[addToCorpusWithForm] Parsing corpus: " cid
-  printDebug "[addToCorpusWithForm] fileType" ft
-  printDebug "[addToCorpusWithForm] fileFormat" ff
+  -- printDebug "[addToCorpusWithForm] Parsing corpus: " cid
+  -- printDebug "[addToCorpusWithForm] fileType" ft
+  -- printDebug "[addToCorpusWithForm] fileFormat" ff
   logStatus jobLog
   limit' <- view $ hasConfig . gc_max_docs_parsers
   let limit = fromIntegral limit' :: Integer
@@ -320,7 +320,7 @@ addToCorpusWithForm user cid (NewWithForm ft ff d l _n sel) logStatus jobLog = d
   --printDebug "Starting extraction : " cid
   -- TODO granularity of the logStatus
-  printDebug "flowCorpus with (corpus_id, lang)" (cid, l)
+  -- printDebug "flowCorpus with (corpus_id, lang)" (cid, l)
   _cid' <- flowCorpus user
                       (Right [cid])
@@ -331,8 +331,8 @@ addToCorpusWithForm user cid (NewWithForm ft ff d l _n sel) logStatus jobLog = d
                       --(map (map toHyperdataDocument) docs)
                       logStatus
-  printDebug "Extraction finished : " cid
-  printDebug "sending email" ("xxxxxxxxxxxxxxxxxxxxx" :: Text)
+  -- printDebug "Extraction finished : " cid
+  -- printDebug "sending email" ("xxxxxxxxxxxxxxxxxxxxx" :: Text)
   -- TODO uncomment this
   --sendMail user
src/Gargantext/API/Node/Corpus/New/File.hs
@@ -70,23 +70,24 @@ postUpload :: NodeId
   -> Cmd err [Hash]
 postUpload _ Nothing _ _ = panic "fileType is a required parameter"
 postUpload _ _ Nothing _ = panic "fileFormat is a required parameter"
-postUpload _ (Just fileType) (Just fileFormat) multipartData = do
-  printDebug "File Type: " fileType
-  printDebug "File format: " fileFormat
+postUpload _ (Just _fileType) (Just _fileFormat) multipartData = do
+  -- printDebug "File Type: " fileType
+  -- printDebug "File format: " fileFormat
   is <- liftBase $ do
-    printDebug "Inputs:" ()
+    -- printDebug "Inputs:" ()
     forM (inputs multipartData) $ \input -> do
-      printDebug "iName " (iName input)
-      printDebug "iValue " (iValue input)
+      -- printDebug "iName " (iName input)
+      -- printDebug "iValue " (iValue input)
       pure $ iName input
 {-
   _ <- forM (files multipartData) $ \file -> do
-    let content = fdPayload file
-    printDebug "XXX " (fdFileName file)
-    printDebug "YYY " content
-    --pure $ cs content
+    -- let content = fdPayload file
+    -- printDebug "XXX " (fdFileName file)
+    -- printDebug "YYY " content
+    pure () -- $ cs content
    -- is <- inputs multipartData
 -}
   pure $ map hash is
-------------------------------------------------------------------
src/Gargantext/API/Node/Corpus/Searx.hs
@@ -120,16 +120,18 @@ insertSearxResponse :: (MonadBase IO m, FlowCmdM env err m)
   -> m ()
 insertSearxResponse _ _ _ _ (Left _) = pure ()
 insertSearxResponse user cId listId l (Right (SearxResponse { _srs_results })) = do
   -- docs :: [Either Text HyperdataDocument]
   let docs = hyperdataDocumentFromSearxResult l <$> _srs_results
   --printDebug "[triggerSearxSearch] docs" docs
   -- docs :: [Either Text HyperdataDocument]
   let docs' = catMaybes $ rightToMaybe <$> docs
   {-
   Prelude.mapM_ (\(HyperdataDocument { _hd_title, _hd_publication_year, _hd_publication_date }) -> do
       printDebug "[triggerSearxSearch] doc time" $
         "[title] " <> (show _hd_title) <>
         " :: [publication_year] " <> (show _hd_publication_year) <>
         " :: [publication_date] " <> (show _hd_publication_date)
     ) docs'
   -}
   --_ <- flowDataText user (DataNew [docs']) (Multi l) cId Nothing logStatus
   let mCorpus = Nothing :: Maybe HyperdataCorpus
   ids <- insertMasterDocs mCorpus (Multi l) docs'
@@ -162,13 +164,13 @@ triggerSearxSearch user cId q l logStatus = do
     }
   logStatus jobLog
-  printDebug "[triggerSearxSearch] cId" cId
-  printDebug "[triggerSearxSearch] q" q
-  printDebug "[triggerSearxSearch] l" l
+  -- printDebug "[triggerSearxSearch] cId" cId
+  -- printDebug "[triggerSearxSearch] q" q
+  -- printDebug "[triggerSearxSearch] l" l
   cfg <- view hasConfig
   uId <- getUserId user
   let surl = _gc_frame_searx_url cfg
-  printDebug "[triggerSearxSearch] surl" surl
+  -- printDebug "[triggerSearxSearch] surl" surl
   mListId <- defaultListMaybe cId
   listId <- case mListId of
     Nothing -> do
@@ -176,7 +178,7 @@ triggerSearxSearch user cId q l logStatus = do
       pure listId
     Just listId -> pure listId
-  printDebug "[triggerSearxSearch] listId" listId
+  -- printDebug "[triggerSearxSearch] listId" listId
   manager <- liftBase $ newManager tlsManagerSettings
   _ <- mapM (\page -> do
src/Gargantext/API/Node/DocumentUpload.hs
@@ -84,8 +84,8 @@ documentUploadAsync _uId nId doc logStatus = do
       , _scst_remaining = Just 1
       , _scst_events    = Just []
       }
   logStatus jl
-  docIds <- documentUpload nId doc
-  printDebug "documentUploadAsync" docIds
+  _docIds <- documentUpload nId doc
+  -- printDebug "documentUploadAsync" docIds
   pure $ jobLogSuccess jl
src/Gargantext/API/Node/DocumentsFromWriteNodes.hs
@@ -21,7 +21,9 @@ import Conduit
 import Control.Lens ((^.))
 import Data.Aeson
 import Data.Either (Either(..), rights)
 import Data.Maybe (fromMaybe)
 import Data.Swagger
 import Data.Text (Text)
 import GHC.Generics (Generic)
 import Gargantext.API.Admin.EnvTypes (Env, GargJob(..))
 import Gargantext.API.Admin.Orchestrator.Types (JobLog(..), AsyncJobs)
@@ -44,6 +46,7 @@ import Gargantext.Prelude
 import Gargantext.Utils.Jobs (serveJobsAPI)
 import Gargantext.Core.Text.Corpus.Parsers.Date (split')
 import Servant
 import Text.Read (readMaybe)
 import qualified Data.List as List
 import qualified Data.Text as T
 -- import qualified Gargantext.Defaults as Defaults
@@ -54,7 +57,7 @@ type API = Summary " Documents from Write nodes."
 ------------------------------------------------------------------------
 data Params = Params
   { id :: Int
-  , paragraphs :: Int
+  , paragraphs :: Text
   , lang :: Lang
   , selection :: FlowSocialListWith
   }
@@ -106,10 +109,11 @@ documentsFromWriteNodes uId nId Params { selection, lang, paragraphs } logStatus
                    pure (node, contents)
                 ) frameWrites
+  let paragraphs' = fromMaybe (7 :: Int) $ (readMaybe $ T.unpack paragraphs)
-  let parsedE = (\(node, contents) -> hyperdataDocumentFromFrameWrite lang paragraphs (node, contents)) <$> frameWritesWithContents
+  let parsedE = (\(node, contents) -> hyperdataDocumentFromFrameWrite lang paragraphs' (node, contents)) <$> frameWritesWithContents
   let parsed = List.concat $ rights parsedE
-  printDebug "DocumentsFromWriteNodes: uId" uId
+  -- printDebug "DocumentsFromWriteNodes: uId" uId
   _ <- flowDataText (RootId (NodeId uId)) (DataNew (Just $ fromIntegral $ length parsed, yieldMany parsed)) (Multi lang)
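The DocumentsFromWriteNodes hunk above is the one change that goes beyond commenting out debug output: the paragraphs parameter becomes Text instead of Int, so the handler parses it and falls back to a default when parsing fails. A small, hypothetical illustration of that parse-with-default idiom (not code from this repository):

module ParagraphsExample where

import Data.Maybe (fromMaybe)
import Text.Read (readMaybe)
import qualified Data.Text as T

-- Parse a Text field into an Int, defaulting to 7 when it does not parse,
-- mirroring the paragraphs' binding introduced in the hunk above.
paragraphsOrDefault :: T.Text -> Int
paragraphsOrDefault t = fromMaybe (7 :: Int) (readMaybe $ T.unpack t)

-- paragraphsOrDefault (T.pack "12")  == 12
-- paragraphsOrDefault (T.pack "abc") == 7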
src/Gargantext/API/Node/File.hs
@@ -70,8 +70,8 @@ fileDownload :: (HasSettings env, FlowCmdM env err m)
   -> NodeId
   -> m (Headers '[Servant.Header "Content-Type" Text] BSResponse)
 fileDownload uId nId = do
-  printDebug "[fileDownload] uId" uId
-  printDebug "[fileDownload] nId" nId
+  -- printDebug "[fileDownload] uId" uId
+  -- printDebug "[fileDownload] nId" nId
   node <- getNodeWith nId (Proxy :: Proxy HyperdataFile)
   let (HyperdataFile { _hff_name = name'
@@ -105,7 +105,7 @@ fileAsyncApi uId nId =
   serveJobsAPI AddFileJob $ \i l ->
     let
      log' x = do
-        printDebug "addWithFile" x
+        -- printDebug "addWithFile" x
        liftBase $ l x
     in addWithFile uId nId i log'
@@ -118,7 +118,7 @@ addWithFile :: (HasSettings env, FlowCmdM env err m)
   -> m JobLog
 addWithFile uId nId nwf@(NewWithFile _d _l fName) logStatus = do
-  printDebug "[addWithFile] Uploading file: " nId
+  -- printDebug "[addWithFile] Uploading file: " nId
   logStatus JobLog { _scst_succeeded = Just 0
                    , _scst_failed    = Just 0
                    , _scst_remaining = Just 1
@@ -126,7 +126,7 @@ addWithFile uId nId nwf@(NewWithFile _d _l fName) logStatus = do
                    }
   fPath <- GargDB.writeFile nwf
-  printDebug "[addWithFile] File saved as: " fPath
+  -- printDebug "[addWithFile] File saved as: " fPath
   nIds <- mkNodeWithParent NodeFile (Just nId) uId fName
@@ -137,10 +137,11 @@ addWithFile uId nId nwf@(NewWithFile _d _l fName) logStatus = do
      _ <- updateHyperdata nId' $ hl { _hff_name = fName
                                     , _hff_path = pack fPath }
-      printDebug "[addWithFile] Created node with id: " nId'
+      -- printDebug "[addWithFile] Created node with id: " nId'
+      pure ()
     _ -> pure ()
-  printDebug "[addWithFile] File upload finished: " nId
+  -- printDebug "[addWithFile] File upload finished: " nId
   pure $ JobLog { _scst_succeeded = Just 1
                 , _scst_failed    = Just 0
                 , _scst_remaining = Just 0
src/Gargantext/API/Node/New.hs
@@ -89,7 +89,7 @@ postNodeAsync :: FlowCmdM env err m
   -> m JobLog
 postNodeAsync uId nId (PostNode nodeName tn) logStatus = do
-  printDebug "postNodeAsync" nId
+  -- printDebug "postNodeAsync" nId
   logStatus JobLog { _scst_succeeded = Just 1
                    , _scst_failed    = Just 0
                    , _scst_remaining = Just 2
src/Gargantext/API/Node/Share.hs
@@ -69,13 +69,13 @@ api userInviting nId (ShareTeamParams user') = do
   isRegistered <- getUserId' (UserName u)
   case isRegistered of
     Just _ -> do
-      printDebug "[G.A.N.Share.api]" ("Team shared with " <> u)
+      -- printDebug "[G.A.N.Share.api]" ("Team shared with " <> u)
       pure u
     Nothing -> do
       username' <- getUsername userInviting
       _ <- case List.elem username' arbitraryUsername of
         True -> do
-          printDebug "[G.A.N.Share.api]" ("Demo users are not allowed to invite" :: Text)
+          -- printDebug "[G.A.N.Share.api]" ("Demo users are not allowed to invite" :: Text)
           pure ()
         False -> do
           -- TODO better analysis of the composition of what is shared
@@ -86,10 +86,10 @@ api userInviting nId (ShareTeamParams user') = do
                         ]
           _ <- case List.null children of
             True -> do
-              printDebug "[G.A.N.Share.api]" ("Invitation is enabled if you share a corpus at least" :: Text)
+              -- printDebug "[G.A.N.Share.api]" ("Invitation is enabled if you share a corpus at least" :: Text)
              pure 0
            False -> do
-              printDebug "[G.A.N.Share.api]" ("Your invitation is sent to: " <> user'')
+              -- printDebug "[G.A.N.Share.api]" ("Your invitation is sent to: " <> user'')
              newUsers [user'']
           pure ()
       pure u
src/Gargantext/API/Node/Update.hs
@@ -43,7 +43,7 @@ import Gargantext.Database.Query.Table.Node (defaultList, getNode)
 import Gargantext.Database.Query.Table.Node.UpdateOpaleye (updateHyperdata)
 import Gargantext.Database.Schema.Ngrams (NgramsType(NgramsTerms))
 import Gargantext.Database.Schema.Node (node_parent_id)
-import Gargantext.Prelude (Bool(..), Ord, Eq, (<$>), ($), liftBase, (.), printDebug, pure, show, cs, (<>), panic, (<*>))
+import Gargantext.Prelude (Bool(..), Ord, Eq, (<$>), ($), liftBase, (.), {-printDebug,-} pure, show, cs, (<>), panic, (<*>))
 import Gargantext.Utils.Jobs (serveJobsAPI)
 import Prelude (Enum, Bounded, minBound, maxBound)
 import Servant
@@ -97,7 +97,7 @@ api uId nId =
   serveJobsAPI UpdateNodeJob $ \p log'' ->
     let
      log' x = do
-        printDebug "updateNode" x
+        -- printDebug "updateNode" x
        liftBase $ log'' x
     in updateNode uId nId p (liftBase . log')
src/Gargantext/API/Prelude.hs
@@ -164,5 +164,5 @@ simuTask logStatus cur total = do
                      , _scst_remaining = (-) <$> Just total <*> Just cur
                      , _scst_events    = Just []
                      }
-  printDebug "status" status
+  -- printDebug "status" status
   logStatus status
src/Gargantext/API/Routes.hs
@@ -304,7 +304,7 @@ addCorpusWithFile user cid =
   serveJobsAPI AddCorpusFileJob $ \i log' ->
     let
      log'' x = do
-        printDebug "[addToCorpusWithFile]" x
+        -- printDebug "[addToCorpusWithFile]" x
        liftBase $ log' x
     in New.addToCorpusWithFile user cid i log''
src/Gargantext/API/Search.hs
@@ -53,7 +53,7 @@ api nId (SearchQuery q SearchDoc) o l order =
   <$> searchInCorpus nId False q o l order
   --  <$> searchInCorpus nId False (concat q) o l order
 api nId (SearchQuery q SearchContact) o l order = do
-  printDebug "isPairedWith" nId
+  -- printDebug "isPairedWith" nId
   aIds <- isPairedWith nId NodeAnnuaire
   -- TODO if paired with several corpus
   case head aIds of
src/Gargantext/API/Table.hs
@@ -110,8 +110,8 @@ getTableApi :: NodeId
   -> Maybe Text
   -> Cmd err (HashedResponse FacetTableResult)
 getTableApi cId tabType _mListId mLimit mOffset mOrderBy mQuery mYear = do
-  printDebug "[getTableApi] mQuery" mQuery
-  printDebug "[getTableApi] mYear" mYear
+  -- printDebug "[getTableApi] mQuery" mQuery
+  -- printDebug "[getTableApi] mYear" mYear
   t <- getTable cId tabType mOffset mLimit mOrderBy mQuery mYear
   pure $ constructHashedResponse t
src/Gargantext/Core/NodeStory.hs
@@ -534,11 +534,11 @@ updateNodeStory c nodeId@(NodeId _nId) currentArchive newArchive = do
   let currentSet = archiveStateSet currentList
   let newSet = archiveStateSet newList
-  printDebug "[updateNodeStory] new - current = " $ Set.difference newSet currentSet
+  -- printDebug "[updateNodeStory] new - current = " $ Set.difference newSet currentSet
   let inserts = archiveStateListFilterFromSet (Set.difference newSet currentSet) newList
   -- printDebug "[updateNodeStory] inserts" inserts
-  printDebug "[updateNodeStory] current - new" $ Set.difference currentSet newSet
+  -- printDebug "[updateNodeStory] current - new" $ Set.difference currentSet newSet
   let deletes = archiveStateListFilterFromSet (Set.difference currentSet newSet) currentList
   -- printDebug "[updateNodeStory] deletes" deletes
@@ -547,7 +547,7 @@ updateNodeStory c nodeId@(NodeId _nId) currentArchive newArchive = do
   let commonNewList = archiveStateListFilterFromSet commonSet newList
   let commonCurrentList = archiveStateListFilterFromSet commonSet currentList
   let updates = Set.toList $ Set.difference (Set.fromList commonNewList) (Set.fromList commonCurrentList)
-  printDebug "[updateNodeStory] updates" $ Text.unlines $ (Text.pack . show) <$> updates
+  -- printDebug "[updateNodeStory] updates" $ Text.unlines $ (Text.pack . show) <$> updates
   -- 2. Perform inserts/deletes/updates
   --printDebug "[updateNodeStory] applying insert" ()
@@ -580,9 +580,9 @@ updateNodeStory c nodeId@(NodeId _nId) currentArchive newArchive = do
 upsertNodeStories :: PGS.Connection -> NodeId -> ArchiveList -> IO ()
 upsertNodeStories c nodeId@(NodeId nId) newArchive = do
-  printDebug "[upsertNodeStories] START nId" nId
+  -- printDebug "[upsertNodeStories] START nId" nId
   PGS.withTransaction c $ do
-    printDebug "[upsertNodeStories] locking nId" nId
+    -- printDebug "[upsertNodeStories] locking nId" nId
     runPGSAdvisoryXactLock c nId
     (NodeStory m) <- getNodeStory c nodeId
@@ -597,7 +597,7 @@ upsertNodeStories c nodeId@(NodeId nId) newArchive = do
   -- 3. Now we need to set versions of all node state to be the same
   fixNodeStoryVersion c nodeId newArchive
-  printDebug "[upsertNodeStories] STOP nId" nId
+  -- printDebug "[upsertNodeStories] STOP nId" nId
 fixNodeStoryVersion :: PGS.Connection -> NodeId -> ArchiveList -> IO ()
 fixNodeStoryVersion c nodeId newArchive = do
@@ -738,9 +738,9 @@ fixNodeStoryVersions = do
   pool <- view connPool
   _ <- withResource pool $ \c -> liftBase $ PGS.withTransaction c $ do
     nIds <- runPGSQuery c [sql| SELECT id FROM nodes WHERE ? |] (PGS.Only True) :: IO [PGS.Only Int64]
-    printDebug "[fixNodeStoryVersions] nIds" nIds
+    -- printDebug "[fixNodeStoryVersions] nIds" nIds
     mapM_ (\(PGS.Only nId) -> do
-      printDebug "[fixNodeStoryVersions] nId" nId
+      -- printDebug "[fixNodeStoryVersions] nId" nId
      updateVer c TableNgrams.Authors nId
      updateVer c TableNgrams.Institutes nId
src/Gargantext/Core/Text/Corpus/API/Istex.hs
@@ -58,7 +58,7 @@ get la query' maxResults = do
   -- in that case we suppose user is knowing what s.he is doing
   eDocs <- ISTEX.getMetadataWith query (fromIntegral <$> maxResults)
-  printDebug "[Istex.get] will print length" (0 :: Int)
+  -- printDebug "[Istex.get] will print length" (0 :: Int)
   case eDocs of
     Left _ -> pure ()
     Right (ISTEX.Documents { _documents_hits }) -> printDebug "[Istex.get] length docs" $ length _documents_hits
src/Gargantext/Core/Text/Corpus/Parsers.hs
@@ -188,7 +188,7 @@ toDoc ff d = do
   let lang = EN -- maybe EN identity (join $ detectLangDefault <$> (fmap (DT.take 50) abstract))
   let dateToParse = DT.replace " " "" <$> lookup "PY" d -- <> Just " " <> lookup "publication_date" d
-  printDebug "[G.C.T.C.Parsers] dateToParse" dateToParse
+  -- printDebug "[G.C.T.C.Parsers] dateToParse" dateToParse
   (utcTime, (pub_year, pub_month, pub_day)) <- Date.dateSplit lang dateToParse
   let hd = HyperdataDocument { _hd_bdd = Just $ DT.pack $ show ff
@@ -210,7 +210,7 @@ toDoc ff d = do
                              , _hd_publication_minute = Nothing
                              , _hd_publication_second = Nothing
                              , _hd_language_iso2 = Just $ (DT.pack . show) lang }
-  printDebug "[G.C.T.C.Parsers] HyperdataDocument" hd
+  -- printDebug "[G.C.T.C.Parsers] HyperdataDocument" hd
   pure hd
 enrichWith :: FileType
src/Gargantext/Core/Text/Corpus/Parsers/Date.hs
@@ -146,9 +146,9 @@ parseRawSafe lang text = do
   let triedParseRaw = parseRaw lang text
   dateStr' <- case triedParseRaw of
       --Left (CE.SomeException err) -> do
-      Left err -> do
-        envLang <- getEnv "LANG"
-        printDebug "[G.C.T.C.P.Date] Exception: " (err, envLang, lang, text)
+      Left _err -> do
+        _envLang <- getEnv "LANG"
+        -- printDebug "[G.C.T.C.P.Date] Exception: " (err, envLang, lang, text)
        pure $ DucklingFailure text
      Right res -> pure $ DucklingSuccess res
   pure dateStr'
src/Gargantext/Core/Text/List.hs
@@ -23,7 +23,7 @@ import Data.Map.Strict (Map)
 import Data.Monoid (mempty)
 import Data.Ord (Down(..))
 import Data.Set (Set)
-import Data.Text (Text)
+-- import Data.Text (Text)
 import Data.Tuple.Extra (both)
 import Gargantext.API.Ngrams.Types (NgramsElement, NgramsTerm(..))
 import Gargantext.Core.NodeStory
@@ -163,12 +163,12 @@ buildNgramsTermsList user uCid mCid mfslw groupParams (nt, MapListSize mapListSi
   -- Filter 0 With Double
   -- Computing global speGen score
-  printDebug "[buildNgramsTermsList: Sample List] / start" nt
+  -- printDebug "[buildNgramsTermsList: Sample List] / start" nt
   !(allTerms :: HashMap NgramsTerm Double) <- getTficf_withSample uCid mCid nt
-  printDebug "[buildNgramsTermsList: Sample List / end]" (nt, HashMap.size allTerms)
+  -- printDebug "[buildNgramsTermsList: Sample List / end]" (nt, HashMap.size allTerms)
-  printDebug "[buildNgramsTermsList: Flow Social List / start]" nt
+  -- printDebug "[buildNgramsTermsList: Flow Social List / start]" nt
   -- PrivateFirst for first developments since Public NodeMode is not implemented yet
   !(socialLists :: FlowCont NgramsTerm FlowListScores)
@@ -177,18 +177,18 @@ buildNgramsTermsList user uCid mCid mfslw groupParams (nt, MapListSize mapListSi
                $ List.zip (HashMap.keys allTerms) (List.cycle [mempty]) )
-  printDebug "[buildNgramsTermsList: Flow Social List / end]" nt
+  -- printDebug "[buildNgramsTermsList: Flow Social List / end]" nt
   let !ngramsKeys = HashSet.fromList $ List.take mapListSize $ HashSet.toList $ HashMap.keysSet allTerms
-  printDebug "[buildNgramsTermsList: ngramsKeys]" (HashSet.size ngramsKeys)
+  -- printDebug "[buildNgramsTermsList: ngramsKeys]" (HashSet.size ngramsKeys)
   !groupParams' <- getGroupParams groupParams (HashSet.map (text2ngrams . unNgramsTerm) ngramsKeys)
-  printDebug "[buildNgramsTermsList: groupParams']" ("" :: Text)
+  -- printDebug "[buildNgramsTermsList: groupParams']" ("" :: Text)
   let
     !socialLists_Stemmed = addScoreStem groupParams' ngramsKeys socialLists
@@ -199,10 +199,10 @@ buildNgramsTermsList user uCid mCid mfslw groupParams (nt, MapListSize mapListSi
     !(groupedMono, groupedMult) = HashMap.partitionWithKey (\(NgramsTerm t) _v -> size t < 2) candidateTerms
-  printDebug "[buildNgramsTermsList] socialLists" socialLists
-  printDebug "[buildNgramsTermsList] socialLists with scores" socialLists_Stemmed
-  printDebug "[buildNgramsTermsList] groupedWithList" groupedWithList
-  printDebug "[buildNgramsTermsList] stopTerms" stopTerms
+  -- printDebug "[buildNgramsTermsList] socialLists" socialLists
+  -- printDebug "[buildNgramsTermsList] socialLists with scores" socialLists_Stemmed
+  -- printDebug "[buildNgramsTermsList] groupedWithList" groupedWithList
+  -- printDebug "[buildNgramsTermsList] stopTerms" stopTerms
   -- splitting monterms and multiterms to take proportional candidates
   -- use % of list if to big, or Int if too small
@@ -223,7 +223,7 @@ buildNgramsTermsList user uCid mCid mfslw groupParams (nt, MapListSize mapListSi
      -- Filter 1 With Set NodeId and SpeGen
      !selectedTerms = Set.toList $ hasTerms (groupedMonoHead <> groupedMultHead)
-  printDebug "[buildNgramsTermsList: selectedTerms]" selectedTerms
+  -- printDebug "[buildNgramsTermsList: selectedTerms]" selectedTerms
   -- TODO remove (and remove HasNodeError instance)
   !userListId <- defaultList uCid
@@ -235,7 +235,7 @@ buildNgramsTermsList user uCid mCid mfslw groupParams (nt, MapListSize mapListSi
                          selectedTerms
-  printDebug "[buildNgramsTermsList: mapTextDocIds]" mapTextDocIds
+  -- printDebug "[buildNgramsTermsList: mapTextDocIds]" mapTextDocIds
   let
     groupedTreeScores_SetNodeId :: HashMap NgramsTerm (GroupedTreeScores (Set NodeId))
@@ -243,7 +243,7 @@ buildNgramsTermsList user uCid mCid mfslw groupParams (nt, MapListSize mapListSi
                                      $ setScoresWithMap mapTextDocIds (groupedMonoHead <> groupedMultHead)
-  printDebug "[buildNgramsTermsList: groupedTreeScores_SetNodeId]" groupedTreeScores_SetNodeId
+  -- printDebug "[buildNgramsTermsList: groupedTreeScores_SetNodeId]" groupedTreeScores_SetNodeId
   -- Coocurrences computation
   --, t1 >= t2 -- permute byAxis diag -- since matrix symmetric
src/Gargantext/Core/Text/List/Learn.hs
@@ -124,8 +124,8 @@ grid s e tr te = do
       <$> mapM (\(x,y) -> grid' x y tr te) [(x,y) | x <- [s..e], y <- [s..e]]
-  printDebug "GRID SEARCH" (map fst r)
-  --printDebug "file" fp
+  -- printDebug "GRID SEARCH" (map fst r)
+  -- printDebug "file" fp
   --fp <- saveFile (ModelSVM model')
   --save best result
   pure $ snd <$> r
src/Gargantext/Core/Viz/Phylo/API/Tools.hs
@@ -108,7 +108,7 @@ corpusIdtoDocuments timeUnit corpusId = do
                        -> context2phyloDocument timeUnit doc (ngs_terms, ngs_sources)
                     ) docs
-  printDebug "corpusIdtoDocuments" (Prelude.map date docs')
+  -- printDebug "corpusIdtoDocuments" (Prelude.map date docs')
   case termList of
     Nothing -> panic "[G.C.V.Phylo.API] no termList found"
src/Gargantext/Database/Action/Flow.hs
@@ -295,7 +295,7 @@ flow c u cn la mfslw (mLength, docsC) logStatus = do
     insertDocs' :: [(Integer, a)] -> m [NodeId]
     insertDocs' [] = pure []
     insertDocs' docs = do
-      printDebug "[flow] calling insertDoc, ([idx], mLength) = " (fst <$> docs, mLength)
+      -- printDebug "[flow] calling insertDoc, ([idx], mLength) = " (fst <$> docs, mLength)
      ids <- insertMasterDocs c la (snd <$> docs)
      let maxIdx = maximum (fst <$> docs)
      case mLength of
@@ -354,7 +354,7 @@ flowCorpusUser l user userCorpusId listId ctype mfslw = do
   -- Here the PosTagAlgo should be chosen according to the Lang
   _ <- case mfslw of
         (Just (NoList _)) -> do
-          printDebug "Do not build list" mfslw
+          -- printDebug "Do not build list" mfslw
          pure ()
        _ -> do
          ngs <- buildNgramsLists user userCorpusId masterCorpusId mfslw
@@ -431,7 +431,7 @@ saveDocNgramsWith lId mapNgramsDocs' = do
                             , (ngrams_type, mapNodeIdWeight) <- Map.toList mapNgramsTypes
                             , (nId, (w, _cnt)) <- Map.toList mapNodeIdWeight
                             ]
-  printDebug "Ngrams2Insert" ngrams2insert
+  -- printDebug "Ngrams2Insert" ngrams2insert
   _return <- insertContextNodeNgrams2 ngrams2insert
   -- to be removed
src/Gargantext/Database/Action/Search.hs
@@ -93,10 +93,10 @@ tfidfAll cId ngramIds = do
   let docsWithAllNgrams =
         List.filter (\(ctxId, _, _) ->
                       Set.member ctxId docsWithAllNgramsS) docsWithNgrams
-  printDebug "[tfidfAll] docsWithAllNgrams" docsWithAllNgrams
+  -- printDebug "[tfidfAll] docsWithAllNgrams" docsWithAllNgrams
   let docsWithCounts = Map.fromListWith (+) [ (ctxId, doc_count)
                                             | (ctxId, _, doc_count) <- docsWithAllNgrams ]
-  printDebug "[tfidfAll] docsWithCounts" docsWithCounts
+  -- printDebug "[tfidfAll] docsWithCounts" docsWithCounts
   let totals = [ (ctxId, ngrams_id, fromIntegral doc_count :: Double
src/Gargantext/Database/Action/User/New.hs
@@ -79,7 +79,7 @@ newUsers' cfg us = do
   r <- insertUsers $ map toUserWrite us'
   _ <- mapM getOrMkRoot $ map (\u -> UserName (_nu_username u)) us
   _ <- mapM (\u -> mail cfg (Invitation u)) us
-  printDebug "newUsers'" us
+  -- printDebug "newUsers'" us
   pure r
------------------------------------------------------------------------
 updateUser :: HasNodeError err
src/Gargantext/Database/Query/Facet.hs
@@ -55,7 +55,7 @@ import Gargantext.Database.Prelude
 import Gargantext.Database.Schema.Context
 import Gargantext.Database.Schema.Node
 import Gargantext.Database.Schema.NodeContext
-import Gargantext.Prelude (printDebug)
+-- import Gargantext.Prelude (printDebug)
------------------------------------------------------------------------
@@ -124,7 +124,7 @@ runViewDocuments :: HasDBid NodeType
   -> Maybe Text
   -> Cmd err [FacetDoc]
 runViewDocuments cId t o l order query year = do
-  printDebug "[runViewDocuments] sqlQuery" $ showSql sqlQuery
+  -- printDebug "[runViewDocuments] sqlQuery" $ showSql sqlQuery
   runOpaQuery $ filterWith o l order sqlQuery
   where
     sqlQuery = viewDocuments cId t (toDBid NodeDocument) query year
src/Gargantext/Database/Query/Table/Node/Children.hs
@@ -72,7 +72,7 @@ getChildrenNode :: (JSONB a, HasDBid NodeType)
   -> Maybe Limit
   -> Cmd err (NodeTableResult a)
 getChildrenNode pId _ maybeNodeType maybeOffset maybeLimit = do
-  printDebug "getChildrenNode" (pId, maybeNodeType)
+  -- printDebug "getChildrenNode" (pId, maybeNodeType)
   let query = selectChildrenNode pId maybeNodeType
   docs <- runOpaQuery $ limit' maybeLimit
@@ -103,7 +103,7 @@ getChildrenContext :: (JSONB a, HasDBid NodeType)
   -> Maybe Limit
   -> Cmd err (NodeTableResult a)
 getChildrenContext pId _ maybeNodeType maybeOffset maybeLimit = do
-  printDebug "getChildrenContext" (pId, maybeNodeType)
+  -- printDebug "getChildrenContext" (pId, maybeNodeType)
   let query = selectChildren' pId maybeNodeType
   docs <- runOpaQuery
src/Gargantext/Utils/JohnSnowNLP.hs
@@ -175,7 +175,7 @@ waitForJsTask jsTask = wait' 0
      if counter > 60 then
        panic "[waitForJsTask] waited for 1 minute and still no answer from JohnSnow NLP"
      else do
-        printDebug "[waitForJsTask] task not ready, waiting" counter
+        -- printDebug "[waitForJsTask] task not ready, waiting" counter
        _ <- threadDelay $ 1000000 * 1
        wait' $ counter + 1