Commit 0472aad6 authored by Przemyslaw Kaminski's avatar Przemyslaw Kaminski

Merge branch 'dev' into 97-dev-istex-search

parents 8b108128 931df3f4
...@@ -14,9 +14,22 @@ variables: ...@@ -14,9 +14,22 @@ variables:
#- apt-get install make xz-utils #- apt-get install make xz-utils
stages: stages:
- deps
- docs - docs
- test - test
deps:
cache:
# cache per branch name
# key: ${CI_COMMIT_REF_SLUG}
paths:
- .stack
- .stack-root/
- .stack-work/
- target
script:
- stack build --no-terminal --haddock --no-haddock-deps --only-dependencies --fast
docs: docs:
cache: cache:
# cache per branch name # cache per branch name
...@@ -47,3 +60,4 @@ test: ...@@ -47,3 +60,4 @@ test:
- stack test --no-terminal --fast - stack test --no-terminal --fast
# TOOO # TOOO
## Version 0.0.5.5.1
* [BACK] FIX Graph Explorer search with selected ngrams
* [FRONT] Clean CSS
## Version 0.0.5.5
* [FRONT] Visio frame removed, using a new tab instead (which is working)
* [BACK] Scores on the docs view fixed
## Version 0.0.5.3
* [FRONT] SSL local option
## Version 0.0.5.2
* [QUAL] Scores in Ngrams Table fixed during workflow and user can
refresh it if needed.
## Version 0.0.5.1
* [OPTIM] Upgrade fix with indexes and scores counts
## Version 0.0.5
* [OPTIM][DATABASE] Upgrade Schema, move conTexts in contexts table which requires a version bump.
## Version 0.0.4.9.9.6
* [BACK] PubMed parser fixed
* [FRONT] Visio Frame resized
## Version 0.0.4.9.9.5
* [FIX] Chart Sort
## Version 0.0.4.9.9.4
* [FEAT] Corpus docs download
## Version 0.0.4.9.9.3
* [BACK] Graph update with force option
## Version 0.0.4.9.9.2
* [BACK] Opaleye Upgrade
## Version 0.0.4.9.9.1
* [FRONT] 350-dev-graph-search-in-forms-not-labels
* [FRONT] 359-dev-input-with-autocomplete
## Version 0.0.4.9.9 ## Version 0.0.4.9.9
* [FIX] Continuous Integration (CI) * [FIX] Continuous Integration (CI)
......
...@@ -15,31 +15,32 @@ Import a corpus binary. ...@@ -15,31 +15,32 @@ Import a corpus binary.
module Main where module Main where
import Data.Text (Text)
import Data.Either (Either(..)) import Data.Either (Either(..))
import Gargantext.API.Dev (withDevEnv, runCmdDev) import Gargantext.API.Dev (withDevEnv, runCmdDev)
import Gargantext.API.Prelude (GargError)
import Gargantext.API.Node () -- instances only import Gargantext.API.Node () -- instances only
import Gargantext.API.Prelude (GargError)
import Gargantext.Core.Types.Individu (User(..), arbitraryNewUsers, NewUser(..), arbitraryUsername, GargPassword(..)) import Gargantext.Core.Types.Individu (User(..), arbitraryNewUsers, NewUser(..), arbitraryUsername, GargPassword(..))
import Gargantext.Database.Action.Flow (getOrMkRoot, getOrMk_RootWithCorpus) import Gargantext.Database.Action.Flow (getOrMkRoot, getOrMk_RootWithCorpus)
import Gargantext.Database.Query.Table.Node (getOrMkList)
import Gargantext.Database.Query.Table.User (insertNewUsers, )
import Gargantext.Database.Admin.Config (userMaster, corpusMasterName) import Gargantext.Database.Admin.Config (userMaster, corpusMasterName)
import Gargantext.Database.Admin.Types.Node
import Gargantext.Database.Admin.Trigger.Init (initFirstTriggers, initLastTriggers) import Gargantext.Database.Admin.Trigger.Init (initFirstTriggers, initLastTriggers)
import Gargantext.Database.Admin.Types.Hyperdata (HyperdataCorpus) import Gargantext.Database.Admin.Types.Hyperdata (HyperdataCorpus)
import Gargantext.Database.Admin.Types.Node
import Gargantext.Database.Prelude (Cmd, ) import Gargantext.Database.Prelude (Cmd, )
import Gargantext.Database.Query.Table.Node (getOrMkList)
import Gargantext.Database.Query.Table.User (insertNewUsers, )
import Gargantext.Prelude import Gargantext.Prelude
import System.Environment (getArgs) import Gargantext.Prelude.Config (GargConfig(..), readConfig)
import Prelude (getLine) import Prelude (getLine)
import System.Environment (getArgs)
-- TODO put this in gargantext.ini
secret :: Text
secret = "Database secret to change"
main :: IO () main :: IO ()
main = do main = do
[iniPath] <- getArgs params@[iniPath] <- getArgs
_ <- if length params /= 1
then panic "USAGE: ./gargantext-init gargantext.ini"
else pure ()
putStrLn "Enter master user (gargantua) _password_ :" putStrLn "Enter master user (gargantua) _password_ :"
password <- getLine password <- getLine
...@@ -47,6 +48,8 @@ main = do ...@@ -47,6 +48,8 @@ main = do
putStrLn "Enter master user (gargantua) _email_ :" putStrLn "Enter master user (gargantua) _email_ :"
email <- getLine email <- getLine
cfg <- readConfig iniPath
let secret = _gc_secretkey cfg
let createUsers :: Cmd GargError Int64 let createUsers :: Cmd GargError Int64
createUsers = insertNewUsers (NewUser "gargantua" (cs email) (GargPassword $ cs password) createUsers = insertNewUsers (NewUser "gargantua" (cs email) (GargPassword $ cs password)
......
This diff is collapsed.
#!/bin/bash #!/bin/bash
# sudo su postgres # sudo su postgres
# postgresql://$USER:$PW@localhost/$DB # postgresql://$USER:$PW@localhost/$DB
PW="C8kdcUrAQy66U" INIFILE=$1
DB="gargandbV5"
USER="gargantua" getter () {
grep $1 $INIFILE | sed "s/^.*= //"
}
USER=$(getter "DB_USER")
NAME=$(getter "DB_NAME")
PASS=$(getter "DB_PASS")
HOST=$(getter "DB_HOST")
PORT=$(getter "DB_PORT")
psql -c "CREATE USER \"${USER}\""
psql -c "ALTER USER \"${USER}\" with PASSWORD '${PW}'"
psql -c "DROP DATABASE IF EXISTS \"${DB}\"" #psql -c "CREATE USER \"${USER}\""
createdb "${DB}" #psql -c "ALTER USER \"${USER}\" with PASSWORD '${PW}'"
psql "${DB}" < schema.sql
psql -c "ALTER DATABASE \"${DB}\" OWNER to \"${USER}\"" psql -c "DROP DATABASE IF EXISTS \"${NAME}\""
createdb "${NAME}"
psql "${NAME}" < extensions.sql
#psql "${NAME}" < schema.sql
#../../bin/psql ../../gargantext.ini < gargandb.dump
psql -c "ALTER DATABASE \"${NAME}\" OWNER to \"${USER}\""
#!/bin/bash
# sudo su postgres
# postgresql://$USER:$PW@localhost/$DB
PW="C8kdcUrAQy66U"
DB="gargandb1"
USER="gargantua"
#psql -c "CREATE USER \"${USER}\""
#psql -c "ALTER USER \"${USER}\" with PASSWORD '${PW}'"
psql -c "DROP DATABASE IF EXISTS \"${DB}\""
createdb "${DB}"
psql -c "ALTER DATABASE \"${DB}\" OWNER to \"${USER}\""
CREATE EXTENSION IF NOT EXISTS pgcrypto;
CREATE EXTENSION IF NOT EXISTS tsm_system_rows;
This diff is collapsed.
-- to delete
-- DELETE FROM contexts;
-- WITH docs (id,hash_id,typename,user_id,parent_id,name,date,hyperdata, search)
WITH docs AS (SELECT * from nodes WHERE nodes.typename IN (4,41)),
inserted (id, hash_id) AS (
INSERT INTO contexts (hash_id,typename,user_id,parent_id,name,date,hyperdata, search)
SELECT d.hash_id,d.typename,d.user_id,NULL,d.name,d.date,d.hyperdata,search FROM docs AS d
RETURNING contexts.id, contexts.hash_id
),
indexed (node_id, context_id) AS (
SELECT docs.id, inserted.id from inserted
JOIN docs on docs.hash_id = inserted.hash_id
),
-- nodes_nodes -> nodes_contexts
nodes_contexts_query AS (
INSERT INTO nodes_contexts (node_id, context_id,score, category)
SELECT nn.node1_id,i.context_id,nn.score,nn.category FROM nodes_nodes nn
JOIN indexed i ON i.node_id = nn.node2_id
),
-- nodes_nodes_ngrams -> contexts_nodes_ngrams
contexts_nodes_ngrams_query AS (
INSERT INTO context_node_ngrams
SELECT i.context_id, nnn.node1_id, nnn.ngrams_id, nnn.ngrams_type, nnn.weight FROM node_node_ngrams nnn
JOIN indexed i ON i.node_id = nnn.node2_id
),
---- nodes_nodes_ngrams2 -> contexts_nodes_ngrams2
context_node_ngrams2_query AS (
INSERT INTO context_node_ngrams2
SELECT i.context_id, nnn2.nodengrams_id, nnn2.weight FROM node_node_ngrams2 nnn2
JOIN indexed i ON i.node_id = nnn2.node_id
)
-- WITH CASCADE it should update others tables
DELETE FROM nodes n
USING indexed i WHERE i.node_id = n.id
;
UPDATE contexts SET parent_id = id;
-- TODO typename -> type_id
CREATE TABLE public.contexts (
id SERIAL,
hash_id CHARACTER varying(66) DEFAULT ''::character varying NOT NULL,
typename INTEGER NOT NULL,
user_id INTEGER NOT NULL,
parent_id INTEGER REFERENCES public.contexts(id) ON DELETE CASCADE ,
name CHARACTER varying(255) DEFAULT ''::character varying NOT NULL,
date TIMESTAMP with time zone DEFAULT now() NOT NULL,
hyperdata jsonb DEFAULT '{}'::jsonb NOT NULL,
search tsvector,
PRIMARY KEY (id),
FOREIGN KEY (user_id) REFERENCES public.auth_user(id) ON DELETE CASCADE
);
ALTER TABLE public.contexts OWNER TO gargantua;
-- To attach contexts to a Corpus
CREATE TABLE public.nodes_contexts (
node_id INTEGER NOT NULL REFERENCES public.nodes(id) ON DELETE CASCADE,
context_id INTEGER NOT NULL REFERENCES public.contexts(id) ON DELETE CASCADE,
score REAL ,
category INTEGER ,
PRIMARY KEY (node_id, context_id)
);
ALTER TABLE public.nodes_contexts OWNER TO gargantua;
---------------------------------------------------------------
CREATE TABLE public.context_node_ngrams (
context_id INTEGER NOT NULL REFERENCES public.contexts (id) ON DELETE CASCADE,
node_id INTEGER NOT NULL REFERENCES public.nodes (id) ON DELETE CASCADE,
ngrams_id INTEGER NOT NULL REFERENCES public.ngrams (id) ON DELETE CASCADE,
ngrams_type INTEGER ,
weight double precision,
PRIMARY KEY (context_id, node_id, ngrams_id, ngrams_type)
);
ALTER TABLE public.context_node_ngrams OWNER TO gargantua;
CREATE TABLE public.context_node_ngrams2 (
context_id INTEGER NOT NULL REFERENCES public.contexts (id) ON DELETE CASCADE,
nodengrams_id INTEGER NOT NULL REFERENCES public.node_ngrams (id) ON DELETE CASCADE,
weight double precision,
PRIMARY KEY (context_id, nodengrams_id)
);
ALTER TABLE public.context_node_ngrams2 OWNER TO gargantua;
CREATE INDEX ON public.contexts USING gin (hyperdata);
CREATE INDEX ON public.contexts USING btree (user_id, typename, parent_id);
CREATE INDEX ON public.contexts USING btree (id, typename, date ASC);
CREATE INDEX ON public.contexts USING btree (id, typename, date DESC);
CREATE INDEX ON public.contexts USING btree (typename, id);
CREATE UNIQUE INDEX ON public.contexts USING btree (hash_id);
-- To make the links between Corpus Node and its contexts
CREATE UNIQUE INDEX ON public.nodes_contexts USING btree (node_id, context_id);
CREATE INDEX ON public.nodes_contexts USING btree (node_id, context_id, category);
------------------------------------------------------------------------
CREATE UNIQUE INDEX ON public.context_node_ngrams USING btree (context_id, node_id, ngrams_id, ngrams_type);
CREATE INDEX ON public.context_node_ngrams USING btree (context_id, node_id);
CREATE INDEX ON public.context_node_ngrams USING btree (ngrams_id, node_id);
CREATE INDEX ON public.context_node_ngrams USING btree (ngrams_type);
CREATE INDEX ON public.context_node_ngrams2 USING btree (context_id);
CREATE INDEX ON public.context_node_ngrams2 USING btree (nodengrams_id);
CREATE INDEX ON public.context_node_ngrams2 USING btree (context_id, nodengrams_id);
DROP TABLE if EXISTS public.node_nodengrams_nodengrams;
DROP TRIGGER if EXISTS trigger_count_delete2 ON nodes_nodes;
DROP TRIGGER if EXISTS trigger_count_update_add ON nodes_nodes;
DROP TRIGGER if EXISTS trigger_delete_count ON nodes_nodes;
DROP TRIGGER if EXISTS trigger_insert_count ON nodes_nodes;
name: gargantext name: gargantext
version: '0.0.4.9.9' version: '0.0.5.5.1'
synopsis: Search, map, share synopsis: Search, map, share
description: Please see README.md description: Please see README.md
category: Data category: Data
...@@ -58,7 +58,7 @@ library: ...@@ -58,7 +58,7 @@ library:
- Gargantext.API.Admin.EnvTypes - Gargantext.API.Admin.EnvTypes
- Gargantext.API.Admin.Types - Gargantext.API.Admin.Types
- Gargantext.API.Prelude - Gargantext.API.Prelude
- Gargantext.Client - Gargantext.API.Client
- Gargantext.Core - Gargantext.Core
- Gargantext.Core.NodeStory - Gargantext.Core.NodeStory
- Gargantext.Core.Methods.Distances - Gargantext.Core.Methods.Distances
...@@ -244,6 +244,7 @@ library: ...@@ -244,6 +244,7 @@ library:
- tagsoup - tagsoup
- template-haskell - template-haskell
- temporary - temporary
- text-conversions
- text-metrics - text-metrics
- time - time
- time-locale-compat - time-locale-compat
...@@ -379,19 +380,20 @@ executables: ...@@ -379,19 +380,20 @@ executables:
- gargantext-prelude - gargantext-prelude
- base - base
# gargantext-upgrade: gargantext-upgrade:
# main: Main.hs main: Main.hs
# source-dirs: bin/gargantext-upgrade source-dirs: bin/gargantext-upgrade
# ghc-options: ghc-options:
# - -threaded - -threaded
# - -rtsopts - -rtsopts
# - -with-rtsopts=-N - -with-rtsopts=-N
# - -O2 - -O2
# - -Wmissing-signatures - -Wmissing-signatures
# dependencies: dependencies:
# - gargantext - gargantext
# - gargantext-prelude - gargantext-prelude
# - base - base
- postgresql-simple
gargantext-admin: gargantext-admin:
main: Main.hs main: Main.hs
......
...@@ -106,7 +106,7 @@ repoSnapshot repoDir = repoDir <> "/repo.cbor" ...@@ -106,7 +106,7 @@ repoSnapshot repoDir = repoDir <> "/repo.cbor"
repoSaverAction :: RepoDirFilePath -> Serialise a => a -> IO () repoSaverAction :: RepoDirFilePath -> Serialise a => a -> IO ()
repoSaverAction repoDir a = do repoSaverAction repoDir a = do
withTempFile repoDir "tmp-repo.cbor" $ \fp h -> do withTempFile repoDir "tmp-repo.cbor" $ \fp h -> do
printDebug "repoSaverAction" fp -- printDebug "repoSaverAction" fp
L.hPut h $ serialise a L.hPut h $ serialise a
hClose h hClose h
renameFile fp (repoSnapshot repoDir) renameFile fp (repoSnapshot repoDir)
......
{-# OPTIONS_GHC -freduction-depth=0 #-} {-# OPTIONS_GHC -freduction-depth=0 #-}
{-# OPTIONS_GHC -O0 #-} {-# OPTIONS_GHC -O0 #-}
module Gargantext.Client where module Gargantext.API.Client where
import Data.Int import Data.Int
import Data.Maybe import Data.Maybe
...@@ -23,6 +23,7 @@ import Gargantext.API.Node ...@@ -23,6 +23,7 @@ import Gargantext.API.Node
import Gargantext.API.Node.Contact import Gargantext.API.Node.Contact
import Gargantext.API.Node.Corpus.Export.Types import Gargantext.API.Node.Corpus.Export.Types
import Gargantext.API.Node.Corpus.New import Gargantext.API.Node.Corpus.New
import qualified Gargantext.API.Node.Document.Export.Types as DocumentExport
import Gargantext.API.Node.DocumentsFromWriteNodes import Gargantext.API.Node.DocumentsFromWriteNodes
import Gargantext.API.Node.DocumentUpload import Gargantext.API.Node.DocumentUpload
import Gargantext.API.Node.File import Gargantext.API.Node.File
...@@ -65,7 +66,8 @@ putRoots :: Token -> ClientM Int -- not actually implemented in the backend ...@@ -65,7 +66,8 @@ putRoots :: Token -> ClientM Int -- not actually implemented in the backend
deleteNodes :: Token -> [NodeId] -> ClientM Int deleteNodes :: Token -> [NodeId] -> ClientM Int
-- node api -- node api
getNode :: Token -> NodeId -> ClientM (Node HyperdataAny) getNode :: Token -> NodeId -> ClientM (Node HyperdataAny)
getContext :: Token -> ContextId -> ClientM (Node HyperdataAny)
renameNode :: Token -> NodeId -> RenameNode -> ClientM [Int] renameNode :: Token -> NodeId -> RenameNode -> ClientM [Int]
postNode :: Token -> NodeId -> PostNode -> ClientM [NodeId] postNode :: Token -> NodeId -> PostNode -> ClientM [NodeId]
postNodeAsync :: Token -> NodeId -> ClientM (JobStatus 'Safe JobLog) postNodeAsync :: Token -> NodeId -> ClientM (JobStatus 'Safe JobLog)
...@@ -357,6 +359,11 @@ killDocumentNgramsTableAsyncJob :: Token -> DocId -> JobID 'Unsafe -> Maybe Limi ...@@ -357,6 +359,11 @@ killDocumentNgramsTableAsyncJob :: Token -> DocId -> JobID 'Unsafe -> Maybe Limi
pollDocumentNgramsTableAsyncJob :: Token -> DocId -> JobID 'Unsafe -> Maybe Limit -> Maybe Offset -> ClientM (JobStatus 'Safe JobLog) pollDocumentNgramsTableAsyncJob :: Token -> DocId -> JobID 'Unsafe -> Maybe Limit -> Maybe Offset -> ClientM (JobStatus 'Safe JobLog)
waitDocumentNgramsTableAsyncJob :: Token -> DocId -> JobID 'Unsafe -> ClientM (JobOutput JobLog) waitDocumentNgramsTableAsyncJob :: Token -> DocId -> JobID 'Unsafe -> ClientM (JobOutput JobLog)
-- document export API
getDocumentExportJSON :: Token -> DocId -> ClientM DocumentExport.DocumentExport
getDocumentExportCSV :: Token -> DocId -> ClientM Text
--getDocumentExportCSV :: Token -> DocId -> ClientM [DocumentExport.Document]
-- count api -- count api
postCountQuery :: Token -> Query -> ClientM Counts postCountQuery :: Token -> Query -> ClientM Counts
...@@ -491,6 +498,7 @@ postAuth ...@@ -491,6 +498,7 @@ postAuth
:<|> killNodeDocumentUploadAsyncJob :<|> killNodeDocumentUploadAsyncJob
:<|> pollNodeDocumentUploadAsyncJob :<|> pollNodeDocumentUploadAsyncJob
:<|> waitNodeDocumentUploadAsyncJob :<|> waitNodeDocumentUploadAsyncJob
:<|> getContext
:<|> getCorpus :<|> getCorpus
:<|> renameCorpus :<|> renameCorpus
:<|> postCorpus :<|> postCorpus
...@@ -652,6 +660,8 @@ postAuth ...@@ -652,6 +660,8 @@ postAuth
:<|> killDocumentNgramsTableAsyncJob :<|> killDocumentNgramsTableAsyncJob
:<|> pollDocumentNgramsTableAsyncJob :<|> pollDocumentNgramsTableAsyncJob
:<|> waitDocumentNgramsTableAsyncJob :<|> waitDocumentNgramsTableAsyncJob
:<|> getDocumentExportJSON
:<|> getDocumentExportCSV
:<|> postCountQuery :<|> postCountQuery
:<|> getGraphHyperdata :<|> getGraphHyperdata
:<|> postGraphAsync :<|> postGraphAsync
......
{-|
Module : Gargantext.API.Context
Description : Server API
Copyright : (c) CNRS, 2017-Present
License : AGPL + CECILL v3
Maintainer : team@gargantext.org
Stability : experimental
Portability : POSIX
-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
module Gargantext.API.Context
where
import Data.Aeson (FromJSON, ToJSON)
import Servant
import Gargantext.API.Admin.Auth (withAccess)
import Gargantext.API.Admin.Auth.Types (PathId(..))
import Gargantext.API.Prelude
import Gargantext.Database.Admin.Types.Node
import Gargantext.Database.Prelude -- (Cmd, CmdM)
import Gargantext.Database.Query.Table.Context
-------------------------------------------------------------------
-- TODO use Context instead of Node
type ContextAPI a = Get '[JSON] (Node a)
------------------------------------------------------------------------
-- TODO NodeAPI -> ContextAPI
contextAPI :: forall proxy a.
( JSONB a
, FromJSON a
, ToJSON a
) => proxy a
-> UserId
-> ContextId
-> GargServer (ContextAPI a)
contextAPI p uId id' = withAccess (Proxy :: Proxy (ContextAPI a)) Proxy uId (PathNode id') contextAPI'
where
contextAPI' :: GargServer (ContextAPI a)
contextAPI' = getContextWith id' p
...@@ -103,7 +103,7 @@ import Gargantext.Core.Mail.Types (HasMail) ...@@ -103,7 +103,7 @@ import Gargantext.Core.Mail.Types (HasMail)
import Gargantext.Core.Types (ListType(..), NodeId, ListId, DocId, Limit, Offset, TODO, assertValid, HasInvalidError) import Gargantext.Core.Types (ListType(..), NodeId, ListId, DocId, Limit, Offset, TODO, assertValid, HasInvalidError)
import Gargantext.API.Ngrams.Tools import Gargantext.API.Ngrams.Tools
import Gargantext.Database.Action.Flow.Types import Gargantext.Database.Action.Flow.Types
import Gargantext.Database.Action.Metrics.NgramsByNode (getOccByNgramsOnlyFast') import Gargantext.Database.Action.Metrics.NgramsByContext (getOccByNgramsOnlyFast')
import Gargantext.Database.Admin.Config (userMaster) import Gargantext.Database.Admin.Config (userMaster)
import Gargantext.Database.Admin.Types.Node (NodeType(..)) import Gargantext.Database.Admin.Types.Node (NodeType(..))
import Gargantext.Database.Prelude (HasConnectionPool, HasConfig) import Gargantext.Database.Prelude (HasConnectionPool, HasConfig)
...@@ -245,7 +245,7 @@ setListNgrams :: HasNodeStory env err m ...@@ -245,7 +245,7 @@ setListNgrams :: HasNodeStory env err m
-> Map NgramsTerm NgramsRepoElement -> Map NgramsTerm NgramsRepoElement
-> m () -> m ()
setListNgrams listId ngramsType ns = do setListNgrams listId ngramsType ns = do
printDebug "[setListNgrams]" (listId, ngramsType) -- printDebug "[setListNgrams]" (listId, ngramsType)
getter <- view hasNodeStory getter <- view hasNodeStory
var <- liftBase $ (getter ^. nse_getter) [listId] var <- liftBase $ (getter ^. nse_getter) [listId]
liftBase $ modifyMVar_ var $ liftBase $ modifyMVar_ var $
...@@ -283,7 +283,7 @@ commitStatePatch :: (HasNodeStory env err m, HasMail env) ...@@ -283,7 +283,7 @@ commitStatePatch :: (HasNodeStory env err m, HasMail env)
-> Versioned NgramsStatePatch' -> Versioned NgramsStatePatch'
-> m (Versioned NgramsStatePatch') -> m (Versioned NgramsStatePatch')
commitStatePatch listId (Versioned p_version p) = do commitStatePatch listId (Versioned p_version p) = do
printDebug "[commitStatePatch]" listId -- printDebug "[commitStatePatch]" listId
var <- getNodeStoryVar [listId] var <- getNodeStoryVar [listId]
vq' <- liftBase $ modifyMVar var $ \ns -> do vq' <- liftBase $ modifyMVar var $ \ns -> do
let let
...@@ -343,10 +343,10 @@ tableNgramsPull listId ngramsType p_version = do ...@@ -343,10 +343,10 @@ tableNgramsPull listId ngramsType p_version = do
-- Apply the given patch to the DB and returns the patch to be applied on the -- Apply the given patch to the DB and returns the patch to be applied on the
-- client. -- client.
-- TODO-ACCESS check -- TODO-ACCESS check
tableNgramsPut :: ( HasNodeStory env err m tableNgramsPut :: ( HasNodeStory env err m
, HasInvalidError err , HasInvalidError err
, HasSettings env , HasSettings env
, HasMail env , HasMail env
) )
=> TabType => TabType
-> ListId -> ListId
...@@ -495,7 +495,7 @@ type MaxSize = Int ...@@ -495,7 +495,7 @@ type MaxSize = Int
getTableNgrams :: forall env err m. getTableNgrams :: forall env err m.
(HasNodeStory env err m, HasNodeError err, HasConnectionPool env, HasConfig env, HasMail env) (HasNodeStory env err m, HasNodeError err, HasConnectionPool env, HasConfig env, HasMail env)
=> NodeType -> NodeId -> TabType => NodeType -> NodeId -> TabType
-> ListId -> Limit -> Maybe Offset -> ListId -> Limit -> Maybe Offset
-> Maybe ListType -> Maybe ListType
-> Maybe MinSize -> Maybe MaxSize -> Maybe MinSize -> Maybe MaxSize
-> Maybe OrderBy -> Maybe OrderBy
...@@ -562,21 +562,17 @@ getTableNgrams _nType nId tabType listId limit_ offset ...@@ -562,21 +562,17 @@ getTableNgrams _nType nId tabType listId limit_ offset
setScores False table = pure table setScores False table = pure table
setScores True table = do setScores True table = do
let ngrams_terms = table ^.. each . ne_ngrams let ngrams_terms = table ^.. each . ne_ngrams
-- printDebug "ngrams_terms" ngrams_terms
t1 <- getTime t1 <- getTime
occurrences <- getOccByNgramsOnlyFast' nId occurrences <- getOccByNgramsOnlyFast' nId
listId listId
ngramsType ngramsType
ngrams_terms ngrams_terms
--printDebug "occurrences" occurrences
t2 <- getTime t2 <- getTime
liftBase $ hprint stderr liftBase $ hprint stderr
("getTableNgrams/setScores #ngrams=" % int % " time=" % hasTime % "\n") ("getTableNgrams/setScores #ngrams=" % int % " time=" % hasTime % "\n")
(length ngrams_terms) t1 t2 (length ngrams_terms) t1 t2
{-
occurrences <- getOccByNgramsOnlySlow nType nId
(lIds <> [listId])
ngramsType
ngrams_terms
-}
let let
setOcc ne = ne & ne_occurrences .~ sumOf (at (ne ^. ne_ngrams) . _Just) occurrences setOcc ne = ne & ne_occurrences .~ sumOf (at (ne ^. ne_ngrams) . _Just) occurrences
......
...@@ -36,12 +36,12 @@ import Gargantext.Core.Text.Terms.WithList (buildPatterns, termsInText) ...@@ -36,12 +36,12 @@ import Gargantext.Core.Text.Terms.WithList (buildPatterns, termsInText)
import Gargantext.Core.Types.Main (ListType(..)) import Gargantext.Core.Types.Main (ListType(..))
import Gargantext.Database.Action.Flow (saveDocNgramsWith) import Gargantext.Database.Action.Flow (saveDocNgramsWith)
import Gargantext.Database.Action.Flow.Types (FlowCmdM) import Gargantext.Database.Action.Flow.Types (FlowCmdM)
import Gargantext.Database.Action.Metrics.NgramsByNode (getOccByNgramsOnlyFast') import Gargantext.Database.Action.Metrics.NgramsByContext (getOccByNgramsOnlyFast')
import Gargantext.Database.Admin.Types.Hyperdata.Document import Gargantext.Database.Admin.Types.Hyperdata.Document
import Gargantext.Database.Admin.Types.Node import Gargantext.Database.Admin.Types.Node
import Gargantext.Database.Query.Table.NodeNode (selectDocNodes) import Gargantext.Database.Query.Table.NodeContext (selectDocNodes)
import Gargantext.Database.Schema.Ngrams import Gargantext.Database.Schema.Ngrams
import Gargantext.Database.Schema.Node import Gargantext.Database.Schema.Context
import Gargantext.Database.Types (Indexed(..)) import Gargantext.Database.Types (Indexed(..))
import Gargantext.Prelude import Gargantext.Prelude
import Network.HTTP.Media ((//), (/:)) import Network.HTTP.Media ((//), (/:))
...@@ -155,12 +155,12 @@ reIndexWith cId lId nt lts = do ...@@ -155,12 +155,12 @@ reIndexWith cId lId nt lts = do
<$> HashMap.toList <$> HashMap.toList
<$> getTermsWith identity [lId] nt lts <$> getTermsWith identity [lId] nt lts
-- printDebug "ts" ts printDebug "ts" ts
-- Taking the ngrams with 0 occurrences only (orphans) -- Taking the ngrams with 0 occurrences only (orphans)
occs <- getOccByNgramsOnlyFast' cId lId nt ts occs <- getOccByNgramsOnlyFast' cId lId nt ts
-- printDebug "occs" occs printDebug "occs" occs
let orphans = List.concat let orphans = List.concat
$ map (\t -> case HashMap.lookup t occs of $ map (\t -> case HashMap.lookup t occs of
...@@ -168,28 +168,28 @@ reIndexWith cId lId nt lts = do ...@@ -168,28 +168,28 @@ reIndexWith cId lId nt lts = do
Just n -> if n <= 1 then [t] else [ ] Just n -> if n <= 1 then [t] else [ ]
) ts ) ts
-- printDebug "orphans" orphans printDebug "orphans" orphans
-- Get all documents of the corpus -- Get all documents of the corpus
docs <- selectDocNodes cId docs <- selectDocNodes cId
-- printDebug "docs length" (List.length docs) printDebug "docs length" (List.length docs)
-- Checking Text documents where orphans match -- Checking Text documents where orphans match
-- TODO Tests here -- TODO Tests here
let let
ngramsByDoc = map (HashMap.fromList) ngramsByDoc = map (HashMap.fromList)
$ map (map (\(k,v) -> (SimpleNgrams (text2ngrams k), v))) $ map (map (\(k,v) -> (SimpleNgrams (text2ngrams k), v)))
$ map (\doc -> List.zip $ map (\doc -> List.zip
(termsInText (buildPatterns $ map (\k -> (Text.splitOn " " $ unNgramsTerm k, [])) orphans) (termsInText (buildPatterns $ map (\k -> (Text.splitOn " " $ unNgramsTerm k, [])) orphans)
$ Text.unlines $ catMaybes $ Text.unlines $ catMaybes
[ doc ^. node_hyperdata . hd_title [ doc ^. context_hyperdata . hd_title
, doc ^. node_hyperdata . hd_abstract , doc ^. context_hyperdata . hd_abstract
] ]
) )
(List.cycle [Map.fromList $ [(nt, Map.singleton (doc ^. node_id) 1 )]]) (List.cycle [Map.fromList $ [(nt, Map.singleton (doc ^. context_id) 1 )]])
) docs ) docs
-- printDebug "ngramsByDoc" ngramsByDoc printDebug "ngramsByDoc" ngramsByDoc
-- Saving the indexation in database -- Saving the indexation in database
_ <- mapM (saveDocNgramsWith lId) ngramsByDoc _ <- mapM (saveDocNgramsWith lId) ngramsByDoc
......
...@@ -40,8 +40,8 @@ import Servant ...@@ -40,8 +40,8 @@ import Servant
import Test.QuickCheck (elements) import Test.QuickCheck (elements)
import Test.QuickCheck.Arbitrary (Arbitrary, arbitrary) import Test.QuickCheck.Arbitrary (Arbitrary, arbitrary)
import Gargantext.API.Admin.Auth.Types (PathId(..))
import Gargantext.API.Admin.Auth (withAccess) import Gargantext.API.Admin.Auth (withAccess)
import Gargantext.API.Admin.Auth.Types (PathId(..))
import Gargantext.API.Metrics import Gargantext.API.Metrics
import Gargantext.API.Ngrams (TableNgramsApi, apiNgramsTableCorpus) import Gargantext.API.Ngrams (TableNgramsApi, apiNgramsTableCorpus)
import Gargantext.API.Ngrams.Types (TabType(..)) import Gargantext.API.Ngrams.Types (TabType(..))
...@@ -53,6 +53,7 @@ import Gargantext.Core.Types (NodeTableResult) ...@@ -53,6 +53,7 @@ import Gargantext.Core.Types (NodeTableResult)
import Gargantext.Core.Types.Individu (User(..)) import Gargantext.Core.Types.Individu (User(..))
import Gargantext.Core.Types.Main (Tree, NodeTree) import Gargantext.Core.Types.Main (Tree, NodeTree)
import Gargantext.Core.Utils.Prefix (unPrefix) import Gargantext.Core.Utils.Prefix (unPrefix)
import Gargantext.Core.Viz.Phylo.Legacy.LegacyAPI (PhyloAPI, phyloAPI)
import Gargantext.Database.Action.Flow.Pairing (pairing) import Gargantext.Database.Action.Flow.Pairing (pairing)
import Gargantext.Database.Admin.Types.Hyperdata import Gargantext.Database.Admin.Types.Hyperdata
import Gargantext.Database.Admin.Types.Node import Gargantext.Database.Admin.Types.Node
...@@ -63,12 +64,12 @@ import Gargantext.Database.Query.Table.Node.Children (getChildren) ...@@ -63,12 +64,12 @@ import Gargantext.Database.Query.Table.Node.Children (getChildren)
import Gargantext.Database.Query.Table.Node.Error (HasNodeError(..)) import Gargantext.Database.Query.Table.Node.Error (HasNodeError(..))
import Gargantext.Database.Query.Table.Node.Update (Update(..), update) import Gargantext.Database.Query.Table.Node.Update (Update(..), update)
import Gargantext.Database.Query.Table.Node.UpdateOpaleye (updateHyperdata) import Gargantext.Database.Query.Table.Node.UpdateOpaleye (updateHyperdata)
import Gargantext.Database.Query.Table.NodeContext (nodeContextsCategory, nodeContextsScore)
import Gargantext.Database.Query.Table.NodeNode import Gargantext.Database.Query.Table.NodeNode
import Gargantext.Database.Query.Tree (tree, TreeMode(..)) import Gargantext.Database.Query.Tree (tree, TreeMode(..))
import Gargantext.Prelude import Gargantext.Prelude
import Gargantext.Core.Viz.Phylo.Legacy.LegacyAPI (PhyloAPI, phyloAPI)
import qualified Gargantext.API.Node.DocumentsFromWriteNodes as DocumentsFromWriteNodes
import qualified Gargantext.API.Node.DocumentUpload as DocumentUpload import qualified Gargantext.API.Node.DocumentUpload as DocumentUpload
import qualified Gargantext.API.Node.DocumentsFromWriteNodes as DocumentsFromWriteNodes
import qualified Gargantext.API.Node.FrameCalcUpload as FrameCalcUpload import qualified Gargantext.API.Node.FrameCalcUpload as FrameCalcUpload
import qualified Gargantext.API.Node.Share as Share import qualified Gargantext.API.Node.Share as Share
import qualified Gargantext.API.Node.Update as Update import qualified Gargantext.API.Node.Update as Update
...@@ -212,7 +213,7 @@ nodeAPI p uId id' = withAccess (Proxy :: Proxy (NodeAPI a)) Proxy uId (PathNode ...@@ -212,7 +213,7 @@ nodeAPI p uId id' = withAccess (Proxy :: Proxy (NodeAPI a)) Proxy uId (PathNode
:<|> postNodeAsyncAPI uId id' :<|> postNodeAsyncAPI uId id'
:<|> FrameCalcUpload.api uId id' :<|> FrameCalcUpload.api uId id'
:<|> putNode id' :<|> putNode id'
:<|> Update.api uId id' :<|> Update.api uId id'
:<|> Action.deleteNode (RootId $ NodeId uId) id' :<|> Action.deleteNode (RootId $ NodeId uId) id'
:<|> getChildren id' p :<|> getChildren id' p
...@@ -271,7 +272,7 @@ catApi :: CorpusId -> GargServer CatApi ...@@ -271,7 +272,7 @@ catApi :: CorpusId -> GargServer CatApi
catApi = putCat catApi = putCat
where where
putCat :: CorpusId -> NodesToCategory -> Cmd err [Int] putCat :: CorpusId -> NodesToCategory -> Cmd err [Int]
putCat cId cs' = nodeNodesCategory $ map (\n -> (cId, n, ntc_category cs')) (ntc_nodesId cs') putCat cId cs' = nodeContextsCategory $ map (\n -> (cId, n, ntc_category cs')) (ntc_nodesId cs')
------------------------------------------------------------------------ ------------------------------------------------------------------------
type ScoreApi = Summary " To Score NodeNodes" type ScoreApi = Summary " To Score NodeNodes"
...@@ -292,7 +293,7 @@ scoreApi :: CorpusId -> GargServer ScoreApi ...@@ -292,7 +293,7 @@ scoreApi :: CorpusId -> GargServer ScoreApi
scoreApi = putScore scoreApi = putScore
where where
putScore :: CorpusId -> NodesToScore -> Cmd err [Int] putScore :: CorpusId -> NodesToScore -> Cmd err [Int]
putScore cId cs' = nodeNodesScore $ map (\n -> (cId, n, nts_score cs')) (nts_nodesId cs') putScore cId cs' = nodeContextsScore $ map (\n -> (cId, n, nts_score cs')) (nts_nodesId cs')
------------------------------------------------------------------------ ------------------------------------------------------------------------
-- TODO adapt FacetDoc -> ListDoc (and add type of document as column) -- TODO adapt FacetDoc -> ListDoc (and add type of document as column)
......
{-| {-|
Module : Gargantext.API.Node.Corpus.Export Module : Gargantext.API.Node.Corpus.Export
Description : Get Metrics from Storage (Database like) Description : Corpus export
Copyright : (c) CNRS, 2017-Present Copyright : (c) CNRS, 2017-Present
License : AGPL + CECILL v3 License : AGPL + CECILL v3
Maintainer : team@gargantext.org Maintainer : team@gargantext.org
...@@ -19,28 +19,30 @@ module Gargantext.API.Node.Corpus.Export ...@@ -19,28 +19,30 @@ module Gargantext.API.Node.Corpus.Export
import Data.Map (Map) import Data.Map (Map)
import Data.Maybe (fromMaybe) import Data.Maybe (fromMaybe)
import Data.Set (Set) import Data.Set (Set)
import Data.Text (Text)
import qualified Data.List as List import qualified Data.List as List
import qualified Data.Map as Map import qualified Data.Map as Map
import qualified Data.Set as Set import qualified Data.Set as Set
import qualified Data.HashMap.Strict as HashMap import qualified Data.HashMap.Strict as HashMap
import Gargantext.API.Node.Corpus.Export.Types import Gargantext.API.Node.Corpus.Export.Types
import qualified Gargantext.API.Node.Document.Export.Types as DocumentExport
import Gargantext.API.Ngrams.Types import Gargantext.API.Ngrams.Types
import Gargantext.API.Ngrams.Tools (filterListWithRoot, mapTermListRoot, getRepo') import Gargantext.API.Ngrams.Tools (filterListWithRoot, mapTermListRoot, getRepo')
import Gargantext.API.Prelude (GargNoServer) import Gargantext.API.Prelude (GargNoServer)
import Gargantext.Prelude.Crypto.Hash (hash) import Gargantext.Prelude.Crypto.Hash (hash)
import Gargantext.Core.Types import Gargantext.Core.Types
import Gargantext.Core.NodeStory import Gargantext.Core.NodeStory
import Gargantext.Database.Action.Metrics.NgramsByNode (getNgramsByNodeOnlyUser) import Gargantext.Database.Action.Metrics.NgramsByContext (getNgramsByContextOnlyUser)
import Gargantext.Database.Admin.Config (userMaster) import Gargantext.Database.Admin.Config (userMaster)
import Gargantext.Database.Admin.Types.Hyperdata (HyperdataDocument(..)) import Gargantext.Database.Admin.Types.Hyperdata (HyperdataDocument(..))
import Gargantext.Database.Prelude (Cmd) import Gargantext.Database.Prelude (Cmd)
import Gargantext.Database.Query.Table.Node import Gargantext.Database.Query.Table.Node
import Gargantext.Database.Query.Table.Node.Error (HasNodeError) import Gargantext.Database.Query.Table.Node.Error (HasNodeError)
import Gargantext.Database.Query.Table.Node.Select (selectNodesWithUsername) import Gargantext.Database.Query.Table.Node.Select (selectNodesWithUsername)
import Gargantext.Database.Query.Table.NodeNode (selectDocNodes) import Gargantext.Database.Query.Table.NodeContext (selectDocNodes)
import Gargantext.Database.Schema.Ngrams (NgramsType(..)) import Gargantext.Database.Schema.Ngrams (NgramsType(..))
import Gargantext.Database.Schema.Node (_node_id, _node_hyperdata) import Gargantext.Database.Schema.Context (_context_id, _context_hyperdata)
import Gargantext.Prelude import Gargantext.Prelude
-------------------------------------------------- --------------------------------------------------
...@@ -61,31 +63,32 @@ getCorpus cId lId nt' = do ...@@ -61,31 +63,32 @@ getCorpus cId lId nt' = do
Just l -> pure l Just l -> pure l
ns <- Map.fromList ns <- Map.fromList
<$> map (\n -> (_node_id n, n)) <$> map (\n -> (_context_id n, n))
<$> selectDocNodes cId <$> selectDocNodes cId
repo <- getRepo' [listId] repo <- getRepo' [listId]
ngs <- getNodeNgrams cId listId nt repo ngs <- getContextNgrams cId listId nt repo
let -- uniqId is hash computed already for each document imported in database let -- uniqId is hash computed already for each document imported in database
r = Map.intersectionWith r = Map.intersectionWith
(\a b -> Document { _d_document = a (\a b -> DocumentExport.Document { _d_document = context2node a
, _d_ngrams = Ngrams (Set.toList b) (hash b) , _d_ngrams = DocumentExport.Ngrams (Set.toList b) (hash b)
, _d_hash = d_hash a b } , _d_hash = d_hash a b }
) ns (Map.map (Set.map unNgramsTerm) ngs) ) ns (Map.map (Set.map unNgramsTerm) ngs)
where where
d_hash a b = hash [ fromMaybe "" (_hd_uniqId $ _node_hyperdata a) d_hash :: Context HyperdataDocument -> Set Text -> Text
d_hash a b = hash [ fromMaybe "" (_hd_uniqId $ _context_hyperdata a)
, hash b , hash b
] ]
pure $ Corpus { _c_corpus = Map.elems r pure $ Corpus { _c_corpus = Map.elems r
, _c_hash = hash $ List.map _d_hash $ Map.elems r } , _c_hash = hash $ List.map DocumentExport._d_hash $ Map.elems r }
getNodeNgrams :: HasNodeError err getContextNgrams :: HasNodeError err
=> CorpusId => CorpusId
-> ListId -> ListId
-> NgramsType -> NgramsType
-> NodeListStory -> NodeListStory
-> Cmd err (Map NodeId (Set NgramsTerm)) -> Cmd err (Map ContextId (Set NgramsTerm))
getNodeNgrams cId lId nt repo = do getContextNgrams cId lId nt repo = do
-- lId <- case lId' of -- lId <- case lId' of
-- Nothing -> defaultList cId -- Nothing -> defaultList cId
-- Just l -> pure l -- Just l -> pure l
...@@ -93,7 +96,7 @@ getNodeNgrams cId lId nt repo = do ...@@ -93,7 +96,7 @@ getNodeNgrams cId lId nt repo = do
lIds <- selectNodesWithUsername NodeList userMaster lIds <- selectNodesWithUsername NodeList userMaster
let ngs = filterListWithRoot MapTerm $ mapTermListRoot [lId] nt repo let ngs = filterListWithRoot MapTerm $ mapTermListRoot [lId] nt repo
-- TODO HashMap -- TODO HashMap
r <- getNgramsByNodeOnlyUser cId (lIds <> [lId]) nt (HashMap.keys ngs) r <- getNgramsByContextOnlyUser cId (lIds <> [lId]) nt (HashMap.keys ngs)
pure r pure r
-- TODO -- TODO
......
...@@ -17,51 +17,26 @@ import Data.Aeson.TH (deriveJSON) ...@@ -17,51 +17,26 @@ import Data.Aeson.TH (deriveJSON)
import Data.Swagger import Data.Swagger
import Data.Text (Text) import Data.Text (Text)
import GHC.Generics (Generic) import GHC.Generics (Generic)
import qualified Gargantext.API.Node.Document.Export.Types as DocumentExport
import Gargantext.Core.Types import Gargantext.Core.Types
import Gargantext.Core.Utils.Prefix (unPrefix, unPrefixSwagger) import Gargantext.Core.Utils.Prefix (unPrefix, unPrefixSwagger)
import Gargantext.Database.Admin.Types.Hyperdata (HyperdataDocument(..))
import Gargantext.Database.Schema.Ngrams (NgramsType(..)) import Gargantext.Database.Schema.Ngrams (NgramsType(..))
import Servant import Servant
-- Corpus Export -- Corpus Export
data Corpus = data Corpus =
Corpus { _c_corpus :: [Document] Corpus { _c_corpus :: [DocumentExport.Document]
, _c_hash :: Hash , _c_hash :: Hash
} deriving (Generic) } deriving (Generic)
-- | Document Export
data Document =
Document { _d_document :: Node HyperdataDocument
, _d_ngrams :: Ngrams
, _d_hash :: Hash
} deriving (Generic)
data Ngrams =
Ngrams { _ng_ngrams :: [Text]
, _ng_hash :: Hash
} deriving (Generic)
type Hash = Text type Hash = Text
------- -------
instance ToSchema Corpus where instance ToSchema Corpus where
declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_c_") declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_c_")
instance ToSchema Document where
declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_d_")
instance ToSchema Ngrams where
declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_ng_")
------- -------
instance ToParamSchema Corpus where instance ToParamSchema Corpus where
toParamSchema _ = toParamSchema (Proxy :: Proxy TODO) toParamSchema _ = toParamSchema (Proxy :: Proxy TODO)
instance ToParamSchema Document where
toParamSchema _ = toParamSchema (Proxy :: Proxy TODO)
instance ToParamSchema Ngrams where
toParamSchema _ = toParamSchema (Proxy :: Proxy TODO)
-------------------------------------------------- --------------------------------------------------
type API = Summary "Corpus Export" type API = Summary "Corpus Export"
:> "export" :> "export"
...@@ -70,5 +45,3 @@ type API = Summary "Corpus Export" ...@@ -70,5 +45,3 @@ type API = Summary "Corpus Export"
:> Get '[JSON] Corpus :> Get '[JSON] Corpus
$(deriveJSON (unPrefix "_c_") ''Corpus) $(deriveJSON (unPrefix "_c_") ''Corpus)
$(deriveJSON (unPrefix "_d_") ''Document)
$(deriveJSON (unPrefix "_ng_") ''Ngrams)
\ No newline at end of file
{-|
Module : Gargantext.API.Node.Document.Export
Description : Document export
Copyright : (c) CNRS, 2017-Present
License : AGPL + CECILL v3
Maintainer : team@gargantext.org
Stability : experimental
Portability : POSIX
-}
module Gargantext.API.Node.Document.Export
where
import qualified Data.ByteString.Lazy.Char8 as BSC
import Data.Csv (encodeDefaultOrderedByName)
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import Data.Version (showVersion)
import Gargantext.API.Node.Document.Export.Types
import Gargantext.API.Prelude (GargNoServer, GargServer)
import Gargantext.Core (toDBid)
import Gargantext.Core.Types
-- import Gargantext.Database.Admin.Types.Hyperdata (HyperdataDocument(..))
import Gargantext.Database.Query.Facet (runViewDocuments, Facet(..))
import Gargantext.Database.Query.Table.Node (getClosestParentIdByType)
import Gargantext.Database.Schema.Node (NodePoly(..))
import Gargantext.Prelude
import qualified Paths_gargantext as PG -- cabal magic build module
import Servant
api :: UserId -> DocId -> GargServer API
api uid dId = getDocumentsJSON uid dId
:<|> getDocumentsCSV uid dId
--------------------------------------------------
-- | Hashes are ordered by Set
getDocumentsJSON :: UserId
-> DocId
-> GargNoServer DocumentExport
getDocumentsJSON uId pId = do
mcId <- getClosestParentIdByType pId NodeCorpus
let cId = maybe (panic "[G.A.N.D.Export] Node has no parent") identity mcId
docs <- runViewDocuments cId False Nothing Nothing Nothing Nothing
pure $ DocumentExport { _de_documents = mapFacetDoc <$> docs
, _de_garg_version = T.pack $ showVersion PG.version }
where
mapFacetDoc (FacetDoc { .. }) =
Document { _d_document =
Node { _node_id = facetDoc_id
, _node_hash_id = Nothing
, _node_typename = toDBid NodeDocument
, _node_user_id = uId
, _node_parent_id = Nothing
, _node_name = facetDoc_title
, _node_date = facetDoc_created
, _node_hyperdata = facetDoc_hyperdata }
, _d_ngrams = Ngrams { _ng_ngrams = []
, _ng_hash = "" }
, _d_hash = "" }
_mapDoc d = Document { _d_document = d
, _d_ngrams = Ngrams { _ng_ngrams = []
, _ng_hash = "" }
, _d_hash = ""}
getDocumentsCSV :: UserId
-> DocId
-> GargNoServer T.Text -- [Document]
getDocumentsCSV uId pId = do
DocumentExport { _de_documents } <- getDocumentsJSON uId pId
let ret = TE.decodeUtf8 $ BSC.toStrict $ encodeDefaultOrderedByName _de_documents
pure ret
{-|
Module : Gargantext.API.Node.Document.Export.Types
Description : Types for Gargantext.API.Node.Document.Export
Copyright : (c) CNRS, 2017-Present
License : AGPL + CECILL v3
Maintainer : team@gargantext.org
Stability : experimental
Portability : POSIX
-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
module Gargantext.API.Node.Document.Export.Types where
import Data.Aeson.TH (deriveJSON)
import Data.Csv (DefaultOrdered(..), ToNamedRecord(..), (.=), header, namedRecord)
import Data.Swagger
--import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import Gargantext.Core.Types
import Gargantext.Core.Utils.Prefix (unPrefix, unPrefixSwagger)
import Gargantext.Database.Admin.Types.Hyperdata (HyperdataDocument(..))
import Gargantext.Database.Schema.Node (NodePoly(..))
--import Gargantext.Utils.Servant (CSV)
import Protolude
--import Protolude.Partial (read)
import Servant
-- | Document Export
data DocumentExport =
DocumentExport { _de_documents :: [Document]
, _de_garg_version :: Text
} deriving (Generic)
data Document =
Document { _d_document :: Node HyperdataDocument
, _d_ngrams :: Ngrams
, _d_hash :: Hash
} deriving (Generic)
--instance Read Document where
-- read "" = panic "not implemented"
instance DefaultOrdered Document where
headerOrder _ = header ["Publication Day"
, "Publication Month"
, "Publication Year"
, "Authors"
, "Title"
, "Source"
, "Abstract"]
instance ToNamedRecord Document where
toNamedRecord (Document { _d_document = Node { .. }}) =
namedRecord
[ "Publication Day" .= _hd_publication_day _node_hyperdata
, "Publication Month" .= _hd_publication_month _node_hyperdata
, "Publication Year" .= _hd_publication_year _node_hyperdata
, "Authors" .= _hd_authors _node_hyperdata
, "Title" .= _hd_title _node_hyperdata
, "Source" .= (TE.encodeUtf8 <$> _hd_source _node_hyperdata)
, "Abstract" .= (TE.encodeUtf8 <$> _hd_abstract _node_hyperdata) ]
data Ngrams =
Ngrams { _ng_ngrams :: [Text]
, _ng_hash :: Hash
} deriving (Generic)
type Hash = Text
-------
instance ToSchema DocumentExport where
declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_de_")
instance ToSchema Document where
declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_d_")
instance ToSchema Ngrams where
declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_ng_")
-------
instance ToParamSchema DocumentExport where
toParamSchema _ = toParamSchema (Proxy :: Proxy TODO)
instance ToParamSchema Document where
toParamSchema _ = toParamSchema (Proxy :: Proxy TODO)
instance ToParamSchema Ngrams where
toParamSchema _ = toParamSchema (Proxy :: Proxy TODO)
--------------------------------------------------
type API = Summary "Document Export"
:> "export"
:> ( "json"
:> Get '[JSON] DocumentExport
:<|> "csv"
:> Get '[PlainText] Text) -- [Document])
$(deriveJSON (unPrefix "_de_") ''DocumentExport)
$(deriveJSON (unPrefix "_d_") ''Document)
$(deriveJSON (unPrefix "_ng_") ''Ngrams)
...@@ -18,7 +18,7 @@ module Gargantext.API.Node.Update ...@@ -18,7 +18,7 @@ module Gargantext.API.Node.Update
import Control.Lens (view) import Control.Lens (view)
import Data.Aeson import Data.Aeson
import Data.Maybe (Maybe(..)) import Data.Maybe (Maybe(..), fromMaybe)
import Data.Swagger import Data.Swagger
import GHC.Generics (Generic) import GHC.Generics (Generic)
import Gargantext.API.Admin.Orchestrator.Types (JobLog(..), AsyncJobs) import Gargantext.API.Admin.Orchestrator.Types (JobLog(..), AsyncJobs)
...@@ -30,13 +30,15 @@ import Gargantext.API.Prelude (GargServer, simuLogs) ...@@ -30,13 +30,15 @@ import Gargantext.API.Prelude (GargServer, simuLogs)
import Gargantext.Core.Methods.Distances (GraphMetric(..)) import Gargantext.Core.Methods.Distances (GraphMetric(..))
import Gargantext.Core.Types.Main (ListType(..)) import Gargantext.Core.Types.Main (ListType(..))
import Gargantext.Core.Viz.Graph.API (recomputeGraph) import Gargantext.Core.Viz.Graph.API (recomputeGraph)
import Gargantext.Database.Action.Metrics (updateNgramsOccurrences, updateContextScore)
import Gargantext.Database.Action.Flow.Pairing (pairing) import Gargantext.Database.Action.Flow.Pairing (pairing)
import Gargantext.Database.Query.Table.Node (defaultList)
import Gargantext.Database.Action.Flow.Types (FlowCmdM) import Gargantext.Database.Action.Flow.Types (FlowCmdM)
import Gargantext.Database.Admin.Types.Node import Gargantext.Database.Admin.Types.Node
import Gargantext.Database.Query.Table.Node (getNode) import Gargantext.Database.Query.Table.Node (getNode)
import Gargantext.Database.Schema.Node (node_parent_id) import Gargantext.Database.Schema.Node (node_parent_id)
import Gargantext.Database.Schema.Ngrams (NgramsType(NgramsTerms)) import Gargantext.Database.Schema.Ngrams (NgramsType(NgramsTerms))
import Gargantext.Prelude (Ord, Eq, (<$>), ($), liftBase, (.), printDebug, pure, show, cs, (<>), panic) import Gargantext.Prelude (Bool(..), Ord, Eq, (<$>), ($), liftBase, (.), printDebug, pure, show, cs, (<>), panic)
import qualified Gargantext.Utils.Aeson as GUA import qualified Gargantext.Utils.Aeson as GUA
import Prelude (Enum, Bounded, minBound, maxBound) import Prelude (Enum, Bounded, minBound, maxBound)
import Servant import Servant
...@@ -95,7 +97,7 @@ updateNode uId nId (UpdateNodeParamsGraph metric) logStatus = do ...@@ -95,7 +97,7 @@ updateNode uId nId (UpdateNodeParamsGraph metric) logStatus = do
, _scst_events = Just [] , _scst_events = Just []
} }
_ <- recomputeGraph uId nId (Just metric) _ <- recomputeGraph uId nId (Just metric) True
pure JobLog { _scst_succeeded = Just 2 pure JobLog { _scst_succeeded = Just 2
, _scst_failed = Just 0 , _scst_failed = Just 0
...@@ -165,7 +167,40 @@ updateNode _uId lId (UpdateNodeParamsList _mode) logStatus = do ...@@ -165,7 +167,40 @@ updateNode _uId lId (UpdateNodeParamsList _mode) logStatus = do
} }
_ <- case corpusId of _ <- case corpusId of
Just cId -> reIndexWith cId lId NgramsTerms (Set.singleton MapTerm) Just cId -> do
_ <- reIndexWith cId lId NgramsTerms (Set.singleton MapTerm)
_ <- updateNgramsOccurrences cId (Just lId)
pure ()
Nothing -> pure ()
pure JobLog { _scst_succeeded = Just 3
, _scst_failed = Just 0
, _scst_remaining = Just 0
, _scst_events = Just []
}
updateNode _uId tId (UpdateNodeParamsTexts _mode) logStatus = do
logStatus JobLog { _scst_succeeded = Just 1
, _scst_failed = Just 0
, _scst_remaining = Just 2
, _scst_events = Just []
}
corpusId <- view node_parent_id <$> getNode tId
lId <- defaultList $ fromMaybe (panic "[G.A.N.Update] updateNode/UpdateNodeParamsTexts: no defaultList") corpusId
logStatus JobLog { _scst_succeeded = Just 2
, _scst_failed = Just 0
, _scst_remaining = Just 1
, _scst_events = Just []
}
_ <- case corpusId of
Just cId -> do
_ <- reIndexWith cId lId NgramsTerms (Set.singleton MapTerm)
_ <- updateNgramsOccurrences cId (Just lId)
_ <- updateContextScore cId (Just lId)
-- printDebug "updateContextsScore" (cId, lId, u)
pure ()
Nothing -> pure () Nothing -> pure ()
pure JobLog { _scst_succeeded = Just 3 pure JobLog { _scst_succeeded = Just 3
...@@ -175,6 +210,9 @@ updateNode _uId lId (UpdateNodeParamsList _mode) logStatus = do ...@@ -175,6 +210,9 @@ updateNode _uId lId (UpdateNodeParamsList _mode) logStatus = do
} }
updateNode _uId _nId _p logStatus = do updateNode _uId _nId _p logStatus = do
simuLogs logStatus 10 simuLogs logStatus 10
......
...@@ -9,20 +9,15 @@ Portability : POSIX ...@@ -9,20 +9,15 @@ Portability : POSIX
-} -}
{-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE TypeOperators #-} {-# LANGUAGE TypeOperators #-}
{-# LANGUAGE KindSignatures #-} {-# LANGUAGE KindSignatures #-}
{-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeFamilies #-}
---------------------------------------------------------------------
module Gargantext.API.Routes module Gargantext.API.Routes
where where
---------------------------------------------------------------------
-- import qualified Gargantext.API.Search as Search
import Control.Concurrent (threadDelay) import Control.Concurrent (threadDelay)
import Control.Lens (view) import Control.Lens (view)
import Data.Text (Text) import Data.Text (Text)
...@@ -33,29 +28,32 @@ import Servant.Auth.Swagger () ...@@ -33,29 +28,32 @@ import Servant.Auth.Swagger ()
import Servant.Job.Async import Servant.Job.Async
import Servant.Swagger.UI import Servant.Swagger.UI
import qualified Gargantext.API.Ngrams.List as List
import qualified Gargantext.API.Node.Contact as Contact
import qualified Gargantext.API.Node.Corpus.Annuaire as Annuaire
import qualified Gargantext.API.Node.Corpus.Export as Export
import qualified Gargantext.API.Node.Corpus.Export.Types as Export
import qualified Gargantext.API.Node.Corpus.New as New
import qualified Gargantext.API.Public as Public
import Gargantext.API.Admin.Auth.Types (AuthRequest, AuthResponse, AuthenticatedUser(..), PathId(..))
import Gargantext.API.Admin.Auth (withAccess) import Gargantext.API.Admin.Auth (withAccess)
import Gargantext.API.Admin.Auth.Types (AuthRequest, AuthResponse, AuthenticatedUser(..), PathId(..))
import Gargantext.API.Admin.FrontEnd (FrontEndAPI) import Gargantext.API.Admin.FrontEnd (FrontEndAPI)
import Gargantext.API.Context
import Gargantext.API.Count (CountAPI, count, Query) import Gargantext.API.Count (CountAPI, count, Query)
import qualified Gargantext.API.GraphQL as GraphQL
import Gargantext.API.Job (jobLogInit) import Gargantext.API.Job (jobLogInit)
import Gargantext.API.Ngrams (TableNgramsApi, apiNgramsTableDoc) import Gargantext.API.Ngrams (TableNgramsApi, apiNgramsTableDoc)
import Gargantext.API.Node import Gargantext.API.Node
import Gargantext.API.Prelude import Gargantext.API.Prelude
import Gargantext.Core.Types.Individu (User(..)) import Gargantext.Core.Types.Individu (User(..))
import Gargantext.Core.Viz.Graph.API import Gargantext.Core.Viz.Graph.API
import Gargantext.Database.Prelude (HasConfig(..))
import Gargantext.Database.Admin.Types.Hyperdata import Gargantext.Database.Admin.Types.Hyperdata
import Gargantext.Database.Admin.Types.Node import Gargantext.Database.Admin.Types.Node
import Gargantext.Database.Prelude (HasConfig(..))
import Gargantext.Prelude import Gargantext.Prelude
import Gargantext.Prelude.Config (gc_max_docs_scrapers) import Gargantext.Prelude.Config (gc_max_docs_scrapers)
import qualified Gargantext.API.GraphQL as GraphQL
import qualified Gargantext.API.Ngrams.List as List
import qualified Gargantext.API.Node.Contact as Contact
import qualified Gargantext.API.Node.Corpus.Annuaire as Annuaire
import qualified Gargantext.API.Node.Corpus.Export as CorpusExport
import qualified Gargantext.API.Node.Corpus.Export.Types as CorpusExport
import qualified Gargantext.API.Node.Corpus.New as New
import qualified Gargantext.API.Node.Document.Export as DocumentExport
import qualified Gargantext.API.Node.Document.Export.Types as DocumentExport
import qualified Gargantext.API.Public as Public
type GargAPI = "api" :> Summary "API " :> GargAPIVersion type GargAPI = "api" :> Summary "API " :> GargAPIVersion
...@@ -99,6 +97,11 @@ type GargPrivateAPI' = ...@@ -99,6 +97,11 @@ type GargPrivateAPI' =
:> Capture "node_id" NodeId :> Capture "node_id" NodeId
:> NodeAPI HyperdataAny :> NodeAPI HyperdataAny
-- Context endpoint
:<|> "context" :> Summary "Node endpoint"
:> Capture "node_id" ContextId
:> ContextAPI HyperdataAny
-- Corpus endpoints -- Corpus endpoints
:<|> "corpus" :> Summary "Corpus endpoint" :<|> "corpus" :> Summary "Corpus endpoint"
:> Capture "corpus_id" CorpusId :> Capture "corpus_id" CorpusId
...@@ -111,7 +114,7 @@ type GargPrivateAPI' = ...@@ -111,7 +114,7 @@ type GargPrivateAPI' =
:> NodeNodeAPI HyperdataAny :> NodeNodeAPI HyperdataAny
:<|> "corpus" :> Capture "node_id" CorpusId :<|> "corpus" :> Capture "node_id" CorpusId
:> Export.API :> CorpusExport.API
-- Annuaire endpoint -- Annuaire endpoint
{- {-
...@@ -133,6 +136,9 @@ type GargPrivateAPI' = ...@@ -133,6 +136,9 @@ type GargPrivateAPI' =
:> "ngrams" :> "ngrams"
:> TableNgramsApi :> TableNgramsApi
:<|> "texts" :> Capture "node_id" DocId
:> DocumentExport.API
-- :<|> "counts" :> Stream GET NewLineFraming '[JSON] Count :> CountAPI -- :<|> "counts" :> Stream GET NewLineFraming '[JSON] Count :> CountAPI
-- TODO-SECURITY -- TODO-SECURITY
:<|> "count" :> Summary "Count endpoint" :<|> "count" :> Summary "Count endpoint"
...@@ -215,9 +221,10 @@ serverPrivateGargAPI' :: AuthenticatedUser -> GargServer GargPrivateAPI' ...@@ -215,9 +221,10 @@ serverPrivateGargAPI' :: AuthenticatedUser -> GargServer GargPrivateAPI'
serverPrivateGargAPI' (AuthenticatedUser (NodeId uid)) serverPrivateGargAPI' (AuthenticatedUser (NodeId uid))
= serverGargAdminAPI = serverGargAdminAPI
:<|> nodeAPI (Proxy :: Proxy HyperdataAny) uid :<|> nodeAPI (Proxy :: Proxy HyperdataAny) uid
:<|> contextAPI (Proxy :: Proxy HyperdataAny) uid
:<|> nodeAPI (Proxy :: Proxy HyperdataCorpus) uid :<|> nodeAPI (Proxy :: Proxy HyperdataCorpus) uid
:<|> nodeNodeAPI (Proxy :: Proxy HyperdataAny) uid :<|> nodeNodeAPI (Proxy :: Proxy HyperdataAny) uid
:<|> Export.getCorpus -- uid :<|> CorpusExport.getCorpus -- uid
-- :<|> nodeAPI (Proxy :: Proxy HyperdataContact) uid -- :<|> nodeAPI (Proxy :: Proxy HyperdataContact) uid
:<|> nodeAPI (Proxy :: Proxy HyperdataAnnuaire) uid :<|> nodeAPI (Proxy :: Proxy HyperdataAnnuaire) uid
:<|> Contact.api uid :<|> Contact.api uid
...@@ -225,6 +232,8 @@ serverPrivateGargAPI' (AuthenticatedUser (NodeId uid)) ...@@ -225,6 +232,8 @@ serverPrivateGargAPI' (AuthenticatedUser (NodeId uid))
:<|> withAccess (Proxy :: Proxy TableNgramsApi) Proxy uid :<|> withAccess (Proxy :: Proxy TableNgramsApi) Proxy uid
<$> PathNode <*> apiNgramsTableDoc <$> PathNode <*> apiNgramsTableDoc
:<|> DocumentExport.api uid
:<|> count -- TODO: undefined :<|> count -- TODO: undefined
-- :<|> withAccess (Proxy :: Proxy (Search.API Search.SearchResult)) Proxy uid -- :<|> withAccess (Proxy :: Proxy (Search.API Search.SearchResult)) Proxy uid
......
...@@ -177,8 +177,8 @@ type NodeStoryDir = FilePath ...@@ -177,8 +177,8 @@ type NodeStoryDir = FilePath
writeNodeStories :: NodeStoryDir -> NodeListStory -> IO () writeNodeStories :: NodeStoryDir -> NodeListStory -> IO ()
writeNodeStories fp nls = do writeNodeStories fp nls = do
done <- mapM (writeNodeStory fp) $ splitByNode nls _done <- mapM (writeNodeStory fp) $ splitByNode nls
printDebug "[writeNodeStories]" done -- printDebug "[writeNodeStories]" done
pure () pure ()
writeNodeStory :: NodeStoryDir -> (NodeId, NodeListStory) -> IO () writeNodeStory :: NodeStoryDir -> (NodeId, NodeListStory) -> IO ()
...@@ -192,7 +192,7 @@ splitByNode (NodeStory m) = ...@@ -192,7 +192,7 @@ splitByNode (NodeStory m) =
saverAction' :: Serialise a => NodeStoryDir -> NodeId -> a -> IO () saverAction' :: Serialise a => NodeStoryDir -> NodeId -> a -> IO ()
saverAction' repoDir nId a = do saverAction' repoDir nId a = do
withTempFile repoDir ((cs $ show nId) <> "-tmp-repo.cbor") $ \fp h -> do withTempFile repoDir ((cs $ show nId) <> "-tmp-repo.cbor") $ \fp h -> do
printDebug "[repoSaverAction]" fp -- printDebug "[repoSaverAction]" fp
DBL.hPut h $ serialise a DBL.hPut h $ serialise a
hClose h hClose h
renameFile fp (nodeStoryPath repoDir nId) renameFile fp (nodeStoryPath repoDir nId)
......
...@@ -58,22 +58,21 @@ toDoc l (PubMedDoc.PubMed (PubMedDoc.PubMedArticle t j as aus) ...@@ -58,22 +58,21 @@ toDoc l (PubMedDoc.PubMed (PubMedDoc.PubMedArticle t j as aus)
, _hd_publication_second = Nothing , _hd_publication_second = Nothing
, _hd_language_iso2 = Just $ (Text.pack . show) l } , _hd_language_iso2 = Just $ (Text.pack . show) l }
where where
authors :: Maybe [PubMedDoc.Author] -> Maybe Text authors :: [PubMedDoc.Author] -> Maybe Text
authors aus' = case aus' of authors [] = Nothing
Nothing -> Nothing authors au = Just $ (Text.intercalate ", ")
Just au -> Just $ (Text.intercalate ", ") $ catMaybes
$ catMaybes $ map (\n -> PubMedDoc.foreName n <> Just " " <> PubMedDoc.lastName n) au
$ map (\n -> PubMedDoc.foreName n <> Just " " <> PubMedDoc.lastName n) au
institutes :: Maybe [PubMedDoc.Author] -> Maybe Text institutes :: [PubMedDoc.Author] -> Maybe Text
institutes aus' = case aus' of institutes [] = Nothing
Nothing -> Nothing institutes au = Just $ (Text.intercalate ", ")
Just au -> Just $ (Text.intercalate ", ") $ (map (Text.replace ", " " - "))
$ (map (Text.replace ", " " - ")) $ catMaybes
$ catMaybes $ map PubMedDoc.affiliation au
$ map PubMedDoc.affiliation au
abstract :: Maybe [Text] -> Maybe Text abstract :: [Text] -> Maybe Text
abstract as' = fmap (Text.intercalate ", ") as' abstract [] = Nothing
abstract as' = Just $ Text.intercalate ", " as'
...@@ -34,7 +34,7 @@ import Gargantext.Core.Text.List.Social.Prelude ...@@ -34,7 +34,7 @@ import Gargantext.Core.Text.List.Social.Prelude
import Gargantext.Core.Text.Metrics (scored', Scored(..), scored_speExc, scored_genInc, normalizeGlobal, normalizeLocal, scored_terms) import Gargantext.Core.Text.Metrics (scored', Scored(..), scored_speExc, scored_genInc, normalizeGlobal, normalizeLocal, scored_terms)
import Gargantext.Core.Types (ListType(..), MasterCorpusId, UserCorpusId) import Gargantext.Core.Types (ListType(..), MasterCorpusId, UserCorpusId)
import Gargantext.Core.Types.Individu (User(..)) import Gargantext.Core.Types.Individu (User(..))
import Gargantext.Database.Action.Metrics.NgramsByNode (getNodesByNgramsUser, getNodesByNgramsOnlyUser) import Gargantext.Database.Action.Metrics.NgramsByContext (getContextsByNgramsUser, getContextsByNgramsOnlyUser)
import Gargantext.Database.Action.Metrics.TFICF (getTficf_withSample) import Gargantext.Database.Action.Metrics.TFICF (getTficf_withSample)
import Gargantext.Database.Admin.Types.Node (NodeId) import Gargantext.Database.Admin.Types.Node (NodeId)
import Gargantext.Database.Prelude (CmdM) import Gargantext.Database.Prelude (CmdM)
...@@ -98,7 +98,7 @@ buildNgramsOthersList :: ( HasNodeError err ...@@ -98,7 +98,7 @@ buildNgramsOthersList :: ( HasNodeError err
-> (NgramsType, MapListSize) -> (NgramsType, MapListSize)
-> m (Map NgramsType [NgramsElement]) -> m (Map NgramsType [NgramsElement])
buildNgramsOthersList user uCid mfslw _groupParams (nt, MapListSize mapListSize) = do buildNgramsOthersList user uCid mfslw _groupParams (nt, MapListSize mapListSize) = do
allTerms :: HashMap NgramsTerm (Set NodeId) <- getNodesByNgramsUser uCid nt allTerms :: HashMap NgramsTerm (Set NodeId) <- getContextsByNgramsUser uCid nt
-- PrivateFirst for first developments since Public NodeMode is not implemented yet -- PrivateFirst for first developments since Public NodeMode is not implemented yet
socialLists :: FlowCont NgramsTerm FlowListScores socialLists :: FlowCont NgramsTerm FlowListScores
...@@ -159,11 +159,11 @@ buildNgramsTermsList user uCid mCid mfslw groupParams (nt, _mapListSize)= do ...@@ -159,11 +159,11 @@ buildNgramsTermsList user uCid mCid mfslw groupParams (nt, _mapListSize)= do
-- Filter 0 With Double -- Filter 0 With Double
-- Computing global speGen score -- Computing global speGen score
printDebug "[buldNgramsTermsList: Sample List] / start" nt printDebug "[buildNgramsTermsList: Sample List] / start" nt
allTerms :: HashMap NgramsTerm Double <- getTficf_withSample uCid mCid nt allTerms :: HashMap NgramsTerm Double <- getTficf_withSample uCid mCid nt
printDebug "[buldNgramsTermsList: Sample List / end]" nt printDebug "[buildNgramsTermsList: Sample List / end]" (nt, HashMap.size allTerms)
printDebug "[buldNgramsTermsList: Flow Social List / start]" nt printDebug "[buildNgramsTermsList: Flow Social List / start]" nt
-- PrivateFirst for first developments since Public NodeMode is not implemented yet -- PrivateFirst for first developments since Public NodeMode is not implemented yet
socialLists :: FlowCont NgramsTerm FlowListScores socialLists :: FlowCont NgramsTerm FlowListScores
<- flowSocialList mfslw user nt ( FlowCont HashMap.empty <- flowSocialList mfslw user nt ( FlowCont HashMap.empty
...@@ -171,7 +171,7 @@ buildNgramsTermsList user uCid mCid mfslw groupParams (nt, _mapListSize)= do ...@@ -171,7 +171,7 @@ buildNgramsTermsList user uCid mCid mfslw groupParams (nt, _mapListSize)= do
$ List.zip (HashMap.keys allTerms) $ List.zip (HashMap.keys allTerms)
(List.cycle [mempty]) (List.cycle [mempty])
) )
printDebug "[buldNgramsTermsList: Flow Social List / end]" nt printDebug "[buildNgramsTermsList: Flow Social List / end]" nt
let ngramsKeys = HashMap.keysSet allTerms let ngramsKeys = HashMap.keysSet allTerms
...@@ -212,11 +212,14 @@ buildNgramsTermsList user uCid mCid mfslw groupParams (nt, _mapListSize)= do ...@@ -212,11 +212,14 @@ buildNgramsTermsList user uCid mCid mfslw groupParams (nt, _mapListSize)= do
userListId <- defaultList uCid userListId <- defaultList uCid
masterListId <- defaultList mCid masterListId <- defaultList mCid
mapTextDocIds <- getNodesByNgramsOnlyUser uCid mapTextDocIds <- getContextsByNgramsOnlyUser uCid
[userListId, masterListId] [userListId, masterListId]
nt nt
selectedTerms selectedTerms
-- printDebug "mapTextDocIds" mapTextDocIds
let let
groupedTreeScores_SetNodeId :: HashMap NgramsTerm (GroupedTreeScores (Set NodeId)) groupedTreeScores_SetNodeId :: HashMap NgramsTerm (GroupedTreeScores (Set NodeId))
groupedTreeScores_SetNodeId = HashMap.filter (\g -> Set.size (view gts'_score g) > 1) -- removing hapax groupedTreeScores_SetNodeId = HashMap.filter (\g -> Set.size (view gts'_score g) > 1) -- removing hapax
......
...@@ -15,14 +15,15 @@ module Gargantext.Core.Text.List.Social ...@@ -15,14 +15,15 @@ module Gargantext.Core.Text.List.Social
import Control.Monad (mzero) import Control.Monad (mzero)
import Data.Aeson import Data.Aeson
import GHC.Generics
import Data.HashMap.Strict (HashMap) import Data.HashMap.Strict (HashMap)
import Data.Map (Map) import Data.Map (Map)
import Data.Monoid (mconcat) import Data.Monoid (mconcat)
import qualified Data.Scientific as Scientific
import Data.Swagger import Data.Swagger
import qualified Data.Text as T import GHC.Generics
import qualified Data.Vector as V import qualified Data.Scientific as Scientific
import qualified Data.Text as T
import qualified Data.Vector as V
import Gargantext.API.Ngrams.Tools import Gargantext.API.Ngrams.Tools
import Gargantext.API.Ngrams.Types import Gargantext.API.Ngrams.Types
import Gargantext.Core.NodeStory import Gargantext.Core.NodeStory
......
...@@ -147,7 +147,7 @@ type NodeTableResult a = TableResult (Node a) ...@@ -147,7 +147,7 @@ type NodeTableResult a = TableResult (Node a)
data TableResult a = TableResult { tr_count :: Int data TableResult a = TableResult { tr_count :: Int
, tr_docs :: [a] , tr_docs :: [a]
} deriving (Generic) } deriving (Generic, Show)
$(deriveJSON (unPrefix "tr_") ''TableResult) $(deriveJSON (unPrefix "tr_") ''TableResult)
......
...@@ -25,7 +25,7 @@ import Gargantext.Database.Admin.Config ...@@ -25,7 +25,7 @@ import Gargantext.Database.Admin.Config
import Gargantext.Database.Prelude import Gargantext.Database.Prelude
import Gargantext.Database.Query.Table.Node import Gargantext.Database.Query.Table.Node
import Gargantext.Database.Query.Table.Node.Select import Gargantext.Database.Query.Table.Node.Select
import Gargantext.Database.Query.Table.NodeNode (selectDocsDates) import Gargantext.Database.Query.Table.NodeContext (selectDocsDates)
import Gargantext.Database.Schema.Node import Gargantext.Database.Schema.Node
import Gargantext.Prelude import Gargantext.Prelude
import Gargantext.Core.Text.Metrics.Count (occurrencesWith) import Gargantext.Core.Text.Metrics.Count (occurrencesWith)
...@@ -36,7 +36,7 @@ import Gargantext.API.Ngrams.Tools ...@@ -36,7 +36,7 @@ import Gargantext.API.Ngrams.Tools
import Gargantext.API.Ngrams.Types import Gargantext.API.Ngrams.Types
import Gargantext.Core.Types import Gargantext.Core.Types
import Gargantext.Database.Action.Flow.Types import Gargantext.Database.Action.Flow.Types
import Gargantext.Database.Action.Metrics.NgramsByNode import Gargantext.Database.Action.Metrics.NgramsByContext
import Gargantext.Database.Schema.Ngrams import Gargantext.Database.Schema.Ngrams
import Gargantext.Core.Viz.Types import Gargantext.Core.Viz.Types
import qualified Data.HashMap.Strict as HashMap import qualified Data.HashMap.Strict as HashMap
...@@ -67,8 +67,8 @@ chartData cId nt lt = do ...@@ -67,8 +67,8 @@ chartData cId nt lt = do
Nothing -> x Nothing -> x
Just x' -> maybe x identity x' Just x' -> maybe x identity x'
(_total,mapTerms) <- countNodesByNgramsWith (group dico) (_total,mapTerms) <- countContextsByNgramsWith (group dico)
<$> getNodesByNgramsOnlyUser cId (ls' <> ls) nt terms <$> getContextsByNgramsOnlyUser cId (ls' <> ls) nt terms
let (dates, count) = V.unzip $ let (dates, count) = V.unzip $
V.fromList $ V.fromList $
List.sortOn snd $ List.sortOn snd $
...@@ -89,7 +89,7 @@ treeData cId nt lt = do ...@@ -89,7 +89,7 @@ treeData cId nt lt = do
dico = filterListWithRoot lt ts dico = filterListWithRoot lt ts
terms = catMaybes $ List.concat $ map (\(a,b) -> [Just a, b]) $ HashMap.toList dico terms = catMaybes $ List.concat $ map (\(a,b) -> [Just a, b]) $ HashMap.toList dico
cs' <- getNodesByNgramsOnlyUser cId (ls' <> ls) nt terms cs' <- getContextsByNgramsOnlyUser cId (ls' <> ls) nt terms
m <- getListNgrams ls nt m <- getListNgrams ls nt
pure $ V.fromList $ toTree lt cs' m pure $ V.fromList $ toTree lt cs' m
......
...@@ -191,9 +191,9 @@ instance FromField HyperdataGraph ...@@ -191,9 +191,9 @@ instance FromField HyperdataGraph
where where
fromField = fromField' fromField = fromField'
instance DefaultFromField PGJsonb HyperdataGraph instance DefaultFromField SqlJsonb HyperdataGraph
where where
defaultFromField = fieldQueryRunnerColumn defaultFromField = fromPGSFromField
----------------------------------------------------------- -----------------------------------------------------------
-- This type is used to return graph via API -- This type is used to return graph via API
......
...@@ -31,7 +31,7 @@ import Gargantext.Core.Types.Main ...@@ -31,7 +31,7 @@ import Gargantext.Core.Types.Main
import Gargantext.Core.Viz.Graph import Gargantext.Core.Viz.Graph
import Gargantext.Core.Viz.Graph.GEXF () import Gargantext.Core.Viz.Graph.GEXF ()
import Gargantext.Core.Viz.Graph.Tools -- (cooc2graph) import Gargantext.Core.Viz.Graph.Tools -- (cooc2graph)
import Gargantext.Database.Action.Metrics.NgramsByNode (getNodesByNgramsOnlyUser) import Gargantext.Database.Action.Metrics.NgramsByContext (getContextsByNgramsOnlyUser)
import Gargantext.Database.Action.Flow.Types (FlowCmdM) import Gargantext.Database.Action.Flow.Types (FlowCmdM)
import Gargantext.Database.Action.Node (mkNodeWithParent) import Gargantext.Database.Action.Node (mkNodeWithParent)
import Gargantext.Database.Admin.Config import Gargantext.Database.Admin.Config
...@@ -120,8 +120,9 @@ recomputeGraph :: FlowCmdM env err m ...@@ -120,8 +120,9 @@ recomputeGraph :: FlowCmdM env err m
=> UserId => UserId
-> NodeId -> NodeId
-> Maybe GraphMetric -> Maybe GraphMetric
-> Bool
-> m Graph -> m Graph
recomputeGraph _uId nId maybeDistance = do recomputeGraph _uId nId maybeDistance force = do
nodeGraph <- getNodeWith nId (Proxy :: Proxy HyperdataGraph) nodeGraph <- getNodeWith nId (Proxy :: Proxy HyperdataGraph)
let let
graph = nodeGraph ^. node_hyperdata . hyperdataGraph graph = nodeGraph ^. node_hyperdata . hyperdataGraph
...@@ -142,21 +143,22 @@ recomputeGraph _uId nId maybeDistance = do ...@@ -142,21 +143,22 @@ recomputeGraph _uId nId maybeDistance = do
repo <- getRepo' [listId] repo <- getRepo' [listId]
let v = repo ^. unNodeStory . at listId . _Just . a_version let v = repo ^. unNodeStory . at listId . _Just . a_version
let computeG mt = do
g <- computeGraph cId similarity NgramsTerms repo
let g' = set graph_metadata mt g
_ <- updateHyperdata nId (HyperdataGraph (Just g') camera)
pure g'
case graph of case graph of
Nothing -> do Nothing -> do
graph' <- computeGraph cId similarity NgramsTerms repo
mt <- defaultGraphMetadata cId "Title" repo (fromMaybe Order1 maybeDistance) mt <- defaultGraphMetadata cId "Title" repo (fromMaybe Order1 maybeDistance)
let graph'' = set graph_metadata (Just mt) graph' g <- computeG $ Just mt
_ <- updateHyperdata nId (HyperdataGraph (Just graph'') camera) pure $ trace "[G.V.G.API.recomputeGraph] Graph empty, computed" g
pure $ trace "[G.V.G.API.recomputeGraph] Graph empty, computed" graph'' Just graph' -> if (listVersion == Just v) && (not force)
Just graph' -> if listVersion == Just v
then pure graph' then pure graph'
else do else do
graph'' <- computeGraph cId similarity NgramsTerms repo g <- computeG graphMetadata
let graph''' = set graph_metadata graphMetadata graph'' pure $ trace "[G.V.G.API] Graph exists, recomputing" g
_ <- updateHyperdata nId (HyperdataGraph (Just graph''') camera)
pure $ trace "[G.V.G.API] Graph exists, recomputing" graph'''
computeGraph :: FlowCmdM env err m computeGraph :: FlowCmdM env err m
...@@ -177,7 +179,7 @@ computeGraph cId d nt repo = do ...@@ -177,7 +179,7 @@ computeGraph cId d nt repo = do
-- <$> getCoocByNgrams (if d == Conditional then Diagonal True else Diagonal False) -- <$> getCoocByNgrams (if d == Conditional then Diagonal True else Diagonal False)
<$> getCoocByNgrams (Diagonal True) <$> getCoocByNgrams (Diagonal True)
<$> groupNodesByNgrams ngs <$> groupNodesByNgrams ngs
<$> getNodesByNgramsOnlyUser cId (lIds <> [lId]) nt (HashMap.keys ngs) <$> getContextsByNgramsOnlyUser cId (lIds <> [lId]) nt (HashMap.keys ngs)
-- printDebug "myCooc" myCooc -- printDebug "myCooc" myCooc
-- saveAsFileDebug "debug/my-cooc" myCooc -- saveAsFileDebug "debug/my-cooc" myCooc
...@@ -242,7 +244,7 @@ graphRecompute u n logStatus = do ...@@ -242,7 +244,7 @@ graphRecompute u n logStatus = do
, _scst_remaining = Just 1 , _scst_remaining = Just 1
, _scst_events = Just [] , _scst_events = Just []
} }
_g <- trace (show u) $ recomputeGraph u n Nothing _g <- trace (show u) $ recomputeGraph u n Nothing False
pure JobLog { _scst_succeeded = Just 1 pure JobLog { _scst_succeeded = Just 1
, _scst_failed = Just 0 , _scst_failed = Just 0
, _scst_remaining = Just 0 , _scst_remaining = Just 0
...@@ -297,7 +299,7 @@ recomputeVersions :: FlowCmdM env err m ...@@ -297,7 +299,7 @@ recomputeVersions :: FlowCmdM env err m
=> UserId => UserId
-> NodeId -> NodeId
-> m Graph -> m Graph
recomputeVersions uId nId = recomputeGraph uId nId Nothing recomputeVersions uId nId = recomputeGraph uId nId Nothing False
------------------------------------------------------------ ------------------------------------------------------------
graphClone :: UserId graphClone :: UserId
......
...@@ -66,8 +66,8 @@ instance Show SVG where ...@@ -66,8 +66,8 @@ instance Show SVG where
instance Accept SVG where instance Accept SVG where
contentType _ = "SVG" // "image/svg+xml" /: ("charset", "utf-8") contentType _ = "SVG" // "image/svg+xml" /: ("charset", "utf-8")
instance Show a => MimeRender PlainText a where --instance Show a => MimeRender PlainText a where
mimeRender _ val = cs ("" <> show val) -- mimeRender _ val = cs ("" <> show val)
instance MimeRender SVG SVG where instance MimeRender SVG SVG where
mimeRender _ (SVG s) = DBL.fromStrict s mimeRender _ (SVG s) = DBL.fromStrict s
......
...@@ -32,7 +32,7 @@ import Gargantext.Database.Action.Flow.Types ...@@ -32,7 +32,7 @@ import Gargantext.Database.Action.Flow.Types
import Gargantext.Core.Viz.LegacyPhylo hiding (Svg, Dot) import Gargantext.Core.Viz.LegacyPhylo hiding (Svg, Dot)
import Gargantext.Database.Admin.Types.Hyperdata import Gargantext.Database.Admin.Types.Hyperdata
import Gargantext.Database.Schema.Ngrams (NgramsType(..)) import Gargantext.Database.Schema.Ngrams (NgramsType(..))
import Gargantext.Database.Query.Table.NodeNode (selectDocs) import Gargantext.Database.Query.Table.NodeContext (selectDocs)
import Gargantext.Core.Types import Gargantext.Core.Types
import Gargantext.Core (HasDBid) import Gargantext.Core (HasDBid)
......
...@@ -86,15 +86,16 @@ import Gargantext.Database.Action.Flow.Types ...@@ -86,15 +86,16 @@ import Gargantext.Database.Action.Flow.Types
import Gargantext.Database.Action.Flow.Utils (insertDocNgrams, DocumentIdWithNgrams(..)) import Gargantext.Database.Action.Flow.Utils (insertDocNgrams, DocumentIdWithNgrams(..))
import Gargantext.Database.Action.Search (searchDocInDatabase) import Gargantext.Database.Action.Search (searchDocInDatabase)
import Gargantext.Database.Admin.Config (userMaster, corpusMasterName) import Gargantext.Database.Admin.Config (userMaster, corpusMasterName)
import Gargantext.Database.Action.Metrics (updateNgramsOccurrences)
import Gargantext.Database.Admin.Types.Hyperdata import Gargantext.Database.Admin.Types.Hyperdata
import Gargantext.Database.Admin.Types.Node -- (HyperdataDocument(..), NodeType(..), NodeId, UserId, ListId, CorpusId, RootId, MasterCorpusId, MasterUserId) import Gargantext.Database.Admin.Types.Node -- (HyperdataDocument(..), NodeType(..), NodeId, UserId, ListId, CorpusId, RootId, MasterCorpusId, MasterUserId)
import Gargantext.Database.Prelude import Gargantext.Database.Prelude
import Gargantext.Database.Query.Table.ContextNodeNgrams2
import Gargantext.Database.Query.Table.Ngrams import Gargantext.Database.Query.Table.Ngrams
import Gargantext.Database.Query.Table.Node import Gargantext.Database.Query.Table.Node
import Gargantext.Database.Query.Table.Node.Document.Insert -- (insertDocuments, ReturnId(..), addUniqIdsDoc, addUniqIdsContact, ToDbData(..)) import Gargantext.Database.Query.Table.Node.Document.Insert -- (insertDocuments, ReturnId(..), addUniqIdsDoc, addUniqIdsContact, ToDbData(..))
import Gargantext.Database.Query.Table.Node.Error (HasNodeError(..)) import Gargantext.Database.Query.Table.Node.Error (HasNodeError(..))
import Gargantext.Database.Query.Table.NodeNgrams (listInsertDb , getCgramsId) import Gargantext.Database.Query.Table.NodeNgrams (listInsertDb , getCgramsId)
import Gargantext.Database.Query.Table.NodeNodeNgrams2
import Gargantext.Database.Query.Tree.Root (getOrMkRoot, getOrMk_RootWithCorpus) import Gargantext.Database.Query.Tree.Root (getOrMkRoot, getOrMk_RootWithCorpus)
import Gargantext.Database.Schema.Node (NodePoly(..), node_id) import Gargantext.Database.Schema.Node (NodePoly(..), node_id)
import Gargantext.Database.Types import Gargantext.Database.Types
...@@ -231,6 +232,9 @@ flow c u cn la mfslw docs logStatus = do ...@@ -231,6 +232,9 @@ flow c u cn la mfslw docs logStatus = do
) (zip [1..] docs) ) (zip [1..] docs)
flowCorpusUser (la ^. tt_lang) u cn c (concat ids) mfslw flowCorpusUser (la ^. tt_lang) u cn c (concat ids) mfslw
------------------------------------------------------------------------ ------------------------------------------------------------------------
flowCorpusUser :: ( FlowCmdM env err m flowCorpusUser :: ( FlowCmdM env err m
, MkCorpus c , MkCorpus c
...@@ -265,6 +269,8 @@ flowCorpusUser l user corpusName ctype ids mfslw = do ...@@ -265,6 +269,8 @@ flowCorpusUser l user corpusName ctype ids mfslw = do
let gp = GroupWithPosTag l CoreNLP HashMap.empty let gp = GroupWithPosTag l CoreNLP HashMap.empty
ngs <- buildNgramsLists user userCorpusId masterCorpusId mfslw gp ngs <- buildNgramsLists user userCorpusId masterCorpusId mfslw gp
-- printDebug "flowCorpusUser:ngs" ngs
_userListId <- flowList_DbRepo listId ngs _userListId <- flowList_DbRepo listId ngs
_mastListId <- getOrMkList masterCorpusId masterUserId _mastListId <- getOrMkList masterCorpusId masterUserId
-- _ <- insertOccsUpdates userCorpusId mastListId -- _ <- insertOccsUpdates userCorpusId mastListId
...@@ -275,6 +281,8 @@ flowCorpusUser l user corpusName ctype ids mfslw = do ...@@ -275,6 +281,8 @@ flowCorpusUser l user corpusName ctype ids mfslw = do
--_ <- mkPhylo userCorpusId userId --_ <- mkPhylo userCorpusId userId
-- Annuaire Flow -- Annuaire Flow
-- _ <- mkAnnuaire rootUserId userId -- _ <- mkAnnuaire rootUserId userId
_ <- updateNgramsOccurrences userCorpusId (Just listId)
pure userCorpusId pure userCorpusId
...@@ -314,27 +322,28 @@ saveDocNgramsWith :: ( FlowCmdM env err m) ...@@ -314,27 +322,28 @@ saveDocNgramsWith :: ( FlowCmdM env err m)
-> m () -> m ()
saveDocNgramsWith lId mapNgramsDocs' = do saveDocNgramsWith lId mapNgramsDocs' = do
terms2id <- insertExtractedNgrams $ HashMap.keys mapNgramsDocs' terms2id <- insertExtractedNgrams $ HashMap.keys mapNgramsDocs'
let mapNgramsDocs = HashMap.mapKeys extracted2ngrams mapNgramsDocs' printDebug "terms2id" terms2id
-- to be removed let mapNgramsDocs = HashMap.mapKeys extracted2ngrams mapNgramsDocs'
let indexedNgrams = HashMap.mapKeys (indexNgrams terms2id) mapNgramsDocs
-- new -- new
mapCgramsId <- listInsertDb lId toNodeNgramsW' mapCgramsId <- listInsertDb lId toNodeNgramsW'
$ map (first _ngramsTerms . second Map.keys) $ map (first _ngramsTerms . second Map.keys)
$ HashMap.toList mapNgramsDocs $ HashMap.toList mapNgramsDocs
printDebug "saveDocNgramsWith" mapCgramsId
-- insertDocNgrams -- insertDocNgrams
_return <- insertNodeNodeNgrams2 _return <- insertContextNodeNgrams2
$ catMaybes [ NodeNodeNgrams2 <$> Just nId $ catMaybes [ ContextNodeNgrams2 <$> Just nId
<*> getCgramsId mapCgramsId ngrams_type (_ngramsTerms terms'') <*> (getCgramsId mapCgramsId ngrams_type (_ngramsTerms terms''))
<*> Just (fromIntegral w :: Double) <*> Just (fromIntegral w :: Double)
| (terms'', mapNgramsTypes) <- HashMap.toList mapNgramsDocs | (terms'', mapNgramsTypes) <- HashMap.toList mapNgramsDocs
, (ngrams_type, mapNodeIdWeight) <- Map.toList mapNgramsTypes , (ngrams_type, mapNodeIdWeight) <- Map.toList mapNgramsTypes
, (nId, w) <- Map.toList mapNodeIdWeight , (nId, w) <- Map.toList mapNodeIdWeight
] ]
-- to be removed -- to be removed
_ <- insertDocNgrams lId indexedNgrams _ <- insertDocNgrams lId $ HashMap.mapKeys (indexNgrams terms2id) mapNgramsDocs
pure () pure ()
...@@ -348,7 +357,7 @@ insertDocs :: ( FlowCmdM env err m ...@@ -348,7 +357,7 @@ insertDocs :: ( FlowCmdM env err m
=> UserId => UserId
-> CorpusId -> CorpusId
-> [a] -> [a]
-> m ([DocId], [Indexed NodeId a]) -> m ([ContextId], [Indexed ContextId a])
insertDocs uId cId hs = do insertDocs uId cId hs = do
let docs = map addUniqId hs let docs = map addUniqId hs
newIds <- insertDb uId cId docs newIds <- insertDb uId cId docs
...@@ -476,27 +485,24 @@ instance HasText a => HasText (Node a) ...@@ -476,27 +485,24 @@ instance HasText a => HasText (Node a)
-- | TODO putelsewhere -- | TODO putelsewhere
-- | Upgrade function -- | Upgrade function
-- Suppose all documents are English (this is the case actually) -- Suppose all documents are English (this is the case actually)
indexAllDocumentsWithPosTag :: FlowCmdM env err m => m () indexAllDocumentsWithPosTag :: FlowCmdM env err m
=> m ()
indexAllDocumentsWithPosTag = do indexAllDocumentsWithPosTag = do
rootId <- getRootId (UserName userMaster) rootId <- getRootId (UserName userMaster)
corpusIds <- findNodesId rootId [NodeCorpus] corpusIds <- findNodesId rootId [NodeCorpus]
docs <- List.concat <$> mapM getDocumentsWithParentId corpusIds docs <- List.concat <$> mapM getDocumentsWithParentId corpusIds
_ <- mapM extractInsert (splitEvery 1000 docs) _ <- mapM extractInsert (splitEvery 1000 docs)
pure () pure ()
extractInsert :: FlowCmdM env err m => [Node HyperdataDocument] -> m () extractInsert :: FlowCmdM env err m
=> [Node HyperdataDocument] -> m ()
extractInsert docs = do extractInsert docs = do
let documentsWithId = map (\doc -> Indexed (doc ^. node_id) doc) docs let documentsWithId = map (\doc -> Indexed (doc ^. node_id) doc) docs
mapNgramsDocs' <- mapNodeIdNgrams mapNgramsDocs' <- mapNodeIdNgrams
<$> documentIdWithNgrams <$> documentIdWithNgrams
(extractNgramsT $ withLang (Multi EN) documentsWithId) (extractNgramsT $ withLang (Multi EN) documentsWithId)
documentsWithId documentsWithId
_ <- insertExtractedNgrams $ HashMap.keys mapNgramsDocs' _ <- insertExtractedNgrams $ HashMap.keys mapNgramsDocs'
pure () pure ()
...@@ -21,7 +21,6 @@ import Control.Concurrent ...@@ -21,7 +21,6 @@ import Control.Concurrent
import Control.Lens ((^.), (+~), (%~), at, (.~), _Just) import Control.Lens ((^.), (+~), (%~), at, (.~), _Just)
import Control.Monad.Reader import Control.Monad.Reader
import Data.Map (Map, toList) import Data.Map (Map, toList)
import Data.Maybe (catMaybes)
import Data.Text (Text) import Data.Text (Text)
import Gargantext.API.Ngrams (saveNodeStory) import Gargantext.API.Ngrams (saveNodeStory)
import Gargantext.API.Ngrams.Tools (getNodeStoryVar) import Gargantext.API.Ngrams.Tools (getNodeStoryVar)
...@@ -31,8 +30,8 @@ import Gargantext.Core.Types.Main (ListType(CandidateTerm)) ...@@ -31,8 +30,8 @@ import Gargantext.Core.Types.Main (ListType(CandidateTerm))
import Gargantext.Core.NodeStory import Gargantext.Core.NodeStory
import Gargantext.Database.Action.Flow.Types import Gargantext.Database.Action.Flow.Types
import Gargantext.Database.Admin.Types.Node import Gargantext.Database.Admin.Types.Node
import Gargantext.Database.Query.Table.NodeNgrams (NodeNgramsPoly(..), NodeNgramsW, listInsertDb, getCgramsId) import Gargantext.Database.Query.Table.NodeNgrams (NodeNgramsPoly(..), NodeNgramsW, listInsertDb,{- getCgramsId -})
import Gargantext.Database.Query.Table.Node_NodeNgramsNodeNgrams -- import Gargantext.Database.Query.Table.Node_NodeNgramsNodeNgrams
import Gargantext.Database.Schema.Ngrams (NgramsType(..)) import Gargantext.Database.Schema.Ngrams (NgramsType(..))
import Gargantext.Prelude import Gargantext.Prelude
import qualified Data.List as List import qualified Data.List as List
...@@ -89,17 +88,20 @@ flowList_DbRepo :: FlowCmdM env err m ...@@ -89,17 +88,20 @@ flowList_DbRepo :: FlowCmdM env err m
-> m ListId -> m ListId
flowList_DbRepo lId ngs = do flowList_DbRepo lId ngs = do
-- printDebug "listId flowList" lId -- printDebug "listId flowList" lId
mapCgramsId <- listInsertDb lId toNodeNgramsW (Map.toList ngs) _mapCgramsId <- listInsertDb lId toNodeNgramsW (Map.toList ngs)
{-
let toInsert = catMaybes [ (,) <$> (getCgramsId mapCgramsId ntype <$> (unNgramsTerm <$> parent)) let toInsert = catMaybes [ (,) <$> (getCgramsId mapCgramsId ntype <$> (unNgramsTerm <$> parent))
<*> getCgramsId mapCgramsId ntype ngram <*> getCgramsId mapCgramsId ntype ngram
| (ntype, ngs') <- Map.toList ngs | (ntype, ngs') <- Map.toList ngs
, NgramsElement { _ne_ngrams = NgramsTerm ngram , NgramsElement { _ne_ngrams = NgramsTerm ngram
, _ne_parent = parent } <- ngs' , _ne_parent = parent } <- ngs'
] ]
-}
-- Inserting groups of ngrams -- Inserting groups of ngrams
_r <- insert_Node_NodeNgrams_NodeNgrams -- _r <- insert_Node_NodeNgrams_NodeNgrams
$ map (\(a,b) -> Node_NodeNgrams_NodeNgrams lId a b Nothing) toInsert -- $ map (\(a,b) -> Node_NodeNgrams_NodeNgrams lId a b Nothing) toInsert
-- printDebug "flowList_Tficf':ngs" ngs
listInsert lId ngs listInsert lId ngs
--trace (show $ List.filter (\n -> _ne_ngrams n == "versatile") $ List.concat $ Map.elems ngs) $ listInsert lId ngs --trace (show $ List.filter (\n -> _ne_ngrams n == "versatile") $ List.concat $ Map.elems ngs) $ listInsert lId ngs
...@@ -116,37 +118,40 @@ toNodeNgramsW l ngs = List.concat $ map (toNodeNgramsW'' l) ngs ...@@ -116,37 +118,40 @@ toNodeNgramsW l ngs = List.concat $ map (toNodeNgramsW'' l) ngs
-> (NgramsType, [NgramsElement]) -> (NgramsType, [NgramsElement])
-> [NodeNgramsW] -> [NodeNgramsW]
toNodeNgramsW'' l' (ngrams_type, elms) = toNodeNgramsW'' l' (ngrams_type, elms) =
[ NodeNgrams { _nng_id = Nothing [ NodeNgrams { _nng_id = Nothing
, _nng_node_id = l' , _nng_node_id = l'
, _nng_node_subtype = list_type , _nng_node_subtype = list_type
, _nng_ngrams_id = ngrams_terms' , _nng_ngrams_id = ngrams_terms'
, _nng_ngrams_type = ngrams_type , _nng_ngrams_type = ngrams_type
, _nng_ngrams_field = Nothing , _nng_ngrams_field = Nothing
, _nng_ngrams_tag = Nothing , _nng_ngrams_tag = Nothing
, _nng_ngrams_class = Nothing , _nng_ngrams_class = Nothing
, _nng_ngrams_weight = 0 } | , _nng_ngrams_weight = 0 } |
(NgramsElement { _ne_ngrams = NgramsTerm ngrams_terms' (NgramsElement { _ne_ngrams = NgramsTerm ngrams_terms'
, _ne_size = _size , _ne_size = _size
, _ne_list = list_type , _ne_list = list_type
, _ne_occurrences = _occ , _ne_occurrences = _occ
, _ne_root = _root , _ne_root = _root
, _ne_parent = _parent , _ne_parent = _parent
, _ne_children = _children }) <- elms , _ne_children = _children
}
) <- elms
] ]
toNodeNgramsW' :: ListId toNodeNgramsW' :: ListId
-> [(Text, [NgramsType])] -> [(Text, [NgramsType])]
-> [NodeNgramsW] -> [NodeNgramsW]
toNodeNgramsW' l'' ngs = [ NodeNgrams { _nng_id = Nothing toNodeNgramsW' l'' ngs = [ NodeNgrams { _nng_id = Nothing
, _nng_node_id = l'' , _nng_node_id = l''
, _nng_node_subtype = CandidateTerm , _nng_node_subtype = CandidateTerm
, _nng_ngrams_id = terms , _nng_ngrams_id = terms
, _nng_ngrams_type = ngrams_type , _nng_ngrams_type = ngrams_type
, _nng_ngrams_field = Nothing , _nng_ngrams_field = Nothing
, _nng_ngrams_tag = Nothing , _nng_ngrams_tag = Nothing
, _nng_ngrams_class = Nothing , _nng_ngrams_class = Nothing
, _nng_ngrams_weight = 0 } , _nng_ngrams_weight = 0
}
| (terms, ngrams_types) <- ngs | (terms, ngrams_types) <- ngs
, ngrams_type <- ngrams_types , ngrams_type <- ngrams_types
] ]
......
...@@ -28,7 +28,7 @@ import Gargantext.Core ...@@ -28,7 +28,7 @@ import Gargantext.Core
import Gargantext.Core.Types (TableResult(..)) import Gargantext.Core.Types (TableResult(..))
import Gargantext.Core.Types.Main import Gargantext.Core.Types.Main
import Gargantext.Database import Gargantext.Database
import Gargantext.Database.Action.Metrics.NgramsByNode (getNodesByNgramsOnlyUser) import Gargantext.Database.Action.Metrics.NgramsByContext (getContextsByNgramsOnlyUser)
import Gargantext.Database.Admin.Config import Gargantext.Database.Admin.Config
import Gargantext.Database.Admin.Types.Hyperdata -- (HyperdataContact(..)) import Gargantext.Database.Admin.Types.Hyperdata -- (HyperdataContact(..))
import Gargantext.Database.Admin.Types.Node -- (AnnuaireId, CorpusId, ListId, DocId, ContactId, NodeId) import Gargantext.Database.Admin.Types.Node -- (AnnuaireId, CorpusId, ListId, DocId, ContactId, NodeId)
...@@ -52,14 +52,14 @@ import qualified Data.Text as DT ...@@ -52,14 +52,14 @@ import qualified Data.Text as DT
isPairedWith :: NodeId -> NodeType -> Cmd err [NodeId] isPairedWith :: NodeId -> NodeType -> Cmd err [NodeId]
isPairedWith nId nt = runOpaQuery (selectQuery nt nId) isPairedWith nId nt = runOpaQuery (selectQuery nt nId)
where where
selectQuery :: NodeType -> NodeId -> Query (Column PGInt4) selectQuery :: NodeType -> NodeId -> Select (Column SqlInt4)
selectQuery nt' nId' = proc () -> do selectQuery nt' nId' = proc () -> do
(node, node_node) <- queryJoin -< () (node, node_node) <- queryJoin -< ()
restrict -< (node^.node_typename) .== (sqlInt4 $ toDBid nt') restrict -< (node^.node_typename) .== (sqlInt4 $ toDBid nt')
restrict -< (node_node^.nn_node1_id) .== (toNullable $ pgNodeId nId') restrict -< (node_node^.nn_node1_id) .== (toNullable $ pgNodeId nId')
returnA -< node^.node_id returnA -< node^.node_id
queryJoin :: Query (NodeRead, NodeNodeReadNull) queryJoin :: Select (NodeRead, NodeNodeReadNull)
queryJoin = leftJoin2 queryNodeTable queryNodeNodeTable cond queryJoin = leftJoin2 queryNodeTable queryNodeNodeTable cond
where where
cond (node, node_node) = node^.node_id .== node_node^. nn_node2_id cond (node, node_node) = node^.node_id .== node_node^. nn_node2_id
...@@ -190,4 +190,4 @@ getNgramsDocId cId lId nt = do ...@@ -190,4 +190,4 @@ getNgramsDocId cId lId nt = do
let ngs = filterListWithRoot MapTerm $ mapTermListRoot [lId] nt repo let ngs = filterListWithRoot MapTerm $ mapTermListRoot [lId] nt repo
groupNodesByNgrams ngs groupNodesByNgrams ngs
<$> getNodesByNgramsOnlyUser cId (lIds <> [lId]) nt (HashMap.keys ngs) <$> getContextsByNgramsOnlyUser cId (lIds <> [lId]) nt (HashMap.keys ngs)
...@@ -17,10 +17,11 @@ import Data.Map (Map) ...@@ -17,10 +17,11 @@ import Data.Map (Map)
import Data.HashMap.Strict (HashMap) import Data.HashMap.Strict (HashMap)
import Gargantext.Database.Admin.Types.Node import Gargantext.Database.Admin.Types.Node
import Gargantext.Database.Prelude (Cmd) import Gargantext.Database.Prelude (Cmd)
import Gargantext.Database.Query.Table.NodeNodeNgrams import Gargantext.Database.Query.Table.ContextNodeNgrams
import Gargantext.Database.Schema.Ngrams import Gargantext.Database.Schema.Ngrams
import Gargantext.Database.Types import Gargantext.Database.Types
import Gargantext.Prelude import Gargantext.Prelude
import Control.Lens ((^.))
import qualified Data.Map as DM import qualified Data.Map as DM
import qualified Data.HashMap.Strict as HashMap import qualified Data.HashMap.Strict as HashMap
...@@ -31,35 +32,21 @@ data DocumentIdWithNgrams a b = ...@@ -31,35 +32,21 @@ data DocumentIdWithNgrams a b =
, documentNgrams :: HashMap b (Map NgramsType Int) , documentNgrams :: HashMap b (Map NgramsType Int)
} deriving (Show) } deriving (Show)
docNgrams2nodeNodeNgrams :: CorpusId insertDocNgrams :: ListId
-> DocNgrams -> HashMap (Indexed NgramsId Ngrams) (Map NgramsType (Map DocId Int))
-> NodeNodeNgrams -> Cmd err Int
docNgrams2nodeNodeNgrams cId (DocNgrams d n nt w) = insertDocNgrams lId m = insertContextNodeNgrams ns
NodeNodeNgrams cId d n nt w where
ns = [ ContextNodeNgrams docId lId (ng^.index)
(ngramsTypeId t)
(fromIntegral i)
| (ng, t2n2i) <- HashMap.toList m
, (t, n2i) <- DM.toList t2n2i
, (docId, i) <- DM.toList n2i
]
data DocNgrams = DocNgrams { dn_doc_id :: DocId
, dn_ngrams_id :: Int
, dn_ngrams_type :: NgramsTypeId
, dn_weight :: Double
}
insertDocNgramsOn :: CorpusId
-> [DocNgrams]
-> Cmd err Int
insertDocNgramsOn cId dn =
insertNodeNodeNgrams
$ (map (docNgrams2nodeNodeNgrams cId) dn)
insertDocNgrams :: CorpusId
-> HashMap (Indexed Int Ngrams) (Map NgramsType (Map NodeId Int))
-> Cmd err Int
insertDocNgrams cId m =
insertDocNgramsOn cId [ DocNgrams { dn_doc_id = n
, dn_ngrams_id = _index ng
, dn_ngrams_type = ngramsTypeId t
, dn_weight = fromIntegral i }
| (ng, t2n2i) <- HashMap.toList m
, (t, n2i) <- DM.toList t2n2i
, (n, i) <- DM.toList n2i
]
...@@ -10,25 +10,39 @@ Portability : POSIX ...@@ -10,25 +10,39 @@ Portability : POSIX
Node API Node API
-} -}
{-# LANGUAGE QuasiQuotes #-}
module Gargantext.Database.Action.Metrics module Gargantext.Database.Action.Metrics
where where
import Database.PostgreSQL.Simple.SqlQQ (sql)
import Data.HashMap.Strict (HashMap) import Data.HashMap.Strict (HashMap)
import Data.Map (Map)
import Data.Set (Set)
import Database.PostgreSQL.Simple (Query, Only(..))
import Database.PostgreSQL.Simple.Types (Values(..), QualifiedIdentifier(..))
import Data.Vector (Vector) import Data.Vector (Vector)
import Gargantext.Core (HasDBid(toDBid))
import Gargantext.API.Ngrams.Tools (filterListWithRoot, groupNodesByNgrams, Diagonal(..), getCoocByNgrams, mapTermListRoot, RootTerm, getRepo') import Gargantext.API.Ngrams.Tools (filterListWithRoot, groupNodesByNgrams, Diagonal(..), getCoocByNgrams, mapTermListRoot, RootTerm, getRepo')
import Gargantext.API.Ngrams.Types (TabType(..), ngramsTypeFromTabType, NgramsTerm) import Gargantext.Database.Prelude (runPGSQuery{-, formatPGSQuery-})
import Gargantext.Core.Text.Metrics (scored, Scored(..), {-localMetrics, toScored-}) import Gargantext.API.Ngrams.Types (TabType(..), ngramsTypeFromTabType, NgramsTerm(..))
import Gargantext.Core.Mail.Types (HasMail) import Gargantext.Core.Mail.Types (HasMail)
import Gargantext.Core.Types (ListType(..), Limit, NodeType(..))
import Gargantext.Core.NodeStory import Gargantext.Core.NodeStory
import Gargantext.Core.Text.Metrics (scored, Scored(..), {-localMetrics, toScored-})
import Database.PostgreSQL.Simple.ToField (toField, Action{-, ToField-})
import Gargantext.Core.Types (ListType(..), Limit, NodeType(..), ContextId)
import Gargantext.Database.Action.Flow.Types (FlowCmdM) import Gargantext.Database.Action.Flow.Types (FlowCmdM)
import Gargantext.Database.Action.Metrics.NgramsByNode (getNodesByNgramsOnlyUser{-, getTficfWith-}) import Gargantext.Database.Action.Metrics.NgramsByContext (getContextsByNgramsOnlyUser{-, getTficfWith-})
import Gargantext.Database.Admin.Config (userMaster) import Gargantext.Database.Admin.Config (userMaster)
import Gargantext.Database.Admin.Types.Node (ListId, CorpusId) import Gargantext.Database.Admin.Types.Node (ListId, CorpusId)
import Gargantext.Database.Query.Table.Node (defaultList) import Gargantext.Database.Query.Table.Node (defaultList)
import Gargantext.Database.Query.Table.Node.Select import Gargantext.Database.Query.Table.Node.Select
import Gargantext.Prelude import Gargantext.Prelude
import qualified Data.HashMap.Strict as HM import qualified Data.HashMap.Strict as HM
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.List as List
import qualified Data.Text as Text
getMetrics :: FlowCmdM env err m getMetrics :: FlowCmdM env err m
=> CorpusId -> Maybe ListId -> TabType -> Maybe Limit => CorpusId -> Maybe ListId -> TabType -> Maybe Limit
...@@ -46,39 +60,188 @@ getNgramsCooc :: (FlowCmdM env err m) ...@@ -46,39 +60,188 @@ getNgramsCooc :: (FlowCmdM env err m)
, HashMap (NgramsTerm, NgramsTerm) Int , HashMap (NgramsTerm, NgramsTerm) Int
) )
getNgramsCooc cId maybeListId tabType maybeLimit = do getNgramsCooc cId maybeListId tabType maybeLimit = do
(ngs', ngs) <- getNgrams cId maybeListId tabType
let lId <- case maybeListId of
take' Nothing xs = xs Nothing -> defaultList cId
take' (Just n) xs = take n xs Just lId' -> pure lId'
(ngs', ngs) <- getNgrams lId tabType
lId <- defaultList cId
lIds <- selectNodesWithUsername NodeList userMaster lIds <- selectNodesWithUsername NodeList userMaster
myCooc <- HM.filter (>1) <$> getCoocByNgrams (Diagonal True) myCooc <- HM.filter (>1) <$> getCoocByNgrams (Diagonal True)
<$> groupNodesByNgrams ngs <$> groupNodesByNgrams ngs
<$> getNodesByNgramsOnlyUser cId (lIds <> [lId]) (ngramsTypeFromTabType tabType) <$> getContextsByNgramsOnlyUser cId
(take' maybeLimit $ HM.keys ngs) (lIds <> [lId])
(ngramsTypeFromTabType tabType)
(take' maybeLimit $ HM.keys ngs)
pure $ (ngs', ngs, myCooc) pure $ (ngs', ngs, myCooc)
------------------------------------------------------------------------
------------------------------------------------------------------------
updateNgramsOccurrences :: (FlowCmdM env err m)
=> CorpusId -> Maybe ListId
-> m ()
updateNgramsOccurrences cId mlId = do
_ <- mapM (updateNgramsOccurrences' cId mlId Nothing) [Terms, Sources, Authors, Institutes]
pure ()
getNgrams :: (HasMail env, HasNodeStory env err m) updateNgramsOccurrences' :: (FlowCmdM env err m)
=> CorpusId -> Maybe ListId -> TabType => CorpusId -> Maybe ListId -> Maybe Limit -> TabType
-> m ( HashMap NgramsTerm (ListType, Maybe NgramsTerm) -> m [Int]
, HashMap NgramsTerm (Maybe RootTerm) updateNgramsOccurrences' cId maybeListId maybeLimit tabType = do
)
getNgrams cId maybeListId tabType = do
lId <- case maybeListId of lId <- case maybeListId of
Nothing -> defaultList cId Nothing -> defaultList cId
Just lId' -> pure lId' Just lId' -> pure lId'
result <- getNgramsOccurrences cId lId tabType maybeLimit
let
toInsert :: [[Action]]
toInsert = map (\(ngramsTerm, score)
-> [ toField cId
, toField lId
, toField $ unNgramsTerm ngramsTerm
, toField $ toDBid $ ngramsTypeFromTabType tabType
, toField score
]
)
$ HM.toList result
queryInsert :: Query
queryInsert = [sql|
WITH input(corpus_id, list_id, terms, type_id, weight) AS (?)
INSERT into node_node_ngrams (node1_id, node2_id, ngrams_id, ngrams_type, weight)
SELECT input.corpus_id,input.list_id,ngrams.id,input.type_id,input.weight FROM input
JOIN ngrams on ngrams.terms = input.terms
ON CONFLICT (node1_id, node2_id, ngrams_id, ngrams_type)
DO UPDATE SET weight = excluded.weight
RETURNING 1
|]
let fields = map (\t-> QualifiedIdentifier Nothing t)
$ map Text.pack ["int4", "int4","text","int4","int4"]
map (\(Only a) -> a) <$> runPGSQuery queryInsert (Only $ Values fields toInsert)
------------------------------------------------------------------------
-- Used for scores in Ngrams Table
getNgramsOccurrences :: (FlowCmdM env err m)
=> CorpusId -> ListId -> TabType -> Maybe Limit
-> m (HashMap NgramsTerm Int)
getNgramsOccurrences c l t ml = HM.map Set.size <$> getNgramsContexts c l t ml
getNgramsContexts :: (FlowCmdM env err m)
=> CorpusId -> ListId -> TabType -> Maybe Limit
-> m (HashMap NgramsTerm (Set ContextId))
getNgramsContexts cId lId tabType maybeLimit = do
(_ngs', ngs) <- getNgrams lId tabType
lIds <- selectNodesWithUsername NodeList userMaster
-- TODO maybe add an option to group here
getContextsByNgramsOnlyUser cId
(lIds <> [lId])
(ngramsTypeFromTabType tabType)
(take' maybeLimit $ HM.keys ngs)
------------------------------------------------------------------------
updateContextScore :: (FlowCmdM env err m)
=> CorpusId -> Maybe ListId
-> m [Int]
updateContextScore cId maybeListId = do
lId <- case maybeListId of
Nothing -> defaultList cId
Just lId' -> pure lId'
result <- getContextsNgramsScore cId lId Terms MapTerm Nothing
let
toInsert :: [[Action]]
toInsert = map (\(contextId, score)
-> [ toField cId
, toField contextId
, toField score
]
)
$ Map.toList result
queryInsert :: Query
queryInsert = [sql|
WITH input(node_id, context_id, score) AS (?)
UPDATE nodes_contexts nc
SET score = input.score
FROM input
WHERE nc.node_id = input.node_id
AND nc.context_id = input.context_id
RETURNING 1
|]
let fields = map (\t-> QualifiedIdentifier Nothing t)
$ map Text.pack ["int4", "int4","int4"]
map (\(Only a) -> a) <$> runPGSQuery queryInsert (Only $ Values fields toInsert)
-- Used for scores in Doc Table
getContextsNgramsScore :: (FlowCmdM env err m)
=> CorpusId -> ListId -> TabType -> ListType -> Maybe Limit
-> m (Map ContextId Int)
getContextsNgramsScore cId lId tabType listType maybeLimit
= Map.map Set.size <$> getContextsNgrams cId lId tabType listType maybeLimit
getContextsNgrams :: (FlowCmdM env err m)
=> CorpusId -> ListId -> TabType -> ListType -> Maybe Limit
-> m (Map ContextId (Set NgramsTerm))
getContextsNgrams cId lId tabType listType maybeLimit = do
(ngs', ngs) <- getNgrams lId tabType
lIds <- selectNodesWithUsername NodeList userMaster
result <- groupNodesByNgrams ngs <$> getContextsByNgramsOnlyUser
cId
(lIds <> [lId])
(ngramsTypeFromTabType tabType)
( take' maybeLimit
$ HM.keys
$ HM.filter (\v -> fst v == listType) ngs'
)
-- printDebug "getCoocByNgrams" result
pure $ Map.fromListWith (<>)
$ List.concat
$ map (\(ng, contexts) -> List.zip (Set.toList contexts) (List.cycle [Set.singleton ng]))
$ HM.toList result
------------------------------------------------------------------------
------------------------------------------------------------------------
getNgrams :: (HasMail env, HasNodeStory env err m)
=> ListId -> TabType
-> m ( HashMap NgramsTerm (ListType, Maybe NgramsTerm)
, HashMap NgramsTerm (Maybe RootTerm)
)
getNgrams lId tabType = do
lists <- mapTermListRoot [lId] (ngramsTypeFromTabType tabType) <$> getRepo' [lId] lists <- mapTermListRoot [lId] (ngramsTypeFromTabType tabType) <$> getRepo' [lId]
let maybeSyn = HM.unions $ map (\t -> filterListWithRoot t lists) let maybeSyn = HM.unions $ map (\t -> filterListWithRoot t lists)
[MapTerm, StopTerm, CandidateTerm] [MapTerm, StopTerm, CandidateTerm]
pure (lists, maybeSyn) pure (lists, maybeSyn)
-- Some useful Tools
take' :: Maybe Int -> [a] -> [a]
take' Nothing xs = xs
take' (Just n) xs = take n xs
...@@ -21,15 +21,16 @@ import qualified Data.HashMap.Strict as HM ...@@ -21,15 +21,16 @@ import qualified Data.HashMap.Strict as HM
import Data.Maybe (fromMaybe) import Data.Maybe (fromMaybe)
import Gargantext.Core import Gargantext.Core
import Gargantext.Core.Text.Metrics.TFICF import Gargantext.Core.Text.Metrics.TFICF
import Gargantext.Database.Action.Metrics.NgramsByNode (getNodesByNgramsUser, getOccByNgramsOnlyFast, getOccByNgramsOnlyFast_withSample) import Gargantext.Database.Action.Metrics.NgramsByContext (getContextsByNgramsUser, {-getOccByNgramsOnlyFast,-} getOccByNgramsOnlyFast_withSample)
import Gargantext.Database.Admin.Types.Node -- (ListId, CorpusId, NodeId) import Gargantext.Database.Admin.Types.Node -- (ListId, CorpusId, NodeId)
import Gargantext.Database.Prelude (Cmd) import Gargantext.Database.Prelude (Cmd)
import Gargantext.Database.Query.Table.NodeNode (selectCountDocs) import Gargantext.Database.Query.Table.NodeContext (selectCountDocs)
import Gargantext.Database.Schema.Ngrams (NgramsType(..)) import Gargantext.Database.Schema.Ngrams (NgramsType(..))
import Gargantext.API.Ngrams.Types import Gargantext.API.Ngrams.Types
import Gargantext.Prelude import Gargantext.Prelude
import qualified Data.Set as Set import qualified Data.Set as Set
{-
getTficf :: HasDBid NodeType getTficf :: HasDBid NodeType
=> UserCorpusId => UserCorpusId
-> MasterCorpusId -> MasterCorpusId
...@@ -38,7 +39,7 @@ getTficf :: HasDBid NodeType ...@@ -38,7 +39,7 @@ getTficf :: HasDBid NodeType
getTficf cId mId nt = do getTficf cId mId nt = do
mapTextDoubleLocal <- HM.filter (> 1) mapTextDoubleLocal <- HM.filter (> 1)
<$> HM.map (fromIntegral . Set.size) <$> HM.map (fromIntegral . Set.size)
<$> getNodesByNgramsUser cId nt <$> getContextsByNgramsUser cId nt
mapTextDoubleGlobal <- HM.map fromIntegral mapTextDoubleGlobal <- HM.map fromIntegral
<$> getOccByNgramsOnlyFast mId nt (HM.keys mapTextDoubleLocal) <$> getOccByNgramsOnlyFast mId nt (HM.keys mapTextDoubleLocal)
...@@ -46,13 +47,15 @@ getTficf cId mId nt = do ...@@ -46,13 +47,15 @@ getTficf cId mId nt = do
countLocal <- selectCountDocs cId countLocal <- selectCountDocs cId
countGlobal <- selectCountDocs mId countGlobal <- selectCountDocs mId
printDebug "getTficf" (mapTextDoubleLocal, mapTextDoubleGlobal, countLocal, countGlobal)
pure $ HM.mapWithKey (\t n -> pure $ HM.mapWithKey (\t n ->
tficf (TficfInfra (Count n ) tficf (TficfInfra (Count n )
(Total $ fromIntegral countLocal)) (Total $ fromIntegral countLocal))
(TficfSupra (Count $ fromMaybe 0 $ HM.lookup t mapTextDoubleGlobal) (TficfSupra (Count $ fromMaybe 0 $ HM.lookup t mapTextDoubleGlobal)
(Total $ fromIntegral countGlobal)) (Total $ fromIntegral countGlobal))
) mapTextDoubleLocal ) mapTextDoubleLocal
-}
getTficf_withSample :: HasDBid NodeType getTficf_withSample :: HasDBid NodeType
=> UserCorpusId => UserCorpusId
...@@ -62,7 +65,7 @@ getTficf_withSample :: HasDBid NodeType ...@@ -62,7 +65,7 @@ getTficf_withSample :: HasDBid NodeType
getTficf_withSample cId mId nt = do getTficf_withSample cId mId nt = do
mapTextDoubleLocal <- HM.filter (> 1) mapTextDoubleLocal <- HM.filter (> 1)
<$> HM.map (fromIntegral . Set.size) <$> HM.map (fromIntegral . Set.size)
<$> getNodesByNgramsUser cId nt <$> getContextsByNgramsUser cId nt
countLocal <- selectCountDocs cId countLocal <- selectCountDocs cId
let countGlobal = countLocal * 10 let countGlobal = countLocal * 10
...@@ -71,6 +74,7 @@ getTficf_withSample cId mId nt = do ...@@ -71,6 +74,7 @@ getTficf_withSample cId mId nt = do
<$> getOccByNgramsOnlyFast_withSample mId countGlobal nt <$> getOccByNgramsOnlyFast_withSample mId countGlobal nt
(HM.keys mapTextDoubleLocal) (HM.keys mapTextDoubleLocal)
--printDebug "getTficf_withSample" (mapTextDoubleLocal, mapTextDoubleGlobal, countLocal, countGlobal)
pure $ HM.mapWithKey (\t n -> pure $ HM.mapWithKey (\t n ->
tficf (TficfInfra (Count n ) tficf (TficfInfra (Count n )
(Total $ fromIntegral countLocal)) (Total $ fromIntegral countLocal))
......
...@@ -25,11 +25,14 @@ import Gargantext.Database.Query.Facet ...@@ -25,11 +25,14 @@ import Gargantext.Database.Query.Facet
import Gargantext.Database.Query.Filter import Gargantext.Database.Query.Filter
import Gargantext.Database.Query.Join (leftJoin5) import Gargantext.Database.Query.Join (leftJoin5)
import Gargantext.Database.Query.Table.Node import Gargantext.Database.Query.Table.Node
import Gargantext.Database.Query.Table.Context
import Gargantext.Database.Query.Table.NodeNode import Gargantext.Database.Query.Table.NodeNode
import Gargantext.Database.Query.Table.NodeContext
import Gargantext.Database.Schema.Node import Gargantext.Database.Schema.Node
import Gargantext.Database.Schema.Context
import Gargantext.Prelude import Gargantext.Prelude
import Gargantext.Core.Text.Terms.Mono.Stem.En (stemIt) import Gargantext.Core.Text.Terms.Mono.Stem.En (stemIt)
import Opaleye hiding (Query, Order) import Opaleye hiding (Order)
import Data.Profunctor.Product (p4) import Data.Profunctor.Product (p4)
import qualified Opaleye as O hiding (Order) import qualified Opaleye as O hiding (Order)
...@@ -41,10 +44,10 @@ searchDocInDatabase :: HasDBid NodeType ...@@ -41,10 +44,10 @@ searchDocInDatabase :: HasDBid NodeType
searchDocInDatabase _p t = runOpaQuery (queryDocInDatabase t) searchDocInDatabase _p t = runOpaQuery (queryDocInDatabase t)
where where
-- | Global search query where ParentId is Master Node Corpus Id -- | Global search query where ParentId is Master Node Corpus Id
queryDocInDatabase :: Text -> O.Query (Column PGInt4, Column PGJsonb) queryDocInDatabase :: Text -> O.Select (Column SqlInt4, Column SqlJsonb)
queryDocInDatabase q = proc () -> do queryDocInDatabase q = proc () -> do
row <- queryNodeSearchTable -< () row <- queryNodeSearchTable -< ()
restrict -< (_ns_search row) @@ (pgTSQuery (unpack q)) restrict -< (_ns_search row) @@ (sqlTSQuery (unpack q))
restrict -< (_ns_typename row) .== (sqlInt4 $ toDBid NodeDocument) restrict -< (_ns_typename row) .== (sqlInt4 $ toDBid NodeDocument)
returnA -< (_ns_id row, _ns_hyperdata row) returnA -< (_ns_id row, _ns_hyperdata row)
...@@ -78,29 +81,29 @@ queryInCorpus :: HasDBid NodeType ...@@ -78,29 +81,29 @@ queryInCorpus :: HasDBid NodeType
=> CorpusId => CorpusId
-> IsTrash -> IsTrash
-> Text -> Text
-> O.Query FacetDocRead -> O.Select FacetDocRead
queryInCorpus cId t q = proc () -> do queryInCorpus cId t q = proc () -> do
(n, nn) <- joinInCorpus -< () (c, nc) <- joinInCorpus -< ()
restrict -< (nn^.nn_node1_id) .== (toNullable $ pgNodeId cId) restrict -< (nc^.nc_node_id) .== (toNullable $ pgNodeId cId)
restrict -< if t restrict -< if t
then (nn^.nn_category) .== (toNullable $ sqlInt4 0) then (nc^.nc_category) .== (toNullable $ sqlInt4 0)
else (nn^.nn_category) .>= (toNullable $ sqlInt4 1) else (nc^.nc_category) .>= (toNullable $ sqlInt4 1)
restrict -< (n ^. ns_search) @@ (pgTSQuery (unpack q)) restrict -< (c ^. cs_search) @@ (sqlTSQuery (unpack q))
restrict -< (n ^. ns_typename ) .== (sqlInt4 $ toDBid NodeDocument) restrict -< (c ^. cs_typename ) .== (sqlInt4 $ toDBid NodeDocument)
returnA -< FacetDoc { facetDoc_id = n^.ns_id returnA -< FacetDoc { facetDoc_id = c^.cs_id
, facetDoc_created = n^.ns_date , facetDoc_created = c^.cs_date
, facetDoc_title = n^.ns_name , facetDoc_title = c^.cs_name
, facetDoc_hyperdata = n^.ns_hyperdata , facetDoc_hyperdata = c^.cs_hyperdata
, facetDoc_category = nn^.nn_category , facetDoc_category = nc^.nc_category
, facetDoc_ngramCount = nn^.nn_score , facetDoc_ngramCount = nc^.nc_score
, facetDoc_score = nn^.nn_score , facetDoc_score = nc^.nc_score
} }
joinInCorpus :: O.Query (NodeSearchRead, NodeNodeReadNull) joinInCorpus :: O.Select (ContextSearchRead, NodeContextReadNull)
joinInCorpus = leftJoin queryNodeSearchTable queryNodeNodeTable cond joinInCorpus = leftJoin queryContextSearchTable queryNodeContextTable cond
where where
cond :: (NodeSearchRead, NodeNodeRead) -> Column PGBool cond :: (ContextSearchRead, NodeContextRead) -> Column SqlBool
cond (n, nn) = nn^.nn_node2_id .== _ns_id n cond (c, nc) = nc^.nc_context_id .== _cs_id c
------------------------------------------------------------------------ ------------------------------------------------------------------------
searchInCorpusWithContacts searchInCorpusWithContacts
...@@ -125,15 +128,15 @@ selectContactViaDoc ...@@ -125,15 +128,15 @@ selectContactViaDoc
=> CorpusId => CorpusId
-> AnnuaireId -> AnnuaireId
-> Text -> Text
-> QueryArr () -> SelectArr ()
( Column (Nullable PGInt4) ( Column (Nullable SqlInt4)
, Column (Nullable PGTimestamptz) , Column (Nullable SqlTimestamptz)
, Column (Nullable PGJsonb) , Column (Nullable SqlJsonb)
, Column (Nullable PGInt4) , Column (Nullable SqlInt4)
) )
selectContactViaDoc cId aId q = proc () -> do selectContactViaDoc cId aId q = proc () -> do
(doc, (corpus_doc, (_contact_doc, (annuaire_contact, contact)))) <- queryContactViaDoc -< () (doc, (corpus_doc, (_contact_doc, (annuaire_contact, contact)))) <- queryContactViaDoc -< ()
restrict -< (doc^.ns_search) @@ (pgTSQuery $ unpack q ) restrict -< (doc^.ns_search) @@ (sqlTSQuery $ unpack q )
restrict -< (doc^.ns_typename) .== (sqlInt4 $ toDBid NodeDocument) restrict -< (doc^.ns_typename) .== (sqlInt4 $ toDBid NodeDocument)
restrict -< (corpus_doc^.nn_node1_id) .== (toNullable $ pgNodeId cId) restrict -< (corpus_doc^.nn_node1_id) .== (toNullable $ pgNodeId cId)
restrict -< (annuaire_contact^.nn_node1_id) .== (toNullable $ pgNodeId aId) restrict -< (annuaire_contact^.nn_node1_id) .== (toNullable $ pgNodeId aId)
...@@ -155,15 +158,15 @@ selectGroup cId aId q = proc () -> do ...@@ -155,15 +158,15 @@ selectGroup cId aId q = proc () -> do
returnA -< FacetPaired a b c d returnA -< FacetPaired a b c d
queryContactViaDoc :: O.Query ( NodeSearchRead queryContactViaDoc :: O.Select ( NodeSearchRead
, ( NodeNodeReadNull , ( NodeNodeReadNull
, ( NodeNodeReadNull , ( NodeNodeReadNull
, ( NodeNodeReadNull , ( NodeNodeReadNull
, NodeReadNull , NodeReadNull
) )
) )
) )
) )
queryContactViaDoc = queryContactViaDoc =
leftJoin5 leftJoin5
queryNodeTable queryNodeTable
...@@ -176,14 +179,14 @@ queryContactViaDoc = ...@@ -176,14 +179,14 @@ queryContactViaDoc =
cond34 cond34
cond45 cond45
where where
cond12 :: (NodeNodeRead, NodeRead) -> Column PGBool cond12 :: (NodeNodeRead, NodeRead) -> Column SqlBool
cond12 (annuaire_contact, contact) = contact^.node_id .== annuaire_contact^.nn_node2_id cond12 (annuaire_contact, contact) = contact^.node_id .== annuaire_contact^.nn_node2_id
cond23 :: ( NodeNodeRead cond23 :: ( NodeNodeRead
, ( NodeNodeRead , ( NodeNodeRead
, NodeReadNull , NodeReadNull
) )
) -> Column PGBool ) -> Column SqlBool
cond23 (contact_doc, (annuaire_contact, _)) = contact_doc^.nn_node1_id .== annuaire_contact^.nn_node2_id cond23 (contact_doc, (annuaire_contact, _)) = contact_doc^.nn_node1_id .== annuaire_contact^.nn_node2_id
cond34 :: ( NodeNodeRead cond34 :: ( NodeNodeRead
...@@ -192,7 +195,7 @@ queryContactViaDoc = ...@@ -192,7 +195,7 @@ queryContactViaDoc =
, NodeReadNull , NodeReadNull
) )
) )
) -> Column PGBool ) -> Column SqlBool
cond34 (corpus_doc, (contact_doc, (_,_))) = corpus_doc^.nn_node2_id .== contact_doc^.nn_node2_id cond34 (corpus_doc, (contact_doc, (_,_))) = corpus_doc^.nn_node2_id .== contact_doc^.nn_node2_id
...@@ -204,7 +207,7 @@ queryContactViaDoc = ...@@ -204,7 +207,7 @@ queryContactViaDoc =
) )
) )
) )
) -> Column PGBool ) -> Column SqlBool
cond45 (doc, (corpus_doc, (_,(_,_)))) = doc^.ns_id .== corpus_doc^.nn_node2_id cond45 (doc, (corpus_doc, (_,(_,_)))) = doc^.ns_id .== corpus_doc^.nn_node2_id
......
...@@ -13,12 +13,12 @@ Triggers on NodeNodeNgrams table. ...@@ -13,12 +13,12 @@ Triggers on NodeNodeNgrams table.
{-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE QuasiQuotes #-}
module Gargantext.Database.Admin.Trigger.NodeNodeNgrams module Gargantext.Database.Admin.Trigger.ContextNodeNgrams
where where
import Database.PostgreSQL.Simple.SqlQQ (sql) import Database.PostgreSQL.Simple.SqlQQ (sql)
import Gargantext.Core import Gargantext.Core
import Gargantext.Core.Types.Main (ListType(CandidateTerm)) -- import Gargantext.Core.Types.Main (ListType(CandidateTerm))
import Gargantext.Database.Admin.Types.Node -- (ListId, CorpusId, NodeId) import Gargantext.Database.Admin.Types.Node -- (ListId, CorpusId, NodeId)
import Gargantext.Database.Prelude (Cmd, execPGSQuery) import Gargantext.Database.Prelude (Cmd, execPGSQuery)
import Gargantext.Prelude import Gargantext.Prelude
...@@ -36,13 +36,13 @@ triggerCountInsert = execPGSQuery query (toDBid NodeDocument, toDBid NodeList) ...@@ -36,13 +36,13 @@ triggerCountInsert = execPGSQuery query (toDBid NodeDocument, toDBid NodeList)
END IF; END IF;
IF TG_OP = 'INSERT' THEN IF TG_OP = 'INSERT' THEN
INSERT INTO node_node_ngrams (node1_id, node2_id, ngrams_id, ngrams_type, weight) INSERT INTO node_node_ngrams (node1_id, node2_id, ngrams_id, ngrams_type, weight)
select n.parent_id, n.id, new1.ngrams_id, new1.ngrams_type, count(*) from NEW as new1 select n.parent_id, n.id, new0.ngrams_id, new0.ngrams_type, count(*) from NEW as new0
INNER JOIN nodes n ON n.id = new1.node1_id INNER JOIN contexts n ON n.id = new0.context_id
INNER JOIN nodes n2 ON n2.id = new1.node2_id INNER JOIN nodes n2 ON n2.id = new0.node_id
WHERE n2.typename = ? -- not mandatory WHERE n2.typename = ? -- not mandatory
AND n.typename = ? -- not mandatory AND n.typename = ? -- not mandatory
AND n.parent_id <> n2.id -- not mandatory AND n.parent_id <> n2.id -- not mandatory
GROUP BY n.parent_id, n.id, new1.ngrams_id, new1.ngrams_type GROUP BY n.parent_id, n.id, new0.ngrams_id, new0.ngrams_type
ON CONFLICT (node1_id, node2_id, ngrams_id, ngrams_type) ON CONFLICT (node1_id, node2_id, ngrams_id, ngrams_type)
DO UPDATE set weight = node_node_ngrams.weight + excluded.weight DO UPDATE set weight = node_node_ngrams.weight + excluded.weight
; ;
...@@ -52,9 +52,9 @@ triggerCountInsert = execPGSQuery query (toDBid NodeDocument, toDBid NodeList) ...@@ -52,9 +52,9 @@ triggerCountInsert = execPGSQuery query (toDBid NodeDocument, toDBid NodeList)
END END
$$ LANGUAGE plpgsql; $$ LANGUAGE plpgsql;
-- DROP trigger trigger_count_insert on node_node_ngrams; DROP trigger IF EXISTS trigger_count_insert on context_node_ngrams;
CREATE TRIGGER trigger_count_insert AFTER INSERT on node_node_ngrams CREATE TRIGGER trigger_count_insert AFTER INSERT on context_node_ngrams
REFERENCING NEW TABLE AS NEW REFERENCING NEW TABLE AS NEW
FOR EACH STATEMENT FOR EACH STATEMENT
EXECUTE PROCEDURE set_ngrams_global_count(); EXECUTE PROCEDURE set_ngrams_global_count();
...@@ -74,11 +74,11 @@ triggerCountInsert2 = execPGSQuery query ( toDBid NodeCorpus ...@@ -74,11 +74,11 @@ triggerCountInsert2 = execPGSQuery query ( toDBid NodeCorpus
RETURN NEW; RETURN NEW;
END IF; END IF;
IF TG_OP = 'INSERT' THEN IF TG_OP = 'INSERT' THEN
INSERT INTO node_node_ngrams2 (node_id, nodengrams_id, weight) INSERT INTO context_node_ngrams2 (context_id, nodengrams_id, weight)
SELECT corpus.id, nng.id, count(*) from NEW as new1 SELECT corpus.id, nng.id, count(*) from NEW as new3
INNER JOIN node_ngrams nng ON nng.id = new1.nodengrams_id INNER JOIN node_ngrams nng ON nng.id = new3.nodengrams_id
INNER JOIN nodes list ON list.id = nng.node_id INNER JOIN nodes list ON list.id = nng.node_id
INNER JOIN nodes_nodes nn ON nn.node2_id = new1.node_id INNER JOIN nodes_nodes nn ON nn.node2_id = new3.context_id
INNER JOIN nodes corpus ON corpus.id = nn.node1_id INNER JOIN nodes corpus ON corpus.id = nn.node1_id
INNER JOIN nodes doc ON doc.id = nn.node2_id INNER JOIN nodes doc ON doc.id = nn.node2_id
WHERE corpus.typename = ? -- 30 -- corpus WHERE corpus.typename = ? -- 30 -- corpus
...@@ -86,8 +86,8 @@ triggerCountInsert2 = execPGSQuery query ( toDBid NodeCorpus ...@@ -86,8 +86,8 @@ triggerCountInsert2 = execPGSQuery query ( toDBid NodeCorpus
AND list.typename = ? -- 5 -- list AND list.typename = ? -- 5 -- list
GROUP BY corpus.id, nng.id GROUP BY corpus.id, nng.id
ON CONFLICT (node_id, nodengrams_id) ON CONFLICT (context_id, nodengrams_id)
DO UPDATE set weight = node_node_ngrams2.weight + excluded.weight DO UPDATE set weight = context_node_ngrams2.weight + excluded.weight
; ;
END IF; END IF;
...@@ -95,15 +95,17 @@ triggerCountInsert2 = execPGSQuery query ( toDBid NodeCorpus ...@@ -95,15 +95,17 @@ triggerCountInsert2 = execPGSQuery query ( toDBid NodeCorpus
END END
$$ LANGUAGE plpgsql; $$ LANGUAGE plpgsql;
-- DROP trigger trigger_count_insert2 on node_node_ngrams2; DROP trigger IF EXISTS trigger_count_insert2 on context_node_ngrams2;
CREATE TRIGGER trigger_count_insert2 AFTER INSERT on node_node_ngrams2 CREATE TRIGGER trigger_count_insert2 AFTER INSERT on context_node_ngrams2
REFERENCING NEW TABLE AS NEW REFERENCING NEW TABLE AS NEW
FOR EACH STATEMENT FOR EACH STATEMENT
EXECUTE PROCEDURE set_ngrams_global_count2(); EXECUTE PROCEDURE set_ngrams_global_count2();
|] |]
-- TODO add the groups -- TODO add the groups
-- TODO use context instead of nodes of type doc
{-
triggerCoocInsert :: HasDBid NodeType => Cmd err Int64 triggerCoocInsert :: HasDBid NodeType => Cmd err Int64
triggerCoocInsert = execPGSQuery query ( toDBid NodeCorpus triggerCoocInsert = execPGSQuery query ( toDBid NodeCorpus
, toDBid NodeDocument , toDBid NodeDocument
...@@ -122,10 +124,10 @@ triggerCoocInsert = execPGSQuery query ( toDBid NodeCorpus ...@@ -122,10 +124,10 @@ triggerCoocInsert = execPGSQuery query ( toDBid NodeCorpus
IF TG_OP = 'INSERT' THEN IF TG_OP = 'INSERT' THEN
INSERT INTO node_nodengrams_nodengrams (node_id, node_ngrams1_id, node_ngrams2_id, weight) INSERT INTO node_nodengrams_nodengrams (node_id, node_ngrams1_id, node_ngrams2_id, weight)
WITH input(corpus_id, nn1, nn2, weight) AS ( WITH input(corpus_id, nn1, nn2, weight) AS (
SELECT corpus.id, nng1.id, nng2.id, count(*) from NEW as new1 SELECT corpus.id, nng1.id, nng2.id, count(*) from NEW as new2
INNER JOIN node_ngrams nng1 ON nng1.id = new1.nodengrams_id INNER JOIN node_ngrams nng1 ON nng1.id = new2.nodengrams_id
INNER JOIN nodes list ON list.id = nng1.node_id INNER JOIN nodes list ON list.id = nng1.node_id
INNER JOIN nodes_nodes nn ON nn.node2_id = new1.node_id INNER JOIN nodes_nodes nn ON nn.node2_id = new2.node_id
INNER JOIN nodes corpus ON corpus.id = nn.node1_id INNER JOIN nodes corpus ON corpus.id = nn.node1_id
INNER JOIN nodes doc ON doc.id = nn.node2_id INNER JOIN nodes doc ON doc.id = nn.node2_id
...@@ -159,4 +161,4 @@ triggerCoocInsert = execPGSQuery query ( toDBid NodeCorpus ...@@ -159,4 +161,4 @@ triggerCoocInsert = execPGSQuery query ( toDBid NodeCorpus
FOR EACH STATEMENT FOR EACH STATEMENT
EXECUTE PROCEDURE set_cooc(); EXECUTE PROCEDURE set_cooc();
|] |]
-}
...@@ -13,7 +13,7 @@ Triggers on Nodes table. ...@@ -13,7 +13,7 @@ Triggers on Nodes table.
{-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE QuasiQuotes #-}
module Gargantext.Database.Admin.Trigger.Nodes module Gargantext.Database.Admin.Trigger.Contexts
where where
import Data.Text (Text) import Data.Text (Text)
...@@ -33,7 +33,6 @@ triggerSearchUpdate = execPGSQuery query ( toDBid NodeDocument ...@@ -33,7 +33,6 @@ triggerSearchUpdate = execPGSQuery query ( toDBid NodeDocument
where where
query :: DPS.Query query :: DPS.Query
query = [sql| query = [sql|
-- DROP TRIGGER search_update_trigger on nodes;
CREATE OR REPLACE FUNCTION public.search_update() CREATE OR REPLACE FUNCTION public.search_update()
RETURNS trigger AS $$ RETURNS trigger AS $$
begin begin
...@@ -57,13 +56,14 @@ triggerSearchUpdate = execPGSQuery query ( toDBid NodeDocument ...@@ -57,13 +56,14 @@ triggerSearchUpdate = execPGSQuery query ( toDBid NodeDocument
ALTER FUNCTION public.search_update() OWNER TO gargantua; ALTER FUNCTION public.search_update() OWNER TO gargantua;
DROP TRIGGER IF EXISTS search_update_trigger on contexts;
CREATE TRIGGER search_update_trigger CREATE TRIGGER search_update_trigger
BEFORE INSERT OR UPDATE BEFORE INSERT OR UPDATE
ON nodes FOR EACH ROW ON contexts FOR EACH ROW
EXECUTE PROCEDURE search_update(); EXECUTE PROCEDURE search_update();
-- Initialize index with already existing data -- Initialize index with already existing data
UPDATE nodes SET hyperdata = hyperdata; UPDATE contexts SET hyperdata = hyperdata;
|] |]
...@@ -109,10 +109,13 @@ triggerUpdateHash secret = execPGSQuery query ( toDBid NodeDocument ...@@ -109,10 +109,13 @@ triggerUpdateHash secret = execPGSQuery query ( toDBid NodeDocument
END END
$$ LANGUAGE plpgsql; $$ LANGUAGE plpgsql;
DROP TRIGGER IF EXISTS nodes_hash_insert ON nodes;
DROP TRIGGER IF EXISTS nodes_hash_update ON nodes;
CREATE TRIGGER nodes_hash_insert BEFORE INSERT ON nodes FOR EACH ROW EXECUTE PROCEDURE hash_insert_nodes(); CREATE TRIGGER nodes_hash_insert BEFORE INSERT ON nodes FOR EACH ROW EXECUTE PROCEDURE hash_insert_nodes();
CREATE TRIGGER nodes_hash_update BEFORE UPDATE ON nodes FOR EACH ROW EXECUTE PROCEDURE hash_update_nodes(); CREATE TRIGGER nodes_hash_update BEFORE UPDATE ON nodes FOR EACH ROW EXECUTE PROCEDURE hash_update_nodes();
DROP TRIGGER IF EXISTS contexts_hash_insert ON contexts;
DROP TRIGGER IF EXISTS contexts_hash_update ON contexts;
CREATE TRIGGER contexts_hash_insert BEFORE INSERT ON contexts FOR EACH ROW EXECUTE PROCEDURE hash_insert_nodes();
CREATE TRIGGER contexts_hash_update BEFORE UPDATE ON contexts FOR EACH ROW EXECUTE PROCEDURE hash_update_nodes();
|] |]
...@@ -17,9 +17,9 @@ module Gargantext.Database.Admin.Trigger.Init ...@@ -17,9 +17,9 @@ module Gargantext.Database.Admin.Trigger.Init
where where
import Data.Text (Text) import Data.Text (Text)
import Gargantext.Database.Admin.Trigger.NodeNodeNgrams (triggerCountInsert, triggerCountInsert2) import Gargantext.Database.Admin.Trigger.ContextNodeNgrams (triggerCountInsert, triggerCountInsert2)
import Gargantext.Database.Admin.Trigger.Nodes (triggerSearchUpdate, triggerUpdateHash) import Gargantext.Database.Admin.Trigger.Contexts (triggerSearchUpdate, triggerUpdateHash)
import Gargantext.Database.Admin.Trigger.NodesNodes (triggerDeleteCount, triggerInsertCount, triggerUpdateAdd, triggerUpdateDel, MasterListId) -- , triggerCoocInsert) import Gargantext.Database.Admin.Trigger.NodesContexts ({-triggerDeleteCount,-} triggerInsertCount, triggerUpdateAdd, triggerUpdateDel, MasterListId) -- , triggerCoocInsert)
import Gargantext.Database.Prelude (Cmd) import Gargantext.Database.Prelude (Cmd)
import Gargantext.Prelude import Gargantext.Prelude
...@@ -34,8 +34,8 @@ initLastTriggers lId = do ...@@ -34,8 +34,8 @@ initLastTriggers lId = do
t0 <- triggerSearchUpdate t0 <- triggerSearchUpdate
t1 <- triggerCountInsert t1 <- triggerCountInsert
t1' <- triggerCountInsert2 t1' <- triggerCountInsert2
-- t1'' <- triggerCoocInsert lId -- t1'' <- triggerCoocInsert lId
t2 <- triggerDeleteCount lId -- t2 <- triggerDeleteCount lId
t3 <- triggerInsertCount lId t3 <- triggerInsertCount lId
t4 <- triggerUpdateAdd lId t4 <- triggerUpdateAdd lId
t5 <- triggerUpdateDel lId t5 <- triggerUpdateDel lId
...@@ -43,7 +43,7 @@ initLastTriggers lId = do ...@@ -43,7 +43,7 @@ initLastTriggers lId = do
,t1 ,t1
,t1' ,t1'
-- ,t1'' -- ,t1''
,t2 -- ,t2
,t3 ,t3
,t4 ,t4
,t5] ,t5]
......
...@@ -45,7 +45,7 @@ instance ToSchema HyperdataAny where ...@@ -45,7 +45,7 @@ instance ToSchema HyperdataAny where
instance FromField HyperdataAny where instance FromField HyperdataAny where
fromField = fromField' fromField = fromField'
instance DefaultFromField PGJsonb HyperdataAny instance DefaultFromField SqlJsonb HyperdataAny
where where
defaultFromField = fieldQueryRunnerColumn defaultFromField = fromPGSFromField
...@@ -194,12 +194,12 @@ instance FromField HyperdataContact where ...@@ -194,12 +194,12 @@ instance FromField HyperdataContact where
fromField = fromField' fromField = fromField'
-- | Database (Opaleye instance) -- | Database (Opaleye instance)
instance DefaultFromField PGJsonb HyperdataContact where instance DefaultFromField SqlJsonb HyperdataContact where
defaultFromField = fieldQueryRunnerColumn defaultFromField = fromPGSFromField
instance DefaultFromField (Nullable PGJsonb) HyperdataContact where instance DefaultFromField (Nullable SqlJsonb) HyperdataContact where
defaultFromField = fieldQueryRunnerColumn defaultFromField = fromPGSFromField
......
...@@ -90,10 +90,10 @@ instance FromField HyperdataAnnuaire ...@@ -90,10 +90,10 @@ instance FromField HyperdataAnnuaire
where where
fromField = fromField' fromField = fromField'
------------------------------------------------------------------------ ------------------------------------------------------------------------
instance DefaultFromField PGJsonb HyperdataCorpus instance DefaultFromField SqlJsonb HyperdataCorpus
where where
defaultFromField = fieldQueryRunnerColumn defaultFromField = fromPGSFromField
instance DefaultFromField PGJsonb HyperdataAnnuaire instance DefaultFromField SqlJsonb HyperdataAnnuaire
where where
defaultFromField = fieldQueryRunnerColumn defaultFromField = fromPGSFromField
...@@ -71,7 +71,7 @@ instance ToSchema HyperdataDashboard where ...@@ -71,7 +71,7 @@ instance ToSchema HyperdataDashboard where
instance FromField HyperdataDashboard where instance FromField HyperdataDashboard where
fromField = fromField' fromField = fromField'
instance DefaultFromField PGJsonb HyperdataDashboard instance DefaultFromField SqlJsonb HyperdataDashboard
where where
defaultFromField = fieldQueryRunnerColumn defaultFromField = fromPGSFromField
...@@ -202,11 +202,11 @@ instance ToField HyperdataDocumentV3 where ...@@ -202,11 +202,11 @@ instance ToField HyperdataDocumentV3 where
toField = toJSONField toField = toJSONField
------------------------------------------------------------------------ ------------------------------------------------------------------------
instance DefaultFromField PGJsonb HyperdataDocument instance DefaultFromField SqlJsonb HyperdataDocument
where where
defaultFromField = fieldQueryRunnerColumn defaultFromField = fromPGSFromField
instance DefaultFromField PGJsonb HyperdataDocumentV3 instance DefaultFromField SqlJsonb HyperdataDocumentV3
where where
defaultFromField = fieldQueryRunnerColumn defaultFromField = fromPGSFromField
------------------------------------------------------------------------ ------------------------------------------------------------------------
...@@ -54,9 +54,9 @@ instance FromField HyperdataFile ...@@ -54,9 +54,9 @@ instance FromField HyperdataFile
where where
fromField = fromField' fromField = fromField'
instance DefaultFromField PGJsonb HyperdataFile instance DefaultFromField SqlJsonb HyperdataFile
where where
defaultFromField = fieldQueryRunnerColumn defaultFromField = fromPGSFromField
instance ToSchema HyperdataFile where instance ToSchema HyperdataFile where
declareNamedSchema proxy = declareNamedSchema proxy =
......
...@@ -23,10 +23,10 @@ module Gargantext.Database.Admin.Types.Hyperdata.Frame ...@@ -23,10 +23,10 @@ module Gargantext.Database.Admin.Types.Hyperdata.Frame
import Control.Lens import Control.Lens
import Data.ByteString.Lazy (toStrict) import Data.ByteString.Lazy (toStrict)
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8) import Data.Text.Encoding (decodeUtf8)
import Gargantext.Prelude
import Gargantext.Database.Admin.Types.Hyperdata.Prelude import Gargantext.Database.Admin.Types.Hyperdata.Prelude
import Gargantext.Prelude
import qualified Data.Text as T
import qualified Network.Wreq as Wreq import qualified Network.Wreq as Wreq
------------------------------------------------------------------------ ------------------------------------------------------------------------
...@@ -58,9 +58,9 @@ instance FromField HyperdataFrame ...@@ -58,9 +58,9 @@ instance FromField HyperdataFrame
where where
fromField = fromField' fromField = fromField'
instance DefaultFromField PGJsonb HyperdataFrame instance DefaultFromField SqlJsonb HyperdataFrame
where where
defaultFromField = fieldQueryRunnerColumn defaultFromField = fromPGSFromField
instance ToSchema HyperdataFrame where instance ToSchema HyperdataFrame where
declareNamedSchema proxy = declareNamedSchema proxy =
......
...@@ -98,12 +98,12 @@ instance FromField HyperdataListCooc ...@@ -98,12 +98,12 @@ instance FromField HyperdataListCooc
where where
fromField = fromField' fromField = fromField'
instance DefaultFromField PGJsonb HyperdataList instance DefaultFromField SqlJsonb HyperdataList
where where
defaultFromField = fieldQueryRunnerColumn defaultFromField = fromPGSFromField
instance DefaultFromField PGJsonb HyperdataListCooc instance DefaultFromField SqlJsonb HyperdataListCooc
where where
defaultFromField = fieldQueryRunnerColumn defaultFromField = fromPGSFromField
instance ToSchema HyperdataList where instance ToSchema HyperdataList where
......
...@@ -48,9 +48,9 @@ instance FromField HyperdataModel ...@@ -48,9 +48,9 @@ instance FromField HyperdataModel
where where
fromField = fromField' fromField = fromField'
instance DefaultFromField PGJsonb HyperdataModel instance DefaultFromField SqlJsonb HyperdataModel
where where
defaultFromField = fieldQueryRunnerColumn defaultFromField = fromPGSFromField
instance ToSchema HyperdataModel where instance ToSchema HyperdataModel where
declareNamedSchema proxy = declareNamedSchema proxy =
......
...@@ -56,6 +56,6 @@ instance ToSchema HyperdataPhylo where ...@@ -56,6 +56,6 @@ instance ToSchema HyperdataPhylo where
instance FromField HyperdataPhylo where instance FromField HyperdataPhylo where
fromField = fromField' fromField = fromField'
instance DefaultFromField PGJsonb HyperdataPhylo instance DefaultFromField SqlJsonb HyperdataPhylo
where where
defaultFromField = fieldQueryRunnerColumn defaultFromField = fromPGSFromField
...@@ -48,7 +48,7 @@ import GHC.Generics (Generic) ...@@ -48,7 +48,7 @@ import GHC.Generics (Generic)
import Gargantext.Core.Utils.Prefix (unPrefix, unPrefixSwagger, wellNamedSchema) import Gargantext.Core.Utils.Prefix (unPrefix, unPrefixSwagger, wellNamedSchema)
import Gargantext.Database.Prelude (fromField') import Gargantext.Database.Prelude (fromField')
import Gargantext.Prelude import Gargantext.Prelude
import Opaleye (DefaultFromField, defaultFromField, PGJsonb, fieldQueryRunnerColumn, Nullable) import Opaleye (DefaultFromField, defaultFromField, Nullable, SqlJsonb, fromPGSFromField)
import Test.QuickCheck (elements) import Test.QuickCheck (elements)
import Test.QuickCheck.Arbitrary hiding (vector) import Test.QuickCheck.Arbitrary hiding (vector)
......
...@@ -54,7 +54,7 @@ instance ToSchema HyperdataTexts where ...@@ -54,7 +54,7 @@ instance ToSchema HyperdataTexts where
instance FromField HyperdataTexts where instance FromField HyperdataTexts where
fromField = fromField' fromField = fromField'
instance DefaultFromField PGJsonb HyperdataTexts instance DefaultFromField SqlJsonb HyperdataTexts
where where
defaultFromField = fieldQueryRunnerColumn defaultFromField = fromPGSFromField
...@@ -135,12 +135,12 @@ instance FromField HyperdataPublic where ...@@ -135,12 +135,12 @@ instance FromField HyperdataPublic where
fromField = fromField' fromField = fromField'
-- | Database (Opaleye instance) -- | Database (Opaleye instance)
instance DefaultFromField PGJsonb HyperdataUser where instance DefaultFromField SqlJsonb HyperdataUser where
defaultFromField = fieldQueryRunnerColumn defaultFromField = fromPGSFromField
instance DefaultFromField PGJsonb HyperdataPrivate where instance DefaultFromField SqlJsonb HyperdataPrivate where
defaultFromField = fieldQueryRunnerColumn defaultFromField = fromPGSFromField
instance DefaultFromField PGJsonb HyperdataPublic where instance DefaultFromField SqlJsonb HyperdataPublic where
defaultFromField = fieldQueryRunnerColumn defaultFromField = fromPGSFromField
...@@ -23,6 +23,7 @@ import Codec.Serialise (Serialise()) ...@@ -23,6 +23,7 @@ import Codec.Serialise (Serialise())
import Control.Monad (mzero) import Control.Monad (mzero)
import Data.Aeson import Data.Aeson
import Data.Aeson.TH (deriveJSON) import Data.Aeson.TH (deriveJSON)
import qualified Data.Csv as Csv
import Data.Either import Data.Either
import Data.Hashable (Hashable) import Data.Hashable (Hashable)
import Data.Morpheus.Types (GQLType) import Data.Morpheus.Types (GQLType)
...@@ -32,26 +33,37 @@ import Data.Time (UTCTime) ...@@ -32,26 +33,37 @@ import Data.Time (UTCTime)
import Database.PostgreSQL.Simple.FromField (FromField, fromField) import Database.PostgreSQL.Simple.FromField (FromField, fromField)
import Database.PostgreSQL.Simple.ToField (ToField, toField) import Database.PostgreSQL.Simple.ToField (ToField, toField)
import GHC.Generics (Generic) import GHC.Generics (Generic)
import Servant import Gargantext.Core.Utils.Prefix (unPrefix, unPrefixSwagger, wellNamedSchema)
import qualified Opaleye as O import Gargantext.Database.Schema.Context
import Opaleye (DefaultFromField, defaultFromField, PGInt4, PGText, PGTSVector, Nullable, fieldQueryRunnerColumn) import Gargantext.Database.Schema.Node
import Test.QuickCheck (elements) import Gargantext.Prelude
import Gargantext.Prelude.Crypto.Hash (Hash) import Gargantext.Prelude.Crypto.Hash (Hash)
import Opaleye (DefaultFromField, defaultFromField, SqlInt4, SqlText, SqlTSVector, Nullable, fromPGSFromField)
import Servant hiding (Context)
import Test.QuickCheck (elements)
import Test.QuickCheck.Arbitrary import Test.QuickCheck.Arbitrary
import Test.QuickCheck.Instances.Text () import Test.QuickCheck.Instances.Text ()
import Test.QuickCheck.Instances.Time () import Test.QuickCheck.Instances.Time ()
import Text.Read (read) import Text.Read (read)
import qualified Opaleye as O
import Gargantext.Core.Utils.Prefix (unPrefix, unPrefixSwagger, wellNamedSchema)
-- import Gargantext.Database.Prelude (fromField')
import Gargantext.Database.Schema.Node
import Gargantext.Prelude
type UserId = Int type UserId = Int
type MasterUserId = UserId type MasterUserId = UserId
type NodeTypeId = Int
type NodeName = Text
type ContextName = Text
type TSVector = Text
type ContextTitle = Text
------------------------------------------------------------------------ ------------------------------------------------------------------------
-- | NodePoly indicates that Node has a Polymorphism Type -- | NodePoly indicates that Node has a Polymorphism Type
type Node json = NodePoly NodeId (Maybe Hash) NodeTypeId UserId (Maybe ParentId) NodeName UTCTime json type Node json = NodePoly NodeId (Maybe Hash) NodeTypeId UserId (Maybe ParentId) NodeName UTCTime json
type Context json = ContextPoly NodeId (Maybe Hash) NodeTypeId UserId (Maybe ParentId) ContextTitle UTCTime json
-- | NodeSearch (queries) -- | NodeSearch (queries)
-- type NodeSearch json = NodePolySearch NodeId NodeTypeId UserId (Maybe ParentId) NodeName UTCTime json (Maybe TSVector) -- type NodeSearch json = NodePolySearch NodeId NodeTypeId UserId (Maybe ParentId) NodeName UTCTime json (Maybe TSVector)
...@@ -119,6 +131,8 @@ instance (Arbitrary nodeId ...@@ -119,6 +131,8 @@ instance (Arbitrary nodeId
<*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
<*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
instance (Arbitrary hyperdata instance (Arbitrary hyperdata
,Arbitrary nodeId ,Arbitrary nodeId
,Arbitrary toDBid ,Arbitrary toDBid
...@@ -143,16 +157,65 @@ instance (Arbitrary hyperdata ...@@ -143,16 +157,65 @@ instance (Arbitrary hyperdata
<*> arbitrary <*> arbitrary
<*> arbitrary <*> arbitrary
instance (Arbitrary contextId
,Arbitrary hashId
,Arbitrary toDBid
,Arbitrary userId
,Arbitrary contextParentId
, Arbitrary hyperdata
) => Arbitrary (ContextPoly contextId hashId toDBid userId contextParentId
ContextName UTCTime hyperdata) where
--arbitrary = Node 1 1 (Just 1) 1 "name" (jour 2018 01 01) (arbitrary) (Just "")
arbitrary = Context <$> arbitrary <*> arbitrary <*> arbitrary
<*> arbitrary <*> arbitrary <*> arbitrary
<*> arbitrary <*> arbitrary
instance (Arbitrary hyperdata
,Arbitrary contextId
,Arbitrary toDBid
,Arbitrary userId
,Arbitrary contextParentId
) => Arbitrary (ContextPolySearch contextId
toDBid
userId
contextParentId
ContextName
UTCTime
hyperdata
(Maybe TSVector)
) where
--arbitrary = Node 1 1 (Just 1) 1 "name" (jour 2018 01 01) (arbitrary) (Just "")
arbitrary = ContextSearch <$> arbitrary
<*> arbitrary
<*> arbitrary
<*> arbitrary
<*> arbitrary
<*> arbitrary
<*> arbitrary
<*> arbitrary
------------------------------------------------------------------------ ------------------------------------------------------------------------
pgNodeId :: NodeId -> O.Column O.PGInt4 pgNodeId :: NodeId -> O.Column O.SqlInt4
pgNodeId = O.sqlInt4 . id2int pgNodeId = O.sqlInt4 . id2int
where where
id2int :: NodeId -> Int id2int :: NodeId -> Int
id2int (NodeId n) = n id2int (NodeId n) = n
pgContextId :: ContextId -> O.Column O.SqlInt4
pgContextId = pgNodeId
------------------------------------------------------------------------ ------------------------------------------------------------------------
newtype NodeId = NodeId Int newtype NodeId = NodeId Int
deriving (Read, Generic, Num, Eq, Ord, Enum, ToJSONKey, FromJSONKey, ToJSON, FromJSON, Hashable) deriving (Read, Generic, Num, Eq, Ord, Enum, ToJSONKey, FromJSONKey, ToJSON, FromJSON, Hashable, Csv.ToField)
-- TODO make another type?
type ContextId = NodeId
instance GQLType NodeId instance GQLType NodeId
instance Show NodeId where instance Show NodeId where
show (NodeId n) = "nodeId-" <> show n show (NodeId n) = "nodeId-" <> show n
...@@ -166,14 +229,12 @@ instance FromField NodeId where ...@@ -166,14 +229,12 @@ instance FromField NodeId where
then return $ NodeId n then return $ NodeId n
else mzero else mzero
instance ToSchema NodeId instance ToSchema NodeId
--instance Csv.ToField NodeId where
-- toField (NodeId nodeId) = Csv.toField nodeId
unNodeId :: NodeId -> Int unNodeId :: NodeId -> Int
unNodeId (NodeId n) = n unNodeId (NodeId n) = n
type NodeTypeId = Int
type NodeName = Text
type TSVector = Text
------------------------------------------------------------------------ ------------------------------------------------------------------------
------------------------------------------------------------------------ ------------------------------------------------------------------------
instance FromHttpApiData NodeId where instance FromHttpApiData NodeId where
...@@ -184,13 +245,13 @@ instance ToParamSchema NodeId ...@@ -184,13 +245,13 @@ instance ToParamSchema NodeId
instance Arbitrary NodeId where instance Arbitrary NodeId where
arbitrary = NodeId <$> arbitrary arbitrary = NodeId <$> arbitrary
type ParentId = NodeId type ParentId = NodeId
type CorpusId = NodeId type CorpusId = NodeId
type CommunityId = NodeId type CommunityId = NodeId
type ListId = NodeId type ListId = NodeId
type DocumentId = NodeId type DocumentId = NodeId
type DocId = NodeId type DocId = NodeId
type RootId = NodeId type RootId = NodeId
type MasterCorpusId = CorpusId type MasterCorpusId = CorpusId
type UserCorpusId = CorpusId type UserCorpusId = CorpusId
...@@ -357,28 +418,32 @@ instance FromField (NodeId, Text) ...@@ -357,28 +418,32 @@ instance FromField (NodeId, Text)
fromField = fromField' fromField = fromField'
-} -}
------------------------------------------------------------------------ ------------------------------------------------------------------------
instance DefaultFromField PGTSVector (Maybe TSVector) instance DefaultFromField SqlTSVector (Maybe TSVector)
where where
defaultFromField = fieldQueryRunnerColumn defaultFromField = fromPGSFromField
instance DefaultFromField PGInt4 (Maybe NodeId) instance DefaultFromField SqlInt4 (Maybe NodeId)
where where
defaultFromField = fieldQueryRunnerColumn defaultFromField = fromPGSFromField
instance DefaultFromField PGInt4 NodeId instance DefaultFromField SqlInt4 NodeId
where where
defaultFromField = fieldQueryRunnerColumn defaultFromField = fromPGSFromField
instance DefaultFromField (Nullable PGInt4) NodeId instance DefaultFromField (Nullable SqlInt4) NodeId
where where
defaultFromField = fieldQueryRunnerColumn defaultFromField = fromPGSFromField
instance (DefaultFromField (Nullable O.PGTimestamptz) UTCTime) instance (DefaultFromField (Nullable O.SqlTimestamptz) UTCTime)
where where
defaultFromField = fieldQueryRunnerColumn defaultFromField = fromPGSFromField
instance DefaultFromField PGText (Maybe Hash) instance DefaultFromField SqlText (Maybe Hash)
where where
defaultFromField = fieldQueryRunnerColumn defaultFromField = fromPGSFromField
---------------------------------------------------------------------
context2node :: Context a -> Node a
context2node (Context ci ch ct cu cp cn cd chy) = Node ci ch ct cu cp cn cd chy
...@@ -32,7 +32,7 @@ import Database.PostgreSQL.Simple.Internal (Field) ...@@ -32,7 +32,7 @@ import Database.PostgreSQL.Simple.Internal (Field)
import Gargantext.Core.Mail.Types (HasMail) import Gargantext.Core.Mail.Types (HasMail)
import Gargantext.Prelude import Gargantext.Prelude
import Gargantext.Prelude.Config (readIniFile', val) import Gargantext.Prelude.Config (readIniFile', val)
import Opaleye (Query, Unpackspec, showSql, FromFields, Select, runSelect, PGJsonb, DefaultFromField) import Opaleye (Unpackspec, showSql, FromFields, Select, runSelect, SqlJsonb, DefaultFromField)
import Opaleye.Aggregate (countRows) import Opaleye.Aggregate (countRows)
import System.IO (FilePath) import System.IO (FilePath)
import System.IO (stderr) import System.IO (stderr)
...@@ -57,7 +57,7 @@ instance HasConfig GargConfig where ...@@ -57,7 +57,7 @@ instance HasConfig GargConfig where
hasConfig = identity hasConfig = identity
------------------------------------------------------- -------------------------------------------------------
type JSONB = DefaultFromField PGJsonb type JSONB = DefaultFromField SqlJsonb
------------------------------------------------------- -------------------------------------------------------
type CmdM'' env err m = type CmdM'' env err m =
...@@ -148,16 +148,14 @@ runPGSQuery_ :: ( CmdM env err m ...@@ -148,16 +148,14 @@ runPGSQuery_ :: ( CmdM env err m
runPGSQuery_ q = mkCmd $ \conn -> catch (PGS.query_ conn q) printError runPGSQuery_ q = mkCmd $ \conn -> catch (PGS.query_ conn q) printError
where where
printError (SomeException e) = do printError (SomeException e) = do
printDebug "[G.D.P.runPGSQuery_]" ("TODO: format query error query" :: Text) printDebug "[G.D.P.runPGSQuery_]" ("TODO: format query error" :: Text)
throw (SomeException e) throw (SomeException e)
execPGSQuery :: PGS.ToRow a => PGS.Query -> a -> Cmd err Int64 execPGSQuery :: PGS.ToRow a => PGS.Query -> a -> Cmd err Int64
execPGSQuery q a = mkCmd $ \conn -> PGS.execute conn q a execPGSQuery q a = mkCmd $ \conn -> PGS.execute conn q a
------------------------------------------------------------------------ ------------------------------------------------------------------------
databaseParameters :: FilePath -> IO PGS.ConnectInfo databaseParameters :: FilePath -> IO PGS.ConnectInfo
databaseParameters fp = do databaseParameters fp = do
ini <- readIniFile' fp ini <- readIniFile' fp
...@@ -185,6 +183,6 @@ fromField' field mb = do ...@@ -185,6 +183,6 @@ fromField' field mb = do
, show v , show v
] ]
printSqlOpa :: Default Unpackspec a a => Query a -> IO () printSqlOpa :: Default Unpackspec a a => Select a -> IO ()
printSqlOpa = putStrLn . maybe "Empty query" identity . showSql printSqlOpa = putStrLn . maybe "Empty query" identity . showSql
This diff is collapsed.
...@@ -20,12 +20,12 @@ module Gargantext.Database.Query.Filter ...@@ -20,12 +20,12 @@ module Gargantext.Database.Query.Filter
import Gargantext.Core.Types (Limit, Offset) import Gargantext.Core.Types (Limit, Offset)
import Data.Maybe (Maybe, maybe) import Data.Maybe (Maybe, maybe)
import Opaleye (Query, limit, offset) import Opaleye (Select, limit, offset)
limit' :: Maybe Limit -> Query a -> Query a limit' :: Maybe Limit -> Select a -> Select a
limit' maybeLimit query = maybe query (\l -> limit l query) maybeLimit limit' maybeLimit query = maybe query (\l -> limit l query) maybeLimit
offset' :: Maybe Offset -> Query a -> Query a offset' :: Maybe Offset -> Select a -> Select a
offset' maybeOffset query = maybe query (\o -> offset o query) maybeOffset offset' maybeOffset query = maybe query (\o -> offset o query) maybeOffset
...@@ -33,48 +33,35 @@ module Gargantext.Database.Query.Join ( leftJoin2 ...@@ -33,48 +33,35 @@ module Gargantext.Database.Query.Join ( leftJoin2
) )
where where
import Control.Arrow ((>>>)) import Control.Arrow ((>>>), returnA)
import Data.Profunctor.Product.Default import Data.Profunctor.Product.Default
import Gargantext.Prelude import Gargantext.Prelude
import Opaleye import Opaleye hiding (keepWhen)
import Opaleye.Internal.Join (NullMaker(..)) import Opaleye.Internal.Join (NullMaker(..))
import qualified Opaleye.Internal.Unpackspec() import qualified Opaleye.Internal.Unpackspec()
keepWhen :: (a -> Field SqlBool) -> SelectArr a a
keepWhen p = proc a -> do
restrict -< p a
returnA -< a
------------------------------------------------------------------------ ------------------------------------------------------------------------
leftJoin2 :: (Default Unpackspec fieldsL fieldsL, leftJoin2 :: (Default Unpackspec fieldsL fieldsL,
Default Unpackspec fieldsR fieldsR, Default Unpackspec fieldsR fieldsR,
Default NullMaker fieldsR nullableFieldsR) => Default NullMaker fieldsR nullableFieldsR) =>
Select fieldsL Select fieldsL
-> Select fieldsR -> Select fieldsR
-> ((fieldsL, fieldsR) -> Column PGBool) -> ((fieldsL, fieldsR) -> Column SqlBool)
-> Select (fieldsL, nullableFieldsR) -> Select (fieldsL, nullableFieldsR)
leftJoin2 = leftJoin leftJoin2 = leftJoin
------------------------------------------------------------------------ ------------------------------------------------------------------------
-- | LeftJoin3 in two ways to write it -- | LeftJoin3 in two ways to write it
_leftJoin3 :: Query columnsA -> Query columnsB -> Query columnsC leftJoin3 :: Select columnsA -> Select columnsB -> Select columnsC
-> ((columnsA, columnsB, columnsC) -> Column PGBool) -> ((columnsA, columnsB, columnsC) -> Column SqlBool)
-> Query (columnsA, columnsB, columnsC) -> Select (columnsA, columnsB, columnsC)
_leftJoin3 q1 q2 q3 cond = ((,,) <$> q1 <*> q2 <*> q3) >>> keepWhen cond leftJoin3 q1 q2 q3 cond = ((,,) <$> q1 <*> q2 <*> q3) >>> keepWhen cond
leftJoin3 :: ( Default Unpackspec b2 b2
, Default Unpackspec b3 b3
, Default Unpackspec fieldsL fieldsL
, Default Unpackspec fieldsR fieldsR
, Default NullMaker b3 b4
, Default NullMaker b2 b5
, Default NullMaker fieldsR b2) =>
Select fieldsR
-> Select b3
-> Select fieldsL
-> ((b3, fieldsR) -> Column PGBool)
-> ((fieldsL, (b3, b2)) -> Column PGBool)
-> Select (fieldsL, (b4, b5))
leftJoin3 q1 q2 q3
cond12 cond23 =
leftJoin q3 ( leftJoin q2 q1 cond12) cond23
leftJoin4 :: (Default Unpackspec b2 b2, leftJoin4 :: (Default Unpackspec b2 b2,
...@@ -88,9 +75,9 @@ leftJoin4 :: (Default Unpackspec b2 b2, ...@@ -88,9 +75,9 @@ leftJoin4 :: (Default Unpackspec b2 b2,
-> Select b3 -> Select b3
-> Select b2 -> Select b2
-> Select fieldsL -> Select fieldsL
-> ((b3, fieldsR) -> Column PGBool) -> ((b3, fieldsR) -> Column SqlBool)
-> ((b2, (b3, b4)) -> Column PGBool) -> ((b2, (b3, b4)) -> Column SqlBool)
-> ((fieldsL, (b2, (b5, b6))) -> Column PGBool) -> ((fieldsL, (b2, (b5, b6))) -> Column SqlBool)
-> Select (fieldsL, (b7, (b8, b9))) -> Select (fieldsL, (b7, (b8, b9)))
leftJoin4 q1 q2 q3 q4 leftJoin4 q1 q2 q3 q4
cond12 cond23 cond34 = cond12 cond23 cond34 =
...@@ -117,10 +104,10 @@ leftJoin5 :: (Default Unpackspec b2 b2, Default Unpackspec b3 b3, ...@@ -117,10 +104,10 @@ leftJoin5 :: (Default Unpackspec b2 b2, Default Unpackspec b3 b3,
-> Select b7 -> Select b7
-> Select b8 -> Select b8
-> Select fieldsL -> Select fieldsL
-> ((b5, fieldsR) -> Column PGBool) -> ((b5, fieldsR) -> Column SqlBool)
-> ((b7, (b5, b4)) -> Column PGBool) -> ((b7, (b5, b4)) -> Column SqlBool)
-> ((b8, (b7, (b9, b10))) -> Column PGBool) -> ((b8, (b7, (b9, b10))) -> Column SqlBool)
-> ((fieldsL, (b8, (b6, (b3, b2)))) -> Column PGBool) -> ((fieldsL, (b8, (b6, (b3, b2)))) -> Column SqlBool)
-> Select (fieldsL, (b12, (b11, (b13, b14)))) -> Select (fieldsL, (b12, (b11, (b13, b14))))
leftJoin5 q1 q2 q3 q4 q5 leftJoin5 q1 q2 q3 q4 q5
cond12 cond23 cond34 cond45 = cond12 cond23 cond34 cond45 =
...@@ -155,11 +142,11 @@ leftJoin6 :: (Default Unpackspec b2 b2, Default Unpackspec b3 b3, ...@@ -155,11 +142,11 @@ leftJoin6 :: (Default Unpackspec b2 b2, Default Unpackspec b3 b3,
-> Select b5 -> Select b5
-> Select b6 -> Select b6
-> Select fieldsL -> Select fieldsL
-> ((b8, fieldsR) -> Column PGBool) -> ((b8, fieldsR) -> Column SqlBool)
-> ((b3, (b8, b9)) -> Column PGBool) -> ((b3, (b8, b9)) -> Column SqlBool)
-> ((b5, (b3, (b14, b15))) -> Column PGBool) -> ((b5, (b3, (b14, b15))) -> Column SqlBool)
-> ((b6, (b5, (b7, (b10, b11)))) -> Column PGBool) -> ((b6, (b5, (b7, (b10, b11)))) -> Column SqlBool)
-> ((fieldsL, (b6, (b4, (b2, (b12, b13))))) -> Column PGBool) -> ((fieldsL, (b6, (b4, (b2, (b12, b13))))) -> Column SqlBool)
-> Select (fieldsL, (b17, (b16, (b18, (b19, b20))))) -> Select (fieldsL, (b17, (b16, (b18, (b19, b20)))))
leftJoin6 q1 q2 q3 q4 q5 q6 leftJoin6 q1 q2 q3 q4 q5 q6
cond12 cond23 cond34 cond45 cond56 = cond12 cond23 cond34 cond45 cond56 =
...@@ -203,13 +190,13 @@ leftJoin7 :: (Default Unpackspec b2 b2, Default Unpackspec b3 b3, ...@@ -203,13 +190,13 @@ leftJoin7 :: (Default Unpackspec b2 b2, Default Unpackspec b3 b3,
-> Select b14 -> Select b14
-> Select b13 -> Select b13
-> Select fieldsL -> Select fieldsL
-> ((b7, fieldsR) -> Column PGBool) -> ((b7, fieldsR) -> Column SqlBool)
-> ((b11, (b7, b6)) -> Column PGBool) -> ((b11, (b7, b6)) -> Column SqlBool)
-> ((b16, (b11, (b20, b21))) -> Column PGBool) -> ((b16, (b11, (b20, b21))) -> Column SqlBool)
-> ((b14, (b16, (b8, (b5, b4)))) -> Column PGBool) -> ((b14, (b16, (b8, (b5, b4)))) -> Column SqlBool)
-> ((b13, (b14, (b12, (b10, (b18, b19))))) -> Column PGBool) -> ((b13, (b14, (b12, (b10, (b18, b19))))) -> Column SqlBool)
-> ((fieldsL, (b13, (b15, (b17, (b9, (b3, b2)))))) -> ((fieldsL, (b13, (b15, (b17, (b9, (b3, b2))))))
-> Column PGBool) -> Column SqlBool)
-> Select (fieldsL, (b24, (b25, (b23, (b22, (b26, b27)))))) -> Select (fieldsL, (b24, (b25, (b23, (b22, (b26, b27))))))
leftJoin7 q1 q2 q3 q4 q5 q6 q7 leftJoin7 q1 q2 q3 q4 q5 q6 q7
cond12 cond23 cond34 cond45 cond56 cond67 = cond12 cond23 cond34 cond45 cond56 cond67 =
...@@ -263,14 +250,14 @@ leftJoin8 :: (Default Unpackspec b2 b2, Default Unpackspec b3 b3, ...@@ -263,14 +250,14 @@ leftJoin8 :: (Default Unpackspec b2 b2, Default Unpackspec b3 b3,
-> Select b11 -> Select b11
-> Select b10 -> Select b10
-> Select fieldsL -> Select fieldsL
-> ((b17, fieldsR) -> Column PGBool) -> ((b17, fieldsR) -> Column SqlBool)
-> ((b4, (b17, b18)) -> Column PGBool) -> ((b4, (b17, b18)) -> Column SqlBool)
-> ((b8, (b4, (b27, b28))) -> Column PGBool) -> ((b8, (b4, (b27, b28))) -> Column SqlBool)
-> ((b13, (b8, (b16, (b19, b20)))) -> Column PGBool) -> ((b13, (b8, (b16, (b19, b20)))) -> Column SqlBool)
-> ((b11, (b13, (b5, (b3, (b25, b26))))) -> Column PGBool) -> ((b11, (b13, (b5, (b3, (b25, b26))))) -> Column SqlBool)
-> ((b10, (b11, (b9, (b7, (b15, (b21, b22)))))) -> Column PGBool) -> ((b10, (b11, (b9, (b7, (b15, (b21, b22)))))) -> Column SqlBool)
-> ((fieldsL, (b10, (b12, (b14, (b6, (b2, (b23, b24))))))) -> ((fieldsL, (b10, (b12, (b14, (b6, (b2, (b23, b24)))))))
-> Column PGBool) -> Column SqlBool)
-> Select (fieldsL, (b31, (b32, (b30, (b29, (b33, (b34, b35))))))) -> Select (fieldsL, (b31, (b32, (b30, (b29, (b33, (b34, b35)))))))
leftJoin8 q1 q2 q3 q4 q5 q6 q7 q8 leftJoin8 q1 q2 q3 q4 q5 q6 q7 q8
cond12 cond23 cond34 cond45 cond56 cond67 cond78 = cond12 cond23 cond34 cond45 cond56 cond67 cond78 =
...@@ -336,16 +323,16 @@ leftJoin9 :: (Default Unpackspec b2 b2, Default Unpackspec b3 b3, ...@@ -336,16 +323,16 @@ leftJoin9 :: (Default Unpackspec b2 b2, Default Unpackspec b3 b3,
-> Select b21 -> Select b21
-> Select b22 -> Select b22
-> Select fieldsL -> Select fieldsL
-> ((b9, fieldsR) -> Column PGBool) -> ((b9, fieldsR) -> Column SqlBool)
-> ((b15, (b9, b8)) -> Column PGBool) -> ((b15, (b9, b8)) -> Column SqlBool)
-> ((b28, (b15, (b35, b36))) -> Column PGBool) -> ((b28, (b15, (b35, b36))) -> Column SqlBool)
-> ((b24, (b28, (b10, (b7, b6)))) -> Column PGBool) -> ((b24, (b28, (b10, (b7, b6)))) -> Column SqlBool)
-> ((b19, (b24, (b16, (b14, (b33, b34))))) -> Column PGBool) -> ((b19, (b24, (b16, (b14, (b33, b34))))) -> Column SqlBool)
-> ((b21, (b19, (b27, (b29, (b11, (b5, b4)))))) -> Column PGBool) -> ((b21, (b19, (b27, (b29, (b11, (b5, b4)))))) -> Column SqlBool)
-> ((b22, (b21, (b23, (b25, (b17, (b13, (b31, b32))))))) -> ((b22, (b21, (b23, (b25, (b17, (b13, (b31, b32)))))))
-> Column PGBool) -> Column SqlBool)
-> ((fieldsL, (b22, (b20, (b18, (b26, (b30, (b12, (b3, b2)))))))) -> ((fieldsL, (b22, (b20, (b18, (b26, (b30, (b12, (b3, b2))))))))
-> Column PGBool) -> Column SqlBool)
-> Select -> Select
(fieldsL, (b40, (b39, (b41, (b42, (b38, (b37, (b43, b44)))))))) (fieldsL, (b40, (b39, (b41, (b42, (b38, (b37, (b43, b44))))))))
leftJoin9 q1 q2 q3 q4 q5 q6 q7 q8 q9 leftJoin9 q1 q2 q3 q4 q5 q6 q7 q8 q9
......
{-|
import Gargantext.Database.Prelude (Cmd, runPGSQuery)
Module : Gargantext.Database.Query.Table.Node
Description : Main Tools of Node to the database
Copyright : (c) CNRS, 2017-Present
License : AGPL + CECILL v3
Maintainer : team@gargantext.org
Stability : experimental
Portability : POSIX
-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE Arrows #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module Gargantext.Database.Query.Table.Context
where
import Control.Arrow (returnA)
import Gargantext.Core
import Gargantext.Core.Types
import Gargantext.Database.Admin.Types.Hyperdata
import Gargantext.Database.Prelude
import Gargantext.Database.Query.Filter (limit', offset')
import Gargantext.Database.Query.Table.Node.Error
import Gargantext.Database.Schema.Context
import Gargantext.Prelude hiding (sum, head)
import Opaleye hiding (FromField)
import Prelude hiding (null, id, map, sum)
getContextWith :: (HasNodeError err, JSONB a)
=> ContextId -> proxy a -> Cmd err (Node a)
getContextWith nId _ = do
maybeContext <- headMay <$> runOpaQuery (selectContext (pgNodeId nId))
case maybeContext of
Nothing -> nodeError (DoesNotExist nId)
Just r -> pure $ context2node r
queryContextSearchTable :: Select ContextSearchRead
queryContextSearchTable = selectTable contextTableSearch
selectContext :: Column SqlInt4 -> Select ContextRead
selectContext id' = proc () -> do
row <- queryContextTable -< ()
restrict -< _context_id row .== id'
returnA -< row
runGetContexts :: Select ContextRead -> Cmd err [Context HyperdataAny]
runGetContexts = runOpaQuery
------------------------------------------------------------------------
------------------------------------------------------------------------
-- | order by publication date
-- Favorites (Bool), node_ngrams
selectContextsWith :: HasDBid NodeType
=> ParentId -> Maybe NodeType
-> Maybe Offset -> Maybe Limit -> Select ContextRead
selectContextsWith parentId maybeContextType maybeOffset maybeLimit =
--offset' maybeOffset $ limit' maybeLimit $ orderBy (asc (hyperdataDocument_Publication_date . node_hyperdata)) $ selectNodesWith' parentId typeId
limit' maybeLimit $ offset' maybeOffset
$ orderBy (asc _context_id)
$ selectContextsWith' parentId maybeContextType
selectContextsWith' :: HasDBid NodeType
=> ParentId -> Maybe NodeType -> Select ContextRead
selectContextsWith' parentId maybeContextType = proc () -> do
context' <- (proc () -> do
row@(Context _ _ typeId _ parentId' _ _ _) <- queryContextTable -< ()
restrict -< parentId' .== (pgNodeId parentId)
let typeId' = maybe 0 toDBid maybeContextType
restrict -< if typeId' > 0
then typeId .== (sqlInt4 (typeId' :: Int))
else (sqlBool True)
returnA -< row ) -< ()
returnA -< context'
------------------------------------------------------------------------
getDocumentsV3WithParentId :: HasDBid NodeType => NodeId -> Cmd err [Context HyperdataDocumentV3]
getDocumentsV3WithParentId n = runOpaQuery $ selectContextsWith' n (Just NodeDocument)
-- TODO: merge with getDocumentsWithParentId by having a class IsHyperdataDocument
getDocumentsWithParentId :: HasDBid NodeType => NodeId -> Cmd err [Context HyperdataDocument]
getDocumentsWithParentId n = runOpaQuery $ selectContextsWith' n (Just NodeDocument)
------------------------------------------------------------------------
selectContextsWithParentID :: NodeId -> Select ContextRead
selectContextsWithParentID n = proc () -> do
row@(Context _ _ _ _ parent_id _ _ _) <- queryContextTable -< ()
restrict -< parent_id .== (pgNodeId n)
returnA -< row
------------------------------------------------------------------------
-- | Example of use:
-- runCmdReplEasy (getNodesWithType NodeList (Proxy :: Proxy HyperdataList))
getContextsWithType :: (HasNodeError err, JSONB a, HasDBid NodeType) => NodeType -> proxy a -> Cmd err [Context a]
getContextsWithType nt _ = runOpaQuery $ selectContextsWithType nt
where
selectContextsWithType :: HasDBid NodeType
=> NodeType -> Select ContextRead
selectContextsWithType nt' = proc () -> do
row@(Context _ _ tn _ _ _ _ _) <- queryContextTable -< ()
restrict -< tn .== (sqlInt4 $ toDBid nt')
returnA -< row
getContextsIdWithType :: (HasNodeError err, HasDBid NodeType) => NodeType -> Cmd err [ContextId]
getContextsIdWithType nt = do
ns <- runOpaQuery $ selectContextsIdWithType nt
pure (map NodeId ns)
selectContextsIdWithType :: HasDBid NodeType
=> NodeType -> Select (Column SqlInt4)
selectContextsIdWithType nt = proc () -> do
row@(Context _ _ tn _ _ _ _ _) <- queryContextTable -< ()
restrict -< tn .== (sqlInt4 $ toDBid nt)
returnA -< _context_id row
------------------------------------------------------------------------
{-|
Module : Gargantext.Database.Schema.NodeNodeNgrams
Description : TODO: remove this module and table in database
Copyright : (c) CNRS, 2017-Present
License : AGPL + CECILL v3
Maintainer : team@gargantext.org
Stability : experimental
Portability : POSIX
-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE Arrows #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE TemplateHaskell #-}
module Gargantext.Database.Query.Table.ContextNodeNgrams
( module Gargantext.Database.Schema.ContextNodeNgrams
, queryContextNodeNgramsTable
, insertContextNodeNgrams
)
where
import Gargantext.Database.Admin.Types.Node (pgNodeId, pgContextId)
import Gargantext.Database.Prelude (Cmd, mkCmd)
import Gargantext.Database.Schema.Ngrams (pgNgramsTypeId)
import Gargantext.Database.Schema.ContextNodeNgrams
import Gargantext.Database.Schema.Prelude
import Prelude
-- | Select the whole context_node_ngrams table (no filtering); building
-- block for more specific queries.
queryContextNodeNgramsTable :: Query ContextNodeNgramsRead
queryContextNodeNgramsTable = selectTable contextNodeNgramsTable
-- | Insert utils
-- | Insert the given rows into context_node_ngrams, converting each field
-- to its Opaleye column type first; returns the number of inserted rows.
insertContextNodeNgrams :: [ContextNodeNgrams] -> Cmd err Int
insertContextNodeNgrams rows = insertContextNodeNgramsW (map toWrite rows)
  where
    -- Lift every plain Haskell field into its SQL column representation.
    toWrite (ContextNodeNgrams c n ng nt w) =
      ContextNodeNgrams (pgContextId c)
                        (pgNodeId n)
                        (sqlInt4 ng)
                        (pgNgramsTypeId nt)
                        (sqlDouble w)
-- | Run the actual INSERT for already-converted rows. Conflicting rows are
-- silently skipped ('DoNothing'); the count of inserted rows is returned.
insertContextNodeNgramsW :: [ContextNodeNgramsWrite] -> Cmd err Int
insertContextNodeNgramsW rows = mkCmd run
  where
    run conn = fromIntegral <$> runInsert_ conn insertQuery
    insertQuery = Insert { iTable      = contextNodeNgramsTable
                         , iRows       = rows
                         , iReturning  = rCount
                         , iOnConflict = Just DoNothing
                         }
...@@ -15,36 +15,37 @@ Portability : POSIX ...@@ -15,36 +15,37 @@ Portability : POSIX
{-# LANGUAGE FunctionalDependencies #-} {-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TemplateHaskell #-}
module Gargantext.Database.Query.Table.NodeNodeNgrams2 module Gargantext.Database.Query.Table.ContextNodeNgrams2
( module Gargantext.Database.Schema.NodeNodeNgrams2 ( module Gargantext.Database.Schema.ContextNodeNgrams2
, insertNodeNodeNgrams2 , insertContextNodeNgrams2
, queryContextNodeNgrams2Table
) )
where where
import Gargantext.Database.Schema.Prelude import Gargantext.Database.Schema.Prelude
import Gargantext.Database.Schema.NodeNodeNgrams2 import Gargantext.Database.Schema.ContextNodeNgrams2
import Gargantext.Database.Admin.Types.Node (pgNodeId) import Gargantext.Database.Admin.Types.Node (pgNodeId)
import Gargantext.Database.Prelude (Cmd, mkCmd) import Gargantext.Database.Prelude (Cmd, mkCmd)
import Prelude import Prelude
_queryNodeNodeNgrams2Table :: Query NodeNodeNgrams2Read queryContextNodeNgrams2Table :: Query ContextNodeNgrams2Read
_queryNodeNodeNgrams2Table = selectTable nodeNodeNgrams2Table queryContextNodeNgrams2Table = selectTable contextNodeNgrams2Table
-- | Insert utils -- | Insert utils
insertNodeNodeNgrams2 :: [NodeNodeNgrams2] -> Cmd err Int insertContextNodeNgrams2 :: [ContextNodeNgrams2] -> Cmd err Int
insertNodeNodeNgrams2 = insertNodeNodeNgrams2W insertContextNodeNgrams2 = insertContextNodeNgrams2W
. map (\(NodeNodeNgrams2 n1 n2 w) -> . map (\(ContextNodeNgrams2 n1 n2 w) ->
NodeNodeNgrams2 (pgNodeId n1) ContextNodeNgrams2 (pgNodeId n1)
(sqlInt4 n2) (sqlInt4 n2)
(pgDouble w) (sqlDouble w)
) )
insertNodeNodeNgrams2W :: [NodeNodeNgrams2Write] -> Cmd err Int insertContextNodeNgrams2W :: [ContextNodeNgrams2Write] -> Cmd err Int
insertNodeNodeNgrams2W nnnw = insertContextNodeNgrams2W nnnw =
mkCmd $ \c -> fromIntegral <$> runInsert_ c insertNothing mkCmd $ \c -> fromIntegral <$> runInsert_ c insertNothing
where where
insertNothing = Insert { iTable = nodeNodeNgrams2Table insertNothing = Insert { iTable = contextNodeNgrams2Table
, iRows = nnnw , iRows = nnnw
, iReturning = rCount , iReturning = rCount
, iOnConflict = (Just DoNothing) , iOnConflict = (Just DoNothing)
......
...@@ -22,39 +22,44 @@ module Gargantext.Database.Query.Table.Ngrams ...@@ -22,39 +22,44 @@ module Gargantext.Database.Query.Table.Ngrams
where where
import Control.Lens ((^.)) import Control.Lens ((^.))
import Data.HashMap.Strict (HashMap)
import Data.ByteString.Internal (ByteString) import Data.ByteString.Internal (ByteString)
import Data.HashMap.Strict (HashMap)
import Data.Text (Text) import Data.Text (Text)
import qualified Database.PostgreSQL.Simple as PGS
import qualified Data.List as List
import qualified Data.HashMap.Strict as HashMap import qualified Data.HashMap.Strict as HashMap
import qualified Data.List as List
import qualified Database.PostgreSQL.Simple as PGS
import Gargantext.Core.Types import Gargantext.Core.Types
import Gargantext.Database.Prelude (runOpaQuery, Cmd) import Gargantext.Database.Prelude (runOpaQuery, Cmd, formatPGSQuery, runPGSQuery)
import Gargantext.Database.Prelude (runPGSQuery, formatPGSQuery) import Gargantext.Database.Query.Join (leftJoin3)
import Gargantext.Database.Query.Table.NodeNodeNgrams import Gargantext.Database.Query.Table.ContextNodeNgrams2
import Gargantext.Database.Schema.Ngrams import Gargantext.Database.Schema.Ngrams
import Gargantext.Database.Schema.NodeNgrams
import Gargantext.Database.Query.Table.NodeNgrams (queryNodeNgramsTable)
import Gargantext.Database.Schema.Prelude import Gargantext.Database.Schema.Prelude
import Gargantext.Database.Types import Gargantext.Database.Types
import Gargantext.Prelude import Gargantext.Prelude
queryNgramsTable :: Query NgramsRead queryNgramsTable :: Select NgramsRead
queryNgramsTable = selectTable ngramsTable queryNgramsTable = selectTable ngramsTable
selectNgramsByDoc :: [ListId] -> DocId -> NgramsType -> Cmd err [Text] selectNgramsByDoc :: [ListId] -> DocId -> NgramsType -> Cmd err [Text]
selectNgramsByDoc lIds dId nt = runOpaQuery (query lIds dId nt) selectNgramsByDoc lIds dId nt = runOpaQuery (query lIds dId nt)
where where
join :: Query (NgramsRead, NodeNodeNgramsReadNull) join :: Select (NgramsRead, NodeNgramsRead, ContextNodeNgrams2Read)
join = leftJoin queryNgramsTable queryNodeNodeNgramsTable on1 join = leftJoin3 queryNgramsTable queryNodeNgramsTable queryContextNodeNgrams2Table on1 -- on2
where where
on1 (ng,nnng) = ng^.ngrams_id .== nnng^.nnng_ngrams_id on1 :: (NgramsRead, NodeNgramsRead, ContextNodeNgrams2Read) -> Column SqlBool
on1 (ng, nng, cnng) = (.&&)
query cIds' dId' nt' = proc () -> do (ng^.ngrams_id .== nng^.nng_ngrams_id)
(ng,nnng) <- join -< () (nng^.nng_id .== cnng^.cnng2_nodengrams_id)
restrict -< foldl (\b cId -> ((toNullable $ pgNodeId cId) .== nnng^.nnng_node1_id) .|| b) (pgBool True) cIds'
restrict -< (toNullable $ pgNodeId dId') .== nnng^.nnng_node2_id query lIds' dId' nt' = proc () -> do
restrict -< (toNullable $ pgNgramsType nt') .== nnng^.nnng_ngramsType (ng,nng,cnng) <- join -< ()
restrict -< foldl (\b lId -> ((pgNodeId lId) .== nng^.nng_node_id) .|| b) (sqlBool True) lIds'
restrict -< (pgNodeId dId') .== cnng^.cnng2_context_id
restrict -< (pgNgramsType nt') .== nng^.nng_ngrams_type
returnA -< ng^.ngrams_terms returnA -< ng^.ngrams_terms
......
This diff is collapsed.
...@@ -33,13 +33,13 @@ import Gargantext.Prelude ...@@ -33,13 +33,13 @@ import Gargantext.Prelude
--------------------------------------------------------------------------- ---------------------------------------------------------------------------
add :: ParentId -> [NodeId] -> Cmd err [Only Int] add :: CorpusId -> [ContextId] -> Cmd err [Only Int]
add pId ns = runPGSQuery queryAdd (Only $ Values fields inputData) add pId ns = runPGSQuery queryAdd (Only $ Values fields inputData)
where where
fields = map (\t-> QualifiedIdentifier Nothing t) inputSqlTypes fields = map (\t-> QualifiedIdentifier Nothing t) inputSqlTypes
inputData = prepare pId ns inputData = prepare pId ns
add_debug :: ParentId -> [NodeId] -> Cmd err ByteString add_debug :: CorpusId -> [ContextId] -> Cmd err ByteString
add_debug pId ns = formatPGSQuery queryAdd (Only $ Values fields inputData) add_debug pId ns = formatPGSQuery queryAdd (Only $ Values fields inputData)
where where
fields = map (\t-> QualifiedIdentifier Nothing t) inputSqlTypes fields = map (\t-> QualifiedIdentifier Nothing t) inputSqlTypes
...@@ -48,33 +48,34 @@ add_debug pId ns = formatPGSQuery queryAdd (Only $ Values fields inputData) ...@@ -48,33 +48,34 @@ add_debug pId ns = formatPGSQuery queryAdd (Only $ Values fields inputData)
-- | Input Tables: types of the tables -- | Input Tables: types of the tables
inputSqlTypes :: [Text] inputSqlTypes :: [Text]
inputSqlTypes = ["int4","int4","int4"] inputSqlTypes = ["int4","int4","int4","int4"]
-- | SQL query to add documents -- | SQL query to add documents
-- TODO return id of added documents only -- TODO return id of added documents only
queryAdd :: Query queryAdd :: Query
queryAdd = [sql| queryAdd = [sql|
WITH input_rows(node1_id,node2_id,category) AS (?) WITH input_rows(node_id,context_id,score,category) AS (?)
INSERT INTO nodes_nodes (node1_id, node2_id,category) INSERT INTO nodes_contexts (node_id, context_id,score,category)
SELECT * FROM input_rows SELECT * FROM input_rows
ON CONFLICT (node1_id, node2_id) DO NOTHING -- on unique index ON CONFLICT (node_id, context_id) DO NOTHING -- on unique index
RETURNING 1 RETURNING 1
; ;
|] |]
prepare :: ParentId -> [NodeId] -> [InputData] prepare :: ParentId -> [ContextId] -> [InputData]
prepare pId ns = map (\nId -> InputData pId nId) ns prepare pId ns = map (\cId -> InputData pId cId) ns
------------------------------------------------------------------------ ------------------------------------------------------------------------
-- * Main Types used -- * Main Types used
data InputData = InputData { inNode1_id :: NodeId data InputData = InputData { inNode_id :: NodeId
, inNode2_id :: NodeId , inContext_id :: ContextId
} deriving (Show, Generic, Typeable) } deriving (Show, Generic, Typeable)
instance ToRow InputData where instance ToRow InputData where
toRow inputData = [ toField (inNode1_id inputData) toRow inputData = [ toField (inNode_id inputData)
, toField (inNode2_id inputData) , toField (inContext_id inputData)
, toField (0 :: Int)
, toField (1 :: Int) , toField (1 :: Int)
] ]
...@@ -35,7 +35,7 @@ selectNodesWithUsername nt u = runOpaQuery (q u) ...@@ -35,7 +35,7 @@ selectNodesWithUsername nt u = runOpaQuery (q u)
restrict -< _node_typename n .== (sqlInt4 $ toDBid nt) restrict -< _node_typename n .== (sqlInt4 $ toDBid nt)
returnA -< _node_id n returnA -< _node_id n
join' :: Query (NodeRead, UserReadNull) join' :: Select (NodeRead, UserReadNull)
join' = leftJoin queryNodeTable queryUserTable on1 join' = leftJoin queryNodeTable queryUserTable on1
where where
on1 (n,us) = _node_user_id n .== user_id us on1 (n,us) = _node_user_id n .== user_id us
......
...@@ -37,7 +37,7 @@ updateHyperdataQuery i h = Update ...@@ -37,7 +37,7 @@ updateHyperdataQuery i h = Update
, uWhere = (\row -> _node_id row .== pgNodeId i ) , uWhere = (\row -> _node_id row .== pgNodeId i )
, uReturning = rCount , uReturning = rCount
} }
where h' = (pgJSONB $ cs $ encode $ h) where h' = (sqlJSONB $ cs $ encode $ h)
---------------------------------------------------------------------------------- ----------------------------------------------------------------------------------
updateNodesWithType :: ( HasNodeError err updateNodesWithType :: ( HasNodeError err
......
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment