Commit 93098b80 authored by Nicolas Pouillard
parents 5705c4d3 e6298f58
# Thanks to:
# https://vadosware.io/post/zero-to-continuous-integrated-testing-a-haskell-project-with-gitlab/
#
image: haskell:8

variables:
  STACK_ROOT: "${CI_PROJECT_DIR}/.stack"
  STACK_OPTS: "--system-ghc"

cache:
  paths:
    - .stack
    - .stack-work
    - target

#before_script:
#- apt-get update
#- apt-get install make xz-utils

stages:
  - build
  - test

build:
  stage: build
  script:
    - make setup
    - make build

#unit-test:
#  stage: test
#  script:
#    - make test-unit
#
#int-test:
#  stage: test
#  script:
#    - make test-int
#
#e2e-test:
#  stage: test
#  script:
#    - make test-e2e
#
# If you find yourself with a nonsensical build error when you know your project
# should be building just fine, this fragment should help:
#
#build:
#  stage: build
#  script:
#    # Clear out cache files
#    - rm -rf .stack
#    - rm -rf .stack-work
#    - stack setup --system-ghc
#    - stack install --local-bin-path target --system-ghc
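The build job above delegates to "make setup" and "make build"; judging from the commented-out fallback in this file, those targets are assumed to wrap stack roughly as follows (a sketch, not the project's actual Makefile):

  export STACK_ROOT="$PWD/.stack"   # same cache location as the CI variable above
  stack setup --system-ghc          # install or verify the compiler
  stack install --local-bin-path target --system-ghc   # build and copy binaries to ./target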
...@@ -19,23 +19,20 @@ Import a corpus binary.
module Main where

import System.Environment (getArgs)

import Gargantext.Prelude
import Gargantext.Database.Flow (getOrMkRoot, getOrMkRootWithCorpus)
import Gargantext.Database.Schema.Node (getOrMkList)
import Gargantext.Database.Utils (Cmd, )
import Gargantext.Database.Types.Node (CorpusId, RootId, HyperdataCorpus, ListId)
import Gargantext.Database.Schema.User (insertUsersDemo, UserId)
import Gargantext.API.Types (GargError)
import Gargantext.API.Node () -- instances
import Gargantext.API.Settings (withDevEnv, runCmdDev)
import Gargantext.Database.Config (userMaster, corpusMasterName)
import Gargantext.Database.Init (initTriggers)

main :: IO ()
main = do
  [iniPath] <- getArgs
...@@ -44,11 +41,21 @@ main = do
    createUsers = insertUsersDemo

  let
    mkRoots :: Cmd GargError [(UserId, RootId)]
    mkRoots = mapM getOrMkRoot ["gargantua", "user1", "user2"]
    -- TODO create all users roots

  let
    initMaster :: Cmd GargError (UserId, RootId, CorpusId, ListId)
    initMaster = do
      (masterUserId, masterRootId, masterCorpusId) <- getOrMkRootWithCorpus userMaster (Left corpusMasterName) (Nothing :: Maybe HyperdataCorpus)
      masterListId <- getOrMkList masterCorpusId masterUserId
      _ <- initTriggers masterListId
      pure (masterUserId, masterRootId, masterCorpusId, masterListId)

  withDevEnv iniPath $ \env -> do
    _ <- runCmdDev env createUsers
    _ <- runCmdDev env mkRoots
    x <- runCmdDev env initMaster
    putStrLn $ show x
    pure ()
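Once compiled, this initialisation program is run once against the database configured in gargantext.ini, along the lines of the following sketch (the executable name is a placeholder; the real target is declared in package.yaml):

  # hypothetical executable name, adjust to the target defined in package.yaml
  stack exec gargantext-init -- gargantext.ini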
#!/bin/bash
if git --version;
then
echo "git installed, ok"
else
sudo apt update && sudo apt install git
fi
sudo apt update
sudo apt install liblzma-dev libpcre3-dev libblas-dev liblapack-dev pkg-config libgsl-dev libbz2-dev postgresql postgresql-server-dev-9.6 nginx libigraph-dev
#echo "Which user?"
#read USER
#sudo adduser --disabled-password --gecos "" $USER
#sudo su $USER
curl -sSL https://get.haskellstack.org/ | sh
stack update
stack upgrade
git clone https://gitlab.iscpif.fr/gargantext/haskell-gargantext.git
cd haskell-gargantext
git clone https://gitlab.iscpif.fr/gargantext/purescript-gargantext
mkdir deps
cd deps
git clone https://gitlab.iscpif.fr/gargantext/clustering-louvain.git
git clone https://github.com/np/servant-job.git
git clone https://github.com/np/patches-map
git clone https://gitlab.com/npouillard/patches-class.git
git clone https://github.com/delanoe/haskell-opaleye
git clone -b next --single-branch https://github.com/delanoe/hsparql
cd ..
stack setup
stack build
stack install
# Specific to our servers
### Configure timezone and locale ###################################
echo "########### LOCALES & TZ #################"
echo "Europe/Paris" > /etc/timezone
dpkg-reconfigure --frontend=noninteractive tzdata
#ENV TZ "Europe/Paris"
sed -i -e 's/# en_GB.UTF-8 UTF-8/en_GB.UTF-8 UTF-8/' /etc/locale.gen && \
sed -i -e 's/# fr_FR.UTF-8 UTF-8/fr_FR.UTF-8 UTF-8/' /etc/locale.gen && \
locale-gen && \
update-locale LANG=fr_FR.UTF-8 && \
update-locale LANGUAGE=fr_FR.UTF-8 && \
update-locale LC_ALL=fr_FR.UTF-8
################################################################
# Database configuration
# CREATE USER gargantua WITH PASSWORD $(grep DB_PASS gargantext.ini)
# GRANT ALL PRIVILEGES ON DATABASE gargandbV4 to gargantua
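# Illustrative sketch of the psql commands behind the two comments above
# (run as the postgres user; replace <DB_PASS> with the DB_PASS value from gargantext.ini):
#   sudo -u postgres psql -c "CREATE USER gargantua WITH PASSWORD '<DB_PASS>'"
#   sudo -u postgres psql -c "GRANT ALL PRIVILEGES ON DATABASE \"gargandbV4\" TO gargantua"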
#######################################################################
## POSTGRESQL DATA (as ROOT)
#######################################################################
sed -iP "s%^data_directory.*%data_directory = \'\/srv\/gargandata\'%" /etc/postgresql/9.6/main/postgresql.conf
echo "host all all 0.0.0.0/0 md5" >> /etc/postgresql/9.6/main/pg_hba.conf
echo "listen_addresses='*'" >> /etc/postgresql/9.6/main/postgresql.conf
...@@ -14,5 +14,5 @@ sudo apt install liblzma-dev libpcre3-dev libblas-dev liblapack-dev pkg-config l
# Phylo management
sudo apt install graphviz

sudo apt install postgresql-server-dev-11
#!/bin/bash
git clone https://gitlab.iscpif.fr/gargantext/clustering-louvain-cplusplus.git
cd clustering-louvain-cplusplus
./install
cd ..
sudo apt install default-jdk
wget https://dl.gargantext.org/coreNLP.tar.bz2
tar xvjf coreNLP.tar.bz2
# ./startServer.sh
##
# You should look at the following URL's in order to grasp a solid understanding
# of Nginx configuration files in order to fully unleash the power of Nginx.
# http://wiki.nginx.org/Pitfalls
# http://wiki.nginx.org/QuickStart
# http://wiki.nginx.org/Configuration
#
# Generally, you will want to move this file somewhere, and start with a clean
# file but keep this around for reference. Or just disable in sites-enabled.
#
# Please see /usr/share/doc/nginx-doc/examples/ for more detailed examples.
##
server {
if ($host = dev.gargantext.org) {
return 301 https://$host$request_uri;
} # managed by Certbot
listen 80;
server_name dev.gargantext.org;
add_header Cache-Control "no-cache";
location '/.well-known/acme-challenge' {
root /var/www/gargantext;
}
# Always redirect to https
return 301 https://dev.gargantext.org$request_uri;
}
server {
listen 443;
listen [::]:443 ssl;
server_name dev.gargantext.org;
# Some options configurations:
# https://github.com/h5bp/server-configs-nginx/blob/master/h5bp/location/expires.conf
add_header Cache-Control "no-cache";
# SSL configuration
#
# listen 443 ssl default_server;
# listen [::]:443 ssl default_server;
ssl on;
ssl_certificate /etc/letsencrypt/live/dev.gargantext.org/fullchain.pem; # managed by Certbot
ssl_certificate_key /etc/letsencrypt/live/dev.gargantext.org/privkey.pem; # managed by Certbot
# Note: You should disable gzip for SSL traffic.
# See: https://bugs.debian.org/773332
#
# Read up on ssl_ciphers to ensure a secure configuration.
# See: https://bugs.debian.org/765782
#
# Self signed certs generated by the ssl-cert package
# Don't use them in a production server!
#
# include snippets/snakeoil.conf;
client_max_body_size 800M;
client_body_timeout 12;
client_header_timeout 12;
keepalive_timeout 15;
send_timeout 10;
root /var/www/html;
index index.html;
#add_header Access-Control-Allow-Origin $http_origin always;
# Add index.php to the list if you are using PHP
#index index.html index.htm index.nginx-debian.html;
# CORS config borrowed from: https://gist.github.com/pauloricardomg/7084524
# NP: not sure we need CORS yet
#
if ($http_origin ~* (^https?://(127.0.0.1|localhost|dev\.gargantext\.com))) {
set $cors "1";
}
#
# Cross-Origin Resource Sharing
if ($request_method = "OPTIONS") {
set $cors "${cors}o";
}
# SSL CERT renewal
location '/.well-known/acme-challenge' {
alias /var/www/gargantext/.well-known/acme-challenge ;
}
location /api {
# limit_except OPTIONS {
# auth_basic "Gargantext's Development Version";
# auth_basic_user_file /etc/nginx/haskell_gargantext.htpasswd;
# }
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $http_host;
proxy_redirect off;
proxy_pass http://127.0.0.1:8008;
}
location / {
# https://stackoverflow.com/a/48708812
limit_except OPTIONS {
auth_basic "Gargantext's Development Version";
auth_basic_user_file /etc/nginx/haskell_gargantext.htpasswd;
}
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $http_host;
proxy_redirect off;
proxy_pass http://127.0.0.1:8008;
}
#access_log off;
access_log /var/log/nginx/access.log;
error_log /var/log/nginx/error.log;
}
sudo apt install apache2-utils
htpasswd -c /etc/nginx/haskell_gargantext.htpasswd username1
sudo apt-get install certbot python-certbot-nginx
sudo certbot --nginx
#!/bin/bash

# sudo su postgres
# postgresql://$USER:$PW@localhost/$DB

PW="C8kdcUrAQy66U"
DB="gargandbV5"
USER="gargantua"

psql -c "CREATE USER \"${USER}\""
psql -c "ALTER USER \"${USER}\" with PASSWORD '${PW}'"
psql -c "DROP DATABASE IF EXISTS \"${DB}\""
createdb "${DB}"
psql "${DB}" < schema.sql
psql -c "ALTER DATABASE \"${DB}\" OWNER to \"${USER}\""
...
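A quick way to verify the result, using the connection-string form from the comment above (assumes password authentication is enabled for local connections, as configured earlier in pg_hba.conf):

  psql "postgresql://gargantua:<password>@localhost/gargandbV5" -c '\dt'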
CREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog;
CREATE EXTENSION IF NOT EXISTS tsm_system_rows;
COMMENT ON EXTENSION plpgsql IS 'PL/pgSQL procedural language';

CREATE TABLE public.auth_user (
    id SERIAL,
    password character varying(128) NOT NULL,
...@@ -23,7 +19,6 @@ CREATE TABLE public.auth_user (
ALTER TABLE public.auth_user OWNER TO gargantua;

-- TODO add publication_date
-- TODO typename -> type_id
CREATE TABLE public.nodes (
...@@ -40,7 +35,6 @@ CREATE TABLE public.nodes (
);
ALTER TABLE public.nodes OWNER TO gargantua;

CREATE TABLE public.ngrams (
    id SERIAL,
    terms character varying(255),
...@@ -50,22 +44,34 @@ CREATE TABLE public.ngrams (
ALTER TABLE public.ngrams OWNER TO gargantua;

--------------------------------------------------------------
CREATE TABLE public.node_ngrams (
    id SERIAL,
    node_id integer NOT NULL,
    node_subtype integer,
    ngrams_id integer NOT NULL,
    ngrams_type integer, -- change to ngrams_field? (no for pedagogic reason)
    ngrams_field integer,
    ngrams_tag integer,
    ngrams_class integer,
    weight double precision,
    PRIMARY KEY (id),
    FOREIGN KEY (node_id) REFERENCES public.nodes(id) ON DELETE CASCADE,
    FOREIGN KEY (ngrams_id) REFERENCES public.ngrams(id) ON DELETE CASCADE
);
ALTER TABLE public.node_ngrams OWNER TO gargantua;

CREATE TABLE public.node_ngrams_ngrams (
    node_id integer NOT NULL,
    node_ngrams1_id integer NOT NULL,
    node_ngrams2_id integer NOT NULL,
    FOREIGN KEY (node_id) REFERENCES public.nodes(id) ON DELETE CASCADE,
    FOREIGN KEY (node_ngrams1_id) REFERENCES public.node_ngrams(id) ON DELETE CASCADE,
    FOREIGN KEY (node_ngrams2_id) REFERENCES public.node_ngrams(id) ON DELETE CASCADE,
    PRIMARY KEY (node_id, node_ngrams1_id, node_ngrams2_id)
);
ALTER TABLE public.node_ngrams_ngrams OWNER TO gargantua;
--------------------------------------------------------------
--------------------------------------------------------------
--
--
...@@ -78,8 +84,8 @@ ALTER TABLE public.ngrams OWNER TO gargantua;
--);
--
--ALTER TABLE public.nodes_ngrams_ngrams OWNER TO gargantua;
---------------------------------------------------------------
CREATE TABLE public.nodes_nodes (
    node1_id integer NOT NULL REFERENCES public.nodes(id) ON DELETE CASCADE,
    node2_id integer NOT NULL REFERENCES public.nodes(id) ON DELETE CASCADE,
...@@ -92,13 +98,15 @@ ALTER TABLE public.nodes_nodes OWNER TO gargantua;
---------------------------------------------------------------
-- TODO should reference "id" of nodes_nodes (instead of node1_id, node2_id)
CREATE TABLE public.node_node_ngrams (
    node1_id INTEGER NOT NULL REFERENCES public.nodes (id) ON DELETE CASCADE,
    -- here id to node_ngrams
    node2_id INTEGER NOT NULL REFERENCES public.nodes (id) ON DELETE CASCADE,
    ngrams_id INTEGER NOT NULL REFERENCES public.ngrams (id) ON DELETE CASCADE,
    ngrams_type INTEGER,
    --ngrams_tag INTEGER,
    --ngrams_class INTEGER,
    weight double precision,
    PRIMARY KEY (node1_id, node2_id, ngrams_id, ngrams_type)
);
ALTER TABLE public.node_node_ngrams OWNER TO gargantua;
--------------------------------------------------------------
...@@ -123,7 +131,6 @@ CREATE TABLE public.rights (
);
ALTER TABLE public.rights OWNER TO gargantua;

------------------------------------------------------------
-- INDEXES
...@@ -141,6 +148,7 @@ CREATE UNIQUE INDEX ON public.nodes USING btree (typename, parent_id, ((hyperdat
CREATE UNIQUE INDEX ON public.ngrams (terms); -- TEST GIN
CREATE INDEX ON public.ngrams USING btree (id, terms);

CREATE UNIQUE INDEX ON public.node_ngrams USING btree (node_id, node_subtype, ngrams_id);

CREATE INDEX        ON public.nodes_nodes USING btree (node1_id, node2_id, category);
CREATE UNIQUE INDEX ON public.nodes_nodes USING btree (node1_id, node2_id);
...@@ -149,36 +157,10 @@ CREATE UNIQUE INDEX ON public.node_node_ngrams USING btree (node1_id, node2_id,
CREATE INDEX ON public.node_node_ngrams USING btree (node1_id, node2_id);
CREATE INDEX ON public.node_node_ngrams USING btree (ngrams_id, node2_id);
-- TRIGGERS
-- TODO user haskell-postgresql-simple to create this function
-- with rights typename
CREATE OR REPLACE FUNCTION public.search_update()
RETURNS trigger AS $$
begin
IF new.typename = 4 AND new.hyperdata @> '{"language_iso2":"EN"}' THEN
new.search := to_tsvector( 'english' , (new.hyperdata ->> 'title') || ' ' || (new.hyperdata ->> 'abstract'));
ELSIF new.typename = 4 AND new.hyperdata @> '{"language_iso2":"FR"}' THEN
new.search := to_tsvector( 'french' , (new.hyperdata ->> 'title') || ' ' || (new.hyperdata ->> 'abstract'));
ELSIF new.typename = 41 THEN
new.search := to_tsvector( 'french' , (new.hyperdata ->> 'prenom')
|| ' ' || (new.hyperdata ->> 'nom')
|| ' ' || (new.hyperdata ->> 'fonction')
);
ELSE
new.search := to_tsvector( 'english' , new.name);
END IF;
return new;
end
$$ LANGUAGE plpgsql;
ALTER FUNCTION public.search_update() OWNER TO gargantua;
CREATE TRIGGER search_update_trigger BEFORE INSERT OR UPDATE ON nodes FOR EACH ROW EXECUTE PROCEDURE search_update();
------------------------------------------------------------------------
-- Ngrams Full DB Extraction Optim
-- TODO remove hard parameter and move elsewhere
CREATE OR REPLACE function node_pos(int, int) returns bigint
   AS 'SELECT count(id) from nodes
       WHERE id < $1
...@@ -189,8 +171,3 @@ CREATE OR REPLACE function node_pos(int, int) returns bigint
--drop index node_by_pos;
create index node_by_pos on nodes using btree(node_pos(id,typename));
-- Initialize index with already existing data
UPDATE nodes SET hyperdata = hyperdata;
name: gargantext
version: '0.0.0.4'
synopsis: Search, map, share
description: Please see README.md
category: Data
...@@ -39,6 +39,8 @@ library:
  - Gargantext.Core.Types.Main
  - Gargantext.Core.Utils.Prefix
  - Gargantext.Database
  - Gargantext.Database.Init
  - Gargantext.Database.Config
  - Gargantext.Database.Flow
  - Gargantext.Database.Schema.Node
  - Gargantext.Database.Tree
...
...@@ -351,7 +351,7 @@ serverPrivateGargAPI' (AuthenticatedUser (NodeId uid))
  :<|> withAccess (Proxy :: Proxy TableNgramsApi) Proxy uid <$> PathNode <*> apiNgramsTableDoc
  :<|> count -- TODO: undefined
  :<|> withAccess (Proxy :: Proxy SearchPairsAPI) Proxy uid <$> PathNode <*> searchPairs -- TODO: move elsewhere
  :<|> withAccess (Proxy :: Proxy GraphAPI)       Proxy uid <$> PathNode <*> graphAPI uid -- TODO: mock
  :<|> withAccess (Proxy :: Proxy TreeAPI)        Proxy uid <$> PathNode <*> treeAPI
  :<|> addToCorpus
  :<|> New.api -- TODO-SECURITY
...
...@@ -47,7 +47,7 @@ module Gargantext.API.Ngrams
  , NgramsStatePatch
  , NgramsTablePatch
  , NgramsElement(..)
  , mkNgramsElement
  , mergeNgramsElement
...@@ -60,6 +60,7 @@ module Gargantext.API.Ngrams
  , Repo(..)
  , r_version
  , r_state
  , r_history
  , NgramsRepo
  , NgramsRepoElement(..)
  , saveRepo
...@@ -83,6 +84,10 @@ module Gargantext.API.Ngrams
  , getNgramsTableMap
  , tableNgramsPull
  , tableNgramsPut

  , Versioned(..)
  , currentVersion
  , listNgramsChangedSince
  )
  where
...@@ -126,7 +131,7 @@ import GHC.Generics (Generic)
import Gargantext.Core.Utils.Prefix (unPrefix, unPrefixSwagger)
-- import Gargantext.Database.Schema.Ngrams (NgramsTypeId, ngramsTypeId, NgramsTableData(..))
import Gargantext.Database.Config (userMaster)
import Gargantext.Database.Metrics.NgramsByNode (getOccByNgramsOnlyFast')
import Gargantext.Database.Schema.Ngrams (NgramsType)
import Gargantext.Database.Types.Node (NodeType(..))
import Gargantext.Database.Utils (fromField', HasConnection)
...@@ -1015,7 +1020,8 @@ getTableNgrams _nType nId tabType listId limit_ offset
    setScores True table = do
      let ngrams_terms = (table ^.. each . ne_ngrams)
      t1 <- getTime'
      occurrences <- getOccByNgramsOnlyFast' nId
                                             listId
                                             ngramsType
                                             ngrams_terms
      t2 <- getTime'
...@@ -1053,7 +1059,7 @@ getTableNgrams _nType nId tabType listId limit_ offset
            % " map1=" % timeSpecs
            % " map2=" % timeSpecs
            % " map3=" % timeSpecs
            % " sql="  % (if nSco then "map2" else "map3")
            % "\n"
            ) t0 t3 t0 t1 t1 t2 t2 t3
      pure tableMap3
...@@ -1149,8 +1155,6 @@ getTableNgramsDoc dId tabType listId limit_ offset listType minSize maxSize orde
apiNgramsTableCorpus :: ( RepoCmdM env err m
                        , HasNodeError err
                        , HasInvalidError err
...
...@@ -38,7 +38,7 @@ Node API
module Gargantext.API.Node
  where

import Control.Lens ((.~), (?~), (^.))
import Control.Monad ((>>), forM)
import Control.Monad.IO.Class (liftIO)
import Data.Aeson (FromJSON, ToJSON)
...@@ -55,11 +55,12 @@ import Gargantext.API.Ngrams.NTree (MyTree)
import Gargantext.API.Search (SearchDocsAPI, searchDocs)
import Gargantext.API.Table
import Gargantext.API.Types
import Gargantext.Core.Types (NodeTableResult)
import Gargantext.Core.Types.Main (Tree, NodeTree, ListType)
import Gargantext.Database.Config (nodeTypeId)
import Gargantext.Database.Facet (FacetDoc, OrderBy(..))
import Gargantext.Database.Node.Children (getChildren)
import Gargantext.Database.Schema.Node ( getNodesWithParentId, getNodeWith, getNode, deleteNode, deleteNodes, mkNodeWithParent, JSONB, HasNodeError(..))
import Gargantext.Database.Schema.NodeNode (nodeNodesCategory)
import Gargantext.Database.Tree (treeDB)
import Gargantext.Database.Types.Node
...@@ -142,7 +143,7 @@ type NodeAPI a = Get '[JSON] (Node a)
             :<|> "pie"   :> PieApi
             :<|> "tree"  :> TreeApi
             :<|> "phylo" :> PhyloAPI
             :<|> "add"   :> NodeAddAPI

-- TODO-ACCESS: check userId CanRenameNode nodeId
-- TODO-EVENTS: NodeRenamed RenameNode or re-use some more general NodeEdited...
...@@ -158,7 +159,8 @@ type ChildrenApi a = Summary " Summary children"
                 :> QueryParam "type"   NodeType
                 :> QueryParam "offset" Int
                 :> QueryParam "limit"  Int
                 -- :> Get '[JSON] [Node a]
                 :> Get '[JSON] (NodeTableResult a)

------------------------------------------------------------------------
type NodeNodeAPI a = Get '[JSON] (Node a)
...@@ -167,7 +169,7 @@ nodeNodeAPI :: forall proxy a. (JSONB a, ToJSON a) => proxy a -> UserId -> Corpu
nodeNodeAPI p uId cId nId = withAccess (Proxy :: Proxy (NodeNodeAPI a)) Proxy uId (PathNodeNode cId nId) nodeNodeAPI'
  where
    nodeNodeAPI' :: GargServer (NodeNodeAPI a)
    nodeNodeAPI' = getNodeWith nId p
...@@ -177,7 +179,7 @@ nodeAPI :: forall proxy a. (JSONB a, ToJSON a) => proxy a -> UserId -> NodeId ->
nodeAPI p uId id = withAccess (Proxy :: Proxy (NodeAPI a)) Proxy uId (PathNode id) nodeAPI'
  where
    nodeAPI' :: GargServer (NodeAPI a)
    nodeAPI' = getNodeWith id p
          :<|> rename id
          :<|> postNode uId id
          :<|> putNode id
...@@ -199,10 +201,11 @@ nodeAPI p uId id = withAccess (Proxy :: Proxy (NodeAPI a)) Proxy uId (PathNode i
          :<|> getPie id
          :<|> getTree id
          :<|> phyloAPI id uId
          :<|> nodeAddAPI id
          -- :<|> postUpload id

    deleteNodeApi id' = do
      node <- getNode id'
      if _node_typename node == nodeTypeId NodeUser
         then panic "not allowed" -- TODO add proper Right Management Type
         else deleteNode id'
...@@ -333,7 +336,10 @@ rename :: NodeId -> RenameNode -> Cmd err [Int]
rename nId (RenameNode name') = U.update (U.Rename nId name')

postNode :: HasNodeError err => UserId -> NodeId -> PostNode -> Cmd err [NodeId]
postNode uId pId (PostNode nodeName nt) = do
  nodeUser <- getNodeWith (NodeId uId) HyperdataUser
  let uId' = nodeUser ^. node_userId
  mkNodeWithParent nt (Just pId) uId' nodeName

putNode :: NodeId -> Cmd err Int
putNode = undefined -- TODO
...@@ -375,6 +381,12 @@ instance (ToParamSchema a, HasSwagger sub) =>
      & in_         .~ ParamFormData
      & paramSchema .~ toParamSchema (Proxy :: Proxy a)

type NodeAddAPI = "file" :> Summary "Node add API"
                         :> UploadAPI

nodeAddAPI :: NodeId -> GargServer NodeAddAPI
nodeAddAPI id = postUpload id

type UploadAPI = Summary "Upload file(s) to a corpus"
                 :> MultipartForm Mem (MultipartData Mem)
                 :> QueryParam "fileType" FileType
...
...@@ -178,14 +178,18 @@ data MockEnv = MockEnv
makeLenses ''MockEnv

-- | TODO add this path in Settings
repoDir :: FilePath
repoDir = "repos"

repoSnapshot :: FilePath
repoSnapshot = repoDir <> "/repo.json"

-- | TODO add hard coded file in Settings
-- This assumes we own the lock on repoSnapshot.
repoSaverAction :: ToJSON a => a -> IO ()
repoSaverAction a = do
  withTempFile "repos" "tmp-repo.json" $ \fp h -> do
    -- printDebug "repoSaverAction" fp
    L.hPut h $ encode a
    hClose h
...@@ -210,6 +214,8 @@ mkRepoSaver repo_var = mkDebounce settings
readRepoEnv :: IO RepoEnv
readRepoEnv = do
  -- Does file exist ? :: Bool
  _repoDir <- createDirectoryIfMissing True repoDir

  repoFile <- doesFileExist repoSnapshot

  -- Is file not empty ? :: Bool
...@@ -230,7 +236,7 @@ readRepoEnv = do
          pure repo
        else
          pure initRepo
  -- TODO save in DB here
  saver <- mkRepoSaver mvar
  pure $ RepoEnv { _renv_var = mvar, _renv_saver = saver, _renv_lock = lock }
...
...@@ -44,7 +44,7 @@ import Data.Swagger
import Data.Text (Text())
import GHC.Generics (Generic)
import Gargantext.API.Ngrams (TabType(..))
import Gargantext.Core.Types (Offset, Limit, TableResult(..))
import Gargantext.Core.Utils.Prefix (unPrefix, unPrefixSwagger)
import Gargantext.Database.Facet (FacetDoc , runViewDocuments, OrderBy(..))
import Gargantext.Database.Learn (FavOrTrash(..), moreLike)
...@@ -60,7 +60,7 @@ import Test.QuickCheck.Arbitrary (Arbitrary, arbitrary)
type TableApi = Summary " Table API"
              :> ReqBody '[JSON] TableQuery
              :> Post '[JSON] FacetTableResult

data TableQuery = TableQuery
  { tq_offset :: Int
...@@ -70,17 +70,7 @@ data TableQuery = TableQuery
  , tq_query :: Text
  } deriving (Generic)

type FacetTableResult = TableResult FacetDoc

$(deriveJSON (unPrefix "tq_") ''TableQuery)
...@@ -91,7 +81,7 @@ instance Arbitrary TableQuery where
  arbitrary = elements [TableQuery 0 10 DateAsc Docs "electrodes"]

tableApi :: NodeId -> TableQuery -> Cmd err FacetTableResult
tableApi cId (TableQuery o l order ft "") = getTable cId (Just ft) (Just o) (Just l) (Just order)
tableApi cId (TableQuery o l order ft q) = case ft of
  Docs -> searchInCorpus' cId False [q] (Just o) (Just l) (Just order)
...@@ -104,20 +94,21 @@ searchInCorpus' :: CorpusId
                -> Maybe Offset
                -> Maybe Limit
                -> Maybe OrderBy
                -> Cmd err FacetTableResult
searchInCorpus' cId t q o l order = do
  docs         <- searchInCorpus cId t q o l order
  countAllDocs <- searchCountInCorpus cId t q
  pure $ TableResult { tr_docs = docs, tr_count = countAllDocs }

getTable :: NodeId -> Maybe TabType
         -> Maybe Offset -> Maybe Limit
         -> Maybe OrderBy -> Cmd err FacetTableResult
getTable cId ft o l order = do
  docs <- getTable' cId ft o l order
  -- TODO: Rewrite to use runCountOpaQuery and avoid (length allDocs)
  allDocs <- getTable' cId ft Nothing Nothing Nothing
  pure $ TableResult { tr_docs = docs, tr_count = length allDocs }

getTable' :: NodeId -> Maybe TabType
          -> Maybe Offset -> Maybe Limit
...
...@@ -46,8 +46,6 @@ class HasText h
  where
    hasText :: h -> [Text]

instance UniqId HyperdataDocument
  where
    uniqId = hyperdataDocument_uniqId
...@@ -56,4 +54,3 @@ instance UniqId HyperdataContact
  where
    uniqId = hc_uniqId
...@@ -14,6 +14,7 @@ commentary with @some markup@.
------------------------------------------------------------------------
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE DeriveGeneric     #-}
{-# LANGUAGE TemplateHaskell   #-}

module Gargantext.Core.Types ( module Gargantext.Core.Types.Main
                             , module Gargantext.Database.Types.Node
...@@ -22,21 +23,28 @@ module Gargantext.Core.Types ( module Gargantext.Core.Types.Main
                             , Label, Stems
                             , HasInvalidError(..), assertValid
                             , Name
                             , TableResult(..)
                             , NodeTableResult
                             ) where

import Control.Lens (Prism', (#))
import Control.Monad.Error.Class (MonadError, throwError)
import Data.Aeson
import Data.Aeson.TH (deriveJSON)
import Data.Monoid
import Data.Semigroup
import Data.Set (Set, empty)
import Data.Swagger (ToSchema(..), genericDeclareNamedSchema)
--import qualified Data.Set as S
import Data.Text (Text, unpack)
import Data.Validity
import Test.QuickCheck.Arbitrary (Arbitrary, arbitrary)

import Gargantext.Core.Types.Main
import Gargantext.Core.Utils.Prefix (unPrefix, unPrefixSwagger)
import Gargantext.Database.Types.Node
import Gargantext.Prelude
...@@ -135,3 +143,18 @@ assertValid :: (MonadError e m, HasInvalidError e) => Validation -> m ()
assertValid v = when (not $ validationIsValid v) $ throwError $ _InvalidError # v
-- assertValid :: MonadIO m => Validation -> m ()
-- assertValid v = when (not $ validationIsValid v) $ fail $ show v
data TableResult a = TableResult { tr_count :: Int
, tr_docs :: [a]
} deriving (Generic)
$(deriveJSON (unPrefix "tr_") ''TableResult)
instance ToSchema a => ToSchema (TableResult a) where
declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "tr_")
instance Arbitrary a => Arbitrary (TableResult a) where
arbitrary = TableResult <$> arbitrary <*> arbitrary
type NodeTableResult a = TableResult (Node a)
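-- Note: with the "tr_" prefix stripped by unPrefix, a TableResult value is
-- expected to serialise as e.g. {"count": 2, "docs": [...]} (assumption about
-- unPrefix's field-label behaviour; see Gargantext.Core.Utils.Prefix).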
...@@ -57,14 +57,15 @@ nodeTypeId n =
  ---- Lists
    NodeList      -> 5
    NodeListCooc  -> 50
    NodeListModel -> 52

  ---- Scores
--  NodeOccurrences -> 10
    NodeGraph     -> 9
    NodePhylo     -> 90
    NodeChart     -> 7
    NodeDashboard -> 71
    NodeNoteBook  -> 88

--  Cooccurrences -> 9
...@@ -94,4 +95,5 @@ nodeTypes = [ (n, nodeTypeId n) | n <- allNodeTypes ]
fromNodeTypeId :: NodeTypeId -> NodeType
fromNodeTypeId tId = fromMaybe (panic $ pack $ "Type Id " <> show tId <> " does not exist")
                               (lookup tId nodeTypeInv)
...@@ -41,16 +41,12 @@ import Prelude (String)
import Data.Either
import Debug.Trace (trace)
import Control.Lens ((^.), view, _Just)
import Control.Monad.IO.Class (liftIO)
import Data.List (concat)
import Data.Map (Map, lookup)
import Data.Maybe (Maybe(..), catMaybes)
import Data.Monoid
import Data.Text (Text, splitOn, intercalate)
import Gargantext.Core (Lang(..))
import Gargantext.Core.Types (NodePoly(..), Terms(..))
import Gargantext.Core.Types.Individu (Username)
...@@ -58,6 +54,8 @@ import Gargantext.Core.Flow
import Gargantext.Core.Types.Main
import Gargantext.Database.Config (userMaster, corpusMasterName)
import Gargantext.Database.Flow.Utils (insertDocNgrams)
import Gargantext.Database.Flow.List
import Gargantext.Database.Flow.Types
import Gargantext.Database.Node.Contact -- (HyperdataContact(..), ContactWho(..))
import Gargantext.Database.Node.Document.Insert -- (insertDocuments, ReturnId(..), addUniqIdsDoc, addUniqIdsContact, ToDbData(..))
import Gargantext.Database.Root (getRoot)
...@@ -66,7 +64,7 @@ import Gargantext.Database.Schema.Node -- (mkRoot, mkCorpus, getOrMkList, mkGrap
import Gargantext.Database.Schema.User (getUser, UserLight(..))
import Gargantext.Database.TextSearch (searchInDatabase)
import Gargantext.Database.Types.Node -- (HyperdataDocument(..), NodeType(..), NodeId, UserId, ListId, CorpusId, RootId, MasterCorpusId, MasterUserId)
import Gargantext.Database.Utils (Cmd)
import Gargantext.Ext.IMT (toSchoolName)
import Gargantext.Ext.IMTUser (deserialiseImtUsersFromFile)
import Gargantext.Prelude
...@@ -84,13 +82,6 @@ import qualified Data.Text as Text
import qualified Gargantext.Database.Node.Document.Add as Doc (add)
import qualified Gargantext.Text.Corpus.Parsers.GrandDebat as GD

------------------------------------------------------------------------
data ApiQuery = ApiIsidoreQuery Text | ApiIsidoreAuth Text
...@@ -220,18 +211,22 @@ flowCorpusUser :: (FlowCmdM env err m, MkCorpus c)
flowCorpusUser l userName corpusName ctype ids = do
  -- User Flow
  (userId, _rootId, userCorpusId) <- getOrMkRootWithCorpus userName corpusName ctype
  listId <- getOrMkList userCorpusId userId
  _cooc  <- mkNode NodeListCooc listId userId
  -- TODO: check if present already, ignore
  _ <- Doc.add userCorpusId ids

  _tId <- mkNode NodeTexts userCorpusId userId
  -- printDebug "Node Text Id" tId

  -- User List Flow
  --{-
  (_masterUserId, _masterRootId, masterCorpusId) <- getOrMkRootWithCorpus userMaster (Left "") ctype
  ngs         <- buildNgramsLists l 2 3 (StopSize 3) userCorpusId masterCorpusId
  _userListId <- flowList listId ngs
  --mastListId <- getOrMkList masterCorpusId masterUserId
  -- _ <- insertOccsUpdates userCorpusId mastListId
  -- printDebug "userListId" userListId

  -- User Graph Flow
  _ <- mkDashboard userCorpusId userId
  _ <- mkGraph     userCorpusId userId
...@@ -281,6 +276,7 @@ insertMasterDocs c lang hs = do
  let indexedNgrams = Map.mapKeys (indexNgrams terms2id) maps

  lId   <- getOrMkList masterCorpusId masterUserId
  _cooc <- mkNode NodeListCooc lId masterUserId
  _     <- insertDocNgrams lId indexedNgrams
  pure $ map reId ids
...@@ -348,15 +344,6 @@ toInserted =
  Map.fromList . map (\r -> (reUniqId r, r) )
               . filter (\r -> reInserted r == True)
data DocumentWithId a = DocumentWithId
{ documentId :: !NodeId
, documentData :: !a
} deriving (Show)
instance HasText a => HasText (DocumentWithId a)
where
hasText (DocumentWithId _ a) = hasText a
mergeData :: Map HashId ReturnId
          -> Map HashId a
          -> [DocumentWithId a]
...@@ -367,11 +354,6 @@ mergeData rs = catMaybes . map toDocumentWithId . Map.toList
                                 <*> Just hpd

------------------------------------------------------------------------
data DocumentIdWithNgrams a = DocumentIdWithNgrams
{ documentWithId :: !(DocumentWithId a)
, document_ngrams :: !(Map Ngrams (Map NgramsType Int))
} deriving (Show)
instance HasText HyperdataContact
  where
...@@ -449,37 +431,3 @@ documentIdWithNgrams f = mapM toDocumentIdWithNgrams
      e <- f $ documentData d
      pure $ DocumentIdWithNgrams d e
-- FLOW LIST
-- | TODO check optimization
mapNodeIdNgrams :: [DocumentIdWithNgrams a]
-> Map Ngrams (Map NgramsType (Map NodeId Int))
mapNodeIdNgrams = Map.unionsWith (Map.unionWith (Map.unionWith (+))) . fmap f
where
f :: DocumentIdWithNgrams a
-> Map Ngrams (Map NgramsType (Map NodeId Int))
f d = fmap (fmap (Map.singleton nId)) $ document_ngrams d
where
nId = documentId $ documentWithId d
------------------------------------------------------------------------
listInsert :: FlowCmdM env err m
=> ListId
-> Map NgramsType [NgramsElement]
-> m ()
listInsert lId ngs = mapM_ (\(typeList, ngElmts)
-> putListNgrams lId typeList ngElmts
) $ toList ngs
flowList :: FlowCmdM env err m
=> UserId
-> CorpusId
-> Map NgramsType [NgramsElement]
-> m ListId
flowList uId cId ngs = do
lId <- getOrMkList cId uId
printDebug "listId flowList" lId
listInsert lId ngs
--trace (show $ List.filter (\n -> _ne_ngrams n == "versatile") $ List.concat $ Map.elems ngs) $ listInsert lId ngs
pure lId
{-|
Module : Gargantext.Database.Flow.List
Description : List Flow
Copyright : (c) CNRS, 2017-Present
License : AGPL + CECILL v3
Maintainer : team@gargantext.org
Stability : experimental
Portability : POSIX
-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ConstrainedClassMethods #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module Gargantext.Database.Flow.List
where
import Control.Monad (mapM_)
import Data.Map (Map, toList)
import Data.Maybe (Maybe(..))
import Gargantext.API.Ngrams (NgramsElement(..), putListNgrams)
import Gargantext.Database.Schema.Ngrams -- (insertNgrams, Ngrams(..), NgramsIndexed(..), indexNgrams, NgramsType(..), text2ngrams, ngramsTypeId)
import Gargantext.Database.Schema.NodeNgrams (NodeNgramsPoly(..), NodeNgramsW, listInsertDb)
import Gargantext.Database.Types.Node -- (HyperdataDocument(..), NodeType(..), NodeId, UserId, ListId, CorpusId, RootId, MasterCorpusId, MasterUserId)
import Gargantext.Database.Flow.Types
import Gargantext.Prelude
import qualified Data.List as List
import qualified Data.Map as Map
-- FLOW LIST
-- | TODO check optimization
mapNodeIdNgrams :: [DocumentIdWithNgrams a]
-> Map Ngrams (Map NgramsType (Map NodeId Int))
mapNodeIdNgrams = Map.unionsWith (Map.unionWith (Map.unionWith (+))) . fmap f
where
f :: DocumentIdWithNgrams a
-> Map Ngrams (Map NgramsType (Map NodeId Int))
f d = fmap (fmap (Map.singleton nId)) $ document_ngrams d
where
nId = documentId $ documentWithId d
------------------------------------------------------------------------
listInsert :: FlowCmdM env err m
=> ListId
-> Map NgramsType [NgramsElement]
-> m ()
listInsert lId ngs = mapM_ (\(typeList, ngElmts)
-> putListNgrams lId typeList ngElmts
) $ toList ngs
toNodeNgramsW :: ListId
-> [(NgramsType, [NgramsElement])]
-> [NodeNgramsW]
toNodeNgramsW l ngs = List.concat $ map (toNodeNgramsW' l) ngs
where
toNodeNgramsW' :: ListId
-> (NgramsType, [NgramsElement])
-> [NodeNgramsW]
toNodeNgramsW' l' (ngrams_type, elms) =
[ NodeNgrams Nothing l' list_type ngrams_terms' ngrams_type Nothing Nothing Nothing 0 |
(NgramsElement ngrams_terms' _size list_type _occ _root _parent _children) <- elms
]
flowList :: FlowCmdM env err m
=> ListId
-> Map NgramsType [NgramsElement]
-> m ListId
flowList lId ngs = do
-- printDebug "listId flowList" lId
-- TODO save in database
_r <- listInsertDb lId toNodeNgramsW (Map.toList ngs)
-- printDebug "result " r
listInsert lId ngs
--trace (show $ List.filter (\n -> _ne_ngrams n == "versatile") $ List.concat $ Map.elems ngs) $ listInsert lId ngs
pure lId
...@@ -32,16 +32,25 @@ import qualified Data.Map as DM
import Data.Text (Text, toLower)
import qualified Data.Text as DT
import Gargantext.Prelude hiding (sum)
import Gargantext.Core.Types (TableResult(..))
import Gargantext.Database.Schema.Ngrams -- (NgramsType(..))
import Gargantext.Database.Node.Contact -- (HyperdataContact(..))
import Gargantext.Database.Flow.Utils
import Gargantext.Database.Utils (Cmd, runPGSQuery)
import Gargantext.Database.Types.Node (AnnuaireId, CorpusId, ListId{-, DocId, ContactId-})
import Gargantext.Database.Node.Children (getAllContacts)

-- TODO mv this type in Types Main
type Terms = Text

{-
pairing'' :: (CorpusId, CorpusId) -> (DocId -> DocId)
pairing'' = undefined

pairing' :: (CorpusId, AnnuaireId) -> (DocId -> ContactId)
pairing' = undefined
-}

-- | TODO : add paring policy as parameter
pairing :: AnnuaireId
        -> CorpusId
...@@ -50,7 +59,7 @@ pairing :: AnnuaireId
pairing aId cId lId = do
  contacts' <- getAllContacts aId
  let contactsMap = pairingPolicyToMap toLower
                  $ toMaps extractNgramsT (tr_docs contacts')

  ngramsMap' <- getNgramsTindexed cId Authors
  let ngramsMap = pairingPolicyToMap lastName ngramsMap'
...
{-|
Module : Gargantext.Database.Flow.Types
Description : Types for Flow
Copyright : (c) CNRS, 2017-Present
License : AGPL + CECILL v3
Maintainer : team@gargantext.org
Stability : experimental
Portability : POSIX
-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ConstrainedClassMethods #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module Gargantext.Database.Flow.Types
where
import Data.Map (Map)
import Gargantext.Prelude
import Gargantext.Core.Flow
import Gargantext.API.Ngrams (HasRepoVar, RepoCmdM)
import Gargantext.Database.Schema.Ngrams (Ngrams(..), NgramsType(..))
import Gargantext.Database.Types.Node (NodeId)
import Gargantext.Database.Schema.Node (HasNodeError)
import Gargantext.Database.Utils (CmdM)
type FlowCmdM env err m =
( CmdM env err m
, RepoCmdM env err m
, HasNodeError err
, HasRepoVar env
)
data DocumentIdWithNgrams a = DocumentIdWithNgrams
{ documentWithId :: !(DocumentWithId a)
, document_ngrams :: !(Map Ngrams (Map NgramsType Int))
} deriving (Show)
data DocumentWithId a = DocumentWithId
{ documentId :: !NodeId
, documentData :: !a
} deriving (Show)
instance HasText a => HasText (DocumentWithId a)
where
hasText (DocumentWithId _ a) = hasText a
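-- A hedged sketch of how the two records above compose (mkDocWithNgrams and
-- its extract argument are hypothetical names): a stored document is first
-- tagged with its NodeId, then with the ngrams extracted from it, keyed by
-- Ngrams and NgramsType.
mkDocWithNgrams :: (a -> Map Ngrams (Map NgramsType Int))
                -> NodeId
                -> a
                -> DocumentIdWithNgrams a
mkDocWithNgrams extract nId doc =
  DocumentIdWithNgrams { documentWithId  = DocumentWithId nId doc
                       , document_ngrams = extract doc
                       }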
...@@ -25,12 +25,17 @@ import Gargantext.Database.Utils (Cmd) ...@@ -25,12 +25,17 @@ import Gargantext.Database.Utils (Cmd)
import Gargantext.Database.Schema.NodeNodeNgrams import Gargantext.Database.Schema.NodeNodeNgrams
import Gargantext.Database.Types.Node import Gargantext.Database.Types.Node
toMaps :: Hyperdata a => (a -> Map (NgramsT Ngrams) Int) -> [Node a] -> Map (NgramsT Ngrams) (Map NodeId Int) toMaps :: Hyperdata a
=> (a -> Map (NgramsT Ngrams) Int)
-> [Node a]
-> Map (NgramsT Ngrams) (Map NodeId Int)
toMaps fun ns = mapNodeIdNgrams $ documentIdWithNgrams fun ns' toMaps fun ns = mapNodeIdNgrams $ documentIdWithNgrams fun ns'
where where
ns' = map (\(Node nId _ _ _ _ _ json) -> DocumentWithId nId json) ns ns' = map (\(Node nId _ _ _ _ _ json) -> DocumentWithId nId json) ns
mapNodeIdNgrams :: Hyperdata a => [DocumentIdWithNgrams a] -> Map (NgramsT Ngrams) (Map NodeId Int) mapNodeIdNgrams :: Hyperdata a
=> [DocumentIdWithNgrams a]
-> Map (NgramsT Ngrams) (Map NodeId Int)
mapNodeIdNgrams ds = DM.map (DM.fromListWith (+)) $ DM.fromListWith (<>) xs mapNodeIdNgrams ds = DM.map (DM.fromListWith (+)) $ DM.fromListWith (<>) xs
where where
xs = [(ng, [(nId, i)]) | (nId, n2i') <- n2i ds, (ng, i) <- DM.toList n2i'] xs = [(ng, [(nId, i)]) | (nId, n2i') <- n2i ds, (ng, i) <- DM.toList n2i']
...@@ -61,7 +66,7 @@ docNgrams2nodeNodeNgrams :: CorpusId ...@@ -61,7 +66,7 @@ docNgrams2nodeNodeNgrams :: CorpusId
-> DocNgrams -> DocNgrams
-> NodeNodeNgrams -> NodeNodeNgrams
docNgrams2nodeNodeNgrams cId (DocNgrams d n nt w) = docNgrams2nodeNodeNgrams cId (DocNgrams d n nt w) =
NodeNodeNgrams Nothing cId d n nt w NodeNodeNgrams cId d n nt w
data DocNgrams = DocNgrams { dn_doc_id :: DocId data DocNgrams = DocNgrams { dn_doc_id :: DocId
, dn_ngrams_id :: Int , dn_ngrams_id :: Int
...@@ -72,14 +77,17 @@ data DocNgrams = DocNgrams { dn_doc_id :: DocId ...@@ -72,14 +77,17 @@ data DocNgrams = DocNgrams { dn_doc_id :: DocId
insertDocNgramsOn :: CorpusId insertDocNgramsOn :: CorpusId
-> [DocNgrams] -> [DocNgrams]
-> Cmd err Int -> Cmd err Int
insertDocNgramsOn cId dn = insertNodeNodeNgrams $ (map (docNgrams2nodeNodeNgrams cId) dn) insertDocNgramsOn cId dn =
insertNodeNodeNgrams
$ (map (docNgrams2nodeNodeNgrams cId) dn)
insertDocNgrams :: CorpusId insertDocNgrams :: CorpusId
-> Map NgramsIndexed (Map NgramsType (Map NodeId Int)) -> Map NgramsIndexed (Map NgramsType (Map NodeId Int))
-> Cmd err Int -> Cmd err Int
insertDocNgrams cId m = insertDocNgramsOn cId [ DocNgrams n (_ngramsId ng) (ngramsTypeId t) (fromIntegral i) insertDocNgrams cId m =
| (ng, t2n2i) <- DM.toList m insertDocNgramsOn cId [ DocNgrams n (_ngramsId ng) (ngramsTypeId t) (fromIntegral i)
, (t, n2i) <- DM.toList t2n2i | (ng, t2n2i) <- DM.toList m
, (n, i) <- DM.toList n2i , (t, n2i) <- DM.toList t2n2i
] , (n, i) <- DM.toList n2i
]
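-- A hedged usage sketch (insertOneDocNgram is a hypothetical helper;
-- NgramsType and ngramsTypeId are assumed in scope from
-- Gargantext.Database.Schema.Ngrams): insertDocNgramsOn takes one DocNgrams
-- row per (document, ngrams id, ngrams type) with its weight and returns the
-- number of rows written.
insertOneDocNgram :: CorpusId -> DocId -> Int -> NgramsType -> Double -> Cmd err Int
insertOneDocNgram cId docId ngId nt w =
  insertDocNgramsOn cId [ DocNgrams docId ngId (ngramsTypeId nt) w ]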
{-|
Module : Gargantext.Database.Init
Description : Triggers configuration
Copyright : (c) CNRS, 2017-Present
License : AGPL + CECILL v3
Maintainer : team@gargantext.org
Stability : experimental
Portability : POSIX
Ngrams indexed by node enable contextual metrics.
-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
module Gargantext.Database.Init
where
-- import Database.PostgreSQL.Simple.Types (Values(..), QualifiedIdentifier(..))
import Gargantext.Database.Utils (Cmd)
import Gargantext.Prelude
import Gargantext.Database.Triggers.Nodes (triggerSearchUpdate)
import Gargantext.Database.Triggers.NodesNodes (triggerDeleteCount, triggerInsertCount, triggerUpdateAdd, triggerUpdateDel, MasterListId)
import Gargantext.Database.Triggers.NodeNodeNgrams (triggerCountInsert)
------------------------------------------------------------------------
initTriggers :: MasterListId -> Cmd err [Int64]
initTriggers lId = do
t0 <- triggerSearchUpdate
t1 <- triggerCountInsert
t2 <- triggerDeleteCount lId
t3 <- triggerInsertCount lId
t4 <- triggerUpdateAdd lId
t5 <- triggerUpdateDel lId
pure [t0, t1, t2, t3, t4, t5]
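-- A hedged sketch (installAllTriggers is a hypothetical wrapper, not in this
-- commit): the six statements are meant to be installed once per database,
-- after the master list exists; each Int64 returned above is the row count
-- reported by execPGSQuery.
installAllTriggers :: MasterListId -> Cmd err Int
installAllTriggers lId = length <$> initTriggers lId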
...@@ -28,10 +28,10 @@ import Gargantext.Prelude ...@@ -28,10 +28,10 @@ import Gargantext.Prelude
import Opaleye import Opaleye
import Control.Arrow (returnA) import Control.Arrow (returnA)
selectNgramsByDoc :: [CorpusId] -> DocId -> NgramsType -> Cmd err [Text] selectNgramsByDoc :: [ListId] -> DocId -> NgramsType -> Cmd err [Text]
selectNgramsByDoc cIds dId nt = runOpaQuery (query cIds dId nt) selectNgramsByDoc lIds dId nt = runOpaQuery (query lIds dId nt)
where where
join :: Query (NgramsRead, NodeNodeNgramsReadNull) join :: Query (NgramsRead, NodeNodeNgramsReadNull)
join = leftJoin queryNgramsTable queryNodeNodeNgramsTable on1 join = leftJoin queryNgramsTable queryNodeNodeNgramsTable on1
where where
...@@ -42,7 +42,7 @@ selectNgramsByDoc cIds dId nt = runOpaQuery (query cIds dId nt) ...@@ -42,7 +42,7 @@ selectNgramsByDoc cIds dId nt = runOpaQuery (query cIds dId nt)
restrict -< foldl (\b cId -> ((toNullable $ pgNodeId cId) .== nnng^.nnng_node1_id) .|| b) (pgBool True) cIds' restrict -< foldl (\b cId -> ((toNullable $ pgNodeId cId) .== nnng^.nnng_node1_id) .|| b) (pgBool True) cIds'
restrict -< (toNullable $ pgNodeId dId') .== nnng^.nnng_node2_id restrict -< (toNullable $ pgNodeId dId') .== nnng^.nnng_node2_id
restrict -< (toNullable $ pgNgramsType nt') .== nnng^.nnng_ngramsType restrict -< (toNullable $ pgNgramsType nt') .== nnng^.nnng_ngramsType
returnA -< ng^.ngrams_terms returnA -< ng^.ngrams_terms
postNgrams :: CorpusId -> DocId -> [Text] -> Cmd err Int postNgrams :: CorpusId -> DocId -> [Text] -> Cmd err Int
......
...@@ -30,12 +30,11 @@ import Gargantext.Database.Node.Contact (HyperdataContact) ...@@ -30,12 +30,11 @@ import Gargantext.Database.Node.Contact (HyperdataContact)
import Gargantext.Database.Schema.Node (pgNodeId) import Gargantext.Database.Schema.Node (pgNodeId)
import Control.Arrow (returnA) import Control.Arrow (returnA)
getAllDocuments :: ParentId -> Cmd err (TableResult (Node HyperdataDocument))
getAllDocuments :: ParentId -> Cmd err [Node HyperdataDocument]
getAllDocuments pId = getAllChildren pId (Proxy :: Proxy HyperdataDocument) getAllDocuments pId = getAllChildren pId (Proxy :: Proxy HyperdataDocument)
(Just NodeDocument) (Just NodeDocument)
getAllContacts :: ParentId -> Cmd err [Node HyperdataContact] getAllContacts :: ParentId -> Cmd err (TableResult (Node HyperdataContact))
getAllContacts pId = getAllChildren pId (Proxy :: Proxy HyperdataContact) getAllContacts pId = getAllChildren pId (Proxy :: Proxy HyperdataContact)
(Just NodeContact) (Just NodeContact)
...@@ -43,7 +42,7 @@ getAllChildren :: JSONB a ...@@ -43,7 +42,7 @@ getAllChildren :: JSONB a
=> ParentId => ParentId
-> proxy a -> proxy a
-> Maybe NodeType -> Maybe NodeType
-> Cmd err [Node a] -> Cmd err (NodeTableResult a)
getAllChildren pId p maybeNodeType = getChildren pId p maybeNodeType Nothing Nothing getAllChildren pId p maybeNodeType = getChildren pId p maybeNodeType Nothing Nothing
getChildren :: JSONB a getChildren :: JSONB a
...@@ -52,11 +51,19 @@ getChildren :: JSONB a ...@@ -52,11 +51,19 @@ getChildren :: JSONB a
-> Maybe NodeType -> Maybe NodeType
-> Maybe Offset -> Maybe Offset
-> Maybe Limit -> Maybe Limit
-> Cmd err [Node a] -> Cmd err (NodeTableResult a)
getChildren pId _ maybeNodeType maybeOffset maybeLimit = runOpaQuery getChildren pId _ maybeNodeType maybeOffset maybeLimit = do
$ limit' maybeLimit $ offset' maybeOffset docs <- runOpaQuery
$ orderBy (asc _node_id) $ limit' maybeLimit $ offset' maybeOffset
$ selectChildren pId maybeNodeType $ orderBy (asc _node_id)
$ query
docCount <- runCountOpaQuery query
pure $ TableResult { tr_docs = docs, tr_count = docCount }
where
query = selectChildren pId maybeNodeType
selectChildren :: ParentId selectChildren :: ParentId
-> Maybe NodeType -> Maybe NodeType
......
{-|
Module : Gargantext.Database.Node.UpdateOpaleye
Description : Update Node in Database (Postgres)
Copyright : (c) CNRS, 2017-Present
License : AGPL + CECILL v3
Maintainer : team@gargantext.org
Stability : experimental
Portability : POSIX
-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RankNTypes #-}
module Gargantext.Database.Node.UpdateOpaleye where
import Opaleye
import Data.Aeson (encode, ToJSON)
import Gargantext.Prelude
import Gargantext.Database.Schema.Node
import Gargantext.Database.Types.Node
import Gargantext.Database.Utils (Cmd, mkCmd)
updateHyperdata :: ToJSON a => NodeId -> a -> Cmd err Int64
updateHyperdata i h = mkCmd $ \c -> runUpdate_ c (updateHyperdataQuery i h)
updateHyperdataQuery :: ToJSON a => NodeId -> a -> Update Int64
updateHyperdataQuery i h = Update
{ uTable = nodeTable
, uUpdateWith = updateEasy (\ (Node _ni _nt _nu _np _nn _nd _h)
-> Node _ni _nt _nu _np _nn _nd h'
)
, uWhere = (\row -> _node_id row .== pgNodeId i )
, uReturning = rCount
}
where h' = (pgJSONB $ cs $ encode $ h)
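-- A hedged usage sketch (setHyperdata is a hypothetical helper): overwrite a
-- node's hyperdata and discard the affected-row count reported by runUpdate_.
setHyperdata :: ToJSON a => NodeId -> a -> Cmd err ()
setHyperdata nId h = do
  _ <- updateHyperdata nId h
  pure ()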
...@@ -64,7 +64,9 @@ leftJoin3 ...@@ -64,7 +64,9 @@ leftJoin3
-> ((fieldsL2, fieldsR) -> Column PGBool) -> ((fieldsL2, fieldsR) -> Column PGBool)
-> ((fieldsL1, (fieldsL2, nullableFieldsR1)) -> Column PGBool) -> ((fieldsL1, (fieldsL2, nullableFieldsR1)) -> Column PGBool)
-> Opaleye.Select (fieldsL1, nullableFieldsR2) -> Opaleye.Select (fieldsL1, nullableFieldsR2)
leftJoin3 q1 q2 q3 cond12 cond23 = leftJoin q3 (leftJoin q2 q1 cond12) cond23 leftJoin3 q1 q2 q3
cond12 cond23 =
leftJoin q3 ( leftJoin q2 q1 cond12) cond23
leftJoin4 leftJoin4
...@@ -85,7 +87,13 @@ leftJoin4 ...@@ -85,7 +87,13 @@ leftJoin4
-> ((fieldsL2, (fieldsL3, nullableFieldsR2)) -> Column PGBool) -> ((fieldsL2, (fieldsL3, nullableFieldsR2)) -> Column PGBool)
-> ((fieldsL1, (fieldsL2, nullableFieldsR1)) -> Column PGBool) -> ((fieldsL1, (fieldsL2, nullableFieldsR1)) -> Column PGBool)
-> Opaleye.Select (fieldsL1, nullableFieldsR3) -> Opaleye.Select (fieldsL1, nullableFieldsR3)
leftJoin4 q1 q2 q3 q4 cond12 cond23 cond34 = leftJoin q4 (leftJoin q3 (leftJoin q2 q1 cond12) cond23) cond34 leftJoin4 q1 q2 q3 q4
cond12 cond23 cond34 =
leftJoin q4 ( leftJoin q3
( leftJoin q2 q1
cond12
) cond23
) cond34
leftJoin5 :: ( Default Unpackspec fieldsL1 fieldsL1, leftJoin5 :: ( Default Unpackspec fieldsL1 fieldsL1,
...@@ -110,7 +118,15 @@ leftJoin5 :: ( Default Unpackspec fieldsL1 fieldsL1, ...@@ -110,7 +118,15 @@ leftJoin5 :: ( Default Unpackspec fieldsL1 fieldsL1,
-> ((fieldsL2, (fieldsL3, nullableFieldsR2)) -> Column PGBool) -> ((fieldsL2, (fieldsL3, nullableFieldsR2)) -> Column PGBool)
-> ((fieldsL1, (fieldsL2, nullableFieldsR1)) -> Column PGBool) -> ((fieldsL1, (fieldsL2, nullableFieldsR1)) -> Column PGBool)
-> Query (fieldsL1, nullableFieldsR4) -> Query (fieldsL1, nullableFieldsR4)
leftJoin5 q1 q2 q3 q4 q5 cond12 cond23 cond34 cond45 = leftJoin q5 (leftJoin q4 (leftJoin q3 (leftJoin q2 q1 cond12) cond23) cond34) cond45 leftJoin5 q1 q2 q3 q4 q5
cond12 cond23 cond34 cond45 =
leftJoin q5 ( leftJoin q4
( leftJoin q3
( leftJoin q2 q1
cond12
) cond23
) cond34
) cond45
leftJoin6 :: ( Default Unpackspec fieldsL1 fieldsL1, leftJoin6 :: ( Default Unpackspec fieldsL1 fieldsL1,
...@@ -139,7 +155,17 @@ leftJoin6 :: ( Default Unpackspec fieldsL1 fieldsL1, ...@@ -139,7 +155,17 @@ leftJoin6 :: ( Default Unpackspec fieldsL1 fieldsL1,
-> ((fieldsL2, (fieldsL3, nullableFieldsR2)) -> Column PGBool) -> ((fieldsL2, (fieldsL3, nullableFieldsR2)) -> Column PGBool)
-> ((fieldsL1, (fieldsL2, nullableFieldsR1)) -> Column PGBool) -> ((fieldsL1, (fieldsL2, nullableFieldsR1)) -> Column PGBool)
-> Query (fieldsL1, nullableFieldsR5) -> Query (fieldsL1, nullableFieldsR5)
leftJoin6 q1 q2 q3 q4 q5 q6 cond12 cond23 cond34 cond45 cond56 = leftJoin q6 (leftJoin q5 (leftJoin q4 (leftJoin q3 (leftJoin q2 q1 cond12) cond23) cond34) cond45) cond56 leftJoin6 q1 q2 q3 q4 q5 q6
cond12 cond23 cond34 cond45 cond56 =
leftJoin q6 ( leftJoin q5
( leftJoin q4
( leftJoin q3
( leftJoin q2 q1
cond12
) cond23
) cond34
) cond45
) cond56
leftJoin7 leftJoin7
...@@ -175,7 +201,19 @@ leftJoin7 ...@@ -175,7 +201,19 @@ leftJoin7
-> ((fieldsL2, (fieldsL3, nullableFieldsR2)) -> Column PGBool) -> ((fieldsL2, (fieldsL3, nullableFieldsR2)) -> Column PGBool)
-> ((fieldsL1, (fieldsL2, nullableFieldsR1)) -> Column PGBool) -> ((fieldsL1, (fieldsL2, nullableFieldsR1)) -> Column PGBool)
-> Opaleye.Select (fieldsL1, nullableFieldsR6) -> Opaleye.Select (fieldsL1, nullableFieldsR6)
leftJoin7 q1 q2 q3 q4 q5 q6 q7 cond12 cond23 cond34 cond45 cond56 cond67 = leftJoin q7 (leftJoin q6 (leftJoin q5 (leftJoin q4 (leftJoin q3 (leftJoin q2 q1 cond12) cond23) cond34) cond45) cond56) cond67 leftJoin7 q1 q2 q3 q4 q5 q6 q7
cond12 cond23 cond34 cond45 cond56 cond67 =
leftJoin q7 ( leftJoin q6
( leftJoin q5
( leftJoin q4
( leftJoin q3
( leftJoin q2 q1
cond12
) cond23
) cond34
) cond45
) cond56
) cond67
leftJoin8 leftJoin8
...@@ -216,7 +254,21 @@ leftJoin8 ...@@ -216,7 +254,21 @@ leftJoin8
-> ((fieldsL2, (fieldsL3, nullableFieldsR2)) -> Column PGBool) -> ((fieldsL2, (fieldsL3, nullableFieldsR2)) -> Column PGBool)
-> ((fieldsL1, (fieldsL2, nullableFieldsR1)) -> Column PGBool) -> ((fieldsL1, (fieldsL2, nullableFieldsR1)) -> Column PGBool)
-> Opaleye.Select (fieldsL1, nullableFieldsR7) -> Opaleye.Select (fieldsL1, nullableFieldsR7)
leftJoin8 q1 q2 q3 q4 q5 q6 q7 q8 cond12 cond23 cond34 cond45 cond56 cond67 cond78 = leftJoin q8 (leftJoin q7 (leftJoin q6 (leftJoin q5 (leftJoin q4 (leftJoin q3 (leftJoin q2 q1 cond12) cond23) cond34) cond45) cond56) cond67) cond78 leftJoin8 q1 q2 q3 q4 q5 q6 q7 q8
cond12 cond23 cond34 cond45 cond56 cond67 cond78 =
leftJoin q8 ( leftJoin q7
( leftJoin q6
( leftJoin q5
( leftJoin q4
( leftJoin q3
( leftJoin q2 q1
cond12
) cond23
) cond34
) cond45
) cond56
) cond67
) cond78
leftJoin9 leftJoin9
...@@ -262,5 +314,21 @@ leftJoin9 ...@@ -262,5 +314,21 @@ leftJoin9
-> ((fieldsL2, (fieldsL3, nullableFieldsR2)) -> Column PGBool) -> ((fieldsL2, (fieldsL3, nullableFieldsR2)) -> Column PGBool)
-> ((fieldsL1, (fieldsL2, nullableFieldsR1)) -> Column PGBool) -> ((fieldsL1, (fieldsL2, nullableFieldsR1)) -> Column PGBool)
-> Opaleye.Select (fieldsL1, nullableFieldsR8) -> Opaleye.Select (fieldsL1, nullableFieldsR8)
leftJoin9 q1 q2 q3 q4 q5 q6 q7 q8 q9 cond12 cond23 cond34 cond45 cond56 cond67 cond78 cond89 = leftJoin q9 (leftJoin q8 (leftJoin q7 (leftJoin q6 (leftJoin q5 (leftJoin q4 (leftJoin q3 (leftJoin q2 q1 cond12) cond23) cond34) cond45) cond56) cond67) cond78) cond89 leftJoin9 q1 q2 q3 q4 q5 q6 q7 q8 q9
cond12 cond23 cond34 cond45 cond56 cond67 cond78 cond89 =
leftJoin q9 ( leftJoin q8
( leftJoin q7
( leftJoin q6
( leftJoin q5
( leftJoin q4
( leftJoin q3
( leftJoin q2 q1
cond12
) cond23
) cond34
) cond45
) cond56
) cond67
) cond78
) cond89
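-- Added reading note: every leftJoinN variant builds the same onion. The
-- innermost call pairs q2 with q1 under cond12, and each further query wraps
-- around the previous result, so the highest-numbered query and its condition
-- are always applied outermost. The multi-line layout only changes
-- formatting, not this nesting.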
...@@ -44,6 +44,7 @@ import Gargantext.Database.Queries.Filter (limit', offset') ...@@ -44,6 +44,7 @@ import Gargantext.Database.Queries.Filter (limit', offset')
import Gargantext.Database.Types.Node (NodeType(..), defaultCorpus, Hyperdata, HyperData(..)) import Gargantext.Database.Types.Node (NodeType(..), defaultCorpus, Hyperdata, HyperData(..))
import Gargantext.Database.Utils import Gargantext.Database.Utils
import Gargantext.Prelude hiding (sum, head) import Gargantext.Prelude hiding (sum, head)
import Gargantext.Viz.Graph (HyperdataGraph(..))
import Opaleye hiding (FromField) import Opaleye hiding (FromField)
import Opaleye.Internal.QueryArr (Query) import Opaleye.Internal.QueryArr (Query)
...@@ -218,11 +219,11 @@ nodeTable :: Table NodeWrite NodeRead ...@@ -218,11 +219,11 @@ nodeTable :: Table NodeWrite NodeRead
nodeTable = Table "nodes" (pNode Node { _node_id = optional "id" nodeTable = Table "nodes" (pNode Node { _node_id = optional "id"
, _node_typename = required "typename" , _node_typename = required "typename"
, _node_userId = required "user_id" , _node_userId = required "user_id"
, _node_parentId = optional "parent_id" , _node_parentId = optional "parent_id"
, _node_name = required "name" , _node_name = required "name"
, _node_date = optional "date" , _node_date = optional "date"
, _node_hyperdata = required "hyperdata" , _node_hyperdata = required "hyperdata"
} }
) )
...@@ -266,21 +267,20 @@ type NodeSearchReadNull = ...@@ -266,21 +267,20 @@ type NodeSearchReadNull =
(Column (Nullable PGJsonb) ) (Column (Nullable PGJsonb) )
(Column (Nullable PGTSVector) ) (Column (Nullable PGTSVector) )
--{-
nodeTableSearch :: Table NodeSearchWrite NodeSearchRead nodeTableSearch :: Table NodeSearchWrite NodeSearchRead
nodeTableSearch = Table "nodes" (pNodeSearch NodeSearch { _ns_id = optional "id" nodeTableSearch = Table "nodes" (pNodeSearch NodeSearch { _ns_id = optional "id"
, _ns_typename = required "typename" , _ns_typename = required "typename"
, _ns_userId = required "user_id" , _ns_userId = required "user_id"
, _ns_parentId = required "parent_id" , _ns_parentId = required "parent_id"
, _ns_name = required "name" , _ns_name = required "name"
, _ns_date = optional "date" , _ns_date = optional "date"
, _ns_hyperdata = required "hyperdata" , _ns_hyperdata = required "hyperdata"
, _ns_search = optional "search" , _ns_search = optional "search"
} }
) )
--}
queryNodeSearchTable :: Query NodeSearchRead queryNodeSearchTable :: Query NodeSearchRead
queryNodeSearchTable = queryTable nodeTableSearch queryNodeSearchTable = queryTable nodeTableSearch
...@@ -372,19 +372,19 @@ selectNodesWithType type_id = proc () -> do ...@@ -372,19 +372,19 @@ selectNodesWithType type_id = proc () -> do
type JSONB = QueryRunnerColumnDefault PGJsonb type JSONB = QueryRunnerColumnDefault PGJsonb
getNode :: JSONB a => NodeId -> proxy a -> Cmd err (Node a)
getNode nId _ = do
fromMaybe (error $ "Node does node exist: " <> show nId) . headMay
<$> runOpaQuery (limit 1 $ selectNode (pgNodeId nId))
getNodePhylo :: NodeId -> Cmd err (Node HyperdataPhylo) getNode :: NodeId -> Cmd err (Node Value)
getNodePhylo nId = do getNode nId = fromMaybe (error $ "Node does not exist: " <> show nId) . headMay
fromMaybe (error $ "Node does node exist: " <> show nId) . headMay
<$> runOpaQuery (limit 1 $ selectNode (pgNodeId nId)) <$> runOpaQuery (limit 1 $ selectNode (pgNodeId nId))
getNodeWith :: JSONB a => NodeId -> proxy a -> Cmd err (Node a)
getNodeWith nId _ = do
fromMaybe (error $ "Node does not exist: " <> show nId) . headMay
<$> runOpaQuery (limit 1 $ selectNode (pgNodeId nId))
getNode' :: NodeId -> Cmd err (Node Value) getNodePhylo :: NodeId -> Cmd err (Node HyperdataPhylo)
getNode' nId = fromMaybe (error $ "Node does node exist: " <> show nId) . headMay getNodePhylo nId = do
fromMaybe (error $ "Node Phylo does not exist: " <> show nId) . headMay
<$> runOpaQuery (limit 1 $ selectNode (pgNodeId nId)) <$> runOpaQuery (limit 1 $ selectNode (pgNodeId nId))
...@@ -434,7 +434,6 @@ nodeAnnuaireW maybeName maybeAnnuaire pId = node NodeAnnuaire name annuaire (Jus ...@@ -434,7 +434,6 @@ nodeAnnuaireW maybeName maybeAnnuaire pId = node NodeAnnuaire name annuaire (Jus
name = maybe "Annuaire" identity maybeName name = maybe "Annuaire" identity maybeName
annuaire = maybe defaultAnnuaire identity maybeAnnuaire annuaire = maybe defaultAnnuaire identity maybeAnnuaire
------------------------------------------------------------------------ ------------------------------------------------------------------------
{- {-
...@@ -466,12 +465,14 @@ instance HasDefault NodeType where ...@@ -466,12 +465,14 @@ instance HasDefault NodeType where
hasDefaultData nt = case nt of hasDefaultData nt = case nt of
NodeTexts -> HyperdataTexts (Just "Preferences") NodeTexts -> HyperdataTexts (Just "Preferences")
NodeList -> HyperdataList' (Just "Preferences") NodeList -> HyperdataList' (Just "Preferences")
NodeListCooc -> HyperdataList' (Just "Preferences")
_ -> undefined _ -> undefined
--NodeAnnuaire -> HyperdataAnnuaire (Just "Title") (Just "Description") --NodeAnnuaire -> HyperdataAnnuaire (Just "Title") (Just "Description")
hasDefaultName nt = case nt of hasDefaultName nt = case nt of
NodeTexts -> "Texts" NodeTexts -> "Texts"
NodeList -> "Lists" NodeList -> "Lists"
NodeListCooc -> "Cooc"
_ -> undefined _ -> undefined
------------------------------------------------------------------------ ------------------------------------------------------------------------
...@@ -498,7 +499,7 @@ nodeListModelW maybeName maybeListModel pId = node NodeListModel name list (Just ...@@ -498,7 +499,7 @@ nodeListModelW maybeName maybeListModel pId = node NodeListModel name list (Just
------------------------------------------------------------------------ ------------------------------------------------------------------------
arbitraryGraph :: HyperdataGraph arbitraryGraph :: HyperdataGraph
arbitraryGraph = HyperdataGraph (Just "Preferences") arbitraryGraph = HyperdataGraph Nothing
nodeGraphW :: Maybe Name -> Maybe HyperdataGraph -> ParentId -> UserId -> NodeWrite nodeGraphW :: Maybe Name -> Maybe HyperdataGraph -> ParentId -> UserId -> NodeWrite
nodeGraphW maybeName maybeGraph pId = node NodeGraph name graph (Just pId) nodeGraphW maybeName maybeGraph pId = node NodeGraph name graph (Just pId)
...@@ -506,6 +507,12 @@ nodeGraphW maybeName maybeGraph pId = node NodeGraph name graph (Just pId) ...@@ -506,6 +507,12 @@ nodeGraphW maybeName maybeGraph pId = node NodeGraph name graph (Just pId)
name = maybe "Graph" identity maybeName name = maybe "Graph" identity maybeName
graph = maybe arbitraryGraph identity maybeGraph graph = maybe arbitraryGraph identity maybeGraph
mkGraph :: ParentId -> UserId -> Cmd err [GraphId]
mkGraph p u = insertNodesR [nodeGraphW Nothing Nothing p u]
insertGraph :: ParentId -> UserId -> HyperdataGraph -> Cmd err [GraphId]
insertGraph p u h = insertNodesR [nodeGraphW Nothing (Just h) p u]
------------------------------------------------------------------------ ------------------------------------------------------------------------
arbitraryPhylo :: HyperdataPhylo arbitraryPhylo :: HyperdataPhylo
arbitraryPhylo = HyperdataPhylo Nothing Nothing arbitraryPhylo = HyperdataPhylo Nothing Nothing
...@@ -518,10 +525,8 @@ nodePhyloW maybeName maybePhylo pId = node NodePhylo name graph (Just pId) ...@@ -518,10 +525,8 @@ nodePhyloW maybeName maybePhylo pId = node NodePhylo name graph (Just pId)
------------------------------------------------------------------------ ------------------------------------------------------------------------
arbitraryDashboard :: HyperdataDashboard arbitraryDashboard :: HyperdataDashboard
arbitraryDashboard = HyperdataDashboard (Just "Preferences") arbitraryDashboard = HyperdataDashboard (Just "Preferences")
------------------------------------------------------------------------ ------------------------------------------------------------------------
node :: (ToJSON a, Hyperdata a) => NodeType -> Name -> a -> Maybe ParentId -> UserId -> NodeWrite node :: (ToJSON a, Hyperdata a) => NodeType -> Name -> a -> Maybe ParentId -> UserId -> NodeWrite
...@@ -695,11 +700,6 @@ defaultList cId = ...@@ -695,11 +700,6 @@ defaultList cId =
mkNode :: NodeType -> ParentId -> UserId -> Cmd err [NodeId] mkNode :: NodeType -> ParentId -> UserId -> Cmd err [NodeId]
mkNode nt p u = insertNodesR [nodeDefault nt p u] mkNode nt p u = insertNodesR [nodeDefault nt p u]
mkGraph :: ParentId -> UserId -> Cmd err [GraphId]
mkGraph p u = insertNodesR [nodeGraphW Nothing Nothing p u]
mkDashboard :: ParentId -> UserId -> Cmd err [NodeId] mkDashboard :: ParentId -> UserId -> Cmd err [NodeId]
mkDashboard p u = insertNodesR [nodeDashboardW Nothing Nothing p u] mkDashboard p u = insertNodesR [nodeDashboardW Nothing Nothing p u]
where where
...@@ -720,3 +720,4 @@ pgNodeId = pgInt4 . id2int ...@@ -720,3 +720,4 @@ pgNodeId = pgInt4 . id2int
getListsWithParentId :: NodeId -> Cmd err [Node HyperdataList] getListsWithParentId :: NodeId -> Cmd err [Node HyperdataList]
getListsWithParentId n = runOpaQuery $ selectNodesWith' n (Just NodeList) getListsWithParentId n = runOpaQuery $ selectNodesWith' n (Just NodeList)
{-|
Module : Gargantext.Database.Schema.NodeNgrams
Description :
Copyright : (c) CNRS, 2017-Present
License : AGPL + CECILL v3
Maintainer : team@gargantext.org
Stability : experimental
Portability : POSIX
NodeNgrams registers the context of Ngrams (then named Cgrams).
-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE Arrows #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
module Gargantext.Database.Schema.NodeNgrams where
import Data.Text (Text)
import qualified Database.PostgreSQL.Simple as PGS (Query, Only(..))
import Database.PostgreSQL.Simple.Types (Values(..), QualifiedIdentifier(..))
import Database.PostgreSQL.Simple.FromRow (fromRow, field)
import Database.PostgreSQL.Simple.ToField (toField)
import Database.PostgreSQL.Simple (FromRow)
import Database.PostgreSQL.Simple.SqlQQ (sql)
-- import Control.Lens.TH (makeLenses)
import Data.Maybe (Maybe, fromMaybe)
import Gargantext.Core.Types
import Gargantext.Database.Utils
import Gargantext.Database.Schema.Ngrams (NgramsType, ngramsTypeId)
import Gargantext.Prelude
data NodeNgramsPoly id
node_id'
node_subtype
ngrams_id
ngrams_type
ngrams_field
ngrams_tag
ngrams_class
weight
= NodeNgrams { _nng_id :: id
, _nng_node_id :: node_id'
, _nng_node_subtype :: node_subtype
, _nng_ngrams_id :: ngrams_id
, _nng_ngrams_type :: ngrams_type
, _nng_ngrams_field :: ngrams_field
, _nng_ngrams_tag :: ngrams_tag
, _nng_ngrams_class :: ngrams_class
, _nng_ngrams_weight :: weight
} deriving (Show)
{-
type NodeNgramsWrite = NodeNgramsPoly (Maybe (Column (PGInt4)))
(Column (PGInt4))
(Maybe (Column (PGInt4)))
(Column (PGInt4))
(Maybe (Column (PGInt4)))
(Maybe (Column (PGInt4)))
(Maybe (Column (PGInt4)))
(Maybe (Column (PGInt4)))
(Maybe (Column (PGFloat8)))
type NodeNodeRead = NodeNgramsPoly (Column PGInt4)
(Column PGInt4)
(Column PGInt4)
(Column PGInt4)
(Column PGInt4)
(Column PGInt4)
(Column PGInt4)
(Column PGInt4)
(Column PGFloat8)
type NodeNgramsReadNull = NodeNgramsPoly (Column (Nullable PGInt4))
(Column (Nullable PGInt4))
(Column (Nullable PGInt4))
(Column (Nullable PGInt4))
(Column (Nullable PGInt4))
(Column (Nullable PGInt4))
(Column (Nullable PGInt4))
(Column (Nullable PGInt4))
(Column (Nullable PGFloat8))
-}
type NgramsId = Int
type NgramsField = Int
type NgramsTag = Int
type NgramsClass = Int
type NgramsText = Text
-- Example of list Ngrams
-- type ListNgrams = NodeNgramsPoly (Maybe Int) ListType Text
type NodeNgramsW =
NodeNgramsPoly (Maybe Int) NodeId ListType NgramsText
NgramsType (Maybe NgramsField) (Maybe NgramsTag) (Maybe NgramsClass)
Double
data Returning = Returning { re_terms :: Text
, re_ngrams_id :: Int
}
deriving (Show)
instance FromRow Returning where
fromRow = Returning <$> field <*> field
-- insertDb :: ListId -> Map NgramsType [NgramsElemet] -> Cmd err [Result]
listInsertDb :: Show a => ListId
-> (ListId -> a -> [NodeNgramsW])
-> a
-> Cmd err [Returning]
listInsertDb l f ngs = insertNodeNgrams (f l ngs)
-- TODO optimize with size of ngrams
insertNodeNgrams :: [NodeNgramsW] -> Cmd err [Returning]
insertNodeNgrams nns = runPGSQuery query (PGS.Only $ Values fields nns')
where
fields = map (\t-> QualifiedIdentifier Nothing t) ["int4","int4","text","int4"
,"int4","int4","int4","int4"
,"float8"]
-- nns' :: [(Int, ListTypeId, NgramsText, NgramsTypeId, NgramsField, NgramsTag, NgramsClass, Double)]
nns' = map (\(NodeNgrams _id (NodeId node_id'') node_subtype ngrams_terms ngrams_type ngrams_field ngrams_tag ngrams_class weight)
-> [ toField node_id''
, toField $ listTypeId node_subtype
, toField $ ngrams_terms
, toField $ ngramsTypeId ngrams_type
, toField $ fromMaybe 0 ngrams_field
, toField $ fromMaybe 0 ngrams_tag
, toField $ fromMaybe 0 ngrams_class
, toField weight
]
) nns
query :: PGS.Query
query = [sql|
WITH input(node_id, node_subtype, ngrams_terms, ngrams_type, ngrams_field, ngrams_tag, ngrams_class, weight) AS (?),
return(id, ngrams_id) AS (
INSERT INTO node_ngrams (node_id, node_subtype, ngrams_id, ngrams_type, ngrams_field, ngrams_tag, ngrams_class, weight)
SELECT i.node_id, i.node_subtype, ng.id, i.ngrams_type, i.ngrams_field, i.ngrams_tag, i.ngrams_class, i.weight FROM input as i
INNER JOIN ngrams as ng ON ng.terms = i.ngrams_terms
ON CONFLICT(node_id, node_subtype, ngrams_id)
DO UPDATE SET node_subtype = excluded.node_subtype, ngrams_type = excluded.ngrams_type, ngrams_field = excluded.ngrams_field, ngrams_tag = excluded.ngrams_tag, ngrams_class = excluded.ngrams_class, weight = excluded.weight
RETURNING id, ngrams_id
)
SELECT ng.terms, return.id FROM return
INNER JOIN ngrams ng ON return.ngrams_id = ng.id;
|]
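-- A hedged usage sketch (insertCandidates is a hypothetical helper; it
-- assumes the ListType constructor CandidateTerm from Gargantext.Core.Types
-- is in scope): insert a batch of terms for a list with a default weight and
-- get back the cgrams ids that were created or updated.
insertCandidates :: ListId -> [(NgramsText, NgramsType)] -> Cmd err [Returning]
insertCandidates lId = listInsertDb lId toRows
  where
    toRows :: ListId -> [(NgramsText, NgramsType)] -> [NodeNgramsW]
    toRows l = map (\(terms, nt) ->
                      NodeNgrams Nothing l CandidateTerm terms nt
                                 Nothing Nothing Nothing 1.0)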
...@@ -120,12 +120,12 @@ nodeNodesCategory inputData = map (\(PGS.Only a) -> a) ...@@ -120,12 +120,12 @@ nodeNodesCategory inputData = map (\(PGS.Only a) -> a)
where where
fields = map (\t-> QualifiedIdentifier Nothing t) ["int4","int4","int4"] fields = map (\t-> QualifiedIdentifier Nothing t) ["int4","int4","int4"]
catQuery :: PGS.Query catQuery :: PGS.Query
catQuery = [sql| UPDATE nodes_nodes as old SET catQuery = [sql| UPDATE nodes_nodes as nn0
category = new.category SET category = nn1.category
from (?) as new(node1_id,node2_id,category) FROM (?) as nn1(node1_id,node2_id,category)
WHERE old.node1_id = new.node1_id WHERE nn0.node1_id = nn1.node1_id
AND old.node2_id = new.node2_id AND nn0.node2_id = nn1.node2_id
RETURNING new.node2_id RETURNING nn1.node2_id
|] |]
------------------------------------------------------------------------ ------------------------------------------------------------------------
...@@ -187,12 +187,12 @@ nodesToTrash input = map (\(PGS.Only a) -> a) ...@@ -187,12 +187,12 @@ nodesToTrash input = map (\(PGS.Only a) -> a)
where where
fields = map (\t-> QualifiedIdentifier Nothing t) ["int4","int4","bool"] fields = map (\t-> QualifiedIdentifier Nothing t) ["int4","int4","bool"]
trashQuery :: PGS.Query trashQuery :: PGS.Query
trashQuery = [sql| UPDATE nodes_nodes as old SET trashQuery = [sql| UPDATE nodes_nodes as nn0 SET
delete = new.delete delete = nn1.delete
from (?) as new(node1_id,node2_id,delete) from (?) as nn1(node1_id,node2_id,delete)
WHERE old.node1_id = new.node1_id WHERE nn0.node1_id = nn1.node1_id
AND old.node2_id = new.node2_id AND nn0.node2_id = nn1.node2_id
RETURNING new.node2_id RETURNING nn1.node2_id
|] |]
-- | /!\ Really remove nodes in the Corpus or Annuaire -- | /!\ Really remove nodes in the Corpus or Annuaire
......
...@@ -23,7 +23,6 @@ module Gargantext.Database.Schema.NodeNodeNgrams ...@@ -23,7 +23,6 @@ module Gargantext.Database.Schema.NodeNodeNgrams
where where
import Prelude import Prelude
import Data.Maybe (Maybe)
import Data.Profunctor.Product.TH (makeAdaptorAndInstance) import Data.Profunctor.Product.TH (makeAdaptorAndInstance)
import Control.Lens.TH (makeLenses) import Control.Lens.TH (makeLenses)
import Gargantext.Database.Utils (Cmd, mkCmd) import Gargantext.Database.Utils (Cmd, mkCmd)
...@@ -33,10 +32,8 @@ import Gargantext.Database.Types.Node ...@@ -33,10 +32,8 @@ import Gargantext.Database.Types.Node
import Opaleye import Opaleye
data NodeNodeNgramsPoly n1 n2 ngrams_id ngt w
data NodeNodeNgramsPoly id' n1 n2 ngrams_id ngt w = NodeNodeNgrams { _nnng_node1_id :: n1
= NodeNodeNgrams { _nnng_id :: id'
, _nnng_node1_id :: n1
, _nnng_node2_id :: n2 , _nnng_node2_id :: n2
, _nnng_ngrams_id :: ngrams_id , _nnng_ngrams_id :: ngrams_id
, _nnng_ngramsType :: ngt , _nnng_ngramsType :: ngt
...@@ -45,8 +42,7 @@ data NodeNodeNgramsPoly id' n1 n2 ngrams_id ngt w ...@@ -45,8 +42,7 @@ data NodeNodeNgramsPoly id' n1 n2 ngrams_id ngt w
type NodeNodeNgramsWrite = type NodeNodeNgramsWrite =
NodeNodeNgramsPoly (Maybe (Column PGInt4 )) NodeNodeNgramsPoly (Column PGInt4 )
(Column PGInt4 )
(Column PGInt4 ) (Column PGInt4 )
(Column PGInt4 ) (Column PGInt4 )
(Column PGInt4 ) (Column PGInt4 )
...@@ -57,7 +53,6 @@ type NodeNodeNgramsRead = ...@@ -57,7 +53,6 @@ type NodeNodeNgramsRead =
(Column PGInt4 ) (Column PGInt4 )
(Column PGInt4 ) (Column PGInt4 )
(Column PGInt4 ) (Column PGInt4 )
(Column PGInt4 )
(Column PGFloat8) (Column PGFloat8)
type NodeNodeNgramsReadNull = type NodeNodeNgramsReadNull =
...@@ -65,11 +60,10 @@ type NodeNodeNgramsReadNull = ...@@ -65,11 +60,10 @@ type NodeNodeNgramsReadNull =
(Column (Nullable PGInt4 )) (Column (Nullable PGInt4 ))
(Column (Nullable PGInt4 )) (Column (Nullable PGInt4 ))
(Column (Nullable PGInt4 )) (Column (Nullable PGInt4 ))
(Column (Nullable PGInt4 ))
(Column (Nullable PGFloat8)) (Column (Nullable PGFloat8))
type NodeNodeNgrams = type NodeNodeNgrams =
NodeNodeNgramsPoly (Maybe Int) CorpusId DocId NgramsId NgramsTypeId Double NodeNodeNgramsPoly CorpusId DocId NgramsId NgramsTypeId Double
$(makeAdaptorAndInstance "pNodeNodeNgrams" ''NodeNodeNgramsPoly) $(makeAdaptorAndInstance "pNodeNodeNgrams" ''NodeNodeNgramsPoly)
makeLenses ''NodeNodeNgramsPoly makeLenses ''NodeNodeNgramsPoly
...@@ -78,8 +72,7 @@ makeLenses ''NodeNodeNgramsPoly ...@@ -78,8 +72,7 @@ makeLenses ''NodeNodeNgramsPoly
nodeNodeNgramsTable :: Table NodeNodeNgramsWrite NodeNodeNgramsRead nodeNodeNgramsTable :: Table NodeNodeNgramsWrite NodeNodeNgramsRead
nodeNodeNgramsTable = Table "node_node_ngrams" nodeNodeNgramsTable = Table "node_node_ngrams"
( pNodeNodeNgrams NodeNodeNgrams ( pNodeNodeNgrams NodeNodeNgrams
{ _nnng_id = optional "id" { _nnng_node1_id = required "node1_id"
, _nnng_node1_id = required "node1_id"
, _nnng_node2_id = required "node2_id" , _nnng_node2_id = required "node2_id"
, _nnng_ngrams_id = required "ngrams_id" , _nnng_ngrams_id = required "ngrams_id"
, _nnng_ngramsType = required "ngrams_type" , _nnng_ngramsType = required "ngrams_type"
...@@ -94,9 +87,8 @@ queryNodeNodeNgramsTable = queryTable nodeNodeNgramsTable ...@@ -94,9 +87,8 @@ queryNodeNodeNgramsTable = queryTable nodeNodeNgramsTable
-- | Insert utils -- | Insert utils
insertNodeNodeNgrams :: [NodeNodeNgrams] -> Cmd err Int insertNodeNodeNgrams :: [NodeNodeNgrams] -> Cmd err Int
insertNodeNodeNgrams = insertNodeNodeNgramsW insertNodeNodeNgrams = insertNodeNodeNgramsW
. map (\(NodeNodeNgrams id'' n1 n2 ng nt w) -> . map (\(NodeNodeNgrams n1 n2 ng nt w) ->
NodeNodeNgrams (pgInt4 <$> id'') NodeNodeNgrams (pgNodeId n1)
(pgNodeId n1)
(pgNodeId n2) (pgNodeId n2)
(pgInt4 ng) (pgInt4 ng)
(pgNgramsTypeId nt) (pgNgramsTypeId nt)
......
{-| {-|
Module : Gargantext.Database.Schema.NodeNgramsNgrams Module : Gargantext.Database.Schema.Node_NodeNgrams_NodeNgrams
Description : Description :
Copyright : (c) CNRS, 2017-Present Copyright : (c) CNRS, 2017-Present
License : AGPL + CECILL v3 License : AGPL + CECILL v3
...@@ -7,10 +7,13 @@ Maintainer : team@gargantext.org ...@@ -7,10 +7,13 @@ Maintainer : team@gargantext.org
Stability : experimental Stability : experimental
Portability : POSIX Portability : POSIX
NodeNgramsNgrams table is used to group Ngrams lgrams: listed ngrams
- NodeId :: List Id
- NgramId_1, NgramId_2 where all NgramId_2 will be added to NgramId_1 Node_NodeNgrams_NodeNgrams table is used to group ngrams
- weight: probability of the relation (TODO, fixed to 1 for simple stemming) - first NodeId :: Referential / space node (corpus)
- NodeNgrams where Node is List
- lgrams1_id, lgrams2_id where all lgrams2_id will be added to lgrams1_id
- weight: score the relation
Next Step benchmark: Next Step benchmark:
- recursive queries of postgres - recursive queries of postgres
...@@ -29,69 +32,68 @@ Next Step benchmark: ...@@ -29,69 +32,68 @@ Next Step benchmark:
{-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-} {-# OPTIONS_GHC -fno-warn-orphans #-}
module Gargantext.Database.Schema.NodeNgramsNgrams module Gargantext.Database.Schema.Node_NodeNgramsNodeNgrams
where where
import Control.Lens (view)
import Control.Lens.TH (makeLensesWith, abbreviatedFields) import Control.Lens.TH (makeLensesWith, abbreviatedFields)
import Control.Monad.IO.Class (liftIO)
import Data.Maybe (Maybe) import Data.Maybe (Maybe)
import Data.Profunctor.Product.TH (makeAdaptorAndInstance) import Data.Profunctor.Product.TH (makeAdaptorAndInstance)
import Gargantext.Database.Utils (Cmd, runOpaQuery, connection) import Gargantext.Database.Utils (Cmd, runOpaQuery, mkCmd)
import Gargantext.Database.Types.Node (ListId) import Gargantext.Database.Types.Node (CorpusId)
import Gargantext.Database.Schema.Node (pgNodeId) import Gargantext.Database.Schema.Node (pgNodeId)
import Gargantext.Prelude import Gargantext.Prelude
import Opaleye import Opaleye
data NodeNgramsNgramsPoly node_id ngram1_id ngram2_id weight = data Node_NodeNgrams_NodeNgrams_Poly node_id nng1_id nng2_id weight =
NodeNgramsNgrams { _nng_NodeId :: node_id Node_NodeNgrams_NodeNgrams { _nnn_node_id :: node_id
, _nng_Ngram1Id :: ngram1_id , _nnn_nng1_id :: nng1_id
, _nng_Ngram2Id :: ngram2_id , _nnn_nng2_id :: nng2_id
, _nng_Weight :: weight , _nnn_weight :: weight
} deriving (Show) } deriving (Show)
type NodeNgramsNgramsWrite = type Node_NodeNgrams_NodeNgrams_Write =
NodeNgramsNgramsPoly (Column PGInt4 ) Node_NodeNgrams_NodeNgrams_Poly
(Column PGInt4 ) (Column PGInt4 )
(Column PGInt4 ) (Column PGInt4 )
(Maybe (Column PGFloat8)) (Column PGInt4 )
(Maybe (Column PGFloat8))
type NodeNgramsNgramsRead =
NodeNgramsNgramsPoly (Column PGInt4 ) type Node_NodeNgrams_NodeNgrams_Read =
(Column PGInt4 ) Node_NodeNgrams_NodeNgrams_Poly
(Column PGInt4 ) (Column PGInt4 )
(Column PGFloat8) (Column PGInt4 )
(Column PGInt4 )
type NodeNgramsNgrams = (Column PGFloat8)
NodeNgramsNgramsPoly ListId
Int type ListNgramsId = Int
Int
(Maybe Double) type Node_NodeNgrams_NodeNgrams =
Node_NodeNgrams_NodeNgrams_Poly CorpusId ListNgramsId ListNgramsId (Maybe Double)
$(makeAdaptorAndInstance "pNodeNgramsNgrams"
''NodeNgramsNgramsPoly) $(makeAdaptorAndInstance "pNode_NodeNgrams_NodeNgrams"
''Node_NodeNgrams_NodeNgrams_Poly)
$(makeLensesWith abbreviatedFields $(makeLensesWith abbreviatedFields
''NodeNgramsNgramsPoly) ''Node_NodeNgrams_NodeNgrams_Poly)
nodeNgramsNgramsTable :: Table NodeNgramsNgramsWrite NodeNgramsNgramsRead node_NodeNgrams_NodeNgrams_Table :: Table Node_NodeNgrams_NodeNgrams_Write Node_NodeNgrams_NodeNgrams_Read
nodeNgramsNgramsTable = node_NodeNgrams_NodeNgrams_Table =
Table "nodes_ngrams_ngrams" Table "nodes_nodengrams_nodengrams"
( pNodeNgramsNgrams NodeNgramsNgrams ( pNode_NodeNgrams_NodeNgrams Node_NodeNgrams_NodeNgrams
{ _nng_NodeId = required "node_id" { _nnn_node_id = required "node_id"
, _nng_Ngram1Id = required "ngram1_id" , _nnn_nng1_id = required "nng1_id"
, _nng_Ngram2Id = required "ngram2_id" , _nnn_nng2_id = required "nng2_id"
, _nng_Weight = optional "weight" , _nnn_weight = optional "weight"
} }
) )
queryNodeNgramsNgramsTable :: Query NodeNgramsNgramsRead queryNode_NodeNgrams_NodeNgrams_Table :: Query Node_NodeNgrams_NodeNgrams_Read
queryNodeNgramsNgramsTable = queryTable nodeNgramsNgramsTable queryNode_NodeNgrams_NodeNgrams_Table = queryTable node_NodeNgrams_NodeNgrams_Table
-- | Select NodeNgramsNgrams -- | Select NodeNgramsNgrams
-- TODO not optimized (get all ngrams without filters) -- TODO not optimized (get all ngrams without filters)
nodeNgramsNgrams :: Cmd err [NodeNgramsNgrams] node_Node_NodeNgrams_NodeNgrams :: Cmd err [Node_NodeNgrams_NodeNgrams]
nodeNgramsNgrams = runOpaQuery queryNodeNgramsNgramsTable node_Node_NodeNgrams_NodeNgrams = runOpaQuery queryNode_NodeNgrams_NodeNgrams_Table
instance QueryRunnerColumnDefault PGInt4 (Maybe Int) where instance QueryRunnerColumnDefault PGInt4 (Maybe Int) where
queryRunnerColumnDefault = fieldQueryRunnerColumn queryRunnerColumnDefault = fieldQueryRunnerColumn
...@@ -101,17 +103,19 @@ instance QueryRunnerColumnDefault PGFloat8 (Maybe Double) where ...@@ -101,17 +103,19 @@ instance QueryRunnerColumnDefault PGFloat8 (Maybe Double) where
-- TODO: Add option on conflict -- TODO: Add option on conflict
insertNodeNgramsNgramsNew :: [NodeNgramsNgrams] -> Cmd err Int insert_Node_NodeNgrams_NodeNgrams :: [Node_NodeNgrams_NodeNgrams] -> Cmd err Int64
insertNodeNgramsNgramsNew = insertNodeNgramsNgramsW insert_Node_NodeNgrams_NodeNgrams = insert_Node_NodeNgrams_NodeNgrams_W
. map (\(NodeNgramsNgrams n ng1 ng2 maybeWeight) -> . map (\(Node_NodeNgrams_NodeNgrams n ng1 ng2 maybeWeight) ->
NodeNgramsNgrams (pgNodeId n ) Node_NodeNgrams_NodeNgrams (pgNodeId n )
(pgInt4 ng1) (pgInt4 ng1)
(pgInt4 ng2) (pgInt4 ng2)
(pgDouble <$> maybeWeight) (pgDouble <$> maybeWeight)
) )
insertNodeNgramsNgramsW :: [NodeNgramsNgramsWrite] -> Cmd err Int insert_Node_NodeNgrams_NodeNgrams_W :: [Node_NodeNgrams_NodeNgrams_Write] -> Cmd err Int64
insertNodeNgramsNgramsW ns = do insert_Node_NodeNgrams_NodeNgrams_W ns =
c <- view connection mkCmd $ \c -> runInsert_ c Insert { iTable = node_NodeNgrams_NodeNgrams_Table
liftIO $ fromIntegral <$> runInsertMany c nodeNgramsNgramsTable ns , iRows = ns
, iReturning = rCount
, iOnConflict = (Just DoNothing)
}
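-- A hedged usage sketch (groupUnder is a hypothetical helper): record that
-- one listed ngrams should be folded into another within a corpus; per the
-- module docstring, every nng2_id row is merged into its nng1_id head, with
-- an optional weight.
groupUnder :: CorpusId -> ListNgramsId -> ListNgramsId -> Cmd err Int64
groupUnder cId headNg childNg =
  insert_Node_NodeNgrams_NodeNgrams
    [ Node_NodeNgrams_NodeNgrams cId headNg childNg (Just 1.0) ]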
...@@ -35,7 +35,7 @@ import Gargantext.Database.Schema.Ngrams ...@@ -35,7 +35,7 @@ import Gargantext.Database.Schema.Ngrams
import Gargantext.Database.Schema.NodeNode hiding (joinInCorpus) import Gargantext.Database.Schema.NodeNode hiding (joinInCorpus)
import Gargantext.Database.Schema.NodeNodeNgrams import Gargantext.Database.Schema.NodeNodeNgrams
import Gargantext.Database.Queries.Join (leftJoin6) import Gargantext.Database.Queries.Join (leftJoin6)
import Gargantext.Database.Utils (Cmd, runPGSQuery, runOpaQuery) import Gargantext.Database.Utils (Cmd, runPGSQuery, runOpaQuery, runCountOpaQuery)
import Gargantext.Text.Terms.Mono.Stem.En (stemIt) import Gargantext.Text.Terms.Mono.Stem.En (stemIt)
import Gargantext.Core.Types import Gargantext.Core.Types
import Control.Arrow (returnA) import Control.Arrow (returnA)
...@@ -72,6 +72,15 @@ searchInCorpus cId t q o l order = runOpaQuery ...@@ -72,6 +72,15 @@ searchInCorpus cId t q o l order = runOpaQuery
$ intercalate " | " $ intercalate " | "
$ map stemIt q $ map stemIt q
searchCountInCorpus :: CorpusId
-> IsTrash
-> [Text]
-> Cmd err Int
searchCountInCorpus cId t q = runCountOpaQuery
$ queryInCorpus cId t
$ intercalate " | "
$ map stemIt q
queryInCorpus :: CorpusId queryInCorpus :: CorpusId
-> IsTrash -> IsTrash
-> Text -> Text
......
...@@ -108,7 +108,7 @@ dbTree rootId = map (\(nId, tId, pId, n) -> DbTreeNode nId tId pId n) ...@@ -108,7 +108,7 @@ dbTree rootId = map (\(nId, tId, pId, n) -> DbTreeNode nId tId pId n)
FROM nodes AS c FROM nodes AS c
INNER JOIN tree AS s ON c.parent_id = s.id INNER JOIN tree AS s ON c.parent_id = s.id
-- WHERE c.typename IN (2,20,21,22,3,5,30,31,40,7,9,90) WHERE c.typename IN (2,20,21,22,3,5,30,31,40,7,9,90)
) )
SELECT * from tree; SELECT * from tree;
|] (Only rootId) |] (Only rootId)
......
{-|
Module : Gargantext.Database.Triggers.NodeNodeNgrams
Description : Triggers configuration
Copyright : (c) CNRS, 2017-Present
License : AGPL + CECILL v3
Maintainer : team@gargantext.org
Stability : experimental
Portability : POSIX
Triggers on NodeNodeNgrams table.
-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
module Gargantext.Database.Triggers.NodeNodeNgrams
where
import Database.PostgreSQL.Simple.SqlQQ (sql)
-- import Database.PostgreSQL.Simple.Types (Values(..), QualifiedIdentifier(..))
import Gargantext.Database.Config (nodeTypeId)
import Gargantext.Database.Types.Node -- (ListId, CorpusId, NodeId)
import Gargantext.Database.Utils (Cmd, execPGSQuery)
import Gargantext.Prelude
import qualified Database.PostgreSQL.Simple as DPS
triggerCountInsert :: Cmd err Int64
triggerCountInsert = execPGSQuery query (nodeTypeId NodeDocument, nodeTypeId NodeList)
where
query :: DPS.Query
query = [sql|
CREATE OR REPLACE FUNCTION set_ngrams_global_count() RETURNS trigger AS $$
BEGIN
IF pg_trigger_depth() <> 1 THEN
RETURN NEW;
END IF;
IF TG_OP = 'INSERT' THEN
INSERT INTO node_node_ngrams (node1_id, node2_id, ngrams_id, ngrams_type, weight)
select n.parent_id, n.id, new1.ngrams_id, new1.ngrams_type, count(*) from NEW as new1
INNER JOIN nodes n ON n.id = new1.node1_id
INNER JOIN nodes n2 ON n2.id = new1.node2_id
WHERE n2.typename = ? -- not mandatory
AND n.typename = ? -- not mandatory
AND n.parent_id <> n2.id -- not mandatory
GROUP BY n.parent_id, n.id, new1.ngrams_id, new1.ngrams_type
ON CONFLICT (node1_id, node2_id, ngrams_id, ngrams_type)
DO UPDATE set weight = node_node_ngrams.weight + excluded.weight
;
END IF;
RETURN NULL;
END
$$ LANGUAGE plpgsql;
-- DROP trigger trigger_count_insert on node_node_ngrams;
CREATE TRIGGER trigger_count_insert AFTER INSERT on node_node_ngrams
REFERENCING NEW TABLE AS NEW
FOR EACH STATEMENT
EXECUTE PROCEDURE set_ngrams_global_count();
|]
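-- Added note: the pg_trigger_depth() guard matters because this trigger
-- itself inserts into node_node_ngrams (aggregating document-level rows up to
-- the parent corpus); without the early RETURN, the statement-level trigger
-- would fire again on its own INSERT and recurse.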
{-|
Module : Gargantext.Database.Triggers.Nodes
Description : Triggers configuration
Copyright : (c) CNRS, 2017-Present
License : AGPL + CECILL v3
Maintainer : team@gargantext.org
Stability : experimental
Portability : POSIX
Triggers on Nodes table.
-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
module Gargantext.Database.Triggers.Nodes
where
import Database.PostgreSQL.Simple.SqlQQ (sql)
-- import Database.PostgreSQL.Simple.Types (Values(..), QualifiedIdentifier(..))
import Gargantext.Database.Config (nodeTypeId)
import Gargantext.Database.Types.Node -- (ListId, CorpusId, NodeId)
import Gargantext.Database.Utils (Cmd, execPGSQuery)
import Gargantext.Prelude
import qualified Database.PostgreSQL.Simple as DPS
triggerSearchUpdate :: Cmd err Int64
triggerSearchUpdate = execPGSQuery query ( nodeTypeId NodeDocument
, nodeTypeId NodeDocument
, nodeTypeId NodeContact
)
where
query :: DPS.Query
query = [sql|
CREATE OR REPLACE FUNCTION public.search_update()
RETURNS trigger AS $$
begin
IF new.typename = ? AND new.hyperdata @> '{"language_iso2":"EN"}' THEN
new.search := to_tsvector( 'english' , (new.hyperdata ->> 'title') || ' ' || (new.hyperdata ->> 'abstract'));
ELSIF new.typename = ? AND new.hyperdata @> '{"language_iso2":"FR"}' THEN
new.search := to_tsvector( 'french' , (new.hyperdata ->> 'title') || ' ' || (new.hyperdata ->> 'abstract'));
ELSIF new.typename = ? THEN
new.search := to_tsvector( 'french' , (new.hyperdata ->> 'prenom')
|| ' ' || (new.hyperdata ->> 'nom')
|| ' ' || (new.hyperdata ->> 'fonction')
);
ELSE
new.search := to_tsvector( 'english' , new.name);
END IF;
return new;
end
$$ LANGUAGE plpgsql;
ALTER FUNCTION public.search_update() OWNER TO gargantua;
CREATE TRIGGER search_update_trigger
BEFORE INSERT OR UPDATE
ON nodes FOR EACH ROW
EXECUTE PROCEDURE search_update();
-- Initialize index with already existing data
UPDATE nodes SET hyperdata = hyperdata;
|]
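-- A hedged sketch (reindexSearchColumn is a hypothetical helper, not in this
-- commit): the trailing "UPDATE nodes SET hyperdata = hyperdata" above fires
-- the new trigger once per existing row to backfill the search column; the
-- same statement could be exposed on its own if the index ever needs
-- rebuilding.
reindexSearchColumn :: Cmd err Int64
reindexSearchColumn = execPGSQuery [sql| UPDATE nodes SET hyperdata = hyperdata; |] ()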
{-|
Module : Gargantext.Database.Triggers.NodesNodes
Description : Triggers configuration
Copyright : (c) CNRS, 2017-Present
License : AGPL + CECILL v3
Maintainer : team@gargantext.org
Stability : experimental
Portability : POSIX
Triggers on NodesNodes table.
-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
module Gargantext.Database.Triggers.NodesNodes
where
import Database.PostgreSQL.Simple.SqlQQ (sql)
-- import Database.PostgreSQL.Simple.Types (Values(..), QualifiedIdentifier(..))
import Gargantext.Database.Config (nodeTypeId)
import Gargantext.Database.Types.Node -- (ListId, CorpusId, NodeId)
import Gargantext.Database.Utils (Cmd, execPGSQuery)
import Gargantext.Prelude
import qualified Database.PostgreSQL.Simple as DPS
type MasterListId = ListId
triggerDeleteCount :: MasterListId -> Cmd err Int64
triggerDeleteCount lId = execPGSQuery query (lId, nodeTypeId NodeList)
where
query :: DPS.Query
query = [sql|
CREATE OR REPLACE FUNCTION set_delete_count() RETURNS trigger AS $$
BEGIN
UPDATE node_node_ngrams SET weight = weight - d.delete_count
FROM (SELECT old1.node1_id as node1_id, lists.id as node2_id, nnn.ngrams_id as ngrams_id, nnn.ngrams_type as ngrams_type, count(*) as delete_count FROM OLD as old1
INNER JOIN nodes doc ON doc.id = old1.node2_id
INNER JOIN nodes lists ON lists.parent_id = old1.node1_id
INNER JOIN node_node_ngrams nnn ON nnn.node2_id = doc.id
WHERE nnn.node1_id in (?, lists.id)
AND lists.typename = ?
GROUP BY old1.node1_id, lists.id, nnn.ngrams_id, nnn.ngrams_type
) AS d
WHERE node_node_ngrams.node1_id = d.node1_id
AND node_node_ngrams.node2_id = d.node2_id
AND node_node_ngrams.ngrams_id = d.ngrams_id
AND node_node_ngrams.ngrams_type = d.ngrams_type
;
RETURN NULL;
END
$$ LANGUAGE plpgsql;
-- DROP trigger trigger_delete_count on nodes_nodes;
CREATE TRIGGER trigger_delete_count AFTER DELETE on nodes_nodes
REFERENCING OLD TABLE AS OLD
FOR EACH STATEMENT
EXECUTE PROCEDURE set_delete_count();
|]
triggerInsertCount :: MasterListId -> Cmd err Int64
triggerInsertCount lId = execPGSQuery query (lId, nodeTypeId NodeList)
where
query :: DPS.Query
query = [sql|
CREATE OR REPLACE FUNCTION set_insert_count() RETURNS trigger AS $$
BEGIN
INSERT INTO node_node_ngrams (node1_id, node2_id, ngrams_id, ngrams_type, weight)
SELECT new1.node1_id , lists.id, nnn.ngrams_id, nnn.ngrams_type, count(*) as weight from NEW as new1
INNER JOIN nodes doc ON doc.id = new1.node2_id
INNER JOIN nodes lists ON lists.parent_id = new1.node1_id
INNER JOIN node_node_ngrams nnn ON nnn.node2_id = doc.id
WHERE nnn.node1_id in (?, lists.id)
AND lists.typename = ?
GROUP BY new1.node1_id, lists.id, nnn.ngrams_id, nnn.ngrams_type
ON CONFLICT (node1_id, node2_id, ngrams_id, ngrams_type)
DO UPDATE set weight = node_node_ngrams.weight + excluded.weight
;
RETURN NULL;
END
$$ LANGUAGE plpgsql;
-- DROP trigger trigger_insert_count on nodes_nodes;
CREATE TRIGGER trigger_insert_count AFTER INSERT on nodes_nodes
REFERENCING NEW TABLE AS NEW
FOR EACH STATEMENT
EXECUTE PROCEDURE set_insert_count();
|]
triggerUpdateAdd :: MasterListId -> Cmd err Int64
triggerUpdateAdd lId = execPGSQuery query (lId, nodeTypeId NodeList)
where
query :: DPS.Query
query = [sql|
CREATE OR REPLACE FUNCTION set_update_ngrams_add() RETURNS trigger AS $$
BEGIN
UPDATE node_node_ngrams nnn0 SET weight = weight + d.fix_count
FROM (SELECT new1.node1_id as node1_id, lists.id as node2_id, nnn.ngrams_id as ngrams_id, nnn.ngrams_type as ngrams_type, count(*) as fix_count
FROM NEW as new1
INNER JOIN nodes lists ON new1.node1_id = lists.parent_id
INNER JOIN node_node_ngrams nnn ON new1.node2_id = nnn.node2_id
WHERE nnn.node1_id in (?, lists.id) -- (masterList_id, userLists)
AND lists.typename = ?
GROUP BY new1.node1_id, lists.id, nnn.ngrams_id, nnn.ngrams_type
) as d
WHERE nnn0.node1_id = d.node1_id
AND nnn0.node2_id = d.node2_id
AND nnn0.ngrams_id = d.ngrams_id
AND nnn0.ngrams_type = d.ngrams_type
;
RETURN NULL;
END
$$ LANGUAGE plpgsql;
-- DROP trigger trigger_count_update_add on nodes_nodes;
CREATE TRIGGER trigger_count_update_add AFTER UPDATE on nodes_nodes
REFERENCING OLD TABLE AS OLD NEW TABLE AS NEW
FOR EACH ROW
WHEN (OLD.category <= 0 AND NEW.category >= 1)
EXECUTE PROCEDURE set_update_ngrams_add();
|]
triggerUpdateDel :: MasterListId -> Cmd err Int64
triggerUpdateDel lId = execPGSQuery query (lId, nodeTypeId NodeList)
where
query :: DPS.Query
query = [sql|
CREATE OR REPLACE FUNCTION set_update_ngrams_count_del() RETURNS trigger AS $$
BEGIN
UPDATE node_node_ngrams nnn0 SET weight = weight - d.fix_count
FROM (SELECT new1.node1_id as node1_id, lists.id as node2_id, nnn.ngrams_id as ngrams_id, nnn.ngrams_type as ngrams_type, count(*) as fix_count
FROM NEW as new1
INNER JOIN nodes lists ON new1.node1_id = lists.parent_id
INNER JOIN node_node_ngrams nnn ON new1.node2_id = nnn.node2_id
WHERE nnn.node1_id in (?, lists.id) -- (masterList_id, userLists)
AND lists.typename = ?
GROUP BY new1.node1_id, lists.id, nnn.ngrams_id, nnn.ngrams_type
) as d
WHERE nnn0.node1_id = d.node1_id
AND nnn0.node2_id = d.node2_id
AND nnn0.ngrams_id = d.ngrams_id
AND nnn0.ngrams_type = d.ngrams_type
;
RETURN NULL;
END
$$ LANGUAGE plpgsql;
-- DROP trigger trigger_count_delete2 on nodes_nodes;
CREATE TRIGGER trigger_count_delete2 AFTER UPDATE on nodes_nodes
REFERENCING OLD TABLE AS OLD NEW TABLE AS NEW
FOR EACH ROW
WHEN (OLD.category >= 1 AND NEW.category <= 0)
EXECUTE PROCEDURE set_update_ngrams_count_del();
|]
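-- Added note (hedged interpretation): the two UPDATE triggers mirror the
-- INSERT/DELETE ones. When a document's category in nodes_nodes crosses from
-- <= 0 up to >= 1, its ngrams counts are added back (set_update_ngrams_add);
-- when it crosses from >= 1 down to <= 0, they are subtracted
-- (set_update_ngrams_count_del).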
...@@ -62,13 +62,13 @@ import Gargantext.Core.Utils.Prefix (unPrefix, unPrefixSwagger) ...@@ -62,13 +62,13 @@ import Gargantext.Core.Utils.Prefix (unPrefix, unPrefixSwagger)
import Gargantext.Viz.Phylo (Phylo) import Gargantext.Viz.Phylo (Phylo)
--import Gargantext.Database.Utils --import Gargantext.Database.Utils
------------------------------------------------------------------------ ------------------------------------------------------------------------
newtype NodeId = NodeId Int newtype NodeId = NodeId Int
deriving (Show, Read, Generic, Num, Eq, Ord, Enum, ToJSONKey, FromJSONKey, ToJSON, FromJSON) deriving (Show, Read, Generic, Num, Eq, Ord, Enum, ToJSONKey, FromJSONKey, ToJSON, FromJSON)
instance ToField NodeId where instance ToField NodeId where
toField (NodeId n) = toField n toField (NodeId n) = toField n
instance FromField NodeId where instance FromField NodeId where
fromField field mdata = do fromField field mdata = do
n <- fromField field mdata n <- fromField field mdata
...@@ -78,6 +78,7 @@ instance FromField NodeId where ...@@ -78,6 +78,7 @@ instance FromField NodeId where
instance ToSchema NodeId instance ToSchema NodeId
type NodeTypeId = Int type NodeTypeId = Int
type NodeName = Text type NodeName = Text
type TSVector = Text type TSVector = Text
...@@ -87,13 +88,13 @@ data NodePoly id typename userId ...@@ -87,13 +88,13 @@ data NodePoly id typename userId
parentId name date parentId name date
hyperdata = Node { _node_id :: id hyperdata = Node { _node_id :: id
, _node_typename :: typename , _node_typename :: typename
, _node_userId :: userId , _node_userId :: userId
, _node_parentId :: parentId , _node_parentId :: parentId
, _node_name :: name , _node_name :: name
, _node_date :: date , _node_date :: date
, _node_hyperdata :: hyperdata , _node_hyperdata :: hyperdata
} deriving (Show, Generic) } deriving (Show, Generic)
$(deriveJSON (unPrefix "_node_") ''NodePoly) $(deriveJSON (unPrefix "_node_") ''NodePoly)
...@@ -103,7 +104,6 @@ $(makeLenses ''NodePoly) ...@@ -103,7 +104,6 @@ $(makeLenses ''NodePoly)
type Node json = NodePoly NodeId NodeTypeId UserId (Maybe ParentId) NodeName UTCTime json type Node json = NodePoly NodeId NodeTypeId UserId (Maybe ParentId) NodeName UTCTime json
------------------------------------------------------------------------ ------------------------------------------------------------------------
...@@ -401,13 +401,6 @@ $(deriveJSON (unPrefix "hyperdataDashboard_") ''HyperdataDashboard) ...@@ -401,13 +401,6 @@ $(deriveJSON (unPrefix "hyperdataDashboard_") ''HyperdataDashboard)
instance Hyperdata HyperdataDashboard instance Hyperdata HyperdataDashboard
-- TODO add the Graph Structure here
data HyperdataGraph = HyperdataGraph { hyperdataGraph_preferences :: !(Maybe Text)
} deriving (Show, Generic)
$(deriveJSON (unPrefix "hyperdataGraph_") ''HyperdataGraph)
instance Hyperdata HyperdataGraph
------------------------------------------------------------------------ ------------------------------------------------------------------------
-- TODO add the Graph Structure here -- TODO add the Graph Structure here
...@@ -451,6 +444,7 @@ data NodeType = NodeUser ...@@ -451,6 +444,7 @@ data NodeType = NodeUser
| NodeGraph | NodePhylo | NodeGraph | NodePhylo
| NodeDashboard | NodeChart | NodeNoteBook | NodeDashboard | NodeChart | NodeNoteBook
| NodeList | NodeListModel | NodeList | NodeListModel
| NodeListCooc
deriving (Show, Read, Eq, Generic, Bounded, Enum) deriving (Show, Read, Eq, Generic, Bounded, Enum)
......
...@@ -30,6 +30,7 @@ import Control.Monad.Except ...@@ -30,6 +30,7 @@ import Control.Monad.Except
import Data.Aeson (Result(Error,Success), fromJSON, FromJSON) import Data.Aeson (Result(Error,Success), fromJSON, FromJSON)
import Data.Either.Extra (Either(Left, Right)) import Data.Either.Extra (Either(Left, Right))
import Data.Ini (readIniFile, lookupValue) import Data.Ini (readIniFile, lookupValue)
import qualified Data.List as DL
import Data.Maybe (maybe) import Data.Maybe (maybe)
import Data.Monoid ((<>)) import Data.Monoid ((<>))
import Data.Profunctor.Product.Default (Default) import Data.Profunctor.Product.Default (Default)
...@@ -41,6 +42,7 @@ import Database.PostgreSQL.Simple.FromField ( Conversion, ResultError(Conversion ...@@ -41,6 +42,7 @@ import Database.PostgreSQL.Simple.FromField ( Conversion, ResultError(Conversion
import Database.PostgreSQL.Simple.Internal (Field) import Database.PostgreSQL.Simple.Internal (Field)
import Gargantext.Prelude import Gargantext.Prelude
import Opaleye (Query, Unpackspec, showSqlForPostgres, FromFields, Select, runQuery) import Opaleye (Query, Unpackspec, showSqlForPostgres, FromFields, Select, runQuery)
import Opaleye.Aggregate (countRows)
import System.IO (FilePath) import System.IO (FilePath)
import Text.Read (read) import Text.Read (read)
import qualified Data.ByteString as DB import qualified Data.ByteString as DB
...@@ -67,20 +69,30 @@ type Cmd' env err a = forall m. CmdM' env err m => m a ...@@ -67,20 +69,30 @@ type Cmd' env err a = forall m. CmdM' env err m => m a
type Cmd err a = forall m env. CmdM env err m => m a type Cmd err a = forall m env. CmdM env err m => m a
fromInt64ToInt :: Int64 -> Int
fromInt64ToInt = fromIntegral
-- TODO: ideally there should be very few calls to this function. -- TODO: ideally there should be very few calls to this function.
mkCmd :: (Connection -> IO a) -> Cmd err a mkCmd :: (Connection -> IO a) -> Cmd err a
mkCmd k = do mkCmd k = do
conn <- view connection conn <- view connection
liftIO $ k conn liftIO $ k conn
runCmd :: (HasConnection env) => env runCmd :: (HasConnection env)
-> Cmd' env err a => env -> Cmd' env err a
-> IO (Either err a) -> IO (Either err a)
runCmd env m = runExceptT $ runReaderT m env runCmd env m = runExceptT $ runReaderT m env
runOpaQuery :: Default FromFields fields haskells => Select fields -> Cmd err [haskells] runOpaQuery :: Default FromFields fields haskells
=> Select fields -> Cmd err [haskells]
runOpaQuery q = mkCmd $ \c -> runQuery c q runOpaQuery q = mkCmd $ \c -> runQuery c q
runCountOpaQuery :: Select a -> Cmd err Int
runCountOpaQuery q = do
counts <- mkCmd $ \c -> runQuery c $ countRows q
-- countRows is guaranteed to return a list with exactly one row so DL.head is safe here
pure $ fromInt64ToInt $ DL.head counts
formatPGSQuery :: PGS.ToRow a => PGS.Query -> a -> Cmd err DB.ByteString formatPGSQuery :: PGS.ToRow a => PGS.Query -> a -> Cmd err DB.ByteString
formatPGSQuery q a = mkCmd $ \conn -> PGS.formatQuery conn q a formatPGSQuery q a = mkCmd $ \conn -> PGS.formatQuery conn q a
......
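The new runCountOpaQuery wraps Opaleye's countRows, which always yields exactly one Int64 row, and converts that count to Int. A minimal usage sketch, assuming a queryNodeTable Select over the nodes table is in scope (both the helper and that name are illustrative, not part of this commit):

-- Hypothetical helper built on runCountOpaQuery; queryNodeTable is assumed.
countAllNodes :: Cmd err Int
countAllNodes = runCountOpaQuery queryNodeTable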
...@@ -17,7 +17,7 @@ module Gargantext.Text.List ...@@ -17,7 +17,7 @@ module Gargantext.Text.List
where where
import Data.Either (partitionEithers, Either(..)) import Data.Either (partitionEithers, Either(..))
import Debug.Trace (trace) -- import Debug.Trace (trace)
import Data.Map (Map) import Data.Map (Map)
import Data.Set (Set) import Data.Set (Set)
import Data.Text (Text) import Data.Text (Text)
...@@ -161,7 +161,7 @@ toList stop l n = case stop n of ...@@ -161,7 +161,7 @@ toList stop l n = case stop n of
toTermList :: Int -> Int -> (a -> Bool) -> [a] -> [(ListType, a)] toTermList :: Int -> Int -> (a -> Bool) -> [a] -> [(ListType, a)]
toTermList _ _ _ [] = [] toTermList _ _ _ [] = []
toTermList a b stop ns = trace ("computing toTermList") $ toTermList a b stop ns = -- trace ("computing toTermList") $
map (toList stop CandidateTerm) xs map (toList stop CandidateTerm) xs
<> map (toList stop GraphTerm) ys <> map (toList stop GraphTerm) ys
<> toTermList a b stop zs <> toTermList a b stop zs
......
...@@ -9,6 +9,7 @@ Portability : POSIX ...@@ -9,6 +9,7 @@ Portability : POSIX
-} -}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE DeriveGeneric #-}
...@@ -26,7 +27,7 @@ import GHC.Generics (Generic) ...@@ -26,7 +27,7 @@ import GHC.Generics (Generic)
import GHC.IO (FilePath) import GHC.IO (FilePath)
import Gargantext.Core.Utils.Prefix (unPrefix, unPrefixSwagger) import Gargantext.Core.Utils.Prefix (unPrefix, unPrefixSwagger)
import Gargantext.Core.Types (ListId) import Gargantext.Core.Types (ListId)
import Gargantext.Database.Types.Node (NodeId) import Gargantext.Database.Types.Node (NodeId, Hyperdata)
import Gargantext.Prelude import Gargantext.Prelude
import Test.QuickCheck (elements) import Test.QuickCheck (elements)
import Test.QuickCheck.Arbitrary (Arbitrary, arbitrary) import Test.QuickCheck.Arbitrary (Arbitrary, arbitrary)
...@@ -82,11 +83,24 @@ instance ToSchema LegendField where ...@@ -82,11 +83,24 @@ instance ToSchema LegendField where
declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_lf_") declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_lf_")
makeLenses ''LegendField makeLenses ''LegendField
---------------------------------------------------------------
type Version = Int
data ListForGraph = ListForGraph { _lfg_listId :: ListId
, _lfg_version :: Version
} deriving (Show, Generic)
$(deriveJSON (unPrefix "_lfg_") ''ListForGraph)
instance ToSchema ListForGraph where
declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_lfg_")
makeLenses ''ListForGraph
-- --
data GraphMetadata = GraphMetadata { _gm_title :: Text -- title of the graph data GraphMetadata = GraphMetadata { _gm_title :: Text -- title of the graph
, _gm_corpusId :: [NodeId] -- we can map with different corpus , _gm_corpusId :: [NodeId] -- we can map with different corpus
, _gm_legend :: [LegendField] -- legend of the Graph , _gm_legend :: [LegendField] -- legend of the Graph
, _gm_listId :: ListId , _gm_list :: ListForGraph
, _gm_version :: Int
} }
deriving (Show, Generic) deriving (Show, Generic)
$(deriveJSON (unPrefix "_gm_") ''GraphMetadata) $(deriveJSON (unPrefix "_gm_") ''GraphMetadata)
...@@ -143,6 +157,15 @@ data GraphV3 = GraphV3 { go_links :: [EdgeV3] ...@@ -143,6 +157,15 @@ data GraphV3 = GraphV3 { go_links :: [EdgeV3]
$(deriveJSON (unPrefix "go_") ''GraphV3) $(deriveJSON (unPrefix "go_") ''GraphV3)
----------------------------------------------------------- -----------------------------------------------------------
data HyperdataGraph = HyperdataGraph { _hyperdataGraph :: !(Maybe Graph)
} deriving (Show, Generic)
$(deriveJSON (unPrefix "") ''HyperdataGraph)
instance Hyperdata HyperdataGraph
makeLenses ''HyperdataGraph
----------------------------------------------------------- -----------------------------------------------------------
graphV3ToGraph :: GraphV3 -> Graph graphV3ToGraph :: GraphV3 -> Graph
......
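GraphMetadata now nests the list reference and its version in a ListForGraph record (JSON key "list", via unPrefix "_gm_"), alongside the existing top-level version. A hedged construction sketch, assuming OverloadedStrings and that ListId is the usual NodeId newtype; every value is made up:

-- Illustrative only, not part of this commit; encodes roughly as
-- {"title":"Demo","corpusId":[12],"legend":[...],"list":{"listId":34,"version":2},"version":2}
demoMetadata :: GraphMetadata
demoMetadata = GraphMetadata "Demo"
                             [NodeId 12]
                             [LegendField 1 "#FFF" "Cluster"]
                             (ListForGraph (NodeId 34) 2)
                             2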
...@@ -24,8 +24,11 @@ Portability : POSIX ...@@ -24,8 +24,11 @@ Portability : POSIX
module Gargantext.Viz.Graph.API module Gargantext.Viz.Graph.API
where where
import Control.Lens (set) import Debug.Trace (trace)
import Control.Lens (set, (^.), _Just, (^?))
import Control.Monad.IO.Class (liftIO) import Control.Monad.IO.Class (liftIO)
import Data.Maybe (Maybe(..))
import Gargantext.API.Ngrams (currentVersion)
import Gargantext.API.Ngrams.Tools import Gargantext.API.Ngrams.Tools
import Gargantext.API.Types import Gargantext.API.Types
import Gargantext.Core.Types.Main import Gargantext.Core.Types.Main
...@@ -33,9 +36,9 @@ import Gargantext.Database.Config ...@@ -33,9 +36,9 @@ import Gargantext.Database.Config
import Gargantext.Database.Metrics.NgramsByNode (getNodesByNgramsOnlyUser) import Gargantext.Database.Metrics.NgramsByNode (getNodesByNgramsOnlyUser)
import Gargantext.Database.Schema.Ngrams import Gargantext.Database.Schema.Ngrams
import Gargantext.Database.Node.Select import Gargantext.Database.Node.Select
import Gargantext.Database.Schema.Node (getNode) import Gargantext.Database.Schema.Node (getNodeWith, defaultList, insertGraph)
import Gargantext.Database.Schema.Node (defaultList)
import Gargantext.Database.Types.Node hiding (node_id) -- (GraphId, ListId, CorpusId, NodeId) import Gargantext.Database.Types.Node hiding (node_id) -- (GraphId, ListId, CorpusId, NodeId)
import Gargantext.Database.Node.UpdateOpaleye (updateHyperdata)
import Gargantext.Prelude import Gargantext.Prelude
import Gargantext.Viz.Graph import Gargantext.Viz.Graph
import Gargantext.Viz.Graph.Tools -- (cooc2graph) import Gargantext.Viz.Graph.Tools -- (cooc2graph)
...@@ -51,39 +54,73 @@ type GraphAPI = Get '[JSON] Graph ...@@ -51,39 +54,73 @@ type GraphAPI = Get '[JSON] Graph
:<|> Put '[JSON] Int :<|> Put '[JSON] Int
graphAPI :: NodeId -> GargServer GraphAPI graphAPI :: UserId -> NodeId -> GargServer GraphAPI
graphAPI n = getGraph n graphAPI u n = getGraph u n
:<|> postGraph n :<|> postGraph n
:<|> putGraph n :<|> putGraph n
------------------------------------------------------------------------ ------------------------------------------------------------------------
getGraph :: NodeId -> GargServer (Get '[JSON] Graph) getGraph :: UserId -> NodeId -> GargServer (Get '[JSON] Graph)
getGraph nId = do getGraph uId nId = do
nodeGraph <- getNode nId HyperdataGraph nodeGraph <- getNodeWith nId HyperdataGraph
-- get HyperdataGraphp from Database let graph = nodeGraph ^. node_hyperdata . hyperdataGraph
-- if Nothing else if version == current version then compute let graphVersion = graph ^? _Just
. graph_metadata
let cId = maybe (panic "no parentId") identity $ _node_parentId nodeGraph . _Just
. gm_version
v <- currentVersion
nodeUser <- getNodeWith (NodeId uId) HyperdataUser
let uId' = nodeUser ^. node_userId
let cId = maybe (panic "[ERR:G.V.G.API] Node has no parent")
identity
$ nodeGraph ^. node_parentId
g <- case graph of
Nothing -> do
graph' <- computeGraph cId NgramsTerms v
_ <- insertGraph cId uId' (HyperdataGraph $ Just graph')
pure graph'
Just graph' -> if graphVersion == Just v
then pure graph'
else do
graph'' <- computeGraph cId NgramsTerms v
_ <- updateHyperdata nId (HyperdataGraph $ Just graph'')
pure graph''
pure $ trace ("salut" <> show g) $ g
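Stripped of the database plumbing, the branching in getGraph is a small cache policy: recompute when no graph is stored or when the stored metadata version no longer matches the current list version, and reuse the stored graph otherwise. A standalone restatement, with names that are illustrative rather than taken from this commit:

-- Hedged restatement of the decision getGraph makes before calling computeGraph.
shouldRecompute :: Eq v => v -> Maybe g -> Maybe v -> Bool
shouldRecompute _       Nothing  _      = True                    -- nothing cached yet
shouldRecompute current (Just _) stored = stored /= Just current  -- cache is stale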
-- TODO use Database Monad only here ?
computeGraph :: CorpusId -> NgramsType -> Int -> GargServer (Get '[JSON] Graph)
computeGraph cId nt v = do
lId <- defaultList cId lId <- defaultList cId
v' <- currentVersion
let metadata = GraphMetadata "Title" [maybe 0 identity $ _node_parentId nodeGraph] let metadata = GraphMetadata "Title" [cId]
[ LegendField 1 "#FFF" "Cluster" [ LegendField 1 "#FFF" "Cluster"
, LegendField 2 "#FFF" "Cluster" , LegendField 2 "#FFF" "Cluster"
] ]
lId (ListForGraph lId v')
v
-- (map (\n -> LegendField n "#FFFFFF" (pack $ show n)) [1..10]) -- (map (\n -> LegendField n "#FFFFFF" (pack $ show n)) [1..10])
lIds <- selectNodesWithUsername NodeList userMaster lIds <- selectNodesWithUsername NodeList userMaster
repo <- getRepo repo <- getRepo
let ngs = filterListWithRoot GraphTerm $ mapTermListRoot [lId] NgramsTerms repo let ngs = filterListWithRoot GraphTerm <$> mapTermListRoot [lId] nt repo
myCooc <- Map.filter (>1) <$> getCoocByNgrams (Diagonal False) myCooc <- Map.filter (>1)
<$> groupNodesByNgrams ngs <$> getCoocByNgrams (Diagonal True)
<$> getNodesByNgramsOnlyUser cId (lIds <> [lId]) NgramsTerms (Map.keys ngs) <$> groupNodesByNgrams ngs
<$> getNodesByNgramsOnlyUser cId (lIds <> [lId]) nt (Map.keys ngs)
graph <- liftIO $ cooc2graph 0 myCooc graph <- liftIO $ cooc2graph 0 myCooc
pure $ set graph_metadata (Just metadata) graph let graph' = set graph_metadata (Just metadata) graph
pure graph'
postGraph :: NodeId -> GargServer (Post '[JSON] [NodeId]) postGraph :: NodeId -> GargServer (Post '[JSON] [NodeId])
...@@ -92,8 +129,3 @@ postGraph = undefined ...@@ -92,8 +129,3 @@ postGraph = undefined
putGraph :: NodeId -> GargServer (Put '[JSON] Int) putGraph :: NodeId -> GargServer (Put '[JSON] Int)
putGraph = undefined putGraph = undefined
-- | Instances
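Because graph construction now lives in its own computeGraph, it can be reused outside the GET handler, for instance to force a rebuild at the current list version. A hedged sketch (the wrapper name is an assumption; computeGraph and currentVersion are used exactly as above):

-- Hypothetical wrapper, not part of this commit:
recomputeCorpusGraph :: CorpusId -> GargServer (Get '[JSON] Graph)
recomputeCorpusGraph cId = do
  v <- currentVersion
  computeGraph cId NgramsTerms v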
...@@ -15,10 +15,11 @@ Portability : POSIX ...@@ -15,10 +15,11 @@ Portability : POSIX
module Gargantext.Viz.Graph.Tools module Gargantext.Viz.Graph.Tools
where where
--import Debug.Trace (trace) import Debug.Trace (trace)
import Data.Graph.Clustering.Louvain.CplusPlus (LouvainNode(..)) import Data.Graph.Clustering.Louvain.CplusPlus (LouvainNode(..))
import Data.Graph.Clustering.Louvain.CplusPlus (cLouvain) import Data.Graph.Clustering.Louvain.CplusPlus (cLouvain)
import Data.Map (Map) import Data.Map (Map)
import qualified Data.Set as Set
import Data.Text (Text) import Data.Text (Text)
import Gargantext.Prelude import Gargantext.Prelude
import Gargantext.Core.Statistics import Gargantext.Core.Statistics
...@@ -35,33 +36,59 @@ import qualified Data.Vector.Storable as Vec ...@@ -35,33 +36,59 @@ import qualified Data.Vector.Storable as Vec
import qualified Data.Map as Map import qualified Data.Map as Map
import qualified Data.List as List import qualified Data.List as List
type Threshold = Int type Threshold = Double
cooc2graph :: Threshold -> (Map (Text, Text) Int) -> IO Graph cooc2graph :: Threshold
-> (Map (Text, Text) Int)
-> IO Graph
cooc2graph threshold myCooc = do cooc2graph threshold myCooc = do
let (ti, _) = createIndices myCooc let (ti, _) = createIndices myCooc
myCooc' = toIndex ti myCooc myCooc' = toIndex ti myCooc
matCooc = map2mat (0) (Map.size ti) $ Map.filter (>threshold) myCooc' matCooc = map2mat 0 (Map.size ti) $ Map.filter (> 1) myCooc'
distanceMat = measureConditional matCooc distanceMat = measureConditional matCooc
distanceMap = Map.filter (>0.01) $ mat2map distanceMat distanceMap = Map.filter (> threshold) $ mat2map distanceMat
let nodesApprox :: Int
nodesApprox = n'
where
(as, bs) = List.unzip $ Map.keys distanceMap
n' = Set.size $ Set.fromList $ as <> bs
ClustersParams rivers level = trace ("nodesApprox: " <> show nodesApprox) $ clustersParams nodesApprox
partitions <- case Map.size distanceMap > 0 of partitions <- case Map.size distanceMap > 0 of
True -> cLouvain distanceMap True -> trace ("level" <> show level) $ cLouvain level distanceMap
False -> panic "Text.Flow: DistanceMap is empty" False -> panic "Text.Flow: DistanceMap is empty"
let bridgeness' = bridgeness 300 partitions distanceMap let bridgeness' = trace ("rivers: " <> show rivers) $ bridgeness rivers partitions distanceMap
let confluence' = confluence (Map.keys bridgeness') 3 True False let confluence' = confluence (Map.keys bridgeness') 3 True False
data2graph (Map.toList ti) myCooc' bridgeness' confluence' partitions data2graph (Map.toList ti) myCooc' bridgeness' confluence' partitions
data ClustersParams = ClustersParams { bridgness :: Double
, louvain :: Text
} deriving (Show)
clustersParams :: Int -> ClustersParams
clustersParams x = ClustersParams (fromIntegral x) y
where
y | x < 100 = "0.01"
| x < 350 = "0.01"
| x < 500 = "0.01"
| x < 1000 = "0.1"
| otherwise = "1"
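Reading the guards off: the Louvain resolution string steps up with the approximate node count, while bridgness is simply that count as a Double. A hedged illustration (these bindings are not in the commit):

paramsSmall, paramsLarge :: ClustersParams
paramsSmall = clustersParams 80    -- ClustersParams 80.0   "0.01"
paramsLarge = clustersParams 5000  -- ClustersParams 5000.0 "1"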
---------------------------------------------------------- ----------------------------------------------------------
-- | From data to Graph -- | From data to Graph
data2graph :: [(Text, Int)] -> Map (Int, Int) Int data2graph :: [(Text, Int)]
-> Map (Int, Int) Double -> Map (Int, Int) Int
-> Map (Int, Int) Double -> Map (Int, Int) Double
-> [LouvainNode] -> Map (Int, Int) Double
-> IO Graph -> [LouvainNode]
-> IO Graph
data2graph labels coocs bridge conf partitions = do data2graph labels coocs bridge conf partitions = do
let community_id_by_node_id = Map.fromList [ (n, c) | LouvainNode n c <- partitions ] let community_id_by_node_id = Map.fromList [ (n, c) | LouvainNode n c <- partitions ]
...@@ -74,18 +101,24 @@ data2graph labels coocs bridge conf partitions = do ...@@ -74,18 +101,24 @@ data2graph labels coocs bridge conf partitions = do
, node_x_coord = 0 , node_x_coord = 0
, node_y_coord = 0 , node_y_coord = 0
, node_attributes = , node_attributes =
Attributes { clust_default = maybe 0 identity Attributes { clust_default = maybe 0 identity
(Map.lookup n community_id_by_node_id) } } (Map.lookup n community_id_by_node_id) } }
) )
| (l, n) <- labels | (l, n) <- labels
, Set.member n $ Set.fromList
$ List.concat
$ map (\((s,t),d) -> if d > 0 && s /=t then [s,t] else [])
$ Map.toList bridge
] ]
let edges = [ Edge { edge_source = cs (show s) let edges = [ Edge { edge_source = cs (show s)
, edge_target = cs (show t) , edge_target = cs (show t)
, edge_weight = d , edge_weight = d
, edge_confluence = maybe (panic "E: data2graph edges") identity $ Map.lookup (s,t) conf , edge_confluence = maybe 0 identity $ Map.lookup (s,t) conf
-- , edge_confluence = maybe (panic "E: data2graph edges") identity $ Map.lookup (s,t) conf
, edge_id = cs (show i) } , edge_id = cs (show i) }
| (i, ((s,t), d)) <- zip ([0..]::[Integer]) (Map.toList bridge) ] | (i, ((s,t), d)) <- zip ([0..]::[Integer]) (Map.toList bridge), s /= t, d > 0
]
pure $ Graph nodes edges Nothing pure $ Graph nodes edges Nothing
......
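Note the change of semantics: Threshold is now a Double applied to the conditional-distance map, while raw co-occurrence counts are filtered at > 1 regardless, so callers pass a value like 0.01 rather than a count. A hedged usage sketch, assuming OverloadedStrings; the toy co-occurrences are chosen only to typecheck the call and are not part of this commit:

-- Hypothetical call site for the new cooc2graph signature.
demoGraph :: IO Graph
demoGraph = cooc2graph 0.01 $ Map.fromList [ (("node" , "graph"), 3)
                                           , (("node" , "list") , 2)
                                           , (("graph", "list") , 2) ]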
...@@ -53,7 +53,7 @@ relatedComp graphs = foldl' (\mem groups -> ...@@ -53,7 +53,7 @@ relatedComp graphs = foldl' (\mem groups ->
louvain :: ([GroupNode],[GroupEdge]) -> IO [[PhyloGroup]] louvain :: ([GroupNode],[GroupEdge]) -> IO [[PhyloGroup]]
louvain (nodes,edges) = map (\community -> map (\node -> nodes !! (l_node_id node)) community) louvain (nodes,edges) = map (\community -> map (\node -> nodes !! (l_node_id node)) community)
<$> groupBy (\a b -> (l_community_id a) == (l_community_id b)) <$> groupBy (\a b -> (l_community_id a) == (l_community_id b))
<$> (cLouvain $ mapKeys (\(x,y) -> (idx x, idx y)) $ fromList edges) <$> (cLouvain "0.0001" $ mapKeys (\(x,y) -> (idx x, idx y)) $ fromList edges)
where where
-------------------------------------- --------------------------------------
idx :: PhyloGroup -> Int idx :: PhyloGroup -> Int
......
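For context on the new first argument: it is the Louvain resolution, which the updated clustering-louvain takes as Text before the weighted edge map. A minimal hedged sketch of a direct call, assuming LouvainNode and fromList are in scope as in the surrounding module; indices and weights are made up and not part of this commit:

-- Hypothetical standalone use of the updated cLouvain signature.
communities :: IO [LouvainNode]
communities = cLouvain "0.0001" $ fromList [ ((0, 1), 1.0)
                                           , ((1, 2), 2.0) ]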
...@@ -39,7 +39,7 @@ extra-deps: ...@@ -39,7 +39,7 @@ extra-deps:
- git: https://github.com/np/servant-job.git - git: https://github.com/np/servant-job.git
commit: 8557bfc9472a1b2be0b7bc632c23701ba5f44bf8 commit: 8557bfc9472a1b2be0b7bc632c23701ba5f44bf8
- git: https://gitlab.iscpif.fr/gargantext/clustering-louvain.git - git: https://gitlab.iscpif.fr/gargantext/clustering-louvain.git
commit: 1c636112b151110408e7c5a28cec39e46657358e commit: e5814cbfa71f43b0a453efb65f476240d7d51a53
- git: https://github.com/np/patches-map - git: https://github.com/np/patches-map
commit: 8c6f38c4844ead53e664cf9c82ba461715dbe445 commit: 8c6f38c4844ead53e664cf9c82ba461715dbe445
- git: https://github.com/delanoe/haskell-opaleye.git #- opaleye-0.6.7002.0 - git: https://github.com/delanoe/haskell-opaleye.git #- opaleye-0.6.7002.0
......