Commit 1781ba63 authored by Karen Konou

Merge branch 'dev' into 428-dev-profile-img-upload

parents 79f858de 91b97fbd
## Version 0.0.6
* [OPTIM] Ngrams Table optimization (a sketch of step 2 follows this list). To upgrade:
1. `./bin/psql gargantext.ini < devops/postgresql/upgrade/0.0.6.sql`
2. in `stack --nix repl` run `runCmdReplEasy $ migrateFromDirToDb`
* [FIX] Ngrams Table next button: loads only once instead of twice
* [FRONT][FIX] Resize handler on Write Node
* [FRONT][FIX] Do not highlight ngrams if maximum abstract length > 4500 characters
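A minimal sketch of upgrade step 2 as a one-off helper, assuming the module layout this release introduces (`migrateFromDirToDb` is added alongside the Ngrams tools, `runCmdReplEasy` comes from `Gargantext.API.Dev`) and that `gargantext.ini` sits in the working directory:

```haskell
-- Run inside `stack --nix repl`; moves the file-based node stories (ngrams repo)
-- into the node_stories tables created by devops/postgresql/upgrade/0.0.6.sql.
import Gargantext.API.Dev (runCmdReplEasy)
import Gargantext.API.Ngrams.Tools (migrateFromDirToDb)

upgrade :: IO ()
upgrade = runCmdReplEasy $ migrateFromDirToDb
```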
## Version 0.0.5.9.6
* [BACK][FIX] Nix build ok
* [BACK][OPTI] Confluence optimization
* [FRONT][BACK][FEAT] Team management
* [FRONT][FEAT] Legend for graph
## Version 0.0.5.9.5
* [FRONT][FIX] View Document List fix CSS
* [FRONT][FIX] Node Modal fix
......
...@@ -40,7 +40,7 @@ main = do
--let q = ["water", "scarcity", "morocco", "shortage","flood"]
let q = ["gratuit", "gratuité", "culture", "culturel"]
- eDocs <- CSV.readFile rPath
+ eDocs <- CSV.readCSVFile rPath
case eDocs of
Right (h, csvDocs) -> do
putStrLn $ "Number of documents before:" <> show (V.length csvDocs)
......
...@@ -42,7 +42,7 @@ import Gargantext.Core.Types
import Gargantext.Core.Text.Terms
import Gargantext.Core.Text.Context
import Gargantext.Core.Text.Terms.WithList
- import Gargantext.Core.Text.Corpus.Parsers.CSV (readFile, csv_title, csv_abstract, csv_publication_year, unIntOrDec, fromMIntOrDec, defaultYear)
+ import Gargantext.Core.Text.Corpus.Parsers.CSV (readCSVFile, csv_title, csv_abstract, csv_publication_year, unIntOrDec, fromMIntOrDec, defaultYear)
import Gargantext.Core.Text.List.Formats.CSV (csvMapTermList)
import Gargantext.Core.Text.Terms (terms)
import Gargantext.Core.Text.Metrics.Count (coocOnContexts, Coocs)
...@@ -86,7 +86,7 @@ main = do
[corpusFile, termListFile, outputFile] <- getArgs
--corpus :: IO (DM.IntMap [[Text]])
- eCorpusFile <- readFile corpusFile
+ eCorpusFile <- readCSVFile corpusFile
case eCorpusFile of
Right cf -> do
let corpus = DM.fromListWith (<>)
......
module Auth where
import Prelude
import Data.Maybe
import Core
import Options
......
...@@ -109,7 +109,7 @@ csvToDocs parser patterns time path =
(termsInText patterns $ (csv_title row) <> " " <> (csv_abstract row))
Nothing
[]
- ) <$> snd <$> either (\err -> panic $ cs $ "CSV error" <> (show err)) identity <$> Csv.readFile path
+ ) <$> snd <$> either (\err -> panic $ cs $ "CSV error" <> (show err)) identity <$> Csv.readCSVFile path
Csv' limit -> Vector.toList
<$> Vector.take limit
<$> Vector.map (\row -> Document (toPhyloDate (csv'_publication_year row) (csv'_publication_month row) (csv'_publication_day row) time)
......
...@@ -2,4 +2,4 @@
#stack install --nix --profile --test --fast --no-install-ghc --skip-ghc-check
- env LANG=C.UTF-8 stack install --nix --test --no-install-ghc --skip-ghc-check
+ env LANG=C.UTF-8 stack install --haddock --nix --test --no-install-ghc --skip-ghc-check --no-haddock-deps
packages: .
allow-newer: base, accelerate, servant, time, classy-prelude
allow-newer: binary, primitive, vector
-- Patches
source-repository-package
...@@ -61,12 +62,27 @@ source-repository-package
location: https://gitlab.iscpif.fr/gargantext/crawlers/hal.git
tag: 020f5f9b308f5c23c925aedf5fb11f8b4728fb19
source-repository-package
type: git
location: https://gitlab.iscpif.fr/gargantext/crawlers/arxiv-api.git
tag: f3e517cc40d92e282c5245b23d253d2ca3f802e5
-- Graphs
source-repository-package
type: git
location: https://github.com/alpmestan/haskell-igraph.git
tag: 9f55eb36639c8e0965c8bc539a57738869f33e9a
source-repository-package
type: git
location: https://gitlab.iscpif.fr/gargantext/haskell-infomap.git
tag: 6d1d60b952b9b2b272b58fc5539700fd8890ac88
source-repository-package
type: git
location: https://gitlab.iscpif.fr/gargantext/gargantext-graph.git
tag: f41ee8b53c3264e5aa5adc06b2e5b293d2a8c474
-- Data mining
source-repository-package
type: git
...@@ -116,16 +132,33 @@ source-repository-package
tag: fc24987d3af348a677748f226e48d64779a694e9
- -- Accelerate
+ -- numerical computing
source-repository-package
type: git
location: https://github.com/alpmestan/accelerate.git
tag: 640b5af87cea94b61c7737d878e6f7f2fca5c015
source-repository-package
type: git
location: https://gitlab.iscpif.fr/amestanogullari/accelerate-utility.git
tag: a3875fe652d3bb5acb522674c22c6c814c1b4ad0
source-repository-package
type: git
- location: https://gitlab.iscpif.fr/anoe/accelerate.git
+ location: https://github.com/alpmestan/accelerate-arithmetic.git
- tag: f5c0e0071ec7b6532f9a9cd3eb33d14f340fbcc9
+ tag: a110807651036ca2228a76507ee35bbf7aedf87a
source-repository-package
type: git
- location: https://gitlab.iscpif.fr/anoe/accelerate-utility.git
+ location: https://github.com/alpmestan/accelerate-llvm.git
- tag: 83ada76e78ac10d9559af8ed6bd4064ec81308e4
+ tag: 944f5a4aea35ee6aedb81ea754bf46b131fce9e3
subdir: accelerate-llvm/ accelerate-llvm-native/
source-repository-package
type: git
location: https://github.com/alpmestan/hmatrix.git
tag: b9fca8beee0f23c17a6b2001ec834d071709e6e7
subdir: packages/base/
-- Wikidata
...@@ -135,7 +168,22 @@ source-repository-package
tag: 9637a82344bb70f7fa8f02e75db3c081ccd434ce
-- numerical computing
source-repository-package
type: git
location: https://github.com/alpmestan/sparse-linear.git
tag: bc6ca8058077b0b5702ea4b88bd4189cfcad267a
subdir: sparse-linear/
constraints: unordered-containers==0.2.14.*,
servant-ekg==0.3.1,
time==1.9.3,
- stm==2.5.0.1
+ stm==2.5.0.1,
vector==0.12.3.0,
eigen==3.3.7.0,
cborg==0.2.6.0,
primitive==0.7.3.0
package accelerate
flags: +debug
\ No newline at end of file
...@@ -219,6 +219,33 @@ CREATE TABLE public.rights (
ALTER TABLE public.rights OWNER TO gargantua;
------------------------------------------------------------
-- Node Story
create table public.node_stories (
id SERIAL,
node_id INTEGER NOT NULL,
archive jsonb DEFAULT '{}'::jsonb NOT NULL,
PRIMARY KEY (id),
FOREIGN KEY (node_id) REFERENCES public.nodes(id) ON DELETE CASCADE
);
ALTER TABLE public.node_stories OWNER TO gargantua;
CREATE UNIQUE INDEX ON public.node_stories USING btree (node_id);
create table public.node_story_archive_history (
id SERIAL,
node_id INTEGER NOT NULL,
ngrams_type_id INTEGER NOT NULL,
ngrams_id INTEGER NOT NULL,
patch jsonb DEFAULT '{}'::jsonb NOT NULL,
PRIMARY KEY (id),
FOREIGN KEY (node_id) REFERENCES public.nodes(id) ON DELETE CASCADE,
FOREIGN KEY (ngrams_id) REFERENCES public.ngrams(id) ON DELETE CASCADE
);
ALTER TABLE public.node_story_archive_history OWNER TO gargantua;
------------------------------------------------------------
-- INDEXES
CREATE INDEX ON public.auth_user USING btree (username varchar_pattern_ops);
......
create table public.node_stories (
id SERIAL,
node_id INTEGER NOT NULL,
archive jsonb DEFAULT '{}'::jsonb NOT NULL,
PRIMARY KEY (id),
FOREIGN KEY (node_id) REFERENCES public.nodes(id) ON DELETE CASCADE
);
ALTER TABLE public.node_stories OWNER TO gargantua;
CREATE UNIQUE INDEX ON public.node_stories USING btree (node_id);
create table public.node_story_archive_history (
id SERIAL,
node_id INTEGER NOT NULL,
ngrams_type_id INTEGER NOT NULL,
ngrams_id INTEGER NOT NULL,
patch jsonb DEFAULT '{}'::jsonb NOT NULL,
PRIMARY KEY (id),
FOREIGN KEY (node_id) REFERENCES public.nodes(id) ON DELETE CASCADE,
FOREIGN KEY (ngrams_id) REFERENCES public.ngrams(id) ON DELETE CASCADE
);
ALTER TABLE public.node_story_archive_history OWNER TO gargantua;
-- INSERT INTO node_story_archive_history (node_id, ngrams_type_id, patch) SELECT t.node_id, t.ngrams_type_id, t.patch FROM
-- (
-- WITH q AS (SELECT node_id, history.*, row_number() over (ORDER BY node_id) AS sid
-- FROM node_stories,
-- jsonb_to_recordset(archive->'history') AS history("Authors" jsonb, "Institutes" jsonb, "NgramsTerms" jsonb, "Sources" jsonb))
-- (SELECT node_id, sid, 1 AS ngrams_type_id, "Authors" AS patch FROM q WHERE "Authors" IS NOT NULL)
-- UNION (SELECT node_id, sid, 2 AS ngrams_type_id, "Institutes" AS patch FROM q WHERE "Institutes" IS NOT NULL)
-- UNION (SELECT node_id, sid, 4 AS ngrams_type_id, "NgramsTerms" AS patch FROM q WHERE "NgramsTerms" IS NOT NULL)
-- UNION (SELECT node_id, sid, 3 AS ngrams_type_id, "Sources" AS patch FROM q WHERE "Sources" IS NOT NULL)
-- ORDER BY node_id, ngrams_type_id, sid
-- ) AS t;
cabal-version: 1.12
- -- This file has been generated from package.yaml by hpack version 0.34.4.
+ -- This file has been generated from package.yaml by hpack version 0.34.7.
--
-- see: https://github.com/sol/hpack
name: gargantext
- version: 0.0.5.9.5
+ version: 0.0.6
synopsis: Search, map, share
description: Please see README.md
category: Data
...@@ -96,6 +96,7 @@ library
Gargantext.Core.Viz.Phylo.SynchronicClustering
Gargantext.Core.Viz.Types
other-modules:
-- ConcurrentTest
Gargantext.API.Admin.Auth
Gargantext.API.Admin.FrontEnd
Gargantext.API.Admin.Orchestrator
...@@ -111,6 +112,7 @@ library
Gargantext.API.GraphQL.AsyncTask
Gargantext.API.GraphQL.IMT
Gargantext.API.GraphQL.Node
Gargantext.API.GraphQL.Team
Gargantext.API.GraphQL.TreeFirstLevel
Gargantext.API.GraphQL.User
Gargantext.API.GraphQL.UserInfo
...@@ -162,6 +164,7 @@ library
Gargantext.Core.Methods.Graph.BAC.Proxemy
Gargantext.Core.Methods.Graph.MaxClique
Gargantext.Core.Methods.Matrix.Accelerate.Utils
Gargantext.Core.NodeStoryFile
Gargantext.Core.Statistics
Gargantext.Core.Text.Convert
Gargantext.Core.Text.Corpus.API.Arxiv
...@@ -410,6 +413,7 @@ library
, jose
, json-stream
, lens
, lifted-base
, listsafe
, located-base
, logging-effect
...@@ -488,6 +492,7 @@ library
, transformers-base
, tuple
, unordered-containers
, uri-encode
, utf8-string
, uuid
, validity
......
...@@ -6,7 +6,6 @@ rec {
hsBuildInputs = [
ghc
pkgs.cabal-install
pkgs.haskellPackages.llvm-hs
];
nonhsBuildInputs = with pkgs; [
bzip2
...@@ -18,6 +17,7 @@ rec {
#haskell-language-server
hlint
igraph
libffi
liblapack
lzma
pcre
...@@ -31,8 +31,7 @@ rec {
expat
icu
graphviz
- libffi
+ llvm_9
llvmPackages_9.llvm
];
libPaths = pkgs.lib.makeLibraryPath nonhsBuildInputs;
shellHook = ''
......
...@@ -6,7 +6,7 @@ name: gargantext
# | | | +----- Layers * : New versions with API additions
# | | | | +--- Layers * : New versions without API breaking changes
# | | | | |
- version: '0.0.5.9.5'
+ version: '0.0.6'
synopsis: Search, map, share
description: Please see README.md
category: Data
...@@ -195,6 +195,7 @@ library:
- jose
- json-stream
- lens
- lifted-base
- listsafe
- located-base
- logging-effect
...@@ -274,6 +275,7 @@ library:
- unordered-containers
- utf8-string
- uuid
- uri-encode
- validity
- vector
- wai
......
...@@ -9,5 +9,5 @@ LOGFILE=$FOLDER"/"$FILE
mkdir -p $FOLDER
- #env LANG=en_US.UTF-8 ~/.local/bin/gargantext-server --ini gargantext.ini --run Dev +RTS > $LOGFILE 2>&1 & tail -F $LOGFILE # -p
+ env LANG=en_US.UTF-8 ~/.local/bin/gargantext-server --ini gargantext.ini --run Dev +RTS > $LOGFILE 2>&1 & tail -F $LOGFILE # -p
- env LANG=en_US.UTF-8 stack --docker exec gargantext-server -- --ini gargantext.ini --run Dev +RTS > $LOGFILE 2>&1 & tail -F $LOGFILE # -p
+ #env LANG=en_US.UTF-8 stack --docker exec gargantext-server -- --ini gargantext.ini --run Dev +RTS > $LOGFILE 2>&1 & tail -F $LOGFILE # -p
{ pkgs ? import ./nix/pkgs.nix {} }:
let
myBuildInputs = [
pkgs.pkgs.docker-compose
pkgs.pkgs.haskell-language-server
pkgs.pkgs.stack
];
in
pkgs.pkgs.mkShell {
name = pkgs.shell.name;
shellHook = pkgs.shell.shellHook;
......
...@@ -14,7 +14,7 @@ Portability : POSIX
module Graph.Clustering where
import Gargantext.API.Ngrams.Types (NgramsTerm(..))
- import Gargantext.Core.Viz.Graph (Graph(..))
+ import Gargantext.Core.Viz.Graph (Graph(..), Strength(..))
import Gargantext.Core.Viz.Graph.Tools (doDistanceMap)
import Gargantext.Core.Viz.Graph.Tools.IGraph (spinglass)
import Gargantext.Prelude
...@@ -35,7 +35,7 @@ test :: IO ()
test = hspec $ do
describe "Cross" $ do
let
- (distanceMap,_,_) = doDistanceMap Conditional 0 myCooc
+ (distanceMap,_,_) = doDistanceMap Conditional 0 Weak myCooc
it "Partition test" $ do
partitions <- spinglass 1 distanceMap
let
......
...@@ -117,9 +117,9 @@ makeMockApp env = do
blocking <- fireWall req (env ^. menv_firewall)
case blocking of
True -> app req resp
False -> resp ( responseLBS status401 []
"Invalid Origin or Host header")
let corsMiddleware = cors $ \_ -> Just CorsResourcePolicy
-- { corsOrigins = Just ([env^.settings.allowedOrigin], False)
{ corsOrigins = Nothing -- == /*
...@@ -135,7 +135,7 @@ makeMockApp env = do
--let warpS = Warp.setPort (8008 :: Int) -- (env^.settings.appPort)
-- $ Warp.defaultSettings
--pure (warpS, logWare $ checkOriginAndHost $ corsMiddleware $ serverApp)
pure $ logStdoutDev $ checkOriginAndHost $ corsMiddleware $ serverApp
-}
...@@ -149,7 +149,7 @@ makeDevMiddleware mode = do
-- blocking <- fireWall req (env ^. menv_firewall)
-- case blocking of
-- True -> app req resp
-- False -> resp ( responseLBS status401 []
-- "Invalid Origin or Host header")
--
let corsMiddleware = cors $ \_ -> Just CorsResourcePolicy
......
...@@ -48,8 +48,6 @@ import GHC.Generics (Generic)
import Servant
import Servant.Auth.Server
import Servant.Job.Async (JobFunction(..), serveJobsAPI)
import qualified Text.Blaze.Html.Renderer.Text as H
import qualified Text.Blaze.Html5 as H
--import qualified Text.Blaze.Html5.Attributes as HA
import qualified Gargantext.Prelude.Crypto.Auth as Auth
...@@ -59,7 +57,6 @@ import Gargantext.API.Admin.Orchestrator.Types (JobLog(..), AsyncJobs)
import Gargantext.API.Admin.Types
import Gargantext.API.Job (jobLogSuccess)
import Gargantext.API.Prelude (HasJoseError(..), joseError, HasServerError, GargServerC, GargServer, _ServerError)
import Gargantext.API.Types
import Gargantext.Core.Mail (MailModel(..), mail)
import Gargantext.Core.Mail.Types (HasMail, mailSettings)
import Gargantext.Core.Types.Individu (User(..), Username, GargPassword(..))
...@@ -173,7 +170,7 @@ type ForgotPasswordAPI = Summary "Forgot password POST API"
:> Post '[JSON] ForgotPasswordResponse
:<|> Summary "Forgot password GET API"
:> QueryParam "uuid" Text
- :> Get '[HTML] Text
+ :> Get '[JSON] ForgotPasswordGet
forgotPassword :: GargServer ForgotPasswordAPI
...@@ -193,8 +190,8 @@ forgotPasswordPost (ForgotPasswordRequest email) = do
pure $ ForgotPasswordResponse "ok"
forgotPasswordGet :: (HasSettings env, HasConnectionPool env, HasJoseError err, HasConfig env, HasMail env, HasServerError err)
- => Maybe Text -> Cmd' env err Text
+ => Maybe Text -> Cmd' env err ForgotPasswordGet
- forgotPasswordGet Nothing = pure ""
+ forgotPasswordGet Nothing = pure $ ForgotPasswordGet ""
forgotPasswordGet (Just uuid) = do
let mUuid = fromText uuid
case mUuid of
...@@ -209,7 +206,7 @@ forgotPasswordGet (Just uuid) = do
---------------------
forgotPasswordGetUser :: (HasSettings env, HasConnectionPool env, HasJoseError err, HasConfig env, HasMail env, HasServerError err)
- => UserLight -> Cmd' env err Text
+ => UserLight -> Cmd' env err ForgotPasswordGet
forgotPasswordGetUser (UserLight { .. }) = do
-- pick some random password
password <- liftBase gargPass
...@@ -225,16 +222,7 @@ forgotPasswordGetUser (UserLight { .. }) = do
-- clear the uuid so that the page can't be refreshed
_ <- updateUserForgotPasswordUUID $ UserLight { userLight_forgot_password_uuid = Nothing, .. }
- pure $ toStrict $ H.renderHtml $
+ pure $ ForgotPasswordGet password
H.docTypeHtml $ do
H.html $ do
H.head $ do
H.title "Gargantext - forgot password"
H.body $ do
H.h1 "Forgot password"
H.p $ do
H.span "Here is your password (will be shown only once): "
H.b $ H.toHtml password
forgotUserPassword :: (HasConnectionPool env, HasConfig env, HasMail env)
=> UserLight -> Cmd' env err ()
......
...@@ -112,6 +112,7 @@ data PathId = PathNode NodeId | PathNodeNode ListId DocId
---------------------------
type Email = Text
type Password = Text
data ForgotPasswordRequest = ForgotPasswordRequest { _fpReq_email :: Email }
deriving (Generic )
...@@ -124,3 +125,9 @@ data ForgotPasswordResponse = ForgotPasswordResponse { _fpRes_status :: Text }
$(deriveJSON (unPrefix "_fpRes_") ''ForgotPasswordResponse)
instance ToSchema ForgotPasswordResponse where
declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_fpRes_")
data ForgotPasswordGet = ForgotPasswordGet {_fpGet_password :: Password}
deriving (Generic )
$(deriveJSON (unPrefix "_fpGet_") ''ForgotPasswordGet)
instance ToSchema ForgotPasswordGet where
declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_fpGet_")
\ No newline at end of file
{-|
Module : Gargantext.API.Admin.Settings
Description : Settings of the API (Server and Client)
Copyright : (c) CNRS, 2017-Present
...@@ -27,7 +27,7 @@ import Data.Maybe (fromMaybe)
import Data.Pool (Pool, createPool)
import Database.PostgreSQL.Simple (Connection, connect, close, ConnectInfo)
import Gargantext.Core.NodeStory
- import Gargantext.Prelude.Config (GargConfig(..), {-gc_repofilepath,-} readConfig)
+ import Gargantext.Prelude.Config ({-GargConfig(..),-} {-gc_repofilepath,-} readConfig)
import Network.HTTP.Client.TLS (newTlsManager)
import Servant.Auth.Server (defaultJWTSettings, CookieSettings(..), XsrfCookieSettings(..), defaultCookieSettings, defaultXsrfCookieSettings, readKey, writeKey)
import Servant.Client (parseBaseUrl)
...@@ -180,7 +180,8 @@ newEnv port file = do
self_url_env <- parseBaseUrl $ "http://0.0.0.0:" <> show port
dbParam <- databaseParameters file
pool <- newPool dbParam
- nodeStory_env <- readNodeStoryEnv (_gc_repofilepath config_env)
+ --nodeStory_env <- readNodeStoryEnv (_gc_repofilepath config_env)
nodeStory_env <- readNodeStoryEnv pool
scrapers_env <- newJobEnv defaultSettings manager_env
logger <- newStderrLoggerSet defaultBufSize
config_mail <- Mail.readConfig file
......
...@@ -68,7 +68,7 @@ getBackendVersion :: ClientM Text
-- * auth API
postAuth :: AuthRequest -> ClientM AuthResponse
forgotPasswordPost :: ForgotPasswordRequest -> ClientM ForgotPasswordResponse
- forgotPasswordGet :: Maybe Text -> ClientM Text
+ forgotPasswordGet :: Maybe Text -> ClientM ForgotPasswordGet
postForgotPasswordAsync :: ClientM (JobStatus 'Safe JobLog)
postForgotPasswordAsyncJob :: JobInput Maybe ForgotPasswordAsyncParams -> ClientM (JobStatus 'Safe JobLog)
killForgotPasswordAsyncJob :: JobID 'Unsafe -> Maybe Limit -> Maybe Offset -> ClientM (JobStatus 'Safe JobLog)
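-- A hedged usage sketch for the reworked client signature above: the GET
-- endpoint now returns ForgotPasswordGet as JSON instead of an HTML page.
-- Only standard servant-client / http-client-tls calls are used; the dev base
-- URL and having forgotPasswordGet / ForgotPasswordGet(..) in scope are assumptions.
-- Assumed imports:
--   import Network.HTTP.Client.TLS (newTlsManager)
--   import Servant.Client (mkClientEnv, parseBaseUrl, runClientM)
checkForgotPassword :: Text -> IO ()
checkForgotPassword uuid = do
  manager <- newTlsManager                          -- TLS-capable connection manager
  base    <- parseBaseUrl "http://localhost:8008"   -- assumed dev server address
  res     <- runClientM (forgotPasswordGet (Just uuid)) (mkClientEnv manager base)
  case res of
    Left err                       -> print err     -- transport / decoding error
    Right (ForgotPasswordGet pass) -> print pass    -- the one-time password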
......
{-|
Module : Gargantext.API.Dev
Description :
Copyright : (c) CNRS, 2017-Present
License : AGPL + CECILL v3
Maintainer : team@gargantext.org
...@@ -22,7 +22,7 @@ import Gargantext.API.Prelude
import Gargantext.Core.NodeStory
import Gargantext.Database.Prelude
import Gargantext.Prelude
- import Gargantext.Prelude.Config (GargConfig(..), readConfig)
+ import Gargantext.Prelude.Config (readConfig)
import qualified Gargantext.Prelude.Mail as Mail
import Servant
import System.IO (FilePath)
...@@ -38,8 +38,9 @@ withDevEnv iniPath k = do
newDevEnv = do
cfg <- readConfig iniPath
dbParam <- databaseParameters iniPath
- nodeStory_env <- readNodeStoryEnv (_gc_repofilepath cfg)
+ --nodeStory_env <- readNodeStoryEnv (_gc_repofilepath cfg)
pool <- newPool dbParam
nodeStory_env <- readNodeStoryEnv pool
setts <- devSettings devJwkFile
mail <- Mail.readConfig iniPath
pure $ DevEnv
...@@ -61,7 +62,7 @@ runCmdReplServantErr = runCmdRepl
-- the command.
-- This function is constrained to the DevEnv rather than
-- using HasConnectionPool and HasRepoVar.
- runCmdDev :: (Show err) => DevEnv -> Cmd'' DevEnv err a -> IO a
+ runCmdDev :: Show err => DevEnv -> Cmd'' DevEnv err a -> IO a
runCmdDev env f =
(either (fail . show) pure =<< runCmd env f)
`finally`
......
...@@ -41,6 +41,7 @@ import qualified Gargantext.API.GraphQL.Node as GQLNode
import qualified Gargantext.API.GraphQL.User as GQLUser
import qualified Gargantext.API.GraphQL.UserInfo as GQLUserInfo
import qualified Gargantext.API.GraphQL.TreeFirstLevel as GQLTree
import qualified Gargantext.API.GraphQL.Team as GQLTeam
import Gargantext.API.Prelude (GargM, GargError)
import Gargantext.API.Types
import Gargantext.Core.Mail.Types (HasMail)
...@@ -72,12 +73,14 @@ data Query m
, user_infos :: GQLUserInfo.UserInfoArgs -> m [GQLUserInfo.UserInfo]
, users :: GQLUser.UserArgs -> m [GQLUser.User m]
, tree :: GQLTree.TreeArgs -> m (GQLTree.TreeFirstLevel m)
, team :: GQLTeam.TeamArgs -> m [GQLTeam.TeamMember]
} deriving (Generic, GQLType)
data Mutation m
= Mutation
- { update_user_info :: GQLUserInfo.UserInfoMArgs -> m Int }
+ { update_user_info :: GQLUserInfo.UserInfoMArgs -> m Int
- deriving (Generic, GQLType)
+ , delete_team_membership :: GQLTeam.TeamDeleteMArgs -> m [Int]
} deriving (Generic, GQLType)
-- | Possible GraphQL Events, i.e. here we describe how we will
-- manipulate the data.
...@@ -108,8 +111,10 @@ rootResolver =
, node_parent = GQLNode.resolveNodeParent
, user_infos = GQLUserInfo.resolveUserInfos
, users = GQLUser.resolveUsers
- , tree = GQLTree.resolveTree }
+ , tree = GQLTree.resolveTree
- , mutationResolver = Mutation { update_user_info = GQLUserInfo.updateUserInfo }
+ , team = GQLTeam.resolveTeam }
, mutationResolver = Mutation { update_user_info = GQLUserInfo.updateUserInfo
, delete_team_membership = GQLTeam.deleteTeamMembership }
, subscriptionResolver = Undefined }
-- | Main GraphQL "app".
......
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DuplicateRecordFields #-}
module Gargantext.API.GraphQL.Team where
import Gargantext.Prelude
import GHC.Generics (Generic)
import Data.Morpheus.Types (GQLType, Resolver, QUERY, ResolverM, lift)
import Data.Text ( Text )
import Gargantext.API.Prelude (GargM, GargError)
import Gargantext.Database.Action.Share (membersOf, deleteMemberShip)
import Gargantext.Core.Types (NodeId(..), unNodeId)
import Gargantext.Database.Prelude (HasConnectionPool)
import Gargantext.Database (HasConfig)
import Gargantext.Core.Mail.Types (HasMail)
import Gargantext.Database.Query.Table.Node (getNode)
import Gargantext.API.GraphQL.Utils (authUser, AuthStatus (Invalid, Valid))
import Gargantext.Database.Schema.Node (NodePoly(Node, _node_id), _node_user_id)
import Gargantext.API.Admin.Types (HasSettings)
import Gargantext.Database.Query.Table.User (getUsersWithNodeHyperdata)
import qualified Data.Text as T
data TeamArgs = TeamArgs
{ team_node_id :: Int } deriving (Generic, GQLType)
data TeamMember = TeamMember
{ username :: Text
, shared_folder_id :: Int
} deriving (Generic, GQLType)
data TeamDeleteMArgs = TeamDeleteMArgs
{ token :: Text
, shared_folder_id :: Int
, team_node_id :: Int
} deriving (Generic, GQLType)
type GqlM e env = Resolver QUERY e (GargM env GargError)
type GqlM' e env a = ResolverM e (GargM env GargError) a
todo :: a
todo = undefined
resolveTeam :: (HasConnectionPool env, HasConfig env, HasMail env) => TeamArgs -> GqlM e env [TeamMember]
resolveTeam TeamArgs { team_node_id } = dbTeam team_node_id
dbTeam :: (HasConnectionPool env, HasConfig env, HasMail env) => Int -> GqlM e env [TeamMember]
dbTeam nodeId = do
let nId = NodeId nodeId
res <- lift $ membersOf nId
pure $ map toTeamMember res
where
toTeamMember :: (Text, NodeId) -> TeamMember
toTeamMember (username, fId)= TeamMember {
username,
shared_folder_id = unNodeId fId
}
-- TODO: list as argument
deleteTeamMembership :: (HasConnectionPool env, HasConfig env, HasMail env, HasSettings env) => TeamDeleteMArgs -> GqlM' e env [Int]
deleteTeamMembership TeamDeleteMArgs { token, shared_folder_id, team_node_id } = do
teamNode <- lift $ getNode $ NodeId team_node_id
userNodes <- lift (getUsersWithNodeHyperdata $ uId teamNode)
case userNodes of
[] -> panic $ "[deleteTeamMembership] User with id " <> T.pack (show $ uId teamNode) <> " doesn't exist."
(( _, node_u):_) -> do
testAuthUser <- lift $ authUser (nId node_u) token
case testAuthUser of
Invalid -> panic "[deleteTeamMembership] failed to validate user"
Valid -> do
lift $ deleteMemberShip [(NodeId shared_folder_id, NodeId team_node_id)]
where
uId Node { _node_user_id } = _node_user_id
nId Node { _node_id } = _node_id
...@@ -11,7 +11,7 @@ Ngrams API
-- | TODO
get ngrams filtered by NgramsType
add get
-}
...@@ -106,7 +106,7 @@ import Gargantext.Database.Action.Flow.Types
import Gargantext.Database.Action.Metrics.NgramsByContext (getOccByNgramsOnlyFast')
import Gargantext.Database.Admin.Config (userMaster)
import Gargantext.Database.Admin.Types.Node (NodeType(..))
- import Gargantext.Database.Prelude (HasConnectionPool, HasConfig)
+ import Gargantext.Database.Prelude (HasConnectionPool(..), HasConfig)
import Gargantext.Database.Query.Table.Ngrams hiding (NgramsType(..), ngramsType, ngrams_terms)
import Gargantext.Database.Query.Table.Node (getNode)
import Gargantext.Database.Query.Table.Node.Error (HasNodeError)
...@@ -261,7 +261,9 @@ setListNgrams listId ngramsType ns = do
currentVersion :: HasNodeStory env err m
=> ListId -> m Version
currentVersion listId = do
- nls <- getRepo [listId]
+ --nls <- getRepo [listId]
pool <- view connPool
nls <- liftBase $ getNodeStory pool listId
pure $ nls ^. unNodeStory . at listId . _Just . a_version
...@@ -282,13 +284,16 @@ commitStatePatch :: (HasNodeStory env err m, HasMail env)
=> ListId
-> Versioned NgramsStatePatch'
-> m (Versioned NgramsStatePatch')
- commitStatePatch listId (Versioned p_version p) = do
+ commitStatePatch listId (Versioned _p_version p) = do
-- printDebug "[commitStatePatch]" listId
var <- getNodeStoryVar [listId]
vq' <- liftBase $ modifyMVar var $ \ns -> do
let
a = ns ^. unNodeStory . at listId . _Just
- q = mconcat $ take (a ^. a_version - p_version) (a ^. a_history)
+ -- apply patches from version p_version to a ^. a_version
-- TODO Check this
--q = mconcat $ take (a ^. a_version - p_version) (a ^. a_history)
q = mconcat $ a ^. a_history
(p', q') = transformWith ngramsStatePatchConflictResolution p q
a' = a & a_version +~ 1
& a_state %~ act p'
...@@ -808,5 +813,3 @@ listNgramsChangedSince listId ngramsType version
Versioned <$> currentVersion listId <*> pure True
| otherwise =
tableNgramsPull listId ngramsType version & mapped . v_data %~ (== mempty)
...@@ -22,13 +22,16 @@ import Data.Hashable (Hashable)
import Data.Set (Set)
import Data.Validity
import Gargantext.API.Ngrams.Types
- import Gargantext.Core.Types (ListType(..), NodeId, ListId)
+ import Gargantext.Core.Types (ListType(..), NodeId, NodeType(..), ListId)
import Gargantext.Database.Prelude (CmdM, HasConnectionPool(..))
import Gargantext.Database.Schema.Ngrams (NgramsType)
import Gargantext.Prelude
import qualified Data.HashMap.Strict as HM
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
import Gargantext.Core.NodeStory
import qualified Gargantext.Core.NodeStoryFile as NSF
mergeNgramsElement :: NgramsRepoElement -> NgramsRepoElement -> NgramsRepoElement
mergeNgramsElement _neOld neNew = neNew
...@@ -193,3 +196,21 @@ getCoocByNgrams' f (Diagonal diag) m =
where ks = HM.keys m
------------------------------------------
migrateFromDirToDb :: (CmdM env err m, HasNodeStory env err m)
=> m ()
migrateFromDirToDb = do
pool <- view connPool
listIds <- liftBase $ getNodesIdWithType pool NodeList
printDebug "[migrateFromDirToDb] listIds" listIds
(NodeStory nls) <- NSF.getRepoReadConfig listIds
printDebug "[migrateFromDirToDb] nls" nls
_ <- mapM (\(nId, a) -> do
n <- liftBase $ nodeExists pool nId
case n of
False -> pure 0
True -> liftBase $ upsertNodeArchive pool nId a
) $ Map.toList nls
--_ <- nodeStoryIncs (Just $ NodeStory nls) listIds
pure ()
...@@ -28,7 +28,8 @@ import Data.String (IsString, fromString)
import Data.Swagger hiding (version, patch)
import Data.Text (Text, pack, strip)
import Data.Validity
- import Database.PostgreSQL.Simple.FromField (FromField, fromField, ResultError(ConversionFailed), returnError)
+ import Database.PostgreSQL.Simple.FromField (FromField, fromField, fromJSONField, ResultError(ConversionFailed), returnError)
import Database.PostgreSQL.Simple.ToField (ToField, toJSONField, toField)
import GHC.Generics (Generic)
import Gargantext.Core.Text (size)
import Gargantext.Core.Types (ListType(..), ListId, NodeId, TODO)
...@@ -124,19 +125,14 @@ instance (ToJSONKey a, ToSchema a) => ToSchema (MSet a) where
------------------------------------------------------------------------
newtype NgramsTerm = NgramsTerm { unNgramsTerm :: Text }
deriving (Ord, Eq, Show, Generic, ToJSONKey, ToJSON, FromJSON, Semigroup, Arbitrary, Serialise, ToSchema, Hashable, NFData)
instance IsHashable NgramsTerm where
hash (NgramsTerm t) = hash t
instance Monoid NgramsTerm where
mempty = NgramsTerm ""
instance FromJSONKey NgramsTerm where
fromJSONKey = FromJSONKeyTextParser $ \t -> pure $ NgramsTerm $ strip t
instance IsString NgramsTerm where
fromString s = NgramsTerm $ pack s
instance FromField NgramsTerm
where
fromField field mb = do
...@@ -147,6 +143,9 @@ instance FromField NgramsTerm
$ List.intercalate " " [ "cannot parse hyperdata for JSON: "
, show v
]
instance ToField NgramsTerm where
toField (NgramsTerm n) = toField n
data RootParent = RootParent
{ _rp_root :: NgramsTerm
...@@ -448,13 +447,16 @@ instance ToSchema NgramsPatch where
, ("old", nreSch)
, ("new", nreSch)
]
instance Arbitrary NgramsPatch where
arbitrary = frequency [ (9, NgramsPatch <$> arbitrary <*> (replace <$> arbitrary <*> arbitrary))
, (1, NgramsReplace <$> arbitrary <*> arbitrary)
]
instance Serialise NgramsPatch
instance FromField NgramsPatch where
fromField = fromJSONField
instance ToField NgramsPatch where
toField = toJSONField
instance Serialise (Replace ListType)
instance Serialise ListType
...@@ -512,7 +514,6 @@ instance Action (PairPatch (PatchMSet NgramsTerm) (Replace ListType)) NgramsRepo
instance Applicable NgramsPatch (Maybe NgramsRepoElement) where
applicable p = applicable (p ^. _NgramsPatch)
instance Action NgramsPatch (Maybe NgramsRepoElement) where
act p = act (p ^. _NgramsPatch)
...@@ -524,7 +525,11 @@ instance Serialise (PatchMap NgramsTerm NgramsPatch)
instance FromField NgramsTablePatch
where
- fromField = fromField'
+ fromField = fromJSONField
--fromField = fromField'
instance ToField NgramsTablePatch
where
toField = toJSONField
instance FromField (PatchMap TableNgrams.NgramsType (PatchMap NodeId NgramsTablePatch))
where
...@@ -751,4 +756,3 @@ instance ToSchema UpdateTableNgramsCharts where
------------------------------------------------------------------------
type NgramsList = (Map TableNgrams.NgramsType (Versioned NgramsTableMap))
...@@ -76,7 +76,7 @@ fileDownload uId nId = do
let (HyperdataFile { _hff_name = name'
, _hff_path = path }) = node ^. node_hyperdata
- Contents c <- GargDB.readFile $ unpack path
+ Contents c <- GargDB.readGargFile $ unpack path
let (mMime, _) = DMT.guessType DMT.defaultmtd False $ unpack name'
mime = case mMime of
......
...@@ -12,6 +12,7 @@ Portability : POSIX
module Gargantext.Core.Mail where
import Control.Lens (view)
import Network.URI.Encode (encodeText)
import Data.Text (Text, unlines, splitOn)
import Gargantext.Core.Types.Individu
import Gargantext.Database.Schema.User (UserLight(..))
...@@ -90,7 +91,7 @@ bodyWith server (ForgotPassword { user = UserLight { userLight_forgot_password_u
, forgot_password_link server uuid ]
forgot_password_link :: ServerAddress -> Text -> Text
- forgot_password_link server uuid = server <> "/api/v1.0/forgot-password?uuid=" <> uuid
+ forgot_password_link server uuid = server <> "/#/forgotPassword?uuid=" <> uuid <> "&server=" <> encodeText server
------------------------------------------------------------------------
email_subject :: MailModel -> Text
......
{- NOTE This is legacy code. It keeps node stories in a directory
repo. We now have migrated to the DB. However this code is needed to
make the migration (see Gargantext.API.Ngrams.Tools) -}
module Gargantext.Core.NodeStoryFile where
import Control.Lens (view)
import Control.Monad (foldM)
import Codec.Serialise (serialise, deserialise)
import Codec.Serialise.Class
import Control.Concurrent (MVar(), modifyMVar_, newMVar, readMVar, withMVar)
import Control.Debounce (mkDebounce, defaultDebounceSettings, debounceFreq, debounceAction)
import Gargantext.Core.NodeStory hiding (readNodeStoryEnv)
import Gargantext.Core.Types (ListId, NodeId(..))
import Gargantext.Database.Prelude (CmdM, hasConfig)
import Gargantext.Prelude
import Gargantext.Prelude.Config (gc_repofilepath)
import System.Directory (renameFile, createDirectoryIfMissing, doesFileExist, removeFile)
import System.IO (FilePath, hClose)
import System.IO.Temp (withTempFile)
import qualified Data.ByteString.Lazy as DBL
import qualified Data.List as List
import qualified Data.Map.Strict as Map
import qualified Gargantext.Database.Query.Table.Ngrams as TableNgrams
getRepo :: HasNodeStory env err m
=> [ListId] -> m NodeListStory
getRepo listIds = do
g <- getNodeListStory
liftBase $ do
v <- g listIds
readMVar v
-- v <- liftBase $ f listIds
-- v' <- liftBase $ readMVar v
-- pure $ v'
getRepoReadConfig :: (CmdM env err m)
=> [ListId] -> m NodeListStory
getRepoReadConfig listIds = do
repoFP <- view $ hasConfig . gc_repofilepath
env <- liftBase $ readNodeStoryEnv repoFP
let g = view nse_getter env
liftBase $ do
v <- g listIds
readMVar v
getNodeListStory :: HasNodeStory env err m
=> m ([NodeId] -> IO (MVar NodeListStory))
getNodeListStory = do
env <- view hasNodeStory
pure $ view nse_getter env
readNodeStoryEnv :: NodeStoryDir -> IO NodeStoryEnv
readNodeStoryEnv nsd = do
mvar <- nodeStoryVar nsd Nothing []
saver <- mkNodeStorySaver nsd mvar
pure $ NodeStoryEnv { _nse_var = mvar
, _nse_saver = saver
, _nse_getter = nodeStoryVar nsd (Just mvar) }
------------------------------------------------------------------------
mkNodeStorySaver :: NodeStoryDir -> MVar NodeListStory -> IO (IO ())
mkNodeStorySaver nsd mvns = mkDebounce settings
where
settings = defaultDebounceSettings
{ debounceAction = withMVar mvns (writeNodeStories nsd)
, debounceFreq = 1 * minute
-- , debounceEdge = trailingEdge -- Trigger on the trailing edge
}
minute = 60 * second
second = 10^(6 :: Int)
nodeStoryVar :: NodeStoryDir
-> Maybe (MVar NodeListStory)
-> [NodeId]
-> IO (MVar NodeListStory)
nodeStoryVar nsd Nothing ni = nodeStoryIncs nsd Nothing ni >>= newMVar
nodeStoryVar nsd (Just mv) ni = do
_ <- modifyMVar_ mv $ \mv' -> (nodeStoryIncs nsd (Just mv') ni)
pure mv
nodeStoryInc :: NodeStoryDir -> Maybe NodeListStory -> NodeId -> IO NodeListStory
nodeStoryInc nsd (Just ns@(NodeStory nls)) ni = do
case Map.lookup ni nls of
Nothing -> do
(NodeStory nls') <- nodeStoryRead nsd ni
pure $ NodeStory $ Map.union nls nls'
Just _ -> pure ns
nodeStoryInc nsd Nothing ni = nodeStoryRead nsd ni
nodeStoryIncs :: NodeStoryDir
-> Maybe NodeListStory
-> [NodeId]
-> IO NodeListStory
nodeStoryIncs _ Nothing [] = pure $ NodeStory $ Map.empty
nodeStoryIncs nsd (Just nls) ns = foldM (\m n -> nodeStoryInc nsd (Just m) n) nls ns
nodeStoryIncs nsd Nothing (ni:ns) = do
m <- nodeStoryRead nsd ni
nodeStoryIncs nsd (Just m) ns
nodeStoryDec :: NodeStoryDir
-> NodeListStory
-> NodeId
-> IO NodeListStory
nodeStoryDec nsd ns@(NodeStory nls) ni = do
case Map.lookup ni nls of
Nothing -> do
-- we make sure the corresponding file repo is really removed
_ <- nodeStoryRemove nsd ni
pure ns
Just _ -> do
let ns' = Map.filterWithKey (\k _v -> k /= ni) nls
_ <- nodeStoryRemove nsd ni
pure $ NodeStory ns'
-- | TODO lock
nodeStoryRead :: NodeStoryDir -> NodeId -> IO NodeListStory
nodeStoryRead nsd ni = do
_repoDir <- createDirectoryIfMissing True nsd
let nsp = nodeStoryPath nsd ni
exists <- doesFileExist nsp
if exists
then deserialise <$> DBL.readFile nsp
else pure (initNodeStory ni)
nodeStoryRemove :: NodeStoryDir -> NodeId -> IO ()
nodeStoryRemove nsd ni = do
let nsp = nodeStoryPath nsd ni
exists <- doesFileExist nsp
if exists
then removeFile nsp
else pure ()
nodeStoryRead_test :: NodeStoryDir -> NodeId -> IO (Maybe [ TableNgrams.NgramsType ])
nodeStoryRead_test nsd ni = nodeStoryRead nsd ni >>= \n -> pure
$ fmap Map.keys
$ fmap _a_state
$ Map.lookup ni
$ _unNodeStory n
------------------------------------------------------------------------
type NodeStoryDir = FilePath
writeNodeStories :: NodeStoryDir -> NodeListStory -> IO ()
writeNodeStories fp nls = do
_done <- mapM (writeNodeStory fp) $ splitByNode nls
-- printDebug "[writeNodeStories]" done
pure ()
writeNodeStory :: NodeStoryDir -> (NodeId, NodeListStory) -> IO ()
writeNodeStory rdfp (n, ns) = saverAction' rdfp n ns
splitByNode :: NodeListStory -> [(NodeId, NodeListStory)]
splitByNode (NodeStory m) =
List.map (\(n,a) -> (n, NodeStory $ Map.singleton n a)) $ Map.toList m
saverAction' :: Serialise a => NodeStoryDir -> NodeId -> a -> IO ()
saverAction' repoDir nId a = do
withTempFile repoDir ((cs $ show nId) <> "-tmp-repo.cbor") $ \fp h -> do
-- printDebug "[repoSaverAction]" fp
DBL.hPut h $ serialise a
hClose h
renameFile fp (nodeStoryPath repoDir nId)
nodeStoryPath :: NodeStoryDir -> NodeId -> FilePath
nodeStoryPath repoDir nId = repoDir <> "/" <> filename
where
filename = "repo" <> "-" <> (cs $ show nId) <> ".cbor"
------------------------------------------------------------------------
-- TODO : repo Migration TODO TESTS
{-
repoMigration :: NodeStoryDir -> NgramsRepo -> IO ()
repoMigration fp r = writeNodeStories fp (repoToNodeListStory r)
repoToNodeListStory :: NgramsRepo -> NodeListStory
repoToNodeListStory (Repo _v s h) = NodeStory $ Map.fromList ns
where
s' = ngramsState_migration s
h' = ngramsStatePatch_migration h
ns = List.map (\(n,ns')
-> (n, let hs = fromMaybe [] (Map.lookup n h') in
Archive { _a_version = List.length hs
, _a_state = ns'
, _a_history = hs }
)
) $ Map.toList s'
ngramsState_migration :: NgramsState
-> Map NodeId NgramsState'
ngramsState_migration ns =
Map.fromListWith (Map.union) $
List.concat $
map (\(nt, nTable)
-> map (\(nid, table)
-> (nid, Map.singleton nt table)
) $ Map.toList nTable
) $ Map.toList ns
ngramsStatePatch_migration :: [NgramsStatePatch]
-> Map NodeId [NgramsStatePatch']
ngramsStatePatch_migration np' = Map.fromListWith (<>)
$ List.concat
$ map toPatch np'
where
toPatch :: NgramsStatePatch -> [(NodeId, [NgramsStatePatch'])]
toPatch p =
List.concat $
map (\(nt, nTable)
-> map (\(nid, table)
-> (nid, [fst $ Patch.singleton nt table])
) $ Patch.toList nTable
) $ Patch.toList p
-}
...@@ -234,7 +234,7 @@ delimiter Comma = fromIntegral $ ord ','
------------------------------------------------------------------------
readCsvOn' :: [CsvDoc -> Text] -> FilePath -> IO (Either Prelude.String [Text])
readCsvOn' fields fp = do
- r <- readFile fp
+ r <- readCSVFile fp
pure $ ( V.toList
. V.map (\l -> intercalate (pack " ") $ map (\field -> field l) fields)
. snd ) <$> r
...@@ -267,8 +267,8 @@ readByteStringStrict d ff = (readByteStringLazy d ff) . BL.fromStrict
------------------------------------------------------------------------
-- | TODO use readFileLazy
- readFile :: FilePath -> IO (Either Prelude.String (Header, Vector CsvDoc))
+ readCSVFile :: FilePath -> IO (Either Prelude.String (Header, Vector CsvDoc))
- readFile fp = do
+ readCSVFile fp = do
result <- fmap (readCsvLazyBS Comma) $ BL.readFile fp
case result of
Left _err -> fmap (readCsvLazyBS Tab) $ BL.readFile fp
...@@ -448,7 +448,7 @@ parseHal' bs = (V.toList . V.map csvHal2doc . snd) <$> readCsvHalLazyBS bs
------------------------------------------------------------------------
parseCsv :: FilePath -> IO (Either Prelude.String [HyperdataDocument])
- parseCsv fp = fmap (V.toList . V.map csv2doc . snd) <$> readFile fp
+ parseCsv fp = fmap (V.toList . V.map csv2doc . snd) <$> readCSVFile fp
{-
parseCsv' :: BL.ByteString -> Either Prelude.String [HyperdataDocument]
......
...@@ -68,5 +68,3 @@ history' types lists = (Map.map (Map.unionsWith (<>)))
-> Map NgramsType [HashMap NgramsTerm NgramsPatch]
toMap m = Map.map (cons . unNgramsTablePatch)
$ unPatchMapToMap m
@@ -132,14 +132,11 @@ cooc2graphWith' doPartitions distance threshold strength myCooc = do
     (as, bs)    = List.unzip $ Map.keys distanceMap
     n'          = Set.size $ Set.fromList $ as <> bs
     bridgeness' = bridgeness (fromIntegral nodesApprox) partitions distanceMap
-    confluence' = Map.empty -- confluence (Map.keys bridgeness') 3 True False
+    confluence' = Map.empty -- BAC.computeConfluences 3 (Map.keys bridgeness') True
+    -- confluence (Map.keys bridgeness') 3 True False

   seq bridgeness' $ printDebug "bridgeness OK" ()
-  saveAsFileDebug "/tmp/bridgeness" bridgeness'
-  --seq confluence' $ printDebug "confluence OK" ()
-  --saveAsFileDebug "/tmp/confluence" confluence'
-  let g = data2graph ti diag bridgeness' confluence' partitions
-  --saveAsFileDebug "/tmp/graph" g
-  pure g
+  seq confluence' $ printDebug "confluence OK" ()
+  pure $ data2graph ti diag bridgeness' confluence' partitions

 type Reverse = Bool
......
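The `seq ... $ printDebug ...` lines above force each intermediate structure before logging, so a failure surfaces next to its checkpoint message rather than later inside `data2graph`. A generic sketch of that pattern, with hypothetical names:

```haskell
-- Sketch of the force-then-log pattern used above; the names are
-- hypothetical. Note that seq only evaluates to weak head normal form,
-- which is enough to surface an error thrown while building the
-- outermost constructor, but not errors hidden deeper inside it.
debugCheckpoint :: String -> a -> IO ()
debugCheckpoint label value =
  seq value (putStrLn ("checkpoint OK: " <> label))
```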
@@ -9,6 +9,7 @@ Portability : POSIX
 -}
+{-# LANGUAGE MonoLocalBinds #-}
 {-# LANGUAGE ViewPatterns #-}
 module Gargantext.Core.Viz.Phylo.Legacy.LegacyMain
......
@@ -135,7 +135,7 @@ allDataOrigins = map InternalOrigin API.externalAPIs
 ---------------
 data DataText = DataOld ![NodeId]
               | DataNew !(Maybe Integer, ConduitT () HyperdataDocument IO ())
-              -- | DataNew ![[HyperdataDocument]]
+              --- | DataNew ![[HyperdataDocument]]
 -- Show instance is not possible because of IO
 printDataText :: DataText -> IO ()
......
@@ -210,4 +210,3 @@ putListNgrams nodeId ngramsType nes = putListNgrams' nodeId ngramsType m
           & unNodeStory . at listId . _Just . a_history %~ (p :)
           & unNodeStory . at listId . _Just . a_state . at ngramsType' .~ Just ns
   saveNodeStory
@@ -140,13 +140,13 @@ writeFile a = do
 ---
 -- | Example to read a file with Type
-readFile :: ( MonadReader env m
+readGargFile :: ( MonadReader env m
             , HasConfig env
             , MonadBase IO m
             , ReadFile a
             )
          => FilePath -> m a
-readFile fp = do
+readGargFile fp = do
   dataPath <- view $ hasConfig . gc_datafilepath
   liftBase $ readFile' $ toFilePath dataPath fp
......
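Like the `readCSVFile` rename above, renaming this helper to `readGargFile` avoids shadowing `Prelude.readFile`. A hedged sketch of a call site built only on the constraints shown in the diff; the wrapper itself is hypothetical:

```haskell
-- Sketch only: a hypothetical wrapper around the renamed readGargFile;
-- `a` stands for any type with a ReadFile instance, and the path is
-- resolved against gc_datafilepath exactly as shown above.
readGargFileLogged :: ( MonadReader env m
                      , HasConfig env
                      , MonadBase IO m
                      , ReadFile a
                      )
                   => FilePath -> m a
readGargFileLogged fp = do
  liftBase $ putStrLn ("reading data file: " <> fp)
  readGargFile fp
```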
@@ -230,6 +230,9 @@ selectNodesIdWithType nt = proc () -> do
 ------------------------------------------------------------------------
+nodeExists :: (HasNodeError err) => NodeId -> Cmd err Bool
+nodeExists nId = (== [DPS.Only True])
+  <$> runPGSQuery [sql|SELECT true FROM nodes WHERE id = ? AND ?|] (nId, True)
 getNode :: HasNodeError err => NodeId -> Cmd err (Node Value)
 getNode nId = do
......
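The new `nodeExists` helper lets callers probe for a node before running heavier queries such as `getNode`. A minimal sketch of a guarded lookup; the wrapper name is hypothetical:

```haskell
-- Sketch only: getNodeIfExists is a hypothetical wrapper combining the
-- new nodeExists check with the existing getNode lookup.
getNodeIfExists :: HasNodeError err => NodeId -> Cmd err (Maybe (Node Value))
getNodeIfExists nId = do
  exists <- nodeExists nId
  if exists
    then Just <$> getNode nId
    else pure Nothing
```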
@@ -19,27 +19,35 @@ commentary with @some markup@.
 module Gargantext.Database.Query.Table.NodeNode
   ( module Gargantext.Database.Schema.NodeNode
-  , queryNodeNodeTable
+  , deleteNodeNode
   , getNodeNode
   , insertNodeNode
-  , deleteNodeNode
+  , nodeNodesCategory
+  , nodeNodesScore
+  , queryNodeNodeTable
+  , selectDocNodes
+  , selectDocs
+  , selectDocsDates
   , selectPublicNodes
   )
   where

 import Control.Arrow (returnA)
-import Control.Lens ((^.))
-import qualified Opaleye as O
-import Opaleye
+import Control.Lens ((^.), view)
+import Data.Text (Text, splitOn)
+import Data.Maybe (catMaybes)
+import Database.PostgreSQL.Simple.SqlQQ (sql)
+import Database.PostgreSQL.Simple.Types (Values(..), QualifiedIdentifier(..))
 import Gargantext.Core
 import Gargantext.Core.Types
-import Gargantext.Database.Schema.NodeNode
 import Gargantext.Database.Admin.Types.Hyperdata
 import Gargantext.Database.Prelude
 import Gargantext.Database.Schema.Node
+import Gargantext.Database.Schema.NodeNode
 import Gargantext.Prelude
+import Opaleye
+import qualified Database.PostgreSQL.Simple as PGS
+import qualified Opaleye as O

 queryNodeNodeTable :: Select NodeNodeRead
 queryNodeNodeTable = selectTable nodeNodeTable
@@ -113,8 +121,113 @@ deleteNodeNode n1 n2 = mkCmd $ \conn ->
       )

 ------------------------------------------------------------------------
-selectPublicNodes :: HasDBid NodeType
-                  => (Hyperdata a, DefaultFromField SqlJsonb a)
+-- | Favorite management
+_nodeNodeCategory :: CorpusId -> DocId -> Int -> Cmd err [Int]
+_nodeNodeCategory cId dId c = map (\(PGS.Only a) -> a) <$> runPGSQuery favQuery (c,cId,dId)
+  where
+    favQuery :: PGS.Query
+    favQuery = [sql|UPDATE nodes_nodes SET category = ?
+                    WHERE node1_id = ? AND node2_id = ?
+                    RETURNING node2_id;
+               |]
+
+nodeNodesCategory :: [(CorpusId, DocId, Int)] -> Cmd err [Int]
+nodeNodesCategory inputData = map (\(PGS.Only a) -> a)
+                          <$> runPGSQuery catQuery (PGS.Only $ Values fields inputData)
+  where
+    fields   = map (\t -> QualifiedIdentifier Nothing t) ["int4","int4","int4"]
+    catQuery :: PGS.Query
+    catQuery = [sql|UPDATE nodes_nodes as nn0
+                    SET category = nn1.category
+                    FROM (?) as nn1(node1_id, node2_id, category)
+                    WHERE nn0.node1_id = nn1.node1_id
+                    AND   nn0.node2_id = nn1.node2_id
+                    RETURNING nn1.node2_id
+               |]
+
+------------------------------------------------------------------------
+-- | Score management
+_nodeNodeScore :: CorpusId -> DocId -> Int -> Cmd err [Int]
+_nodeNodeScore cId dId c = map (\(PGS.Only a) -> a) <$> runPGSQuery scoreQuery (c,cId,dId)
+  where
+    scoreQuery :: PGS.Query
+    scoreQuery = [sql|UPDATE nodes_nodes SET score = ?
+                      WHERE node1_id = ? AND node2_id = ?
+                      RETURNING node2_id;
+                 |]
+
+nodeNodesScore :: [(CorpusId, DocId, Int)] -> Cmd err [Int]
+nodeNodesScore inputData = map (\(PGS.Only a) -> a)
+                       <$> runPGSQuery catScore (PGS.Only $ Values fields inputData)
+  where
+    fields   = map (\t -> QualifiedIdentifier Nothing t) ["int4","int4","int4"]
+    catScore :: PGS.Query
+    catScore = [sql|UPDATE nodes_nodes as nn0
+                    SET score = nn1.score
+                    FROM (?) as nn1(node1_id, node2_id, score)
+                    WHERE nn0.node1_id = nn1.node1_id
+                    AND   nn0.node2_id = nn1.node2_id
+                    RETURNING nn1.node2_id
+               |]
+
+------------------------------------------------------------------------
+_selectCountDocs :: HasDBid NodeType => CorpusId -> Cmd err Int
+_selectCountDocs cId = runCountOpaQuery (queryCountDocs cId)
+  where
+    queryCountDocs cId' = proc () -> do
+      (n, nn) <- joinInCorpus -< ()
+      restrict -< nn^.nn_node1_id  .== (toNullable $ pgNodeId cId')
+      restrict -< nn^.nn_category  .>= (toNullable $ sqlInt4 1)
+      restrict -< n^.node_typename .== (sqlInt4 $ toDBid NodeDocument)
+      returnA  -< n
+
+-- | TODO use UTCTime fast
+selectDocsDates :: HasDBid NodeType => CorpusId -> Cmd err [Text]
+selectDocsDates cId = map (head' "selectDocsDates" . splitOn "-")
+                   <$> catMaybes
+                   <$> map (view hd_publication_date)
+                   <$> selectDocs cId
+
+selectDocs :: HasDBid NodeType => CorpusId -> Cmd err [HyperdataDocument]
+selectDocs cId = runOpaQuery (queryDocs cId)
+
+queryDocs :: HasDBid NodeType => CorpusId -> O.Select (Column SqlJsonb)
+queryDocs cId = proc () -> do
+  (n, nn)  <- joinInCorpus -< ()
+  restrict -< nn^.nn_node1_id  .== (toNullable $ pgNodeId cId)
+  restrict -< nn^.nn_category  .>= (toNullable $ sqlInt4 1)
+  restrict -< n^.node_typename .== (sqlInt4 $ toDBid NodeDocument)
+  returnA  -< view (node_hyperdata) n
+
+selectDocNodes :: HasDBid NodeType => CorpusId -> Cmd err [Node HyperdataDocument]
+selectDocNodes cId = runOpaQuery (queryDocNodes cId)
+
+queryDocNodes :: HasDBid NodeType => CorpusId -> O.Select NodeRead
+queryDocNodes cId = proc () -> do
+  (n, nn)  <- joinInCorpus -< ()
+  restrict -< nn^.nn_node1_id  .== (toNullable $ pgNodeId cId)
+  restrict -< nn^.nn_category  .>= (toNullable $ sqlInt4 1)
+  restrict -< n^.node_typename .== (sqlInt4 $ toDBid NodeDocument)
+  returnA  -< n
+
+joinInCorpus :: O.Select (NodeRead, NodeNodeReadNull)
+joinInCorpus = leftJoin queryNodeTable queryNodeNodeTable cond
+  where
+    cond :: (NodeRead, NodeNodeRead) -> Column SqlBool
+    cond (n, nn) = nn^.nn_node2_id .== (view node_id n)

+_joinOn1 :: O.Select (NodeRead, NodeNodeReadNull)
+_joinOn1 = leftJoin queryNodeTable queryNodeNodeTable cond
+  where
+    cond :: (NodeRead, NodeNodeRead) -> Column SqlBool
+    cond (n, nn) = nn^.nn_node1_id .== n^.node_id
+
+------------------------------------------------------------------------
+selectPublicNodes :: HasDBid NodeType => (Hyperdata a, DefaultFromField SqlJsonb a)
                   => Cmd err [(Node a, Maybe Int)]
 selectPublicNodes = runOpaQuery (queryWithType NodeFolderPublic)
......
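`nodeNodesCategory` and `nodeNodesScore` take their input as `(CorpusId, DocId, Int)` triples so an entire batch is updated in a single `UPDATE ... FROM (VALUES ...)` round-trip. A sketch of batching the category update over several documents; the helper is hypothetical:

```haskell
-- Sketch only: markDocsWithCategory is a hypothetical helper; which Int
-- stands for trash/normal/favorite follows the convention used elsewhere
-- in the code base and is not fixed here.
markDocsWithCategory :: CorpusId -> [DocId] -> Int -> Cmd err [Int]
markDocsWithCategory cId docIds cat =
  nodeNodesCategory [ (cId, dId, cat) | dId <- docIds ]
```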
@@ -35,7 +35,7 @@ extra-deps:
 - git: https://gitlab.iscpif.fr/gargantext/haskell-gargantext-prelude.git
   commit: 08096a4913572cf22762fa77613340207ec6d9fd
 - git: https://gitlab.iscpif.fr/gargantext/gargantext-graph.git
-  commit: 13131f5173e2e2ab35b968e53f0feaeee13ad8ac
+  commit: f41ee8b53c3264e5aa5adc06b2e5b293d2a8c474
 # Data Mining Libs
 - git: https://github.com/delanoe/data-time-segment.git
   commit: 10a416b9f6c443866b36479c3441ebb3bcdeb7ef
@@ -100,7 +100,7 @@ extra-deps:
 - git: https://github.com/alpmestan/haskell-igraph.git
   commit: 9f55eb36639c8e0965c8bc539a57738869f33e9a
 - git: https://gitlab.iscpif.fr/gargantext/haskell-infomap.git
-  commit: 76b795c1eaca37f43418d07da9fbdf5f4e7d8f5c
+  commit: 6d1d60b952b9b2b272b58fc5539700fd8890ac88
 # Accelerate Linear Algebra and specific instances
 - git: https://github.com/alpmestan/accelerate.git
@@ -110,7 +110,7 @@ extra-deps:
 - git: https://gitlab.iscpif.fr/amestanogullari/accelerate-utility.git
   commit: a3875fe652d3bb5acb522674c22c6c814c1b4ad0
 - git: https://github.com/alpmestan/accelerate-llvm.git
-  commit: 08eaa8ee771dde88b3dcf37a89b31777f1ca4910
+  commit: 944f5a4aea35ee6aedb81ea754bf46b131fce9e3
   subdirs:
     - accelerate-llvm/
     - accelerate-llvm-native/
......