Verified Commit 5d172111 authored by Przemyslaw Kaminski's avatar Przemyslaw Kaminski

Merge branch 'dev' into 162-dev-haskell-9.2

parents b19412f7 784a2dc3
@@ -2,8 +2,7 @@
# https://vadosware.io/post/zero-to-continuous-integrated-testing-a-haskell-project-with-gitlab/
#
#
-image: cgenie/stack-build:lts-18.18-garg
-#image: cgenie/nixos-stack:latest
+image: adinapoli/gargantext:v1

variables:
  STACK_ROOT: "${CI_PROJECT_DIR}/.stack-root"

@@ -15,32 +14,38 @@ variables:
stages:
  - deps
-  - docs
+  - cabal
  - test
  - docs

deps:
  stage: deps
  cache:
    # cache per branch name
    # key: ${CI_COMMIT_REF_SLUG}
    paths:
      - /nix
      - .stack
      - .stack-root/
      - .stack-work/
      - target
  script:
-    - stack build --no-terminal --haddock --no-haddock-deps --only-dependencies --fast
+    - echo "Building the project from '$CI_PROJECT_DIR'"
+    - nix-shell && export LC_ALL=C.UTF-8 && stack -v build --no-terminal --haddock --no-haddock-deps --only-dependencies --fast

docs:
  stage: docs
  cache:
    # cache per branch name
    # key: ${CI_COMMIT_REF_SLUG}
    paths:
      - /nix
      - .stack
      - .stack-root/
      - .stack-work/
      - target
  script:
-    - stack build --no-terminal --haddock --no-haddock-deps --fast
+    - nix-shell && export LC_ALL=C.UTF-8 && stack build --no-terminal --haddock --no-haddock-deps --fast
    - cp -R "$(stack path --local-install-root)"/doc ./output
  artifacts:
    paths:

@@ -48,16 +53,34 @@ docs:
    expire_in: 1 week

test:
  stage: test
  cache:
    # cache per branch name
    # key: ${CI_COMMIT_REF_SLUG}
    paths:
      - /nix
      - .stack
      - .stack-root/
      - .stack-work/
      - target
  script:
-    - stack test --no-terminal --fast
+    - nix-shell && export LC_ALL=C.UTF-8 && stack test --no-terminal --fast

# TOOO

cabal:
  stage: cabal
  cache:
    # cache per branch name
    # key: ${CI_COMMIT_REF_SLUG}
    paths:
      - /nix
      - .stack
      - .stack-root/
      - .stack-work/
      - .local/
      - .cabal/
      - target
  script:
    - nix-shell && export LC_ALL=C.UTF-8 && hpack && cabal v2-build --dry-run
## Version 0.0.6.9.9.5
* [BACK][OPTIM][[Node terms] Random slowness on loading a page list of terms (#199)](https://gitlab.iscpif.fr/gargantext/haskell-gargantext/issues/199) Refresh index for textflow, reindex of terms list and schedule job
* [BACK][FIX][Docker image with nix (#188)](https://gitlab.iscpif.fr/gargantext/haskell-gargantext/issues/188)
* [FRONT][FEAT][improvement Doc table : default state is stars empty, become one star after reading (#541)](https://gitlab.iscpif.fr/gargantext/purescript-gargantext/issues/541)
## Version 0.0.6.9.9.4.6
* [BACK][FEAT][Import Corpus Docs as Json (#203)](https://gitlab.iscpif.fr/gargantext/haskell-gargantext/issues/203)
* [BACK][OPTIM][[Node terms] Random slowness on loading a page list of terms (#199)](https://gitlab.iscpif.fr/gargantext/haskell-gargantext/issues/199)
* Upgrade indications: run gargantext-upgrade gargantext.ini
## Version 0.0.6.9.9.4.5
* [BACK][OPTIM][[Node terms] Random slowness on loading a page list of terms (#199)](https://gitlab.iscpif.fr/gargantext/haskell-gargantext/issues/199)
## Version 0.0.6.9.9.4.4
* [FRONT][FIX/WIP][[Term table] Add term should not be suggested when term is already added (#544)](https://gitlab.iscpif.fr/gargantext/purescript-gargantext/issues/544)
* [FRONT][FIX] File Type vs Node Type
## Version 0.0.6.9.9.4.3
* [FRONT][FIX][API tab not shown (#540)](https://gitlab.iscpif.fr/gargantext/purescript-gargantext/issues/540)
* [BACK][FIX] SearX analysis needs its NodeTexts
## Version 0.0.6.9.9.4.2
* [BACK][FIX][[Node terms] Random slowness on loading a page list of terms (#199)](https://gitlab.iscpif.fr/gargantext/haskell-gargantext/issues/199)
@@ -6,7 +32,6 @@
## Version 0.0.6.9.9.4.1
* [FRONT][FIX][Doc annotation: space selected as ngrams issue (#458)](https://gitlab.iscpif.fr/gargantext/purescript-gargantext/issues/458)
* [BACK][ADM][API key / user (pubmed) (#201)](https://gitlab.iscpif.fr/gargantext/haskell-gargantext/issues/201)
...
@@ -11,29 +11,29 @@ Script to start gargantext with different modes (Dev, Prod, Mock).
-}

{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE StandaloneDeriving #-}
-{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE Strict #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}

module Main where

-import Data.Version (showVersion)
import Data.Maybe (fromMaybe)
import Data.Text (unpack)
-import qualified Paths_gargantext as PG -- cabal magic build module
+import Data.Version (showVersion)
import Database.PostgreSQL.Simple.SqlQQ (sql)
import GHC.IO.Exception (IOException)
import Gargantext.API (startGargantext, Mode(..)) -- , startGargantextMock)
import Gargantext.API.Admin.EnvTypes (DevEnv)
import Gargantext.API.Dev (withDevEnv, runCmdDev)
import Gargantext.Prelude
import Options.Generic
import System.Exit (exitSuccess)
import qualified Paths_gargantext as PG -- cabal magic build module
-import Gargantext.Prelude
-import Gargantext.API (startGargantext, Mode(..)) -- , startGargantextMock)
--------------------------------------------------------
-- Graph Tests
--import qualified Gargantext.Graph.Utils as U
--import qualified Gargantext.Graph.Distances.Conditional as C
--import qualified Gargantext.Graph.Distances.Distributional as D
--import qualified Gargantext.Graph.Distances.Matrice as M
--------------------------------------------------------
instance ParseRecord Mode
instance ParseField Mode
@@ -59,24 +59,25 @@ main :: IO ()
main = do
  MyOptions myMode myPort myIniFile myVersion <- unwrapRecord
          "Gargantext server"
  ---------------------------------------------------------------
  if myVersion then do
    putStrLn $ "Version: " <> showVersion PG.version
    System.Exit.exitSuccess
  else
    return ()
  ---------------------------------------------------------------
  let myPort' = case myPort of
        Just p  -> p
        Nothing -> 8008
      myIniFile' = case myIniFile of
        Nothing -> panic "[ERROR] gargantext.ini needed"
        Just i  -> i
  ---------------------------------------------------------------
  let start = case myMode of
        Mock -> panic "[ERROR] Mock mode unsupported"
        _    -> startGargantext myMode myPort' (unpack myIniFile')
-       where
-         myIniFile' = case myIniFile of
-           Nothing -> panic "[ERROR] gargantext.ini needed"
-           Just i  -> i
  putStrLn $ "Starting with " <> show myMode <> " mode."
  start
  ---------------------------------------------------------------
-FROM fpco/stack-build:lts-18.18
+FROM ubuntu:jammy

-#RUN apt-key adv --keyserver hkp://pool.sks-keyservers.net:80 --recv-keys 8B1DA6120C2BF624
+ARG DEBIAN_FRONTEND=noninteractive
ARG GHC=8.10.7
ARG STACK=2.7.3
ARG CABAL=3.10.1.0

COPY ./shell.nix /builds/gargantext/shell.nix
COPY ./nix/pkgs.nix /builds/gargantext/nix/pkgs.nix
COPY ./nix/pinned-22.05.nix /builds/gargantext/nix/pinned-22.05.nix

ENV TZ=Europe/Rome

RUN apt-get update && \
-    apt-get install -y ca-certificates git libigraph0-dev && \
-    rm -rf /var/lib/apt/lists/*
+    apt-get install --no-install-recommends -y \
      apt-transport-https \
      autoconf \
      automake \
      build-essential \
      ca-certificates \
      curl \
      gcc \
      git \
      gnupg2 \
      libffi-dev \
      libffi7 \
      libgmp-dev \
      libgmp10 \
      libncurses-dev \
      libncurses5 \
      libnuma-dev \
      libtinfo5 \
      locales \
      lsb-release \
      software-properties-common \
      sudo \
      wget \
      vim \
      xz-utils \
      zlib1g-dev && \
    apt-get clean && rm -rf /var/lib/apt/lists/* && \
    mkdir -m 0755 /nix && groupadd -r nixbld && chown root /nix && \
    for n in $(seq 1 10); do useradd -c "Nix build user $n" -d /var/empty -g nixbld -G nixbld -M -N -r -s "$(command -v nologin)" "nixbld$n"; done
RUN gpg --batch --keyserver keys.openpgp.org --recv-keys 7D1E8AFD1D4A16D71FADA2F2CCC85C0E40C06A8C && \
gpg --batch --keyserver keyserver.ubuntu.com --recv-keys FE5AB6C91FEA597C3B31180B73EDE9E8CFBAEF01
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
RUN set -o pipefail && \
bash <(curl -L https://releases.nixos.org/nix/nix-2.15.0/install) --no-daemon && \
locale-gen en_US.UTF-8
ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en' LC_ALL='en_US.UTF-8'
ENV USER=root
ENV SHELL /bin/bash
RUN . "$HOME/.nix-profile/etc/profile.d/nix.sh" && \
mkdir -p "/builds/gargantext/" && chmod 777 -R "/builds/gargantext" && \
echo "source $HOME/.nix-profile/etc/profile.d/nix.sh" >> "$HOME/.bashrc" && \
echo `which nix-env`
ENV PATH=/root/.nix-profile/bin:$PATH
RUN . $HOME/.bashrc && nix-env --version
RUN \
curl https://downloads.haskell.org/~ghcup/x86_64-linux-ghcup > /usr/bin/ghcup && \
chmod +x /usr/bin/ghcup && \
ghcup config set gpg-setting GPGLax && \
ghcup -v install ghc --force ${GHC} && \
ghcup -v install cabal --force ${CABAL} && \
ghcup -v install stack --force ${STACK}
ENV PATH=/root/.ghcup/bin:$PATH
ENV PATH=/root/.local/bin:$PATH
RUN cd /builds/gargantext && nix-shell
RUN ghcup set 8.10.7 && cabal v2-update && cabal v2-install hpack --overwrite-policy=always
WORKDIR "/builds/gargantext/"
@@ -335,3 +335,16 @@ CREATE OR REPLACE function node_pos(int, int) returns bigint
--drop index node_by_pos;
--create index node_by_pos on nodes using btree(node_pos(id,typename));
-- Optimization for Ngrams Table View
create materialized view if not exists context_node_ngrams_view as
select context_node_ngrams.context_id, ngrams_id, nodes_contexts.node_id
from nodes_contexts
join context_node_ngrams
on context_node_ngrams.context_id = nodes_contexts.context_id;
create index if not exists context_node_ngrams_context_id_ngrams_id_idx on context_node_ngrams(context_id, ngrams_id);
create index if not exists context_node_ngrams_view_context_id_idx on context_node_ngrams_view(context_id);
create index if not exists context_node_ngrams_view_ngrams_id_idx on context_node_ngrams_view(ngrams_id);
create index if not exists context_node_ngrams_view_node_id_idx on context_node_ngrams_view(node_id);
create index if not exists node_stories_ngrams_id_idx on node_stories(ngrams_id);
create materialized view if not exists context_node_ngrams_view as
select context_node_ngrams.context_id, ngrams_id, nodes_contexts.node_id
from nodes_contexts
join context_node_ngrams
on context_node_ngrams.context_id = nodes_contexts.context_id;
create index if not exists context_node_ngrams_context_id_ngrams_id_idx on context_node_ngrams(context_id, ngrams_id);
create index if not exists context_node_ngrams_view_context_id_idx on context_node_ngrams_view(context_id);
create index if not exists context_node_ngrams_view_ngrams_id_idx on context_node_ngrams_view(ngrams_id);
create index if not exists context_node_ngrams_view_node_id_idx on context_node_ngrams_view(node_id);
create index if not exists node_stories_ngrams_id_idx on node_stories(ngrams_id);
@@ -5,7 +5,7 @@ cabal-version: 1.12
-- see: https://github.com/sol/hpack

name: gargantext
-version: 0.0.6.9.9.4.2
+version: 0.0.6.9.9.5
synopsis: Search, map, share
description: Please see README.md
category: Data
@@ -56,6 +56,7 @@ library
      Gargantext.Core.Text.Corpus.Parsers
      Gargantext.Core.Text.Corpus.Parsers.CSV
      Gargantext.Core.Text.Corpus.Parsers.Date.Parsec
+     Gargantext.Core.Text.Corpus.Parsers.JSON
      Gargantext.Core.Text.List.Formats.CSV
      Gargantext.Core.Text.Metrics
      Gargantext.Core.Text.Metrics.CharByChar
@@ -393,6 +394,7 @@ library
    , crawlerISTEX
    , crawlerIsidore
    , crawlerPubMed
+   , cron
    , cryptohash
    , data-time-segment
    , deepseq
@@ -689,6 +691,7 @@ executable gargantext-init
  ghc-options: -Wall -threaded -rtsopts -with-rtsopts=-N -O2 -Wmissing-signatures
  build-depends:
      base
+   , cron
    , extra
    , gargantext
    , gargantext-prelude
@@ -798,6 +801,7 @@ executable gargantext-server
    , gargantext-prelude
    , ini
    , optparse-generic
+   , postgresql-simple
    , text
    , unordered-containers
    , vector
@@ -826,6 +830,7 @@ executable gargantext-upgrade
  ghc-options: -Wall -threaded -rtsopts -with-rtsopts=-N -O2 -Wmissing-signatures
  build-depends:
      base
+   , cron
    , extra
    , gargantext
    , gargantext-prelude
...
@@ -6,7 +6,7 @@ name: gargantext
# | | |       +----- Layers * : New versions with API additions
# | | |       | +--- Layers * : New versions without API breaking changes
# | | |       | |
-version: '0.0.6.9.9.4.2'
+version: '0.0.6.9.9.5'
synopsis: Search, map, share
description: Please see README.md
category: Data
@@ -82,6 +82,7 @@ library:
    - Gargantext.Core.Text.Corpus.Parsers
    - Gargantext.Core.Text.Corpus.Parsers.CSV
    - Gargantext.Core.Text.Corpus.Parsers.Date.Parsec
+   - Gargantext.Core.Text.Corpus.Parsers.JSON
    - Gargantext.Core.Text.List.Formats.CSV
    - Gargantext.Core.Text.Metrics
    - Gargantext.Core.Text.Metrics.CharByChar
@@ -177,6 +178,7 @@ library:
    - crawlerISTEX
    - crawlerIsidore
    - crawlerPubMed
+   - cron
    - cryptohash
    - data-time-segment
    - deepseq
@@ -330,15 +332,16 @@ executables:
      - -fprof-auto
    dependencies:
      - base
+     - cassava
      - containers
+     - full-text-search
      - gargantext
      - gargantext-prelude
-     - vector
-     - cassava
      - ini
      - optparse-generic
+     - postgresql-simple
      - unordered-containers
-     - full-text-search
+     - vector
  gargantext-cli:
    main: Main.hs
@@ -421,6 +424,7 @@ executables:
      - gargantext
      - gargantext-prelude
      - base
+     - cron
  gargantext-invitations:
    main: Main.hs
@@ -451,6 +455,7 @@ executables:
      - gargantext-prelude
      - base
      - postgresql-simple
+     - cron
  gargantext-admin:
    main: Main.hs
...
@@ -26,16 +26,20 @@ Pouillard (who mainly made it).
-}

{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE NumericUnderscores #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}

module Gargantext.API
      where

-import Control.Exception (catch, finally, SomeException)
+import Control.Concurrent
import Control.Exception (catch, finally, SomeException, displayException, IOException)
import Control.Lens
import Control.Monad.Except
import Control.Monad.Reader (runReaderT)
import Data.Either
import Data.Foldable (foldlM)
import Data.List (lookup)
import Data.Text (pack)
import Data.Text.Encoding (encodeUtf8)
@@ -52,7 +56,8 @@ import Gargantext.API.Ngrams (saveNodeStoryImmediate)
import Gargantext.API.Routes
import Gargantext.API.Server (server)
import Gargantext.Core.NodeStory
-import qualified Gargantext.Database.Prelude as DB
+import Gargantext.Database.Prelude (Cmd)
import Gargantext.Database.Action.Metrics.NgramsByContext (refreshNgramsMaterialized)
import Gargantext.Prelude hiding (putStrLn)
import Network.HTTP.Types hiding (Query)
import Network.Wai
@@ -62,6 +67,8 @@ import Network.Wai.Middleware.RequestLogger
import Paths_gargantext (getDataDir)
import Servant
import System.FilePath
import qualified Gargantext.Database.Prelude as DB
import qualified System.Cron.Schedule as Cron

data Mode = Dev | Mock | Prod
  deriving (Show, Read, Generic)
@@ -74,7 +81,8 @@ startGargantext mode port file = do
  portRouteInfo port
  app <- makeApp env
  mid <- makeDevMiddleware mode
-  run port (mid app) `finally` stopGargantext env
+  periodicActions <- schedulePeriodicActions env
+  run port (mid app) `finally` stopGargantext env periodicActions
  where runDbCheck env = do
          r <- runExceptT (runReaderT DB.dbCheck env) `catch`
@@ -91,9 +99,12 @@ portRouteInfo port = do
  putStrLn $ "http://localhost:" <> toUrlPiece port <> "/index.html"
  putStrLn $ "http://localhost:" <> toUrlPiece port <> "/swagger-ui"

-- | Stops the gargantext server and cancels all the periodic actions
-- scheduled to run up to that point.
-- TODO clean this Monad condition (more generic) ?
-stopGargantext :: HasNodeStoryImmediateSaver env => env -> IO ()
-stopGargantext env = do
+stopGargantext :: HasNodeStoryImmediateSaver env => env -> [ThreadId] -> IO ()
+stopGargantext env scheduledPeriodicActions = do
+  forM_ scheduledPeriodicActions killThread
  putStrLn "----- Stopping gargantext -----"
  runReaderT saveNodeStoryImmediate env
@@ -105,6 +116,29 @@ startGargantextMock port = do
  run port application
-}
-- | Schedules all sorts of useful periodic actions to be run while
-- the server is alive accepting requests.
schedulePeriodicActions :: DB.CmdCommon env => env -> IO [ThreadId]
schedulePeriodicActions env =
  -- Add your scheduled actions here.
  let actions = [
          refreshDBViews
        ]
  in foldlM (\ !acc action -> (`mappend` acc) <$> Cron.execSchedule action) [] actions
  where
    refreshDBViews :: Cron.Schedule ()
    refreshDBViews = do
      let doRefresh = do
            res <- DB.runCmd env (refreshNgramsMaterialized :: Cmd IOException ())
            case res of
              Left e   -> liftIO $ putStrLn $ pack ("Refreshing Ngrams materialized view failed: " <> displayException e)
              Right () -> do
                _ <- liftIO $ putStrLn $ pack "Refresh Index Database done"
                pure ()
      Cron.addJob doRefresh "* 2 * * *"
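Registering another periodic task only means appending one more Cron.Schedule () value to the actions list above. A minimal sketch under that assumption follows; the job name, message and cron expression are illustrative only and are not part of this commit (it reuses the module's Text-based putStrLn and pack, as doRefresh does):

-- Hypothetical second job, to be listed in `actions` next to refreshDBViews.
-- "0 3 * * *" fires once a day at 03:00 (crontab syntax from the cron package).
nightlyMaintenance :: Cron.Schedule ()
nightlyMaintenance = Cron.addJob doIt "0 3 * * *"
  where
    doIt :: IO ()
    doIt = putStrLn $ pack "nightly maintenance would run here"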
----------------------------------------------------------------------
fireWall :: Applicative f => Request -> FireWall -> f Bool
...
@@ -38,6 +38,7 @@ import Gargantext.Core.Text.Terms.WithList (buildPatterns, termsInText)
import Gargantext.Core.Types.Main (ListType(..))
import Gargantext.Database.Action.Flow (saveDocNgramsWith)
import Gargantext.Database.Action.Flow.Types (FlowCmdM)
import Gargantext.Database.Action.Metrics.NgramsByContext (refreshNgramsMaterialized)
import Gargantext.Database.Admin.Types.Hyperdata.Document
import Gargantext.Database.Admin.Types.Node
import Gargantext.Database.Query.Table.Node (getNode)
@@ -49,7 +50,6 @@ import Gargantext.Database.Types (Indexed(..))
import Gargantext.Prelude
import Gargantext.Utils.Jobs (serveJobsAPI, MonadJobStatus(..))
import Servant
-- import Servant.Job.Async
import qualified Data.ByteString.Lazy as BSL
import qualified Data.Csv as Csv
import qualified Data.HashMap.Strict as HashMap
@@ -180,7 +180,7 @@ reIndexWith cId lId nt lts = do
  -- Saving the indexation in database
  _ <- mapM (saveDocNgramsWith lId) ngramsByDoc
  _ <- refreshNgramsMaterialized
  pure ()

toIndexedNgrams :: HashMap Text NgramsId -> Text -> Maybe (Indexed Int Ngrams)
...
@@ -233,7 +233,7 @@ addToCorpusWithQuery user cid (WithQuery { _wq_query = q
      markProgress 1 jobHandle

      void $ flowDataText user txt (Multi l) cid (Just flw) jobHandle
      -- printDebug "corpus id" cids
      -- printDebug "sending email" ("xxxxxxxxxxxxxxxxxxxxx" :: Text)
      sendMail user
      -- TODO ...
@@ -270,6 +270,7 @@ addToCorpusWithForm user cid (NewWithForm ft ff d l _n sel) jobHandle = do
          WOS       -> Parser.parseFormatC Parser.WOS
          PresseRIS -> Parser.parseFormatC Parser.RisPresse
          Iramuteq  -> Parser.parseFormatC Parser.Iramuteq
          JSON      -> Parser.parseFormatC Parser.JSON

      -- TODO granularity of the logStatus
      let data' = case ff of
...
@@ -15,6 +15,7 @@ data FileType = CSV
              | PresseRIS
              | WOS
              | Iramuteq
              | JSON
  deriving (Eq, Show, Generic)

instance ToSchema FileType
instance Arbitrary FileType where arbitrary = elements [CSV, PresseRIS]
@@ -28,7 +29,8 @@ instance FromHttpApiData FileType where
  parseUrlPiece "PresseRis" = pure PresseRIS
  parseUrlPiece "WOS"       = pure WOS
  parseUrlPiece "Iramuteq"  = pure Iramuteq
-  parseUrlPiece _ = panic "[G.A.A.Node.Corpus.New] File Type not implemented (yet)"
+  parseUrlPiece "JSON"      = pure JSON
+  parseUrlPiece s = panic $ "[G.A.A.Node.Corpus.New] File Type not implemented (yet): " <> s
instance ToHttpApiData FileType where
  toUrlPiece = pack . show
...
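A quick sketch (not part of the commit, assuming the FileType instances above are in scope) of what the new case buys: the url piece "JSON" now parses instead of hitting the panic fallback, and it round-trips through the ToHttpApiData encoding.

-- Sketch only: the new constructor round-trips through the HTTP API piece encoding.
jsonPieceRoundTrips :: Bool
jsonPieceRoundTrips = parseUrlPiece (toUrlPiece JSON) == Right (JSON :: FileType)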
@@ -21,9 +21,10 @@ import Gargantext.Database.Action.Flow.List (flowList_DbRepo)
import Gargantext.Database.Action.Flow.Types (FlowCmdM)
import Gargantext.Database.Action.User (getUserId)
import Gargantext.Database.Admin.Config (userMaster)
import Gargantext.Database.Query.Table.Node (insertDefaultNodeIfNotExists)
import Gargantext.Database.Admin.Types.Hyperdata.Corpus (HyperdataCorpus)
import Gargantext.Database.Admin.Types.Hyperdata.Document (HyperdataDocument(..))
-import Gargantext.Database.Admin.Types.Node (CorpusId, ListId)
+import Gargantext.Database.Admin.Types.Node (CorpusId, ListId, NodeType(NodeTexts))
import Gargantext.Database.Prelude (hasConfig)
import Gargantext.Database.Query.Table.Node (defaultListMaybe, getOrMkList)
import Gargantext.Database.Query.Tree.Root (getOrMk_RootWithCorpus)
@@ -154,6 +155,10 @@ triggerSearxSearch :: (MonadBase IO m, FlowCmdM env err m, MonadJobStatus m)
  -> JobHandle m
  -> m ()
triggerSearxSearch user cId q l jobHandle = do
  userId <- getUserId user
  _tId <- insertDefaultNodeIfNotExists NodeTexts cId userId

  let numPages = 100
  markStarted numPages jobHandle
...
@@ -42,6 +42,7 @@ import Data.Tuple.Extra (both, first, second)
import Gargantext.API.Node.Corpus.New.Types (FileFormat(..))
import Gargantext.Core (Lang(..))
import Gargantext.Core.Text.Corpus.Parsers.CSV (parseHal, parseCsv, parseCsvC)
import Gargantext.Core.Text.Corpus.Parsers.JSON (parseJSONC)
import Gargantext.Core.Text.Corpus.Parsers.RIS.Presse (presseEnrich)
import Gargantext.Database.Admin.Types.Hyperdata (HyperdataDocument(..))
import Gargantext.Database.Query.Table.Ngrams (NgramsType(..))
@@ -79,7 +80,8 @@ data FileType = WOS
              | CsvGargV3
              | CsvHal
              | Iramuteq
-  deriving (Show)
+              | JSON
  deriving (Show, Eq)
-- Implemented (ISI Format)
-- | DOC -- Not Implemented / import Pandoc
@@ -132,6 +134,12 @@ parseFormatC Iramuteq Plain bs = do
            )
        <$> eDocs

parseFormatC JSON Plain bs = do
  let eParsedC = parseJSONC $ DBL.fromStrict bs
  case eParsedC of
    Left err              -> pure $ Left err
    Right (mLen, parsedC) -> pure $ Right (mLen, transPipe (pure . runIdentity) parsedC)

parseFormatC ft ZIP bs = do
  path <- liftBase $ emptySystemTempFile "parsed-zip"
  liftBase $ DB.writeFile path bs
@@ -154,7 +162,7 @@ parseFormatC ft ZIP bs = do
          pure $ Right ( Just totalLength
                       , sequenceConduits contents' >> pure () ) -- .| mapM_C (printDebug "[parseFormatC] doc")
        _ -> pure $ Left $ unpack $ intercalate "\n" $ pack <$> errs
parseFormatC _ _ _ = undefined
@@ -211,7 +219,7 @@ parseFile WOS Plain p = do
parseFile Iramuteq Plain p = do
  docs <- join $ mapM ((toDoc Iramuteq) . (map (second (Text.replace "_" " "))))
              <$> snd
              <$> enrichWith Iramuteq
              <$> readFileWith Iramuteq p
  pure $ Right docs
@@ -226,7 +234,7 @@ toDoc ff d = do
  -- let abstract = lookup "abstract" d
  let lang = EN -- maybe EN identity (join $ detectLangDefault <$> (fmap (DT.take 50) abstract))

  let dateToParse = DT.replace " " "" <$> lookup "PY" d -- <> Just " " <> lookup "publication_date" d
  -- printDebug "[G.C.T.C.Parsers] dateToParse" dateToParse
  (utcTime, (pub_year, pub_month, pub_day)) <- Date.dateSplit lang dateToParse
@@ -314,9 +322,8 @@ clean txt = DBC.map clean' txt
    clean' ';' = '.'
    clean' c   = c

--
splitOn :: NgramsType -> Maybe Text -> Text -> [Text]
splitOn Authors (Just "WOS") = (DT.splitOn "; ")
splitOn _ _                  = (DT.splitOn ", ")
{-|
Module      : Gargantext.Core.Text.Corpus.Parsers.CSV
Description :
Copyright   : (c) CNRS, 2017-Present
License     : AGPL + CECILL v3
Maintainer  : team@gargantext.org
@@ -378,7 +378,7 @@ instance ToNamedRecord CsvHal where
            , "instStructId_i"  .= csvHal_instStructId_i
            , "deptStructId_i"  .= csvHal_deptStructId_i
            , "labStructId_i"   .= csvHal_labStructId_i
            , "rteamStructId_i" .= csvHal_rteamStructId_i
            , "docType_s"       .= csvHal_docType_s
            ]
@@ -472,7 +472,7 @@ parseCsvC bs = do
        Right res -> Right res
  case result of
    Left err -> Left err
-   Right r  -> Right $ (Just $ Prelude.fromIntegral $ Prelude.length $ snd r, (yieldMany $ snd r) .| mapC csv2doc)
+   Right r  -> Right (Just $ Prelude.fromIntegral $ Prelude.length $ snd r, (yieldMany $ snd r) .| mapC csv2doc)

------------------------------------------------------------------------
-- Csv v3 weighted for phylo
...
{-|
Module : Gargantext.Core.Text.Corpus.Parsers.JSON
Description :
Copyright : (c) CNRS, 2017-Present
License : AGPL + CECILL v3
Maintainer : team@gargantext.org
Stability : experimental
Portability : POSIX
JSON parser for Gargantext corpus files.
-}
{-# LANGUAGE DuplicateRecordFields #-}
module Gargantext.Core.Text.Corpus.Parsers.JSON where
import Conduit
import Data.Aeson
import qualified Data.ByteString.Lazy as BL
import Data.Either (Either(..))
import Data.Text
import GHC.Generics
import qualified Prelude
import Gargantext.Database.Admin.Types.Hyperdata (HyperdataDocument(..))
-- import Gargantext.Database.Schema.Node (NodePoly(..))
import Gargantext.Prelude hiding (length)
data JSONStruct =
JSONStruct { documents :: [ JSONStructDocument ]
, garg_version :: Text }
deriving (Generic)
instance FromJSON JSONStruct
data JSONStructDocument =
JSONStructDocument { document :: JSONDocument
, ngrams :: JSONNgrams
, hash :: Text }
deriving (Generic)
instance FromJSON JSONStructDocument
data JSONDocument =
JSONDocument { id :: Int
, hash_id :: Maybe Text
, typename :: Int
, user_id :: Int
, parent_id :: Maybe Int
, name :: Text
, date :: Text
, hyperdata :: HyperdataDocument }
deriving (Generic)
instance FromJSON JSONDocument
data JSONNgrams =
JSONNgrams { ngrams :: [Text]
, hash :: Text }
deriving (Generic)
instance FromJSON JSONNgrams
------------------------------------------------------------------------
-- | TODO: documents -> document -> hyperdata + title etc
readJSONLazyBS :: BL.ByteString -> Either Prelude.String JSONStruct
readJSONLazyBS bs = eitherDecode bs
parseJSONC :: BL.ByteString
-> Either Prelude.String (Maybe Integer, ConduitT () HyperdataDocument Identity ())
parseJSONC bs = do
case readJSONLazyBS bs of
Left err -> Left err
Right (JSONStruct { documents }) ->
Right ( Just $ Prelude.fromIntegral $ Prelude.length documents
, yieldMany documents .| mapC doc2hyperdoc )
doc2hyperdoc :: JSONStructDocument -> HyperdataDocument
doc2hyperdoc (JSONStructDocument { document = JSONDocument { hyperdata } }) = hyperdata
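Since Gargantext.Core.Text.Corpus.Parsers.JSON is a new module, a standalone usage sketch may help (not part of the commit; the input path is hypothetical and error handling is kept minimal):

-- Sketch: parse a corpus JSON file with the new parser and count the documents.
-- runConduitPure and sinkList are re-exported by the Conduit module used above.
import Conduit (runConduitPure, sinkList, (.|))
import qualified Data.ByteString.Lazy as BL
import Gargantext.Core.Text.Corpus.Parsers.JSON (parseJSONC)

main :: IO ()
main = do
  bs <- BL.readFile "corpus.json"          -- hypothetical input file
  case parseJSONC bs of
    Left err        -> putStrLn ("parse error: " <> err)
    Right (mLen, c) -> do
      let docs = runConduitPure (c .| sinkList)
      putStrLn ("declared: " <> show mLen <> ", parsed: " <> show (length docs))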
@@ -35,6 +35,7 @@ import Gargantext.Core.Types.Query (Limit(..))
import Gargantext.Database.Action.Flow.Types (FlowCmdM)
import Gargantext.Database.Action.Metrics.NgramsByContext (getContextsByNgramsOnlyUser{-, getTficfWith-})
import Gargantext.Database.Admin.Config (userMaster)
import Gargantext.Database.Action.Metrics.NgramsByContext (refreshNgramsMaterialized)
import Gargantext.Database.Admin.Types.Node (ListId, CorpusId)
import Gargantext.Database.Query.Table.Node (defaultList)
import Gargantext.Database.Query.Table.Node.Select
@@ -125,7 +126,11 @@ updateNgramsOccurrences' cId maybeListId maybeLimit tabType = do
  let fields = map (\t-> QualifiedIdentifier Nothing t)
             $ map Text.pack ["int4", "int4","text","int4","int4"]

-  map (\(Only a) -> a) <$> runPGSQuery queryInsert (Only $ Values fields toInsert)
+  res <- map (\(Only a) -> a) <$> runPGSQuery queryInsert (Only $ Values fields toInsert)
  -- _ <- map (\(Only a) -> a) <$> runPGSQuery [sql|refresh materialized view context_node_ngrams_view;|] ()
  _ <- refreshNgramsMaterialized
  pure res
...
@@ -18,6 +18,7 @@ module Gargantext.Database.Action.Metrics.NgramsByContext
-- import Debug.Trace (trace)
--import Data.Map.Strict.Patch (PatchMap, Replace, diff)
import Control.Monad (void)
import Data.HashMap.Strict (HashMap)
import Data.Map.Strict (Map)
import Data.Set (Set)
@@ -29,7 +30,7 @@ import Gargantext.API.Ngrams.Types (NgramsTerm(..))
import Gargantext.Core
import Gargantext.Data.HashMap.Strict.Utils as HM
import Gargantext.Database.Admin.Types.Node (ListId, CorpusId, NodeId(..), ContextId, MasterCorpusId, NodeType(NodeDocument), UserCorpusId, DocId)
-import Gargantext.Database.Prelude (Cmd, runPGSQuery)
+import Gargantext.Database.Prelude (Cmd, runPGSQuery, execPGSQuery)
import Gargantext.Database.Schema.Ngrams (ngramsTypeId, NgramsType(..))
import Gargantext.Prelude
import qualified Data.HashMap.Strict as HM
@@ -122,31 +123,29 @@ getOccByNgramsOnlyFast cId lId nt = do
        -> Cmd err [(Text, DPST.PGArray Int)]
    run cId' lId' nt' = runPGSQuery query
                          ( cId'
-                         , cId'
                          , lId'
                          , ngramsTypeId nt'
                          )

    query :: DPS.Query
    query = [sql|
-      SELECT ng.terms
-           , ARRAY(
-              SELECT DISTINCT context_node_ngrams.context_id
-              FROM context_node_ngrams
-              JOIN nodes_contexts
-                ON context_node_ngrams.context_id = nodes_contexts.context_id
-              WHERE ng.id = context_node_ngrams.ngrams_id
-                AND nodes_contexts.node_id = ?
-             ) AS context_ids
-      FROM ngrams ng
-      JOIN node_stories ns ON ng.id = ns.ngrams_id
-      JOIN node_node_ngrams nng ON ns.node_id = nng.node2_id
-      WHERE nng.node1_id = ?
-        AND nng.node2_id = ?
-        AND nng.ngrams_type = ?
-        AND nng.ngrams_id = ng.id
-        AND nng.ngrams_type = ns.ngrams_type_id
-      ORDER BY ng.id ASC;
+      WITH node_context_ids AS
+        (select context_id, ngrams_id
+         FROM context_node_ngrams_view
+         WHERE node_id = ?
+        ), ns AS
+        (select ngrams_id FROM node_stories
+         WHERE node_id = ? AND ngrams_type_id = ?
+        )
+      SELECT ng.terms,
+        ARRAY ( SELECT DISTINCT context_id
+                FROM node_context_ids
+                WHERE ns.ngrams_id = node_context_ids.ngrams_id
+              )
+        AS context_ids
+      FROM ngrams ng
+      JOIN ns ON ng.id = ns.ngrams_id
      |]
@@ -219,12 +218,6 @@ queryNgramsOccurrencesOnlyByContextUser_withSample' = [sql|
      GROUP BY ng.id
  |]

------------------------------------------------------------------------
getContextsByNgramsOnlyUser :: HasDBid NodeType
                            => CorpusId
@@ -399,3 +392,17 @@ queryNgramsByContextMaster' = [sql|
  SELECT m.id, m.terms FROM nodesByNgramsMaster m
  RIGHT JOIN contextsByNgramsUser u ON u.id = m.id
  |]
-- | Refreshes the \"context_node_ngrams_view\" materialized view.
-- This function will be run :
-- - periodically
-- - at reindex stage
-- - at the end of each text flow
refreshNgramsMaterialized :: Cmd err ()
refreshNgramsMaterialized = void $ execPGSQuery refreshNgramsMaterializedQuery ()
  where
    refreshNgramsMaterializedQuery :: DPS.Query
    refreshNgramsMaterializedQuery = [sql| refresh materialized view context_node_ngrams_view; |]
@@ -12,6 +12,8 @@ TODO_2: quantitative tests (coded)
-}

{-# LANGUAGE QuasiQuotes #-}

module Gargantext.Database.GargDB
    where
@@ -141,11 +143,11 @@ writeFile a = do
-- | Example to read a file with Type
readGargFile :: ( MonadReader env m
                , HasConfig env
                , MonadBase IO m
                , ReadFile a
                )
             => FilePath -> m a
readGargFile fp = do
  dataPath <- view $ hasConfig . gc_datafilepath
  liftBase $ readFile' $ toFilePath dataPath fp
@@ -153,9 +155,9 @@ readGargFile fp = do
---
rmFile :: ( MonadReader env m
          , MonadBase IO m
          , HasConfig env
          )
       => FilePath -> m ()
rmFile = onDisk_1 SD.removeFile
@@ -165,8 +167,11 @@ cpFile = onDisk_2 SD.copyFile
---
-mvFile :: (MonadReader env m, MonadBase IO m, HasConfig env)
-       => FilePath -> FilePath -> m ()
+mvFile :: ( MonadReader env m
+          , MonadBase IO m
+          , HasConfig env
+          )
+       => FilePath -> FilePath -> m ()
mvFile fp1 fp2 = do
  cpFile fp1 fp2
  rmFile fp1
@@ -174,10 +179,10 @@ mvFile fp1 fp2 = do
------------------------------------------------------------------------
onDisk_1 :: ( MonadReader env m
            , MonadBase IO m
            , HasConfig env
            )
         => (FilePath -> IO ()) -> FilePath -> m ()
onDisk_1 action fp = do
  dataPath <- view $ hasConfig . gc_datafilepath
  liftBase $ action (toFilePath dataPath fp) `catch` handleExists
...