Commit 5fe5c681 authored by Alexandre Delanoë's avatar Alexandre Delanoë

[MERGE]

parents 42890a18 3fdc66e8
# Optimising CI speed by using tips from https://blog.nimbleways.com/let-s-make-faster-gitlab-ci-cd-pipelines/
-image: adinapoli/gargantext:v2.3
+image: adinapoli/gargantext:v3.1
variables:
  STACK_ROOT: "${CI_PROJECT_DIR}/.stack-root"
@@ -26,6 +26,7 @@ stack:
  script:
    - echo "Building the project from '$CI_PROJECT_DIR'"
    - nix-shell --run "stack build --no-terminal --fast --dry-run"
+  allow_failure: false
cabal:
  stage: cabal
......
@@ -11,8 +11,8 @@ STORE_DIR="${1:-$DEFAULT_STORE}"
# `expected_cabal_project_freeze_hash` with the
# `sha256sum` result calculated on the `cabal.project` and `cabal.project.freeze`.
# This ensures the files stay deterministic so that CI cache can kick in.
-expected_cabal_project_hash="091c119800f234f6554149d67488eb0b41c058a38dec63693f539e200f0d89d7"
+expected_cabal_project_hash="34a58c630ec4812c10da6db0fbc58e13ce9a4b0df9dd29498042b5308010fef9"
-expected_cabal_project_freeze_hash="2db6b8696cbfcc1fa1b17ad359003c2e35e98005db16c2441a32373cb4d6c879"
+expected_cabal_project_freeze_hash="bd90562207ea42857402eced5836b7a23024cc16034bf150968dbf373d2e243c"
cabal --store-dir=$STORE_DIR v2-update 'hackage.haskell.org,2023-12-10T10:34:46Z'
@@ -20,7 +20,7 @@ cabal --store-dir=$STORE_DIR v2-update 'hackage.haskell.org,2023-12-10T10:34:46Z
if ! stack2cabal --help &> /dev/null
then
  echo "stack2cabal could not be found"
-  cabal --store-dir=$STORE_DIR v2-install --index-state="2023-12-10T10:34:46Z" stack2cabal-1.0.14 --overwrite-policy=always
+  cabal --store-dir=$STORE_DIR v2-install --index-state="2023-12-10T10:34:46Z" --constraint 'Cabal==3.6.3.0' stack2cabal-1.0.14 --overwrite-policy=always
fi
stack2cabal --no-run-hpack -p '2023-12-10 10:34:46'
......
@@ -2,16 +2,33 @@
index-state: 2023-12-10T10:34:46Z
-with-compiler: ghc-8.10.7
+with-compiler: ghc-9.4.7
packages:
./
source-repository-package
type: git
location: https://github.com/AccelerateHS/accelerate.git
tag: 334d05519436bb7f20f9926ec76418f5b8afa359
source-repository-package
type: git
location: https://github.com/AccelerateHS/accelerate-llvm.git
tag: 2b5d69448557e89002c0179ea1aaf59bb757a6e3
subdir: accelerate-llvm-native/
accelerate-llvm/
source-repository-package
type: git
location: https://github.com/adinapoli/boolexpr.git
tag: 91928b5d7f9342e9865dde0d94862792d2b88779
source-repository-package
type: git
location: https://github.com/adinapoli/duckling.git
tag: 23603a832117e5352d5b0fb9bb1110228324b35a
source-repository-package
type: git
location: https://github.com/adinapoli/haskell-opaleye.git
@@ -19,25 +36,25 @@ source-repository-package
source-repository-package
type: git
-location: https://github.com/alpmestan/accelerate.git
+location: https://github.com/adinapoli/llvm-hs.git
-tag: 640b5af87cea94b61c7737d878e6f7f2fca5c015
+tag: 7533a9ccd3bfe77141745f6b61039a26aaf5c83b
+subdir: llvm-hs
+llvm-hs-pure
source-repository-package
type: git
-location: https://github.com/alpmestan/accelerate-arithmetic.git
+location: https://github.com/adinapoli/text16-compat.git
-tag: a110807651036ca2228a76507ee35bbf7aedf87a
+tag: 85533b5d597e6fc5498411b4bcfc76380ec80d71
source-repository-package
type: git
-location: https://github.com/alpmestan/accelerate-llvm.git
+location: https://github.com/adinapoli/wikiparsec.git
-tag: 944f5a4aea35ee6aedb81ea754bf46b131fce9e3
+tag: b3519a0351ae9515497680571f76200c24dedb53
-subdir: accelerate-llvm-native/
-accelerate-llvm/
source-repository-package
type: git
-location: https://github.com/alpmestan/ekg-json.git
+location: https://github.com/alpmestan/accelerate-arithmetic.git
-tag: fd7e5d7325939103cd87d0dc592faf644160341c
+tag: a110807651036ca2228a76507ee35bbf7aedf87a
source-repository-package
type: git
@@ -56,6 +73,11 @@ source-repository-package
tag: bc6ca8058077b0b5702ea4b88bd4189cfcad267a
subdir: sparse-linear
source-repository-package
type: git
location: https://github.com/chessai/eigen.git
tag: 8fff32a43df743c8c83428a86dd566a0936a4fba
source-repository-package
type: git
location: https://github.com/delanoe/data-time-segment.git
@@ -71,11 +93,6 @@ source-repository-package
location: https://gitlab.iscpif.fr/amestanogullari/accelerate-utility.git
tag: a3875fe652d3bb5acb522674c22c6c814c1b4ad0
-source-repository-package
-type: git
-location: https://gitlab.iscpif.fr/cgenie/patches-class.git
-tag: 125c7cb90ab8f0cd6ac4a526dbdf302d10c945e9
source-repository-package
type: git
location: https://gitlab.iscpif.fr/gargantext/crawlers/arxiv-api.git
@@ -89,7 +106,7 @@ source-repository-package
source-repository-package
type: git
location: https://gitlab.iscpif.fr/gargantext/crawlers/hal.git
-tag: 25a1e9558075462a82660987920a698b8863dd63
+tag: bfa9069b4ff70f341ca3244e8aff9e83eb4b8b73
source-repository-package
type: git
@@ -124,7 +141,7 @@ source-repository-package
source-repository-package
type: git
location: https://gitlab.iscpif.fr/gargantext/haskell-igraph.git
-tag: 2a28524134b68421f30f6e97961063018f814a82
+tag: 9f8a2f4a014539826a4eab3215cc70c0813f20cb
source-repository-package
type: git
@@ -148,26 +165,14 @@ source-repository-package
source-repository-package
type: git
-location: https://github.com/haskell-servant/servant.git
-tag: c2af6e775d1d36f2011d43aff230bb502f8fba63
-subdir: servant-auth/servant-auth-client/
-servant-auth/servant-auth-server/
-servant-auth/servant-auth/
-servant-client-core/
-servant-client/
-servant-server/
-servant/
+location: https://github.com/MercuryTechnologies/ekg-json.git
+tag: 232db57d6ce0940fcc902adf30a9ed3f3561f21d
source-repository-package
type: git
location: https://github.com/robstewart57/rdf4h.git
tag: 4fd2edf30c141600ffad6d730cc4c1c08a6dbce4
-source-repository-package
-type: git
-location: https://github.com/rspeer/wikiparsec.git
-tag: 9637a82344bb70f7fa8f02e75db3c081ccd434ce
allow-older: *
allow-newer: *
......
This source diff could not be displayed because it is too large.
-FROM ubuntu:jammy
+FROM ubuntu:noble
## NOTA BENE: In order for this to be built successfully, you have to run ./devops/coreNLP/build.sh first.
ARG DEBIAN_FRONTEND=noninteractive
-ARG GHC=8.10.7
+ARG GHC=9.4.7
ARG STACK=2.7.3
ARG CABAL=3.10.1.0
ARG CORENLP=4.5.4
ARG CORE
COPY ./shell.nix /builds/gargantext/shell.nix
COPY ./nix/pkgs.nix /builds/gargantext/nix/pkgs.nix
-COPY ./nix/pinned-22.05.nix /builds/gargantext/nix/pinned-22.05.nix
+COPY ./nix/pinned-23.11.nix /builds/gargantext/nix/pinned-23.11.nix
COPY ./nix/overlays/Cabal-3.10.1.0.nix /builds/gargantext/nix/overlays/Cabal-3.10.1.0.nix
COPY ./nix/overlays/cabal-install-3.10.1.0.nix /builds/gargantext/nix/overlays/cabal-install-3.10.1.0.nix
COPY ./nix/overlays/cabal-install-solver-3.10.1.0.nix /builds/gargantext/nix/overlays/cabal-install-solver-3.10.1.0.nix
@@ -34,13 +34,13 @@ RUN apt-get update && \
git \
gnupg2 \
libffi-dev \
-libffi7 \
+libffi8 \
libgmp-dev \
libgmp10 \
libncurses-dev \
-libncurses5 \
+libncurses6 \
libnuma-dev \
-libtinfo5 \
+libtinfo6 \
locales \
lsb-release \
software-properties-common \
@@ -50,7 +50,7 @@ RUN apt-get update && \
vim \
xz-utils \
zlib1g-dev \
-openjdk-18-jdk \
+openjdk-21-jdk \
unzip && \
apt-get clean && rm -rf /var/lib/apt/lists/* && \
mkdir -m 0755 /nix && groupadd -r nixbld && chown root /nix && \
......
@@ -45,6 +45,10 @@ flag test-crypto
default: False
manual: True
flag disable-db-obfuscation-executable
default: False
manual: True
library
exposed-modules:
Gargantext
@@ -165,6 +169,7 @@ library
Gargantext.Utils.Jobs.Settings
Gargantext.Utils.Jobs.State
Gargantext.Utils.SpacyNLP
+Gargantext.Utils.SpacyNLP.Types
Gargantext.Utils.Tuple
Gargantext.Utils.Zip
other-modules:
@@ -394,7 +399,7 @@ library
RankNTypes
RecordWildCards
StrictData
-ghc-options: -Wall -Wincomplete-uni-patterns -Wincomplete-record-updates -Wmissing-signatures -Wunused-binds -Wunused-imports -Werror -freduction-depth=300 -fplugin=Clippy -fprint-potential-instances
+ghc-options: -Wall -Wincomplete-uni-patterns -Wincomplete-record-updates -Wmissing-signatures -Wunused-binds -Wunused-imports -Werror -freduction-depth=300 -fprint-potential-instances
if flag(test-crypto)
cpp-options: -DTEST_CRYPTO
build-depends:
@@ -459,7 +464,6 @@ library
, fullstop ^>= 0.1.4
, gargantext-graph >=0.1.0.0
, gargantext-prelude
-, ghc-clippy-plugin ^>= 0.0.0.1
, graphviz ^>= 2999.20.1.0
, hashable ^>= 1.3.0.0
, haskell-igraph ^>= 0.10.4
@@ -474,7 +478,9 @@ library
, http-media ^>= 0.8.0.0
, http-types ^>= 0.12.3
, hxt ^>= 9.3.1.22
-, ihaskell ^>= 0.10.2.2
+, ihaskell >= 0.11.0.0
-- necessary for ihaskell to build
, ipython-kernel >= 0.11.0.0
, ini ^>= 0.4.1
, insert-ordered-containers ^>= 0.2.5.1
, iso639
@@ -484,15 +490,21 @@ library
, lens-aeson < 1.3
, lifted-base ^>= 0.2.3.12
, listsafe ^>= 0.1.0.1
+, llvm-hs >= 12.0.0
, located-base ^>= 0.1.1.1
, logging-effect ^>= 1.3.12
, matrix ^>= 0.3.6.1
, monad-control ^>= 1.0.3.1
, monad-logger ^>= 0.3.36
-, morpheus-graphql ^>= 0.17.0
+, morpheus-graphql >= 0.17.0 && < 0.25
-, morpheus-graphql-app ^>= 0.17.0
+, morpheus-graphql-app >= 0.17.0 && < 0.25
-, morpheus-graphql-core ^>= 0.17.0
+, morpheus-graphql-client >= 0.17.0 && < 0.25
-, morpheus-graphql-subscriptions ^>= 0.17.0
+, morpheus-graphql-code-gen >= 0.17.0 && < 0.25
+, morpheus-graphql-code-gen-utils >= 0.17.0 && < 0.25
+, morpheus-graphql-core >= 0.17.0 && < 0.25
+, morpheus-graphql-server >= 0.17.0 && < 0.25
+, morpheus-graphql-subscriptions >= 0.17.0 && < 0.25
+, morpheus-graphql-tests >= 0.17.0 && < 0.25
, mtl ^>= 2.2.2
, natural-transformation ^>= 0.4
, network-uri ^>= 2.6.4.1
@@ -526,26 +538,26 @@ library
, scientific ^>= 0.3.7.0
, semigroups ^>= 0.19.2
, serialise ^>= 0.2.4.0
-, servant ^>= 0.18.3
+, servant >= 0.18.3 && < 0.20
, servant-auth ^>= 0.4.0.0
, servant-auth-client ^>= 0.4.1.0
, servant-auth-server ^>=0.4.6.0
, servant-auth-swagger ^>= 0.2.10.1
, servant-blaze ^>= 0.9.1
, servant-cassava ^>= 0.10.1
-, servant-client ^>= 0.18.3
+, servant-client >= 0.18.3 && < 0.20
-, servant-client-core ^>= 0.18.3
+, servant-client-core >= 0.18.3 && < 0.20
, servant-ekg ^>= 0.3.1
, servant-flatten ^>= 0.2
, servant-job >= 0.2.0.0
-, servant-mock ^>= 0.8.7
, servant-multipart ^>= 0.12.1
-, servant-server ^>= 0.18.3
+, servant-server >= 0.18.3 && < 0.20
, servant-swagger ^>= 1.1.10
, servant-swagger-ui ^>= 0.3.5.3.5.0
, servant-xml-conduit >= 0.1.0.4
, simple-reflect ^>= 0.3.3
, singletons ^>= 2.7
, singletons-th >= 3.1
, split ^>= 0.2.3.4
, stemmer ^>= 0.5.2
, stm ^>= 2.5.0.1
@@ -713,15 +725,18 @@ executable gargantext-db-obfuscation
RecordWildCards
StrictData
ghc-options: -Wall -threaded -rtsopts -with-rtsopts=-N -O2 -Wmissing-signatures
-build-depends:
-  base
-  , extra
-  , gargantext
-  , gargantext-prelude
-  , optparse-simple
-  , postgresql-simple ^>= 0.6.4
-  , text
+if flag(disable-db-obfuscation-executable)
+  buildable: False
+else
+  build-depends:
+    base
+    , extra
+    , gargantext
+    , gargantext-prelude
+    , optparse-simple
+    , postgresql-simple ^>= 0.6.4
+    , text
default-language: Haskell2010
executable gargantext-import
main-is: Main.hs
......
This diff is collapsed.
-import (builtins.fetchGit {
-  name = "nixos-23.05";
-  url = "https://github.com/nixos/nixpkgs";
-  ref = "refs/heads/nixos-23.05";
-  rev = "4ecab3273592f27479a583fb6d975d4aba3486fe";
-})
+import (builtins.fetchGit {
+  name = "nixos-23.11";
+  url = "https://github.com/nixos/nixpkgs";
+  ref = "refs/heads/nixos-23.11";
+  rev = "057f9aecfb71c4437d2b27d3323df7f93c010b7e";
+})
-{ pkgs ? import ./pinned-22.05.nix {} }:
+{ pkgs ? import ./pinned-23.11.nix {} }:
rec {
inherit pkgs;
-# If we are on a Mac, in order to build successfully with cabal we need a bit more work.
-ghc = if pkgs.stdenv.isDarwin
-then haskell1.compiler.ghc8107.overrideAttrs (finalAttrs: previousAttrs: {
-# See https://github.com/NixOS/nixpkgs/pull/149942/files
+ghc947 = if pkgs.stdenv.isDarwin
+then pkgs.haskell.compiler.ghc947.overrideAttrs (finalAttrs: previousAttrs: {
patches = previousAttrs.patches ++ [
# Reverts the linking behavior of GHC to not resolve `-libc++` to `c++`.
(pkgs.fetchpatch {
-url = "https://raw.githubusercontent.com/input-output-hk/haskell.nix/613ec38dbd62ab7929178c9c7ffff71df9bb86be/overlays/patches/ghc/ghc-macOS-loadArchive-fix.patch";
+url = "https://gist.githubusercontent.com/adinapoli/bf722db15f72763bf79dff13a3104b6f/raw/362da0aa3db5c530e0d276183ba68569f216d65a/ghc947-macOS-loadArchive-fix.patch";
-sha256 = "0IUpuzjZb1G+gP3q6RnwQbW4mFzc/OZ/7QqZy+57kx0=";
+sha256 = "sha256-0tHrkWRKFWUewj3uIA0DujVCXo1qgX2lA5p0MIsAHYs=";
})
];
})
-else pkgs.haskell.compiler.ghc8107;
+else pkgs.haskell.compiler.ghc947;
+cabal_install_3_10_1_0 = pkgs.haskell.lib.compose.justStaticExecutables pkgs.haskell.packages.ghc947.cabal-install;
graphviz = pkgs.graphviz.overrideAttrs (finalAttrs: previousAttrs: {
# Increase the YY_BUF_SIZE, see https://gitlab.iscpif.fr/gargantext/haskell-gargantext/issues/290#note_9015
patches = [
@@ -24,26 +23,12 @@ rec {
})
];
});
haskell1 = pkgs.haskell // {
packages = pkgs.haskell.packages // {
ghc8107 = pkgs.haskell.packages.ghc8107.override {
overrides = self: super: {
directory = self.callPackage ./overlays/directory-1.3.7.0.nix {};
process = self.callPackage ./overlays/process-1.6.15.0.nix {};
hackage-security = self.callPackage ./overlays/hackage-security-0.6.2.3.nix {};
Cabal = self.callPackage ./overlays/Cabal-3.10.1.0.nix {};
Cabal-syntax = self.callPackage ./overlays/Cabal-syntax-3.10.1.0.nix {};
cabal-install-solver = self.callPackage ./overlays/cabal-install-solver-3.10.1.0.nix {};
cabal-install = self.callPackage ./overlays/cabal-install-3.10.1.0.nix {};
};
};
};
};
cabal_install_3_10_1_0 = pkgs.haskell.lib.compose.justStaticExecutables haskell1.packages.ghc8107.cabal-install;
igraph_0_10_4 = pkgs.igraph.overrideAttrs (finalAttrs: previousAttrs: {
version = "0.10.4";
+nativeBuildInputs = previousAttrs.nativeBuildInputs or [] ++ [ pkgs.clang_12 ];
src = pkgs.fetchFromGitHub {
owner = "igraph";
repo = "igraph";
@@ -77,7 +62,7 @@ rec {
"-DIGRAPH_USE_INTERNAL_GMP=OFF"
"-DIGRAPH_USE_INTERNAL_PLFIT=OFF"
"-DIGRAPH_GLPK_SUPPORT=ON"
-"-DIGRAPH_GRAPHML_SUPPORT=ON"
+"-DIGRAPH_GRAPHML_SUPPORT=OFF"
"-DIGRAPH_OPENMP_SUPPORT=ON"
"-DIGRAPH_ENABLE_LTO=AUTO"
"-DIGRAPH_ENABLE_TLS=ON"
@@ -97,7 +82,7 @@ rec {
});
hsBuildInputs = [
-ghc
+ghc947
cabal_install_3_10_1_0
];
nonhsBuildInputs = with pkgs; [
@@ -113,20 +98,22 @@ rec {
lapack
lzma
pcre
-pkgconfig
+pkg-config
postgresql
xz
zlib
blas
gfortran7
-# gfortran7.cc.lib
expat
icu
graphviz
-llvm_9
+clang_12
+llvm_12
+gcc12
igraph_0_10_4
libpqxx
libsodium
+zeromq
] ++ ( lib.optionals stdenv.isDarwin [
darwin.apple_sdk.frameworks.Accelerate
]);
@@ -134,8 +121,11 @@ rec {
shellHook = ''
export LD_LIBRARY_PATH="${pkgs.gfortran7.cc.lib}:${libPaths}:$LD_LIBRARY_PATH"
export LIBRARY_PATH="${pkgs.gfortran7.cc.lib}:${libPaths}"
+export PATH="${pkgs.gccStdenv}/bin:$PATH"
+export NIX_CC="${pkgs.gccStdenv}"
+export CC="${pkgs.gccStdenv}/bin/gcc"
'';
-shell = pkgs.mkShell {
+shell = pkgs.mkShell.override { stdenv = pkgs.gccStdenv; } {
name = "gargantext-shell";
buildInputs = hsBuildInputs ++ nonhsBuildInputs;
inherit shellHook;
......
@@ -52,16 +52,11 @@ data AuthenticatedUser = AuthenticatedUser
, _auth_user_id :: UserId
} deriving (Generic)
-$(deriveJSON (JSON.defaultOptions { JSON.fieldLabelModifier = tail . dropWhile ((/=) '_') . tail }) ''AuthenticatedUser)
makeLenses ''AuthenticatedUser
instance ToSchema AuthenticatedUser where
  declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_authUser_")
-instance ToJWT AuthenticatedUser
-instance FromJWT AuthenticatedUser
data AuthenticationError
  = LoginFailed NodeId UserId Jose.Error
  | InvalidUsernameOrPassword
@@ -71,7 +66,6 @@ data AuthenticationError
type AuthContext = '[JWTSettings, CookieSettings] -- , BasicAuthCfg
-- | Instances
-$(deriveJSON (unPrefix "_authReq_") ''AuthRequest)
instance ToSchema AuthRequest where
  declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_authReq_")
@@ -81,7 +75,6 @@ instance Arbitrary AuthRequest where
, p <- arbitraryPassword
]
-$(deriveJSON (unPrefix "_authRes_") ''AuthResponse)
instance ToSchema AuthResponse where
  declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_authRes_")
instance Arbitrary AuthResponse where
@@ -101,20 +94,39 @@ type Password = Text
data ForgotPasswordRequest = ForgotPasswordRequest { _fpReq_email :: Email }
  deriving (Generic )
-$(deriveJSON (unPrefix "_fpReq_") ''ForgotPasswordRequest)
instance ToSchema ForgotPasswordRequest where
  declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_fpReq_")
data ForgotPasswordResponse = ForgotPasswordResponse { _fpRes_status :: Text }
  deriving (Generic )
-$(deriveJSON (unPrefix "_fpRes_") ''ForgotPasswordResponse)
instance ToSchema ForgotPasswordResponse where
  declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_fpRes_")
data ForgotPasswordGet = ForgotPasswordGet {_fpGet_password :: Password}
  deriving (Generic )
-$(deriveJSON (unPrefix "_fpGet_") ''ForgotPasswordGet)
instance ToSchema ForgotPasswordGet where
  declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_fpGet_")
--
-- Lenses
--
makeLenses ''AuthResponse makeLenses ''AuthResponse
--
-- JSON instances
--
$(deriveJSON (JSON.defaultOptions { JSON.fieldLabelModifier = tail . dropWhile ((/=) '_') . tail }) ''AuthenticatedUser)
$(deriveJSON (unPrefix "_authReq_") ''AuthRequest)
$(deriveJSON (unPrefix "_authRes_") ''AuthResponse)
$(deriveJSON (unPrefix "_fpReq_") ''ForgotPasswordRequest)
$(deriveJSON (unPrefix "_fpRes_") ''ForgotPasswordResponse)
$(deriveJSON (unPrefix "_fpGet_") ''ForgotPasswordGet)
--
-- JWT instances
--
instance ToJWT AuthenticatedUser
instance FromJWT AuthenticatedUser
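The JSON and JWT instances above are not new code: the Template Haskell `deriveJSON` splices were moved from their original positions down to the end of the module. A splice can only refer to declarations that appear before it, and it also closes a declaration group, so gathering all splices after the types, lenses and schema instances avoids stage-ordering surprises when the module is reorganised. A minimal standalone sketch of the pattern (module and record names are made up):

{-# LANGUAGE TemplateHaskell #-}
module Example where

import Control.Lens (makeLenses)
import Data.Aeson (defaultOptions)
import Data.Aeson.TH (deriveJSON)

data User = User { _userName :: String, _userAge :: Int }

-- A TH splice ends a declaration group: it only sees what is declared
-- above it, and code below it can use the names it generates.
makeLenses ''User

$(deriveJSON defaultOptions ''User)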
@@ -24,7 +24,8 @@ import Control.Lens
import Control.Monad.Logger (LogLevel(..))
import Control.Monad.Reader
import Data.ByteString.Lazy qualified as L
-import Data.Pool (Pool, createPool)
+import Data.Pool (Pool)
+import qualified Data.Pool as Pool
import Database.PostgreSQL.Simple (Connection, connect, close, ConnectInfo)
import Gargantext.API.Admin.EnvTypes
import Gargantext.API.Admin.Types
@@ -217,7 +218,7 @@ newEnv logger port file = do
  }
newPool :: ConnectInfo -> IO (Pool Connection)
-newPool param = createPool (connect param) close 1 (60*60) 8
+newPool param = Pool.newPool $ Pool.setNumStripes (Just 1) $ Pool.defaultPoolConfig (connect param) close (60*60) 8
{-
cleanEnv :: (HasConfig env, HasRepo env) => env -> IO ()
......
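The `newPool` rewrite above tracks the resource-pool >= 0.3 API, where `createPool` is deprecated in favour of building a `PoolConfig` explicitly. A rough sketch of the same construction in isolation (only the `Data.Pool` names come from the library; the wrapper function is illustrative):

import Data.Pool (Pool)
import qualified Data.Pool as Pool
import Database.PostgreSQL.Simple (ConnectInfo, Connection, close, connect)

-- One stripe, idle connections kept for an hour, at most 8 connections.
mkConnectionPool :: ConnectInfo -> IO (Pool Connection)
mkConnectionPool info =
  Pool.newPool
    $ Pool.setNumStripes (Just 1)
    $ Pool.defaultPoolConfig (connect info) close  -- acquire / release
                             (60 * 60)             -- idle timeout, seconds
                             8                     -- max resources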
@@ -104,18 +104,12 @@ messages = toMessage $ [ (400, ["Ill formed query "])
instance Arbitrary Message where
  arbitrary = elements messages
-instance FromJSON Message
-instance ToJSON Message
instance ToSchema Message
-----------------------------------------------------------------------
data Counts = Counts { results :: [Either Message Count]
  } deriving (Eq, Show, Generic)
-instance FromJSON Counts
-instance ToJSON Counts
instance Arbitrary Counts where
  arbitrary = elements [Counts [ Right (Count Pubmed (Just 20 ))
                               , Right (Count IsTex (Just 150))
@@ -131,8 +125,6 @@ data Count = Count { count_name :: Scraper
  }
  deriving (Eq, Show, Generic)
-$(deriveJSON (unPrefix "count_") ''Count)
instance ToSchema Count where
  declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "count_")
--instance Arbitrary Count where
@@ -141,3 +133,16 @@ instance ToSchema Count where
-----------------------------------------------------------------------
count :: Monad m => Query -> m Counts
count _ = undefined
--
-- JSON instances
--
instance FromJSON Message
instance ToJSON Message
$(deriveJSON (unPrefix "count_") ''Count)
instance FromJSON Counts
instance ToJSON Counts
@@ -671,8 +671,8 @@ genFrontendErr be = do
  EC_404__tree_empty_root
    -> pure $ mkFrontendErr' txt $ FE_tree_empty_root
  EC_500__tree_too_many_roots
-    -> do nodes <- arbitrary
-          pure $ mkFrontendErr' txt $ FE_tree_too_many_roots nodes
+    -> do nodes <- getNonEmpty <$> arbitrary
+          pure $ mkFrontendErr' txt $ FE_tree_too_many_roots (NE.fromList nodes)
  -- job errors
  EC_500__job_invalid_id_type
......
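The generator change above exists because `NE.fromList` is partial: feeding it the empty list that a plain `arbitrary :: Gen [a]` can produce would crash the property. Drawing through QuickCheck's `NonEmptyList` modifier guarantees at least one element. A small self-contained sketch of the idiom (names are illustrative):

import qualified Data.List.NonEmpty as NE
import Test.QuickCheck (Gen, arbitrary, getNonEmpty)

-- NonEmptyList never generates or shrinks to [], so NE.fromList is safe here.
genRoots :: Gen (NE.NonEmpty Int)
genRoots = do
  xs <- getNonEmpty <$> arbitrary   -- a [Int] that is guaranteed non-empty
  pure (NE.fromList xs)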
@@ -22,7 +22,7 @@ import Data.ByteString.Lazy.Char8 ( ByteString )
import Data.Morpheus ( App, deriveApp )
import Data.Morpheus.Server ( httpPlayground )
import Data.Morpheus.Subscriptions ( Event (..), httpPubApp )
-import Data.Morpheus.Types ( GQLRequest, GQLResponse, GQLType, RootResolver(..), Undefined(..) )
+import Data.Morpheus.Types ( GQLRequest, GQLResponse, GQLType, RootResolver(..), Undefined, defaultRootResolver)
import Data.Proxy
import Gargantext.API.Admin.Auth.Types (AuthenticatedUser)
import Gargantext.API.Admin.Orchestrator.Types (JobLog)
@@ -111,7 +111,7 @@ rootResolver
  -> AccessPolicyManager
  -> RootResolver (GargM env BackendInternalError) e Query Mutation Undefined
rootResolver authenticatedUser policyManager =
-  RootResolver
+  defaultRootResolver
    { queryResolver = Query { annuaire_contacts = GQLA.resolveAnnuaireContacts
                            , context_ngrams = GQLCTX.resolveContextNgrams
                            , contexts = GQLCTX.resolveNodeContext
@@ -133,7 +133,7 @@ rootResolver authenticatedUser policyManager =
    , update_user_epo_api_token = GQLUser.updateUserEPOAPIToken
    , delete_team_membership = GQLTeam.deleteTeamMembership
    , update_node_context_category = GQLCTX.updateNodeContextCategory }
-  , subscriptionResolver = Undefined }
+  }
-- | Main GraphQL "app".
app
......
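`defaultRootResolver` (available in the newer morpheus-graphql versions this merge moves to) is a `RootResolver` whose query, mutation and subscription parts are all `Undefined`, so the code above only has to override the pieces it actually serves and can drop the explicit `subscriptionResolver = Undefined` field. A hedged, self-contained sketch with a toy schema (not the Gargantext one):

{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
module Example where

import Data.Morpheus.Types (GQLType, RootResolver (..), Undefined, defaultRootResolver)
import Data.Text (Text)
import GHC.Generics (Generic)

newtype Query m = Query { hello :: m Text }
  deriving (Generic, GQLType)

-- Start from defaultRootResolver (everything Undefined) and override
-- only the query resolver; mutations and subscriptions stay Undefined.
rootResolver :: RootResolver IO () Query Undefined Undefined
rootResolver = defaultRootResolver
  { queryResolver = Query { hello = pure "world" } }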
{-# LANGUAGE DeriveAnyClass #-}
-{-# LANGUAGE DuplicateRecordFields #-}
+{-# LANGUAGE DerivingStrategies #-}
module Gargantext.API.GraphQL.IMT
  ( School(..)
@@ -13,10 +13,11 @@ import Gargantext.API.GraphQL.Types
import Gargantext.Core.Ext.IMT (School(..), schools)
import Gargantext.Prelude
-data SchoolsArgs
-  = SchoolsArgs
-  { } deriving (Generic, GQLType)
+newtype SchoolsArgs
+  = SchoolsArgs ()
+  deriving stock (Generic)
+  deriving anyclass (GQLType)
resolveSchools
  :: SchoolsArgs -> GqlM e env [School]
-resolveSchools SchoolsArgs { } = pure $ schools
+resolveSchools (SchoolsArgs ()) = pure $ schools
{-# LANGUAGE DeriveAnyClass #-}
-{-# LANGUAGE DuplicateRecordFields #-}
+{-# LANGUAGE DerivingStrategies #-}
module Gargantext.API.GraphQL.NLP
  ( Lang(..)
@@ -18,9 +18,10 @@ import Gargantext.Prelude
import Protolude
import qualified Data.Map.Strict as Map
-data LanguagesArgs
-  = LanguagesArgs
-  { } deriving (Generic, GQLType)
+newtype LanguagesArgs
+  = LanguagesArgs ()
+  deriving stock (Generic)
+  deriving anyclass (GQLType)
type LanguagesMap = Map.Map Lang NLPServer
@@ -33,7 +34,7 @@ data NLPServer = NLPServer
resolveLanguages
  :: HasNLPServer env => LanguagesArgs -> GqlM e env LanguagesMap
-resolveLanguages LanguagesArgs { } = do
+resolveLanguages ( LanguagesArgs () ) = do
  -- pure $ allLangs
  lift $ do
    ns <- view nlpServer
......
@@ -15,7 +15,7 @@ Portability : POSIX
module Gargantext.API.GraphQL.Node where
import Data.Aeson
-import Data.HashMap.Strict qualified as HashMap
+import Data.Aeson.KeyMap qualified as KM
import Data.Morpheus.Types ( GQLType )
import Data.Text qualified as T
import Gargantext.API.Admin.Auth.Types
@@ -126,7 +126,7 @@ toCorpus N.Node { .. } = Corpus { id = NN.unNodeId _node_id
pubmedAPIKeyFromValue :: Value -> Maybe PUBMED.APIKey
pubmedAPIKeyFromValue (Object kv) =
-  case HashMap.lookup "pubmed_api_key" kv of
+  case KM.lookup "pubmed_api_key" kv of
    Nothing -> Nothing
    Just v -> case fromJSON v of
      Error _ -> Nothing
......
@@ -8,6 +8,8 @@ Stability : experimental
Portability : POSIX
-}
+{-# OPTIONS_GHC -Wno-deprecations #-} -- FIXME(adn) GraphQL will need updating.
module Gargantext.API.GraphQL.Utils where
import Control.Lens.Getter (view)
......
@@ -15,6 +15,7 @@ import Data.ByteString qualified as BS
import Data.ByteString.Char8 qualified as C8
import Data.CaseInsensitive qualified as CI
import Data.List qualified as L
+import Data.String
import Data.Text qualified as T
import Data.Text.Encoding qualified as TE
import Network.HTTP.Types
@@ -38,7 +39,7 @@ logStdoutDevSanitised = mkRequestLogger $ defaultRequestLoggerSettings { outputF
-- >>> "{\"a\": 100, \"b\": 200}" & atKey "c" ?~ String "300"
-- "{\"a\":100,\"b\":200,\"c\":\"300\"}"
atKey :: L.AsValue t => T.Text -> Traversal' t (Maybe A.Value)
-atKey i = L._Object . at i
+atKey i = L._Object . at (fromString $ T.unpack i)
{-# INLINE atKey #-}
customOutput :: OutputFormatterWithDetailsAndHeaders
......
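The `atKey` change (and the `KM.lookup` changes elsewhere in this merge) come from the aeson 2.x migration: JSON objects are now a `KeyMap` indexed by `Key` instead of a `HashMap Text Value`, so `Text` keys have to be converted, e.g. with `fromString` or `Data.Aeson.Key.fromText`. A small sketch of the same lookup done directly (the helper name is illustrative):

import Data.Aeson (Value (..))
import qualified Data.Aeson.Key as Key
import qualified Data.Aeson.KeyMap as KM
import Data.Text (Text)

-- aeson >= 2.0: Object wraps a KeyMap Value keyed by Key, not Text.
lookupField :: Text -> Value -> Maybe Value
lookupField name (Object km) = KM.lookup (Key.fromText name) km
lookupField _    _           = Nothing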
@@ -120,7 +120,7 @@ import Gargantext.Database.Query.Table.Node (getNode)
import Gargantext.Database.Query.Table.Node.Error (HasNodeError)
import Gargantext.Database.Query.Table.Node.Select
import Gargantext.Database.Schema.Node (node_id, node_parent_id, node_user_id)
-import Gargantext.Prelude hiding (log, to, toLower, (%))
+import Gargantext.Prelude hiding (log, to, toLower, (%), isInfixOf)
import Gargantext.Prelude.Clock (hasTime, getTime)
import Gargantext.Utils.Jobs (serveJobsAPI, MonadJobStatus(..))
import GHC.Conc (readTVar, writeTVar)
......
@@ -9,12 +9,9 @@ Portability : POSIX
-}
-module Gargantext.API.Ngrams.List.Types where
---{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
+module Gargantext.API.Ngrams.List.Types where
--import Control.Lens hiding (elements, Indexed)
import Data.Aeson
......
@@ -9,7 +9,8 @@ Portability : POSIX
-}
{-# LANGUAGE TypeFamilies #-}
+{-# LANGUAGE TypeOperators #-}
module Gargantext.API.Ngrams.Tools
  where
......
@@ -84,8 +84,6 @@ instance ToParamSchema TabType
instance ToJSON TabType
instance FromJSON TabType
instance ToSchema TabType
instance Arbitrary TabType where
arbitrary = elements [minBound .. maxBound]
instance FromJSONKey TabType where
  fromJSONKey = genericFromJSONKey defaultJSONKeyOptions
instance ToJSONKey TabType where
@@ -161,14 +159,11 @@ deriveJSON (unPrefix "_nre_") ''NgramsRepoElement
makeLenses ''NgramsRepoElement
instance ToSchema NgramsRepoElement where
  declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_nre_")
-instance Serialise NgramsRepoElement
instance FromField NgramsRepoElement where
  fromField = fromJSONField
instance ToField NgramsRepoElement where
  toField = toJSONField
-instance Serialise (MSet NgramsTerm)
data NgramsElement =
  NgramsElement { _ne_ngrams :: NgramsTerm
                , _ne_size :: Int
@@ -197,9 +192,6 @@ newNgramsElement mayList ngrams =
instance ToSchema NgramsElement where
  declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_ne_")
-instance Arbitrary NgramsElement where
-  arbitrary = elements [newNgramsElement Nothing "sport"]
------------------------------------------------------------------------
newtype NgramsTable = NgramsTable [NgramsElement]
@@ -257,9 +249,6 @@ mockTable = NgramsTable
  where
    rp n = Just $ RootParent n n
-instance Arbitrary NgramsTable where
-  arbitrary = pure mockTable
instance ToSchema NgramsTable
------------------------------------------------------------------------
@@ -283,10 +272,6 @@ instance ToParamSchema OrderBy
instance FromJSON OrderBy
instance ToJSON OrderBy
instance ToSchema OrderBy
-instance Arbitrary OrderBy
-  where
-    arbitrary = elements [minBound..maxBound]
-- | A query on a 'NgramsTable'.
data NgramsSearchQuery = NgramsSearchQuery
@@ -367,8 +352,6 @@ instance ToSchema a => ToSchema (PatchSet a)
type AddRem = Replace (Maybe ())
-instance Serialise AddRem
remPatch, addPatch :: AddRem
remPatch = replace (Just ()) Nothing
addPatch = replace Nothing (Just ())
@@ -388,9 +371,6 @@ unPatchMSet (PatchMSet a) = a
type ConflictResolutionPatchMSet a = a -> ConflictResolutionReplace (Maybe ())
type instance ConflictResolution (PatchMSet a) = ConflictResolutionPatchMSet a
-instance (Serialise a, Ord a) => Serialise (PatchMap a AddRem)
-instance (Serialise a, Ord a) => Serialise (PatchMSet a)
-- TODO this breaks module abstraction
makePrisms ''PM.PatchMap
@@ -419,19 +399,12 @@ instance (Ord a, ToJSON a) => ToJSON (PatchMSet a) where
instance (Ord a, FromJSON a) => FromJSON (PatchMSet a) where
  parseJSON = fmap (_PatchMSetIso #) . parseJSON
-instance (Ord a, Arbitrary a) => Arbitrary (PatchMSet a) where
-  arbitrary = (PatchMSet . PM.fromMap) <$> arbitrary
instance ToSchema a => ToSchema (PatchMSet a) where
  -- TODO
  declareNamedSchema _ = wellNamedSchema "" (Proxy :: Proxy TODO)
type instance Patched (PatchMSet a) = MSet a
-instance (Eq a, Arbitrary a) => Arbitrary (Replace a) where
-  arbitrary = uncurry replace <$> arbitrary
-  -- If they happen to be equal then the patch is Keep.
instance ToSchema a => ToSchema (Replace a) where
  declareNamedSchema (_ :: Proxy (Replace a)) = do
    -- TODO Keep constructor is not supported here.
@@ -475,19 +448,11 @@ instance ToSchema NgramsPatch where
  , ("old", nreSch)
  , ("new", nreSch)
  ]
-instance Arbitrary NgramsPatch where
-  arbitrary = frequency [ (9, NgramsPatch <$> arbitrary <*> (replace <$> arbitrary <*> arbitrary))
-                        , (1, NgramsReplace <$> arbitrary <*> arbitrary)
-                        ]
-instance Serialise NgramsPatch
instance FromField NgramsPatch where
  fromField = fromJSONField
instance ToField NgramsPatch where
  toField = toJSONField
-instance Serialise (Replace ListType)
-instance Serialise ListType
type NgramsPatchIso =
  MaybePatch NgramsRepoElement (PairPatch (PatchMSet NgramsTerm) (Replace ListType))
@@ -555,9 +520,6 @@ newtype NgramsTablePatch = NgramsTablePatch (PatchMap NgramsTerm NgramsPatch)
mkNgramsTablePatch :: Map NgramsTerm NgramsPatch -> NgramsTablePatch
mkNgramsTablePatch = NgramsTablePatch . PM.fromMap
-instance Serialise NgramsTablePatch
-instance Serialise (PatchMap NgramsTerm NgramsPatch)
instance FromField NgramsTablePatch
  where
    fromField = fromJSONField
@@ -690,9 +652,6 @@ instance Action NgramsTablePatch (Maybe NgramsTableMap) where
  fmap (execState (reParentNgramsTablePatch p)) .
  act (p ^. _NgramsTablePatch)
-instance Arbitrary NgramsTablePatch where
-  arbitrary = NgramsTablePatch <$> PM.fromMap <$> arbitrary
-- Should it be less than an Lens' to preserve PatchMap's abstraction.
-- ntp_ngrams_patches :: Lens' NgramsTablePatch (Map NgramsTerm NgramsPatch)
-- ntp_ngrams_patches = _NgramsTablePatch . undefined
@@ -709,8 +668,6 @@ deriveJSON (unPrefix "_v_") ''Versioned
makeLenses ''Versioned
instance (Typeable a, ToSchema a) => ToSchema (Versioned a) where
  declareNamedSchema = wellNamedSchema "_v_"
-instance Arbitrary a => Arbitrary (Versioned a) where
-  arbitrary = Versioned 1 <$> arbitrary -- TODO 1 is constant so far
------------------------------------------------------------------------
type Count = Int
@@ -724,8 +681,6 @@ deriveJSON (unPrefix "_vc_") ''VersionedWithCount
makeLenses ''VersionedWithCount
instance (Typeable a, ToSchema a) => ToSchema (VersionedWithCount a) where
  declareNamedSchema = wellNamedSchema "_vc_"
-instance Arbitrary a => Arbitrary (VersionedWithCount a) where
-  arbitrary = VersionedWithCount 1 1 <$> arbitrary -- TODO 1 is constant so far
toVersionedWithCount :: Count -> Versioned a -> VersionedWithCount a
toVersionedWithCount count (Versioned version data_) = VersionedWithCount version count data_
@@ -749,8 +704,6 @@ instance (ToJSON s, ToJSON p) => ToJSON (Repo s p) where
  toJSON = genericToJSON $ unPrefix "_r_"
  toEncoding = genericToEncoding $ unPrefix "_r_"
-instance (Serialise s, Serialise p) => Serialise (Repo s p)
makeLenses ''Repo
initRepo :: Monoid s => Repo s p
@@ -771,11 +724,6 @@ type RepoCmdM env err m =
-- Instances
-instance Arbitrary NgramsRepoElement where
-  arbitrary = elements $ map ngramsElementToRepo ns
-    where
-      NgramsTable ns = mockTable
instance FromHttpApiData (Map TableNgrams.NgramsType (Versioned NgramsTableMap))
  where
    parseUrlPiece x = maybeToEither x (decode $ cs x)
@@ -814,3 +762,51 @@ instance ToSchema UpdateTableNgramsCharts where
------------------------------------------------------------------------
type NgramsList = (Map TableNgrams.NgramsType (Versioned NgramsTableMap))
--
-- Serialise instances
--
instance Serialise ListType
instance Serialise NgramsRepoElement
instance Serialise NgramsTablePatch
instance Serialise (PatchMap NgramsTerm NgramsPatch)
instance Serialise (MSet NgramsTerm)
instance Serialise AddRem
instance Serialise NgramsPatch
instance Serialise (Replace ListType)
instance (Serialise a, Ord a) => Serialise (PatchMap a AddRem)
instance (Serialise a, Ord a) => Serialise (PatchMSet a)
instance (Serialise s, Serialise p) => Serialise (Repo s p)
--
-- Arbitrary instances
--
instance Arbitrary TabType where
arbitrary = elements [minBound .. maxBound]
instance Arbitrary NgramsElement where
arbitrary = elements [newNgramsElement Nothing "sport"]
instance Arbitrary NgramsTable where
arbitrary = pure mockTable
instance Arbitrary OrderBy
where
arbitrary = elements [minBound..maxBound]
instance (Ord a, Arbitrary a) => Arbitrary (PatchMSet a) where
arbitrary = (PatchMSet . PM.fromMap) <$> arbitrary
instance (Eq a, Arbitrary a) => Arbitrary (Replace a) where
arbitrary = uncurry replace <$> arbitrary
-- If they happen to be equal then the patch is Keep.
instance Arbitrary NgramsPatch where
arbitrary = frequency [ (9, NgramsPatch <$> arbitrary <*> (replace <$> arbitrary <*> arbitrary))
, (1, NgramsReplace <$> arbitrary <*> arbitrary)
]
instance Arbitrary NgramsTablePatch where
arbitrary = NgramsTablePatch <$> PM.fromMap <$> arbitrary
instance Arbitrary a => Arbitrary (Versioned a) where
arbitrary = Versioned 1 <$> arbitrary -- TODO 1 is constant so far
instance Arbitrary a => Arbitrary (VersionedWithCount a) where
arbitrary = VersionedWithCount 1 1 <$> arbitrary -- TODO 1 is constant so far
instance Arbitrary NgramsRepoElement where
arbitrary = elements $ map ngramsElementToRepo ns
where
NgramsTable ns = mockTable
@@ -189,62 +189,6 @@ nodeNodeAPI p uId cId nId = withAccess (Proxy :: Proxy (NodeNodeAPI a)) Proxy uI
nodeNodeAPI' :: GargServer (NodeNodeAPI a)
nodeNodeAPI' = getNodeWith nId p
------------------------------------------------------------------------
-- TODO: make the NodeId type indexed by `a`, then we no longer need the proxy.
nodeAPI :: forall proxy a.
( HyperdataC a, Show a
) => proxy a
-> AuthenticatedUser
-> NodeId
-> ServerT (NodeAPI a) (GargM Env BackendInternalError)
nodeAPI p authenticatedUser targetNode =
withAccess (Proxy :: Proxy (NodeAPI a)) Proxy authenticatedUser (PathNode targetNode) nodeAPI'
where
userRootId = RootId $ authenticatedUser ^. auth_node_id
nodeAPI' :: ServerT (NodeAPI a) (GargM Env BackendInternalError)
nodeAPI' = withPolicy authenticatedUser (nodeChecks targetNode) (getNodeWith targetNode p)
:<|> rename targetNode
:<|> postNode authenticatedUser targetNode
:<|> postNodeAsyncAPI authenticatedUser targetNode
:<|> FrameCalcUpload.api authenticatedUser targetNode
:<|> putNode targetNode
:<|> Update.api targetNode
:<|> Action.deleteNode userRootId targetNode
:<|> getChildren targetNode p
-- TODO gather it
:<|> tableApi targetNode
:<|> apiNgramsTableCorpus targetNode
:<|> catApi targetNode
:<|> scoreApi targetNode
:<|> Search.api targetNode
:<|> Share.api userRootId targetNode
-- Pairing Tools
:<|> pairWith targetNode
:<|> pairs targetNode
:<|> getPair targetNode
-- VIZ
:<|> scatterApi targetNode
:<|> chartApi targetNode
:<|> pieApi targetNode
:<|> treeApi targetNode
:<|> phyloAPI targetNode
:<|> moveNode userRootId targetNode
-- :<|> nodeAddAPI id'
-- :<|> postUpload id'
:<|> Share.unPublish targetNode
:<|> fileApi targetNode
:<|> fileAsyncApi authenticatedUser targetNode
:<|> DFWN.api authenticatedUser targetNode
:<|> DocumentUpload.api targetNode
------------------------------------------------------------------------
data RenameNode = RenameNode { r_name :: Text }
  deriving (Generic)
@@ -374,5 +318,59 @@ instance ToSchema RenameNode
instance Arbitrary RenameNode where
  arbitrary = elements [RenameNode "test"]
------------------------------------------------------------------------
-- TODO: make the NodeId type indexed by `a`, then we no longer need the proxy.
nodeAPI :: forall proxy a.
( HyperdataC a, Show a, MimeUnrender JSON a
) => proxy a
-> AuthenticatedUser
-> NodeId
-> ServerT (NodeAPI a) (GargM Env BackendInternalError)
nodeAPI p authenticatedUser targetNode =
withAccess (Proxy :: Proxy (NodeAPI a)) Proxy authenticatedUser (PathNode targetNode) nodeAPI'
where
userRootId = RootId $ authenticatedUser ^. auth_node_id
nodeAPI' :: ServerT (NodeAPI a) (GargM Env BackendInternalError)
nodeAPI' = withPolicy authenticatedUser (nodeChecks targetNode) (getNodeWith targetNode p)
:<|> rename targetNode
:<|> postNode authenticatedUser targetNode
:<|> postNodeAsyncAPI authenticatedUser targetNode
:<|> FrameCalcUpload.api authenticatedUser targetNode
:<|> putNode targetNode
:<|> Update.api targetNode
:<|> Action.deleteNode userRootId targetNode
:<|> getChildren targetNode p
-- TODO gather it
:<|> tableApi targetNode
:<|> apiNgramsTableCorpus targetNode
:<|> catApi targetNode
:<|> scoreApi targetNode
:<|> Search.api targetNode
:<|> Share.api userRootId targetNode
-- Pairing Tools
:<|> pairWith targetNode
:<|> pairs targetNode
:<|> getPair targetNode
-- VIZ
:<|> scatterApi targetNode
:<|> chartApi targetNode
:<|> pieApi targetNode
:<|> treeApi targetNode
:<|> phyloAPI targetNode
:<|> moveNode userRootId targetNode
-- :<|> nodeAddAPI id'
-- :<|> postUpload id'
:<|> Share.unPublish targetNode
:<|> fileApi targetNode
:<|> fileAsyncApi authenticatedUser targetNode
:<|> DFWN.api authenticatedUser targetNode
:<|> DocumentUpload.api targetNode
-------------------------------------------------------------
@@ -94,6 +94,6 @@ type API = Summary "Document Export"
:<|> "csv"
:> Get '[PlainText] (Headers '[Servant.Header "Content-Disposition" Text] Text)) -- [Document])
-$(deriveJSON (unPrefix "_de_") ''DocumentExport)
-$(deriveJSON (unPrefix "_d_") ''Document)
$(deriveJSON (unPrefix "_ng_") ''Ngrams)
+$(deriveJSON (unPrefix "_d_") ''Document)
+$(deriveJSON (unPrefix "_de_") ''DocumentExport)
@@ -701,7 +701,7 @@ clearHistory (NodeStory ns) = NodeStory $ ns & (traverse . a_history) .~ emptyHi
currentVersion :: (HasNodeStory env err m) => ListId -> m Version
currentVersion listId = do
  pool <- view connPool
-  nls <- withResource pool $ \c -> liftBase $ getNodeStory c listId
+  nls <- liftBase $ withResource pool $ \c -> liftBase $ getNodeStory c listId
  pure $ nls ^. unNodeStory . at listId . _Just . a_version
@@ -711,7 +711,7 @@ currentVersion listId = do
fixNodeStoryVersions :: (HasNodeStory env err m) => m ()
fixNodeStoryVersions = do
  pool <- view connPool
-  _ <- withResource pool $ \c -> liftBase $ PGS.withTransaction c $ do
+  _ <- liftBase $ withResource pool $ \c -> liftBase $ PGS.withTransaction c $ do
    nIds <- runPGSQuery c [sql| SELECT id FROM nodes WHERE ? |] (PGS.Only True) :: IO [PGS.Only Int64]
    -- printDebug "[fixNodeStoryVersions] nIds" nIds
    mapM_ (\(PGS.Only nId) -> do
......
...@@ -24,7 +24,7 @@ module Gargantext.Core.Text.Corpus.Parsers.Date ...@@ -24,7 +24,7 @@ module Gargantext.Core.Text.Corpus.Parsers.Date
import Data.Aeson (toJSON, Value) import Data.Aeson (toJSON, Value)
import Data.Aeson qualified as Json import Data.Aeson qualified as Json
import Data.HashMap.Strict as HM hiding (map) import Data.Aeson.KeyMap as KM hiding (map)
import Data.HashSet qualified as HashSet import Data.HashSet qualified as HashSet
import Data.List qualified as List import Data.List qualified as List
import Data.Text (unpack, splitOn, replace) import Data.Text (unpack, splitOn, replace)
...@@ -184,7 +184,7 @@ getTimeValue rt = case head rt of ...@@ -184,7 +184,7 @@ getTimeValue rt = case head rt of
extractValue :: Maybe Value -> Maybe Text extractValue :: Maybe Value -> Maybe Text
extractValue (Just (Json.Object object)) = extractValue (Just (Json.Object object)) =
case HM.lookup "value" object of case KM.lookup "value" object of
Just (Json.String date) -> Just date Just (Json.String date) -> Just date
_ -> Nothing _ -> Nothing
extractValue _ = Nothing extractValue _ = Nothing
......
...@@ -34,10 +34,11 @@ Notes for current implementation: ...@@ -34,10 +34,11 @@ Notes for current implementation:
{-# OPTIONS_GHC -fno-warn-deprecations #-} {-# OPTIONS_GHC -fno-warn-deprecations #-}
{-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
module Gargantext.Core.Text.Terms.Eleve where module Gargantext.Core.Text.Terms.Eleve where
......
...@@ -143,11 +143,11 @@ whitespace :: Tokenizer ...@@ -143,11 +143,11 @@ whitespace :: Tokenizer
whitespace xs = E [Right w | w <- T.words xs ] whitespace xs = E [Right w | w <- T.words xs ]
instance Monad (EitherList a) where instance Monad (EitherList a) where
return x = E [Right x] return = pure
E xs >>= f = E $ concatMap (either (return . Left) (unE . f)) xs E xs >>= f = E $ concatMap (either (return . Left) (unE . f)) xs
instance Applicative (EitherList a) where instance Applicative (EitherList a) where
pure = pure pure x = E [Right x]
f <*> x = f `ap` x f <*> x = f `ap` x
instance Functor (EitherList a) where instance Functor (EitherList a) where
......
...@@ -216,43 +216,6 @@ data ObjectData = ...@@ -216,43 +216,6 @@ data ObjectData =
| Layer !GvId !GraphDataData !LayerData | Layer !GvId !GraphDataData !LayerData
deriving (Show, Eq, Generic) deriving (Show, Eq, Generic)
instance ToJSON ObjectData where
toJSON = \case
GroupToNode gvid commonData nodeTypeData
-> mkObject gvid (Left commonData) nodeTypeData
BranchToNode gvid commonData nodeTypeData
-> mkObject gvid (Left commonData) nodeTypeData
PeriodToNode gvid commonData nodeTypeData
-> mkObject gvid (Left commonData) nodeTypeData
Layer gvid graphData nodeTypeData
-> mkObject gvid (Right graphData) nodeTypeData
instance FromJSON ObjectData where
parseJSON = withObject "ObjectData" $ \o -> do
_gvid <- o .: "_gvid"
-- try to parse the graph data first. If we succeed, then we are dealing with
    -- the 'Layer', otherwise we try the rest, but for the rest we can avoid re-parsing
-- the 'NodeCommonData' every time.
case parseMaybe @_ @GraphDataData parseJSON (Object o) of
Nothing
-> do commonData <- parseJSON (Object o)
((GroupToNode <$> pure _gvid <*> pure commonData <*> parseJSON (Object o)) <|>
(BranchToNode <$> pure _gvid <*> pure commonData <*> parseJSON (Object o)) <|>
(PeriodToNode <$> pure _gvid <*> pure commonData <*> parseJSON (Object o)))
Just gd
-> Layer <$> pure _gvid <*> pure gd <*> parseJSON (Object o)
mkObject :: ToJSON a => GvId -> Either NodeCommonData GraphDataData -> a -> Value
mkObject gvid commonData objectTypeData =
let commonDataJSON = either toJSON toJSON commonData
objectTypeDataJSON = toJSON objectTypeData
header = object $ [ "_gvid" .= toJSON gvid ]
in case (commonDataJSON, objectTypeDataJSON, header) of
(Object hdr, Object cdJSON, Object etDataJSON)
-> Object $ hdr <> cdJSON <> etDataJSON
_ -> panicTrace "[Gargantext.Core.Types.Phylo.mkObject] impossible: commonData, header or objectTypeDataJSON didn't convert back to JSON Object."
data GroupToNodeData data GroupToNodeData
= GroupToNodeData = GroupToNodeData
{ _gtn_bId :: Text { _gtn_bId :: Text
...@@ -474,17 +437,23 @@ data BranchToGroupData ...@@ -474,17 +437,23 @@ data BranchToGroupData
, _btg_style :: Maybe Text , _btg_style :: Maybe Text
} deriving (Show, Eq, Generic) } deriving (Show, Eq, Generic)
-- | Lenses
makeLenses ''Phylo
makeLenses ''PhyloPeriod
makeLenses ''PhyloLevel
makeLenses ''PhyloGroup
-- | JSON instances -- | JSON instances
$(deriveJSON (unPrefix "_phylo_" ) ''Phylo ) instance ToJSON GvId where
$(deriveJSON (unPrefix "_phylo_Period" ) ''PhyloPeriod ) toJSON GvId{..} = toJSON _GvId
$(deriveJSON (unPrefix "_phylo_Level" ) ''PhyloLevel ) instance FromJSON GvId where
$(deriveJSON (unPrefix "_phylo_Group" ) ''PhyloGroup ) parseJSON v = GvId <$> parseJSON v
-- /NOTE/ We need to define this /after/ the JSON instance for 'GvId' due to the GHC stage limitation.
mkObject :: ToJSON a => GvId -> Either NodeCommonData GraphDataData -> a -> Value
mkObject gvid commonData objectTypeData =
let commonDataJSON = either toJSON toJSON commonData
objectTypeDataJSON = toJSON objectTypeData
header = object $ [ "_gvid" .= toJSON gvid ]
in case (commonDataJSON, objectTypeDataJSON, header) of
(Object hdr, Object cdJSON, Object etDataJSON)
-> Object $ hdr <> cdJSON <> etDataJSON
_ -> panicTrace "[Gargantext.Core.Types.Phylo.mkObject] impossible: commonData, header or objectTypeDataJSON didn't convert back to JSON Object."
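The /NOTE/ above refers to GHC's Template Haskell staging: a top-level splice splits the module into declaration groups, and definitions placed before a splice cannot see instances introduced at or after it. A small self-contained illustration of that behaviour, with hypothetical names not taken from this change:

{-# LANGUAGE TemplateHaskell #-}
module Sketch.StageRestriction where

import Data.Aeson (Value, ToJSON(..), defaultOptions)
import Data.Aeson.TH (deriveJSON)

newtype Wrapped = Wrapped Int

-- The splice ends a declaration group; anything defined above this line
-- cannot see the ToJSON/FromJSON instances it generates.
$(deriveJSON defaultOptions ''Wrapped)

-- This compiles only because it comes after the splice; moving it above
-- the splice would fail with "No instance for (ToJSON Wrapped)".
encodeWrapped :: Wrapped -> Value
encodeWrapped = toJSON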
instance ToJSON GraphData where instance ToJSON GraphData where
toJSON = mkGraphData toJSON = mkGraphData
...@@ -512,11 +481,6 @@ instance FromJSON GraphData where ...@@ -512,11 +481,6 @@ instance FromJSON GraphData where
_gd_data <- parseJSON (Object o) _gd_data <- parseJSON (Object o)
pure GraphData{..} pure GraphData{..}
instance ToJSON GvId where
toJSON GvId{..} = toJSON _GvId
instance FromJSON GvId where
parseJSON v = GvId <$> parseJSON v
instance ToJSON EdgeData where instance ToJSON EdgeData where
toJSON = \case toJSON = \case
GroupToAncestor gvid commonData edgeTypeData GroupToAncestor gvid commonData edgeTypeData
...@@ -608,6 +572,38 @@ instance FromJSON BranchToGroupData where ...@@ -608,6 +572,38 @@ instance FromJSON BranchToGroupData where
_btg_style <- o .:? "style" _btg_style <- o .:? "style"
pure BranchToGroupData{..} pure BranchToGroupData{..}
instance ToJSON ObjectData where
toJSON = \case
GroupToNode gvid commonData nodeTypeData
-> mkObject gvid (Left commonData) nodeTypeData
BranchToNode gvid commonData nodeTypeData
-> mkObject gvid (Left commonData) nodeTypeData
PeriodToNode gvid commonData nodeTypeData
-> mkObject gvid (Left commonData) nodeTypeData
Layer gvid graphData nodeTypeData
-> mkObject gvid (Right graphData) nodeTypeData
instance FromJSON ObjectData where
parseJSON = withObject "ObjectData" $ \o -> do
_gvid <- o .: "_gvid"
-- try to parse the graph data first. If we succeed, then we are dealing with
    -- the 'Layer', otherwise we try the rest, but for the rest we can avoid re-parsing
-- the 'NodeCommonData' every time.
case parseMaybe @_ @GraphDataData parseJSON (Object o) of
Nothing
-> do commonData <- parseJSON (Object o)
((GroupToNode <$> pure _gvid <*> pure commonData <*> parseJSON (Object o)) <|>
(BranchToNode <$> pure _gvid <*> pure commonData <*> parseJSON (Object o)) <|>
(PeriodToNode <$> pure _gvid <*> pure commonData <*> parseJSON (Object o)))
Just gd
-> Layer <$> pure _gvid <*> pure gd <*> parseJSON (Object o)
$(deriveJSON (unPrefix "_phylo_Group" ) ''PhyloGroup )
$(deriveJSON (unPrefix "_phylo_Level" ) ''PhyloLevel )
$(deriveJSON (unPrefix "_phylo_Period" ) ''PhyloPeriod )
$(deriveJSON (unPrefix "_phylo_" ) ''Phylo )
-- | ToSchema instances -- | ToSchema instances
instance ToSchema Phylo where instance ToSchema Phylo where
...@@ -637,7 +633,9 @@ instance ToSchema GraphDataData where ...@@ -637,7 +633,9 @@ instance ToSchema GraphDataData where
instance ToSchema GraphData where instance ToSchema GraphData where
declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_gd_") declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_gd_")
-- | Arbitrary instances --
-- Arbitrary instances
--
instance Arbitrary LayerData where instance Arbitrary LayerData where
arbitrary = LayerData <$> arbitrary arbitrary = LayerData <$> arbitrary
instance Arbitrary NodeCommonData where instance Arbitrary NodeCommonData where
...@@ -723,3 +721,13 @@ instance Arbitrary GraphDataData where ...@@ -723,3 +721,13 @@ instance Arbitrary GraphDataData where
<*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
<*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
<*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
--
-- Lenses
--
makeLenses ''Phylo
makeLenses ''PhyloPeriod
makeLenses ''PhyloLevel
makeLenses ''PhyloGroup
...@@ -475,38 +475,38 @@ makeLenses ''PhyloEdge ...@@ -475,38 +475,38 @@ makeLenses ''PhyloEdge
------------------------ ------------------------
$(deriveJSON (unPrefix "_phylo_" ) ''Phylo ) $(deriveJSON (unPrefix "_phylo_group" ) ''PhyloGroup )
$(deriveJSON (unPrefix "_phylo_level" ) ''PhyloLevel )
$(deriveJSON (unPrefix "_phylo_foundations" ) ''PhyloFoundations ) $(deriveJSON (unPrefix "_phylo_foundations" ) ''PhyloFoundations )
$(deriveJSON (unPrefix "_phylo_period" ) ''PhyloPeriod ) $(deriveJSON (unPrefix "_phylo_period" ) ''PhyloPeriod )
$(deriveJSON (unPrefix "_phylo_level" ) ''PhyloLevel )
$(deriveJSON (unPrefix "_phylo_group" ) ''PhyloGroup )
$(deriveJSON (unPrefix "_phyloFis_" ) ''PhyloFis ) $(deriveJSON (unPrefix "_phyloFis_" ) ''PhyloFis )
-- --
$(deriveJSON (unPrefix "_software_" ) ''Software ) $(deriveJSON (unPrefix "_lb_" ) ''LBParams )
$(deriveJSON (unPrefix "_phyloParam_" ) ''PhyloParam ) $(deriveJSON (unPrefix "_sb_" ) ''SBParams )
--
$(deriveJSON (unPrefix "_fis_" ) ''FisParams )
$(deriveJSON (unPrefix "_hamming_" ) ''HammingParams )
$(deriveJSON (unPrefix "_wlj_" ) ''WLJParams )
-- --
$(deriveJSON defaultOptions ''Filter ) $(deriveJSON defaultOptions ''Filter )
$(deriveJSON defaultOptions ''Metric ) $(deriveJSON defaultOptions ''Metric )
$(deriveJSON defaultOptions ''Cluster )
$(deriveJSON defaultOptions ''Proximity ) $(deriveJSON defaultOptions ''Proximity )
--
$(deriveJSON (unPrefix "_fis_" ) ''FisParams )
$(deriveJSON (unPrefix "_hamming_" ) ''HammingParams )
$(deriveJSON (unPrefix "_louvain_" ) ''LouvainParams ) $(deriveJSON (unPrefix "_louvain_" ) ''LouvainParams )
$(deriveJSON (unPrefix "_rc_" ) ''RCParams ) $(deriveJSON (unPrefix "_rc_" ) ''RCParams )
$(deriveJSON (unPrefix "_wlj_" ) ''WLJParams ) $(deriveJSON defaultOptions ''Cluster )
$(deriveJSON (unPrefix "_q_" ) ''PhyloQueryBuild )
-- --
$(deriveJSON (unPrefix "_lb_" ) ''LBParams ) $(deriveJSON (unPrefix "_software_" ) ''Software )
$(deriveJSON (unPrefix "_sb_" ) ''SBParams ) $(deriveJSON (unPrefix "_phyloParam_" ) ''PhyloParam )
$(deriveJSON (unPrefix "_phylo_" ) ''Phylo )
-- --
$(deriveJSON (unPrefix "_q_" ) ''PhyloQueryBuild )
$(deriveJSON (unPrefix "_pv_" ) ''PhyloView )
$(deriveJSON (unPrefix "_pb_" ) ''PhyloBranch ) $(deriveJSON (unPrefix "_pb_" ) ''PhyloBranch )
$(deriveJSON (unPrefix "_pe_" ) ''PhyloEdge )
$(deriveJSON (unPrefix "_pn_" ) ''PhyloNode )
$(deriveJSON defaultOptions ''Filiation ) $(deriveJSON defaultOptions ''Filiation )
$(deriveJSON (unPrefix "_pn_" ) ''PhyloNode )
$(deriveJSON defaultOptions ''EdgeType ) $(deriveJSON defaultOptions ''EdgeType )
$(deriveJSON (unPrefix "_pe_" ) ''PhyloEdge )
$(deriveJSON (unPrefix "_pv_" ) ''PhyloView )
--------------------------- ---------------------------
-- | Swagger instances | -- -- | Swagger instances | --
......
...@@ -625,9 +625,6 @@ makeLenses ''PhyloBranch ...@@ -625,9 +625,6 @@ makeLenses ''PhyloBranch
-- | JSON instances | -- -- | JSON instances | --
------------------------ ------------------------
instance FromJSON Phylo
instance ToJSON Phylo
instance FromJSON PhyloSources instance FromJSON PhyloSources
instance ToJSON PhyloSources instance ToJSON PhyloSources
...@@ -651,6 +648,9 @@ instance ToJSON PhyloGroup ...@@ -651,6 +648,9 @@ instance ToJSON PhyloGroup
$(deriveJSON (unPrefix "_foundations_" ) ''PhyloFoundations) $(deriveJSON (unPrefix "_foundations_" ) ''PhyloFoundations)
instance FromJSON Phylo
instance ToJSON Phylo
-- NFData instances -- NFData instances
instance NFData CorpusParser instance NFData CorpusParser
...@@ -677,3 +677,4 @@ instance NFData Order ...@@ -677,3 +677,4 @@ instance NFData Order
instance NFData Sort instance NFData Sort
instance NFData Tagger instance NFData Tagger
instance NFData PhyloLabel instance NFData PhyloLabel
...@@ -114,7 +114,7 @@ corpusIdtoDocuments timeUnit corpusId = do ...@@ -114,7 +114,7 @@ corpusIdtoDocuments timeUnit corpusId = do
docs <- selectDocNodes corpusId docs <- selectDocNodes corpusId
lId <- defaultList corpusId lId <- defaultList corpusId
termList <- getTermList lId MapTerm NgramsTerms termList <- getTermList lId MapTerm NgramsTerms
corpus_node <- getNodeWith corpusId (Proxy @ HyperdataCorpus) corpus_node <- getNodeWith corpusId (Proxy @HyperdataCorpus)
let corpusLang = view (node_hyperdata . to _hc_lang) corpus_node let corpusLang = view (node_hyperdata . to _hc_lang) corpus_node
let patterns = case termList of let patterns = case termList of
......
...@@ -46,7 +46,7 @@ flowPhylo :: (HasNodeStory env err m, HasDBid NodeType) ...@@ -46,7 +46,7 @@ flowPhylo :: (HasNodeStory env err m, HasDBid NodeType)
-> m Phylo -> m Phylo
flowPhylo cId = do flowPhylo cId = do
corpus_node <- getNodeWith cId (Proxy @ HyperdataCorpus) corpus_node <- getNodeWith cId (Proxy @HyperdataCorpus)
let lang = withDefaultLanguage $ view (node_hyperdata . to _hc_lang) corpus_node let lang = withDefaultLanguage $ view (node_hyperdata . to _hc_lang) corpus_node
list' <- defaultList cId list' <- defaultList cId
termList <- HashMap.toList <$> getTermsWith (Text.words . unNgramsTerm) [list'] NgramsTerms (Set.singleton MapTerm) termList <- HashMap.toList <$> getTermsWith (Text.words . unNgramsTerm) [list'] NgramsTerms (Set.singleton MapTerm)
......
...@@ -659,7 +659,7 @@ reIndexWith :: ( HasNodeStory env err m ) ...@@ -659,7 +659,7 @@ reIndexWith :: ( HasNodeStory env err m )
-> m () -> m ()
reIndexWith cId lId nt lts = do reIndexWith cId lId nt lts = do
-- printDebug "(cId,lId,nt,lts)" (cId, lId, nt, lts) -- printDebug "(cId,lId,nt,lts)" (cId, lId, nt, lts)
corpus_node <- getNodeWith cId (Proxy @ HyperdataCorpus) corpus_node <- getNodeWith cId (Proxy @HyperdataCorpus)
let corpusLang = withDefaultLanguage $ view (node_hyperdata . to _hc_lang) corpus_node let corpusLang = withDefaultLanguage $ view (node_hyperdata . to _hc_lang) corpus_node
-- Getting [NgramsTerm] -- Getting [NgramsTerm]
......
...@@ -193,6 +193,20 @@ instance Arbitrary HyperdataContact where ...@@ -193,6 +193,20 @@ instance Arbitrary HyperdataContact where
-- | Specific Gargantext instance -- | Specific Gargantext instance
instance Hyperdata HyperdataContact instance Hyperdata HyperdataContact
-- | All lenses
makeLenses ''ContactWho
makeLenses ''ContactWhere
makeLenses ''ContactTouch
makeLenses ''ContactMetaData
makeLenses ''HyperdataContact
-- | All Json instances
$(deriveJSON (unPrefix "_ct_") ''ContactTouch)
$(deriveJSON (unPrefix "_cw_") ''ContactWho)
$(deriveJSON (unPrefix "_cw_") ''ContactWhere)
$(deriveJSON (unPrefix "_cm_") ''ContactMetaData)
$(deriveJSON (unPrefix "_hc_") ''HyperdataContact)
-- | Database (Posgresql-simple instance) -- | Database (Posgresql-simple instance)
instance FromField HyperdataContact where instance FromField HyperdataContact where
fromField = fromField' fromField = fromField'
...@@ -207,16 +221,3 @@ instance DefaultFromField (Nullable SqlJsonb) HyperdataContact where ...@@ -207,16 +221,3 @@ instance DefaultFromField (Nullable SqlJsonb) HyperdataContact where
-- | All lenses
makeLenses ''ContactWho
makeLenses ''ContactWhere
makeLenses ''ContactTouch
makeLenses ''ContactMetaData
makeLenses ''HyperdataContact
-- | All Json instances
$(deriveJSON (unPrefix "_cw_") ''ContactWho)
$(deriveJSON (unPrefix "_cw_") ''ContactWhere)
$(deriveJSON (unPrefix "_ct_") ''ContactTouch)
$(deriveJSON (unPrefix "_cm_") ''ContactMetaData)
$(deriveJSON (unPrefix "_hc_") ''HyperdataContact)
...@@ -73,8 +73,6 @@ defaultHyperdataDocument = case decode docExample of ...@@ -73,8 +73,6 @@ defaultHyperdataDocument = case decode docExample of
data StatusV3 = StatusV3 { statusV3_error :: !(Maybe Text) data StatusV3 = StatusV3 { statusV3_error :: !(Maybe Text)
, statusV3_action :: !(Maybe Text) , statusV3_action :: !(Maybe Text)
} deriving (Show, Generic) } deriving (Show, Generic)
$(deriveJSON (unPrefix "statusV3_") ''StatusV3)
------------------------------------------------------------------------ ------------------------------------------------------------------------
data HyperdataDocumentV3 = HyperdataDocumentV3 { _hdv3_publication_day :: !(Maybe Int) data HyperdataDocumentV3 = HyperdataDocumentV3 { _hdv3_publication_day :: !(Maybe Int)
...@@ -140,12 +138,25 @@ arbitraryHyperdataDocuments = ...@@ -140,12 +138,25 @@ arbitraryHyperdataDocuments =
instance Hyperdata HyperdataDocument instance Hyperdata HyperdataDocument
instance Hyperdata HyperdataDocumentV3 instance Hyperdata HyperdataDocumentV3
------------------------------------------------------------------------ ------------------------------------------------------------------------
$(makeLenses ''HyperdataDocument) -- $(deriveJSON (unPrefix "_hd_") ''HyperdataDocument)
makePrisms ''HyperdataDocument instance ToSchema HyperdataDocument where
declareNamedSchema proxy =
genericDeclareNamedSchema (unPrefixSwagger "_hd_") proxy
& mapped.schema.description ?~ "Document Hyperdata"
& mapped.schema.example ?~ toJSON defaultHyperdataDocument
$(makeLenses ''HyperdataDocumentV3) {-
-- | For now HyperdataDocumentV3 is not exposed with the API
instance ToSchema HyperdataDocumentV3 where
declareNamedSchema proxy =
genericDeclareNamedSchema (unPrefixSwagger "hyperdataDocumentV3_") proxy
& mapped.schema.description ?~ "Document Hyperdata for Garg V3"
& mapped.schema.example ?~ toJSON defaultHyperdataDocumentV3
-}
-- $(deriveJSON (unPrefix "_hd_") ''HyperdataDocument) --
-- JSON instances
--
instance FromJSON HyperdataDocument instance FromJSON HyperdataDocument
where where
...@@ -167,24 +178,13 @@ instance ToJSON HyperdataDocument ...@@ -167,24 +178,13 @@ instance ToJSON HyperdataDocument
$(deriveJSON (unPrefix "statusV3_") ''StatusV3)
$(deriveJSON (unPrefix "_hdv3_") ''HyperdataDocumentV3) $(deriveJSON (unPrefix "_hdv3_") ''HyperdataDocumentV3)
instance ToSchema HyperdataDocument where --
declareNamedSchema proxy = -- FromField/ToField instances
genericDeclareNamedSchema (unPrefixSwagger "_hd_") proxy --
& mapped.schema.description ?~ "Document Hyperdata"
& mapped.schema.example ?~ toJSON defaultHyperdataDocument
{-
-- | For now HyperdataDocumentV3 is not exposed with the API
instance ToSchema HyperdataDocumentV3 where
declareNamedSchema proxy =
genericDeclareNamedSchema (unPrefixSwagger "hyperdataDocumentV3_") proxy
& mapped.schema.description ?~ "Document Hyperdata for Garg V3"
& mapped.schema.example ?~ toJSON defaultHyperdataDocumentV3
-}
------------------------------------------------------------------------
instance FromField HyperdataDocument instance FromField HyperdataDocument
where where
fromField = fromField' fromField = fromField'
...@@ -193,14 +193,12 @@ instance FromField HyperdataDocumentV3 ...@@ -193,14 +193,12 @@ instance FromField HyperdataDocumentV3
where where
fromField = fromField' fromField = fromField'
-------
instance ToField HyperdataDocument where instance ToField HyperdataDocument where
toField = toJSONField toField = toJSONField
instance ToField HyperdataDocumentV3 where instance ToField HyperdataDocumentV3 where
toField = toJSONField toField = toJSONField
------------------------------------------------------------------------
instance DefaultFromField SqlJsonb HyperdataDocument instance DefaultFromField SqlJsonb HyperdataDocument
where where
defaultFromField = fromPGSFromField defaultFromField = fromPGSFromField
...@@ -208,4 +206,10 @@ instance DefaultFromField SqlJsonb HyperdataDocument ...@@ -208,4 +206,10 @@ instance DefaultFromField SqlJsonb HyperdataDocument
instance DefaultFromField SqlJsonb HyperdataDocumentV3 instance DefaultFromField SqlJsonb HyperdataDocumentV3
where where
defaultFromField = fromPGSFromField defaultFromField = fromPGSFromField
------------------------------------------------------------------------
--
-- Lenses
--
$(makeLenses ''HyperdataDocument)
makePrisms ''HyperdataDocument
$(makeLenses ''HyperdataDocumentV3)
...@@ -98,9 +98,9 @@ makeLenses ''HyperdataPrivate ...@@ -98,9 +98,9 @@ makeLenses ''HyperdataPrivate
makeLenses ''HyperdataPublic makeLenses ''HyperdataPublic
-- | All Json instances -- | All Json instances
$(deriveJSON (unPrefix "_hu_") ''HyperdataUser)
$(deriveJSON (unPrefix "_hpr_") ''HyperdataPrivate) $(deriveJSON (unPrefix "_hpr_") ''HyperdataPrivate)
$(deriveJSON (unPrefix "_hpu_") ''HyperdataPublic) $(deriveJSON (unPrefix "_hpu_") ''HyperdataPublic)
$(deriveJSON (unPrefix "_hu_") ''HyperdataUser)
-- | Arbitrary instances -- | Arbitrary instances
instance Arbitrary HyperdataUser where instance Arbitrary HyperdataUser where
......
...@@ -41,8 +41,8 @@ instance Arbitrary Metric ...@@ -41,8 +41,8 @@ instance Arbitrary Metric
<*> arbitrary <*> arbitrary
<*> arbitrary <*> arbitrary
deriveJSON (unPrefix "metrics_") ''Metrics
deriveJSON (unPrefix "m_") ''Metric deriveJSON (unPrefix "m_") ''Metric
deriveJSON (unPrefix "metrics_") ''Metrics
newtype ChartMetrics a = ChartMetrics { chartMetrics_data :: a } newtype ChartMetrics a = ChartMetrics { chartMetrics_data :: a }
......
...@@ -119,7 +119,7 @@ fromInt64ToInt = fromIntegral ...@@ -119,7 +119,7 @@ fromInt64ToInt = fromIntegral
mkCmd :: (Connection -> IO a) -> DBCmd err a mkCmd :: (Connection -> IO a) -> DBCmd err a
mkCmd k = do mkCmd k = do
pool <- view connPool pool <- view connPool
withResource pool (liftBase . k) liftBase $ withResource pool (liftBase . k)
runCmd :: (HasConnectionPool env) runCmd :: (HasConnectionPool env)
=> env => env
......
...@@ -15,6 +15,7 @@ Portability : POSIX ...@@ -15,6 +15,7 @@ Portability : POSIX
{-# LANGUAGE NoMonomorphismRestriction #-} {-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
------------------------------------------------------------------------ ------------------------------------------------------------------------
module Gargantext.Database.Query.Facet module Gargantext.Database.Query.Facet
( runViewAuthorsDoc ( runViewAuthorsDoc
......
...@@ -121,11 +121,11 @@ userTable = Table "auth_user" ...@@ -121,11 +121,11 @@ userTable = Table "auth_user"
} }
) )
$(deriveJSON (unPrefix "userLight_") ''UserLight)
$(deriveJSON (unPrefix "user_") ''UserPoly)
instance FromField UserLight where instance FromField UserLight where
fromField = fromField' fromField = fromField'
instance FromField UserDB where instance FromField UserDB where
fromField = fromField' fromField = fromField'
$(deriveJSON (unPrefix "userLight_") ''UserLight)
$(deriveJSON (unPrefix "user_") ''UserPoly)
...@@ -10,6 +10,7 @@ Portability : POSIX ...@@ -10,6 +10,7 @@ Portability : POSIX
{-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
module Gargantext.Utils.Jobs ( module Gargantext.Utils.Jobs (
-- * Serving the JOBS API -- * Serving the JOBS API
serveJobsAPI serveJobsAPI
......
...@@ -20,8 +20,10 @@ import Data.Kind (Type) ...@@ -20,8 +20,10 @@ import Data.Kind (Type)
import Data.Sequence (Seq) import Data.Sequence (Seq)
import qualified Data.Sequence as Seq import qualified Data.Sequence as Seq
import Prelude import Prelude
import Servant.API import Servant.API.Alternative
import Servant.API.ContentTypes
import Gargantext.API.Errors.Types (BackendInternalError)
import Gargantext.Utils.Jobs.Map import Gargantext.Utils.Jobs.Map
import Gargantext.Utils.Jobs.Monad import Gargantext.Utils.Jobs.Monad
...@@ -33,16 +35,16 @@ import qualified Servant.Job.Core as SJ ...@@ -33,16 +35,16 @@ import qualified Servant.Job.Core as SJ
import qualified Servant.Job.Types as SJ import qualified Servant.Job.Types as SJ
serveJobsAPI serveJobsAPI
:: ( Ord t, Exception e, MonadError e m :: ( Ord t, MonadError BackendInternalError m
, MonadJob m t (Seq event) output , MonadJob m t (Seq event) output
, ToJSON e, ToJSON event, ToJSON output , ToJSON event, ToJSON output, MimeRender JSON output
, Foldable callback , Foldable callback
) )
=> (SJ.JobID 'SJ.Safe -> LoggerM m event -> JobHandle m) => (SJ.JobID 'SJ.Safe -> LoggerM m event -> JobHandle m)
-> m env -> m env
-> t -> t
-> (JobError -> e) -> (JobError -> BackendInternalError)
-> (env -> JobHandle m -> input -> IO (Either e output)) -> (env -> JobHandle m -> input -> IO (Either BackendInternalError output))
-> SJ.AsyncJobsServerT' ctI ctO callback event input output m -> SJ.AsyncJobsServerT' ctI ctO callback event input output m
serveJobsAPI newJobHandle getenv t joberr f serveJobsAPI newJobHandle getenv t joberr f
= newJob newJobHandle getenv t f (SJ.JobInput undefined Nothing) = newJob newJobHandle getenv t f (SJ.JobInput undefined Nothing)
...@@ -50,10 +52,10 @@ serveJobsAPI newJobHandle getenv t joberr f ...@@ -50,10 +52,10 @@ serveJobsAPI newJobHandle getenv t joberr f
:<|> serveJobAPI t joberr :<|> serveJobAPI t joberr
serveJobAPI serveJobAPI
:: forall (m :: Type -> Type) e t event output. :: forall (m :: Type -> Type) t event output.
(Ord t, MonadError e m, MonadJob m t (Seq event) output) (Ord t, MonadError BackendInternalError m, MonadJob m t (Seq event) output, MimeRender JSON output)
=> t => t
-> (JobError -> e) -> (JobError -> BackendInternalError)
-> SJ.JobID 'SJ.Unsafe -> SJ.JobID 'SJ.Unsafe
-> SJ.AsyncJobServerT event output m -> SJ.AsyncJobServerT event output m
serveJobAPI t joberr jid' = wrap' (killJob t) serveJobAPI t joberr jid' = wrap' (killJob t)
...@@ -72,14 +74,15 @@ serveJobAPI t joberr jid' = wrap' (killJob t) ...@@ -72,14 +74,15 @@ serveJobAPI t joberr jid' = wrap' (killJob t)
wrap' g limit offset = wrap (g limit offset) wrap' g limit offset = wrap (g limit offset)
newJob newJob
:: ( Ord t, Exception e, MonadJob m t (Seq event) output :: ( Ord t, MonadJob m t (Seq event) output
, ToJSON e, ToJSON event, ToJSON output , ToJSON event, ToJSON output
, MimeRender JSON output
, Foldable callbacks , Foldable callbacks
) )
=> (SJ.JobID 'SJ.Safe -> LoggerM m event -> JobHandle m) => (SJ.JobID 'SJ.Safe -> LoggerM m event -> JobHandle m)
-> m env -> m env
-> t -> t
-> (env -> JobHandle m -> input -> IO (Either e output)) -> (env -> JobHandle m -> input -> IO (Either BackendInternalError output))
-> SJ.JobInput callbacks input -> SJ.JobInput callbacks input
-> m (SJ.JobStatus 'SJ.Safe event) -> m (SJ.JobStatus 'SJ.Safe event)
newJob newJobHandle getenv jobkind f input = do newJob newJobHandle getenv jobkind f input = do
......
...@@ -15,71 +15,24 @@ Server to be used: https://gitlab.iscpif.fr/gargantext/spacy-server ...@@ -15,71 +15,24 @@ Server to be used: https://gitlab.iscpif.fr/gargantext/spacy-server
{-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TemplateHaskell #-}
module Gargantext.Utils.SpacyNLP where module Gargantext.Utils.SpacyNLP (
module Gargantext.Utils.SpacyNLP.Types
, spacyRequest
, spacyTagsToToken
, spacyDataToPosSentences
, nlp
) where
import Control.Lens
import Data.Aeson (encode) import Data.Aeson (encode)
import Data.Aeson.TH (deriveJSON)
import Data.Text hiding (map, group, filter, concat, zip) import Data.Text hiding (map, group, filter, concat, zip)
import Gargantext.Core (Lang(..)) import Gargantext.Core (Lang(..))
import Gargantext.Core.Text.Terms.Multi.PosTagging.Types import Gargantext.Core.Text.Terms.Multi.PosTagging.Types
import Gargantext.Core.Types (POS(..), NER(..))
import Gargantext.Core.Utils.Prefix (unPrefix)
import Gargantext.Prelude import Gargantext.Prelude
import Network.HTTP.Simple (parseRequest, httpJSON, setRequestBodyLBS, getResponseBody, Response) import Network.HTTP.Simple (parseRequest, httpJSON, setRequestBodyLBS, getResponseBody, Response)
import Network.URI (URI(..)) import Network.URI (URI(..))
import Gargantext.Utils.SpacyNLP.Types
data SpacyData = SpacyData { _spacy_data :: ![SpacyText]}
deriving (Show)
data SpacyText = SpacyText { _spacy_text :: !Text
, _spacy_tags :: ![SpacyTags]
} deriving (Show)
data SpacyTags =
SpacyTags { _spacyTags_text :: !Text
, _spacyTags_text_with_ws :: !Text
, _spacyTags_whitespace :: !Text
, _spacyTags_head :: !Text
, _spacyTags_head_index :: !Int
, _spacyTags_left_edge :: !Text
, _spacyTags_right_edge :: !Text
, _spacyTags_index :: Int
, _spacyTags_ent_type :: !NER
, _spacyTags_ent_iob :: !Text
, _spacyTags_lemma :: !Text
, _spacyTags_normalized :: !Text
, _spacyTags_shape :: !Text
, _spacyTags_prefix :: !Text
, _spacyTags_suffix :: !Text
, _spacyTags_is_alpha :: Bool
, _spacyTags_is_ascii :: Bool
, _spacyTags_is_digit :: Bool
, _spacyTags_is_title :: Bool
, _spacyTags_is_punct :: Bool
, _spacyTags_is_left_punct :: Bool
, _spacyTags_is_right_punct :: Bool
, _spacyTags_is_space :: Bool
, _spacyTags_is_bracket :: Bool
, _spacyTags_is_quote :: Bool
, _spacyTags_is_currency :: Bool
, _spacyTags_like_url :: Bool
, _spacyTags_like_num :: Bool
, _spacyTags_like_email :: Bool
, _spacyTags_is_oov :: Bool
, _spacyTags_is_stop :: Bool
, _spacyTags_pos :: POS
, _spacyTags_tag :: POS
, _spacyTags_dep :: !Text
, _spacyTags_lang :: !Text
, _spacyTags_prob :: !Int
, _spacyTags_char_offset :: !Int
} deriving (Show)
data SpacyRequest = SpacyRequest { _spacyRequest_text :: !Text }
deriving (Show)
spacyRequest :: URI -> Text -> IO SpacyData spacyRequest :: URI -> Text -> IO SpacyData
spacyRequest uri txt = do spacyRequest uri txt = do
req <- parseRequest $ "POST " <> show (uri { uriPath = "/pos" }) req <- parseRequest $ "POST " <> show (uri { uriPath = "/pos" })
...@@ -87,30 +40,18 @@ spacyRequest uri txt = do ...@@ -87,30 +40,18 @@ spacyRequest uri txt = do
result <- httpJSON request :: IO (Response SpacyData) result <- httpJSON request :: IO (Response SpacyData)
pure $ getResponseBody result pure $ getResponseBody result
-- Instances
deriveJSON (unPrefix "_spacy_") ''SpacyData
deriveJSON (unPrefix "_spacy_") ''SpacyText
deriveJSON (unPrefix "_spacyTags_") ''SpacyTags
deriveJSON (unPrefix "_spacyRequest_") ''SpacyRequest
makeLenses ''SpacyData
makeLenses ''SpacyText
makeLenses ''SpacyTags
makeLenses ''SpacyRequest
---------------------------------------------------------------- ----------------------------------------------------------------
spacyTagsToToken :: SpacyTags -> Token spacyTagsToToken :: SpacyTags -> Token
spacyTagsToToken st = Token (st ^. spacyTags_index) spacyTagsToToken st = Token (_spacyTags_index st)
(st ^. spacyTags_normalized) (_spacyTags_normalized st)
(st ^. spacyTags_text) (_spacyTags_text st)
(st ^. spacyTags_lemma) (_spacyTags_lemma st)
(st ^. spacyTags_head_index) (_spacyTags_head_index st)
(st ^. spacyTags_char_offset) (_spacyTags_char_offset st)
(Just $ st ^. spacyTags_pos) (Just $ _spacyTags_pos st)
(Just $ st ^. spacyTags_ent_type) (Just $ _spacyTags_ent_type st)
(Just $ st ^. spacyTags_prefix) (Just $ _spacyTags_prefix st)
(Just $ st ^. spacyTags_suffix) (Just $ _spacyTags_suffix st)
spacyDataToPosSentences :: SpacyData -> PosSentences spacyDataToPosSentences :: SpacyData -> PosSentences
spacyDataToPosSentences (SpacyData ds) = PosSentences spacyDataToPosSentences (SpacyData ds) = PosSentences
......
{-|
Module : Gargantext.Utils.SpacyNLP.Types
Description : Spacy NLP API connection
Copyright : (c) CNRS, 2017
License : AGPL + CECILL v3
Maintainer : team@gargantext.org
Stability : experimental
Portability : POSIX
Spacy ecosystem: https://github.com/explosion/spaCy
Server to be used: https://gitlab.iscpif.fr/gargantext/spacy-server
-}
{-# LANGUAGE TemplateHaskell #-}
module Gargantext.Utils.SpacyNLP.Types where
import Control.Lens
import Data.Aeson.TH (deriveJSON)
import Data.Text hiding (map, group, filter, concat, zip)
import Gargantext.Core.Types (POS(..), NER(..))
import Gargantext.Core.Utils.Prefix (unPrefix)
import Gargantext.Prelude
data SpacyData = SpacyData { _spacy_data :: ![SpacyText]}
deriving (Show)
data SpacyText = SpacyText { _spacy_text :: !Text
, _spacy_tags :: ![SpacyTags]
} deriving (Show)
data SpacyTags =
SpacyTags { _spacyTags_text :: !Text
, _spacyTags_text_with_ws :: !Text
, _spacyTags_whitespace :: !Text
, _spacyTags_head :: !Text
, _spacyTags_head_index :: !Int
, _spacyTags_left_edge :: !Text
, _spacyTags_right_edge :: !Text
, _spacyTags_index :: Int
, _spacyTags_ent_type :: !NER
, _spacyTags_ent_iob :: !Text
, _spacyTags_lemma :: !Text
, _spacyTags_normalized :: !Text
, _spacyTags_shape :: !Text
, _spacyTags_prefix :: !Text
, _spacyTags_suffix :: !Text
, _spacyTags_is_alpha :: Bool
, _spacyTags_is_ascii :: Bool
, _spacyTags_is_digit :: Bool
, _spacyTags_is_title :: Bool
, _spacyTags_is_punct :: Bool
, _spacyTags_is_left_punct :: Bool
, _spacyTags_is_right_punct :: Bool
, _spacyTags_is_space :: Bool
, _spacyTags_is_bracket :: Bool
, _spacyTags_is_quote :: Bool
, _spacyTags_is_currency :: Bool
, _spacyTags_like_url :: Bool
, _spacyTags_like_num :: Bool
, _spacyTags_like_email :: Bool
, _spacyTags_is_oov :: Bool
, _spacyTags_is_stop :: Bool
, _spacyTags_pos :: POS
, _spacyTags_tag :: POS
, _spacyTags_dep :: !Text
, _spacyTags_lang :: !Text
, _spacyTags_prob :: !Int
, _spacyTags_char_offset :: !Int
} deriving (Show)
data SpacyRequest = SpacyRequest { _spacyRequest_text :: !Text }
deriving (Show)
--
-- JSON instances
--
deriveJSON (unPrefix "_spacyTags_") ''SpacyTags
deriveJSON (unPrefix "_spacy_") ''SpacyText
deriveJSON (unPrefix "_spacy_") ''SpacyData
deriveJSON (unPrefix "_spacyRequest_") ''SpacyRequest
--
-- Lenses
--
makeLenses ''SpacyData
makeLenses ''SpacyText
makeLenses ''SpacyTags
makeLenses ''SpacyRequest
resolver: lts-18.28 resolver: lts-21.17
flags: flags:
accelerate: accelerate:
debug: false debug: false
wikiparsec:
library-only: true
extra-package-dbs: [] extra-package-dbs: []
skip-ghc-check: true skip-ghc-check: true
system-ghc: true
packages: packages:
- . - .
docker:
enable: false
repo: 'cgenie/stack-build:lts-18.18-garg'
run-args:
- '--publish=8008:8008'
nix:
enable: false
add-gc-roots: true
shell-file: nix/stack-shell.nix
allow-newer: true allow-newer: true
extra-deps: extra-deps:
...@@ -33,20 +25,8 @@ extra-deps: ...@@ -33,20 +25,8 @@ extra-deps:
- git: https://gitlab.iscpif.fr/gargantext/hlcm.git - git: https://gitlab.iscpif.fr/gargantext/hlcm.git
commit: 6f0595d2421005837d59151a8b26eee83ebb67b5 commit: 6f0595d2421005837d59151a8b26eee83ebb67b5
# API libs # API libs
- git: https://github.com/haskell-servant/servant.git
commit: c2af6e775d1d36f2011d43aff230bb502f8fba63
subdirs:
- servant/
- servant-server/
- servant-client-core/
- servant-client/
- servant-auth/servant-auth/
- servant-auth/servant-auth-client/
- servant-auth/servant-auth-server/
- git: https://gitlab.iscpif.fr/gargantext/servant-xml-conduit.git - git: https://gitlab.iscpif.fr/gargantext/servant-xml-conduit.git
commit: 339fd608341bd2652cf5c0e9e76a3293acffbea6 commit: 339fd608341bd2652cf5c0e9e76a3293acffbea6
- git: https://github.com/alpmestan/ekg-json.git
commit: fd7e5d7325939103cd87d0dc592faf644160341c
# Databases libs # Databases libs
- git: https://github.com/adinapoli/haskell-opaleye.git - git: https://github.com/adinapoli/haskell-opaleye.git
commit: e9a29582ac66198dd2c2fdc3f8c8a4b1e6fbe004 commit: e9a29582ac66198dd2c2fdc3f8c8a4b1e6fbe004
...@@ -58,7 +38,7 @@ extra-deps: ...@@ -58,7 +38,7 @@ extra-deps:
- git: https://gitlab.iscpif.fr/gargantext/crawlers/istex.git - git: https://gitlab.iscpif.fr/gargantext/crawlers/istex.git
commit: c0a08d62c40a169b7934ceb7cb12c39952160e7a commit: c0a08d62c40a169b7934ceb7cb12c39952160e7a
- git: https://gitlab.iscpif.fr/gargantext/crawlers/hal.git - git: https://gitlab.iscpif.fr/gargantext/crawlers/hal.git
commit: 25a1e9558075462a82660987920a698b8863dd63 commit: bfa9069b4ff70f341ca3244e8aff9e83eb4b8b73
- git: https://gitlab.iscpif.fr/gargantext/crawlers/isidore.git - git: https://gitlab.iscpif.fr/gargantext/crawlers/isidore.git
commit: 3db385e767d2100d8abe900833c6e7de3ac55e1b commit: 3db385e767d2100d8abe900833c6e7de3ac55e1b
- git: https://gitlab.iscpif.fr/gargantext/crawlers/arxiv-api.git - git: https://gitlab.iscpif.fr/gargantext/crawlers/arxiv-api.git
...@@ -72,30 +52,31 @@ extra-deps: ...@@ -72,30 +52,31 @@ extra-deps:
commit: b4182487cfe479777c11ca19f3c0d47840b376f6 commit: b4182487cfe479777c11ca19f3c0d47840b376f6
- git: https://github.com/delanoe/patches-map - git: https://github.com/delanoe/patches-map
commit: 76cae88f367976ff091e661ee69a5c3126b94694 commit: 76cae88f367976ff091e661ee69a5c3126b94694
- git: https://gitlab.iscpif.fr/cgenie/patches-class.git - git: https://gitlab.iscpif.fr/gargantext/patches-class.git
commit: 125c7cb90ab8f0cd6ac4a526dbdf302d10c945e9 commit: 3668d28607867a88b2dfc62158139b3cfd629ddb
# Graph libs # Graph libs
- git: https://gitlab.iscpif.fr/gargantext/haskell-igraph.git - git: https://gitlab.iscpif.fr/gargantext/haskell-igraph.git
# 0.10.4-rc1 # 0.10.4-rc2
commit: 2a28524134b68421f30f6e97961063018f814a82 commit: 9f8a2f4a014539826a4eab3215cc70c0813f20cb
- git: https://gitlab.iscpif.fr/gargantext/haskell-infomap.git - git: https://gitlab.iscpif.fr/gargantext/haskell-infomap.git
commit: 1370fea1939e2378ce344e512d80671ac700e787 commit: 1370fea1939e2378ce344e512d80671ac700e787
# Accelerate Linear Algebra and specific instances # Accelerate Linear Algebra and specific instances
- git: https://github.com/alpmestan/accelerate.git - git: https://github.com/AccelerateHS/accelerate.git
commit: 640b5af87cea94b61c7737d878e6f7f2fca5c015 commit: 334d05519436bb7f20f9926ec76418f5b8afa359
- git: https://github.com/alpmestan/accelerate-arithmetic.git - git: https://github.com/alpmestan/accelerate-arithmetic.git
commit: a110807651036ca2228a76507ee35bbf7aedf87a commit: a110807651036ca2228a76507ee35bbf7aedf87a
- git: https://gitlab.iscpif.fr/amestanogullari/accelerate-utility.git - git: https://gitlab.iscpif.fr/amestanogullari/accelerate-utility.git
commit: a3875fe652d3bb5acb522674c22c6c814c1b4ad0 commit: a3875fe652d3bb5acb522674c22c6c814c1b4ad0
- git: https://github.com/alpmestan/accelerate-llvm.git - git: https://github.com/AccelerateHS/accelerate-llvm.git
commit: 944f5a4aea35ee6aedb81ea754bf46b131fce9e3 commit: 2b5d69448557e89002c0179ea1aaf59bb757a6e3
subdirs: subdirs:
- accelerate-llvm/ - accelerate-llvm/
- accelerate-llvm-native/ - accelerate-llvm-native/
- git: https://github.com/rspeer/wikiparsec.git - git: https://github.com/adinapoli/wikiparsec.git
commit: 9637a82344bb70f7fa8f02e75db3c081ccd434ce commit: b3519a0351ae9515497680571f76200c24dedb53
# Gargantext-graph # Gargantext-graph
- eigen-3.3.7.0@sha256:7c24a86426b238e02ba0ac2179cc44997518b443a13828ed7a791fe49a6dffa5,82060 - git: https://github.com/chessai/eigen.git
commit: 8fff32a43df743c8c83428a86dd566a0936a4fba
- git: https://github.com/alpmestan/sparse-linear.git - git: https://github.com/alpmestan/sparse-linear.git
commit: bc6ca8058077b0b5702ea4b88bd4189cfcad267a commit: bc6ca8058077b0b5702ea4b88bd4189cfcad267a
subdirs: subdirs:
...@@ -104,48 +85,73 @@ extra-deps: ...@@ -104,48 +85,73 @@ extra-deps:
commit: b9fca8beee0f23c17a6b2001ec834d071709e6e7 commit: b9fca8beee0f23c17a6b2001ec834d071709e6e7
subdirs: subdirs:
- packages/base - packages/base
- git: https://github.com/adinapoli/llvm-hs.git
commit: 7533a9ccd3bfe77141745f6b61039a26aaf5c83b
subdirs:
- llvm-hs
- llvm-hs-pure
# Mercury is a reputable Haskell company.
- git: https://github.com/MercuryTechnologies/ekg-json.git
commit: 232db57d6ce0940fcc902adf30a9ed3f3561f21d
# Temporary fork of boolexpr # Temporary fork of boolexpr
- git: https://github.com/adinapoli/boolexpr.git - git: https://github.com/adinapoli/boolexpr.git
commit: 91928b5d7f9342e9865dde0d94862792d2b88779 commit: 91928b5d7f9342e9865dde0d94862792d2b88779
# Temporary fork of duckling
- git: https://github.com/adinapoli/duckling.git
commit: 23603a832117e5352d5b0fb9bb1110228324b35a
- git: https://github.com/adinapoli/text16-compat.git
commit: 85533b5d597e6fc5498411b4bcfc76380ec80d71
# Others dependencies (using stack resolver) # Others dependencies (using stack resolver)
- HSvm-0.1.1.3.22
- KMP-0.2.0.0@sha256:6dfbac03ef00ebd9347234732cb86a40f62ab5a80c0cc6bedb8eb51766f7df28,2562
- MissingH-1.4.3.0@sha256:32f9892ec98cd21df4f4d3ed8d95a3831ae74287ea0641d6f09b2dc6ef061d39,4859
- Unique-0.4.7.8@sha256:9661f45aa31dde119a2114566166ea38b011a45653337045ee4ced75636533c0,2067
- constraints-extras-0.3.1.0@sha256:12016ebb91ad5ed2c82bf7e48c6bd6947d164d33c9dca5ac3965de1bb6c780c0,1777
- context-0.2.0.0@sha256:6b643adb4a64fe521873d08df0497f71f88e18b9ecff4b68b4eef938e446cfc9,1886 - context-0.2.0.0@sha256:6b643adb4a64fe521873d08df0497f71f88e18b9ecff4b68b4eef938e446cfc9,1886
- dependent-sum-0.7.1.0@sha256:0e419237f5b86da3659772afff9cab355c0f8d5b3fdb15a5b30e673d8dc83941,2147 - dependent-sum-0.7.1.0@sha256:0e419237f5b86da3659772afff9cab355c0f8d5b3fdb15a5b30e673d8dc83941,2147
- duckling-0.2.0.0@sha256:84becd4e48ee3676cdd6fe5745a77ee60e365ea730cd759610c8a7738f3eb4a6,60543 - fast-tagsoup-1.0.14@sha256:5aacc569a6ab9633077bb181f6bd6a4dde3ebc7a8b844d2fe7b1920a0cf0ba9f,1343
- fast-tagsoup-utf8-only-1.0.5@sha256:9292c8ff275c08b88b6013ccc410182552f180904214a07ad4db932ab462aaa1,1651 - fclabels-2.0.5.1@sha256:0ec200ff51561ed6ee60fa52d47427cd4bbbb3ace618884465ce2b27840cc1d7,4621
- fclabels-2.0.5@sha256:817006077632bd29e637956154aa33d3c10a59be0791c308cef955eb951b2675,4473
- full-text-search-0.2.1.4@sha256:81f6df3327e5b604f99b15e78635e5d6ca996e504c21d268a6d751d7d131aa36,6032
- fullstop-0.1.4@sha256:80a3e382ef53551bb936e7da8b2825621df0ea169af1212debcb0a90010b30c8,2044 - fullstop-0.1.4@sha256:80a3e382ef53551bb936e7da8b2825621df0ea169af1212debcb0a90010b30c8,2044
- ghc-clippy-plugin-0.0.0.1 - full-text-search-0.2.1.4@sha256:81f6df3327e5b604f99b15e78635e5d6ca996e504c21d268a6d751d7d131aa36,6032
- ghc-parser-0.2.6.0@sha256:0b0cbceb3bd2762cef201dc54ae302d7918bed23b2f85ffd99c1c8b6a9df32b6,1579
- hgal-2.0.0.2@sha256:13d58afd0668b9cb881c612eff8488a0e289edd4bbffa893df4beee60cfeb73b,653 - hgal-2.0.0.2@sha256:13d58afd0668b9cb881c612eff8488a0e289edd4bbffa893df4beee60cfeb73b,653
- hsparql-0.3.8 - hsparql-0.3.8
- hstatistics-0.3.1
- hspec-2.11.1 - hspec-2.11.1
- hspec-core-2.11.1 - hspec-core-2.11.1
- hspec-discover-2.11.1 - hspec-discover-2.11.1
- hspec-expectations-0.8.3 - hspec-expectations-0.8.3
- json-stream-0.4.2.4@sha256:8b7f17d54a6e1e6311756270f8bcf51e91bab4300945400de66118470dcf51b9,4716 - hstatistics-0.3.1
- HSvm-0.1.1.3.22
- ihaskell-0.11.0.0
- ipython-kernel-0.11.0.0
- KMP-0.2.0.0@sha256:6dfbac03ef00ebd9347234732cb86a40f62ab5a80c0cc6bedb8eb51766f7df28,2562
- located-base-0.1.1.1@sha256:7c6395f2b6fbf2d5f76c3514f774423838c0ea94e1c6a5530dd3c94b30c9d1c8,1904 - located-base-0.1.1.1@sha256:7c6395f2b6fbf2d5f76c3514f774423838c0ea94e1c6a5530dd3c94b30c9d1c8,1904
- logging-effect-1.3.12@sha256:72d168dd09887649ba9501627219b6027cbec2d5541931555b7885b133785ce3,1679 - logging-effect-1.3.12@sha256:72d168dd09887649ba9501627219b6027cbec2d5541931555b7885b133785ce3,1679
- logict-0.8.0.0 - logict-0.8.0.0
- MissingH-1.4.3.0@sha256:32f9892ec98cd21df4f4d3ed8d95a3831ae74287ea0641d6f09b2dc6ef061d39,4859
- monad-logger-aeson-0.2.0.0 - monad-logger-aeson-0.2.0.0
- monoid-extras-0.5.1@sha256:438dbfd7b4dce47d8f0ca577f56caf94bd1e21391afa545cad09fe7cf2e5793d,2333 - morpheus-graphql-0.24.3
- morpheus-graphql-app-0.24.3
- morpheus-graphql-client-0.24.3
- morpheus-graphql-code-gen-0.24.3
- morpheus-graphql-code-gen-utils-0.24.3
- morpheus-graphql-core-0.24.3
- morpheus-graphql-server-0.24.3
- morpheus-graphql-subscriptions-0.24.3
- morpheus-graphql-tests-0.24.3
- rake-0.0.1@sha256:3380f6567fb17505d1095b7f32222c0b631fa04126ad39726c84262da99c08b3,2025 - rake-0.0.1@sha256:3380f6567fb17505d1095b7f32222c0b631fa04126ad39726c84262da99c08b3,2025
- random-1.2.1 - random-1.2.1
- recover-rtti-0.4.3 - recover-rtti-0.4.3
- servant-cassava-0.10.1@sha256:07e7b6ca67cf57dcb4a0041a399a25d058844505837c6479e01d62be59d01fdf,1665 - servant-0.20.1
- servant-auth-server-0.4.8.0@sha256:f21b8b6a0a31811f31d9b1c1e8a1ca68a0f470cab4ffe8b9c2a41b7649492bb2,5521
- servant-auth-swagger-0.2.10.2@sha256:eda169d986faf562b6f43ac90d48c4fcf895dc235dd002d523ad582ec9be833f,2909
- servant-client-core-0.20
- servant-ekg-0.3.1@sha256:19bd9dc3943983da8e79d6f607614c68faea4054fb889d508c8a2b67b6bdd448,2203 - servant-ekg-0.3.1@sha256:19bd9dc3943983da8e79d6f607614c68faea4054fb889d508c8a2b67b6bdd448,2203
- servant-flatten-0.2@sha256:276896f7c5cdec5b8f8493f6205fded0cc602d050b58fdb09a6d7c85c3bb0837,1234 - servant-flatten-0.2@sha256:276896f7c5cdec5b8f8493f6205fded0cc602d050b58fdb09a6d7c85c3bb0837,1234
- servant-mock-0.8.7@sha256:64cb3e52bbd51ab6cb25e3f412a99ea712c6c26f1efd117f01a8d1664df49c67,2306 - servant-mock-0.8.7@sha256:64cb3e52bbd51ab6cb25e3f412a99ea712c6c26f1efd117f01a8d1664df49c67,2306
- servant-server-0.20
- snap-server-1.1.2.1@sha256:8ea05b9b068c1e86be77073107eadc177d7eec93724963c063877478a062b229,15471
- stemmer-0.5.2@sha256:823aec56249ec2619f60a2c0d1384b732894dbbbe642856d337ebfe9629a0efd,4082 - stemmer-0.5.2@sha256:823aec56249ec2619f60a2c0d1384b732894dbbbe642856d337ebfe9629a0efd,4082
- taggy-0.2.1@sha256:7bc55ddba178971dc6052163597f0445a0a2b5b0ca0e84ce651d53d722e3c265,4662 - taggy-0.2.1@sha256:7bc55ddba178971dc6052163597f0445a0a2b5b0ca0e84ce651d53d722e3c265,4662
- taggy-lens-0.1.2@sha256:091ca81d02bd3d7fb493dce0148e1a38f25eb178a1ebd751043a23239e5e3265,3009 - taggy-lens-0.1.2@sha256:091ca81d02bd3d7fb493dce0148e1a38f25eb178a1ebd751043a23239e5e3265,3009
- tasty-hspec-1.2.0.3 - tasty-hspec-1.2.0.3
- tmp-postgres-1.34.1.0 - tmp-postgres-1.34.1.0
- Unique-0.4.7.8@sha256:9661f45aa31dde119a2114566166ea38b011a45653337045ee4ced75636533c0,2067
- vector-0.12.3.0@sha256:0ae2c1ba86f0077910be242ec6802cc3d7725fe7b2bea6987201aa3737b239b5,7953 - vector-0.12.3.0@sha256:0ae2c1ba86f0077910be242ec6802cc3d7725fe7b2bea6987201aa3737b239b5,7953
- wai-3.2.4 - wai-3.2.4
......
...@@ -47,7 +47,7 @@ tests = sequential $ aroundAll withTestDBAndPort $ do ...@@ -47,7 +47,7 @@ tests = sequential $ aroundAll withTestDBAndPort $ do
| Status{..} <- simpleStatus | Status{..} <- simpleStatus
->liftIO $ do ->liftIO $ do
statusCode `shouldBe` 404 statusCode `shouldBe` 404
simpleBody `shouldBe` [r|{"node":99,"error":"Node does not exist"}|] simpleBody `shouldBe` [r|{"error":"Node does not exist","node":99}|]
it "returns the new error if header X-Garg-Error-Scheme: new is passed" $ \((_testEnv, port), app) -> do it "returns the new error if header X-Garg-Error-Scheme: new is passed" $ \((_testEnv, port), app) -> do
withApplication app $ do withApplication app $ do
......
...@@ -13,14 +13,14 @@ import Language.Haskell.TH.Quote ...@@ -13,14 +13,14 @@ import Language.Haskell.TH.Quote
import Network.HTTP.Types import Network.HTTP.Types
import Network.Wai.Test import Network.Wai.Test
import Prelude import Prelude
import qualified Data.Aeson as JSON
import qualified Data.Aeson.KeyMap as KM
import qualified Data.ByteString.Char8 as B
import Test.Hspec.Expectations import Test.Hspec.Expectations
import Test.Hspec.Wai import Test.Hspec.Wai
import Test.Hspec.Wai.JSON import Test.Hspec.Wai.JSON
import Test.Hspec.Wai.Matcher import Test.Hspec.Wai.Matcher
import Test.Tasty.HUnit import Test.Tasty.HUnit
import qualified Data.Aeson as JSON
import qualified Data.ByteString.Char8 as B
import qualified Data.HashMap.Strict as HM
-- | Marks the input 'Assertion' as pending, by ignoring any exception -- | Marks the input 'Assertion' as pending, by ignoring any exception
-- thrown by it. -- thrown by it.
...@@ -87,5 +87,5 @@ containsJSON expected = MatchBody matcher ...@@ -87,5 +87,5 @@ containsJSON expected = MatchBody matcher
isSubsetOf :: Value -> Value -> Bool isSubsetOf :: Value -> Value -> Bool
isSubsetOf (Object sub) (Object sup) = isSubsetOf (Object sub) (Object sup) =
all (\(key, value) -> HM.lookup key sup == Just value) (HM.toList sub) all (\(key, value) -> KM.lookup key sup == Just value) (KM.toList sub)
isSubsetOf x y = x == y isSubsetOf x y = x == y
...@@ -254,7 +254,7 @@ withJob :: Env ...@@ -254,7 +254,7 @@ withJob :: Env
-> IO (SJ.JobStatus 'SJ.Safe JobLog) -> IO (SJ.JobStatus 'SJ.Safe JobLog)
withJob env f = runMyDummyMonad env $ MyDummyMonad $ withJob env f = runMyDummyMonad env $ MyDummyMonad $
-- the job type doesn't matter in our tests, we use a random one, as long as it's of type 'GargJob'. -- the job type doesn't matter in our tests, we use a random one, as long as it's of type 'GargJob'.
newJob @_ @BackendInternalError mkJobHandle (pure env) RecomputeGraphJob (\_ hdl input -> newJob @_ mkJobHandle (pure env) RecomputeGraphJob (\_ hdl input ->
runMyDummyMonad env $ (Right <$> (f hdl input >> getLatestJobStatus hdl))) (SJ.JobInput () Nothing) runMyDummyMonad env $ (Right <$> (f hdl input >> getLatestJobStatus hdl))) (SJ.JobInput () Nothing)
withJob_ :: Env withJob_ :: Env
......
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
module Main where module Main where
import Gargantext.Prelude import Gargantext.Prelude hiding (isInfixOf)
import Control.Monad import Control.Monad
import Data.Text (isInfixOf) import Data.Text (isInfixOf)
......