Verified Commit 539a9ae2 authored by Przemyslaw Kaminski's avatar Przemyslaw Kaminski

Merge branch 'dev' into 224-dev-uniform-ngrams-creation

parents e2745b4a 5be7e264
# Optimising CI speed by using tips from https://blog.nimbleways.com/let-s-make-faster-gitlab-ci-cd-pipelines/
image: adinapoli/gargantext:v2.3
image: adinapoli/gargantext:v3.2
variables:
STACK_ROOT: "${CI_PROJECT_DIR}/.stack-root"
......@@ -11,8 +11,8 @@ variables:
CACHE_COMPRESSION_LEVEL: "fast"
stages:
- stack
- cabal
- stack
- bench
- test
......@@ -26,6 +26,7 @@ stack:
script:
- echo "Building the project from '$CI_PROJECT_DIR'"
- nix-shell --run "stack build --no-terminal --fast --dry-run"
allow_failure: false
cabal:
stage: cabal
......@@ -36,7 +37,7 @@ cabal:
- .cabal/
policy: pull-push
script:
- nix-shell --run "./bin/update-cabal-project $CABAL_STORE_DIR && cabal --store-dir=$CABAL_STORE_DIR v2-build --ghc-options='-O2 -fclear-plugins'"
- nix-shell --run "./bin/update-project-dependencies $CABAL_STORE_DIR && cabal --store-dir=$CABAL_STORE_DIR v2-build --ghc-options='-O2 -fclear-plugins'"
allow_failure: false
bench:
......@@ -48,7 +49,7 @@ bench:
- .cabal/
policy: pull-push
script:
- nix-shell --run "./bin/update-cabal-project $CABAL_STORE_DIR && cabal --store-dir=$CABAL_STORE_DIR v2-bench --ghc-options='-O2 -fclear-plugins'"
- nix-shell --run "./bin/update-project-dependencies $CABAL_STORE_DIR && cabal --store-dir=$CABAL_STORE_DIR v2-bench --ghc-options='-O2 -fclear-plugins'"
allow_failure: false
test:
......@@ -74,7 +75,7 @@ test:
echo $CABAL
echo $TEST_NIX_PATH
git config --global --add safe.directory '*'
nix-shell --run "./bin/update-cabal-project $CABAL_STORE_DIR"
nix-shell --run "./bin/update-project-dependencies $CABAL_STORE_DIR"
mkdir -p /root/.cache/cabal/logs
chown -R test:test /root/.cache/cabal/logs/
chown -R test:test /root/.cache/cabal/packages/hackage.haskell.org/
......
## Version 0.0.6.9.9.9.4.9.1
* [FRONT][ERRORFORMAT][Try to invite someone without email (#600)](https://gitlab.iscpif.fr/gargantext/purescript-gargantext/issues/600)
## Version 0.0.6.9.9.9.4.9
* [FRONT][ERRORFORMAT][Try to invite someone without email (#600)](https://gitlab.iscpif.fr/gargantext/purescript-gargantext/issues/600)
* [BACK][CABAL][Stack2cabal install (#301)](https://gitlab.iscpif.fr/gargantext/haskell-gargantext/issues/301)
## Version 0.0.6.9.9.9.4.8
* [BACK][GHC][Upgrade] Upgrade to 947
* [BACK][FIX][[API HAL] Remove useless first 2 characters before authors and sources: ", " (#296)](https://gitlab.iscpif.fr/gargantext/haskell-gargantext/issues/296)
* [BACK][FIX][Add server options to obfuscate credentials from logs (#298)](https://gitlab.iscpif.fr/gargantext/haskell-gargantext/issues/298)
* [BACK][FIX][[Corpus HAL] (Backend) Remove/comment the HAL "All" lang choice (#299)](https://gitlab.iscpif.fr/gargantext/haskell-gargantext/issues/299)
* [FRONT][FIX][[Corpus HAL] Remove/comment the HAL "All" lang choice (#624)](https://gitlab.iscpif.fr/gargantext/purescript-gargantext/issues/624)
## Version 0.0.6.9.9.9.4.7
* [BACK][LOGS][Add server options to obfuscate credentials from logs (#298)](https://gitlab.iscpif.fr/gargantext/haskell-gargantext/issues/298)
* [BACK][DOC] README
## Version 0.0.6.9.9.9.4.6
* [FRONT][FIX][Try to invite someone without email (#600)](https://gitlab.iscpif.fr/gargantext/purescript-gargantext/issues/600)
## Version 0.0.6.9.9.9.4.5
* [FRONT][FIX][The first change in the map term is never taken into account (#622)](https://gitlab.iscpif.fr/gargantext/purescript-gargantext/issues/622)
* [BACK][FIX][[Istex import] Changes in the parser: documents sources are wrong (JSON from ZIP file) (#293)](https://gitlab.iscpif.fr/gargantext/haskell-gargantext/issues/293)
## Version 0.0.6.9.9.9.4.4
* [FRONT][FIX][trim / deblank missing in invitations modal box (#618)](https://gitlab.iscpif.fr/gargantext/purescript-gargantext/issues/618)
......
......@@ -39,12 +39,16 @@ Disclaimer: since this project is still in development, this document remains in
```
### Installation
This project can be built with either Stack or Cabal. For historical reasons, we generate a `cabal.project` from the `stack.yaml`, and we do not commit the former to the repo, to have a single "source of truth".
However, it's always possible to generate a `cabal.project` thanks to [stack2cabal](https://hackage.haskell.org/package/stack2cabal).
This project can be built with either Stack or Cabal. We keep the `cabal.project` up-to-date (which allows us
to build with `cabal` by default) but we also support `stack` thanks to
[cabal2stack](https://github.com/iconnect/cabal2stack), which allows us to generate a valid `stack.yaml` from
a `cabal.project`. Due to the fact gargantext requires a particular set of system dependencies (C++ libraries,
toolchains, etc) we use [nix](https://nixos.org/) to setup an environment with all the required system
dependencies, in a sandboxed and isolated fashion.
#### Install Nix
Gargantext requires [Nix](https://github.com/NixOS/nix) to provide system dependencies (for example, C libraries), but its use is limited to that. In order to install [Nix](https://nixos.org/download.html):
As said, Gargantext requires [Nix](https://github.com/NixOS/nix) to provide system dependencies (for example, C libraries), but its use is limited to that. In order to install [Nix](https://nixos.org/download.html):
```shell
sh <(curl -L https://nixos.org/nix/install) --daemon
......@@ -53,7 +57,7 @@ sh <(curl -L https://nixos.org/nix/install) --daemon
Verify the installation is complete with
```shell
nix-env --version
nix-env (Nix) 2.16.0
nix-env (Nix) 2.19.2
```
**Important:** Before building the project with either `stack` or `cabal` you need to be in the correct Nix shell, which will fetch all the required system dependencies. To do so, just type:
......@@ -62,7 +66,7 @@ nix-env (Nix) 2.16.0
nix-shell
```
This will take a bit of time the first time.
This will take a bit of time as it has to download/build the dependencies, but this will be needed only the first time.
### Build: choose cabal (new) or stack (old)
......@@ -74,18 +78,11 @@ cabal update
cabal install
```
Alternatively, if you want to run the command "from the outside", in your current shell:
Once you have a valid version of `cabal`, building requires generating a valid `cabal.project`. This can be done by installing `stack2cabal`:
```shell
cabal v2-install stack2cabal-1.0.14
```
And finally:
```shell
stack2cabal --no-run-hpack -p '2023-06-25'
cabal v2-build
nix-shell --run "cabal update"
nix-shell --run "cabal install"
```
#### With Stack
......@@ -109,13 +106,32 @@ stack build --fast
```
#### Keeping the stack.yaml updated with the cabal.project
(Section for Developers using stack only)
Once you have a valid version of `stack`, building requires generating a valid `stack.yaml`.
This can be obtained by installing `cabal2stack`:
```shell
git clone https://github.com/iconnect/cabal2stack.git
cd cabal2stack
```
Then, depending on what build system you are using, either build with `cabal install --overwrite-policy=always` or `stack install`.
And finally:
#### Keeping the cabal.project updated with stack.yaml
```shell
cabal2stack --system-ghc --allow-newer --resolver lts-21.17 --resolver-file devops/stack/lts-21.17.yaml -o stack.yaml
stack build
```
Simply run:
The good news is that you don't have to do all of this manually; during development, after modifying the
`cabal.project`, it's enough to do:
```shell
./bin/update-cabal-project
./bin/update-project-dependencies
```
## Initialization <a name="init"></a>
......@@ -183,10 +199,10 @@ When a devlopment is needed on libraries (for instance, the HAL crawler in https
- turn off (temporarily) the `hal` in `source-repository-package`
2. When changes work and tests are OK, commit in repo `hal`
2. When changes are committed / merged:
1. Get the hash id, and edit `stack.yaml` with the **new commit id**
2. run `./bin/update-cabal-project`
- get an error that sha256 don't match, so update the `./bin/update-cabal-project` with new sha256 hash
- run again `./bin/update-cabal-project` (to make sure it's a fixed point now)
1. Get the hash id, and edit `cabal.project` with the **new commit id**
2. run `./bin/update-project-dependencies`
- get an error that sha256 don't match, so update the `./bin/update-project-dependencies` with new sha256 hash
- run again `./bin/update-project-dependencies` (to make sure it's a fixed point now)
> Note: without `stack.yaml` we would have to only fix `cabal.project` -> `source-repository-package` commit id. Sha256 is there to make sure CI reruns the tests.
......
#!/usr/bin/env bash
# update-cabal-project: regenerate `cabal.project` and `cabal.project.freeze`
# from `stack.yaml` via stack2cabal, then verify both generated files against
# pinned sha256 hashes so they stay deterministic (which lets the CI cache
# kick in).
#
# Usage: ./bin/update-cabal-project [CABAL_STORE_DIR]
#   CABAL_STORE_DIR defaults to ~/.cabal when not given.

set -euxo pipefail

DEFAULT_STORE=$HOME/.cabal
STORE_DIR="${1:-$DEFAULT_STORE}"

# Hackage index pin: keeps dependency resolution (and therefore the generated
# files' hashes) reproducible across machines.
INDEX_STATE="2023-12-04T09:05:40Z"

# README!
# Every time you modify the `stack.yaml` and, as a result, the relevant
# `cabal.project` changes, you have to make sure to update the
# `expected_cabal_project_hash` and `expected_cabal_project_freeze_hash`
# below with the `sha256sum` result calculated on the regenerated
# `cabal.project` and `cabal.project.freeze`.
# This ensures the files stay deterministic so that the CI cache can kick in.
expected_cabal_project_hash="0613aabf6f4b9ab9973b4dee63d6ac525e86152ddcd1d19acac0eec0109fc57e"
expected_cabal_project_freeze_hash="796f0109611f3381278b1885ae1fa257c4177b99885eb04701938f1107c06ee5"

# Refresh the package index at the pinned state.
# NOTE: $STORE_DIR is quoted everywhere so paths containing spaces work.
cabal --store-dir="$STORE_DIR" v2-update "hackage.haskell.org,${INDEX_STATE}"

# Install stack2cabal if it can't be found.
if ! stack2cabal --help &> /dev/null
then
    echo "stack2cabal could not be found"
    cabal --store-dir="$STORE_DIR" v2-install --index-state="${INDEX_STATE}" stack2cabal-1.0.14 --overwrite-policy=always
fi

# Generate cabal.project / cabal.project.freeze from stack.yaml.
stack2cabal --no-run-hpack -p "${INDEX_STATE}"

# Compare the freshly generated files against the pinned hashes; a mismatch
# means the developer must update the expected_* values above.
actual_cabal_project_hash=$(sha256sum cabal.project | awk '{printf "%s",$1}')
actual_cabal_project_freeze_hash=$(sha256sum cabal.project.freeze | awk '{printf "%s",$1}')

if [[ $actual_cabal_project_hash != $expected_cabal_project_hash ]]; then
  echo "ERROR! hash mismatch between expected cabal.project and the one computed by stack2cabal."
  exit 1
else
  echo "cabal.project updated successfully."
fi

if [[ $actual_cabal_project_freeze_hash != $expected_cabal_project_freeze_hash ]]; then
  echo "ERROR! hash mismatch between expected cabal.project.freeze and the one computed by stack2cabal."
  exit 1
else
  echo "cabal.project.freeze updated successfully."
fi
#!/usr/bin/env bash
# update-project-dependencies: regenerate `stack.yaml` from `cabal.project`
# via cabal2stack, regenerate `cabal.project.freeze` via `cabal v2-freeze`,
# and verify `cabal.project` / `cabal.project.freeze` against pinned sha256
# hashes so they stay deterministic (which lets the CI cache kick in).
#
# Usage: ./bin/update-project-dependencies [CABAL_STORE_DIR]
#   CABAL_STORE_DIR defaults to ~/.cabal when not given.

set -euxo pipefail

DEFAULT_STORE=$HOME/.cabal
STORE_DIR="${1:-$DEFAULT_STORE}"

# Hackage index pin: keeps dependency resolution (and therefore the freeze
# file's hash) reproducible across machines.
INDEX_STATE="2023-12-10T10:34:46Z"

# README!
# Every time you modify the `cabal.project`, you have to make sure to update
# the `expected_cabal_project_hash` and `expected_cabal_project_freeze_hash`
# below with the `sha256sum` result calculated on the `cabal.project` and
# the regenerated `cabal.project.freeze`. This ensures the files stay
# deterministic so that the CI cache can kick in.
expected_cabal_project_hash="1e4d40d48546606fba0ce0eaae9f2799c57d8ce97c4425940f3a535c4f628a8a"
expected_cabal_project_freeze_hash="2c13034bdeaeaece6c81362ef047c3102782b4fbf4fd7670bb677bd1ac3b0151"

# Refresh the package index at the pinned state.
# NOTE: $STORE_DIR is quoted everywhere so paths containing spaces work.
cabal --store-dir="$STORE_DIR" v2-update "hackage.haskell.org,${INDEX_STATE}"

# Install cabal2stack if it can't be found (it is not on Hackage, so we
# build it from a temporary git checkout).
if ! cabal2stack --help &> /dev/null
then
    echo "cabal2stack could not be found"
    CURDIR=$PWD
    git clone https://github.com/iconnect/cabal2stack.git cabal2stack-installer
    cd cabal2stack-installer
    cabal --store-dir="$STORE_DIR" v2-install --index-state="${INDEX_STATE}" --overwrite-policy=always
    cd "$CURDIR"
    rm -rf cabal2stack-installer
fi

# Dry-run build to make sure the plan resolves, then derive stack.yaml from
# cabal.project and pin the full dependency set in cabal.project.freeze.
cabal --store-dir="$STORE_DIR" v2-build --dry-run
cabal2stack --system-ghc --allow-newer --resolver lts-21.17 --resolver-file devops/stack/lts-21.17.yaml -o stack.yaml
cabal --store-dir="$STORE_DIR" v2-freeze

# Run 'sed' to remove the constraint for 'gargantext', as it doesn't make sense
# there and for the tests we need to build it with a different flag.
echo -e "\e[33mPatching cabal.project.freeze to remove redundant constraint on gargantext\e[0m"
sed -i '/^ *gargantext/d' cabal.project.freeze

# Compare the files against the pinned hashes; a mismatch means the developer
# must update the expected_* values above.
actual_cabal_project_hash=$(sha256sum cabal.project | awk '{printf "%s",$1}')
actual_cabal_project_freeze_hash=$(sha256sum cabal.project.freeze | awk '{printf "%s",$1}')

if [[ $actual_cabal_project_hash != $expected_cabal_project_hash ]]; then
  # NOTE: cabal.project is hand-maintained; the hash check just pins its
  # current content (the old message blamed cabal2stack, which never touches
  # this file).
  echo -e "\e[31mERROR! hash mismatch between the expected cabal.project hash and the actual one.\e[0m"
  echo -e "\e[33mPlease update the hashes inside the './bin/update-project-dependencies' file.\e[0m"
  exit 1
else
  echo -e "\e[32mstack.yaml updated successfully.\e[0m"
fi

if [[ $actual_cabal_project_freeze_hash != $expected_cabal_project_freeze_hash ]]; then
  # The freeze file is produced by 'cabal v2-freeze' (then patched by sed
  # above), not by cabal2stack.
  echo -e "\e[31mERROR! hash mismatch between the expected cabal.project.freeze hash and the one computed by 'cabal v2-freeze'.\e[0m"
  echo -e "\e[33mPlease update the hashes inside the './bin/update-project-dependencies' file.\e[0m"
  exit 1
else
  echo -e "\e[32mcabal.project.freeze updated successfully.\e[0m"
fi
-- Generated by stack2cabal
index-state: 2023-12-04T00:00:00Z
index-state: 2023-12-10T10:34:46Z
with-compiler: ghc-8.10.7
with-compiler: ghc-9.4.7
packages:
./
source-repository-package
type: git
location: https://github.com/AccelerateHS/accelerate.git
tag: 334d05519436bb7f20f9926ec76418f5b8afa359
source-repository-package
type: git
location: https://github.com/AccelerateHS/accelerate-llvm.git
tag: 2b5d69448557e89002c0179ea1aaf59bb757a6e3
subdir: accelerate-llvm-native/
accelerate-llvm/
source-repository-package
type: git
location: https://github.com/adinapoli/boolexpr.git
tag: 91928b5d7f9342e9865dde0d94862792d2b88779
source-repository-package
type: git
location: https://github.com/adinapoli/duckling.git
tag: 23603a832117e5352d5b0fb9bb1110228324b35a
source-repository-package
type: git
location: https://github.com/adinapoli/haskell-opaleye.git
......@@ -19,25 +36,25 @@ source-repository-package
source-repository-package
type: git
location: https://github.com/alpmestan/accelerate.git
tag: 640b5af87cea94b61c7737d878e6f7f2fca5c015
location: https://github.com/adinapoli/llvm-hs.git
tag: 7533a9ccd3bfe77141745f6b61039a26aaf5c83b
subdir: llvm-hs
llvm-hs-pure
source-repository-package
type: git
location: https://github.com/alpmestan/accelerate-arithmetic.git
tag: a110807651036ca2228a76507ee35bbf7aedf87a
location: https://github.com/adinapoli/text16-compat.git
tag: 85533b5d597e6fc5498411b4bcfc76380ec80d71
source-repository-package
type: git
location: https://github.com/alpmestan/accelerate-llvm.git
tag: 944f5a4aea35ee6aedb81ea754bf46b131fce9e3
subdir: accelerate-llvm-native/
accelerate-llvm/
location: https://github.com/adinapoli/wikiparsec.git
tag: b3519a0351ae9515497680571f76200c24dedb53
source-repository-package
type: git
location: https://github.com/alpmestan/ekg-json.git
tag: fd7e5d7325939103cd87d0dc592faf644160341c
location: https://github.com/alpmestan/accelerate-arithmetic.git
tag: a110807651036ca2228a76507ee35bbf7aedf87a
source-repository-package
type: git
......@@ -56,6 +73,11 @@ source-repository-package
tag: bc6ca8058077b0b5702ea4b88bd4189cfcad267a
subdir: sparse-linear
source-repository-package
type: git
location: https://github.com/chessai/eigen.git
tag: 8fff32a43df743c8c83428a86dd566a0936a4fba
source-repository-package
type: git
location: https://github.com/delanoe/data-time-segment.git
......@@ -94,17 +116,17 @@ source-repository-package
source-repository-package
type: git
location: https://gitlab.iscpif.fr/gargantext/crawlers/hal.git
tag: 25a1e9558075462a82660987920a698b8863dd63
tag: bfa9069b4ff70f341ca3244e8aff9e83eb4b8b73
source-repository-package
type: git
location: https://gitlab.iscpif.fr/gargantext/crawlers/isidore.git
tag: 3db385e767d2100d8abe900833c6e7de3ac55e1b
tag: e9b594a4718acc06db037f6d3f429a90db76c267
source-repository-package
type: git
location: https://gitlab.iscpif.fr/gargantext/crawlers/istex.git
tag: 9b1bd17f3ed38eab83e675bb68278922217a9c73
tag: c0a08d62c40a169b7934ceb7cb12c39952160e7a
source-repository-package
type: git
......@@ -114,7 +136,7 @@ source-repository-package
source-repository-package
type: git
location: https://gitlab.iscpif.fr/gargantext/crawlers/pubmed.git
tag: 234ad423fa682307ff4843ae4acd725dcc6ffc55
tag: 300764df4f78ea6175535f9b78b884cc2aa9da61
source-repository-package
type: git
......@@ -124,12 +146,12 @@ source-repository-package
source-repository-package
type: git
location: https://gitlab.iscpif.fr/gargantext/haskell-gargantext-prelude
tag: 5a8dc3a0a1a4774ec2eb9df5f0f0b0a7dd172f09
tag: 618f711a530df56caefbb1577c4bf3d5ff45e214
source-repository-package
type: git
location: https://gitlab.iscpif.fr/gargantext/haskell-igraph.git
tag: 2a28524134b68421f30f6e97961063018f814a82
tag: 9f8a2f4a014539826a4eab3215cc70c0813f20cb
source-repository-package
type: git
......@@ -148,8 +170,8 @@ source-repository-package
source-repository-package
type: git
location: https://gitlab.iscpif.fr/gargantext/servant-xml-conduit.git
tag: 339fd608341bd2652cf5c0e9e76a3293acffbea6
location: https://gitlab.iscpif.fr/gargantext/patches-class.git
tag: 3668d28607867a88b2dfc62158139b3cfd629ddb
source-repository-package
type: git
......@@ -165,13 +187,18 @@ source-repository-package
source-repository-package
type: git
location: https://github.com/robstewart57/rdf4h.git
tag: 4fd2edf30c141600ffad6d730cc4c1c08a6dbce4
location: https://gitlab.iscpif.fr/gargantext/servant-xml-conduit.git
tag: 339fd608341bd2652cf5c0e9e76a3293acffbea6
source-repository-package
type: git
location: https://github.com/rspeer/wikiparsec.git
tag: 9637a82344bb70f7fa8f02e75db3c081ccd434ce
location: https://github.com/MercuryTechnologies/ekg-json.git
tag: 232db57d6ce0940fcc902adf30a9ed3f3561f21d
source-repository-package
type: git
location: https://github.com/robstewart57/rdf4h.git
tag: 4fd2edf30c141600ffad6d730cc4c1c08a6dbce4
allow-older: *
allow-newer: *
......
This source diff could not be displayed because it is too large. You can view the blob instead.
FROM ubuntu:jammy
FROM adinapoli/gargantext:v3.1
## NOTA BENE: In order for this to be built successfully, you have to run ./devops/coreNLP/build.sh first.
ARG DEBIAN_FRONTEND=noninteractive
ARG GHC=8.10.7
ARG STACK=2.7.3
ARG CABAL=3.10.1.0
ARG CORENLP=4.5.4
ARG CORE
COPY ./shell.nix /builds/gargantext/shell.nix
COPY ./nix/pkgs.nix /builds/gargantext/nix/pkgs.nix
COPY ./nix/pinned-22.05.nix /builds/gargantext/nix/pinned-22.05.nix
COPY ./nix/overlays/Cabal-3.10.1.0.nix /builds/gargantext/nix/overlays/Cabal-3.10.1.0.nix
COPY ./nix/overlays/cabal-install-3.10.1.0.nix /builds/gargantext/nix/overlays/cabal-install-3.10.1.0.nix
COPY ./nix/overlays/cabal-install-solver-3.10.1.0.nix /builds/gargantext/nix/overlays/cabal-install-solver-3.10.1.0.nix
COPY ./nix/overlays/Cabal-syntax-3.10.1.0.nix /builds/gargantext/nix/overlays/Cabal-syntax-3.10.1.0.nix
COPY ./nix/overlays/directory-1.3.7.0.nix /builds/gargantext/nix/overlays/directory-1.3.7.0.nix
COPY ./nix/overlays/hackage-security-0.6.2.3.nix /builds/gargantext/nix/overlays/hackage-security-0.6.2.3.nix
COPY ./nix/overlays/process-1.6.15.0.nix /builds/gargantext/nix/overlays/process-1.6.15.0.nix
COPY ./nix/pinned-23.11.nix /builds/gargantext/nix/pinned-23.11.nix
COPY ./devops/coreNLP/build.sh /root/devops/coreNLP/build.sh
COPY ./devops/coreNLP/startServer.sh /root/devops/coreNLP/startServer.sh
ENV TZ=Europe/Rome
RUN apt-get update && \
apt-get install --no-install-recommends -y \
apt-transport-https \
autoconf \
automake \
build-essential \
ca-certificates \
curl \
gcc \
git \
gnupg2 \
libffi-dev \
libffi7 \
libgmp-dev \
libgmp10 \
libncurses-dev \
libncurses5 \
libnuma-dev \
libtinfo5 \
locales \
lsb-release \
software-properties-common \
strace \
sudo \
wget \
vim \
xz-utils \
zlib1g-dev \
openjdk-18-jdk \
unzip && \
apt-get clean && rm -rf /var/lib/apt/lists/* && \
mkdir -m 0755 /nix && groupadd -r nixbld && chown root /nix && \
for n in $(seq 1 10); do useradd -c "Nix build user $n" -d /var/empty -g nixbld -G nixbld -M -N -r -s "$(command -v nologin)" "nixbld$n"; done
RUN gpg --batch --keyserver keys.openpgp.org --recv-keys 7D1E8AFD1D4A16D71FADA2F2CCC85C0E40C06A8C && \
gpg --batch --keyserver keyserver.ubuntu.com --recv-keys FE5AB6C91FEA597C3B31180B73EDE9E8CFBAEF01
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
RUN cd /root/devops/coreNLP; ./build.sh
RUN set -o pipefail && \
bash <(curl -L https://releases.nixos.org/nix/nix-2.15.0/install) --no-daemon && \
locale-gen en_US.UTF-8 && chown root -R /nix
ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en' LC_ALL='en_US.UTF-8'
ENV USER=root
ENV SHELL /bin/bash
RUN . "$HOME/.nix-profile/etc/profile.d/nix.sh" && \
mkdir -p "/builds/gargantext/" && chmod 777 -R "/builds/gargantext" && \
echo "source $HOME/.nix-profile/etc/profile.d/nix.sh" >> "$HOME/.bashrc" && \
echo `which nix-env`
ENV PATH=/root/.nix-profile/bin:$PATH
RUN . $HOME/.bashrc && nix-env --version
ENV PATH=/root/.local/bin:$PATH
RUN cd /builds/gargantext && nix-shell
......
This diff is collapsed.
......@@ -5,7 +5,7 @@ cabal-version: 2.0
-- see: https://github.com/sol/hpack
name: gargantext
version: 0.0.6.9.9.9.4.4
version: 0.0.6.9.9.9.4.9.1
synopsis: Search, map, share
description: Please see README.md
category: Data
......@@ -45,6 +45,10 @@ flag test-crypto
default: False
manual: True
flag disable-db-obfuscation-executable
default: False
manual: True
library
exposed-modules:
Gargantext
......@@ -62,6 +66,7 @@ library
Gargantext.API.Errors.Types
Gargantext.API.Errors.Types.Backend
Gargantext.API.HashedResponse
Gargantext.API.Middleware
Gargantext.API.Ngrams
Gargantext.API.Ngrams.List
Gargantext.API.Ngrams.List.Types
......@@ -167,6 +172,7 @@ library
Gargantext.Utils.Jobs.Settings
Gargantext.Utils.Jobs.State
Gargantext.Utils.SpacyNLP
Gargantext.Utils.SpacyNLP.Types
Gargantext.Utils.Tuple
Gargantext.Utils.Zip
other-modules:
......@@ -396,7 +402,7 @@ library
RankNTypes
RecordWildCards
StrictData
ghc-options: -Wall -Wincomplete-uni-patterns -Wincomplete-record-updates -Wmissing-signatures -Wunused-binds -Wunused-imports -Werror -freduction-depth=300 -fplugin=Clippy -fprint-potential-instances
ghc-options: -Wall -Wincomplete-uni-patterns -Wincomplete-record-updates -Wmissing-signatures -Wunused-binds -Wunused-imports -Werror -freduction-depth=300 -fprint-potential-instances
if flag(test-crypto)
cpp-options: -DTEST_CRYPTO
build-depends:
......@@ -412,6 +418,7 @@ library
, accelerate-utility ^>= 1.0.0.1
, aeson ^>= 1.5.6.0
, aeson-pretty ^>= 0.8.9
, ansi-terminal
, array ^>= 0.5.4.0
, async ^>= 2.2.4
, attoparsec ^>= 0.13.2.5
......@@ -463,7 +470,6 @@ library
, fullstop ^>= 0.1.4
, gargantext-graph >=0.1.0.0
, gargantext-prelude
, ghc-clippy-plugin ^>= 0.0.0.1
, graphviz ^>= 2999.20.1.0
, hashable ^>= 1.3.0.0
, haskell-igraph ^>= 0.10.4
......@@ -478,24 +484,33 @@ library
, http-media ^>= 0.8.0.0
, http-types ^>= 0.12.3
, hxt ^>= 9.3.1.22
, ihaskell ^>= 0.10.2.2
, ihaskell >= 0.11.0.0
-- necessary for ihaskell to build
, ipython-kernel >= 0.11.0.0
, ini ^>= 0.4.1
, insert-ordered-containers ^>= 0.2.5.1
, iso639
, jose ^>= 0.8.4
, json-stream ^>= 0.4.2.4
, lens ^>= 4.19.2
, lens-aeson < 1.3
, lifted-base ^>= 0.2.3.12
, listsafe ^>= 0.1.0.1
, llvm-hs >= 12.0.0
, located-base ^>= 0.1.1.1
, logging-effect ^>= 1.3.12
, matrix ^>= 0.3.6.1
, monad-control ^>= 1.0.3.1
, monad-logger ^>= 0.3.36
, morpheus-graphql ^>= 0.17.0
, morpheus-graphql-app ^>= 0.17.0
, morpheus-graphql-core ^>= 0.17.0
, morpheus-graphql-subscriptions ^>= 0.17.0
, morpheus-graphql >= 0.17.0 && < 0.25
, morpheus-graphql-app >= 0.17.0 && < 0.25
, morpheus-graphql-client >= 0.17.0 && < 0.25
, morpheus-graphql-code-gen >= 0.17.0 && < 0.25
, morpheus-graphql-code-gen-utils >= 0.17.0 && < 0.25
, morpheus-graphql-core >= 0.17.0 && < 0.25
, morpheus-graphql-server >= 0.17.0 && < 0.25
, morpheus-graphql-subscriptions >= 0.17.0 && < 0.25
, morpheus-graphql-tests >= 0.17.0 && < 0.25
, mtl ^>= 2.2.2
, natural-transformation ^>= 0.4
, network-uri ^>= 2.6.4.1
......@@ -529,26 +544,26 @@ library
, scientific ^>= 0.3.7.0
, semigroups ^>= 0.19.2
, serialise ^>= 0.2.4.0
, servant ^>= 0.18.3
, servant >= 0.18.3 && < 0.20
, servant-auth ^>= 0.4.0.0
, servant-auth-client ^>= 0.4.1.0
, servant-auth-server ^>=0.4.6.0
, servant-auth-swagger ^>= 0.2.10.1
, servant-blaze ^>= 0.9.1
, servant-cassava ^>= 0.10.1
, servant-client ^>= 0.18.3
, servant-client-core ^>= 0.18.3
, servant-client >= 0.18.3 && < 0.20
, servant-client-core >= 0.18.3 && < 0.20
, servant-ekg ^>= 0.3.1
, servant-flatten ^>= 0.2
, servant-job >= 0.2.0.0
, servant-mock ^>= 0.8.7
, servant-multipart ^>= 0.12.1
, servant-server ^>= 0.18.3
, servant-server >= 0.18.3 && < 0.20
, servant-swagger ^>= 1.1.10
, servant-swagger-ui ^>= 0.3.5.3.5.0
, servant-xml-conduit >= 0.1.0.4
, simple-reflect ^>= 0.3.3
, singletons ^>= 2.7
, singletons-th >= 3.1
, split ^>= 0.2.3.4
, stemmer ^>= 0.5.2
, stm ^>= 2.5.0.1
......@@ -572,7 +587,7 @@ library
, uuid ^>= 1.3.15
, validity ^>= 0.11.0.1
, vector ^>= 0.12.3.0
, wai ^>= 3.2.3
, wai >= 3.2.4
, wai-app-static ^>= 3.1.7.3
, wai-cors ^>= 0.2.7
, wai-extra ^>= 3.1.8
......@@ -716,15 +731,18 @@ executable gargantext-db-obfuscation
RecordWildCards
StrictData
ghc-options: -Wall -threaded -rtsopts -with-rtsopts=-N -O2 -Wmissing-signatures
build-depends:
base
, extra
, gargantext
, gargantext-prelude
, optparse-simple
, postgresql-simple ^>= 0.6.4
, text
default-language: Haskell2010
if flag(disable-db-obfuscation-executable)
buildable: False
else
build-depends:
base
, extra
, gargantext
, gargantext-prelude
, optparse-simple
, postgresql-simple ^>= 0.6.4
, text
default-language: Haskell2010
executable gargantext-import
main-is: Main.hs
......
......@@ -40,6 +40,9 @@ FRAME_ISTEX_URL = URL_TO_CHANGE
MAX_DOCS_PARSERS = 1000000
MAX_DOCS_SCRAPERS = 10000
# Specific Services
EPO_API_URL =
# in seconds
JS_JOB_TIMEOUT = 1800
JS_ID_TIMEOUT = 1800
......
This diff is collapsed.
import (builtins.fetchGit {
name = "nixos-23.05";
url = "https://github.com/nixos/nixpkgs";
ref = "refs/heads/nixos-23.05";
rev = "4ecab3273592f27479a583fb6d975d4aba3486fe";
})
import (builtins.fetchGit {
name = "nixos-23.11";
url = "https://github.com/nixos/nixpkgs";
ref = "refs/heads/nixos-23.11";
rev = "057f9aecfb71c4437d2b27d3323df7f93c010b7e";
})
{ pkgs ? import ./pinned-22.05.nix {} }:
{ pkgs ? import ./pinned-23.11.nix {} }:
rec {
inherit pkgs;
# If we are on a Mac, in order to build successfully with cabal we need a bit more work.
ghc = if pkgs.stdenv.isDarwin
then haskell1.compiler.ghc8107.overrideAttrs (finalAttrs: previousAttrs: {
# See https://github.com/NixOS/nixpkgs/pull/149942/files
ghc947 = if pkgs.stdenv.isDarwin
then pkgs.haskell.compiler.ghc947.overrideAttrs (finalAttrs: previousAttrs: {
patches = previousAttrs.patches ++ [
# Reverts the linking behavior of GHC to not resolve `-libc++` to `c++`.
(pkgs.fetchpatch {
url = "https://raw.githubusercontent.com/input-output-hk/haskell.nix/613ec38dbd62ab7929178c9c7ffff71df9bb86be/overlays/patches/ghc/ghc-macOS-loadArchive-fix.patch";
sha256 = "0IUpuzjZb1G+gP3q6RnwQbW4mFzc/OZ/7QqZy+57kx0=";
url = "https://gist.githubusercontent.com/adinapoli/bf722db15f72763bf79dff13a3104b6f/raw/362da0aa3db5c530e0d276183ba68569f216d65a/ghc947-macOS-loadArchive-fix.patch";
sha256 = "sha256-0tHrkWRKFWUewj3uIA0DujVCXo1qgX2lA5p0MIsAHYs=";
})
];
})
else pkgs.haskell.compiler.ghc8107;
else pkgs.haskell.compiler.ghc947;
cabal_install_3_10_1_0 = pkgs.haskell.lib.compose.justStaticExecutables pkgs.haskell.packages.ghc947.cabal-install;
graphviz = pkgs.graphviz.overrideAttrs (finalAttrs: previousAttrs: {
# Increase the YY_BUF_SIZE, see https://gitlab.iscpif.fr/gargantext/haskell-gargantext/issues/290#note_9015
patches = [
......@@ -24,26 +23,12 @@ rec {
})
];
});
haskell1 = pkgs.haskell // {
packages = pkgs.haskell.packages // {
ghc8107 = pkgs.haskell.packages.ghc8107.override {
overrides = self: super: {
directory = self.callPackage ./overlays/directory-1.3.7.0.nix {};
process = self.callPackage ./overlays/process-1.6.15.0.nix {};
hackage-security = self.callPackage ./overlays/hackage-security-0.6.2.3.nix {};
Cabal = self.callPackage ./overlays/Cabal-3.10.1.0.nix {};
Cabal-syntax = self.callPackage ./overlays/Cabal-syntax-3.10.1.0.nix {};
cabal-install-solver = self.callPackage ./overlays/cabal-install-solver-3.10.1.0.nix {};
cabal-install = self.callPackage ./overlays/cabal-install-3.10.1.0.nix {};
};
};
};
};
cabal_install_3_10_1_0 = pkgs.haskell.lib.compose.justStaticExecutables haskell1.packages.ghc8107.cabal-install;
igraph_0_10_4 = pkgs.igraph.overrideAttrs (finalAttrs: previousAttrs: {
version = "0.10.4";
nativeBuildInputs = previousAttrs.nativeBuildInputs or [] ++ [ pkgs.clang_12 ];
src = pkgs.fetchFromGitHub {
owner = "igraph";
repo = "igraph";
......@@ -77,7 +62,7 @@ rec {
"-DIGRAPH_USE_INTERNAL_GMP=OFF"
"-DIGRAPH_USE_INTERNAL_PLFIT=OFF"
"-DIGRAPH_GLPK_SUPPORT=ON"
"-DIGRAPH_GRAPHML_SUPPORT=ON"
"-DIGRAPH_GRAPHML_SUPPORT=OFF"
"-DIGRAPH_OPENMP_SUPPORT=ON"
"-DIGRAPH_ENABLE_LTO=AUTO"
"-DIGRAPH_ENABLE_TLS=ON"
......@@ -97,8 +82,11 @@ rec {
});
hsBuildInputs = [
ghc
ghc947
cabal_install_3_10_1_0
pkgs.haskellPackages.alex
pkgs.haskellPackages.happy
pkgs.haskellPackages.pretty-show
];
nonhsBuildInputs = with pkgs; [
bzip2
......@@ -114,20 +102,23 @@ rec {
lnav
lzma
pcre
pkgconfig
pkg-config
postgresql
xz
zlib
blas
gfortran7
# gfortran7.cc.lib
expat
icu
graphviz
llvm_9
clang_12
llvm_12
gcc12
igraph_0_10_4
libpqxx
libsodium
zeromq
curl
] ++ ( lib.optionals stdenv.isDarwin [
darwin.apple_sdk.frameworks.Accelerate
]);
......@@ -135,8 +126,11 @@ rec {
shellHook = ''
export LD_LIBRARY_PATH="${pkgs.gfortran7.cc.lib}:${libPaths}:$LD_LIBRARY_PATH"
export LIBRARY_PATH="${pkgs.gfortran7.cc.lib}:${libPaths}"
export PATH="${pkgs.gccStdenv}/bin:$PATH"
export NIX_CC="${pkgs.gccStdenv}"
export CC="${pkgs.gccStdenv}/bin/gcc"
'';
shell = pkgs.mkShell {
shell = pkgs.mkShell.override { stdenv = pkgs.gccStdenv; } {
name = "gargantext-shell";
buildInputs = hsBuildInputs ++ nonhsBuildInputs;
inherit shellHook;
......
......@@ -45,6 +45,7 @@ import Gargantext.API.Admin.EnvTypes (Env, Mode(..))
import Gargantext.API.Admin.Settings (newEnv)
import Gargantext.API.Admin.Types (FireWall(..), PortNumber, cookieSettings, jwtSettings, settings)
import Gargantext.API.EKG
import Gargantext.API.Middleware (logStdoutDevSanitised)
import Gargantext.API.Ngrams (saveNodeStoryImmediate)
import Gargantext.API.Routes
import Gargantext.API.Server (server)
......@@ -58,7 +59,7 @@ import Network.Wai.Handler.Warp hiding (defaultSettings)
import Network.Wai.Middleware.Cors
import Network.Wai.Middleware.RequestLogger
import Paths_gargantext (getDataDir)
import Servant
import Servant hiding (Header)
import System.Cron.Schedule qualified as Cron
import System.FilePath
......@@ -210,7 +211,10 @@ makeDevMiddleware mode = do
--pure (warpS, logWare . checkOriginAndHost . corsMiddleware)
case mode of
Prod -> pure $ logStdout . corsMiddleware
_ -> pure $ logStdoutDev . corsMiddleware
_ -> do
loggerMiddleware <- logStdoutDevSanitised
pure $ loggerMiddleware . corsMiddleware
---------------------------------------------------------------------
-- | API Global
......
......@@ -119,9 +119,11 @@ auth :: (HasSettings env, HasAuthenticationError err, DbCmd' env err m)
auth (AuthRequest u p) = do
checkAuthRequest' <- checkAuthRequest u p
case checkAuthRequest' of
InvalidUser -> pure $ AuthResponse Nothing (Just $ AuthInvalid "Invalid username or password")
InvalidPassword -> pure $ AuthResponse Nothing (Just $ AuthInvalid "Invalid username or password")
Valid to trId uId -> pure $ AuthResponse (Just $ AuthValid to trId uId) Nothing
InvalidUser -> do
throwError $ _AuthenticationError # InvalidUsernameOrPassword
InvalidPassword -> do
throwError $ _AuthenticationError # InvalidUsernameOrPassword
Valid to trId uId -> pure $ AuthResponse to trId uId
--type instance BasicAuthCfg = BasicAuthData -> IO (AuthResult AuthenticatedUser)
......
......@@ -23,7 +23,7 @@ import Gargantext.Core.Utils.Prefix (unPrefix, unPrefixSwagger)
import Gargantext.Database.Admin.Types.Node (NodeId(..), ListId, DocId, UserId (..))
import Gargantext.Prelude hiding (reverse)
import Servant.Auth.Server
import Test.QuickCheck (elements, oneof)
import Test.QuickCheck (elements)
import Test.QuickCheck.Arbitrary (Arbitrary, arbitrary)
import qualified Crypto.JWT as Jose
......@@ -35,21 +35,12 @@ data AuthRequest = AuthRequest { _authReq_username :: Username
}
deriving (Generic)
-- TODO: Use an HTTP error to wrap AuthInvalid
data AuthResponse = AuthResponse { _authRes_valid :: Maybe AuthValid
, _authRes_inval :: Maybe AuthInvalid
data AuthResponse = AuthResponse { _authRes_token :: Token
, _authRes_tree_id :: TreeId
, _authRes_user_id :: UserId
}
deriving (Generic, Eq, Show)
data AuthInvalid = AuthInvalid { _authInv_message :: Text }
deriving (Generic, Eq, Show)
data AuthValid = AuthValid { _authVal_token :: Token
, _authVal_tree_id :: TreeId
, _authVal_user_id :: UserId
}
deriving (Generic, Eq, Show)
type Token = Text
type TreeId = NodeId
......@@ -61,25 +52,21 @@ data AuthenticatedUser = AuthenticatedUser
, _auth_user_id :: UserId
} deriving (Generic)
$(deriveJSON (JSON.defaultOptions { JSON.fieldLabelModifier = tail . dropWhile ((/=) '_') . tail }) ''AuthenticatedUser)
makeLenses ''AuthenticatedUser
instance ToSchema AuthenticatedUser where
declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_authUser_")
instance ToJWT AuthenticatedUser
instance FromJWT AuthenticatedUser
data AuthenticationError
= LoginFailed NodeId UserId Jose.Error
| InvalidUsernameOrPassword
| UserNotAuthorized UserId Text
deriving (Show, Eq)
-- TODO-SECURITY why is the CookieSettings necessary?
type AuthContext = '[JWTSettings, CookieSettings] -- , BasicAuthCfg
-- | Instances
$(deriveJSON (unPrefix "_authReq_") ''AuthRequest)
instance ToSchema AuthRequest where
declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_authReq_")
......@@ -89,26 +76,10 @@ instance Arbitrary AuthRequest where
, p <- arbitraryPassword
]
$(deriveJSON (unPrefix "_authRes_") ''AuthResponse)
instance ToSchema AuthResponse where
declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_authRes_")
instance Arbitrary AuthResponse where
arbitrary = oneof [ AuthResponse Nothing . Just <$> arbitrary
, flip AuthResponse Nothing . Just <$> arbitrary ]
$(deriveJSON (unPrefix "_authInv_") ''AuthInvalid)
instance ToSchema AuthInvalid where
declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_authInv_")
instance Arbitrary AuthInvalid where
arbitrary = elements [ AuthInvalid m
| m <- [ "Invalid user", "Invalid password"]
]
$(deriveJSON (unPrefix "_authVal_") ''AuthValid)
instance ToSchema AuthValid where
declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_authVal_")
instance Arbitrary AuthValid where
arbitrary = elements [ AuthValid to' tr u
arbitrary = elements [ AuthResponse to' tr u
| to' <- ["token0", "token1"]
, tr <- map UnsafeMkNodeId [1..3]
, u <- map UnsafeMkUserId [1..3]
......@@ -124,21 +95,39 @@ type Password = Text
data ForgotPasswordRequest = ForgotPasswordRequest { _fpReq_email :: Email }
deriving (Generic )
$(deriveJSON (unPrefix "_fpReq_") ''ForgotPasswordRequest)
instance ToSchema ForgotPasswordRequest where
declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_fpReq_")
data ForgotPasswordResponse = ForgotPasswordResponse { _fpRes_status :: Text }
deriving (Generic )
$(deriveJSON (unPrefix "_fpRes_") ''ForgotPasswordResponse)
instance ToSchema ForgotPasswordResponse where
declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_fpRes_")
data ForgotPasswordGet = ForgotPasswordGet {_fpGet_password :: Password}
deriving (Generic )
$(deriveJSON (unPrefix "_fpGet_") ''ForgotPasswordGet)
instance ToSchema ForgotPasswordGet where
declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_fpGet_")
makeLenses ''AuthValid
--
-- Lenses
--
makeLenses ''AuthResponse
--
-- JSON instances
--
$(deriveJSON (JSON.defaultOptions { JSON.fieldLabelModifier = tail . dropWhile ((/=) '_') . tail }) ''AuthenticatedUser)
$(deriveJSON (unPrefix "_authReq_") ''AuthRequest)
$(deriveJSON (unPrefix "_authRes_") ''AuthResponse)
$(deriveJSON (unPrefix "_fpReq_") ''ForgotPasswordRequest)
$(deriveJSON (unPrefix "_fpRes_") ''ForgotPasswordResponse)
$(deriveJSON (unPrefix "_fpGet_") ''ForgotPasswordGet)
--
-- JWT instances
--
instance ToJWT AuthenticatedUser
instance FromJWT AuthenticatedUser
......@@ -24,7 +24,8 @@ import Control.Lens
import Control.Monad.Logger (LogLevel(..))
import Control.Monad.Reader
import Data.ByteString.Lazy qualified as L
import Data.Pool (Pool, createPool)
import Data.Pool (Pool)
import qualified Data.Pool as Pool
import Database.PostgreSQL.Simple (Connection, connect, close, ConnectInfo)
import Gargantext.API.Admin.EnvTypes
import Gargantext.API.Admin.Types
......@@ -217,7 +218,7 @@ newEnv logger port file = do
}
newPool :: ConnectInfo -> IO (Pool Connection)
newPool param = createPool (connect param) close 1 (60*60) 8
newPool param = Pool.newPool $ Pool.setNumStripes (Just 1) $ Pool.defaultPoolConfig (connect param) close (60*60) 8
{-
cleanEnv :: (HasConfig env, HasRepo env) => env -> IO ()
......
......@@ -104,18 +104,12 @@ messages = toMessage $ [ (400, ["Ill formed query "])
instance Arbitrary Message where
arbitrary = elements messages
instance FromJSON Message
instance ToJSON Message
instance ToSchema Message
-----------------------------------------------------------------------
data Counts = Counts { results :: [Either Message Count]
} deriving (Eq, Show, Generic)
instance FromJSON Counts
instance ToJSON Counts
instance Arbitrary Counts where
arbitrary = elements [Counts [ Right (Count Pubmed (Just 20 ))
, Right (Count IsTex (Just 150))
......@@ -131,8 +125,6 @@ data Count = Count { count_name :: Scraper
}
deriving (Eq, Show, Generic)
$(deriveJSON (unPrefix "count_") ''Count)
instance ToSchema Count where
declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "count_")
--instance Arbitrary Count where
......@@ -141,3 +133,16 @@ instance ToSchema Count where
-----------------------------------------------------------------------
count :: Monad m => Query -> m Counts
count _ = undefined
--
-- JSON instances
--
instance FromJSON Message
instance ToJSON Message
$(deriveJSON (unPrefix "count_") ''Count)
instance FromJSON Counts
instance ToJSON Counts
......@@ -12,6 +12,7 @@ module Gargantext.API.Errors (
-- * Conversion functions
, backendErrorToFrontendError
, frontendErrorToServerError
, frontendErrorToGQLServerError
-- * Temporary shims
, showAsServantJSONErr
......@@ -39,8 +40,11 @@ $(deriveHttpStatusCode ''BackendErrorCode)
data GargErrorScheme
= -- | The old error scheme.
GES_old
-- | The new error scheme, that returns a 'FrontendError'.
-- | The new error scheme, that returns a 'FrontendError'.
| GES_new
-- | Error scheme for GraphQL, has to be slightly different
-- {errors: [{message, extensions: { ... }}]}
-- https://spec.graphql.org/June2018/#sec-Errors
deriving (Show, Eq)
-- | Transforms a backend internal error into something that the frontend
......@@ -48,26 +52,56 @@ data GargErrorScheme
-- as we later encode this into a 'ServerError' in the main server handler.
backendErrorToFrontendError :: BackendInternalError -> FrontendError
backendErrorToFrontendError = \case
InternalNodeError nodeError
-> nodeErrorToFrontendError nodeError
InternalTreeError treeError
-> treeErrorToFrontendError treeError
InternalValidationError validationError
-> mkFrontendErr' "A validation error occurred"
$ FE_validation_error $ case prettyValidation validationError of
Nothing -> "unknown_validation_error"
Just v -> T.pack v
InternalAuthenticationError authError
-> authErrorToFrontendError authError
InternalServerError internalServerError
-> internalServerErrorToFrontendError internalServerError
InternalNodeError nodeError
-> nodeErrorToFrontendError nodeError
InternalJobError jobError
-> jobErrorToFrontendError jobError
InternalServerError internalServerError
-> internalServerErrorToFrontendError internalServerError
InternalTreeError treeError
-> treeErrorToFrontendError treeError
-- As this carries a 'SomeException' which might exposes sensible
-- information, we do not send to the frontend its content.
InternalUnexpectedError _
-> let msg = T.pack $ "An unexpected error occurred. Please check your server logs."
in mkFrontendErr' msg $ FE_internal_server_error msg
InternalValidationError validationError
-> mkFrontendErr' "A validation error occurred"
$ FE_validation_error $ case prettyValidation validationError of
Nothing -> "unknown_validation_error"
Just v -> T.pack v
frontendErrorToGQLServerError :: FrontendError -> ServerError
frontendErrorToGQLServerError fe@(FrontendError diag ty _) =
ServerError { errHTTPCode = HTTP.statusCode $ backendErrorTypeToErrStatus ty
, errReasonPhrase = T.unpack diag
, errBody = JSON.encode (GraphQLError fe)
, errHeaders = mempty
}
authErrorToFrontendError :: AuthenticationError -> FrontendError
authErrorToFrontendError = \case
-- For now, we ignore the Jose error, as they are too specific
-- (i.e. they should be logged internally to Sentry rather than shared
-- externally).
LoginFailed nid uid _
-> mkFrontendErr' "Invalid username/password, or invalid session token." $ FE_login_failed_error nid uid
InvalidUsernameOrPassword
-> mkFrontendErr' "Invalid username or password." $ FE_login_failed_invalid_username_or_password
UserNotAuthorized uId msg
-> mkFrontendErr' "User not authorized. " $ FE_user_not_authorized uId msg
-- | Converts a 'FrontendError' into a 'ServerError' that the servant app can
-- return to the frontend.
frontendErrorToServerError :: FrontendError -> ServerError
frontendErrorToServerError fe@(FrontendError diag ty _) =
ServerError { errHTTPCode = HTTP.statusCode $ backendErrorTypeToErrStatus ty
, errReasonPhrase = T.unpack diag
, errBody = JSON.encode fe
, errHeaders = mempty
}
internalServerErrorToFrontendError :: ServerError -> FrontendError
internalServerErrorToFrontendError = \case
......@@ -85,14 +119,6 @@ jobErrorToFrontendError = \case
UnknownJob jobId -> mkFrontendErrNoDiagnostic $ FE_job_unknown_job jobId
JobException err -> mkFrontendErrNoDiagnostic $ FE_job_generic_exception (T.pack $ displayException err)
authErrorToFrontendError :: AuthenticationError -> FrontendError
authErrorToFrontendError = \case
-- For now, we ignore the Jose error, as they are too specific
-- (i.e. they should be logged internally to Sentry rather than shared
-- externally).
LoginFailed nid uid _
-> mkFrontendErr' "Invalid username/password, or invalid session token." $ FE_login_failed_error nid uid
nodeErrorToFrontendError :: NodeError -> FrontendError
nodeErrorToFrontendError ne = case ne of
NoListFound lid
......@@ -144,16 +170,6 @@ treeErrorToFrontendError te = case te of
EmptyRoot -> mkFrontendErrShow FE_tree_empty_root
TooManyRoots roots -> mkFrontendErrShow $ FE_tree_too_many_roots roots
-- | Converts a 'FrontendError' into a 'ServerError' that the servant app can
-- return to the frontend.
frontendErrorToServerError :: FrontendError -> ServerError
frontendErrorToServerError fe@(FrontendError diag ty _) =
ServerError { errHTTPCode = HTTP.statusCode $ backendErrorTypeToErrStatus ty
, errReasonPhrase = T.unpack diag
, errBody = JSON.encode fe
, errHeaders = mempty
}
showAsServantJSONErr :: BackendInternalError -> ServerError
showAsServantJSONErr (InternalNodeError err@(NoListFound {})) = err404 { errBody = JSON.encode err }
showAsServantJSONErr (InternalNodeError err@NoRootFound{}) = err404 { errBody = JSON.encode err }
......
This diff is collapsed.
......@@ -37,6 +37,8 @@ data BackendErrorCode
| EC_400__validation_error
-- authentication errors
| EC_403__login_failed_error
| EC_403__login_failed_invalid_username_or_password
| EC_403__user_not_authorized
-- tree errors
| EC_404__tree_root_not_found
| EC_404__tree_empty_root
......
......@@ -22,7 +22,7 @@ import Data.ByteString.Lazy.Char8 ( ByteString )
import Data.Morpheus ( App, deriveApp )
import Data.Morpheus.Server ( httpPlayground )
import Data.Morpheus.Subscriptions ( Event (..), httpPubApp )
import Data.Morpheus.Types ( GQLRequest, GQLResponse, GQLType, RootResolver(..), Undefined(..) )
import Data.Morpheus.Types ( GQLRequest, GQLResponse, GQLType, RootResolver(..), Undefined, defaultRootResolver)
import Data.Proxy
import Gargantext.API.Admin.Auth.Types (AuthenticatedUser)
import Gargantext.API.Admin.Orchestrator.Types (JobLog)
......@@ -65,9 +65,9 @@ data Query m
, context_ngrams :: GQLCTX.ContextNgramsArgs -> m [Text]
, contexts :: GQLCTX.NodeContextArgs -> m [GQLCTX.NodeContextGQL]
, contexts_for_ngrams :: GQLCTX.ContextsForNgramsArgs -> m [GQLCTX.ContextGQL]
, imt_schools :: GQLIMT.SchoolsArgs -> m [GQLIMT.School]
, imt_schools :: m [GQLIMT.School]
, job_logs :: GQLAT.JobLogArgs -> m (Map Int JobLog)
, languages :: GQLNLP.LanguagesArgs -> m GQLNLP.LanguagesMap
, languages :: m [GQLNLP.LanguageTuple]
, nodes :: GQLNode.NodeArgs -> m [GQLNode.Node]
, nodes_corpus :: GQLNode.CorpusArgs -> m [GQLNode.Corpus]
, node_parent :: GQLNode.NodeParentArgs -> m [GQLNode.Node]
......@@ -111,7 +111,7 @@ rootResolver
-> AccessPolicyManager
-> RootResolver (GargM env BackendInternalError) e Query Mutation Undefined
rootResolver authenticatedUser policyManager =
RootResolver
defaultRootResolver
{ queryResolver = Query { annuaire_contacts = GQLA.resolveAnnuaireContacts
, context_ngrams = GQLCTX.resolveContextNgrams
, contexts = GQLCTX.resolveNodeContext
......@@ -133,7 +133,7 @@ rootResolver authenticatedUser policyManager =
, update_user_epo_api_token = GQLUser.updateUserEPOAPIToken
, delete_team_membership = GQLTeam.deleteTeamMembership
, update_node_context_category = GQLCTX.updateNodeContextCategory }
, subscriptionResolver = Undefined }
}
-- | Main GraphQL "app".
app
......
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE DerivingStrategies #-}
module Gargantext.API.GraphQL.IMT
( School(..)
......@@ -13,10 +13,11 @@ import Gargantext.API.GraphQL.Types
import Gargantext.Core.Ext.IMT (School(..), schools)
import Gargantext.Prelude
data SchoolsArgs
= SchoolsArgs
{ } deriving (Generic, GQLType)
newtype SchoolsArgs
= SchoolsArgs ()
deriving stock (Generic)
deriving anyclass (GQLType)
resolveSchools
:: SchoolsArgs -> GqlM e env [School]
resolveSchools SchoolsArgs { } = pure $ schools
:: GqlM e env [School]
resolveSchools = pure $ schools
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE DerivingStrategies #-}
module Gargantext.API.GraphQL.NLP
( Lang(..)
, LanguagesArgs(..)
, LanguagesMap
, LanguageTuple
, resolveLanguages
)
where
......@@ -18,11 +19,17 @@ import Gargantext.Prelude
import Protolude
import qualified Data.Map.Strict as Map
data LanguagesArgs
= LanguagesArgs
{ } deriving (Generic, GQLType)
newtype LanguagesArgs
= LanguagesArgs ()
deriving stock (Generic)
deriving anyclass (GQLType)
type LanguagesMap = Map.Map Lang NLPServer
data LanguageTuple =
LanguageTuple { lt_lang :: Lang
, lt_server :: NLPServer }
deriving stock (Generic)
deriving anyclass (GQLType)
data NLPServer = NLPServer
{
......@@ -32,11 +39,11 @@ data NLPServer = NLPServer
deriving (Show, Eq, Generic, GQLType)
resolveLanguages
:: HasNLPServer env => LanguagesArgs -> GqlM e env LanguagesMap
resolveLanguages LanguagesArgs { } = do
-- pure $ allLangs
:: HasNLPServer env => GqlM e env [LanguageTuple]
resolveLanguages = do
lift $ do
ns <- view nlpServer
printDebug "[resolveLanguages] nlpServer" ns
pure $ Map.map (\(NLPServerConfig { .. }) -> NLPServer { server
, url = Protolude.show url }) ns
pure $ [LanguageTuple { lt_lang = lang
, lt_server = NLPServer { server, url = Protolude.show url } }
| (lang, NLPServerConfig { .. }) <- Map.toList ns]
......@@ -15,7 +15,7 @@ Portability : POSIX
module Gargantext.API.GraphQL.Node where
import Data.Aeson
import Data.HashMap.Strict qualified as HashMap
import Data.Aeson.KeyMap qualified as KM
import Data.Morpheus.Types ( GQLType )
import Data.Text qualified as T
import Gargantext.API.Admin.Auth.Types
......@@ -126,7 +126,7 @@ toCorpus N.Node { .. } = Corpus { id = NN.unNodeId _node_id
pubmedAPIKeyFromValue :: Value -> Maybe PUBMED.APIKey
pubmedAPIKeyFromValue (Object kv) =
case HashMap.lookup "pubmed_api_key" kv of
case KM.lookup "pubmed_api_key" kv of
Nothing -> Nothing
Just v -> case fromJSON v of
Error _ -> Nothing
......
......@@ -16,6 +16,7 @@ module Gargantext.API.GraphQL.Team where
import Data.Morpheus.Types (GQLType, ResolverM)
import Data.Text qualified as T
import Gargantext.API.Admin.Auth.Types (AuthenticationError(..))
import Gargantext.API.Admin.Types (HasSettings)
import Gargantext.API.Errors.Types
import Gargantext.API.GraphQL.Types (GqlM)
......@@ -86,10 +87,12 @@ deleteTeamMembership TeamDeleteMArgs { token, shared_folder_id, team_node_id } =
[] -> panicTrace $ "[deleteTeamMembership] User with id " <> T.pack (show $ uId teamNode) <> " doesn't exist."
(( _, node_u):_) -> do
testAuthUser <- lift $ authUser (nId node_u) token
case testAuthUser of
Invalid -> panicTrace "[deleteTeamMembership] failed to validate user"
lift $ case testAuthUser of
-- Invalid -> panicTrace "[deleteTeamMembership] failed to validate user"
Invalid -> do
throwError $ InternalAuthenticationError $ UserNotAuthorized (uId node_u) "This user is not team owner"
Valid -> do
lift $ deleteMemberShip [(UnsafeMkNodeId shared_folder_id, UnsafeMkNodeId team_node_id)]
deleteMemberShip [(UnsafeMkNodeId shared_folder_id, UnsafeMkNodeId team_node_id)]
where
uId Node { _node_user_id } = _node_user_id
nId Node { _node_id } = _node_id
......@@ -8,8 +8,11 @@ Stability : experimental
Portability : POSIX
-}
{-# OPTIONS_GHC -Wno-deprecations #-} -- FIXME(adn) GraphQL will need updating.
module Gargantext.API.GraphQL.Utils where
import Control.Lens ((^.))
import Control.Lens.Getter (view)
import Data.Morpheus.Types (GQLTypeOptions, fieldLabelModifier)
import Data.Text qualified as T
......@@ -20,7 +23,6 @@ import Gargantext.Database.Admin.Types.Node (NodeId)
import Gargantext.Database.Prelude (Cmd')
import Gargantext.Prelude
import Servant.Auth.Server (verifyJWT, JWTSettings)
import Control.Lens ((^.))
unPrefix :: T.Text -> GQLTypeOptions -> GQLTypeOptions
unPrefix prefix options = options { fieldLabelModifier = nflm }
......
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ViewPatterns #-}
{-| Edit 'sensitiveKeywords' to extend the list of redacted fields. -}
module Gargantext.API.Middleware (
logStdoutDevSanitised
) where
import Control.Lens
import Control.Monad.Logger
import Data.Aeson qualified as A
import Data.Aeson.Lens qualified as L
import Data.ByteString (ByteString)
import Data.ByteString qualified as BS
import Data.ByteString.Char8 qualified as C8
import Data.CaseInsensitive qualified as CI
import Data.List qualified as L
import Data.String
import Data.Text qualified as T
import Data.Text.Encoding qualified as TE
import Network.HTTP.Types
import Network.HTTP.Types.Header
import Network.Wai
import Network.Wai.Middleware.RequestLogger
import Prelude
import System.Console.ANSI
-- | Like 'logStdoutDev' from \"wai-extra\", but redacts (or omits altogether) payloads which might have
-- sensitive information
logStdoutDevSanitised :: IO Middleware
logStdoutDevSanitised =
  mkRequestLogger loggerSettings
  where
    -- Same settings as the stock dev logger, except that every log line is
    -- produced by our redacting formatter.
    loggerSettings = defaultRequestLoggerSettings
      { outputFormat = CustomOutputFormatWithDetailsAndHeaders customOutput }
-- |
-- Like 'key', but uses 'at' instead of 'ix'. This is handy when
-- adding and removing object keys:
--
-- >>> "{\"a\": 100, \"b\": 200}" & atKey "a" .~ Nothing
-- "{\"b\":200}"
--
-- >>> "{\"a\": 100, \"b\": 200}" & atKey "c" ?~ String "300"
-- "{\"a\":100,\"b\":200,\"c\":\"300\"}"
atKey :: L.AsValue t => T.Text -> Traversal' t (Maybe A.Value)
atKey name = L._Object . at objKey
  where
    -- 'at' wants the object-key type, not 'Text'; go via 'IsString'.
    objKey = fromString (T.unpack name)
{-# INLINE atKey #-}
-- | Custom formatter plugged into the wai-extra request logger: renders the
-- request line (method + path), the query parameters, the request body and
-- the headers, followed by a colour-coded response status and the request
-- duration. Everything potentially sensitive goes through 'sanitiseBody',
-- 'sanitiseQueryItem' and 'sanitiseHeader' before being rendered.
customOutput :: OutputFormatterWithDetailsAndHeaders
customOutput _zonedDate rq status _mb_response_size request_dur (sanitiseBody . mconcat -> reqbody) _raw_response (map sanitiseHeader -> headers) =
  -- The view patterns above redact the (re-assembled, chunked) request body
  -- and the headers before they are ever bound to a name in this scope.
  let params = map sanitiseQueryItem (queryString rq)
  in mkRequestLog params reqbody <> mkResponseLog
  where
    -- Request half of the log line: method, path, query params, body,
    -- then one "  Name: value" line per (already redacted) header.
    mkRequestLog :: [QueryItem] -> ByteString -> LogStr
    mkRequestLog params bdy =
      foldMap toLogStr (ansiMethod' (requestMethod rq))
      <> " "
      <> toLogStr (rawPathInfo rq)
      <> "\n"
      <> foldMap (\(k, mb_v) -> toLogStr $ show (k, mb_v)) params
      <> toLogStr bdy
      <> "\n"
      <> foldMap (\(k, v) -> toLogStr $ mconcat $ ansiColor' White $ " " <> CI.original k <> ": " <> v <> "\n") headers
      <> "\n"
    -- Response half of the log line: colour-coded status and duration.
    mkResponseLog :: LogStr
    mkResponseLog =
      foldMap toLogStr (ansiColor' White " Status: ")
      <> foldMap toLogStr (ansiStatusCode' status (C8.pack (show $ statusCode status) <> " " <> statusMessage status))
      <> " "
      <> "Served in " <> toLogStr (C8.pack $ show $ request_dur)
      <> "\n"
-- | Redacts the values of all top-level 'sensitiveKeywords' in a JSON
-- request body. Non-JSON payloads are returned untouched ('atKey' simply
-- matches nothing on them).
--
-- NOTE(review): only top-level object keys are inspected; a sensitive
-- field nested inside a sub-object or an array is not redacted.
sanitiseBody :: ByteString -> ByteString
sanitiseBody blob = L.foldr (\k acc -> over (atKey k) redact acc) blob sensitiveKeywords
  where
    -- Masks whatever value is present under a sensitive key.
    --
    -- Two fixes over the previous version: the re-check of
    -- @k `elem` sensitiveKeywords@ is gone (the fold only ever visits keys
    -- from that very list, so the guard was always True), and non-string
    -- values (e.g. an API key mistakenly sent as a JSON number or bool)
    -- are now masked too instead of leaking into the logs verbatim.
    redact :: Maybe A.Value -> Maybe A.Value
    redact Nothing  = Nothing
    redact (Just _) = Just $ A.String "*****"
-- | Masks the value of a query-string parameter whose name is in
-- 'sensitiveKeywords'. An empty value stays empty, so the log still shows
-- that the parameter was present without revealing anything.
sanitiseQueryItem :: QueryItem -> QueryItem
sanitiseQueryItem item@(name, mb_value)
  | isSensitive = (name, fmap mask mb_value)
  | otherwise   = item
  where
    isSensitive = TE.decodeUtf8 name `elem` sensitiveKeywords
    mask v
      | C8.null v = mempty
      | otherwise = "*****"
-- | Names of query parameters and JSON body fields whose values must never
-- appear in the logs.
--
-- /NOTE:/ Extend this list to filter for more sensitive keywords.
sensitiveKeywords :: [T.Text]
sensitiveKeywords =
  [ "password"
  , "api_key"
  , "apiKey"
  , "pubmedAPIKey"
  ]
-- | Replaces the value of credential-bearing headers (authorization and
-- cookies, in both directions) with a mask before they reach the log.
sanitiseHeader :: Header -> Header
sanitiseHeader hdr@(hName, _)
  | hName `elem` redactedHeaders = (hName, "*****")
  | otherwise                    = hdr
  where
    redactedHeaders = [hAuthorization, hCookie, hSetCookie]
-- | Wraps a 'ByteString' in the ANSI escape codes that render it in the
-- given (dull, foreground) colour, resetting the terminal afterwards.
ansiColor' :: Color -> BS.ByteString -> [BS.ByteString]
ansiColor' color payload =
  [ sgr [SetColor Foreground Dull color]
  , payload
  , sgr [Reset]
  ]
  where
    sgr = C8.pack . setSGRCode
-- | Tags http method with a unique color.
ansiMethod' :: BS.ByteString -> [BS.ByteString]
ansiMethod' m = ansiColor' methodColour m
  where
    methodColour = case m of
      "GET"    -> Cyan
      "HEAD"   -> Cyan
      "PUT"    -> Green
      "POST"   -> Yellow
      "DELETE" -> Red
      _        -> Magenta
-- | Colour-codes a status label by the leading digit of the numeric status
-- code: 2xx green, 3xx yellow, 4xx red, 5xx magenta, anything else blue.
ansiStatusCode' :: Status -> ByteString -> [BS.ByteString]
ansiStatusCode' (Status code _) label =
  case show code of
    ('2':_) -> ansiColor' Green   label
    ('3':_) -> ansiColor' Yellow  label
    ('4':_) -> ansiColor' Red     label
    ('5':_) -> ansiColor' Magenta label
    _       -> ansiColor' Blue    label
......@@ -120,7 +120,7 @@ import Gargantext.Database.Query.Table.Node (getNode)
import Gargantext.Database.Query.Table.Node.Error (HasNodeError)
import Gargantext.Database.Query.Table.Node.Select
import Gargantext.Database.Schema.Node (node_id, node_parent_id, node_user_id)
import Gargantext.Prelude hiding (log, to, toLower, (%))
import Gargantext.Prelude hiding (log, to, toLower, (%), isInfixOf)
import Gargantext.Prelude.Clock (hasTime, getTime)
import Gargantext.Utils.Jobs (serveJobsAPI, MonadJobStatus(..))
import GHC.Conc (readTVar, writeTVar)
......
......@@ -9,12 +9,9 @@ Portability : POSIX
-}
module Gargantext.API.Ngrams.List.Types where
--{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
module Gargantext.API.Ngrams.List.Types where
--import Control.Lens hiding (elements, Indexed)
import Data.Aeson
......
......@@ -9,7 +9,8 @@ Portability : POSIX
-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
module Gargantext.API.Ngrams.Tools
where
......
......@@ -85,8 +85,6 @@ instance ToParamSchema TabType
instance ToJSON TabType
instance FromJSON TabType
instance ToSchema TabType
instance Arbitrary TabType where
arbitrary = elements [minBound .. maxBound]
instance FromJSONKey TabType where
fromJSONKey = genericFromJSONKey defaultJSONKeyOptions
instance ToJSONKey TabType where
......@@ -162,14 +160,11 @@ deriveJSON (unPrefix "_nre_") ''NgramsRepoElement
makeLenses ''NgramsRepoElement
instance ToSchema NgramsRepoElement where
declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_nre_")
instance Serialise NgramsRepoElement
instance FromField NgramsRepoElement where
fromField = fromJSONField
instance ToField NgramsRepoElement where
toField = toJSONField
instance Serialise (MSet NgramsTerm)
data NgramsElement =
NgramsElement { _ne_ngrams :: NgramsTerm
, _ne_occurrences :: Set ContextId
......@@ -226,9 +221,6 @@ newNgramsElement mayList ngrams =
instance ToSchema NgramsElement where
declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_ne_")
instance Arbitrary NgramsElement where
arbitrary = elements [newNgramsElement Nothing "sport"]
------------------------------------------------------------------------
newtype NgramsTable = NgramsTable [NgramsElement]
......@@ -286,9 +278,6 @@ mockTable = NgramsTable
where
rp n = Just $ RootParent n n
instance Arbitrary NgramsTable where
arbitrary = pure mockTable
instance ToSchema NgramsTable
------------------------------------------------------------------------
......@@ -312,10 +301,6 @@ instance ToParamSchema OrderBy
instance FromJSON OrderBy
instance ToJSON OrderBy
instance ToSchema OrderBy
instance Arbitrary OrderBy
where
arbitrary = elements [minBound..maxBound]
-- | A query on a 'NgramsTable'.
data NgramsSearchQuery = NgramsSearchQuery
......@@ -396,8 +381,6 @@ instance ToSchema a => ToSchema (PatchSet a)
type AddRem = Replace (Maybe ())
instance Serialise AddRem
remPatch, addPatch :: AddRem
remPatch = replace (Just ()) Nothing
addPatch = replace Nothing (Just ())
......@@ -417,9 +400,6 @@ unPatchMSet (PatchMSet a) = a
type ConflictResolutionPatchMSet a = a -> ConflictResolutionReplace (Maybe ())
type instance ConflictResolution (PatchMSet a) = ConflictResolutionPatchMSet a
instance (Serialise a, Ord a) => Serialise (PatchMap a AddRem)
instance (Serialise a, Ord a) => Serialise (PatchMSet a)
-- TODO this breaks module abstraction
makePrisms ''PM.PatchMap
......@@ -448,19 +428,12 @@ instance (Ord a, ToJSON a) => ToJSON (PatchMSet a) where
instance (Ord a, FromJSON a) => FromJSON (PatchMSet a) where
parseJSON = fmap (_PatchMSetIso #) . parseJSON
instance (Ord a, Arbitrary a) => Arbitrary (PatchMSet a) where
arbitrary = (PatchMSet . PM.fromMap) <$> arbitrary
instance ToSchema a => ToSchema (PatchMSet a) where
-- TODO
declareNamedSchema _ = wellNamedSchema "" (Proxy :: Proxy TODO)
type instance Patched (PatchMSet a) = MSet a
instance (Eq a, Arbitrary a) => Arbitrary (Replace a) where
arbitrary = uncurry replace <$> arbitrary
-- If they happen to be equal then the patch is Keep.
instance ToSchema a => ToSchema (Replace a) where
declareNamedSchema (_ :: Proxy (Replace a)) = do
-- TODO Keep constructor is not supported here.
......@@ -504,19 +477,11 @@ instance ToSchema NgramsPatch where
, ("old", nreSch)
, ("new", nreSch)
]
instance Arbitrary NgramsPatch where
arbitrary = frequency [ (9, NgramsPatch <$> arbitrary <*> (replace <$> arbitrary <*> arbitrary))
, (1, NgramsReplace <$> arbitrary <*> arbitrary)
]
instance Serialise NgramsPatch
instance FromField NgramsPatch where
fromField = fromJSONField
instance ToField NgramsPatch where
toField = toJSONField
instance Serialise (Replace ListType)
instance Serialise ListType
type NgramsPatchIso =
MaybePatch NgramsRepoElement (PairPatch (PatchMSet NgramsTerm) (Replace ListType))
......@@ -584,9 +549,6 @@ newtype NgramsTablePatch = NgramsTablePatch (PatchMap NgramsTerm NgramsPatch)
mkNgramsTablePatch :: Map NgramsTerm NgramsPatch -> NgramsTablePatch
mkNgramsTablePatch = NgramsTablePatch . PM.fromMap
instance Serialise NgramsTablePatch
instance Serialise (PatchMap NgramsTerm NgramsPatch)
instance FromField NgramsTablePatch
where
fromField = fromJSONField
......@@ -694,9 +656,6 @@ instance Action NgramsTablePatch (Maybe NgramsTableMap) where
fmap (execState (reParentNgramsTablePatch p)) .
act (p ^. _NgramsTablePatch)
instance Arbitrary NgramsTablePatch where
arbitrary = NgramsTablePatch <$> PM.fromMap <$> arbitrary
-- Should it be less than an Lens' to preserve PatchMap's abstraction.
-- ntp_ngrams_patches :: Lens' NgramsTablePatch (Map NgramsTerm NgramsPatch)
-- ntp_ngrams_patches = _NgramsTablePatch . undefined
......@@ -713,8 +672,6 @@ deriveJSON (unPrefix "_v_") ''Versioned
makeLenses ''Versioned
instance (Typeable a, ToSchema a) => ToSchema (Versioned a) where
declareNamedSchema = wellNamedSchema "_v_"
instance Arbitrary a => Arbitrary (Versioned a) where
arbitrary = Versioned 1 <$> arbitrary -- TODO 1 is constant so far
------------------------------------------------------------------------
type Count = Int
......@@ -728,8 +685,6 @@ deriveJSON (unPrefix "_vc_") ''VersionedWithCount
makeLenses ''VersionedWithCount
instance (Typeable a, ToSchema a) => ToSchema (VersionedWithCount a) where
declareNamedSchema = wellNamedSchema "_vc_"
instance Arbitrary a => Arbitrary (VersionedWithCount a) where
arbitrary = VersionedWithCount 1 1 <$> arbitrary -- TODO 1 is constant so far
toVersionedWithCount :: Count -> Versioned a -> VersionedWithCount a
toVersionedWithCount count (Versioned version data_) = VersionedWithCount version count data_
......@@ -753,8 +708,6 @@ instance (ToJSON s, ToJSON p) => ToJSON (Repo s p) where
toJSON = genericToJSON $ unPrefix "_r_"
toEncoding = genericToEncoding $ unPrefix "_r_"
instance (Serialise s, Serialise p) => Serialise (Repo s p)
makeLenses ''Repo
initRepo :: Monoid s => Repo s p
......@@ -775,11 +728,6 @@ type RepoCmdM env err m =
-- Instances
instance Arbitrary NgramsRepoElement where
arbitrary = elements $ map ngramsElementToRepo ns
where
NgramsTable ns = mockTable
instance FromHttpApiData (Map TableNgrams.NgramsType (Versioned NgramsTableMap))
where
parseUrlPiece x = maybeToEither x (decode $ cs x)
......@@ -818,3 +766,51 @@ instance ToSchema UpdateTableNgramsCharts where
------------------------------------------------------------------------
type NgramsList = (Map TableNgrams.NgramsType (Versioned NgramsTableMap))
--
-- Serialise instances
--
instance Serialise ListType
instance Serialise NgramsRepoElement
instance Serialise NgramsTablePatch
instance Serialise (PatchMap NgramsTerm NgramsPatch)
instance Serialise (MSet NgramsTerm)
instance Serialise AddRem
instance Serialise NgramsPatch
instance Serialise (Replace ListType)
instance (Serialise a, Ord a) => Serialise (PatchMap a AddRem)
instance (Serialise a, Ord a) => Serialise (PatchMSet a)
instance (Serialise s, Serialise p) => Serialise (Repo s p)
--
-- Arbitrary instances
--
-- | Uniform choice over every 'TabType' constructor
-- (requires 'Bounded' and 'Enum' on 'TabType').
instance Arbitrary TabType where
  arbitrary = elements [minBound .. maxBound]
-- | A single fixed sample element; not a varied generator.
instance Arbitrary NgramsElement where
  arbitrary = elements [newNgramsElement Nothing "sport"]
-- | Always the shared 'mockTable'; deterministic by design.
instance Arbitrary NgramsTable where
  arbitrary = pure mockTable
-- | Uniform choice over every 'OrderBy' constructor.
instance Arbitrary OrderBy where
  arbitrary = elements [minBound .. maxBound]
-- | Build a 'PatchMSet' out of an arbitrary underlying map.
instance (Ord a, Arbitrary a) => Arbitrary (PatchMSet a) where
  arbitrary = PatchMSet . PM.fromMap <$> arbitrary
-- | A random (old, new) pair fed to 'replace'; equal values yield 'Keep'.
instance (Eq a, Arbitrary a) => Arbitrary (Replace a) where
  arbitrary = uncurry replace <$> arbitrary
-- If they happen to be equal then the patch is Keep.
-- | Mostly (9/10) a field-wise 'NgramsPatch'; occasionally (1/10) a
-- wholesale 'NgramsReplace'.
instance Arbitrary NgramsPatch where
  arbitrary = frequency [ (9, NgramsPatch <$> arbitrary <*> (replace <$> arbitrary <*> arbitrary))
                        , (1, NgramsReplace <$> arbitrary <*> arbitrary)
                        ]
-- | Wrap an arbitrary map of per-term patches into a table patch.
instance Arbitrary NgramsTablePatch where
  -- Compose the constructors rather than chaining '<$>' through the
  -- function functor (hlint: "Use .").
  arbitrary = NgramsTablePatch . PM.fromMap <$> arbitrary
-- | Version is pinned to 1 for now.
instance Arbitrary a => Arbitrary (Versioned a) where
  arbitrary = Versioned 1 <$> arbitrary -- TODO 1 is constant so far
-- | Version and count are both pinned to 1 for now.
instance Arbitrary a => Arbitrary (VersionedWithCount a) where
  arbitrary = VersionedWithCount 1 1 <$> arbitrary -- TODO 1 is constant so far
-- | Any element of the mock table, converted to its repo representation.
instance Arbitrary NgramsRepoElement where
  arbitrary = elements $ map ngramsElementToRepo ns
    where
      NgramsTable ns = mockTable
......@@ -189,62 +189,6 @@ nodeNodeAPI p uId cId nId = withAccess (Proxy :: Proxy (NodeNodeAPI a)) Proxy uI
nodeNodeAPI' :: GargServer (NodeNodeAPI a)
nodeNodeAPI' = getNodeWith nId p
------------------------------------------------------------------------
-- TODO: make the NodeId type indexed by `a`, then we no longer need the proxy.
nodeAPI :: forall proxy a.
( HyperdataC a, Show a
) => proxy a
-> AuthenticatedUser
-> NodeId
-> ServerT (NodeAPI a) (GargM Env BackendInternalError)
nodeAPI p authenticatedUser targetNode =
withAccess (Proxy :: Proxy (NodeAPI a)) Proxy authenticatedUser (PathNode targetNode) nodeAPI'
where
userRootId = RootId $ authenticatedUser ^. auth_node_id
nodeAPI' :: ServerT (NodeAPI a) (GargM Env BackendInternalError)
nodeAPI' = withPolicy authenticatedUser (nodeChecks targetNode) (getNodeWith targetNode p)
:<|> rename targetNode
:<|> postNode authenticatedUser targetNode
:<|> postNodeAsyncAPI authenticatedUser targetNode
:<|> FrameCalcUpload.api authenticatedUser targetNode
:<|> putNode targetNode
:<|> Update.api targetNode
:<|> Action.deleteNode userRootId targetNode
:<|> getChildren targetNode p
-- TODO gather it
:<|> tableApi targetNode
:<|> apiNgramsTableCorpus targetNode
:<|> catApi targetNode
:<|> scoreApi targetNode
:<|> Search.api targetNode
:<|> Share.api userRootId targetNode
-- Pairing Tools
:<|> pairWith targetNode
:<|> pairs targetNode
:<|> getPair targetNode
-- VIZ
:<|> scatterApi targetNode
:<|> chartApi targetNode
:<|> pieApi targetNode
:<|> treeApi targetNode
:<|> phyloAPI targetNode
:<|> moveNode userRootId targetNode
-- :<|> nodeAddAPI id'
-- :<|> postUpload id'
:<|> Share.unPublish targetNode
:<|> fileApi targetNode
:<|> fileAsyncApi authenticatedUser targetNode
:<|> DFWN.api authenticatedUser targetNode
:<|> DocumentUpload.api targetNode
------------------------------------------------------------------------
data RenameNode = RenameNode { r_name :: Text }
deriving (Generic)
......@@ -374,5 +318,59 @@ instance ToSchema RenameNode
instance Arbitrary RenameNode where
arbitrary = elements [RenameNode "test"]
------------------------------------------------------------------------
-- TODO: make the NodeId type indexed by `a`, then we no longer need the proxy.
-- | Full per-node Servant server, polymorphic in the hyperdata type @a@.
--
-- Access control is layered: 'withAccess' verifies the authenticated user
-- may reach 'targetNode' at all, and 'withPolicy' additionally guards the
-- first (read) endpoint with 'nodeChecks'.
nodeAPI :: forall proxy a.
       ( HyperdataC a, Show a, MimeUnrender JSON a
       ) => proxy a
         -> AuthenticatedUser
         -> NodeId
         -> ServerT (NodeAPI a) (GargM Env BackendInternalError)
nodeAPI p authenticatedUser targetNode =
  withAccess (Proxy :: Proxy (NodeAPI a)) Proxy authenticatedUser (PathNode targetNode) nodeAPI'
  where
    -- Root node of the logged-in user; needed by the delete/share/move endpoints.
    userRootId = RootId $ authenticatedUser ^. auth_node_id

    -- NOTE: the ':<|>' chain must mirror the 'NodeAPI' route type in exactly
    -- this order — reordering handlers silently wires routes to the wrong code.
    nodeAPI' :: ServerT (NodeAPI a) (GargM Env BackendInternalError)
    nodeAPI' =  withPolicy authenticatedUser (nodeChecks targetNode) (getNodeWith targetNode p)
           :<|> rename          targetNode
           :<|> postNode        authenticatedUser targetNode
           :<|> postNodeAsyncAPI authenticatedUser targetNode
           :<|> FrameCalcUpload.api authenticatedUser targetNode
           :<|> putNode         targetNode
           :<|> Update.api      targetNode
           :<|> Action.deleteNode userRootId targetNode
           :<|> getChildren     targetNode p

           -- TODO gather it
           :<|> tableApi        targetNode
           :<|> apiNgramsTableCorpus targetNode

           :<|> catApi          targetNode
           :<|> scoreApi        targetNode
           :<|> Search.api      targetNode
           :<|> Share.api       userRootId targetNode
           -- Pairing Tools
           :<|> pairWith        targetNode
           :<|> pairs           targetNode
           :<|> getPair         targetNode

           -- VIZ
           :<|> scatterApi      targetNode
           :<|> chartApi        targetNode
           :<|> pieApi          targetNode
           :<|> treeApi         targetNode
           :<|> phyloAPI        targetNode
           :<|> moveNode        userRootId targetNode
           -- :<|> nodeAddAPI id'
           -- :<|> postUpload id'
           :<|> Share.unPublish targetNode

           :<|> fileApi         targetNode
           :<|> fileAsyncApi    authenticatedUser targetNode

           :<|> DFWN.api        authenticatedUser targetNode
           :<|> DocumentUpload.api targetNode
-------------------------------------------------------------
......@@ -38,7 +38,7 @@ import Gargantext.API.Node.Corpus.Types
import Gargantext.API.Node.Corpus.Update (addLanguageToCorpus)
import Gargantext.API.Node.Types
import Gargantext.Core (Lang(..), withDefaultLanguage, defaultLanguage)
import Gargantext.Core.NodeStory (HasNodeStoryImmediateSaver, HasNodeArchiveStoryImmediateSaver, currentVersion)
import Gargantext.Core.NodeStory (HasNodeStoryImmediateSaver, HasNodeArchiveStoryImmediateSaver, currentVersion, NgramsStatePatch')
import Gargantext.Core.Text.Corpus.API qualified as API
import Gargantext.Core.Text.Corpus.Parsers qualified as Parser (FileType(..), parseFormatC)
import Gargantext.Core.Text.List.Social (FlowSocialListWith(..))
......@@ -50,7 +50,7 @@ import Gargantext.Database.Action.Mail (sendMail)
import Gargantext.Database.Action.Node (mkNodeWithParent)
import Gargantext.Database.Action.User (getUserId)
import Gargantext.Database.Admin.Types.Hyperdata
import Gargantext.Database.Admin.Types.Node (CorpusId, NodeType(..))
import Gargantext.Database.Admin.Types.Node (CorpusId, NodeType(..), ParentId)
import Gargantext.Database.GargDB qualified as GargDB
import Gargantext.Database.Prelude (hasConfig)
import Gargantext.Database.Query.Table.Node (getNodeWith, getOrMkList)
......@@ -251,10 +251,7 @@ addToCorpusWithQuery user cid (WithQuery { _wq_query = q
corpusId <- flowDataText user txt (Multi l) cid (Just flw) jobHandle
$(logLocM) DEBUG $ T.pack $ "corpus id " <> show corpusId
userId <- getUserId user
listId <- getOrMkList cid userId
v <- currentVersion listId
_ <- commitStatePatch listId (Versioned v mempty)
_ <- commitCorpus cid user
-- printDebug "sending email" ("xxxxxxxxxxxxxxxxxxxxx" :: Text)
sendMail user
......@@ -274,7 +271,10 @@ type AddWithForm = Summary "Add with FormUrlEncoded to corpus endpoint"
:> "async"
:> AsyncJobs JobLog '[FormUrlEncoded] NewWithForm JobLog
addToCorpusWithForm :: (FlowCmdM env err m, MonadJobStatus m)
addToCorpusWithForm :: ( FlowCmdM env err m
, MonadJobStatus m
, HasNodeStoryImmediateSaver env
, HasNodeArchiveStoryImmediateSaver env )
=> User
-> CorpusId
-> NewWithForm
......@@ -343,6 +343,8 @@ addToCorpusWithForm user cid nwf jobHandle = do
--(map (map toHyperdataDocument) docs)
jobHandle
_ <- commitCorpus cid user
-- printDebug "Extraction finished : " cid
-- printDebug "sending email" ("xxxxxxxxxxxxxxxxxxxxx" :: Text)
-- TODO uncomment this
......@@ -421,3 +423,17 @@ addToCorpusWithFile user cid nwf@(NewWithFile _d (withDefaultLanguage -> l) fNam
sendMail user
markComplete jobHandle
--- UTILITIES
-- | Commit an empty patch against the corpus' list at its current version,
-- forcing the node-story state to be persisted.
commitCorpus :: ( FlowCmdM env err m
                , HasNodeArchiveStoryImmediateSaver env
                , HasNodeStoryImmediateSaver env )
             => ParentId -> User -> m (Versioned NgramsStatePatch')
commitCorpus corpusId user = do
  uid   <- getUserId user
  lstId <- getOrMkList corpusId uid
  ver   <- currentVersion lstId
  commitStatePatch lstId $ Versioned ver mempty
......@@ -90,6 +90,6 @@ type API = Summary "Document Export"
:<|> "csv"
:> Get '[PlainText] (Headers '[Servant.Header "Content-Disposition" Text] Text)) -- [Document])
$(deriveJSON (unPrefix "_de_") ''DocumentExport)
$(deriveJSON (unPrefix "_d_") ''Document)
$(deriveJSON (unPrefix "_ng_") ''Ngrams)
$(deriveJSON (unPrefix "_d_") ''Document)
$(deriveJSON (unPrefix "_de_") ''DocumentExport)
......@@ -35,6 +35,7 @@ import Gargantext.API.Node.Corpus.New.Types (FileFormat(..), FileType(..))
import Gargantext.API.Node.Types (NewWithForm(..))
import Gargantext.API.Prelude
import Gargantext.Core (Lang)
import Gargantext.Core.NodeStory (HasNodeArchiveStoryImmediateSaver)
import Gargantext.Core.Text.List.Social (FlowSocialListWith(..))
import Gargantext.Core.Types.Individu (User(..))
import Gargantext.Database.Action.Flow.Types
......@@ -70,7 +71,10 @@ api authenticatedUser nId =
frameCalcUploadAsync :: (HasConfig env, FlowCmdM env err m, MonadJobStatus m)
frameCalcUploadAsync :: ( HasConfig env
, FlowCmdM env err m
, MonadJobStatus m
, HasNodeArchiveStoryImmediateSaver env )
=> AuthenticatedUser
-- ^ The logged-in user
-> NodeId
......
......@@ -38,7 +38,7 @@ import Test.QuickCheck.Arbitrary
------------------------------------------------------------------------
data ShareNodeParams = ShareTeamParams { username :: Text }
| SharePublicParams { node_id :: NodeId}
| SharePublicParams { node_id :: NodeId }
deriving (Generic)
------------------------------------------------------------------------
-- TODO unPrefix "pn_" FromJSON, ToJSON, ToSchema, adapt frontend.
......
......@@ -67,13 +67,16 @@ server env = do
:<|> hoistServerWithContext
(Proxy :: Proxy GraphQL.API)
(Proxy :: Proxy AuthContext)
(transformJSON errScheme)
(transformJSONGQL errScheme)
GraphQL.api
:<|> frontEndServer
where
transformJSON :: forall a. GargErrorScheme -> GargM Env BackendInternalError a -> Handler a
transformJSON GES_old = Handler . withExceptT showAsServantJSONErr . (`runReaderT` env) . logPanicErrors
transformJSON GES_new = Handler . withExceptT (frontendErrorToServerError . backendErrorToFrontendError) . (`runReaderT` env) . handlePanicErrors
transformJSONGQL :: forall a. GargErrorScheme -> GargM Env BackendInternalError a -> Handler a
transformJSONGQL GES_old = Handler . withExceptT showAsServantJSONErr . (`runReaderT` env) . logPanicErrors
transformJSONGQL GES_new = Handler . withExceptT (frontendErrorToGQLServerError . backendErrorToFrontendError) . (`runReaderT` env) . handlePanicErrors
handlePanicErrors :: GargM Env BackendInternalError a -> GargM Env BackendInternalError a
handlePanicErrors h = h `catch` handleSomeException
......@@ -104,3 +107,4 @@ logPanicErrors h = h `catch` handleSomeException
= throwError ber -- re-throw the uncaught exception via the 'MonadError' instance
| otherwise
= throwM se -- re-throw the uncaught exception.
......@@ -75,7 +75,7 @@ instance ToSchema Lang where
declareNamedSchema = genericDeclareNamedSchemaUnrestricted defaultSchemaOptions
instance FromHttpApiData Lang
where
parseUrlPiece "All" = pure All
-- parseUrlPiece "All" = pure All
parseUrlPiece "DE" = pure DE
parseUrlPiece "EL" = pure EL
parseUrlPiece "EN" = pure EN
......
......@@ -64,7 +64,7 @@ nlpServerConfigFromURI _ = Nothing
-- | Build the language -> NLP-server map from the configuration:
-- a default entry, then one entry per language whose URI parses.
nlpServerMap :: NLPConfig -> NLPServerMap
nlpServerMap (NLPConfig { .. }) =
  Map.fromList $ catMaybes $
    -- NOTE(review): both the pre-merge default (All / _nlp_all) and the
    -- post-merge default (EN / _nlp_default) entries appear here; confirm
    -- only the EN line is intended to survive the merge.
    [ uncurryMaybeSecond (All, nlpServerConfigFromURI _nlp_all) ] ++
    [ uncurryMaybeSecond (EN, nlpServerConfigFromURI _nlp_default) ] ++
    -- Entries whose URI fails to parse are dropped by 'catMaybes'.
    ((\lang ->
        uncurryMaybeSecond (lang, Map.lookup (show lang) _nlp_languages >>= nlpServerConfigFromURI ))
      <$> allLangs)
......@@ -701,7 +701,7 @@ clearHistory (NodeStory ns) = NodeStory $ ns & (traverse . a_history) .~ emptyHi
-- | Current 'Version' of the node story attached to the given list.
currentVersion :: (HasNodeStory env err m) => ListId -> m Version
currentVersion listId = do
  pool <- view connPool
  -- A single pool checkout, lifted into the base monad in one go. The merge
  -- view carried two 'nls <- …' bindings (pre- and post-merge variants),
  -- which would have fetched the node story twice; only one remains.
  nls <- liftBase $ withResource pool $ \c -> liftBase $ getNodeStory c listId
  pure $ nls ^. unNodeStory . at listId . _Just . a_version
......@@ -711,7 +711,7 @@ currentVersion listId = do
fixNodeStoryVersions :: (HasNodeStory env err m) => m ()
fixNodeStoryVersions = do
pool <- view connPool
_ <- withResource pool $ \c -> liftBase $ PGS.withTransaction c $ do
_ <- liftBase $ withResource pool $ \c -> liftBase $ PGS.withTransaction c $ do
nIds <- runPGSQuery c [sql| SELECT id FROM nodes WHERE ? |] (PGS.Only True) :: IO [PGS.Only Int64]
-- printDebug "[fixNodeStoryVersions] nIds" nIds
mapM_ (\(PGS.Only nId) -> do
......
......@@ -56,8 +56,8 @@ toDoc' la (HAL.Corpus { .. }) = do
, _hd_uniqIdBdd = Nothing
, _hd_page = Nothing
, _hd_title = Just $ intercalate " " _corpus_title
, _hd_authors = Just $ foldl (\x y -> x <> ", " <> y) "" _corpus_authors_names
, _hd_institutes = Just $ foldl (\x y -> x <> ", " <> y) "" $ _corpus_authors_affiliations <> map show _corpus_struct_id
, _hd_authors = Just $ foldl (\x y -> if x == "" then y else x <> ", " <> y) "" _corpus_authors_names
, _hd_institutes = Just $ foldl (\x y -> if x == "" then y else x <> ", " <> y) "" $ _corpus_authors_affiliations <> map show _corpus_struct_id
, _hd_source = Just $ maybe "Nothing" identity _corpus_source
, _hd_abstract = Just abstract
, _hd_publication_date = fmap show utctime
......
......@@ -97,7 +97,9 @@ get apiKey q l = do
eRes <- runReaderT PubMed.getMetadataWithC (Config { apiKey = Just apiKey
, query = getRawQuery q
, perPage = Just 200
, mWebEnv = Nothing })
, mWebEnv = Nothing
, enableDebugLogs = False
})
let takeLimit = case l of
Nothing -> mapC identity
Just l' -> takeC $ getLimit l'
......
......@@ -24,7 +24,7 @@ module Gargantext.Core.Text.Corpus.Parsers.Date
import Data.Aeson (toJSON, Value)
import Data.Aeson qualified as Json
import Data.HashMap.Strict as HM hiding (map)
import Data.Aeson.KeyMap as KM hiding (map)
import Data.HashSet qualified as HashSet
import Data.List qualified as List
import Data.Text (unpack, splitOn, replace)
......@@ -184,7 +184,7 @@ getTimeValue rt = case head rt of
-- | Pull the @"value"@ field out of a JSON object, when it is a string.
-- Any other shape yields 'Nothing'.
extractValue :: Maybe Value -> Maybe Text
extractValue (Just (Json.Object object)) =
  -- Keep only the KeyMap lookup: aeson >= 2 objects are 'KeyMap's, matching
  -- the updated @Data.Aeson.KeyMap@ import. The flattened merge view also
  -- carried the stale 'HM.lookup' scrutinee line, which cannot coexist.
  case KM.lookup "value" object of
    Just (Json.String date) -> Just date
    _ -> Nothing
extractValue _ = Nothing
......
......@@ -41,9 +41,9 @@ toDoc la (ISTEX.Document i t a ab d s) = do
, _hd_uniqIdBdd = Nothing
, _hd_page = Nothing
, _hd_title = t
, _hd_authors = Just $ foldl (\x y -> x <> ", " <> y) "" (map ISTEX._author_name a)
, _hd_institutes = Just $ foldl (\x y -> x <> ", " <> y) "" (concat $ (map ISTEX._author_affiliations) a)
, _hd_source = Just $ foldl (\x y -> x <> ", " <> y) "" (catMaybes $ map ISTEX._source_title s)
, _hd_authors = Just $ foldl (\x y -> if x == "" then y else x <> ", " <> y) "" (map ISTEX._author_name a)
, _hd_institutes = Just $ foldl (\x y -> if x == "" then y else x <> ", " <> y) "" (concat $ (map ISTEX._author_affiliations) a)
, _hd_source = Just $ foldl (\x y -> if x == "" then y else x <> ", " <> y) "" (ISTEX._source_title s)
, _hd_abstract = ab
, _hd_publication_date = fmap (T.pack . show) utctime
, _hd_publication_year = pub_year
......
......@@ -34,10 +34,11 @@ Notes for current implementation:
{-# OPTIONS_GHC -fno-warn-deprecations #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
module Gargantext.Core.Text.Terms.Eleve where
......
......@@ -143,11 +143,11 @@ whitespace :: Tokenizer
whitespace xs = E [Right w | w <- T.words xs ]
-- | Sequencing maps the continuation over 'Right' elements while passing
-- 'Left' elements through untouched.
instance Monad (EitherList a) where
  -- 'return' must coincide with 'pure'. The merge view stacked the old
  -- one-argument equation on top of this zero-argument one; equations with
  -- different arities do not compile, so only 'return = pure' is kept.
  return = pure
  E xs >>= f = E $ concatMap (either (return . Left) (unE . f)) xs
instance Applicative (EitherList a) where
  -- 'pure' wraps a single 'Right' value. The merge view also carried
  -- 'pure = pure', a self-referential loop with a clashing arity; it is
  -- removed.
  pure x = E [Right x]
  -- '<*>' is derived from the Monad instance via 'ap'.
  f <*> x = f `ap` x
instance Functor (EitherList a) where
......
......@@ -216,43 +216,6 @@ data ObjectData =
| Layer !GvId !GraphDataData !LayerData
deriving (Show, Eq, Generic)
instance ToJSON ObjectData where
toJSON = \case
GroupToNode gvid commonData nodeTypeData
-> mkObject gvid (Left commonData) nodeTypeData
BranchToNode gvid commonData nodeTypeData
-> mkObject gvid (Left commonData) nodeTypeData
PeriodToNode gvid commonData nodeTypeData
-> mkObject gvid (Left commonData) nodeTypeData
Layer gvid graphData nodeTypeData
-> mkObject gvid (Right graphData) nodeTypeData
instance FromJSON ObjectData where
parseJSON = withObject "ObjectData" $ \o -> do
_gvid <- o .: "_gvid"
-- try to parse the graph data first. If we succeed, then we are dealing with
-- the 'Layer', otherwise we the rest, but for the rest we can avoid re-parsing
-- the 'NodeCommonData' every time.
case parseMaybe @_ @GraphDataData parseJSON (Object o) of
Nothing
-> do commonData <- parseJSON (Object o)
((GroupToNode <$> pure _gvid <*> pure commonData <*> parseJSON (Object o)) <|>
(BranchToNode <$> pure _gvid <*> pure commonData <*> parseJSON (Object o)) <|>
(PeriodToNode <$> pure _gvid <*> pure commonData <*> parseJSON (Object o)))
Just gd
-> Layer <$> pure _gvid <*> pure gd <*> parseJSON (Object o)
mkObject :: ToJSON a => GvId -> Either NodeCommonData GraphDataData -> a -> Value
mkObject gvid commonData objectTypeData =
let commonDataJSON = either toJSON toJSON commonData
objectTypeDataJSON = toJSON objectTypeData
header = object $ [ "_gvid" .= toJSON gvid ]
in case (commonDataJSON, objectTypeDataJSON, header) of
(Object hdr, Object cdJSON, Object etDataJSON)
-> Object $ hdr <> cdJSON <> etDataJSON
_ -> panicTrace "[Gargantext.Core.Types.Phylo.mkObject] impossible: commonData, header or objectTypeDataJSON didn't convert back to JSON Object."
data GroupToNodeData
= GroupToNodeData
{ _gtn_bId :: Text
......@@ -474,17 +437,23 @@ data BranchToGroupData
, _btg_style :: Maybe Text
} deriving (Show, Eq, Generic)
-- | Lenses
makeLenses ''Phylo
makeLenses ''PhyloPeriod
makeLenses ''PhyloLevel
makeLenses ''PhyloGroup
-- | JSON instances
$(deriveJSON (unPrefix "_phylo_" ) ''Phylo )
$(deriveJSON (unPrefix "_phylo_Period" ) ''PhyloPeriod )
$(deriveJSON (unPrefix "_phylo_Level" ) ''PhyloLevel )
$(deriveJSON (unPrefix "_phylo_Group" ) ''PhyloGroup )
instance ToJSON GvId where
toJSON GvId{..} = toJSON _GvId
instance FromJSON GvId where
parseJSON v = GvId <$> parseJSON v
-- /NOTE/ We need to define /after/ the JSON istance for 'GvId' due to GHC stage limitation.
-- | Merge the common (or graph) data, the type-specific data and the
-- @_gvid@ header into a single JSON object. All three pieces must encode
-- to JSON objects; anything else is an internal invariant violation.
mkObject :: ToJSON a => GvId -> Either NodeCommonData GraphDataData -> a -> Value
mkObject gvid commonData objectTypeData =
  case (commonJSON, typeJSON, headerJSON) of
    (Object commonObj, Object typeObj, Object headerObj)
      -- Left-biased union; merge order kept identical to the original.
      -> Object $ commonObj <> typeObj <> headerObj
    _ -> panicTrace "[Gargantext.Core.Types.Phylo.mkObject] impossible: commonData, header or objectTypeDataJSON didn't convert back to JSON Object."
  where
    commonJSON = either toJSON toJSON commonData
    typeJSON   = toJSON objectTypeData
    headerJSON = object $ [ "_gvid" .= toJSON gvid ]
instance ToJSON GraphData where
toJSON = mkGraphData
......@@ -512,11 +481,6 @@ instance FromJSON GraphData where
_gd_data <- parseJSON (Object o)
pure GraphData{..}
instance ToJSON GvId where
toJSON GvId{..} = toJSON _GvId
instance FromJSON GvId where
parseJSON v = GvId <$> parseJSON v
instance ToJSON EdgeData where
toJSON = \case
GroupToAncestor gvid commonData edgeTypeData
......@@ -608,6 +572,38 @@ instance FromJSON BranchToGroupData where
_btg_style <- o .:? "style"
pure BranchToGroupData{..}
instance ToJSON ObjectData where
toJSON = \case
GroupToNode gvid commonData nodeTypeData
-> mkObject gvid (Left commonData) nodeTypeData
BranchToNode gvid commonData nodeTypeData
-> mkObject gvid (Left commonData) nodeTypeData
PeriodToNode gvid commonData nodeTypeData
-> mkObject gvid (Left commonData) nodeTypeData
Layer gvid graphData nodeTypeData
-> mkObject gvid (Right graphData) nodeTypeData
instance FromJSON ObjectData where
parseJSON = withObject "ObjectData" $ \o -> do
_gvid <- o .: "_gvid"
-- try to parse the graph data first. If we succeed, then we are dealing with
-- the 'Layer', otherwise we the rest, but for the rest we can avoid re-parsing
-- the 'NodeCommonData' every time.
case parseMaybe @_ @GraphDataData parseJSON (Object o) of
Nothing
-> do commonData <- parseJSON (Object o)
((GroupToNode <$> pure _gvid <*> pure commonData <*> parseJSON (Object o)) <|>
(BranchToNode <$> pure _gvid <*> pure commonData <*> parseJSON (Object o)) <|>
(PeriodToNode <$> pure _gvid <*> pure commonData <*> parseJSON (Object o)))
Just gd
-> Layer <$> pure _gvid <*> pure gd <*> parseJSON (Object o)
$(deriveJSON (unPrefix "_phylo_Group" ) ''PhyloGroup )
$(deriveJSON (unPrefix "_phylo_Level" ) ''PhyloLevel )
$(deriveJSON (unPrefix "_phylo_Period" ) ''PhyloPeriod )
$(deriveJSON (unPrefix "_phylo_" ) ''Phylo )
-- | ToSchema instances
instance ToSchema Phylo where
......@@ -637,7 +633,9 @@ instance ToSchema GraphDataData where
instance ToSchema GraphData where
declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "_gd_")
-- | Arbitrary instances
--
-- Arbitrary instances
--
instance Arbitrary LayerData where
arbitrary = LayerData <$> arbitrary
instance Arbitrary NodeCommonData where
......@@ -723,3 +721,13 @@ instance Arbitrary GraphDataData where
<*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
<*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
<*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
--
-- Lenses
--
makeLenses ''Phylo
makeLenses ''PhyloPeriod
makeLenses ''PhyloLevel
makeLenses ''PhyloGroup
......@@ -475,38 +475,38 @@ makeLenses ''PhyloEdge
------------------------
$(deriveJSON (unPrefix "_phylo_" ) ''Phylo )
$(deriveJSON (unPrefix "_phylo_group" ) ''PhyloGroup )
$(deriveJSON (unPrefix "_phylo_level" ) ''PhyloLevel )
$(deriveJSON (unPrefix "_phylo_foundations" ) ''PhyloFoundations )
$(deriveJSON (unPrefix "_phylo_period" ) ''PhyloPeriod )
$(deriveJSON (unPrefix "_phylo_level" ) ''PhyloLevel )
$(deriveJSON (unPrefix "_phylo_group" ) ''PhyloGroup )
$(deriveJSON (unPrefix "_phyloFis_" ) ''PhyloFis )
--
$(deriveJSON (unPrefix "_software_" ) ''Software )
$(deriveJSON (unPrefix "_phyloParam_" ) ''PhyloParam )
$(deriveJSON (unPrefix "_lb_" ) ''LBParams )
$(deriveJSON (unPrefix "_sb_" ) ''SBParams )
--
$(deriveJSON (unPrefix "_fis_" ) ''FisParams )
$(deriveJSON (unPrefix "_hamming_" ) ''HammingParams )
$(deriveJSON (unPrefix "_wlj_" ) ''WLJParams )
--
$(deriveJSON defaultOptions ''Filter )
$(deriveJSON defaultOptions ''Metric )
$(deriveJSON defaultOptions ''Cluster )
$(deriveJSON defaultOptions ''Proximity )
--
$(deriveJSON (unPrefix "_fis_" ) ''FisParams )
$(deriveJSON (unPrefix "_hamming_" ) ''HammingParams )
$(deriveJSON (unPrefix "_louvain_" ) ''LouvainParams )
$(deriveJSON (unPrefix "_rc_" ) ''RCParams )
$(deriveJSON (unPrefix "_wlj_" ) ''WLJParams )
$(deriveJSON defaultOptions ''Cluster )
$(deriveJSON (unPrefix "_q_" ) ''PhyloQueryBuild )
--
$(deriveJSON (unPrefix "_lb_" ) ''LBParams )
$(deriveJSON (unPrefix "_sb_" ) ''SBParams )
$(deriveJSON (unPrefix "_software_" ) ''Software )
$(deriveJSON (unPrefix "_phyloParam_" ) ''PhyloParam )
$(deriveJSON (unPrefix "_phylo_" ) ''Phylo )
--
$(deriveJSON (unPrefix "_q_" ) ''PhyloQueryBuild )
$(deriveJSON (unPrefix "_pv_" ) ''PhyloView )
$(deriveJSON (unPrefix "_pb_" ) ''PhyloBranch )
$(deriveJSON (unPrefix "_pe_" ) ''PhyloEdge )
$(deriveJSON (unPrefix "_pn_" ) ''PhyloNode )
$(deriveJSON defaultOptions ''Filiation )
$(deriveJSON (unPrefix "_pn_" ) ''PhyloNode )
$(deriveJSON defaultOptions ''EdgeType )
$(deriveJSON (unPrefix "_pe_" ) ''PhyloEdge )
$(deriveJSON (unPrefix "_pv_" ) ''PhyloView )
---------------------------
-- | Swagger instances | --
......
......@@ -625,9 +625,6 @@ makeLenses ''PhyloBranch
-- | JSON instances | --
------------------------
instance FromJSON Phylo
instance ToJSON Phylo
instance FromJSON PhyloSources
instance ToJSON PhyloSources
......@@ -651,6 +648,9 @@ instance ToJSON PhyloGroup
$(deriveJSON (unPrefix "_foundations_" ) ''PhyloFoundations)
instance FromJSON Phylo
instance ToJSON Phylo
-- NFData instances
instance NFData CorpusParser
......@@ -677,3 +677,4 @@ instance NFData Order
instance NFData Sort
instance NFData Tagger
instance NFData PhyloLabel
......@@ -114,7 +114,7 @@ corpusIdtoDocuments timeUnit corpusId = do
docs <- selectDocNodes corpusId
lId <- defaultList corpusId
termList <- getTermList lId MapTerm NgramsTerms
corpus_node <- getNodeWith corpusId (Proxy @ HyperdataCorpus)
corpus_node <- getNodeWith corpusId (Proxy @HyperdataCorpus)
let corpusLang = view (node_hyperdata . to _hc_lang) corpus_node
let patterns = case termList of
......
......@@ -46,7 +46,7 @@ flowPhylo :: (HasNodeStory env err m, HasDBid NodeType)
-> m Phylo
flowPhylo cId = do
corpus_node <- getNodeWith cId (Proxy @ HyperdataCorpus)
corpus_node <- getNodeWith cId (Proxy @HyperdataCorpus)
let lang = withDefaultLanguage $ view (node_hyperdata . to _hc_lang) corpus_node
list' <- defaultList cId
termList <- HashMap.toList <$> getTermsWith (Text.words . unNgramsTerm) [list'] NgramsTerms (Set.singleton MapTerm)
......
......@@ -659,7 +659,7 @@ reIndexWith :: ( HasNodeStory env err m )
-> m ()
reIndexWith cId lId nt lts = do
-- printDebug "(cId,lId,nt,lts)" (cId, lId, nt, lts)
corpus_node <- getNodeWith cId (Proxy @ HyperdataCorpus)
corpus_node <- getNodeWith cId (Proxy @HyperdataCorpus)
let corpusLang = withDefaultLanguage $ view (node_hyperdata . to _hc_lang) corpus_node
-- Getting [NgramsTerm]
......
......@@ -72,7 +72,7 @@ membersOfQuery (_NodeId -> teamId) = proc () -> do
(nn, n, u) <- nodeNode_node_User -< ()
restrict -< (nn ^. nn_node2_id) .== sqlInt4 teamId
returnA -< ( user_username <$> u
, view node_id <$> n)
, view node_id <$> n )
nodeNode_node_User :: O.Select ( NodeNodeRead
......
......@@ -193,6 +193,20 @@ instance Arbitrary HyperdataContact where
-- | Specific Gargantext instance
instance Hyperdata HyperdataContact
-- | All lenses
makeLenses ''ContactWho
makeLenses ''ContactWhere
makeLenses ''ContactTouch
makeLenses ''ContactMetaData
makeLenses ''HyperdataContact
-- | All Json instances
$(deriveJSON (unPrefix "_ct_") ''ContactTouch)
$(deriveJSON (unPrefix "_cw_") ''ContactWho)
$(deriveJSON (unPrefix "_cw_") ''ContactWhere)
$(deriveJSON (unPrefix "_cm_") ''ContactMetaData)
$(deriveJSON (unPrefix "_hc_") ''HyperdataContact)
-- | Database (Posgresql-simple instance)
instance FromField HyperdataContact where
fromField = fromField'
......@@ -207,16 +221,3 @@ instance DefaultFromField (Nullable SqlJsonb) HyperdataContact where
-- | All lenses
makeLenses ''ContactWho
makeLenses ''ContactWhere
makeLenses ''ContactTouch
makeLenses ''ContactMetaData
makeLenses ''HyperdataContact
-- | All Json instances
$(deriveJSON (unPrefix "_cw_") ''ContactWho)
$(deriveJSON (unPrefix "_cw_") ''ContactWhere)
$(deriveJSON (unPrefix "_ct_") ''ContactTouch)
$(deriveJSON (unPrefix "_cm_") ''ContactMetaData)
$(deriveJSON (unPrefix "_hc_") ''HyperdataContact)
......@@ -73,8 +73,6 @@ defaultHyperdataDocument = case decode docExample of
data StatusV3 = StatusV3 { statusV3_error :: !(Maybe Text)
, statusV3_action :: !(Maybe Text)
} deriving (Show, Generic)
$(deriveJSON (unPrefix "statusV3_") ''StatusV3)
------------------------------------------------------------------------
data HyperdataDocumentV3 = HyperdataDocumentV3 { _hdv3_publication_day :: !(Maybe Int)
......@@ -140,12 +138,25 @@ arbitraryHyperdataDocuments =
instance Hyperdata HyperdataDocument
instance Hyperdata HyperdataDocumentV3
------------------------------------------------------------------------
$(makeLenses ''HyperdataDocument)
makePrisms ''HyperdataDocument
-- $(deriveJSON (unPrefix "_hd_") ''HyperdataDocument)
instance ToSchema HyperdataDocument where
declareNamedSchema proxy =
genericDeclareNamedSchema (unPrefixSwagger "_hd_") proxy
& mapped.schema.description ?~ "Document Hyperdata"
& mapped.schema.example ?~ toJSON defaultHyperdataDocument
$(makeLenses ''HyperdataDocumentV3)
{-
-- | For now HyperdataDocumentV3 is not exposed with the API
instance ToSchema HyperdataDocumentV3 where
declareNamedSchema proxy =
genericDeclareNamedSchema (unPrefixSwagger "hyperdataDocumentV3_") proxy
& mapped.schema.description ?~ "Document Hyperdata for Garg V3"
& mapped.schema.example ?~ toJSON defaultHyperdataDocumentV3
-}
-- $(deriveJSON (unPrefix "_hd_") ''HyperdataDocument)
--
-- JSON instances
--
instance FromJSON HyperdataDocument
where
......@@ -167,24 +178,13 @@ instance ToJSON HyperdataDocument
$(deriveJSON (unPrefix "statusV3_") ''StatusV3)
$(deriveJSON (unPrefix "_hdv3_") ''HyperdataDocumentV3)
instance ToSchema HyperdataDocument where
declareNamedSchema proxy =
genericDeclareNamedSchema (unPrefixSwagger "_hd_") proxy
& mapped.schema.description ?~ "Document Hyperdata"
& mapped.schema.example ?~ toJSON defaultHyperdataDocument
--
-- FromField/ToField instances
--
{-
-- | For now HyperdataDocumentV3 is not exposed with the API
instance ToSchema HyperdataDocumentV3 where
declareNamedSchema proxy =
genericDeclareNamedSchema (unPrefixSwagger "hyperdataDocumentV3_") proxy
& mapped.schema.description ?~ "Document Hyperdata for Garg V3"
& mapped.schema.example ?~ toJSON defaultHyperdataDocumentV3
-}
------------------------------------------------------------------------
instance FromField HyperdataDocument
where
fromField = fromField'
......@@ -193,14 +193,12 @@ instance FromField HyperdataDocumentV3
where
fromField = fromField'
-------
instance ToField HyperdataDocument where
toField = toJSONField
instance ToField HyperdataDocumentV3 where
toField = toJSONField
------------------------------------------------------------------------
instance DefaultFromField SqlJsonb HyperdataDocument
where
defaultFromField = fromPGSFromField
......@@ -208,4 +206,10 @@ instance DefaultFromField SqlJsonb HyperdataDocument
instance DefaultFromField SqlJsonb HyperdataDocumentV3
where
defaultFromField = fromPGSFromField
------------------------------------------------------------------------
--
-- Lenses
--
$(makeLenses ''HyperdataDocument)
makePrisms ''HyperdataDocument
$(makeLenses ''HyperdataDocumentV3)
......@@ -98,9 +98,9 @@ makeLenses ''HyperdataPrivate
makeLenses ''HyperdataPublic
-- | All Json instances
$(deriveJSON (unPrefix "_hu_") ''HyperdataUser)
$(deriveJSON (unPrefix "_hpr_") ''HyperdataPrivate)
$(deriveJSON (unPrefix "_hpu_") ''HyperdataPublic)
$(deriveJSON (unPrefix "_hu_") ''HyperdataUser)
-- | Arbitrary instances
instance Arbitrary HyperdataUser where
......
......@@ -41,8 +41,8 @@ instance Arbitrary Metric
<*> arbitrary
<*> arbitrary
deriveJSON (unPrefix "metrics_") ''Metrics
deriveJSON (unPrefix "m_") ''Metric
deriveJSON (unPrefix "metrics_") ''Metrics
newtype ChartMetrics a = ChartMetrics { chartMetrics_data :: a }
......
......@@ -117,7 +117,7 @@ fromInt64ToInt = fromIntegral
-- | Run a raw connection action against a connection checked out of the pool.
mkCmd :: (Connection -> IO a) -> DBCmd err a
mkCmd k = do
  pool <- view connPool
  -- One pool checkout only. The merge view stacked the pre- and post-merge
  -- variants of this statement, which would have checked out a connection
  -- and run 'k' twice; the 'liftBase'-wrapped post-merge line is kept.
  liftBase $ withResource pool (liftBase . k)
runCmd :: (HasConnectionPool env)
=> env
......
......@@ -15,6 +15,7 @@ Portability : POSIX
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
------------------------------------------------------------------------
module Gargantext.Database.Query.Facet
( runViewAuthorsDoc
......
......@@ -30,11 +30,9 @@ import Control.Lens (Prism', (#), (^?))
import Data.Aeson
import Data.Text qualified as T
import Gargantext.Core.Types.Individu
import Prelude hiding (null, id, map, sum, show)
import Gargantext.Database.Admin.Types.Node (ListId, NodeId(..), ContextId, UserId, ParentId)
import Gargantext.Prelude hiding (sum, head)
import Prelude hiding (null, id, map, sum, show)
import Prelude qualified
data NodeCreationError
......@@ -42,6 +40,9 @@ data NodeCreationError
| UserParentDoesNotExist UserId
| UserHasNegativeId UserId
| InsertNodeFailed UserId ParentId
deriving (Show, Eq, Generic)
instance ToJSON NodeCreationError
renderNodeCreationFailed :: NodeCreationError -> T.Text
renderNodeCreationFailed = \case
......@@ -56,13 +57,16 @@ data NodeLookupError
| UserDoesNotExist UserId
| UserNameDoesNotExist Username
| UserHasTooManyRoots UserId [NodeId]
deriving (Show, Eq, Generic)
instance ToJSON NodeLookupError
renderNodeLookupFailed :: NodeLookupError -> T.Text
renderNodeLookupFailed = \case
NodeDoesNotExist nid -> "node with id " <> T.pack (show nid) <> " couldn't be found."
NodeParentDoesNotExist nid -> "no parent for node with id " <> T.pack (show nid) <> "."
UserDoesNotExist uid -> "user with id " <> T.pack (show uid) <> " couldn't be found."
UserNameDoesNotExist uname -> "user with username '" <> uname <> " couldn't be found."
UserNameDoesNotExist uname -> "user with username '" <> uname <> "' couldn't be found."
UserHasTooManyRoots uid roots -> "user with id " <> T.pack (show uid) <> " has too many roots: [" <> T.intercalate "," (map (T.pack . show) roots)
------------------------------------------------------------------------
......@@ -95,11 +99,29 @@ instance Prelude.Show NodeError
show (DoesNotExist n) = "Node does not exist (" <> show n <> ")"
instance ToJSON NodeError where
toJSON (DoesNotExist n) =
object [ ( "error", "Node does not exist" )
, ( "node", toJSON n ) ]
toJSON (NoListFound listId) =
object [ ( "error", "No list found" )
, ( "listId", toJSON listId ) ]
toJSON (NodeError e) =
object [ ( "error", "Node error" )
, ( "exception", toJSON $ T.pack $ show e ) ]
toJSON (NoUserFound ur) =
object [ ( "error", "No user found" )
, ( "user", toJSON ur ) ]
toJSON (NodeCreationFailed reason) =
object [ ( "error", "Node creation failed" )
, ( "reason", toJSON reason ) ]
toJSON (NodeLookupFailed reason) =
object [ ( "error", "Node lookup failed" )
, ( "reason", toJSON reason ) ]
toJSON (NoContextFound n) =
object [ ( "error", "No context found" )
, ( "node", toJSON n ) ]
toJSON err =
object [ ( "error", String $ T.pack $ show err ) ]
object [ ( "error", toJSON $ T.pack $ show err ) ]
class HasNodeError e where
_NodeError :: Prism' e NodeError
......
......@@ -121,11 +121,11 @@ userTable = Table "auth_user"
}
)
$(deriveJSON (unPrefix "userLight_") ''UserLight)
$(deriveJSON (unPrefix "user_") ''UserPoly)
instance FromField UserLight where
fromField = fromField'
instance FromField UserDB where
fromField = fromField'
$(deriveJSON (unPrefix "userLight_") ''UserLight)
$(deriveJSON (unPrefix "user_") ''UserPoly)
......@@ -10,6 +10,7 @@ Portability : POSIX
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
module Gargantext.Utils.Jobs (
-- * Serving the JOBS API
serveJobsAPI
......
......@@ -20,8 +20,10 @@ import Data.Kind (Type)
import Data.Sequence (Seq)
import qualified Data.Sequence as Seq
import Prelude
import Servant.API
import Servant.API.Alternative
import Servant.API.ContentTypes
import Gargantext.API.Errors.Types (BackendInternalError)
import Gargantext.Utils.Jobs.Map
import Gargantext.Utils.Jobs.Monad
......@@ -33,16 +35,16 @@ import qualified Servant.Job.Core as SJ
import qualified Servant.Job.Types as SJ
serveJobsAPI
:: ( Ord t, Exception e, MonadError e m
:: ( Ord t, MonadError BackendInternalError m
, MonadJob m t (Seq event) output
, ToJSON e, ToJSON event, ToJSON output
, ToJSON event, ToJSON output, MimeRender JSON output
, Foldable callback
)
=> (SJ.JobID 'SJ.Safe -> LoggerM m event -> JobHandle m)
-> m env
-> t
-> (JobError -> e)
-> (env -> JobHandle m -> input -> IO (Either e output))
-> (JobError -> BackendInternalError)
-> (env -> JobHandle m -> input -> IO (Either BackendInternalError output))
-> SJ.AsyncJobsServerT' ctI ctO callback event input output m
serveJobsAPI newJobHandle getenv t joberr f
= newJob newJobHandle getenv t f (SJ.JobInput undefined Nothing)
......@@ -50,10 +52,10 @@ serveJobsAPI newJobHandle getenv t joberr f
:<|> serveJobAPI t joberr
serveJobAPI
:: forall (m :: Type -> Type) e t event output.
(Ord t, MonadError e m, MonadJob m t (Seq event) output)
:: forall (m :: Type -> Type) t event output.
(Ord t, MonadError BackendInternalError m, MonadJob m t (Seq event) output, MimeRender JSON output)
=> t
-> (JobError -> e)
-> (JobError -> BackendInternalError)
-> SJ.JobID 'SJ.Unsafe
-> SJ.AsyncJobServerT event output m
serveJobAPI t joberr jid' = wrap' (killJob t)
......@@ -72,14 +74,15 @@ serveJobAPI t joberr jid' = wrap' (killJob t)
wrap' g limit offset = wrap (g limit offset)
newJob
:: ( Ord t, Exception e, MonadJob m t (Seq event) output
, ToJSON e, ToJSON event, ToJSON output
:: ( Ord t, MonadJob m t (Seq event) output
, ToJSON event, ToJSON output
, MimeRender JSON output
, Foldable callbacks
)
=> (SJ.JobID 'SJ.Safe -> LoggerM m event -> JobHandle m)
-> m env
-> t
-> (env -> JobHandle m -> input -> IO (Either e output))
-> (env -> JobHandle m -> input -> IO (Either BackendInternalError output))
-> SJ.JobInput callbacks input
-> m (SJ.JobStatus 'SJ.Safe event)
newJob newJobHandle getenv jobkind f input = do
......
......@@ -15,16 +15,24 @@ Server to be used: https://gitlab.iscpif.fr/gargantext/spacy-server
{-# LANGUAGE TemplateHaskell #-}
module Gargantext.Utils.SpacyNLP where
module Gargantext.Utils.SpacyNLP (
module Gargantext.Utils.SpacyNLP.Types
, spacyRequest
, spacyTagsToToken
, spacyDataToPosSentences
, nlp
) where
import Control.Lens
import Data.Aeson (encode)
import Data.Aeson.TH (deriveJSON)
import Data.Set qualified as Set
import Data.Text hiding (map, group, filter, concat, zip)
import Gargantext.Core (Lang(..))
import Gargantext.Core.Text.Terms.Multi.PosTagging.Types
import Gargantext.Core.Types (POS(..), NER(..), TokenTag(..))
import Gargantext.Core.Utils.Prefix (unPrefix)
import Gargantext.Prelude
import Gargantext.Utils.SpacyNLP.Types
import Network.HTTP.Simple (parseRequest, httpJSON, setRequestBodyLBS, getResponseBody, Response)
import Network.URI (URI(..))
......@@ -88,19 +96,18 @@ spacyRequest uri txt = do
result <- httpJSON request :: IO (Response SpacyData)
pure $ getResponseBody result
-- Instances
deriveJSON (unPrefix "_spacy_") ''SpacyData
deriveJSON (unPrefix "_spacy_") ''SpacyText
deriveJSON (unPrefix "_spacyTags_") ''SpacyTags
deriveJSON (unPrefix "_spacyRequest_") ''SpacyRequest
makeLenses ''SpacyData
makeLenses ''SpacyText
makeLenses ''SpacyTags
makeLenses ''SpacyRequest
----------------------------------------------------------------
spacyTagsToToken :: SpacyTags -> Token
spacyTagsToToken st = Token (st ^. spacyTags_index)
(st ^. spacyTags_normalized)
(st ^. spacyTags_text)
(st ^. spacyTags_lemma)
(st ^. spacyTags_head_index)
(st ^. spacyTags_char_offset)
(Just $ st ^. spacyTags_pos)
(Just $ st ^. spacyTags_ent_type)
(Just $ st ^. spacyTags_prefix)
(Just $ st ^. spacyTags_suffix)
spacyDataToTokenTags :: SpacyData -> [[TokenTag]]
spacyDataToTokenTags (SpacyData ds) =
......
{-|
Module : Gargantext.Utils.SpacyNLP.Types
Description : John Snow NLP API connexion
Copyright : (c) CNRS, 2017
License : AGPL + CECILL v3
Maintainer : team@gargantext.org
Stability : experimental
Portability : POSIX
Spacy ecosystem: https://github.com/explosion/spaCy
Server to be used: https://gitlab.iscpif.fr/gargantext/spacy-server
-}
{-# LANGUAGE TemplateHaskell #-}
module Gargantext.Utils.SpacyNLP.Types where
import Control.Lens
import Data.Aeson.TH (deriveJSON)
import Data.Text hiding (map, group, filter, concat, zip)
import Gargantext.Core.Types (POS(..), NER(..))
import Gargantext.Core.Utils.Prefix (unPrefix)
import Gargantext.Prelude
data SpacyData = SpacyData { _spacy_data :: ![SpacyText]}
deriving (Show)
data SpacyText = SpacyText { _spacy_text :: !Text
, _spacy_tags :: ![SpacyTags]
} deriving (Show)
data SpacyTags =
SpacyTags { _spacyTags_text :: !Text
, _spacyTags_text_with_ws :: !Text
, _spacyTags_whitespace :: !Text
, _spacyTags_head :: !Text
, _spacyTags_head_index :: !Int
, _spacyTags_left_edge :: !Text
, _spacyTags_right_edge :: !Text
, _spacyTags_index :: Int
, _spacyTags_ent_type :: !NER
, _spacyTags_ent_iob :: !Text
, _spacyTags_lemma :: !Text
, _spacyTags_normalized :: !Text
, _spacyTags_shape :: !Text
, _spacyTags_prefix :: !Text
, _spacyTags_suffix :: !Text
, _spacyTags_is_alpha :: Bool
, _spacyTags_is_ascii :: Bool
, _spacyTags_is_digit :: Bool
, _spacyTags_is_title :: Bool
, _spacyTags_is_punct :: Bool
, _spacyTags_is_left_punct :: Bool
, _spacyTags_is_right_punct :: Bool
, _spacyTags_is_space :: Bool
, _spacyTags_is_bracket :: Bool
, _spacyTags_is_quote :: Bool
, _spacyTags_is_currency :: Bool
, _spacyTags_like_url :: Bool
, _spacyTags_like_num :: Bool
, _spacyTags_like_email :: Bool
, _spacyTags_is_oov :: Bool
, _spacyTags_is_stop :: Bool
, _spacyTags_pos :: POS
, _spacyTags_tag :: POS
, _spacyTags_dep :: !Text
, _spacyTags_lang :: !Text
, _spacyTags_prob :: !Int
, _spacyTags_char_offset :: !Int
} deriving (Show)
data SpacyRequest = SpacyRequest { _spacyRequest_text :: !Text }
deriving (Show)
--
-- JSON instances
--
deriveJSON (unPrefix "_spacyTags_") ''SpacyTags
deriveJSON (unPrefix "_spacy_") ''SpacyText
deriveJSON (unPrefix "_spacy_") ''SpacyData
deriveJSON (unPrefix "_spacyRequest_") ''SpacyRequest
--
-- Lenses
--
makeLenses ''SpacyData
makeLenses ''SpacyText
makeLenses ''SpacyTags
makeLenses ''SpacyRequest
This diff is collapsed.
......@@ -59,24 +59,17 @@ tests = sequential $ aroundAll withTestDBAndPort $ do
let authPayload = AuthRequest "alice" (GargPassword "alice")
result0 <- runClientM (auth_api authPayload) (clientEnv port)
let result = over (_Right . authRes_valid . _Just . authVal_token) (const cannedToken) result0
let result = over (_Right . authRes_token) (const cannedToken) result0
let expected = AuthResponse {
_authRes_valid = Just $
AuthValid {
_authVal_token = cannedToken
, _authVal_tree_id = fromMaybe (UnsafeMkNodeId 1) $ listToMaybe $ result0 ^.. _Right . authRes_valid . _Just . authVal_tree_id
, _authVal_user_id = fromMaybe (UnsafeMkUserId 1) $ listToMaybe $ result0 ^.. _Right . authRes_valid . _Just . authVal_user_id
}
, _authRes_inval = Nothing
}
_authRes_token = cannedToken
, _authRes_tree_id = fromMaybe (UnsafeMkNodeId 1) $ listToMaybe $ result0 ^.. _Right . authRes_tree_id
, _authRes_user_id = fromMaybe (UnsafeMkUserId 1) $ listToMaybe $ result0 ^.. _Right . authRes_user_id
}
result `shouldBe` (Right expected)
it "denies login for user 'alice' if password is invalid" $ \((_testEnv, port), _) -> do
let authPayload = AuthRequest "alice" (GargPassword "wrong")
result <- runClientM (auth_api authPayload) (clientEnv port)
let expected = AuthResponse {
_authRes_valid = Nothing
, _authRes_inval = Just $ AuthInvalid "Invalid username or password"
}
result `shouldBe` (Right expected)
putText $ "result: " <> show result
-- result `shouldBe` (Left $ InvalidUsernameOrPassword)
......@@ -47,7 +47,7 @@ tests = sequential $ aroundAll withTestDBAndPort $ do
| Status{..} <- simpleStatus
->liftIO $ do
statusCode `shouldBe` 404
simpleBody `shouldBe` [r|{"error":"Node does not exist (nodeId-99)"}|]
simpleBody `shouldBe` [r|{"error":"Node does not exist","node":99}|]
it "returns the new error if header X-Garg-Error-Scheme: new is passed" $ \((_testEnv, port), app) -> do
withApplication app $ do
......
......@@ -114,11 +114,7 @@ withValidLogin port ur pwd act = do
result <- liftIO $ runClientM (auth_api authPayload) clientEnv
case result of
Left err -> liftIO $ throwIO $ Prelude.userError (show err)
Right res
| Just tkn <- _authRes_valid res
-> act (_authVal_token tkn)
| otherwise
-> Prelude.fail $ "No token found in " <> show res
Right res -> act $ _authRes_token res
tests :: Spec
......
......@@ -13,14 +13,14 @@ import Language.Haskell.TH.Quote
import Network.HTTP.Types
import Network.Wai.Test
import Prelude
import qualified Data.Aeson as JSON
import qualified Data.Aeson.KeyMap as KM
import qualified Data.ByteString.Char8 as B
import Test.Hspec.Expectations
import Test.Hspec.Wai
import Test.Hspec.Wai.JSON
import Test.Hspec.Wai.Matcher
import Test.Tasty.HUnit
import qualified Data.Aeson as JSON
import qualified Data.ByteString.Char8 as B
import qualified Data.HashMap.Strict as HM
-- | Marks the input 'Assertion' as pending, by ignoring any exception
-- thrown by it.
......@@ -87,5 +87,5 @@ containsJSON expected = MatchBody matcher
isSubsetOf :: Value -> Value -> Bool
isSubsetOf (Object sub) (Object sup) =
all (\(key, value) -> HM.lookup key sup == Just value) (HM.toList sub)
all (\(key, value) -> KM.lookup key sup == Just value) (KM.toList sub)
isSubsetOf x y = x == y
......@@ -254,7 +254,7 @@ withJob :: Env
-> IO (SJ.JobStatus 'SJ.Safe JobLog)
withJob env f = runMyDummyMonad env $ MyDummyMonad $
-- the job type doesn't matter in our tests, we use a random one, as long as it's of type 'GargJob'.
newJob @_ @BackendInternalError mkJobHandle (pure env) RecomputeGraphJob (\_ hdl input ->
newJob @_ mkJobHandle (pure env) RecomputeGraphJob (\_ hdl input ->
runMyDummyMonad env $ (Right <$> (f hdl input >> getLatestJobStatus hdl))) (SJ.JobInput () Nothing)
withJob_ :: Env
......
......@@ -2,7 +2,7 @@
module Main where
import Gargantext.Prelude
import Gargantext.Prelude hiding (isInfixOf)
import Control.Monad
import Data.Text (isInfixOf)
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment