Verified Commit 7c5ddfa4 authored by Przemyslaw Kaminski

Merge branch 'dev' into 471-dev-node-multiterms

parents b388c75e 73a95b9f
Pipeline #7734 failed in 47 minutes and 44 seconds
......@@ -85,6 +85,8 @@ convertConfigs ini@(Ini.GargConfig { .. }) iniMail nlpConfig connInfo =
, _ac_scrapyd_url }
, _gc_worker = WorkerSettings { _wsDefinitions = [ wd ]
, _wsDefaultVisibilityTimeout = 1
, _wsDefaultJobTimeout = 60
, _wsLongJobTimeout = 3000
, _wsDefaultDelay = 0
, _wsDatabase = connInfo { PGS.connectDatabase = "pgmq"} }
, _gc_logging = Config.LogConfig {
......@@ -141,5 +143,8 @@ defaultNotificationsConfig :: CTypes.NotificationsConfig
defaultNotificationsConfig =
CTypes.NotificationsConfig { _nc_central_exchange_bind = "tcp://*:5560"
, _nc_central_exchange_connect = "tcp://localhost:5560"
, _nc_ce_send_timeout_ms = 200
, _nc_dispatcher_bind = "tcp://*:5561"
, _nc_dispatcher_connect = "tcp://localhost:5561" }
, _nc_dispatcher_connect = "tcp://localhost:5561"
, _nc_dispatcher_send_timeout_ms = 500
, _nc_dispatcher_throttle_ms = 500 }
......@@ -16,8 +16,8 @@ fi
# with the `sha256sum` result calculated on the `cabal.project` and
# `cabal.project.freeze`. This ensures the files stay deterministic so that CI
# cache can kick in.
expected_cabal_project_hash="c7e0466c8d4c1ca88b4f3d62d022bd29329d44afc48fffbcfacf0f65293acba8"
expected_cabal_project_freeze_hash="553b98aadb35506a305bd740cdd71f5fadc1e6d55d10f91cf39daa6735a63d78"
expected_cabal_project_hash="eb8fdb1a14aa2f7a13f565cf7fa9f6ab0e2dab9212538aed0db5691015be286b"
expected_cabal_project_freeze_hash="553b98aadb35506a305bd740cdd71f5fadc1e6d55d10f91cf39daa6735a63d78"
cabal --store-dir=$STORE_DIR v2-build --dry-run
......
......@@ -151,12 +151,12 @@ source-repository-package
source-repository-package
type: git
location: https://gitlab.iscpif.fr/gargantext/haskell-pgmq
tag: 1dd92f0aa8e9f8096064e5656c336e562680f4e3
tag: 9a869df2842eccc86a0f31a69fb8dc5e5ca218a8
source-repository-package
type: git
location: https://gitlab.iscpif.fr/gargantext/haskell-bee
tag: 4a9c709613554eed0189b486de2126c18797088c
tag: 05c39e424d15149dc32097b3318cb6007e0e7052
subdir: haskell-bee/
haskell-bee-pgmq/
haskell-bee-tests/
......
......@@ -123,9 +123,19 @@ smtp_host = "localhost"
# HOST_password = password
[notifications]
central-exchange = { bind = "tcp://*:5560", connect = "tcp://127.0.0.1:5560" }
dispatcher = { bind = "tcp://*:5561", connect = "tcp://127.0.0.1:5561" }
[notifications.central-exchange]
bind = "tcp://:5560"
connect = "tcp://127.0.0.1:5560"
# see https://gitlab.iscpif.fr/gargantext/haskell-gargantext/commit/77a687ea1483441675320fd2413fac52bb112a4c
send_timeout_ms = 200
[notifications.dispatcher]
bind = "tcp://:5561"
connect = "tcp://127.0.0.1:5561"
# see https://gitlab.iscpif.fr/gargantext/haskell-gargantext/commit/77a687ea1483441675320fd2413fac52bb112a4c
send_timeout_ms = 500
# Identical dispatcher messages are throttled; this is the throttle delay
throttle_ms = 500
[nlp]
......@@ -149,6 +159,11 @@ default_visibility_timeout = 1
# default delay before job is visible to the worker
default_delay = 0
# default timeout (in seconds)
default_job_timeout = 60
# default timeout for "long" jobs (in seconds)
long_job_timeout = 3000
# if you leave the same credentials as in [database] section above,
# workers will try to set up the `gargantext_pgmq` database
# automatically
......
......@@ -728,12 +728,19 @@ common commonTestDependencies
build-depends:
base >=4.7 && <5
, QuickCheck ^>= 2.14.2
, accelerate >= 1.3.0.0
, aeson ^>= 2.1.2.1
, aeson-pretty ^>= 0.8.9
, aeson-qq
, async ^>= 2.2.4
, boolexpr ^>= 0.3
, bytestring ^>= 0.11.5.3
, cache >= 0.1.3.0
, conduit ^>= 1.3.4.2
, containers ^>= 0.6.7
, crawlerArxiv
, cryptohash
, directory ^>= 1.3.7.1
, epo-api-client
, fast-logger ^>= 3.2.2
, filepath ^>= 1.4.2.2
......@@ -741,16 +748,20 @@ common commonTestDependencies
, gargantext
, gargantext-prelude
, generic-arbitrary >= 1.0.1 && < 2
, graphviz ^>= 2999.20.1.0
, haskell-bee
, hspec ^>= 2.11.1
, hspec-expectations >= 0.8 && < 0.9
, hspec-expectations-lifted < 0.11
, hspec-golden
, hspec-wai
, hspec-wai-json
, http-client ^>= 0.7.14
, http-client-tls >= 0.3.6.1 && < 0.4
, http-types
, HUnit
, lens >= 5.2.2 && < 5.3
, massiv < 1.1
, monad-control >= 1.0.3 && < 1.1
, mtl >= 2.2.2 && < 2.4
, network-uri
......@@ -760,71 +771,79 @@ common commonTestDependencies
, patches-map ^>= 0.1.0.1
, postgres-options >= 0.2 && < 0.3
, postgresql-simple >= 0.6.4 && <= 0.7.0.0
, process ^>= 1.6.18.0
, product-profunctors
, quickcheck-instances ^>= 0.3.25.2
, random
, raw-strings-qq
, resource-pool >= 0.4.0.0 && < 0.5
, safe-exceptions >= 0.1.7.4 && < 0.2
, scientific < 0.4
, servant >= 0.20.1 && < 0.21
, servant-auth-client
, servant-client >= 0.20 && < 0.21
, servant-client-core >= 0.20 && < 0.21
, servant-conduit >= 0.15 && < 0.17
, servant-server >= 0.20.1 && < 0.21
, shelly >= 1.9 && < 2
, split
, sqlite-simple >= 0.4.19 && < 0.5
, stm >= 2.5.1.0 && < 2.6
, streaming-commons
, tasty-hunit
, tasty-quickcheck
, text ^>= 2.0.2
, time ^>= 1.12.2
, tmp-postgres >= 1.34.1 && < 1.35
, tree-diff
, unicode-collation >= 0.1.3.5
, unix >= 2.7.3 && < 2.9
, unliftio
, unordered-containers ^>= 0.2.16.0
, utf8-string ^>= 1.0.2
, validity ^>= 0.12.0.2
, vector >= 0.12.3.0 && <= 0.13.1.0
, wai
, wai-extra
, warp
, websockets
test-suite garg-test-tasty
test-suite garg-test
import:
defaults
, commonTestDependencies
type: exitcode-stdio-1.0
main-is: drivers/tasty/Main.hs
main-is: Main.hs
build-depends:
aeson-pretty ^>= 0.8.9
, accelerate >= 1.3.0.0
, boolexpr ^>= 0.3
, conduit ^>= 1.3.4.2
, crawlerArxiv
, cryptohash
, directory ^>= 1.3.7.1
, graphviz ^>= 2999.20.1.0
, massiv < 1.1
, scientific < 0.4
, split
, tasty >= 1.4.3 && < 1.6
, tasty-golden
, tasty-hspec
, time ^>= 1.12.2
, unicode-collation >= 0.1.3.5
, unordered-containers ^>= 0.2.16.0
, utf8-string ^>= 1.0.2
, vector >= 0.12.3.0 && <= 0.13.1.0
other-modules:
CLI.Phylo.Common
Paths_gargantext
Paths_gargantext
Test.API
Test.API.Authentication
Test.API.Authentication
Test.API.Errors
Test.API.Export
Test.API.GraphQL
Test.API.Notifications
Test.API.Prelude
Test.API.Prelude
Test.API.Private
Test.API.Private.List
Test.API.Private.List
Test.API.Private.Move
Test.API.Private.Move
Test.API.Private.Remote
Test.API.Private.Remote
Test.API.Private.Share
Test.API.Private.Share
Test.API.Private.Table
Test.API.Private.Table
Test.API.Authentication
Test.API.Routes
Test.API.Routes
Test.API.Setup
Test.API.Prelude
Test.API.Setup
Test.API.UpdateList
Test.API.UpdateList
Test.API.Worker
Test.Core.LinearAlgebra
Test.Core.Notifications
Test.Core.Orchestrator
......@@ -837,15 +856,23 @@ test-suite garg-test-tasty
Test.Core.Utils
Test.Core.Worker
Test.Database.Operations
Test.Database.Operations
Test.Database.Operations.DocumentSearch
Test.Database.Operations.DocumentSearch
Test.Database.Operations.NodeStory
Test.Database.Operations.NodeStory
Test.Database.Operations.PublishNode
Test.Database.Operations.PublishNode
Test.Database.Setup
Test.Database.Setup
Test.Database.Transactions
Test.Database.Transactions
Test.Database.Types
Test.Database.Types
Test.Graph.Clustering
Test.Graph.Distance
Test.Instances
Test.Instances
Test.Ngrams.Lang
Test.Ngrams.Lang.En
Test.Ngrams.Lang.Fr
......@@ -864,62 +891,20 @@ test-suite garg-test-tasty
Test.Parsers.Types
Test.Parsers.WOS
Test.Server.ReverseProxy
Test.Server.ReverseProxy
Test.Types
Test.Types
Test.Utils
Test.Utils
Test.Utils.Crypto
Test.Utils.Db
Test.Utils.Db
Test.Utils.Jobs
Test.Utils.Jobs.Types
Test.Utils.Notifications
hs-source-dirs:
test bin/gargantext-cli
ghc-options: -Wall -Werror -threaded -rtsopts -with-rtsopts=-N
test-suite garg-test-hspec
import:
defaults
, commonTestDependencies
type: exitcode-stdio-1.0
main-is: drivers/hspec/Main.hs
build-depends:
process ^>= 1.6.18.0
, servant >= 0.20.1 && < 0.21
, sqlite-simple >= 0.4.19 && < 0.5
, unix >= 2.7.3 && < 2.9
other-modules:
Paths_gargantext
Test.API
Test.API.Authentication
Test.API.Errors
Test.API.Export
Test.API.GraphQL
Test.API.Notifications
Test.API.Private
Test.API.Private.List
Test.API.Private.Move
Test.API.Private.Remote
Test.API.Private.Share
Test.API.Private.Table
Test.API.Routes
Test.API.Setup
Test.API.Prelude
Test.API.UpdateList
Test.API.Worker
Test.Database.Operations
Test.Database.Operations.DocumentSearch
Test.Database.Operations.NodeStory
Test.Database.Operations.PublishNode
Test.Database.Setup
Test.Database.Transactions
Test.Database.Types
Test.Instances
Test.Server.ReverseProxy
Test.Types
Test.Utils
Test.Utils.Db
Test.Utils.Notifications
hs-source-dirs:
test
test bin/gargantext-cli
ghc-options: -Wall -Werror -threaded -rtsopts -with-rtsopts=-N
benchmark garg-bench
......
......@@ -61,10 +61,13 @@ cradle:
component: "gargantext:exe:gargantext"
- path: "./test"
component: "gargantext:test:garg-test-tasty"
component: "gargantext:test:garg-test"
- path: "./bin/gargantext-cli"
component: "gargantext:test:garg-test-tasty"
component: "gargantext:test:garg-test"
- path: "./test"
component: "gargantext:test:garg-test-hspec"
- path: "./bench/Main.hs"
component: "gargantext:bench:garg-bench"
- path: "./bench/Paths_gargantext.hs"
component: "gargantext:bench:garg-bench"
{ pkgs ? import (if builtins.elem builtins.currentSystem ["x86_64-darwin" "aarch64-darwin"]
then ./pinned-25.05.darwin.nix
else ./pinned-25.05.nix) {} }:
{ pkgs ? import
(if builtins.elem builtins.currentSystem [ "x86_64-darwin" "aarch64-darwin" ]
then ./pinned-25.05.darwin.nix
else ./pinned-25.05.nix)
{ }
}:
rec {
inherit pkgs;
ghc966 = pkgs.haskell.compiler.ghc966;
cabal_install = pkgs.haskell.lib.compose.justStaticExecutables pkgs.haskell.packages.ghc966.cabal-install;
graphviz = pkgs.callPackage ./graphviz.nix {};
igraph_0_10_4 = pkgs.callPackage ./igraph.nix {};
corenlp = pkgs.callPackage ./corenlp.nix { }; # 4.5.8
graphviz = pkgs.callPackage ./graphviz.nix { };
igraph_0_10_4 = pkgs.callPackage ./igraph.nix { };
corenlp = pkgs.callPackage ./corenlp.nix { }; # 4.5.8
cabal2stack = pkgs.callPackage ./cabal2stack.nix { ghc = ghc966; };
nng_notls = pkgs.nng.overrideAttrs (old: {
cmakeFlags = (old.cmakeFlags or []) ++ [ "-DNNG_ENABLE_TLS=OFF" ];
cmakeFlags = (old.cmakeFlags or [ ]) ++ [ "-DNNG_ENABLE_TLS=OFF" ];
});
hsBuildInputs = [
ghc966
cabal_install
pkgs.haskellPackages.alex
pkgs.haskellPackages.ghcid
pkgs.haskellPackages.happy
pkgs.haskellPackages.pretty-show
];
......@@ -47,7 +49,7 @@ rec {
libpqxx
libsodium
nng_notls
nil # nix language server
nil # nix language server
pcre
pkg-config
postgresql
......@@ -56,9 +58,9 @@ rec {
zlib
zeromq
curl
] ++ ( lib.optionals stdenv.isDarwin [
darwin.apple_sdk.frameworks.Accelerate
]);
] ++ (lib.optionals stdenv.isDarwin [
darwin.apple_sdk.frameworks.Accelerate
]);
libPaths = pkgs.lib.makeLibraryPath nonhsBuildInputs;
shellHook = ''
export LD_LIBRARY_PATH="${pkgs.gfortran.cc.lib}:${libPaths}:$LD_LIBRARY_PATH"
......
......@@ -55,7 +55,7 @@ defaultSettingsFile :: SettingsFile
defaultSettingsFile = SettingsFile "gargantext-settings.toml"
-- | Run Cmd Sugar for the Repl (GHCI)
runCmdRepl :: Show err => CmdRandom DevEnv err a -> IO a
runCmdRepl :: (Typeable err, Show err) => CmdRandom DevEnv err a -> IO a
runCmdRepl f = withDevEnv defaultSettingsFile $ \env -> runCmdDev env f
runCmdReplServantErr :: CmdRandom DevEnv ServerError a -> IO a
......@@ -65,7 +65,7 @@ runCmdReplServantErr = runCmdRepl
-- the command.
-- This function is constrained to the DevEnv rather than
-- using HasConnectionPool and HasRepoVar.
runCmdDev :: Show err => DevEnv -> CmdRandom DevEnv err a -> IO a
runCmdDev :: (Typeable err, Show err) => DevEnv -> CmdRandom DevEnv err a -> IO a
runCmdDev env f =
either (fail . show) pure =<< runCmd env f
......
......@@ -257,6 +257,8 @@ addToCorpusWithTempFile user cid nwtf jobHandle = do
-- TODO Add progress (jobStatus) update for docs - this is a
-- long action
markStarted (fromIntegral count) jobHandle
let docsC' = zipSources (yieldMany [1..]) (transPipe liftBase docsC)
.| mapMC (\(idx, doc) ->
if idx > limit then do
......
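The hunk above pairs every streamed document with a running index via zipSources, so progress can be reported and a document limit enforced. A minimal, self-contained sketch of that pattern follows; the document values, the limit, and the use of filterC instead of the original mapMC branch are illustrative only.

import Conduit

main :: IO ()
main = do
  let docsC = yieldMany ["doc-a", "doc-b", "doc-c"] :: ConduitT () String IO ()
      limit = 2 :: Int
  kept <- runConduit $
       zipSources (yieldMany [(1 :: Int) ..]) docsC
    .| filterC (\(idx, _doc) -> idx <= limit)  -- drop documents past the limit
    .| mapC snd
    .| sinkList
  print kept  -- ["doc-a","doc-b"]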
......@@ -321,23 +321,29 @@ makeLenses ''APIsConfig
data NotificationsConfig =
NotificationsConfig { _nc_central_exchange_bind :: ~T.Text
, _nc_central_exchange_connect :: ~T.Text
, _nc_dispatcher_bind :: ~T.Text
, _nc_dispatcher_connect :: ~T.Text }
NotificationsConfig { _nc_central_exchange_bind :: ~T.Text
, _nc_central_exchange_connect :: ~T.Text
, _nc_ce_send_timeout_ms :: ~Int
, _nc_dispatcher_bind :: ~T.Text
, _nc_dispatcher_connect :: ~T.Text
, _nc_dispatcher_send_timeout_ms :: ~Int
, _nc_dispatcher_throttle_ms :: ~Int }
deriving (Show, Eq)
instance FromValue NotificationsConfig where
fromValue = parseTableFromValue $ do
(_nc_central_exchange_bind, _nc_central_exchange_connect) <-
(_nc_central_exchange_bind, _nc_central_exchange_connect, _nc_ce_send_timeout_ms) <-
reqKeyOf "central-exchange" $ parseTableFromValue $ do
b <- reqKey "bind"
c <- reqKey "connect"
pure (b, c)
(_nc_dispatcher_bind, _nc_dispatcher_connect) <-
t <- reqKey "send_timeout_ms"
pure (b, c, t)
(_nc_dispatcher_bind, _nc_dispatcher_connect, _nc_dispatcher_send_timeout_ms, _nc_dispatcher_throttle_ms) <-
reqKeyOf "dispatcher" $ parseTableFromValue $ do
b <- reqKey "bind"
c <- reqKey "connect"
pure (b, c)
t <- reqKey "send_timeout_ms"
tt <- reqKey "throttle_ms"
pure (b, c, t, tt)
return $ NotificationsConfig { .. }
instance ToValue NotificationsConfig where
toValue = defaultTableToValue
......@@ -345,8 +351,11 @@ instance ToTable NotificationsConfig where
toTable (NotificationsConfig { .. }) =
table [ "central-exchange" .=
table [ "bind" .= _nc_central_exchange_bind
, "connect" .= _nc_central_exchange_connect ]
, "connect" .= _nc_central_exchange_connect
, "send_timeout_ms" .= _nc_ce_send_timeout_ms ]
, "dispatcher" .=
table [ "bind" .= _nc_dispatcher_bind
, "connect" .= _nc_dispatcher_connect ]
, "connect" .= _nc_dispatcher_connect
, "send_timeout_ms" .= _nc_dispatcher_send_timeout_ms
, "throttle" .= _nc_dispatcher_throttle_ms ]
]
......@@ -38,8 +38,13 @@ type WorkerName = Text
data WorkerSettings =
WorkerSettings {
_wsDatabase :: !PGS.ConnectInfo
-- After this number of seconds, the job will be available again.
-- | default job timeout, in seconds
, _wsDefaultJobTimeout :: ~Int
-- | default "long" job timeout, in seconds
, _wsLongJobTimeout :: ~Int
-- After this number of seconds, the job will be available again.
-- You can set a timeout for each job individually and this is the
-- preferred method over using defaultVt.
, _wsDefaultVisibilityTimeout :: PGMQ.VisibilityTimeout
......@@ -53,8 +58,12 @@ instance FromValue WorkerSettings where
dbConfig <- reqKey "database"
_wsDefinitions <- reqKey "definitions"
_wsDefaultVisibilityTimeout <- reqKey "default_visibility_timeout"
_wsDefaultJobTimeout <- reqKey "default_job_timeout"
_wsLongJobTimeout <- reqKey "long_job_timeout"
defaultDelay <- reqKey "default_delay"
return $ WorkerSettings { _wsDatabase = unTOMLConnectInfo dbConfig
, _wsDefaultJobTimeout
, _wsLongJobTimeout
, _wsDefinitions
, _wsDefaultVisibilityTimeout
, _wsDefaultDelay = B.TimeoutS defaultDelay }
......@@ -63,6 +72,8 @@ instance ToValue WorkerSettings where
instance ToTable WorkerSettings where
toTable (WorkerSettings { .. }) =
table [ "database" .= TOMLConnectInfo _wsDatabase
, "default_job_timeout" .= _wsDefaultJobTimeout
, "long_job_timeout" .= _wsLongJobTimeout
, "default_visibility_timeout" .= _wsDefaultVisibilityTimeout
, "default_delay" .= B._TimeoutS _wsDefaultDelay
, "definitions" .= _wsDefinitions ]
......
......@@ -74,7 +74,7 @@ gServer cfg = do
-- C.putStrLn $ "[central_exchange] " <> r
atomically $ TChan.writeTChan tChan r
where
NotificationsConfig{..} = cfg ^. gc_notifications_config
nc@NotificationsConfig{..} = cfg ^. gc_notifications_config
log_cfg = cfg ^. gc_logging
worker s_dispatcher tChan = do
withLogger log_cfg $ \ioLogger -> do
......@@ -99,29 +99,24 @@ gServer cfg = do
-- process, independent of the server.
-- send the same message that we received
-- void $ sendNonblocking s_dispatcher r
sendTimeout ioLogger s_dispatcher r
sendTimeout nc ioLogger s_dispatcher r
Just (UpdateWorkerProgress _ji _jl) -> do
-- $(logLoc) ioLogger DEBUG $ "[central_exchange] update worker progress: " <> show ji <> ", " <> show jl
sendTimeout ioLogger s_dispatcher r
sendTimeout nc ioLogger s_dispatcher r
Just Ping -> do
sendTimeout ioLogger s_dispatcher r
sendTimeout nc ioLogger s_dispatcher r
Nothing ->
$(logLoc) ioLogger ERROR $ "[central_exchange] cannot decode message: " <> show r
-- | A static send timeout in microseconds.
send_timeout_us :: Int
send_timeout_us = 50_000
-- | Sends the given payload ensure the send doesn't take more than the static
-- 'send_timeout_ns', logging a message if the timeouts kicks in.
sendTimeout :: Sender a => Logger IO -> Socket a -> ByteString -> IO ()
sendTimeout ioLogger sock payload = withFrozenCallStack $ do
timeoutKickedIn <- timeout send_timeout_us $ send sock $ payload
-- | Sends the given payload, ensuring the send doesn't take more than
-- '_nc_ce_send_timeout_ms', logging a message if the timeout kicks in.
sendTimeout :: Sender a => NotificationsConfig -> Logger IO -> Socket a -> ByteString -> IO ()
sendTimeout (NotificationsConfig { _nc_ce_send_timeout_ms }) ioLogger sock payload = withFrozenCallStack $ do
timeoutKickedIn <- timeout (_nc_ce_send_timeout_ms * 1000) $ send sock $ payload
case timeoutKickedIn of
Nothing ->
$(logLoc) ioLogger ERROR $ "[central_exchange] couldn't send msg in timely fashion."
Just () ->
$(logLoc) ioLogger DEBUG $ "[central_exchange] message sent."
Just () -> pure ()
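For reference, System.Timeout.timeout expects microseconds, hence the `* 1000` applied to the configured millisecond values above. A tiny standalone sketch of that conversion; the threadDelay standing in for the nanomsg send is hypothetical.

import System.Timeout (timeout)
import Control.Concurrent (threadDelay)

main :: IO ()
main = do
  let sendTimeoutMs = 200 :: Int          -- e.g. _nc_ce_send_timeout_ms
  -- pretend the send takes 1 second, i.e. longer than the 200 ms budget
  res <- timeout (sendTimeoutMs * 1000) (threadDelay 1000000)
  case res of
    Nothing -> putStrLn "send timed out (the real code logs an ERROR here)"
    Just () -> putStrLn "send completed in time"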
notify :: HasCallStack => GargConfig -> CEMessage -> IO ()
notify cfg ceMessage = withLogger log_cfg $ \ioLogger -> do
......@@ -130,12 +125,11 @@ notify cfg ceMessage = withLogger log_cfg $ \ioLogger -> do
connectEndpoint <- connect s $ T.unpack _nc_central_exchange_connect
let do_work = do
let str = Aeson.encode ceMessage
$(logLoc) ioLogger DEBUG $ "[central_exchange] sending to " <> _nc_central_exchange_connect
$(logLoc) ioLogger DEBUG $ "[central_exchange] sending: " <> (TE.decodeUtf8 $ BSL.toStrict str)
-- err <- sendNonblocking s $ BSL.toStrict str
-- putText $ "[notify] err: " <> show err
sendTimeout ioLogger s (BSL.toStrict str)
sendTimeout nc ioLogger s (BSL.toStrict str)
do_work `finally` shutdown s connectEndpoint
where
NotificationsConfig { _nc_central_exchange_connect } = cfg ^. gc_notifications_config
nc@NotificationsConfig { _nc_central_exchange_connect } = cfg ^. gc_notifications_config
log_cfg = cfg ^. gc_logging
......@@ -31,16 +31,19 @@ import Data.Aeson qualified as Aeson
import Data.ByteString.Lazy qualified as BSL
import Data.Text qualified as T
import DeferredFolds.UnfoldlM qualified as UnfoldlM
import Gargantext.Core.Config
( GargConfig, LogConfig, gc_logging, gc_notifications_config )
import Gargantext.Core.Config.Types (NotificationsConfig(..))
import Gargantext.Core.Notifications.CentralExchange.Types qualified as CETypes
import Gargantext.Core.Notifications.Dispatcher.Types
import Gargantext.Core.Worker.Types (JobInfo(..))
import Gargantext.Prelude
import Gargantext.System.Logging
( HasLogger(logMsg), LogLevel(..), withLogger, logLoc )
import Nanomsg (Pull(..), bind, recv, withSocket)
import Network.WebSockets qualified as WS
import StmContainers.Set qualified as SSet
import Gargantext.Core.Config
import Gargantext.System.Logging
import System.Timeout (timeout)
{-
......@@ -84,7 +87,8 @@ dispatcherListener config subscriptions = do
-- NOTE I'm not sure that we need more than 1 worker here, but in
-- theory, the worker can perform things like user authentication,
-- DB queries etc so it can be slow sometimes.
Async.withAsync (throttle 500_000 throttleTChan (sendDataMessageThrottled log_cfg)) $ \_ -> do
Async.withAsync (throttle (_nc_dispatcher_throttle_ms * 1000) throttleTChan
(sendDataMessageThrottled nc log_cfg)) $ \_ -> do
void $ Async.concurrently (Async.replicateConcurrently 5 $ worker tChan throttleTChan) $ do
forever $ do
-- putText "[dispatcher_listener] receiving"
......@@ -92,7 +96,7 @@ dispatcherListener config subscriptions = do
-- C.putStrLn $ "[dispatcher_listener] " <> r
atomically $ TChan.writeTChan tChan r
where
NotificationsConfig { _nc_dispatcher_bind } = config ^. gc_notifications_config
nc@NotificationsConfig { _nc_dispatcher_bind, _nc_dispatcher_throttle_ms } = config ^. gc_notifications_config
log_cfg = config ^. gc_logging
worker tChan throttleTChan = withLogger log_cfg $ \ioL -> do
tId <- myThreadId
......@@ -164,11 +168,19 @@ sendNotification throttleTChan ceMessage sub = do
-- | The "true" message sending to websocket. After it was withheld
-- for a while (for throttling), it is finally sent here
sendDataMessageThrottled :: LogConfig -> (WS.Connection, WS.DataMessage) -> IO ()
sendDataMessageThrottled log_cfg (conn, msg) = do
withLogger log_cfg $ \ioL ->
sendDataMessageThrottled :: NotificationsConfig -> LogConfig -> (WS.Connection, WS.DataMessage) -> IO ()
sendDataMessageThrottled (NotificationsConfig { _nc_dispatcher_send_timeout_ms }) log_cfg (conn, msg) = do
withLogger log_cfg $ \ioL -> do
logMsg ioL DEBUG $ "[sendDataMessageThrottled] dispatching notification: " <> show msg
WS.sendDataMessage conn msg
-- | We need a timeout here for the following reason:
-- when a message is sent and the user disconnects the WS
-- connection (e.g. refreshes the page), it seems that this message sending hangs.
-- We don't want to block the thread indefinitely.
timeoutKickedIn <- timeout (_nc_dispatcher_send_timeout_ms * 1000) $ WS.sendDataMessage conn msg
case timeoutKickedIn of
Nothing ->
$(logLoc) ioL ERROR $ "[sendMessageThrottled] couldn't send msg in timely fashion."
Just _ -> pure ()
-- | Custom filtering of list of Subscriptions based on
......
......@@ -232,7 +232,6 @@ performAction env _state bm = do
-- | Uses temporary file to add documents into corpus
AddCorpusTempFileAsync { .. } -> runWorkerMonad env $ do
-- TODO CES.finally
$(logLocM) DEBUG "[performAction] add to corpus with temporary file"
CES.finally (addToCorpusWithTempFile _actf_user _actf_cid _actf_args jh)
(removeLargeObject $ _wtf_file_oid _actf_args)
......
......@@ -15,6 +15,7 @@ module Gargantext.Core.Worker.Jobs where
import Async.Worker qualified as W
import Async.Worker.Types qualified as WT
import Control.Lens (view)
import Gargantext.Core.Config (gc_database_config, gc_worker, HasConfig(..), GargConfig, gc_logging)
import Gargantext.Core.Config.Worker (WorkerSettings(..), WorkerDefinition(..))
......@@ -44,25 +45,34 @@ sendJobWithCfg gcConfig job = do
Just wd -> do
b <- initBrokerWithDBCreate (gcConfig ^. gc_database_config) ws
let queueName = _wdQueue wd
let job' = (updateJobData job $ W.mkDefaultSendJob' b queueName job) { W.delay = _wsDefaultDelay }
let job' = (updateJobData ws job $ W.mkDefaultSendJob' b queueName job) { W.delay = _wsDefaultDelay }
withLogger (gcConfig ^. gc_logging) $ \ioL ->
$(logLoc) ioL DEBUG $ "[sendJob] sending job " <> show job <> " (delay " <> show (W.delay job') <> ")"
W.sendJob' job'
-- | We want to fine-tune job metadata parameters for each job type
updateJobData :: Job -> SendJob -> SendJob
updateJobData (AddCorpusWithQuery {}) sj = sj { W.timeout = 3000 }
updateJobData (AddToAnnuaireWithForm {}) sj = sj { W.timeout = 3000 }
updateJobData (AddWithFile {}) sj = sj { W.timeout = 3000 }
updateJobData (DocumentsFromWriteNodes {}) sj = sj { W.timeout = 3000 }
updateJobData (FrameCalcUpload {}) sj = sj { W.timeout = 3000 }
updateJobData (JSONPost {}) sj = sj { W.timeout = 3000 }
updateJobData (NgramsPostCharts {}) sj = sj { W.timeout = 3000 }
updateJobData (RecomputeGraph {}) sj = sj { W.timeout = 3000 }
updateJobData (UpdateNode {}) sj = sj { W.timeout = 3000 }
updateJobData (UploadDocument {}) sj = sj { W.timeout = 3000 }
updateJobData (ImportRemoteDocuments {}) sj = sj { W.timeout = 3000 }
updateJobData (ImportRemoteTerms {}) sj = sj { W.timeout = 3000 }
updateJobData :: WorkerSettings -> Job -> SendJob -> SendJob
updateJobData ws (AddCorpusTempFileAsync {}) sj = withLongTimeout ws $ sj { W.toStrat = WT.TSDelete
, W.resendOnKill = False }
updateJobData ws (AddCorpusWithQuery {}) sj = withLongTimeout ws sj
updateJobData ws (AddToAnnuaireWithForm {}) sj = withLongTimeout ws sj
updateJobData ws (AddWithFile {}) sj = withLongTimeout ws $ sj { W.toStrat = WT.TSDelete
, W.resendOnKill = False }
updateJobData ws (DocumentsFromWriteNodes {}) sj = withLongTimeout ws sj
updateJobData ws (FrameCalcUpload {}) sj = withLongTimeout ws sj
updateJobData ws (JSONPost {}) sj = withLongTimeout ws $ sj { W.toStrat = WT.TSDelete
, W.resendOnKill = False }
updateJobData ws (NgramsPostCharts {}) sj = withLongTimeout ws sj
updateJobData ws (RecomputeGraph {}) sj = withLongTimeout ws sj
updateJobData ws (UpdateNode {}) sj = withLongTimeout ws sj
updateJobData ws (UploadDocument {}) sj = withLongTimeout ws sj
updateJobData ws (ImportRemoteDocuments {}) sj = withLongTimeout ws sj
updateJobData ws (ImportRemoteTerms {}) sj = withLongTimeout ws sj
-- | ForgotPasswordAsync, PostNodeAsync
updateJobData _ sj = sj { W.resendOnKill = False
, W.timeout = 60 }
updateJobData ws _ sj = withDefaultTimeout ws $ sj { W.resendOnKill = False }
withDefaultTimeout :: WorkerSettings -> SendJob -> SendJob
withDefaultTimeout (WorkerSettings { _wsDefaultJobTimeout }) sj = sj { W.timeout = _wsDefaultJobTimeout }
withLongTimeout :: WorkerSettings -> SendJob -> SendJob
withLongTimeout (WorkerSettings { _wsLongJobTimeout }) sj = sj { W.timeout = _wsLongJobTimeout }
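A self-contained analogue of the pattern above, with a local record standing in for the async-worker SendJob (field names and values are illustrative): the per-job record update is applied first, then the timeout helper overrides only the timeout field, so both tweaks survive.

data FakeSendJob = FakeSendJob
  { fsjTimeout      :: Int
  , fsjResendOnKill :: Bool
  } deriving Show

-- analogue of 'withLongTimeout': only the timeout field is replaced
withLongTimeout' :: Int -> FakeSendJob -> FakeSendJob
withLongTimeout' long sj = sj { fsjTimeout = long }

main :: IO ()
main = do
  let base = FakeSendJob { fsjTimeout = 60, fsjResendOnKill = True }
  -- e.g. the AddWithFile branch: disable resend-on-kill, then apply the long timeout
  print (withLongTimeout' 3000 (base { fsjResendOnKill = False }))
  -- FakeSendJob {fsjTimeout = 3000, fsjResendOnKill = False}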
......@@ -32,7 +32,7 @@ import Gargantext.Database.GargDB qualified as GargDB
import Gargantext.Database.Prelude
import Gargantext.Database.Query.Table.Node (getNodeWith)
import Gargantext.Database.Query.Table.Node qualified as N (getNode, deleteNode)
import Gargantext.Database.Query.Table.Node.Error (HasNodeError, errorWith)
import Gargantext.Database.Query.Table.Node.Error (HasNodeError, nodeErrorWith)
import Gargantext.Database.Schema.Node
import Gargantext.Prelude
......@@ -51,7 +51,7 @@ deleteNode u nodeId = do
(num, upd_node, cleanup) <- runDBTx $ do
node' <- N.getNode nodeId
(rows, clean_it) <- case view node_typename node' of
nt | nt == toDBid NodeUser -> errorWith "[G.D.A.D.deleteNode] Not allowed to delete NodeUser (yet)"
nt | nt == toDBid NodeUser -> nodeErrorWith "[G.D.A.D.deleteNode] Not allowed to delete NodeUser (yet)"
nt | nt == toDBid NodeTeam -> do
uId <- getUserId u
if _node_user_id node' == uId
......
......@@ -55,6 +55,7 @@ module Gargantext.Database.Action.Flow -- (flowDatabase, ngrams2list)
where
import Conduit
import Control.Exception.Safe qualified as CES
import Control.Lens ( to, view )
import Control.Exception.Safe (catch, MonadCatch)
import Data.Conduit qualified as C
......@@ -173,7 +174,7 @@ flowDataText :: forall env err m.
, HasTreeError err
, HasValidationError err
, MonadJobStatus m
, MonadCatch m
, CES.MonadCatch m
, HasCentralExchangeNotification env
)
=> User
......@@ -208,7 +209,7 @@ flowAnnuaire :: ( IsDBCmd env err m
, HasTreeError err
, HasValidationError err
, MonadJobStatus m
, MonadCatch m
, CES.MonadCatch m
, HasCentralExchangeNotification env )
=> MkCorpusUser
-> TermType Lang
......@@ -228,7 +229,7 @@ flowCorpusFile :: ( IsDBCmd env err m
, HasTreeError err
, HasValidationError err
, MonadJobStatus m
, MonadCatch m
, CES.MonadCatch m
, HasCentralExchangeNotification env )
=> MkCorpusUser
-> TermType Lang
......@@ -479,7 +480,7 @@ extractNgramsFromDocument :: ( UniqParameters doc
, ExtractNgrams m doc
, IsDBCmd err env m
, MonadLogger m
, MonadCatch m
, CES.MonadCatch m
)
=> NLPServerConfig
-> TermType Lang
......@@ -525,7 +526,7 @@ extractNgramsFromDocuments :: forall doc env err m.
, ExtractNgrams m doc
, IsDBCmd env err m
, MonadLogger m
, MonadCatch m
, CES.MonadCatch m
)
=> NLPServerConfig
-> TermType Lang
......
......@@ -26,7 +26,7 @@ import Gargantext.Database.Admin.Config (hasNodeType, isInNodeTypes)
import Gargantext.Database.Admin.Types.Hyperdata.Any (HyperdataAny(..))
import Gargantext.Database.Admin.Types.Node
import Gargantext.Database.Query.Table.Node (getNode, getNodesWith)
import Gargantext.Database.Query.Table.Node.Error (HasNodeError, errorWith)
import Gargantext.Database.Query.Table.Node.Error (HasNodeError, nodeErrorWith)
import Gargantext.Database.Query.Table.User
import Gargantext.Database.Query.Tree.Root (getRootId)
import Gargantext.Database.Schema.Node
......@@ -98,10 +98,10 @@ shareNodeWith (ShareNodeWith_User NodeFolderShared u) n = do
nodeToCheck <- getNode n
userIdCheck <- getUserId u
if not (hasNodeType nodeToCheck NodeTeam)
then errorWith "[G.D.A.S.shareNodeWith] Can share node Team only"
then nodeErrorWith "[G.D.A.S.shareNodeWith] Can share node Team only"
else
if (view node_user_id nodeToCheck == userIdCheck)
then errorWith "[G.D.A.S.shareNodeWith] Can share to others only"
then nodeErrorWith "[G.D.A.S.shareNodeWith] Can share to others only"
else do
folderSharedId <- getFolderId u NodeFolderShared
ret <- shareNode (SourceId folderSharedId) (TargetId n)
......@@ -111,7 +111,7 @@ shareNodeWith (ShareNodeWith_User NodeFolderShared u) n = do
shareNodeWith (ShareNodeWith_Node NodeFolderPublic nId) n = do
nodeToCheck <- getNode n
if not (isInNodeTypes nodeToCheck publicNodeTypes)
then errorWith $ "[G.D.A.S.shareNodeWith] Can share this nodesTypes only: "
then nodeErrorWith $ "[G.D.A.S.shareNodeWith] Can share this nodesTypes only: "
<> (show publicNodeTypes)
else do
folderToCheck <- getNode nId
......@@ -120,9 +120,9 @@ shareNodeWith (ShareNodeWith_Node NodeFolderPublic nId) n = do
ret <- shareNode (SourceId nId) (TargetId n)
let msgs = [CE.UpdateTreeFirstLevel nId, CE.UpdateTreeFirstLevel n]
pure (ret, msgs)
else errorWith "[G.D.A.S.shareNodeWith] Can share NodeWith NodeFolderPublic only"
else nodeErrorWith "[G.D.A.S.shareNodeWith] Can share NodeWith NodeFolderPublic only"
shareNodeWith _ _ = errorWith "[G.D.A.S.shareNodeWith] Not implemented for this NodeType"
shareNodeWith _ _ = nodeErrorWith "[G.D.A.S.shareNodeWith] Not implemented for this NodeType"
------------------------------------------------------------------------
getFolderId :: HasNodeError err => User -> NodeType -> DBQuery err x NodeId
......@@ -130,7 +130,7 @@ getFolderId u nt = do
rootId <- getRootId u
s <- getNodesWith rootId HyperdataAny (Just nt) Nothing Nothing
case head s of
Nothing -> errorWith "[G.D.A.S.getFolderId] No folder shared found"
Nothing -> nodeErrorWith "[G.D.A.S.getFolderId] No folder shared found"
Just f -> pure (_node_id f)
------------------------------------------------------------------------
......
......@@ -72,7 +72,7 @@ getUsername user@(UserDBId _) = do
users <- getUsersWithId user
case head users of
Just u -> pure $ userLight_username u
Nothing -> errorWith "G.D.A.U.getUserName: User not found with that id"
Nothing -> nodeErrorWith "G.D.A.U.getUserName: User not found with that id"
getUsername (RootId rid) = do
n <- getNode rid
getUsername (UserDBId $ _node_user_id n)
......
......@@ -2,15 +2,16 @@
module Gargantext.Database.Class where
import Control.Exception.Safe (MonadCatch)
import Control.Lens (Getter)
import Control.Monad.Random ( MonadRandom )
import Control.Monad.Trans.Control (MonadBaseControl)
import Data.Pool (Pool)
import Database.PostgreSQL.Simple (Connection)
import Gargantext.Core.Notifications.CentralExchange.Types qualified as CET
import Gargantext.Core.Config (HasConfig(..))
import Gargantext.Core.Mail.Types (HasMail)
import Gargantext.Core.NLP (HasNLPServer)
import Gargantext.Core.Notifications.CentralExchange.Types qualified as CET
import Gargantext.Prelude
-- $typesAndConstraints
......@@ -61,6 +62,13 @@ type IsCmd env err m =
( MonadReader env m
, MonadError err m
, MonadBaseControl IO m
-- The 3 constraints below are needed because in the transactional code
-- we can throw 'err' as an exception, which requires 'err' to be an 'Exception'
-- and thus have 'Show' and 'Typeable' instances. The fact that we can catch
-- exceptions in the evaluator of the 'DBTx' monad code means we need 'MonadCatch'.
, Typeable err
, Show err
, MonadCatch m
)
-- | Only the /minimum/ amount of class constraints required
......
......@@ -89,7 +89,8 @@ withConn k = do
pool <- view connPool
liftBase $ withResource pool (liftBase . k)
runCmd :: env
runCmd :: (Show err, Typeable err)
=> env
-> CmdRandom env err a
-> IO (Either err a)
runCmd env m = runExceptT $ runReaderT m env
......
......@@ -19,7 +19,7 @@ module Gargantext.Database.Query.Table.Node.Error (
, HasNodeError(..)
-- * Functions
, errorWith
, nodeErrorWith
, nodeError
, nodeCreationError
, nodeLookupError
......@@ -28,14 +28,15 @@ module Gargantext.Database.Query.Table.Node.Error (
) where
import Control.Lens (Prism', (#), (^?))
import Control.Lens qualified as L
import Data.Aeson (object)
import Data.Text qualified as T
import Gargantext.Core.Types.Individu ( Username )
import Gargantext.Database.Admin.Types.Node (ListId, NodeId(..), ContextId, UserId, ParentId)
import Gargantext.Database.Transactional
import Gargantext.Prelude hiding (sum, head)
import Prelude hiding (null, id, map, sum, show)
import Prelude qualified
import Gargantext.Database.Transactional
data NodeCreationError
= UserParentAlreadyExists UserId ParentId
......@@ -89,6 +90,9 @@ data NodeError = NoListFound ListId
| MoveError NodeId NodeId T.Text
| NodeNotExportable NodeId T.Text
instance HasNodeError NodeError where
_NodeError = L.prism' Prelude.id Just
instance Prelude.Show NodeError
where
show (NoListFound {}) = "No list found"
......@@ -106,6 +110,8 @@ instance Prelude.Show NodeError
show (MoveError s t reason) = "Moving " <> show s <> " to " <> show t <> " failed: " <> T.unpack reason
show (NodeNotExportable nid reason) = "Node " <> show nid <> " is not exportable: " <> show reason
instance Exception NodeError
instance ToJSON NodeError where
toJSON (DoesNotExist n) =
object [ ( "error", "Node does not exist" )
......@@ -135,8 +141,8 @@ instance ToJSON NodeError where
class HasNodeError e where
_NodeError :: Prism' e NodeError
errorWith :: HasNodeError e => Text -> DBTx e r a
errorWith x = nodeError (NodeError $ toException $ userError $ T.unpack x)
nodeErrorWith :: HasNodeError e => Text -> DBTx e r a
nodeErrorWith x = nodeError (NodeError $ toException $ userError $ T.unpack x)
nodeError :: HasNodeError e => NodeError -> DBTx e r a
nodeError ne = dbFail $ _NodeError # ne
......
......@@ -37,7 +37,7 @@ getRootId :: (HasNodeError err) => User -> DBQuery err x NodeId
getRootId u = do
maybeRoot <- head <$> getRoot u
case maybeRoot of
Nothing -> errorWith "[G.D.Q.T.R.getRootId] No root id"
Nothing -> nodeErrorWith "[G.D.Q.T.R.getRootId] No root id"
Just r -> pure (_node_id r)
getRoot :: User -> DBQuery err x [Node HyperdataUser]
......@@ -115,7 +115,7 @@ mkCorpus :: (HasNodeError err, MkCorpus a)
mkCorpus cName c rootId userId = do
c' <- mk (Just cName) c rootId userId
_tId <- case head c' of
Nothing -> errorWith "[G.D.Q.T.Root.getOrMk...] mk Corpus failed"
Nothing -> nodeErrorWith "[G.D.Q.T.Root.getOrMk...] mk Corpus failed"
Just c'' -> insertDefaultNode NodeTexts c'' userId
corpusId <- maybe (nodeError NoCorpusFound) pure (head c')
......
......@@ -33,8 +33,10 @@ module Gargantext.Database.Transactional (
, mkOpaInsert
, mkOpaDelete
-- * Throwing errors (which allows rollbacks)
-- * Throwing and catching errors (which allows rollbacks)
, dbFail
, catchDBTxError
, handleDBTxError
) where
import Control.Exception.Safe qualified as Safe
......@@ -42,6 +44,7 @@ import Control.Lens
import Control.Monad.Base
import Control.Monad.Error.Class
import Control.Monad.Free
import Control.Monad.Free.Church
import Control.Monad.Trans.Control (MonadBaseControl, control)
import Data.Int (Int64)
import Data.Pool (withResource, Pool)
......@@ -51,7 +54,12 @@ import Database.PostgreSQL.Simple.Transaction qualified as PG
import Gargantext.Database.Class
import Opaleye
import Prelude
import Control.Monad.Free.Church
data DBTxException err
= RollbackRequested err
deriving (Show, Eq)
instance (Show err, Safe.Typeable err) => Safe.Exception (DBTxException err) where
data DBOperation = DBRead | DBWrite
......@@ -133,7 +141,7 @@ type DBReadOnly err r a = DBTx err DBRead a
-- Strict constraints to perform transactional read and writes.
-- Isomorphic to a DBCmd, but it doesn't impose a 'HasConfig' constraint, as
-- values can always be passed as parameters of a query or update.
type DBTxCmd err a = forall m env. (IsCmd env err m, HasConnectionPool env) => m a
type DBTxCmd err a = forall m env. (IsCmd env err m, HasConnectionPool env, Safe.MonadCatch m) => m a
instance Functor (DBTransactionOp err r) where
fmap f = \case
......@@ -179,23 +187,37 @@ withReadOnlyTransactionM conn action =
-- | Run a PostgreSQL transaction, suitable for operations that mixes read and writes,
-- and actually the only choice available to run 'DBUpdate' operations.
runDBTx :: DBUpdate err a -> DBTxCmd err a
runDBTx :: (Show err, Safe.Typeable err) => DBUpdate err a -> DBTxCmd err a
runDBTx (DBTx m) = do
pool <- view connPool
withResourceM pool $ \conn -> withTransactionM conn $ foldF (evalOp conn) m
withResourceM pool $ \conn ->
(withTransactionM conn $ foldF (evalOp conn) m)
-- IMPORTANT: We are catching the exception (after 'withTransactionM' has run, so rollback already
-- happened) and we are rethrowing this via 'throwError', such that application code can catch this
-- via 'catchDBTxError'.
-- /NOTA BENE/: the parentheses around 'withTransactionM' ARE NOT OPTIONAL! If we remove them, we
-- would be catching this exception from 'foldF', meaning that we wouldn't let 'withTransactionM'
-- handle it, resulting in ROLLBACK NOT HAPPENING!
`Safe.catches`
[ Safe.Handler $ \(RollbackRequested err) -> throwError err ]
-- | Runs a DB query.
-- /NOTE/ the input type is 'DBReadOnly', i.e. a transaction where /all/
-- the operations are 'DBRead'. This makes impossible to sneak in updates
-- into otherwise read-only queries.
runDBQuery :: DBReadOnly err r a -> DBTxCmd err a
runDBQuery :: (Show err, Safe.Typeable err) => DBReadOnly err r a -> DBTxCmd err a
runDBQuery (DBTx m) = do
pool <- view connPool
withResourceM pool $ \conn -> withReadOnlyTransactionM conn $ foldF (evalOp conn) m
withResourceM pool $ \conn ->
(withReadOnlyTransactionM conn $ foldF (evalOp conn) m)
-- IMPORTANT: Same proviso as for 'runDBTx'. Technically speaking we wouldn't need
-- to throw and catch things for a query, but we are doing so for consistency with 'runDBTx'.
`Safe.catches`
[ Safe.Handler $ \(RollbackRequested err) -> throwError err ]
-- | The main evaluator, turns our pure operations into side-effects that run into the
-- 'DBCmd'.
evalOp :: PG.Connection -> DBTransactionOp err r a -> DBTxCmd err a
evalOp :: (Show err, Safe.Typeable err) => PG.Connection -> DBTransactionOp err r a -> DBTxCmd err a
evalOp conn = \case
PGQuery qr q cc -> cc <$> liftBase (PG.query conn qr q)
PGUpdate qr a cc -> cc <$> liftBase (PG.execute conn qr a)
......@@ -206,7 +228,7 @@ evalOp conn = \case
OpaInsert ins cc -> cc <$> liftBase (runInsert conn ins)
OpaUpdate upd cc -> cc <$> liftBase (runUpdate conn upd)
OpaDelete del cc -> cc <$> liftBase (runDelete conn del)
DBFail err -> throwError err
DBFail err -> liftBase (Safe.throwIO $ RollbackRequested err)
evalOpaCountQuery :: PG.Connection -> Select a -> IO Int
evalOpaCountQuery conn sel = do
......@@ -228,6 +250,49 @@ queryOne conn q v = do
[ ] -> Safe.throwIO $ userError "queryOne: no result returned. Check your SQL!"
_ -> Safe.throwIO $ userError "queryOne: more than one result returned. Have you used the 'RETURNING' directive?"
{-
Throwing and catching exceptions in a DBTx monad
================================================
It's /VERY/ important to understand the proper way to throw and catch exceptions in a DBTx monad,
as not doing so might lead to footguns.
We need to remember that when we compose 'DBTx' operations we are just writing a DSL which
won't get evaluated until we call either 'runDBTx' or 'runDBQuery'; therefore, if some part of
our transaction throws an error, we won't know about it until then.

There are two types of errors we might have, and it's important to be precise in terminology:

1. IO Exceptions: these are thrown by the evaluators for SQL queries, i.e. we might have
IO errors thrown by wrongly-formatted SQL queries or by the Postgres DB dying on us for any reason.
These exceptions get caught by 'withTransactionM', which allows proper rollback behavior; crucially,
they are then rethrown by 'withTransactionM' and must be caught via classic exception handlers in
upstream code. The crucial point is that even if we don't catch them, the transaction has already
been rolled back successfully.

2. Domain-specific ERRORS (not exceptions, ERRORS!) thrown within a transaction itself via things like
'nodeError' and friends. These are errors thrown because our transaction code didn't go as
planned (look at the implementation of 'insertNodeWithHyperdata' for a concrete example). These errors
are translated by the evaluator into a proper exception, then caught and rethrown via 'throwError'. This
is crucial: being thrown as an exception means 'withTransactionM' can roll back as we expect, while
upstream application code can still handle these errors via 'catchError' and friends.

To facilitate handling of this, we expose 'catchDBTxError' and 'handleDBTxError', which are
just wrappers over 'catchError' -- this is what users should use if they want to handle domain-specific errors.
But the crucial bit, let's state it again, is that rollbacks will happen in both scenarios, which is
what we want.
-}
catchDBTxError :: DBTxCmd err a
-> (err -> DBTxCmd err a)
-> DBTxCmd err a
catchDBTxError = catchError
handleDBTxError :: (err -> DBTxCmd err a)
-> DBTxCmd err a
-> DBTxCmd err a
handleDBTxError = flip catchError
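A standalone sketch of the error flow described in the comment above, using plain ExceptT/IO instead of the real DBTx machinery (all names below are illustrative): a domain error is thrown as an exception inside the "evaluator", caught after the simulated rollback, rethrown with 'throwError', and handled upstream with 'catchError', which is what 'catchDBTxError' wraps.

{-# LANGUAGE ImportQualifiedPost #-}
import Control.Exception.Safe qualified as Safe
import Control.Monad.Except (ExceptT, runExceptT, throwError, catchError)
import Control.Monad.IO.Class (liftIO)

newtype RollbackRequested = RollbackRequested String
  deriving Show
instance Safe.Exception RollbackRequested

-- plays the role of 'runDBTx': run the body, let the exception trigger the
-- (simulated) rollback, then rethrow the domain error via 'throwError'
runFakeTx :: IO a -> ExceptT String IO a
runFakeTx body =
  liftIO body `Safe.catch` \(RollbackRequested err) -> do
    liftIO (putStrLn "ROLLBACK would have happened here")
    throwError err

main :: IO ()
main = do
  res <- runExceptT $
    runFakeTx (Safe.throwIO (RollbackRequested "no list found"))
      `catchError` \err -> liftIO (putStrLn ("handled domain error: " <> err))
  print res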
--
-- Smart constructors
--
......@@ -235,9 +300,6 @@ queryOne conn q v = do
-- we are not exposing for information hiding purposes.
--
dbFail :: err -> DBTx err r b
dbFail = DBTx . liftF . DBFail
mkPGQuery :: (PG.ToRow q, PG.FromRow a)
=> PG.Query
-> q
......@@ -270,3 +332,6 @@ mkOpaInsert a = DBTx $ liftF (OpaInsert a id)
mkOpaDelete :: Delete a -> DBUpdate err a
mkOpaDelete a = DBTx $ liftF (OpaDelete a id)
dbFail :: err -> DBTx err r b
dbFail = DBTx . liftF . DBFail
......@@ -174,15 +174,15 @@
git: "https://gitlab.iscpif.fr/gargantext/gargantext-graph.git"
subdirs:
- "gargantext-graph-core"
- commit: 4a9c709613554eed0189b486de2126c18797088c
- commit: 05c39e424d15149dc32097b3318cb6007e0e7052
git: "https://gitlab.iscpif.fr/gargantext/haskell-bee"
subdirs:
- "haskell-bee-pgmq/"
- commit: 4a9c709613554eed0189b486de2126c18797088c
- commit: 05c39e424d15149dc32097b3318cb6007e0e7052
git: "https://gitlab.iscpif.fr/gargantext/haskell-bee"
subdirs:
- "haskell-bee-tests/"
- commit: 4a9c709613554eed0189b486de2126c18797088c
- commit: 05c39e424d15149dc32097b3318cb6007e0e7052
git: "https://gitlab.iscpif.fr/gargantext/haskell-bee"
subdirs:
- "haskell-bee/"
......@@ -198,7 +198,7 @@
git: "https://gitlab.iscpif.fr/gargantext/haskell-infomap.git"
subdirs:
- .
- commit: 1dd92f0aa8e9f8096064e5656c336e562680f4e3
- commit: 9a869df2842eccc86a0f31a69fb8dc5e5ca218a8
git: "https://gitlab.iscpif.fr/gargantext/haskell-pgmq"
subdirs:
- .
......
......@@ -66,11 +66,18 @@ from = ""
login_type = "Normal"
[notifications]
# We do not hardcode the bind and connect here, because the test infrastructure
# will randomize the connection endpoints via IPC.
central-exchange = { bind = "", connect = "" }
dispatcher = { bind = "", connect = "" }
[notifications.central-exchange]
bind = ""
connect = ""
send_timeout_ms = 200
[notifications.dispatcher]
bind = ""
connect = ""
send_timeout_ms = 500
throttle_ms = 500
[nlp]
......@@ -85,6 +92,11 @@ default_visibility_timeout = 1
# default delay before job is visible to the worker
default_delay = 1
# default timeout (in seconds)
default_job_timeout = 60
# default timeout for "long" jobs (in seconds)
long_job_timeout = 3000
# NOTE This is overridden by Test.Database.Setup
[worker.database]
host = "127.0.0.1"
......
......@@ -13,8 +13,30 @@ import System.Posix.Signals ( keyboardSignal, signalProcessGroup )
import Test.API qualified as API
import Test.Database.Operations qualified as DB
import Test.Database.Transactions qualified as DBT
import Test.Hspec ( hspec, sequential, runIO )
import Test.Hspec
import Test.Server.ReverseProxy qualified as ReverseProxy
import Test.Core.LinearAlgebra qualified as LinearAlgebra
import Test.Core.Notifications qualified as Notifications
import Test.Core.Orchestrator qualified as Orchestrator
import Test.Core.Similarity qualified as Similarity
import Test.Core.Text.Corpus.Query qualified as CorpusQuery
import Test.Core.Text.Corpus.TSV qualified as TSVParser
import Test.Core.Utils qualified as Utils
import Test.Core.Worker qualified as Worker
import Test.Graph.Clustering qualified as Clustering
import Test.Graph.Distance qualified as Distance
import Test.Ngrams.Lang.Occurrences qualified as Occurrences
import Test.Ngrams.NLP qualified as NLP
import Test.Ngrams.Query qualified as NgramsQuery
import Test.Ngrams.Terms qualified as NgramsTerms
import Test.Offline.Errors qualified as Errors
import Test.Offline.JSON qualified as JSON
import Test.Offline.Ngrams qualified as Ngrams
import Test.Offline.Phylo qualified as Phylo
import Test.Offline.Stemming.Lancaster qualified as Lancaster
import Test.Parsers.Date qualified as PD
import Test.Utils.Crypto qualified as Crypto
import Test.Utils.Jobs qualified as Jobs
startCoreNLPServer :: IO (Maybe ProcessHandle)
......@@ -81,4 +103,26 @@ main = do
DB.tests
DBT.tests
DB.nodeStoryTests
runIO $ putText "tests finished"
describe "Utils" $ Utils.test
describe "Graph Clustering" $ Clustering.test
describe "Graph Distance" $ Distance.test
describe "Date split" $ PD.testDateSplit
describe "Crypto" $ Crypto.test
describe "NLP" $ NLP.test
describe "Jobs" $ Jobs.test
describe "Similarity" $ Similarity.test
describe "Notifications" $ Notifications.test
describe "Occurrences" $ Occurrences.test
describe "LinearAlgebra" $ LinearAlgebra.tests
describe "Orchestrator" $ Orchestrator.qcTests
describe "Corpus Query" $ CorpusQuery.tests
describe "TSV Parser" $ TSVParser.tests
describe "Worker" $ Worker.tests
describe "Ngrams Query" $ NgramsQuery.tests
describe "Ngrams Terms" $ NgramsTerms.tests
describe "Offline" $ do
describe "Errors" $ Errors.tests
describe "JSON" $ JSON.tests
describe "Ngrams" $ Ngrams.tests
describe "Phylo" $ Phylo.tests
describe "Lancaster" $ Lancaster.tests
......@@ -38,7 +38,7 @@ import Gargantext.Prelude hiding (get)
import Prelude (fail)
import Servant.Client.Core
import Test.Database.Types
import Test.Tasty.HUnit (Assertion, (@?=))
import Test.HUnit (Assertion, (@?=))
checkEither :: (Show a, Monad m) => m (Either a b) -> m b
checkEither = fmap (either (\x -> panicTrace $ "checkEither:" <> T.pack (show x)) identity)
......
......@@ -19,7 +19,7 @@ import Test.API.Setup
import Test.Hspec (Spec, it, aroundAll, describe, sequential)
import Test.Hspec.Wai.Internal (withApplication)
import Test.Hspec.Expectations.Lifted
import Test.Tasty.HUnit (assertBool)
import Test.HUnit (assertBool)
import Test.Utils
tests :: Spec
......
......@@ -40,7 +40,7 @@ import Test.Utils.Notifications
import Gargantext.System.Logging
import qualified Data.Text.Encoding as TE
import qualified Data.ByteString as BL
import Test.Tasty.HUnit (assertBool)
import Test.HUnit (assertBool)
......
......@@ -8,8 +8,8 @@
module Test.Core.LinearAlgebra where
import Data.Array.Accelerate hiding (Ord, Eq, map, (<=))
import Data.Array.Accelerate.Interpreter qualified as Naive
import Data.Array.Accelerate qualified as A
import Data.Array.Accelerate.Interpreter qualified as Naive
import Data.Massiv.Array qualified as Massiv
import Data.Proxy
import Data.Scientific
......@@ -19,8 +19,9 @@ import Gargantext.Core.Methods.Matrix.Accelerate.Utils qualified as Legacy
import Gargantext.Core.Methods.Similarities.Accelerate.Distributional qualified as Legacy
import Gargantext.Orphans.Accelerate (sliceArray)
import Prelude hiding ((^))
import Test.Tasty
import Test.Tasty.QuickCheck
import Test.Hspec
import Test.Hspec.QuickCheck
import Test.QuickCheck
--
......@@ -97,32 +98,29 @@ testMatrix_04 = SquareMatrix $ fromList (Z :. 8 :. 8) $
-- Main test runner
--
tests :: TestTree
tests = testGroup "LinearAlgebra" [
testProperty "termDivNan" compareTermDivNan
, testProperty "diag" compareDiag
, testProperty "sumRows" compareSumRows
, testProperty "matMaxMini" compareMatMaxMini
, testProperty "sumM_go" compareSumM_go
, testProperty "sumMin_go" compareSumMin_go
, testProperty "matrixEye" compareMatrixEye
, testProperty "diagNull" compareDiagNull
, testGroup "distributional" [
testProperty "reference implementation roundtrips" compareDistributionalImplementations
, testProperty "2x2" (compareDistributional (Proxy @Double) twoByTwo)
, testProperty "7x7" (compareDistributional (Proxy @Double) testMatrix_02)
, testProperty "14x14" (compareDistributional (Proxy @Double) testMatrix_01)
, testProperty "roundtrips" (compareDistributional (Proxy @Double))
]
, testGroup "logDistributional2" [
testProperty "2x2" (compareLogDistributional2 (Proxy @Double) twoByTwo)
, testProperty "7x7" (compareLogDistributional2 (Proxy @Double) testMatrix_02)
, testProperty "8x8" (compareLogDistributional2 (Proxy @Double) testMatrix_04)
, testProperty "11x11" (compareLogDistributional2 (Proxy @Double) testMatrix_03)
, testProperty "14x14" (compareLogDistributional2 (Proxy @Double) testMatrix_01)
,testProperty "roundtrips" (compareLogDistributional2 (Proxy @Double))
]
]
tests :: Spec
tests = describe "LinearAlgebra" $ do
prop "termDivNan" compareTermDivNan
prop "diag" compareDiag
prop "sumRows" compareSumRows
prop "matMaxMini" compareMatMaxMini
prop "sumM_go" compareSumM_go
prop "sumMin_go" compareSumMin_go
prop "matrixEye" compareMatrixEye
prop "diagNull" compareDiagNull
describe "distributional" $ do
prop "reference implementation roundtrips" compareDistributionalImplementations
prop "2x2" (compareDistributional (Proxy @Double) twoByTwo)
prop "7x7" (compareDistributional (Proxy @Double) testMatrix_02)
prop "14x14" (compareDistributional (Proxy @Double) testMatrix_01)
prop "roundtrips" (compareDistributional (Proxy @Double))
describe "logDistributional2" $ do
prop "2x2" (compareLogDistributional2 (Proxy @Double) twoByTwo)
prop "7x7" (compareLogDistributional2 (Proxy @Double) testMatrix_02)
prop "8x8" (compareLogDistributional2 (Proxy @Double) testMatrix_04)
prop "11x11" (compareLogDistributional2 (Proxy @Double) testMatrix_03)
prop "14x14" (compareLogDistributional2 (Proxy @Double) testMatrix_01)
prop "roundtrips" (compareLogDistributional2 (Proxy @Double))
--
-- Tests
......
......@@ -20,8 +20,7 @@ import Gargantext.Core.Notifications.Dispatcher.Types
import Gargantext.Prelude
import Test.Hspec
import Test.Instances ()
import Test.Tasty
import Test.Tasty.QuickCheck qualified as QC
import Test.Hspec.QuickCheck
test :: Spec
......@@ -31,9 +30,9 @@ test = do
let ce = UpdateTreeFirstLevel 15
A.decode (A.encode ce) `shouldBe` (Just ce)
qcTests :: TestTree
qcTests :: Spec
qcTests =
testGroup "Notifications QuickCheck tests" $ do
[ QC.testProperty "CEMessage aeson encoding" $ \m -> A.decode (A.encode (m :: CEMessage)) == Just m
, QC.testProperty "Topic aeson encoding" $ \t -> A.decode (A.encode (t :: Topic)) == Just t
, QC.testProperty "WSRequest aeson encoding" $ \ws -> A.decode (A.encode (ws :: WSRequest)) == Just ws ]
describe "Notifications QuickCheck tests" $ do
prop "CEMessage aeson encoding" $ \m -> A.decode (A.encode (m :: CEMessage)) == Just m
prop "Topic aeson encoding" $ \t -> A.decode (A.encode (t :: Topic)) == Just t
prop "WSRequest aeson encoding" $ \ws -> A.decode (A.encode (ws :: WSRequest)) == Just ws
......@@ -17,11 +17,11 @@ import Data.Aeson qualified as A
import Gargantext.API.Admin.Orchestrator.Types
import Gargantext.Prelude
import Test.Instances ()
import Test.Tasty
import Test.Tasty.QuickCheck qualified as QC
import Test.Hspec
import Test.Hspec.QuickCheck
qcTests :: TestTree
qcTests :: Spec
qcTests =
testGroup "Orchestrator QuickCheck tests" $ do
[ QC.testProperty "ExternalAPIs aeson encoding" $ \m -> A.decode (A.encode (m :: ExternalAPIs)) == Just m ]
describe "Orchestrator QuickCheck tests" $ do
prop "ExternalAPIs aeson encoding" $ \m -> A.decode (A.encode (m :: ExternalAPIs)) == Just m
......@@ -16,9 +16,10 @@ import qualified Gargantext.Core.Text.Corpus.API.Arxiv as Arxiv
import qualified Gargantext.Core.Text.Corpus.API.Pubmed as Pubmed
import qualified Network.Api.Arxiv as Arxiv
import Test.Tasty
import Test.Tasty.HUnit
import Test.Tasty.QuickCheck hiding (Positive, Negative)
import Test.HUnit
import Test.Hspec
import Test.Hspec.QuickCheck
import Test.QuickCheck hiding (Positive, Negative)
newtype PubmedApiKey
= PubmedApiKey { _PubmedApiKey :: T.Text }
......@@ -28,54 +29,50 @@ newtype PubmedApiKey
pubmedSettings :: IO (Maybe PubmedApiKey)
pubmedSettings = fmap fromString <$> lookupEnv "PUBMED_API_KEY"
tests :: TestTree
tests = withResource pubmedSettings (const (pure ())) $ \getPubmedKey ->
testGroup "Boolean Query Engine" [
testProperty "Parses 'A OR B'" testParse01
, testProperty "Parses 'A AND B'" testParse02
, testProperty "Parses '-A'" testParse03
, testProperty "Parses 'NOT A'" testParse03_01
, testProperty "Parses 'A -B'" testParse04
, testProperty "Parses 'A NOT -B'" testParse04_01
, testProperty "Parses 'A AND B -C' (left associative)" testParse05
, testProperty "Parses 'A AND (B -C)' (right associative)" testParse05_01
, testProperty "Parses (A OR B OR NOT C) AND (D OR E OR F) -(G OR H OR I)" testParse06
, testProperty "It supports '\"Haskell\" AND \"Idris\"'" testParse07
, testProperty "It supports 'Haskell AND Idris'" testParse07_01
, testProperty "It supports 'Raphael'" testParse07_02
, testProperty "It supports 'Niki', 'Ajeje' and 'Orf'" testParse07_03
, testCase "Parses words into a single constant" testWordsIntoConst
, testGroup "Arxiv expression converter" [
testCase "It supports 'A AND B'" testArxiv01_01
, testCase "It supports '\"Haskell\" AND \"Agda\"'" testArxiv01_02
, testCase "It supports 'A OR B'" testArxiv02
, testCase "It supports 'A AND NOT B'" testArxiv03_01
, testCase "It supports 'A AND -B'" testArxiv03_02
, testCase "It supports 'A AND -B'" testArxiv03_02
, testCase "It supports 'A AND NOT (NOT B)'" testArxiv04_01
, testCase "It supports 'A AND NOT (NOT (NOT B))'" testArxiv04_02
, testCase "It supports 'A OR NOT B'" testArxiv05
, testCase "It supports '-A'" testArxiv06
]
, testGroup "PUBMED expression converter" [
testCase "It supports 'A'" testPubMed01
, testCase "It supports '-A'" testPubMed02_01
, testCase "It supports 'NOT A'" testPubMed02_02
, testCase "It supports 'NOT (NOT A)'" testPubMed02_03
, testCase "It supports '\"Haskell\" AND \"Idris\"'" testPubMed03
, testCase "It supports 'A OR B'" testPubMed04
]
, testGroup "PUBMED real queries (skipped if PUBMED_API_KEY env var not set)" [
testCase "It searches for \"Covid\"" (testPubMedCovid_01 getPubmedKey)
, testCase "It searches for \"Covid\" AND \"Alzheimer\"" (testPubMedCovid_02 getPubmedKey)
]
-- We skip the Arxiv tests if the PUBMED_API_KEY is not set just for conveniency, to have
-- only a single flow-control mechanism.
, testGroup "ARXIV real queries (skipped if PUBMED_API_KEY env var not set)" [
testCase "It searches for \"Haskell\"" (testArxivRealWorld_01 getPubmedKey)
, testCase "It searches for \"Haskell\" AND \"Agda\"" (testArxivRealWorld_02 getPubmedKey)
]
]
tests :: Spec
tests = do
describe "Boolean Query Engine" $ do
prop "Parses 'A OR B'" testParse01
prop "Parses 'A AND B'" testParse02
prop "Parses '-A'" testParse03
prop "Parses 'NOT A'" testParse03_01
prop "Parses 'A -B'" testParse04
prop "Parses 'A NOT -B'" testParse04_01
prop "Parses 'A AND B -C' (left associative)" testParse05
prop "Parses 'A AND (B -C)' (right associative)" testParse05_01
prop "Parses (A OR B OR NOT C) AND (D OR E OR F) -(G OR H OR I)" testParse06
prop "It supports '\"Haskell\" AND \"Idris\"'" testParse07
prop "It supports 'Haskell AND Idris'" testParse07_01
prop "It supports 'Raphael'" testParse07_02
prop "It supports 'Niki', 'Ajeje' and 'Orf'" testParse07_03
it "Parses words into a single constant" testWordsIntoConst
describe "Arxiv expression converter" $ do
it "It supports 'A AND B'" testArxiv01_01
it "It supports '\"Haskell\" AND \"Agda\"'" testArxiv01_02
it "It supports 'A OR B'" testArxiv02
it "It supports 'A AND NOT B'" testArxiv03_01
it "It supports 'A AND -B'" testArxiv03_02
it "It supports 'A AND -B'" testArxiv03_02
it "It supports 'A AND NOT (NOT B)'" testArxiv04_01
it "It supports 'A AND NOT (NOT (NOT B))'" testArxiv04_02
it "It supports 'A OR NOT B'" testArxiv05
it "It supports '-A'" testArxiv06
describe "PUBMED expression converter" $ do
it "It supports 'A'" testPubMed01
it "It supports '-A'" testPubMed02_01
it "It supports 'NOT A'" testPubMed02_02
it "It supports 'NOT (NOT A)'" testPubMed02_03
it "It supports '\"Haskell\" AND \"Idris\"'" testPubMed03
it "It supports 'A OR B'" testPubMed04
beforeAll pubmedSettings $ do
describe "PUBMED real queries (skipped if PUBMED_API_KEY env var not set)" $ do
it "It searches for \"Covid\"" $ \key -> testPubMedCovid_01 key
it "It searches for \"Covid\" AND \"Alzheimer\"" $ \key -> testPubMedCovid_02 key
-- We skip the Arxiv tests if the PUBMED_API_KEY is not set, purely for convenience, so that
-- there is only a single flow-control mechanism.
describe "ARXIV real queries (skipped if PUBMED_API_KEY env var not set)" $ do
it "It searches for \"Haskell\"" $ \key -> testArxivRealWorld_01 key
it "It searches for \"Haskell\" AND \"Agda\"" $ \key -> testArxivRealWorld_02 key
-- | Checks that the 'RawQuery' can be translated into the expected 'BoolExpr' form,
-- by also checking that both render back to the initial 'RawQuery'.
......@@ -242,9 +239,8 @@ testPubMed04 :: Assertion
testPubMed04 = withValidQuery "A OR B" $ \q ->
Pubmed.getESearch (Pubmed.convertQuery q) @?= "A+OR+B"
testPubMedCovid_01 :: IO (Maybe PubmedApiKey) -> Assertion
testPubMedCovid_01 getPubmedKey = do
mb_key <- getPubmedKey
testPubMedCovid_01 :: Maybe PubmedApiKey -> Assertion
testPubMedCovid_01 mb_key = do
case mb_key of
Nothing -> pure ()
Just k -> withValidQuery "\"Covid\"" $ \query -> do
......@@ -257,9 +253,8 @@ testPubMedCovid_01 getPubmedKey = do
[] -> fail "No documents found."
(x:_) -> _hd_title x @?= Just "Being a Hospice Nurse in Times of the COVID-19 Pandemic: A Phenomenological Study of Providing End-of-Life Care."
testPubMedCovid_02 :: IO (Maybe PubmedApiKey) -> Assertion
testPubMedCovid_02 getPubmedKey = do
mb_key <- getPubmedKey
testPubMedCovid_02 :: Maybe PubmedApiKey -> Assertion
testPubMedCovid_02 mb_key = do
case mb_key of
Nothing -> pure ()
Just k -> withValidQuery "\"Covid\" AND \"Alzheimer\"" $ \query -> do
......@@ -272,9 +267,8 @@ testPubMedCovid_02 getPubmedKey = do
[] -> fail "No documents found."
(x:_) -> _hd_title x @?= Just "Neurodegenerative and Neurodevelopmental Diseases and the Gut-Brain Axis: The Potential of Therapeutic Targeting of the Microbiome."
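The signature change from 'IO (Maybe PubmedApiKey) -> Assertion' to 'Maybe PubmedApiKey -> Assertion' works because 'beforeAll' runs its IO action once and hands the result to every item in the group as a lambda argument. A small sketch of that pattern, assuming the fixture merely reads the PUBMED_API_KEY environment variable (the real 'pubmedSettings' may do more):

import Data.Text (Text)
import qualified Data.Text as T
import System.Environment (lookupEnv)
import Test.Hspec

type ApiKey = Text  -- stand-in for the real PubmedApiKey

-- Hypothetical fixture: read the key once, before all items in the group run.
getApiKey :: IO (Maybe ApiKey)
getApiKey = fmap T.pack <$> lookupEnv "PUBMED_API_KEY"

spec :: Spec
spec = beforeAll getApiKey $
  describe "real queries (skipped when the key is missing)" $
    it "runs a query when the key is present" $ \mb_key ->
      case mb_key of
        Nothing -> pure ()   -- no key: silently skip, as in the suite above
        Just _k -> pure ()   -- with a key: the real test would hit the API here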
testArxivRealWorld_01 :: IO (Maybe PubmedApiKey) -> Assertion
testArxivRealWorld_01 getPubmedKey = do
mb_key <- getPubmedKey
testArxivRealWorld_01 :: Maybe PubmedApiKey -> Assertion
testArxivRealWorld_01 mb_key = do
case mb_key of
Nothing -> pure ()
Just _ -> withValidQuery "\"Haskell\"" $ \query -> do
......@@ -284,9 +278,8 @@ testArxivRealWorld_01 getPubmedKey = do
[] -> fail "No documents found."
(x:_) -> _hd_title x @?= Just "Haskell for OCaml programmers"
testArxivRealWorld_02 :: IO (Maybe PubmedApiKey) -> Assertion
testArxivRealWorld_02 getPubmedKey = do
mb_key <- getPubmedKey
testArxivRealWorld_02 :: Maybe PubmedApiKey -> Assertion
testArxivRealWorld_02 mb_key = do
case mb_key of
Nothing -> pure ()
Just _ -> withValidQuery "\"Haskell\" AND \"Agda\"" $ \query -> do
......
......@@ -13,20 +13,18 @@ import Data.Text.Encoding as DT
import Prelude
import Test.Tasty
import Test.Tasty.QuickCheck hiding (Positive, Negative)
tests :: TestTree
tests = testGroup "TSV Parser" [
testProperty "Parses 'Valid Text'" testValidText
, testProperty "Parses 'Valid Number'" testValidNumber
, testProperty "Parses 'Error Per Line On A File'" testTestErrorPerLine
, testProperty "Parses 'Correct File'" testTestCorrectFile
, testProperty "Parses 'Correct File With New Line In Last Header'" testTestCorrectFileWithNewLine
, testProperty "Parses 'Find Delimiter'" testFindDelimiter
, testProperty "Parses 'Get Headers'" testGetHeader]
import Test.Hspec
import Test.Hspec.QuickCheck
tests :: Spec
tests = describe "TSV Parser" $ do
prop "Parses 'Valid Text'" testValidText
prop "Parses 'Valid Number'" testValidNumber
prop "Parses 'Error Per Line On A File'" testTestErrorPerLine
prop "Parses 'Correct File'" testTestCorrectFile
prop "Parses 'Correct File With New Line In Last Header'" testTestCorrectFileWithNewLine
prop "Parses 'Find Delimiter'" testFindDelimiter
prop "Parses 'Get Headers'" testGetHeader
delimiterBS :: Delimiter -> BL.ByteString
......@@ -36,10 +34,10 @@ delimiterBS Line = BLU.fromString "\n"
data File = File { fDelimiter :: Delimiter
, allCorpus :: [RandomCorpus]
}
}
deriving (Show)
data RandomCorpus =
data RandomCorpus =
RandomCorpus { abstract :: Text
, title :: Text
, authors :: Text
......@@ -47,7 +45,7 @@ data RandomCorpus =
, day :: Int
, month :: Int
, years :: Int
}
}
deriving (Show)
instance Arbitrary File where
......@@ -67,19 +65,19 @@ delimiterToText Line = DT.pack "\n"
delimiterToString :: Delimiter -> Char
delimiterToString Tab = '\t'
delimiterToString Comma = ','
delimiterToString Line = '\n'
delimiterToString Line = '\n'
textToBL :: Text -> BL.ByteString
textToBL b = BL.fromChunks . return . DT.encodeUtf8 $ b
generateRandomCorpus :: Gen RandomCorpus
generateRandomCorpus = RandomCorpus
generateRandomCorpus = RandomCorpus
<$> generateString
<*> generateString
<*> generateString
<*> generateString
<*> generateNumber
<*> generateNumber
<*> generateString
<*> generateString
<*> generateNumber
<*> generateNumber
<*> generateNumber
generateFileDelimiter :: Gen File
......@@ -99,7 +97,7 @@ generateNumber :: Gen Int
generateNumber = arbitrary :: Gen Int
randomHeaderList :: Gen [String]
randomHeaderList = frequency [
randomHeaderList = frequency [
(1, return [])
, (7, (:) <$> (elements ["Publication Day", "Publication Month", "Publication Year", "Authors", "Title", "Source", "Abstract"]) <*> randomHeaderList)
]
......@@ -168,7 +166,7 @@ testValidNumber = forAll generateNumber (\s -> do
-- Test the 'validTextField' function (checks whether a field is valid for GarganText)
testValidText :: Property
testValidText = forAll generateString (\s ->
testValidText = forAll generateString (\s ->
let bl = textToBL s in
case validTextField bl s 1 of
Right _ -> True
......@@ -188,7 +186,7 @@ testTestErrorPerLine = forAll generateRandomCorpus (\tsv -> do
Right _ -> True
Left _ -> validRandomCorpus tsv del)
--check :
--check :
-- On success ('Right') the detected delimiter is returned;
-- on failure ('Left') an error is returned.
......@@ -198,7 +196,7 @@ testTestCorrectFile = forAll generateFile (\file -> do
let tsv = createFile file
case testCorrectFile tsv of
Right del -> del == fDelimiter file
Left _ -> Prelude.all (\x -> do
Left _ -> Prelude.all (\x -> do
let del = fDelimiter file
let headers = Prelude.map DT.pack ["Publication Day", "Publication Month", "Publication Year", "Authors", "Title", "Source", "Abstract"]
let splitLine = BL.splitWith (==delimiter del) $ createLineFromCorpus x del
......@@ -213,7 +211,7 @@ testTestCorrectFileWithNewLine = forAll generateFile (\file -> do
let tsv = createFileWithNewLine file
case testCorrectFile tsv of
Right _ -> True
Left _ -> Prelude.all (\x -> do
Left _ -> Prelude.all (\x -> do
let del = fDelimiter file
let headers = Prelude.map DT.pack ["Publication Day", "Publication Month", "Publication Year", "Authors", "Title", "Source", "Abstract"]
let splitLine = BL.splitWith (==delimiter del) $ createLineFromCorpus x del
......
......@@ -15,16 +15,15 @@ import Data.Aeson qualified as Aeson
import Gargantext.Core.Worker.Jobs.Types (Job(..))
import Gargantext.Prelude
import Test.Instances ()
import Test.Tasty
import Test.Tasty.QuickCheck hiding (Positive, Negative)
import Test.Hspec
import Test.Hspec.QuickCheck
tests :: TestTree
tests = testGroup "worker unit tests" [
testProperty "Worker Job to/from JSON serialization is correct" $
tests :: Spec
tests = describe "worker unit tests" $
prop "Worker Job to/from JSON serialization is correct" $
\job -> Aeson.decode (Aeson.encode (job :: Job)) == Just job
-- , testProperty "JobInfo to/from JSON serialization is correct" $
-- \ji -> Aeson.decode (Aeson.encode (ji :: JobInfo)) == Just ji
]
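The worker check is the standard aeson roundtrip property: encode, decode, compare with the original. A self-contained sketch of the same idea, using a hypothetical 'Colour' payload in place of the real 'Job' type:

{-# LANGUAGE DeriveGeneric #-}
import Data.Aeson (FromJSON, ToJSON, decode, encode)
import GHC.Generics (Generic)
import Test.Hspec
import Test.Hspec.QuickCheck (prop)
import Test.QuickCheck (Arbitrary (..), elements)

-- Reusable roundtrip check for any type with matching To/FromJSON instances.
jsonRoundtrips :: (Eq a, ToJSON a, FromJSON a) => a -> Bool
jsonRoundtrips x = decode (encode x) == Just x

-- Hypothetical payload standing in for the real 'Job' type.
data Colour = Red | Green | Blue
  deriving (Show, Eq, Enum, Bounded, Generic)

instance ToJSON Colour
instance FromJSON Colour

instance Arbitrary Colour where
  arbitrary = elements [minBound .. maxBound]

spec :: Spec
spec = describe "JSON serialization" $
  prop "roundtrips for Colour" (jsonRoundtrips :: Colour -> Bool)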
......@@ -34,10 +34,10 @@ import Test.Database.Operations.NodeStory
import Test.Database.Operations.PublishNode
import Test.Database.Setup (withTestDB)
import Test.Database.Types
import Test.HUnit hiding (assert)
import Test.Hspec
import Test.QuickCheck
import Test.QuickCheck.Monadic
import Test.Tasty.HUnit hiding (assert)
import Test.Tasty.QuickCheck
-- | Keeps a log of usernames we have already generated, so that our
......
......@@ -31,14 +31,14 @@ import Gargantext.Database.Prelude
import Gargantext.Database.Query.Facet
import Gargantext.Database.Query.Table.Node
import Gargantext.Database.Query.Table.Node.Error (HasNodeError)
import Gargantext.Database.Query.Table.Node.Error (errorWith)
import Gargantext.Database.Query.Table.Node.Error (nodeErrorWith)
import Gargantext.Database.Query.Tree.Root
import Gargantext.Database.Query.Table.NodeContext (selectCountDocs)
import Gargantext.Database.Schema.Node (NodePoly(..))
import Prelude
import Test.Database.Types
import Test.Hspec.Expectations
import Test.Tasty.HUnit
import Test.HUnit hiding (Node)
exampleDocument_01 :: HyperdataDocument
......@@ -119,7 +119,7 @@ getCorporaWithParentIdOrFail parentId = do
xs <- getCorporaWithParentId parentId
case xs of
[corpus] -> pure corpus
_ -> errorWith $ "getCorporaWithParentIdOrFail, impossible: " <> T.pack (show xs)
_ -> nodeErrorWith $ "getCorporaWithParentIdOrFail, impossible: " <> T.pack (show xs)
addCorpusDocuments :: TestEnv -> IO TestEnv
addCorpusDocuments env = runTestMonad env $ do
......
......@@ -39,7 +39,7 @@ import Gargantext.Prelude
import Test.Database.Operations.DocumentSearch (getCorporaWithParentIdOrFail)
import Test.Database.Types
import Test.Hspec.Expectations
import Test.Tasty.HUnit
import Test.HUnit
commonInitialization :: TestMonad ( UserId, NodeId, ListId, ArchiveList )
......
......@@ -23,7 +23,7 @@ import Gargantext.Database.Query.Table.NodeNode
import Gargantext.Database.Schema.Node (NodePoly(..))
import Test.API.Prelude (newPrivateFolderForUser, newPublicFolderForUser, alice)
import Test.Database.Types
import Test.Tasty.HUnit
import Test.HUnit
publishStrict :: SourceId -> TargetId -> DBCmd err ()
publishStrict sid = runDBTx . publishNode NPP_publish_no_edits_allowed sid
......
......@@ -87,8 +87,11 @@ withTestNotificationConfig cfg action = do
action $ cfg & gc_notifications_config
.~ NotificationsConfig { _nc_central_exchange_bind = "ipc://" <> ce_fp
, _nc_central_exchange_connect = "ipc://" <> ce_fp
, _nc_ce_send_timeout_ms = 200
, _nc_dispatcher_bind = "ipc://" <> ds_fp
, _nc_dispatcher_connect = "ipc://" <> ds_fp
, _nc_dispatcher_send_timeout_ms = 500
, _nc_dispatcher_throttle_ms = 500
}
setup :: IO TestEnv
......
......@@ -4,6 +4,7 @@
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeApplications #-}
{-| Tests for the transactional DB API -}
......@@ -16,6 +17,7 @@ import Control.Exception.Safe
import Control.Exception.Safe qualified as Safe
import Control.Monad.Reader
import Control.Monad.Trans.Control
import Data.List.NonEmpty qualified as NE
import Data.Pool
import Data.Profunctor.Product.TH (makeAdaptorAndInstance)
import Data.String
......@@ -28,20 +30,24 @@ import Database.PostgreSQL.Simple.Options qualified as Client
import Database.PostgreSQL.Simple.SqlQQ (sql)
import Database.PostgreSQL.Simple.ToField
import Database.Postgres.Temp qualified as Tmp
import Gargantext.API.Errors.Types (BackendInternalError)
import Gargantext.Database.Query.Table.Node.Error (errorWith)
import Gargantext.Core.Types.Individu
import Gargantext.Database.Query.Table.User
import Gargantext.Database.Schema.Prelude (Table (..))
import Gargantext.Database.Transactional
import Gargantext.Prelude
import Gargantext.Prelude hiding (throwIO, catch)
import Opaleye (selectTable, requiredTableField, SqlInt4)
import Opaleye qualified as O
import Prelude qualified
import Shelly as SH
import System.Random.Stateful
import Test.API.Setup (setupEnvironment)
import Test.Database.Setup
import Test.Database.Types hiding (Counter)
import Test.Hspec
import Test.Tasty.HUnit hiding (assert)
import Test.HUnit hiding (assert)
import Text.RawString.QQ
import Gargantext.Database.Action.User
import Gargantext.Database.Query.Table.Node.Error
--
-- For these tests we do not want to test the normal GGTX database queries, but rather
......@@ -79,9 +85,9 @@ countersTable =
)
newtype TestDBTxMonad a = TestDBTxMonad { _TestDBTxMonad :: TestMonadM DBHandle BackendInternalError a }
newtype TestDBTxMonad a = TestDBTxMonad { _TestDBTxMonad :: TestMonadM DBHandle IOException a }
deriving ( Functor, Applicative, Monad
, MonadReader DBHandle, MonadError BackendInternalError
, MonadReader DBHandle, MonadError IOException
, MonadBase IO
, MonadBaseControl IO
, MonadFail
......@@ -91,8 +97,12 @@ newtype TestDBTxMonad a = TestDBTxMonad { _TestDBTxMonad :: TestMonadM DBHandle
, MonadThrow
)
runTestDBTxMonad :: DBHandle -> TestMonadM DBHandle BackendInternalError a -> IO a
runTestDBTxMonad env = flip runReaderT env . _TestMonad
runTestDBTxMonad :: DBHandle -> TestMonadM DBHandle IOException a -> IO a
runTestDBTxMonad env m = do
res <- flip runReaderT env . runExceptT . _TestMonad $ m
case res of
Left err -> throwIO $ Prelude.userError ("runTestDBTxMonad: " <> displayException err)
Right x -> pure x
setup :: IO DBHandle
setup = do
......@@ -163,23 +173,23 @@ teardown test_db = do
instance PG.FromRow Counter where
fromRow = Counter <$> field <*> field
getCounterById :: CounterId -> DBQuery BackendInternalError r Counter
getCounterById :: CounterId -> DBQuery IOException r Counter
getCounterById (CounterId cid) = do
xs <- mkPGQuery [sql| SELECT * FROM public.ggtx_test_counter_table WHERE id = ?; |] (PG.Only cid)
case xs of
[c] -> pure c
rst -> errorWith $ "getCounterId returned more than one result: " <> T.pack (show rst)
rst -> dbFail $ Prelude.userError $ "getCounterId returned more than one result: " <> show rst
insertCounter :: DBUpdate BackendInternalError Counter
insertCounter :: DBUpdate IOException Counter
insertCounter = do
mkPGUpdateReturningOne [sql| INSERT INTO public.ggtx_test_counter_table(counter_value) VALUES(0) RETURNING id, counter_value|] ()
updateCounter :: CounterId -> Int -> DBUpdate BackendInternalError Counter
updateCounter :: CounterId -> Int -> DBUpdate IOException Counter
updateCounter cid x = do
mkPGUpdateReturningOne [sql| UPDATE public.ggtx_test_counter_table SET counter_value = ? WHERE id = ? RETURNING *|] (x, cid)
-- | We deliberately write this as a composite operation.
stepCounter :: CounterId -> DBUpdate BackendInternalError Counter
stepCounter :: CounterId -> DBUpdate IOException Counter
stepCounter cid = do
Counter{..} <- getCounterById cid
mkPGUpdateReturningOne [sql| UPDATE public.ggtx_test_counter_table SET counter_value = ? WHERE id = ? RETURNING *|] (counterValue + 1, cid)
......@@ -189,8 +199,15 @@ stepCounter cid = do
--
tests :: Spec
tests = parallel $ around withTestCounterDB $
describe "Database Transactions" $ do
tests = describe "Database Transactions" $ do
counterDBTests
ggtxDBTests
-- | Testing the transactional behaviour outside the classic GGTX operations.
-- We test that throwing exceptions in IO leads to rollbacks.
counterDBTests :: Spec
counterDBTests = parallel $ around withTestCounterDB $
describe "Counter Transactions" $ do
describe "Opaleye count queries" $ do
it "Supports counting rows" opaCountQueries
describe "Pure PG Queries" $ do
......@@ -206,6 +223,14 @@ tests = parallel $ around withTestCounterDB $
describe "Read/Write Consistency" $ do
it "should return a consistent state to different actors" testConsistency
-- | Testing the transactional behaviour inside the classic GGTX operations.
-- We test that throwing something like a 'NodeError' results in a proper rollback.
ggtxDBTests :: Spec
ggtxDBTests = parallel $ around withTestDB $ beforeWith (\ctx -> setupEnvironment ctx >>= (const $ pure ctx)) $
describe "GGTX Transactions" $ do
describe "Rollback support" $ do
it "can rollback if a ggtx error gets thrown" testGGTXErrorRollback
simplePGQueryWorks :: DBHandle -> Assertion
simplePGQueryWorks env = runTestDBTxMonad env $ do
x <- runDBQuery $ getCounterById (CounterId 1)
......@@ -239,9 +264,9 @@ testRollback env = runTestDBTxMonad env $ do
liftIO $ counterValue initialCounter `shouldBe` 1
-- Let's do another transaction where at the very last instruction we
-- fail.
Safe.handle (\(_ :: SomeException) -> pure ()) $ runDBTx $ do
handleDBTxError (\(_ :: IOException) -> pure ()) $ runDBTx $ do
_x' <- stepCounter (counterId initialCounter)
errorWith "urgh"
dbFail $ Prelude.userError "urgh"
-- Let's check that the second 'stepCounter' didn't actually modified the counter's value.
finalCounter <- runDBTx $ getCounterById (counterId initialCounter)
......@@ -277,3 +302,26 @@ opaCountQueries env = runTestDBTxMonad env $ do
_ <- insertCounter
mkOpaCountQuery (selectTable countersTable)
liftIO $ num @?= 3
-- | In this simple test we create a user node in GGTX, try
-- to update it, and check that if we throw an error in the update
-- transaction, the changes are not propagated.
testGGTXErrorRollback :: TestEnv -> Assertion
testGGTXErrorRollback env = runTestMonadM @NodeError env $ do
let ur = NewUser "alfredo" "alfredo@foo.com" (GargPassword "mypass")
let newUsers = ur NE.:| []
hashed <- liftIO $ mapM toUserHash newUsers
void $ runDBTx $ insertNewUsers hashed
-- Retrieve the user, check the details
insertedUr <- runDBQuery $ getUserLightDB (UserName "alfredo")
liftIO $ userLight_username insertedUr `shouldBe` "alfredo"
-- CRUCIAL bit: try to update the email, throw an exception in the same tx block
void $ (runDBTx $ do
void $ updateUserEmail (insertedUr { userLight_email = "alfredo@bar.com" })
nodeError $ NoRootFound -- it doesn't matter which exception
) `catchDBTxError` \(_e :: NodeError) -> pure () -- swallow it.
-- let's check that the email hasn't been changed.
insertedUr' <- runDBQuery $ getUserLightDB (UserName "alfredo")
liftIO $ userLight_email insertedUr' `shouldBe` "alfredo@foo.com"
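Both rollback tests rely on the same guarantee: if the transactional block throws, none of its writes become visible. The project exercises this through its own DBTx layer; the sketch below illustrates the same behaviour with plain postgresql-simple, whose 'withTransaction' issues a ROLLBACK when the wrapped action throws (the table name and values are made up for illustration):

{-# LANGUAGE OverloadedStrings #-}
import Control.Exception (SomeException, throwIO, try)
import Database.PostgreSQL.Simple

-- Assumes a table created as: CREATE TABLE counter (id int PRIMARY KEY, value int)
rollbackDemo :: Connection -> IO ()
rollbackDemo conn = do
  _ <- execute_ conn "UPDATE counter SET value = 1 WHERE id = 1"
  -- Bump the counter and then fail at the very last instruction: the whole
  -- transaction is rolled back, so the bump never becomes visible.
  _ <- try (withTransaction conn $ do
              _ <- execute_ conn "UPDATE counter SET value = value + 1 WHERE id = 1"
              throwIO (userError "urgh")) :: IO (Either SomeException ())
  [Only v] <- query_ conn "SELECT value FROM counter WHERE id = 1"
  print (v :: Int)   -- prints 1: the failed transaction left no trace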
......@@ -17,7 +17,6 @@ module Test.Database.Types where
import Control.Exception.Safe
import Control.Lens
import Control.Monad.Except
import Control.Monad.Reader
import Control.Monad.Trans.Control
import Data.IORef
......@@ -25,8 +24,7 @@ import Data.Map qualified as Map
import Data.Pool
import Database.PostgreSQL.Simple qualified as PG
import Database.Postgres.Temp qualified as Tmp
import GHC.IO.Exception (userError)
import Gargantext hiding (to)
import Gargantext hiding (throwIO, to)
import Gargantext.API.Admin.Orchestrator.Types
import Gargantext.API.Errors.Types
import Gargantext.API.Prelude
......@@ -41,6 +39,7 @@ import Gargantext.Utils.Jobs.Monad (MonadJobStatus(..))
import Network.URI (parseURI)
import Prelude qualified
import System.Log.FastLogger qualified as FL
import System.IO.Error (userError)
newtype Counter = Counter { _Counter :: IORef Int }
......@@ -64,10 +63,11 @@ data TestEnv = TestEnv {
, test_worker_tid :: !ThreadId
}
newtype TestMonadM env err a = TestMonad { _TestMonad :: ReaderT env IO a }
newtype TestMonadM env err a = TestMonad { _TestMonad :: ExceptT err (ReaderT env IO) a }
deriving ( Functor, Applicative, Monad
, MonadReader env
, MonadBase IO
, MonadError err
, MonadBaseControl IO
, MonadFail
, MonadIO
......@@ -76,10 +76,10 @@ newtype TestMonadM env err a = TestMonad { _TestMonad :: ReaderT env IO a }
, MonadThrow
)
instance HasLogger (TestMonadM TestEnv BackendInternalError) where
data instance Logger (TestMonadM TestEnv BackendInternalError) = TestLogger { _IOLogger :: IOStdLogger }
type instance LogInitParams (TestMonadM TestEnv BackendInternalError) = LogConfig
type instance LogPayload (TestMonadM TestEnv BackendInternalError) = Prelude.String
instance HasLogger (TestMonadM TestEnv err) where
data instance Logger (TestMonadM TestEnv err) = TestLogger { _IOLogger :: IOStdLogger }
type instance LogInitParams (TestMonadM TestEnv err) = LogConfig
type instance LogPayload (TestMonadM TestEnv err) = Prelude.String
initLogger cfg = fmap TestLogger $ (liftIO $ ioStdLogger cfg)
destroyLogger = liftIO . _iosl_destroy . _IOLogger
logMsg (TestLogger ioLogger) lvl msg = liftIO $ _iosl_log_msg ioLogger lvl msg
......@@ -89,18 +89,19 @@ instance MonadLogger (TestMonadM TestEnv BackendInternalError) where
getLogger = TestMonad $ do
initLogger @(TestMonadM TestEnv BackendInternalError) (LogConfig Nothing ERROR)
runTestMonadM :: env -> TestMonadM env err a -> IO a
runTestMonadM env = flip runReaderT env . _TestMonad
runTestMonadM :: Show err => env -> TestMonadM env err a -> IO a
runTestMonadM env m = do
res <- flip runReaderT env . runExceptT . _TestMonad $ m
case res of
Left err -> throwIO $ userError (show err)
Right x -> pure x
runTestMonad :: TestEnv -> TestMonadM TestEnv BackendInternalError a -> IO a
runTestMonad env = flip runReaderT env . _TestMonad
-- | Shoehorn a BackendInternalError into an IOException, suitable
-- for testing.
instance MonadError BackendInternalError (TestMonadM env BackendInternalError) where
throwError e = TestMonad $ throwError (userError $ show e)
catchError (TestMonad m) hdl =
TestMonad $ ReaderT $ \e -> catchError (flip runReaderT e m) (\e' -> runTestMonadM e $ hdl (InternalWorkerError e'))
runTestMonad env m = do
res <- flip runReaderT env . runExceptT . _TestMonad $ m
case res of
Left err -> throwIO $ userError ("runTestMonad: " <> show err)
Right x -> pure x
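The new 'TestMonadM' stacks 'ExceptT err' on top of 'ReaderT env IO', and the runners unwrap both layers, rethrowing any 'Left' as a plain IO exception so the test framework reports it as an ordinary failure. A standalone sketch of that stack, with hypothetical 'Env' and 'Err' types:

{-# LANGUAGE GeneralizedNewtypeDeriving #-}
import Control.Exception (throwIO)
import Control.Monad.Except (ExceptT, MonadError, runExceptT)
import Control.Monad.IO.Class (MonadIO)
import Control.Monad.Reader (MonadReader, ReaderT, runReaderT)

data Env = Env                    -- hypothetical test environment
data Err = Err String             -- hypothetical error type
  deriving Show

newtype TestM a = TestM { unTestM :: ExceptT Err (ReaderT Env IO) a }
  deriving ( Functor, Applicative, Monad, MonadIO
           , MonadReader Env, MonadError Err )

-- Run the stack; a 'Left' is rethrown as a plain IO exception so that the
-- test framework reports it as an ordinary failure.
runTestM :: Env -> TestM a -> IO a
runTestM env m = do
  res <- flip runReaderT env . runExceptT . unTestM $ m
  case res of
    Left err -> throwIO $ userError ("runTestM: " <> show err)
    Right x  -> pure x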
type TestMonad = TestMonadM TestEnv BackendInternalError
data TestJobHandle = TestNoJobHandle
......
......@@ -16,13 +16,13 @@ import Gargantext.Core.Types.Main
import Gargantext.Core.Types.Query
import Gargantext.Prelude
import Test.Ngrams.Query.PaginationCorpus
import Test.Tasty
import Test.Tasty.HUnit
import Test.HUnit
import Test.Hspec
import Test.Utils ((@??=))
tests :: TestTree
tests = testGroup "Ngrams" [unitTests]
tests :: Spec
tests = describe "Ngrams" $ unitTests
curryElem :: NgramsElement
curryElem = mkMapTerm "curry"
......@@ -43,32 +43,31 @@ mockQueryFn :: Maybe T.Text -> NgramsTerm -> Bool
mockQueryFn searchQuery (NgramsTerm nt) =
maybe (const True) (T.isInfixOf . T.toLower) searchQuery (T.toLower nt)
unitTests :: TestTree
unitTests = testGroup "Query tests"
[ -- Sorting
testCase "Simple query mockFlatCorpus" testFlat01
, testCase "Simple query (desc sorting)" testFlat02
, testCase "[#331] sorting via DUCET works" testSortDiacriticsDucet
, testCase "[#331] Natural sort ascending works" testNaturalSortAsceding
, testCase "[#331] Natural sort descending works" testNaturalSortDescending
unitTests :: Spec
unitTests = describe "Query tests" $ do
-- Sorting
it "Simple query mockFlatCorpus" testFlat01
it "Simple query (desc sorting)" testFlat02
it "[#331] sorting via DUCET works" testSortDiacriticsDucet
it "[#331] Natural sort ascending works" testNaturalSortAsceding
it "[#331] Natural sort descending works" testNaturalSortDescending
-- -- Filtering
, testCase "Simple query (listType = MapTerm)" testFlat03
, testCase "Simple query (listType = StopTerm)" testFlat04
it "Simple query (listType = MapTerm)" testFlat03
it "Simple query (listType = StopTerm)" testFlat04
-- -- Full text search
, testCase "Simple query (search with match)" testFlat05
it "Simple query (search with match)" testFlat05
-- -- Pagination
, testCase "Simple pagination on all terms" test_pagination_allTerms
, testCase "Simple pagination on MapTerm" test_pagination01
, testCase "Simple pagination on MapTerm (limit < total terms)" test_pagination02
, testCase "Simple pagination on MapTerm (offset works)" test_pagination02_offset
, testCase "Simple pagination on ListTerm (limit < total terms)" test_pagination03
, testCase "Simple pagination on ListTerm (offset works)" test_pagination03_offset
, testCase "Simple pagination on CandidateTerm (limit < total terms)" test_pagination04
, testCase "paginating QuantumComputing corpus works (MapTerms)" test_paginationQuantum
, testCase "paginating QuantumComputing corpus works (CandidateTerm)" test_paginationQuantum_02
it "Simple pagination on all terms" test_pagination_allTerms
it "Simple pagination on MapTerm" test_pagination01
it "Simple pagination on MapTerm (limit < total terms)" test_pagination02
it "Simple pagination on MapTerm (offset works)" test_pagination02_offset
it "Simple pagination on ListTerm (limit < total terms)" test_pagination03
it "Simple pagination on ListTerm (offset works)" test_pagination03_offset
it "Simple pagination on CandidateTerm (limit < total terms)" test_pagination04
it "paginating QuantumComputing corpus works (MapTerms)" test_paginationQuantum
it "paginating QuantumComputing corpus works (CandidateTerm)" test_paginationQuantum_02
-- -- Patching
, testCase "I can apply a patch to term mapTerms to stopTerms (issue #217)" test_217
]
it "I can apply a patch to term mapTerms to stopTerms (issue #217)" test_217
-- Let's test that if we request elements sorted in
-- /ascending/ order, we get them.
......
......@@ -15,32 +15,27 @@ import Gargantext.Database.Admin.Types.Hyperdata.Document ( HyperdataDocument(..
import Gargantext.Database.Admin.Types.Node (NodeId(..))
import Gargantext.Database.Schema.Context ( ContextPolyOnlyId(..) )
import Gargantext.Prelude
import Test.Tasty ( TestTree, testGroup )
import Test.Tasty.HUnit ( Assertion, testCase, (@?=) )
tests :: TestTree
tests = testGroup "Ngrams" [unitTests]
unitTests :: TestTree
unitTests = testGroup "Terms tests"
[ -- Sorting
testCase "Build patterns works 01" testBuildPatterns01
, testCase "Build patterns works 02" testBuildPatterns02
, testCase "termsInText works 01" testTermsInText01
, testCase "termsInText works 02" testTermsInText02
, testCase "termsInText works 03" testTermsInText03
, testCase "termsInText works 04 (related to issue #221)" testTermsInText04
, testCase "extractTermsWithList' works 01" testExtractTermsWithList'01
, testCase "extractTermsWithList' works 02 (#471)" testExtractTermsWithList'02
, testCase "docNgrams works 01" testDocNgrams01
, testCase "docNgrams works 02" testDocNgrams02
, testCase "ngramsByDoc works 01" testNgramsByDoc01
]
import Test.Hspec
import Test.HUnit
tests :: Spec
tests = describe "Ngrams" unitTests
unitTests :: Spec
unitTests = describe "Terms tests" $ do
-- Sorting
it "Build patterns works 01" testBuildPatterns01
it "Build patterns works 02" testBuildPatterns02
it "termsInText works 01" testTermsInText01
it "termsInText works 02" testTermsInText02
it "termsInText works 03" testTermsInText03
it "termsInText works 04 (related to issue #221)" testTermsInText04
it "extractTermsWithList' works 01" testExtractTermsWithList'01
it "extractTermsWithList' works 02 (#471)" testExtractTermsWithList'02
it "docNgrams works 01" testDocNgrams01
it "docNgrams works 02" testDocNgrams02
it "ngramsByDoc works 01" testNgramsByDoc01
-- | Let's document how the `buildPatternsWith` function works.
testBuildPatterns01 :: Assertion
......
......@@ -9,14 +9,13 @@ import Gargantext.Core (fromDBid)
import Gargantext.Database.Admin.Config ()
import Gargantext.Database.Admin.Types.Node
import Prelude
import Test.Tasty
import Test.Tasty.HUnit
import Test.Hspec
import Test.HUnit
tests :: TestTree
tests = testGroup "Errors" [
testCase "fromDBid comes with a CallStack" fromDBid_cs
]
tests :: Spec
tests = describe "Errors" $
it "fromDBid comes with a CallStack" fromDBid_cs
fromDBid_cs :: Assertion
fromDBid_cs = do
......
......@@ -21,9 +21,9 @@ import Gargantext.Database.Admin.Types.Node
import Paths_gargantext
import Prelude
import Test.Instances (genFrontendErr)
import Test.Tasty
import Test.Tasty.HUnit
import Test.Tasty.QuickCheck
import Test.Hspec
import Test.HUnit
import Test.QuickCheck
import Text.RawString.QQ
jsonRoundtrip :: (Show a, FromJSON a, ToJSON a, Eq a) => a -> Property
......@@ -50,34 +50,32 @@ jsonFrontendErrorRoundtrip = conjoin $ map mk_prop [minBound .. maxBound]
mk_prop code = forAll (genFrontendErr code) $ \a ->
counterexample ("Parsed JSON: " <> C8.unpack (encode a)) $ eitherDecode (encode a) === Right a
tests :: TestTree
tests = testGroup "JSON" [
testProperty "NodeId roundtrips" (jsonRoundtrip @NodeId)
, testProperty "RootId roundtrips" (jsonRoundtrip @RootId)
, testProperty "Datafield roundtrips" (jsonRoundtrip @Datafield)
, testProperty "WithQuery roundtrips" (jsonRoundtrip @WithQuery)
, testProperty "PublishRequest roundtrips" (jsonRoundtrip @PublishRequest)
, testProperty "RemoteExportRequest roundtrips" (jsonRoundtrip @RemoteExportRequest)
, testProperty "FrontendError roundtrips" jsonFrontendErrorRoundtrip
, testProperty "BackendErrorCode roundtrips" (jsonEnumRoundtrip (Dict @_ @BackendErrorCode))
, testProperty "NodeType roundtrips" (jsonEnumRoundtrip (Dict @_ @NodeType))
, testProperty "NodePublishPolicy roundtrips" (jsonEnumRoundtrip (Dict @_ @NodePublishPolicy))
, testCase "WithQuery frontend compliance" testWithQueryFrontend
, testCase "WithQuery frontend compliance (PubMed)" testWithQueryFrontendPubMed
, testCase "WithQuery frontend compliance (EPO)" testWithQueryFrontendEPO
, testGroup "Phylo" [
testProperty "PeriodToNode" (jsonRoundtrip @PeriodToNodeData)
, testProperty "GraphData" (jsonRoundtrip @GraphData)
, testProperty "GraphDataData" (jsonRoundtrip @GraphDataData)
, testProperty "ObjectData" (jsonRoundtrip @ObjectData)
, testProperty "PhyloData" (jsonRoundtrip @PhyloData)
, testProperty "ComputeTimeHistory" (jsonRoundtrip @VizPhylo.ComputeTimeHistory)
, testProperty "Phylo" (jsonRoundtrip @VizPhylo.Phylo)
, testProperty "LayerData" (jsonRoundtrip @LayerData)
, testCase "can parse bpa_phylo_test.json" testParseBpaPhylo
, testCase "can parse open_science.json" testOpenSciencePhylo
]
]
tests :: Spec
tests = describe "JSON" $ do
it "NodeId roundtrips" (property $ jsonRoundtrip @NodeId)
it "RootId roundtrips" (property $ jsonRoundtrip @RootId)
it "Datafield roundtrips" (property $ jsonRoundtrip @Datafield)
it "WithQuery roundtrips" (property $ jsonRoundtrip @WithQuery)
it "PublishRequest roundtrips" (property $ jsonRoundtrip @PublishRequest)
it "RemoteExportRequest roundtrips" (property $ jsonRoundtrip @RemoteExportRequest)
it "FrontendError roundtrips" jsonFrontendErrorRoundtrip
it "BackendErrorCode roundtrips" (jsonEnumRoundtrip (Dict @_ @BackendErrorCode))
it "NodeType roundtrips" (jsonEnumRoundtrip (Dict @_ @NodeType))
it "NodePublishPolicy roundtrips" (jsonEnumRoundtrip (Dict @_ @NodePublishPolicy))
it "WithQuery frontend compliance" testWithQueryFrontend
it "WithQuery frontend compliance (PubMed)" testWithQueryFrontendPubMed
it "WithQuery frontend compliance (EPO)" testWithQueryFrontendEPO
describe "Phylo" $ do
it "PeriodToNode" (property $ jsonRoundtrip @PeriodToNodeData)
it "GraphData" (property $ jsonRoundtrip @GraphData)
it "GraphDataData" (property $ jsonRoundtrip @GraphDataData)
it "ObjectData" (property $ jsonRoundtrip @ObjectData)
it "PhyloData" (property $ jsonRoundtrip @PhyloData)
it "ComputeTimeHistory" (property $ jsonRoundtrip @VizPhylo.ComputeTimeHistory)
it "Phylo" (property $ jsonRoundtrip @VizPhylo.Phylo)
it "LayerData" (property $ jsonRoundtrip @LayerData)
it "can parse bpa_phylo_test.json" testParseBpaPhylo
it "can parse open_science.json" testOpenSciencePhylo
testWithQueryFrontend :: Assertion
testWithQueryFrontend = do
......
......@@ -13,8 +13,7 @@ import Gargantext.Database.Schema.Context
import Gargantext.Database.Admin.Types.Hyperdata
import Test.Instances ()
import Test.QuickCheck
import Test.Tasty
import Test.Tasty.QuickCheck (testProperty)
import Test.Hspec
import Control.Lens
import qualified Test.QuickCheck as QC
import Gargantext.Core.Text.Terms.Mono (isSep)
......@@ -82,18 +81,14 @@ instance Arbitrary DocumentWithMatches where
pure $ DocumentWithMatches generatedTerms hyperDoc
tests :: TestTree
tests = testGroup "Ngrams" [
testGroup "buildPatterns internal correctness" [
testProperty "patterns, no matter how simple, can be searched" prop_patterns_internal_consistency
]
, testGroup "buildPatternsWith" [
testProperty "return results for non-empty input terms" testBuildPatternsNonEmpty
]
, testGroup "docNgrams" [
testProperty "always matches if the input text contains any of the terms" testDocNgramsOKMatch
]
]
tests :: Spec
tests = describe "Ngrams" $ do
describe "buildPatterns internal correctness" $ do
it "patterns, no matter how simple, can be searched" $ property prop_patterns_internal_consistency
describe "buildPatternsWith" $ do
it "return results for non-empty input terms" $ property testBuildPatternsNonEmpty
describe "docNgrams" $ do
it "always matches if the input text contains any of the terms" $ property testDocNgramsOKMatch
testDocNgramsOKMatch :: Lang -> DocumentWithMatches -> Property
testDocNgramsOKMatch lang (DocumentWithMatches ts doc) =
......
......@@ -6,8 +6,10 @@ module Test.Offline.Phylo (tests) where
import CLI.Phylo.Common ( fileToList, fileToDocsDefault )
import Data.Aeson as JSON
import Data.Aeson.Types qualified as JSON
import Data.Aeson.Encode.Pretty qualified as JSON
import Data.Aeson.Types qualified as JSON
import Data.ByteString.Char8 qualified as C8
import Data.ByteString.Lazy qualified as BIO
import Data.ByteString.Lazy qualified as BL
import Data.GraphViz.Attributes.Complete qualified as Graphviz
import Data.Text.Lazy as TL
......@@ -23,10 +25,9 @@ import Gargantext.Core.Viz.Phylo.PhyloMaker (toPhylo, toPhyloWithoutLink)
import Gargantext.Core.Viz.Phylo.PhyloTools ( setConfig, ngramsToLabel, relatedComponents )
import Paths_gargantext ( getDataFileName )
import Prelude
import Test.Tasty ( TestTree, TestName, testGroup )
import Test.Tasty.Golden (goldenVsStringDiff)
import Test.Tasty.HUnit ( testCase, Assertion, assertBool, (@?=) )
import qualified Test.Tasty.Golden.Advanced as Advanced
import Test.HUnit
import Test.Hspec
import Test.Hspec.Golden
phyloTestConfig :: PhyloConfig
phyloTestConfig = PhyloConfig {
......@@ -52,62 +53,79 @@ phyloTestConfig = PhyloConfig {
, exportFilter = [ByBranchSize {_branch_size = 3.0}]
}
phyloGolden :: TestName -> (FilePath, IO BL.ByteString) -> TestTree
phyloGolden testName (fp, action) =
goldenVsStringDiff testName differ fp action
where
differ ref new = [ "diff", "-u", "-w", "--color=always", ref, new]
phyloGolden :: (FilePath, BL.ByteString) -> Golden BL.ByteString
phyloGolden (fp, actualOutput) =
Golden {
output = actualOutput
, encodePretty = C8.unpack . BIO.toStrict
, writeToFile = \_ _ -> pure ()
, readFromFile = BIO.readFile
, goldenFile = fp
, actualFile = Nothing
, failFirstTime = True
}
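'Test.Hspec.Golden' makes a 'Golden' record directly usable as an Hspec example: 'output' is compared against the contents of 'goldenFile' (read via 'readFromFile'), while 'encodePretty' is only used to render the values in the failure report. A minimal usage sketch, with a made-up golden path and payload:

import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Lazy.Char8 as C8L
import Test.Hspec
import Test.Hspec.Golden

-- Build a golden comparison for a lazy ByteString payload.
goldenJSON :: FilePath -> BL.ByteString -> Golden BL.ByteString
goldenJSON path actual =
  Golden { output        = actual
         , encodePretty  = C8L.unpack        -- rendering used in the failure report
         , writeToFile   = BL.writeFile      -- how to (re)create the golden file
         , readFromFile  = BL.readFile
         , goldenFile    = path
         , actualFile    = Nothing           -- do not keep a copy of the actual output
         , failFirstTime = True              -- fail if the golden file does not exist yet
         }

spec :: Spec
spec = describe "golden output" $
  it "matches the recorded JSON" $
    goldenJSON "test-data/example.golden.json" (C8L.pack "{\"answer\":42}")

Note that 'phyloGolden' above passes a no-op 'writeToFile', so a test run never overwrites the recorded golden files.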
newtype GraphDataFuzzy =
GraphDataFuzzy { _GraphDataFuzzy :: GraphData }
instance Eq GraphDataFuzzy where
(GraphDataFuzzy a) == (GraphDataFuzzy b) = a `compareGraphDataFuzzy` b
-- | Use this variant for those tests which require a more sophisticated way of
-- comparing outputs directly on the GraphData.
phyloGoldenGraphData :: TestName -> (FilePath, IO GraphData) -> TestTree
phyloGoldenGraphData testName (goldenPath, getActual) =
Advanced.goldenTest testName getGolden getActual differ updateGolden
phyloGoldenGraphData :: (FilePath, GraphDataFuzzy) -> Golden GraphDataFuzzy
phyloGoldenGraphData (goldenPath, new) =
Golden {
output = new
, encodePretty = differ
, writeToFile = \_ new' -> updateGolden new'
, readFromFile = const getGolden
, goldenFile = goldenPath
, actualFile = Nothing
, failFirstTime = True
}
where
differ ref new = pure $ if compareGraphDataFuzzy ref new
then Nothing
else Just $ show (ansiWlEditExprCompact $ ediff ref new)
differ :: GraphDataFuzzy -> String
differ (GraphDataFuzzy ref) = show (ansiWlEditExprCompact $ ediff ref (_GraphDataFuzzy new))
updateGolden :: GraphData -> IO ()
updateGolden gd = BL.writeFile goldenPath (JSON.encodePretty gd)
updateGolden :: GraphDataFuzzy -> IO ()
updateGolden (GraphDataFuzzy gd) = BL.writeFile goldenPath (JSON.encodePretty gd)
getGolden :: IO GraphData
getGolden = do
getGolden :: IO GraphDataFuzzy
getGolden = GraphDataFuzzy <$> do
expected_e <- JSON.eitherDecodeFileStrict' =<< getDataFileName goldenPath
case expected_e of
Left err -> fail err
Right (expected :: GraphData) -> pure expected
tests :: TestTree
tests = testGroup "Phylo" [
testGroup "Export" [
testCase "ngramsToLabel respects encoding" test_ngramsToLabel_01
, testCase "ngramsToLabel is rendered correctly in CustomAttribute" test_ngramsToLabel_02
]
, testGroup "toPhyloWithoutLink" [
testCase "returns expected data" testSmallPhyloWithoutLinkExpectedOutput
, phyloGolden "phyloCleopatre returns expected data" testCleopatreWithoutLinkExpectedOutput
, phyloGolden "Nadal canned corpus returns expected data" testNadalWithoutLinkExpectedOutput
]
, testGroup "phylo2dot2json" [
phyloGoldenGraphData "is deterministic" testPhylo2dot2json
]
, testGroup "toPhylo" [
phyloGolden "is deterministic" testToPhyloDeterminism
]
, testGroup "relatedComponents" [
testCase "finds simple connection" testRelComp_Connected
]
, testCase "parses csv phylo" testCsvPhylo
]
tests :: Spec
tests = describe "Phylo" $ do
describe "Export" $ do
it "ngramsToLabel respects encoding" test_ngramsToLabel_01
it "ngramsToLabel is rendered correctly in CustomAttribute" test_ngramsToLabel_02
describe "toPhyloWithoutLink" $ do
it "returns expected data" testSmallPhyloWithoutLinkExpectedOutput
it "phyloCleopatre returns expected data" $ phyloGolden testCleopatreWithoutLinkExpectedOutput
beforeAll testNadalWithoutLinkExpectedOutput $
it "Nadal canned corpus returns expected data" $ phyloGolden
describe "phylo2dot2json" $ do
beforeAll testPhylo2dot2json $
it "is deterministic" phyloGoldenGraphData
describe "toPhylo" $ do
beforeAll testToPhyloDeterminism $
it "is deterministic" $ phyloGolden
describe "relatedComponents" $ do
it "finds simple connection" testRelComp_Connected
it "parses csv phylo" testCsvPhylo
testCleopatreWithoutLinkExpectedOutput :: (FilePath, IO BL.ByteString)
testCleopatreWithoutLinkExpectedOutput :: (FilePath, BL.ByteString)
testCleopatreWithoutLinkExpectedOutput =
let actual = toPhyloWithoutLink Cleopatre.docs Cleopatre.config
in ("test-data/phylo/cleopatre.golden.json", pure $ JSON.encodePretty actual)
in ("test-data/phylo/cleopatre.golden.json", JSON.encodePretty actual)
testNadalWithoutLinkExpectedOutput :: IO (FilePath, BL.ByteString)
testNadalWithoutLinkExpectedOutput = do
testNadalWithoutLinkExpectedOutput :: (FilePath, IO BL.ByteString)
testNadalWithoutLinkExpectedOutput = ("test-data/phylo/nadal.golden.json",) $ do
corpusPath' <- getDataFileName "test-data/phylo/nadal_docslist.golden.tsv"
listPath' <- getDataFileName "test-data/phylo/nadal_ngramslist.golden.tsv"
let config = phyloTestConfig { corpusPath = corpusPath'
......@@ -119,7 +137,9 @@ testNadalWithoutLinkExpectedOutput = ("test-data/phylo/nadal.golden.json",) $ do
(corpusPath config)
[Year 3 1 5,Month 3 1 5,Week 4 2 5]
mapList
pure $ JSON.encodePretty $ setConfig phyloTestConfig $ toPhyloWithoutLink corpus config
pure ( "test-data/phylo/nadal.golden.json"
, JSON.encodePretty $ setConfig phyloTestConfig $ toPhyloWithoutLink corpus config
)
testSmallPhyloWithoutLinkExpectedOutput :: Assertion
testSmallPhyloWithoutLinkExpectedOutput = do
......@@ -138,12 +158,12 @@ testSmallPhyloWithoutLinkExpectedOutput = do
expected <- setConfig phyloTestConfig <$> (readPhylo =<< getDataFileName "test-data/phylo/small-phylo.golden.json")
assertBool (show $ ansiWlEditExprCompact $ ediff expected actual) (expected == actual)
testPhylo2dot2json :: (FilePath, IO GraphData)
testPhylo2dot2json = ("test-data/phylo/phylo2dot2json.golden.json",) $ do
testPhylo2dot2json :: IO (FilePath, GraphDataFuzzy)
testPhylo2dot2json = do
actual_e <- JSON.parseEither JSON.parseJSON <$> phylo2dot2json Cleopatre.phyloCleopatre
case actual_e of
Left err -> fail err
Right (actual :: GraphData) -> pure actual
Right (actual :: GraphData) -> pure ("test-data/phylo/phylo2dot2json.golden.json", GraphDataFuzzy actual)
compareGraphDataFuzzy :: GraphData -> GraphData -> Bool
compareGraphDataFuzzy gd1 gd2 =
......@@ -253,8 +273,8 @@ testRelComp_Connected = do
(relatedComponents @Int) [[1,2], [3,5], [2,4],[9,5],[5,4]] @?= [[1,2,4,3,5,9]]
(relatedComponents @Int) [[1,2,5], [4,5,9]] @?= [[1,2,5,4,9]]
testToPhyloDeterminism :: (FilePath, IO BL.ByteString)
testToPhyloDeterminism = ("test-data/phylo/187481.json",) $ do
testToPhyloDeterminism :: IO (FilePath, BL.ByteString)
testToPhyloDeterminism = do
corpusPath' <- getDataFileName "test-data/phylo/GarganText_DocsList-nodeId-187481.tsv"
listPath' <- getDataFileName "test-data/phylo/GarganText_NgramsList-187482.tsv"
let config = phyloTestConfig { corpusPath = corpusPath'
......@@ -267,7 +287,9 @@ testToPhyloDeterminism = ("test-data/phylo/187481.json",) $ do
[Year 3 1 5,Month 3 1 5,Week 4 2 5]
mapList
let actual = setConfig phyloTestConfig $ toPhylo $ toPhyloWithoutLink corpus config
pure $ JSON.encodePretty actual
pure ( "test-data/phylo/187481.json"
, JSON.encodePretty actual
)
testCsvPhylo :: Assertion
testCsvPhylo = do
......
......@@ -4,19 +4,30 @@ module Test.Offline.Stemming.Lancaster where
import Prelude
import Data.ByteString.Char8 qualified as C8
import Data.ByteString.Lazy qualified as BIO
import Data.Text qualified as T
import Gargantext.Core.Text.Terms.Mono.Stem.Internal.Lancaster (stem)
import Gargantext.Prelude (toS)
import Test.Tasty
import Test.Tasty.Golden (goldenVsString)
import Test.Hspec
import Test.Hspec.Golden
import qualified Data.ByteString.Lazy as BL
import qualified Data.Text.Encoding as TE
goldenBS :: BL.ByteString -> Golden BL.ByteString
goldenBS actualOutput =
Golden {
output = actualOutput
, encodePretty = C8.unpack . BIO.toStrict
, writeToFile = \_ _ -> pure ()
, readFromFile = BIO.readFile
, goldenFile = "test-data/stemming/lancaster.txt"
, actualFile = Nothing
, failFirstTime = True
}
tests :: TestTree
tests = testGroup "Lancaster" [
goldenVsString "test vector works" "test-data/stemming/lancaster.txt" mkTestVector
]
tests :: Spec
tests = describe "Lancaster" $
it "test vector works" $ goldenBS mkTestVector
-- | List of /unstemmed/ test words
testWords :: [(Int, T.Text)]
......@@ -126,5 +137,5 @@ testWords = [
, (103, "corroborate")
]
mkTestVector :: IO BL.ByteString
mkTestVector = pure $ toS $ C8.unlines (map (\(indx, w) -> (C8.pack $ show indx) <> "," <> TE.encodeUtf8 (stem w)) testWords)
mkTestVector :: BL.ByteString
mkTestVector = toS $ C8.unlines (map (\(indx, w) -> (C8.pack $ show indx) <> "," <> TE.encodeUtf8 (stem w)) testWords)
......@@ -73,7 +73,7 @@ import Test.Hspec.Expectations
import Test.Hspec.Wai.JSON (FromValue(..))
import Test.Hspec.Wai (MatchBody(..), WaiExpectation, WaiSession, request)
import Test.Hspec.Wai.Matcher (MatchHeader(..), ResponseMatcher(..), bodyEquals, formatHeader, match)
import Test.Tasty.HUnit (Assertion, assertBool)
import Test.HUnit (Assertion, assertBool)
import Test.Utils.Notifications (withWSConnection, millisecond)
......
{-|
Module : Main.hs
Description : Main for Gargantext Tasty Tests
Copyright : (c) CNRS, 2017-Present
License : AGPL + CECILL v3
Maintainer : team@gargantext.org
Stability : experimental
Portability : POSIX
-}
module Main where
import Gargantext.Prelude
import qualified Test.Core.LinearAlgebra as LinearAlgebra
import qualified Test.Core.Notifications as Notifications
import qualified Test.Core.Orchestrator as Orchestrator
import qualified Test.Core.Similarity as Similarity
import qualified Test.Core.Text.Corpus.Query as CorpusQuery
import qualified Test.Core.Text.Corpus.TSV as TSVParser
import qualified Test.Core.Utils as Utils
import qualified Test.Core.Worker as Worker
import qualified Test.Graph.Clustering as Clustering
import qualified Test.Graph.Distance as Distance
import qualified Test.Ngrams.Lang.Occurrences as Occurrences
import qualified Test.Ngrams.NLP as NLP
import qualified Test.Ngrams.Query as NgramsQuery
import qualified Test.Ngrams.Terms as NgramsTerms
import qualified Test.Offline.Errors as Errors
import qualified Test.Offline.JSON as JSON
import qualified Test.Offline.Ngrams as Ngrams
import qualified Test.Offline.Phylo as Phylo
import qualified Test.Offline.Stemming.Lancaster as Lancaster
import qualified Test.Parsers.Date as PD
import qualified Test.Utils.Crypto as Crypto
import qualified Test.Utils.Jobs as Jobs
import System.IO (hGetBuffering, hSetBuffering)
import Test.Tasty
import Test.Tasty.Hspec
-- | https://mercurytechnologies.github.io/ghciwatch/integration/tasty.html
protectStdoutBuffering :: IO a -> IO a
protectStdoutBuffering action =
bracket
(hGetBuffering stdout)
(\bufferMode -> hSetBuffering stdout bufferMode)
(const action)
main :: IO ()
main = do
utilSpec <- testSpec "Utils" Utils.test
clusteringSpec <- testSpec "Graph Clustering" Clustering.test
distanceSpec <- testSpec "Graph Distance" Distance.test
dateSplitSpec <- testSpec "Date split" PD.testDateSplit
cryptoSpec <- testSpec "Crypto" Crypto.test
nlpSpec <- testSpec "NLP" NLP.test
jobsSpec <- testSpec "Jobs" Jobs.test
similaritySpec <- testSpec "Similarity" Similarity.test
asyncUpdatesSpec <- testSpec "Notifications" Notifications.test
occurrencesSpec <- testSpec "Occurrences" Occurrences.test
protectStdoutBuffering $ defaultMain $ testGroup "Gargantext"
[ utilSpec
, clusteringSpec
, distanceSpec
, dateSplitSpec
, cryptoSpec
, nlpSpec
, jobsSpec
, occurrencesSpec
, NgramsQuery.tests
, occurrencesSpec
, CorpusQuery.tests
, TSVParser.tests
, JSON.tests
, Ngrams.tests
, Errors.tests
, similaritySpec
, Phylo.tests
, testGroup "Stemming" [ Lancaster.tests ]
, Worker.tests
, asyncUpdatesSpec
, Notifications.qcTests
, Orchestrator.qcTests
, NgramsTerms.tests
, LinearAlgebra.tests
]
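'testSpec' from tasty-hspec is the bridge used above: it turns an Hspec 'Spec' into a Tasty 'TestTree' inside IO, so converted specs can sit next to the remaining Tasty groups. A minimal, self-contained sketch of that bridge (names are illustrative):

import Test.Hspec
import Test.Tasty
import Test.Tasty.Hspec (testSpec)

someSpec :: Spec
someSpec = describe "example" $
  it "holds" $ (1 + 1 :: Int) `shouldBe` 2

main :: IO ()
main = do
  tree <- testSpec "Example (Hspec inside Tasty)" someSpec
  defaultMain $ testGroup "All" [ tree ]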