[nix] some attempt to make tests work

parent ac6ce3c0
Pipeline #7745 failed with stages
in 56 minutes and 33 seconds
...@@ -107,3 +107,44 @@ test: ...@@ -107,3 +107,44 @@ test:
chown -R root:root /root/ chown -R root:root /root/
chown -R root:root $STORE_DIR chown -R root:root $STORE_DIR
chown -R root:root /root/.cache/cabal/logs/ chown -R root:root /root/.cache/cabal/logs/
# CI job: run the project test-suite via the Nix flake.
# Structure note: indentation restored — the job must nest under GitLab CI's
# job-name key to be valid YAML.
test-flake:
  stage: test
  # The tests need to run as the 'test' user, because they leverage the
  # initdb utility from postgres that cannot be run by 'root'.
  before_script:
    - echo "Creating test user..."
    - mkdir -p /home/test
    - mkdir -p /root/.config
    - useradd -U test
    # Hand ownership of the build caches to 'test' so the unprivileged run
    # can read/write them.
    - chown -R test:test dist-newstyle/
    - chown -R test:test /root/
    - chown -R test:test $STORE_DIR
    - chown -R test:test ${CABAL_DIR}
    - mkdir -p "$XDG_CACHE_HOME/nix"
    - chown -R test:test "$XDG_CACHE_HOME/nix"
  cache:
    key: cabal.project
    paths:
      - dist-newstyle/
      - .cabal/
    policy: pull-push
  script:
    - |
      # Git refuses to operate on repos owned by another user; whitelist them.
      git config --global --add safe.directory $XDG_CACHE_HOME/nix/tarball-cache
      git config --global --add safe.directory '*'
      export TEST_TMPDIR="${CI_PROJECT_DIR}/tmp"
      mkdir -p "$TEST_TMPDIR"
      nix-shell --run "./bin/update-project-dependencies $STORE_DIR"
      mkdir -p /root/.cache/cabal/logs
      chown -R test:test /root/.cache/cabal/logs/
      chown -R test:test "$TEST_TMPDIR"
      chown -R test:test /root/.config/
      # Run the suite as 'test'. -m keeps the environment so the nix tooling
      # set up above stays reachable. TMPDIR is re-exported inside because su
      # resets it. ${CI_PROJECT_DIR} and $TEST_TMPDIR expand in the outer
      # (root) shell, before su runs, which is intended.
      su -m test -c "export TMPDIR=$TEST_TMPDIR && cd ${CI_PROJECT_DIR}; nix run .#gargantext:test:garg-test"
      # Restore root ownership so later stages / cache upload are not broken
      # by files owned by the throwaway 'test' user.
      chown -R root:root dist-newstyle/
      chown -R root:root /root/
      chown -R root:root $STORE_DIR
      chown -R root:root /root/.cache/cabal/logs/
...@@ -209,12 +209,12 @@ ...@@ -209,12 +209,12 @@
# Preserve rev for the inputMap # Preserve rev for the inputMap
passthru.rev = src.rev; passthru.rev = src.rev;
}; };
applyPatches = inputName: patches: pkgs.buildPackages.applyPatches # applyPatches = inputName: patches: pkgs.buildPackages.applyPatches
{ # {
name = "${inputName}-patched"; # name = "${inputName}-patched";
src = inputs.${inputName}; # src = inputs.${inputName};
inherit patches; # inherit patches;
} // { inherit (inputs.${inputName}) rev; }; # } // { inherit (inputs.${inputName}) rev; };
in in
{ {
#"https://github.com/AccelerateHS/accelerate-llvm.git" = inputs.accelerate-llvm; #"https://github.com/AccelerateHS/accelerate-llvm.git" = inputs.accelerate-llvm;
...@@ -295,6 +295,9 @@ ...@@ -295,6 +295,9 @@
}; };
packages.llvm-hs.components.library.build-tools = lib.mkOverride 49 [ pkgs.llvmPackages_12.llvm ]; packages.llvm-hs.components.library.build-tools = lib.mkOverride 49 [ pkgs.llvmPackages_12.llvm ];
#packages.gargantext.components.tests.garg-test.systemDepends = [ pkgs.postgresql_14 ];
packages.postgresql-simple.components.library.libs = [ pkgs.postgresql_14 ];
}) })
]; ];
...@@ -541,31 +544,17 @@ ...@@ -541,31 +544,17 @@
''; '';
}; };
}; };
# For garg-test-hspec
coreNLP = pkgs.callPackage ./nix/corenlp.nix { };
# # Avoid recompiling openjdk due to any overlay used in the common pkgs.
# let pkgs = import inputs.nixpkgs { inherit system; }; in
# inputs.flake-utils.lib.mkApp {
# drv = pkgs.writeShellApplication {
# name = "coreNLP";
# text = ''
# set -x
# exec ${pkgs.openjdk}/bin/java -mx4g -cp '${inputs.coreNLP}/*' edu.stanford.nlp.pipeline.StanfordCoreNLPServer -port 9000 -timeout 15000
# '';
# };
# };
}; };
# For running all checks (very slow): # For running all checks (very slow):
# $ nix -L flake check # $ nix -L flake check
# #
# For building a specific check of the project: # For building a specific check of the project:
# $ nix -L build .#project.hsPkgs.gargantext.components.tests.garg-test-tasty # $ nix -L build .#project.hsPkgs.gargantext.components.tests.garg-test
# $ result/bin/garg-test-tasty # $ result/bin/garg-test
# #
# Alternatively, but slower: # Alternatively, but slower:
# $ nix -L build .#checks.x86_64-linux.gargantext:test:garg-test-tasty # $ nix -L build .#checks.x86_64-linux.gargantext:test:garg-test
# $ bat result/test-stdout # $ bat result/test-stdout
# #
# See names from: # See names from:
......
...@@ -9,7 +9,7 @@ ...@@ -9,7 +9,7 @@
export GARGANTEXT_CORENLP_SERVER="${pkgs.nixVersions.stable}/bin/nix run .#coreNLP" export GARGANTEXT_CORENLP_SERVER="${pkgs.nixVersions.stable}/bin/nix run .#coreNLP"
''; '';
packages.gargantext.components.tests.garg-test-hspec.testFlags = [ packages.gargantext.components.tests.garg-test.testFlags = [
# FIXME: # FIXME:
# test/Test/Database/Operations.hs:64:7: # test/Test/Database/Operations.hs:64:7:
# 1) Database.Read/Writes, Corpus creation, Can add documents to a Corpus # 1) Database.Read/Writes, Corpus creation, Can add documents to a Corpus
...@@ -32,120 +32,112 @@ ...@@ -32,120 +32,112 @@
# (ConnectionFailure Network.Socket.connect: <socket: 18>: does not exist (Connection refused)) # (ConnectionFailure Network.Socket.connect: <socket: 18>: does not exist (Connection refused))
"--match" "--match"
"'!/Database/Read/Writes/Corpus creation/Can add documents to a Corpus/'" "'!/Database/Read/Writes/Corpus creation/Can add documents to a Corpus/'"
];
# Beware those failures are non-deterministic. # Beware those failures are non-deterministic.
# For reproducing it may help to use a project's variant like "coverage": # For reproducing it may help to use a project's variant like "coverage":
# $ nix -L build .#project.projectVariants.coverage.hsPkgs.gargantext.components.tests.garg-test-tasty # $ nix -L build .#project.projectVariants.coverage.hsPkgs.gargantext.components.tests.garg-test-tasty
# $ for i in {1..100}; do result/bin/garg-test-tasty --hide-successes || break; done # $ for i in {1..100}; do result/bin/garg-test-tasty --hide-successes || break; done
# Note that testFlags are only applied on checks.*, not on tests.* # Note that testFlags are only applied on checks.*, not on tests.*
packages.gargantext.components.tests.garg-test-tasty = { ("'" + lib.concatStringsSep " && " [
build-tools = [ # FAIL: (non-deterministic)
pkgs.graphviz # Graph Clustering
]; # Cross
testFlags = [ # Partition test: FAIL (0.14s)
"--pattern" # uncaught exception: ErrorCall
("'" + lib.concatStringsSep " && " [ # *** Internal error in package accelerate ***
# FAIL: (non-deterministic) # *** Please submit a bug report at https://github.com/AccelerateHS/accelerate/issues
# Graph Clustering # Encountered a duplicate 'Tag'
# Cross # 81: Tag 1, 77: Tag 1, 79: Tag 1, 84: Tag 1
# Partition test: FAIL (0.14s) # CallStack (from HasCallStack):
# uncaught exception: ErrorCall # internalError: Data.Array.Accelerate.Trafo.Sharing:2274:19
# *** Internal error in package accelerate *** # buildInitialEnvExp: Data.Array.Accelerate.Trafo.Sharing:2702:17
# *** Please submit a bug report at https://github.com/AccelerateHS/accelerate/issues # determineScopesExp: Data.Array.Accelerate.Trafo.Sharing:2608:17
# Encountered a duplicate 'Tag' # scopesExp: Data.Array.Accelerate.Trafo.Sharing:2636:26
# 81: Tag 1, 77: Tag 1, 79: Tag 1, 84: Tag 1 # scopesFun1: Data.Array.Accelerate.Trafo.Sharing:2429:60
# CallStack (from HasCallStack): # scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2490:33
# internalError: Data.Array.Accelerate.Trafo.Sharing:2274:19 # travF2MEA: Data.Array.Accelerate.Trafo.Sharing:2406:38
# buildInitialEnvExp: Data.Array.Accelerate.Trafo.Sharing:2702:17 # scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2464:33
# determineScopesExp: Data.Array.Accelerate.Trafo.Sharing:2608:17 # travEA: Data.Array.Accelerate.Trafo.Sharing:2397:38
# scopesExp: Data.Array.Accelerate.Trafo.Sharing:2636:26 # scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2509:34
# scopesFun1: Data.Array.Accelerate.Trafo.Sharing:2429:60 # travF2A2: Data.Array.Accelerate.Trafo.Sharing:2405:38
# scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2490:33 # scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2490:33
# travF2MEA: Data.Array.Accelerate.Trafo.Sharing:2406:38 # travF2MEA: Data.Array.Accelerate.Trafo.Sharing:2406:38
# scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2464:33 # scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2490:33
# travEA: Data.Array.Accelerate.Trafo.Sharing:2397:38 # travF2MEA: Data.Array.Accelerate.Trafo.Sharing:2406:38
# scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2509:34 # scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2331:46
# travF2A2: Data.Array.Accelerate.Trafo.Sharing:2405:38 # determineScopesSharingAcc: Data.Array.Accelerate.Trafo.Sharing:2718:17
# scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2490:33 # scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2818:33
# travF2MEA: Data.Array.Accelerate.Trafo.Sharing:2406:38 # travAE: Data.Array.Accelerate.Trafo.Sharing:2767:36
# scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2490:33 # scopesExp: Data.Array.Accelerate.Trafo.Sharing:2790:32
# travF2MEA: Data.Array.Accelerate.Trafo.Sharing:2406:38 # travE2: Data.Array.Accelerate.Trafo.Sharing:2749:36
# scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2331:46 # scopesExp: Data.Array.Accelerate.Trafo.Sharing:2780:30
# determineScopesSharingAcc: Data.Array.Accelerate.Trafo.Sharing:2718:17 # travE1: Data.Array.Accelerate.Trafo.Sharing:2766:36
# scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2818:33 # scopesExp: Data.Array.Accelerate.Trafo.Sharing:2789:32
# travAE: Data.Array.Accelerate.Trafo.Sharing:2767:36 # travE2: Data.Array.Accelerate.Trafo.Sharing:2749:36
# scopesExp: Data.Array.Accelerate.Trafo.Sharing:2790:32 # scopesExp: Data.Array.Accelerate.Trafo.Sharing:2780:30
# travE2: Data.Array.Accelerate.Trafo.Sharing:2749:36 # travE1: Data.Array.Accelerate.Trafo.Sharing:2751:36
# scopesExp: Data.Array.Accelerate.Trafo.Sharing:2780:30 # scopesExp: Data.Array.Accelerate.Trafo.Sharing:2800:32
# travE1: Data.Array.Accelerate.Trafo.Sharing:2766:36 # travE3: Data.Array.Accelerate.Trafo.Sharing:2760:36
# scopesExp: Data.Array.Accelerate.Trafo.Sharing:2789:32 # scopesExp: Data.Array.Accelerate.Trafo.Sharing:2712:56
# travE2: Data.Array.Accelerate.Trafo.Sharing:2749:36 # determineScopesSharingExp: Data.Array.Accelerate.Trafo.Sharing:2696:60
# scopesExp: Data.Array.Accelerate.Trafo.Sharing:2780:30 # determineScopesExp: Data.Array.Accelerate.Trafo.Sharing:2608:17
# travE1: Data.Array.Accelerate.Trafo.Sharing:2751:36 # scopesExp: Data.Array.Accelerate.Trafo.Sharing:2636:26
# scopesExp: Data.Array.Accelerate.Trafo.Sharing:2800:32 # scopesFun1: Data.Array.Accelerate.Trafo.Sharing:2400:60
# travE3: Data.Array.Accelerate.Trafo.Sharing:2760:36 # scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2331:46
# scopesExp: Data.Array.Accelerate.Trafo.Sharing:2712:56 # determineScopesSharingAcc: Data.Array.Accelerate.Trafo.Sharing:2317:37
# determineScopesSharingExp: Data.Array.Accelerate.Trafo.Sharing:2696:60 # determineScopesAcc: Data.Array.Accelerate.Trafo.Sharing:3101:5
# determineScopesExp: Data.Array.Accelerate.Trafo.Sharing:2608:17 # recoverSharingAcc: Data.Array.Accelerate.Trafo.Sharing:240:34
# scopesExp: Data.Array.Accelerate.Trafo.Sharing:2636:26 # convertOpenAcc: Data.Array.Accelerate.Trafo.Sharing:162:35
# scopesFun1: Data.Array.Accelerate.Trafo.Sharing:2400:60 # convertAccWith: Data.Array.Accelerate.Trafo:71:37
# scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2331:46 "!/Partition test/"
# determineScopesSharingAcc: Data.Array.Accelerate.Trafo.Sharing:2317:37
# determineScopesAcc: Data.Array.Accelerate.Trafo.Sharing:3101:5
# recoverSharingAcc: Data.Array.Accelerate.Trafo.Sharing:240:34
# convertOpenAcc: Data.Array.Accelerate.Trafo.Sharing:162:35
# convertAccWith: Data.Array.Accelerate.Trafo:71:37
"!/Partition test/"
# FIXME: FAIL (non-deterministic) # FIXME: FAIL (non-deterministic)
# Starting 1 job runners. # Starting 1 job runners.
# Starting 1 job runners. # Starting 1 job runners.
# expected: [Nothing,Just 10,Just 5] # expected: [Nothing,Just 10,Just 5]
# but got: [Nothing,Nothing,Nothing] # but got: [Nothing,Nothing,Nothing]
"!/can fetch the latest job status/" "!/can fetch the latest job status/"
# FIXME: FAIL (non-deterministic) # FIXME: FAIL (non-deterministic)
# expected: [Just 100] # expected: [Just 100]
# but got: [Nothing] # but got: [Nothing]
# expected: [Just 50] # expected: [Just 50]
# but got: [Nothing] # but got: [Nothing]
"!/can spin two separate jobs and track their status separately/" "!/can spin two separate jobs and track their status separately/"
# FIXME: FAIL # FIXME: FAIL
# -- | Build the coocurency matrix for 62 unit of time # -- | Build the coocurency matrix for 62 unit of time
# -- | Group 22 docs by 62 unit of time # -- | Group 22 docs by 62 unit of time
# Exception: # Exception:
# *** Internal error in package accelerate *** # *** Internal error in package accelerate ***
# *** Please submit a bug report at https://github.com/AccelerateHS/accelerate/issues # *** Please submit a bug report at https://github.com/AccelerateHS/accelerate/issues
# Encountered a duplicate 'Tag' # Encountered a duplicate 'Tag'
# 208: Tag 1, 212: Tag 1, 210: Tag 1, 205: Tag 1 # 208: Tag 1, 212: Tag 1, 210: Tag 1, 205: Tag 1
"!/phyloCleopatre returns expected data/" "!/phyloCleopatre returns expected data/"
# FIXME: FAIL # FIXME: FAIL
# huge error diff # huge error diff
"!/phylo2dot2json.is deterministic/" "!/phylo2dot2json.is deterministic/"
# FIXME: FAIL # FIXME: FAIL
# Exception: /nix/store/yx2cnkj4hq6zk867nkvpffvyd9qy2slp-gargantext-test-garg-test-tasty-0.0.7.1.1-data/share/ghc-9.4.7/x86_64-linux-ghc-9.4.7/gargantext-0.0.7.1.1/test-data/phylo/GarganText_NgramsList-187482.csv: openBinaryFile: does not exist (No such file or directory) # Exception: /nix/store/yx2cnkj4hq6zk867nkvpffvyd9qy2slp-gargantext-test-garg-test-tasty-0.0.7.1.1-data/share/ghc-9.4.7/x86_64-linux-ghc-9.4.7/gargantext-0.0.7.1.1/test-data/phylo/GarganText_NgramsList-187482.csv: openBinaryFile: does not exist (No such file or directory)
#"!/toPhylo.is deterministic/" #"!/toPhylo.is deterministic/"
# FIXME: recursively call cabal # FIXME: recursively call cabal
"!/test vector works/" "!/test vector works/"
# FIXME: FAIL (deterministic when coverage is enabled): # FIXME: FAIL (deterministic when coverage is enabled):
# nix -L build .#project.projectVariants.haskell-nix-ghc.flake"'".ciJobs.coverage.gargantext # nix -L build .#project.projectVariants.haskell-nix-ghc.flake"'".ciJobs.coverage.gargantext
# #
# check if similarities optimizations are well implemented # check if similarities optimizations are well implemented
# uncaught exception: ErrorCall # uncaught exception: ErrorCall
# *** Internal error in package accelerate *** # *** Internal error in package accelerate ***
# *** Please submit a bug report at https://github.com/AccelerateHS/accelerate/issues # *** Please submit a bug report at https://github.com/AccelerateHS/accelerate/issues
# Encountered a duplicate 'Tag' # Encountered a duplicate 'Tag'
# 259: Tag 1, 261: Tag 1 # 259: Tag 1, 261: Tag 1
"!/Conditional/" "!/Conditional/"
] + "'") ] + "'")
]; ];
};
}) })
]; ];
} }
...@@ -74,7 +74,7 @@ killProcessTree :: (Handle, Handle, ProcessHandle) -> IO () ...@@ -74,7 +74,7 @@ killProcessTree :: (Handle, Handle, ProcessHandle) -> IO ()
killProcessTree (_, _, ph) = do killProcessTree (_, _, ph) = do
pid <- getPid ph pid <- getPid ph
case pid of case pid of
Nothing -> putText "Process already terminated" Nothing -> putText "[killProcessTree] Process already terminated"
Just p -> do Just p -> do
pgid <- getProcessGroupIDOf p pgid <- getProcessGroupIDOf p
signalProcessGroup keyboardSignal pgid signalProcessGroup keyboardSignal pgid
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment