[flake] some work

parent 8d6dab80
-- Generated by stack2cabal
-- index-state: 2023-12-10T10:34:46Z
index-state: 2025-02-17T10:13:39Z
--index-state: 2025-02-17T10:13:39Z
index-state: 2024-12-11T00:00:00Z
with-compiler: ghc-9.6.6
optimization: 2
......@@ -25,7 +26,7 @@ source-repository-package
source-repository-package
type: git
location: https://gitlab.iscpif.fr/gargantext/opaleye-textsearch.git
tag: 04b5c9044fef44393b66bffa258ca0b0f59c1087
tag: 4527a30d793953c74c87558689a35266f3e92fa4
source-repository-package
type: git
......@@ -94,7 +95,7 @@ source-repository-package
source-repository-package
type: git
location: https://gitlab.iscpif.fr/gargantext/haskell-gargantext-prelude
tag: 214b31a2db46de5a2cac24231a3c07a1c4c3fab9
tag: 1474451ece6af89fcda27cde351149a6a3034e4a
source-repository-package
type: git
......@@ -120,7 +121,7 @@ source-repository-package
source-repository-package
type: git
location: https://gitlab.iscpif.fr/gargantext/patches-class.git
tag: a591716220cfcabffa24eb29cbaa2517023642af
tag: f2d7d7a85fddbc6341a77fdea70a60c9c159a29f
source-repository-package
type: git
......
This diff is collapsed.
# haskell.nix project module: test-suite configuration for gargantext.
# Skips or filters tests that are flaky, environment-dependent, or known
# broken; each exclusion keeps the observed failure output as context so
# it can be re-enabled once the underlying issue is fixed.
{ lib, ... }:
{
  modules = [
    ({ pkgs, ... }: {
      # Start StanfordCoreNLPServer to pass the Hspec tests
      # Use `nix run` instead of `inputs.coreNLP` directly
      # to avoid downloading it if not running garg-test-hspec
      # NOTE(review): `pkgs.nixFlakes` is a deprecated alias in recent
      # nixpkgs — confirm it still exists in the pinned revision.
      packages.gargantext.components.tests.garg-test-hspec.preCheck = ''
        export GARGANTEXT_CORENLP_SERVER="${pkgs.nixFlakes}/bin/nix run .#coreNLP"
      '';
      # Hspec filter: exclude the one spec that needs a live CoreNLP
      # server listening on localhost:9000 (unavailable in the sandbox).
      packages.gargantext.components.tests.garg-test-hspec.testFlags = [
        # FIXME:
        # test/Test/Database/Operations.hs:64:7:
        # 1) Database.Read/Writes, Corpus creation, Can add documents to a Corpus
        # uncaught exception: HttpException
        # HttpExceptionRequest Request {
        # host = "localhost"
        # port = 9000
        # secure = False
        # requestHeaders = [("Accept","application/json")]
        # path = "/"
        # queryString = "?properties=%7B%22annotators%22:%22tokenize,ssplit,pos,ner%22,%22outputFormat%22:%22json%22%7D"
        # method = "POST"
        # proxy = Nothing
        # rawBody = False
        # redirectCount = 10
        # responseTimeout = ResponseTimeoutDefault
        # requestVersion = HTTP/1.1
        # proxySecureMode = ProxySecureWithConnect
        # }
        # (ConnectionFailure Network.Socket.connect: <socket: 18>: does not exist (Connection refused))
        "--match"
        "'!/Database/Read/Writes/Corpus creation/Can add documents to a Corpus/'"
      ];
      # Beware those failures are non-deterministic.
      # For reproducing it may help to use a project's variant like "coverage":
      # $ nix -L build .#project.projectVariants.coverage.hsPkgs.gargantext.components.tests.garg-test-tasty
      # $ for i in {1..100}; do result/bin/garg-test-tasty --hide-successes || break; done
      # Note that testFlags are only applied on checks.*, not on tests.*
      packages.gargantext.components.tests.garg-test-tasty = {
        # graphviz is needed at test runtime (dot invocations).
        build-tools = [
          pkgs.graphviz
        ];
        # tasty filter: one single-quoted "--pattern" expression that
        # AND-combines every exclusion below.
        testFlags = [
          "--pattern"
          ("'" + lib.concatStringsSep " && " [
            # FAIL: (non-deterministic)
            # Graph Clustering
            # Cross
            # Partition test: FAIL (0.14s)
            # uncaught exception: ErrorCall
            # *** Internal error in package accelerate ***
            # *** Please submit a bug report at https://github.com/AccelerateHS/accelerate/issues
            # Encountered a duplicate 'Tag'
            # 81: Tag 1, 77: Tag 1, 79: Tag 1, 84: Tag 1
            # CallStack (from HasCallStack):
            # internalError: Data.Array.Accelerate.Trafo.Sharing:2274:19
            # buildInitialEnvExp: Data.Array.Accelerate.Trafo.Sharing:2702:17
            # determineScopesExp: Data.Array.Accelerate.Trafo.Sharing:2608:17
            # scopesExp: Data.Array.Accelerate.Trafo.Sharing:2636:26
            # scopesFun1: Data.Array.Accelerate.Trafo.Sharing:2429:60
            # scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2490:33
            # travF2MEA: Data.Array.Accelerate.Trafo.Sharing:2406:38
            # scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2464:33
            # travEA: Data.Array.Accelerate.Trafo.Sharing:2397:38
            # scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2509:34
            # travF2A2: Data.Array.Accelerate.Trafo.Sharing:2405:38
            # scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2490:33
            # travF2MEA: Data.Array.Accelerate.Trafo.Sharing:2406:38
            # scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2490:33
            # travF2MEA: Data.Array.Accelerate.Trafo.Sharing:2406:38
            # scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2331:46
            # determineScopesSharingAcc: Data.Array.Accelerate.Trafo.Sharing:2718:17
            # scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2818:33
            # travAE: Data.Array.Accelerate.Trafo.Sharing:2767:36
            # scopesExp: Data.Array.Accelerate.Trafo.Sharing:2790:32
            # travE2: Data.Array.Accelerate.Trafo.Sharing:2749:36
            # scopesExp: Data.Array.Accelerate.Trafo.Sharing:2780:30
            # travE1: Data.Array.Accelerate.Trafo.Sharing:2766:36
            # scopesExp: Data.Array.Accelerate.Trafo.Sharing:2789:32
            # travE2: Data.Array.Accelerate.Trafo.Sharing:2749:36
            # scopesExp: Data.Array.Accelerate.Trafo.Sharing:2780:30
            # travE1: Data.Array.Accelerate.Trafo.Sharing:2751:36
            # scopesExp: Data.Array.Accelerate.Trafo.Sharing:2800:32
            # travE3: Data.Array.Accelerate.Trafo.Sharing:2760:36
            # scopesExp: Data.Array.Accelerate.Trafo.Sharing:2712:56
            # determineScopesSharingExp: Data.Array.Accelerate.Trafo.Sharing:2696:60
            # determineScopesExp: Data.Array.Accelerate.Trafo.Sharing:2608:17
            # scopesExp: Data.Array.Accelerate.Trafo.Sharing:2636:26
            # scopesFun1: Data.Array.Accelerate.Trafo.Sharing:2400:60
            # scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2331:46
            # determineScopesSharingAcc: Data.Array.Accelerate.Trafo.Sharing:2317:37
            # determineScopesAcc: Data.Array.Accelerate.Trafo.Sharing:3101:5
            # recoverSharingAcc: Data.Array.Accelerate.Trafo.Sharing:240:34
            # convertOpenAcc: Data.Array.Accelerate.Trafo.Sharing:162:35
            # convertAccWith: Data.Array.Accelerate.Trafo:71:37
            "!/Partition test/"
            # FIXME: FAIL (non-deterministic)
            # Starting 1 job runners.
            # Starting 1 job runners.
            # expected: [Nothing,Just 10,Just 5]
            # but got: [Nothing,Nothing,Nothing]
            "!/can fetch the latest job status/"
            # FIXME: FAIL (non-deterministic)
            # expected: [Just 100]
            # but got: [Nothing]
            # expected: [Just 50]
            # but got: [Nothing]
            "!/can spin two separate jobs and track their status separately/"
            # FIXME: FAIL
            # -- | Build the coocurency matrix for 62 unit of time
            # -- | Group 22 docs by 62 unit of time
            # Exception:
            # *** Internal error in package accelerate ***
            # *** Please submit a bug report at https://github.com/AccelerateHS/accelerate/issues
            # Encountered a duplicate 'Tag'
            # 208: Tag 1, 212: Tag 1, 210: Tag 1, 205: Tag 1
            "!/phyloCleopatre returns expected data/"
            # FIXME: FAIL
            # huge error diff
            "!/phylo2dot2json.is deterministic/"
            # FIXME: FAIL
            # Exception: /nix/store/yx2cnkj4hq6zk867nkvpffvyd9qy2slp-gargantext-test-garg-test-tasty-0.0.7.1.1-data/share/ghc-9.4.7/x86_64-linux-ghc-9.4.7/gargantext-0.0.7.1.1/test-data/phylo/GarganText_NgramsList-187482.csv: openBinaryFile: does not exist (No such file or directory)
            # This exclusion is currently commented out — the test runs.
            #"!/toPhylo.is deterministic/"
            # FIXME: recursively call cabal
            "!/test vector works/"
            # FIXME: FAIL (deterministic when coverage is enabled):
            # nix -L build .#project.projectVariants.haskell-nix-ghc.flake"'".ciJobs.coverage.gargantext
            #
            # check if similarities optimizations are well implemented
            # uncaught exception: ErrorCall
            # *** Internal error in package accelerate ***
            # *** Please submit a bug report at https://github.com/AccelerateHS/accelerate/issues
            # Encountered a duplicate 'Tag'
            # 259: Tag 1, 261: Tag 1
            "!/Conditional/"
          ] + "'")
        ];
      };
    })
  ];
}
# haskell.nix project module: dead-code analysis with `weeder`.
# Collects .hie files from all selected project packages into a link farm
# and runs weeder over them, exposing the report as `config.weeder.analysis`.
{ config, pkgs, lib, haskellLib, ... }:
let
  cfg = config.weeder;
  # TOML serializer used to generate weeder's config file from `cfg.settings`.
  toml = pkgs.formats.toml { };
in
{
  options.weeder = {
    # Read-only result derivation containing the weeder report; set below
    # in `config`, hence `internal` and the placeholder `null` default.
    analysis = lib.mkOption {
      type = lib.types.package;
      default = null;
      internal = true;
    };
    # Predicate-style selector: a function from the package set to the
    # attrset of packages whose .hie files should be analysed.
    packages = lib.mkOption {
      type = lib.types.unspecified;
      default = haskellLib.selectProjectPackages;
    };
    # Free-form weeder configuration, serialized to weeder.toml.
    settings = lib.mkOption {
      type = toml.type;
      default = { };
    };
  };
  config = {
    modules = [
      {
        # Enable writeHieFiles by default.
        # See https://github.com/input-output-hk/haskell.nix/issues/298#issuecomment-767936405
        # for what's going on in this trick.
        options.packages = lib.mkOption {
          type = lib.types.attrsOf (
            lib.types.submodule (
              { config, ... }: {
                # Only force HIE output for packages the selector keeps:
                # feed a singleton set through `cfg.packages` and check
                # whether this package survives the selection.
                config = lib.mkIf
                  ((cfg.packages { ${config.package.identifier.name} = config.package; })
                    ? ${config.package.identifier.name})
                  {
                    writeHieFiles = lib.mkForce true;
                  };
              }
            )
          );
        };
      }
    ];
    weeder.analysis =
      pkgs.runCommand "weeder-${config.name}"
        {
          buildInputs = [
            # glibcLocales provides en_US.UTF-8, required by weeder's output.
            pkgs.glibcLocales
            pkgs.haskell.packages.${config.compiler-nix-name}.weeder
          ];
          # One symlink per component's hie output, named
          # <pkg>-{library,exe-<name>,test-<name>}.
          allHieFiles = pkgs.linkFarm "allHieFiles" (
            lib.concatMap
              (package:
                let
                  lib-hies = lib.optional (package.components ? library) {
                    name = "${package.identifier.name}-library";
                    path = package.components.library.hie;
                  };
                  exe-hies = lib.concatMap
                    (exe:
                      lib.optional (package.components.exes.${exe} ? hie) {
                        name = "${package.identifier.name}-exe-${exe}";
                        path = package.components.exes.${exe}.hie;
                      })
                    (lib.attrNames package.components.exes);
                  test-hies = lib.concatMap
                    (test-name:
                      let
                        test = package.components.tests.${test-name};
                        is-doctest =
                          # doctest tests build _all_ components of a package.
                          # The GHC id of these packages will be different,
                          # which means that when we run weeder, all this code
                          # will be uncalled. These are false positives, so
                          # we don't include hie files from anything that
                          # depends on `doctest`.
                          lib.any (x: x.identifier.name or "" == "doctest") test.config.depends;
                      in
                      lib.optional (!is-doctest) {
                        name = "${package.identifier.name}-test-${test-name}";
                        path = test.hie;
                      })
                    (lib.attrNames package.components.tests);
                in
                lib-hies ++ exe-hies ++ test-hies
              )
              (builtins.attrValues (cfg.packages config.hsPkgs))
          );
          # The trailing `test $? = 228` accepts weeder's "weeds found" exit
          # code so the report derivation still succeeds when weeds exist.
        } ''
        export LC_ALL=en_US.UTF-8 LANG=en_US.UTF-8 LANGUAGE=en_US.UTF-8
        cd $allHieFiles
        weeder >$out --no-default-fields --config ${toml.generate "weeder.toml" cfg.settings} ||
          test $? = 228 # One or more weeds found
      '';
  };
}
# nixpkgs overlay: patch graphviz's lexer buffer size.
final: prev: {
  # FIXME: update to a version >= 10.0.1
  # to include 2a265497 from https://gitlab.com/graphviz/graphviz/-/merge_requests/3487
  graphviz = prev.graphviz.overrideAttrs (finalAttrs: prevAttrs: {
    patches =
      (prevAttrs.patches or [ ])
      ++ [
        # Increase the YY_BUF_SIZE
        # See https://gitlab.iscpif.fr/gargantext/haskell-gargantext/issues/290#note_9015
        ./graphviz/graphviz-yy-buf-size.diff
      ];
  });
}
--- a/lib/cgraph/scan.l
+++ b/lib/cgraph/scan.l
@@ -32,7 +32,7 @@
#include <stdbool.h>
#include <stddef.h>
#include <string.h>
-// #define YY_BUF_SIZE 128000
+#define YY_BUF_SIZE 128000
#define GRAPH_EOF_TOKEN '@' /* lex class must be defined below */
/* this is a workaround for linux flex */
static int line_num = 1;
# nixpkgs overlay: build igraph (and its dependency plfit) without OpenMP,
# so GHC's runtime keeps sole control of the kernel threads.
finalPkgs: previousPkgs: {
  # WARNING: the nixpkgs pinned brings igraph-0.10.7 not igraph-0.10.4
  # as expected by haskell-igraph-0.10.4
  # Note that igraph uses the lapack and blas alternative selector packages
  # which default to using openblas.
  igraph = (previousPkgs.igraph.override {
    # fop pulls openjdk, scipy, blas… just to build PDF docs.
    fop = null;
  }).overrideAttrs (previousAttrs: {
    # `or [ ]` guards against a future nixpkgs bump where cmakeFlags is
    # unset, matching the defensive pattern used by the graphviz overlay.
    cmakeFlags = (previousAttrs.cmakeFlags or [ ]) ++ [
      # Disable Open Multi-Processing, to let GHC handle the kernel threads.
      "-DIGRAPH_OPENMP_SUPPORT=OFF"
    ];
  });
  # Dependency of igraph
  plfit = previousPkgs.plfit.overrideAttrs (previousAttrs: {
    cmakeFlags = (previousAttrs.cmakeFlags or [ ]) ++ [
      # Disable Open Multi-Processing, to let GHC handle the kernel threads.
      "-DPLFIT_USE_OPENMP=OFF"
    ];
  });
}
# nixpkgs overlay: single-threaded openblas (used by igraph and hmatrix).
final: prev: {
  # Let GHC handle the kernel threads.
  # See also https://github.com/OpenMathLib/OpenBLAS/issues/2543
  openblas = prev.openblas.override { singleThreaded = true; };
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment