[flake] some work

parent 8d6dab80
-- Generated by stack2cabal
-- index-state: 2023-12-10T10:34:46Z
index-state: 2025-02-17T10:13:39Z
--index-state: 2025-02-17T10:13:39Z
-- NOTE(review): a second active `index-state` field duplicated the one above;
-- the stale 2024-12-11 pin is commented out to keep a single active value.
-- index-state: 2024-12-11T00:00:00Z
with-compiler: ghc-9.6.6
optimization: 2
......@@ -25,7 +26,7 @@ source-repository-package
source-repository-package
type: git
location: https://gitlab.iscpif.fr/gargantext/opaleye-textsearch.git
tag: 04b5c9044fef44393b66bffa258ca0b0f59c1087
tag: 4527a30d793953c74c87558689a35266f3e92fa4
source-repository-package
type: git
......@@ -94,7 +95,7 @@ source-repository-package
source-repository-package
type: git
location: https://gitlab.iscpif.fr/gargantext/haskell-gargantext-prelude
tag: 214b31a2db46de5a2cac24231a3c07a1c4c3fab9
tag: 1474451ece6af89fcda27cde351149a6a3034e4a
source-repository-package
type: git
......@@ -120,7 +121,7 @@ source-repository-package
source-repository-package
type: git
location: https://gitlab.iscpif.fr/gargantext/patches-class.git
tag: a591716220cfcabffa24eb29cbaa2517023642af
tag: f2d7d7a85fddbc6341a77fdea70a60c9c159a29f
source-repository-package
type: git
......
# SPDX-FileCopyrightText: (C) Gargantext Team <team@gargantext.org>
# SPDX-License-Identifier: CC0-1.0
{
description = "A Nix Flake for Gargantext's Haskell server";
# To use this Nix flake you may need to enable Nix flake support for your user with:
# echo >>~/.config/nix/nix.conf "experimental-features = nix-command flakes"
# WARNING: be sure that `nix --version` is greater or equal to 2.18,
# otherwise nix may not support some attributes used in flake.lock.
# For any input, one can:
# Update to the latest commit:
# nix flake lock --update-input nixpkgs
# Or to a specific commit (eg. a green one on https://status.nixos.org):
# nix flake lock --override-input nixpkgs github:NixOS/nixpkgs/72da83d9515b43550436891f538ff41d68eecc7f
# Or to a commit (in /etc/nix/registry.json) of a NixOS host:
# nix flake lock --override-input nixpkgs flake:nixpkgs
# Flake inputs: haskell.nix + nixpkgs + dev tooling, followed by one
# `flake = false` input per source-repository-package of cabal.project
# (these are wired to cabal.project locations by the inputMap below).
inputs = {
# FIXME: update only once https://github.com/input-output-hk/haskell.nix/issues/2258
# has been fixed, otherwise accelerate-llvm will fail to install.
#haskell-nix.url = "github:input-output-hk/haskell.nix/a921f7340c0c1a561f0e743d4b07b8130ea5c5be";
haskell-nix.url = "github:input-output-hk/haskell.nix/nix-tools-0.2.9";
# For trying to hit cache.iog.io one would have
# to follow haskell.nix's nixpkgs pinned version,
# but it may be a few months old, so pin it here instead.
#nixpkgs.follows = "haskell-nix/nixpkgs";
nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.11";
haskell-nix.inputs.nixpkgs.follows = "nixpkgs";
# Convenient Nix Flake utilities, like flake-utils.lib.eachSystem.
flake-utils.url = "github:numtide/flake-utils";
# Git pre-commit hooks.
git-hooks.url = "github:cachix/git-hooks.nix";
git-hooks.inputs.nixpkgs.follows = "nixpkgs";
# FIXME: git.iscpif.fr's gitlab's API does not work well,
# hence fallback to the git+https:// scheme.
# WARNING: be sure to use git+https:// and not https://
# otherwise the rev attribute will be missing for the inputMap to work.
# gargantext-prelude.url = "gitlab:gargantext/haskell-gargantext-prelude?host=git.iscpif.fr";
accelerate = { url = "git+https://github.com/AccelerateHS/accelerate.git?submodules=1&rev=334d05519436bb7f20f9926ec76418f5b8afa359"; flake = false; };
#accelerate-arithmetic = { url = "github:alpmestan/accelerate-arithmetic"; flake = false; };
#accelerate-llvm = { url = "github:AccelerateHS/accelerate-llvm"; flake = false; };
# WARNING: using the fix-error branch
# NOTE(review): this input is commented out, yet `inputs.accelerate-utility`
# is still referenced by the inputMap below — confirm cabal.project no longer
# lists accelerate-utility, or re-enable this input.
#accelerate-utility = { url = "git+https://gitlab.iscpif.fr/amestanogullari/accelerate-utility.git?ref=fix-error"; flake = false; };
boolexpr = { url = "github:boolexpr/boolexpr"; flake = false; };
#coreNLP = { url = "http://nlp.stanford.edu/software/stanford-corenlp-4.5.4.zip"; flake = false; };
crawlerArxiv = { url = "git+https://gitlab.iscpif.fr/gargantext/crawlers/arxiv-api.git"; flake = false; };
crawlerHAL = { url = "git+https://gitlab.iscpif.fr/gargantext/crawlers/hal.git"; flake = false; };
crawlerISTEX = { url = "git+https://gitlab.iscpif.fr/gargantext/crawlers/istex.git"; flake = false; };
crawlerIsidore = { url = "git+https://gitlab.iscpif.fr/gargantext/crawlers/isidore.git"; flake = false; };
crawlerPubMed = { url = "git+https://gitlab.iscpif.fr/gargantext/crawlers/pubmed.git"; flake = false; };
data-time-segment = { url = "github:delanoe/data-time-segment"; flake = false; };
eigen = { url = "github:chessai/eigen"; flake = false; };
# WARNING: using the wavewave/ghcHEAD branch
#ekg-json = { url = "github:MercuryTechnologies/ekg-json/wavewave/ghcHEAD"; flake = false; };
# NOTE(review): the inputMap below keys ekg-json under the MercuryTechnologies
# URL while this input fetches haskell-github-trust — verify the fetched rev
# matches the tag pinned in cabal.project.
ekg-json = { url = "github:haskell-github-trust/ekg-json"; flake = false; };
epo-api-client = { url = "git+https://gitlab.iscpif.fr/gargantext/crawlers/epo-proxy-api.git"; flake = false; };
# WARNING: using the debugNaN branch
gargantext-graph = { url = "git+https://gitlab.iscpif.fr/gargantext/gargantext-graph.git?rev=a08ceed71b297a811f90cb86c3c61dc0b153036b"; flake = false; };
# WARNING: unmerged commit
# See https://gitlab.iscpif.fr/gargantext/haskell-gargantext-prelude/merge_requests/13
gargantext-prelude = { url = "git+https://gitlab.iscpif.fr/gargantext/haskell-gargantext-prelude.git?ref=master"; flake = false; };
haskell-igraph = { url = "git+https://gitlab.iscpif.fr/gargantext/haskell-igraph.git?rev=9f8a2f4a014539826a4eab3215cc70c0813f20cb"; flake = false; };
haskell-throttle = { url = "git+https://gitlab.iscpif.fr/gargantext/haskell-throttle"; flake = false; };
hlcm = { url = "git+https://gitlab.iscpif.fr/gargantext/hlcm.git"; flake = false; };
# WARNING: using the alp/imap-static branch
hmatrix = { url = "github:alpmestan/hmatrix/alp/imap-static"; flake = false; };
hsinfomap = { url = "git+https://gitlab.iscpif.fr/gargantext/haskell-infomap.git"; flake = false; };
http-reverse-proxy = { url = "github:adinapoli/http-reverse-proxy"; flake = false; };
iso639 = { url = "git+https://gitlab.iscpif.fr/gargantext/iso639.git"; flake = false; };
# WARNING: fork of https://github.com/llvm-hs/llvm-hs
# using adinapoli/llvm-12-ghc-947-compat branch
# Tracked in: https://gitlab.iscpif.fr/gargantext/haskell-gargantext/issues/318
#llvm-hs = { url = "github:adinapoli/llvm-hs/adinapoli/llvm-12-ghc-947-compat"; flake = false; };
nanomsg-haskell = { url = "git+https://github.com/garganscript/nanomsg-haskell"; flake = false; };
opaleye-textsearch = { url = "git+https://gitlab.iscpif.fr/gargantext/opaleye-textsearch.git?allRefs=1"; flake = false; };
openalex = { url = "git+https://gitlab.iscpif.fr/gargantext/crawlers/openalex.git"; flake = false; };
patches-class = { url = "git+https://gitlab.iscpif.fr/gargantext/patches-class.git"; flake = false; };
patches-map = { url = "github:delanoe/patches-map"; flake = false; };
rdf4h = { url = "github:robstewart57/rdf4h"; flake = false; };
# WARNING: using the more-exports branch
servant-job = { url = "github:adinapoli/servant-job"; flake = false; };
servant-routes = { url = "github:fpringle/servant-routes"; flake = false; };
servant-xml-conduit = { url = "git+https://gitlab.iscpif.fr/gargantext/servant-xml-conduit"; flake = false; };
# WARNING: using the alp/static branch
sparse-linear = { url = "github:alpmestan/sparse-linear/alp/static"; flake = false; };
#toml-parser = { url = "git+https://github.com/glguy/toml-parser/toml-parser-2.0.1.0"; flake = false; };
};
# For printing the available outputs:
# $ nix -L flake show --allow-import-from-derivation
# Note that multiple-systems are enabled hence it can only work
# without IFD because of https://github.com/NixOS/nix/issues/4265,
# ie. with a non-null materialized=
outputs = { self, ... }@inputs:
let
supportedSystems = with inputs.flake-utils.lib.system; [
x86_64-linux
x86_64-darwin
aarch64-linux
aarch64-darwin
];
in
inputs.flake-utils.lib.eachSystem supportedSystems (system:
let
# Nixpkgs instantiated for this system, with haskell.nix's overlay
# plus local overlays for graphviz, igraph and openblas.
pkgs = import inputs.nixpkgs {
inherit system;
config = inputs.haskell-nix.config;
overlays = [
inputs.haskell-nix.overlay
(import nix/nixpkgs/overlays/graphviz.nix)
(import nix/nixpkgs/overlays/igraph.nix)
(import nix/nixpkgs/overlays/openblas.nix)
];
};
# A standard library of Nix expressions.
lib = inputs.nixpkgs.lib;
# A library of Nix expressions internal to haskell.nix.
inherit (pkgs.haskell-nix) haskellLib;
# Raw text of cabal.project; parsed below to recover the
# `with-compiler` and `index-state` fields.
rawCabalProject = lib.readFile ./cabal.project;
# haskell.nix's main entry point
project = pkgs.haskell-nix.cabalProject' [
({ config, pkgs, ... }: {
name = "gargantext";
# Filter-in input files to avoid unnecessary rebuilds
# after changing any file tracked in Git that is not actually used by cabalProject.
src = with lib.fileset; toSource {
root = ./.;
fileset = unions [
./.clippy.dhall
./LICENSE
./bench-data
./cabal.project
./cabal.project.freeze
./devops
./ekg-assets
#./gargantext-cors-settings.toml
./gargantext.cabal
./test-data
(fileFilter (file: lib.any file.hasExt [ "hs" ]) ./bench)
(fileFilter (file: lib.any file.hasExt [ "hs" ]) ./bin)
(fileFilter (file: lib.any file.hasExt [ "hs" ]) ./src)
(fileFilter (file: lib.any file.hasExt [ "hs" ]) ./test)
];
};
# By default plan evaluation is done on the build system.
#evalSystem = "x86_64-linux";
# Retrieve compiler-nix-name from cabal.project's with-compiler field.
# Eg. `with-compiler: ghc-9.4.7` becomes "ghc947"
# NOTE: `lib.head` takes the first match and fails if cabal.project
# contains no `with-compiler` field.
compiler-nix-name =
lib.replaceStrings [ "-" "." ] [ "" "" ]
(lib.head (lib.concatLists (lib.filter (l: l != null)
(builtins.map (l: builtins.match "^with-compiler: *(.*)" l)
(lib.splitString "\n" rawCabalProject)))));
# Download GHC from Nixpkgs' binary cache instead of IOHK's
# which would be done by using: pkgs.haskell-nix.compiler
# Beware that if any dependency has `build-depends: ghc`
# then `reinstallableLibGhc = false` is required
# to avoid missing `genprimopcode`.
# See https://github.com/input-output-hk/haskell.nix/issues/1809#issuecomment-1358469589
compilerSelection = pkgs: pkgs.haskell.compiler;
# Pinning the index-state of Hackage,
# instead of using the latest known by haskell.nix,
# removes haskell.nix from interfering too much into the reproducibility.
# It also enables to materialize the plan-nix.
index-state = haskellLib.parseIndexState rawCabalProject;
# Materializing a project means caching the nix files
# generated from the *.cabal/stack.yaml/package.yaml files.
# To update:
# $ nix run .#update-nix-cache-haskell-nix-materialized
# It's only checked in ciJobs.
materialized = if builtins.pathExists nix/cache/haskell.nix/materialized then nix/cache/haskell.nix/materialized else null;
# Using inputMap for each source-repository-package of cabal.project
# leverages Nix Flake's inputs to automatically get their rev and sha256 hashes
# and to check upstreams for updates (using `nix flake update`
# or `nix flake lock --update-input <input>`).
inputMap =
let
# findCabalFiles (in nix-tools/nix-tools/cabal2nix/Main.hs)
# always prefers package.yaml over *.cabal,
# but when the resulting *.cabal file is different
# than a previously existing one,
# the build fails with an error like this one:
# crawlerIsidore.cabal was modified manually, please use --force to overwrite.
# Hence just remove this out-of-sync package.yaml.
removePackageYaml = src: pkgs.symlinkJoin {
name = "removePackageYaml-patched";
paths = [ src ];
postBuild = "rm $out/package.yaml";
# Preserve rev for the inputMap
passthru.rev = src.rev;
};
# Apply `patches` on top of the flake input `inputName`,
# preserving the input's rev for the inputMap.
applyPatches = inputName: patches: pkgs.buildPackages.applyPatches
{
name = "${inputName}-patched";
src = inputs.${inputName};
inherit patches;
} // { inherit (inputs.${inputName}) rev; };
in
{
# Keys must match the `location` fields of cabal.project's
# source-repository-package stanzas exactly (including any trailing .git).
#"https://github.com/AccelerateHS/accelerate-llvm.git" = inputs.accelerate-llvm;
"https://github.com/AccelerateHS/accelerate.git" = inputs.accelerate;
"https://github.com/MercuryTechnologies/ekg-json.git" = inputs.ekg-json;
"https://github.com/boolexpr/boolexpr.git" = inputs.boolexpr;
#"https://github.com/adinapoli/llvm-hs.git" = inputs.llvm-hs;
#"https://github.com/alpmestan/accelerate-arithmetic.git" = applyPatches "accelerate-arithmetic" [
# nix/haskell.nix/patches/accelerate-arithmetic/0001-remove-test-using-removed-realBandedGramian.patch
#];
"https://github.com/alpmestan/hmatrix.git" = inputs.hmatrix;
"https://github.com/adinapoli/servant-job.git" = removePackageYaml inputs.servant-job;
"https://github.com/alpmestan/sparse-linear.git" = inputs.sparse-linear;
"https://github.com/chessai/eigen.git" = inputs.eigen;
"https://github.com/delanoe/data-time-segment.git" = inputs.data-time-segment;
"https://github.com/adinapoli/http-reverse-proxy.git" = inputs.http-reverse-proxy;
"https://github.com/delanoe/patches-map" = inputs.patches-map;
"https://gitlab.iscpif.fr/gargantext/opaleye-textsearch.git" = inputs.opaleye-textsearch;
"https://github.com/robstewart57/rdf4h.git" = inputs.rdf4h;
"https://github.com/fpringle/servant-routes.git" = inputs.servant-routes;
# NOTE(review): `accelerate-utility` is commented out in `inputs` above, so
# forcing this attribute would fail with a missing-attribute error — confirm
# whether cabal.project still lists it, and re-enable the input if so.
"https://gitlab.iscpif.fr/amestanogullari/accelerate-utility.git" = inputs.accelerate-utility;
"https://gitlab.iscpif.fr/gargantext/crawlers/arxiv-api.git" = inputs.crawlerArxiv;
"https://gitlab.iscpif.fr/gargantext/crawlers/epo-proxy-api.git" = inputs.epo-api-client;
"https://gitlab.iscpif.fr/gargantext/crawlers/hal.git" = inputs.crawlerHAL;
"https://gitlab.iscpif.fr/gargantext/haskell-throttle" = inputs.haskell-throttle;
"https://gitlab.iscpif.fr/gargantext/crawlers/isidore.git" = removePackageYaml inputs.crawlerIsidore;
"https://gitlab.iscpif.fr/gargantext/crawlers/istex.git" = removePackageYaml inputs.crawlerISTEX;
"https://gitlab.iscpif.fr/gargantext/crawlers/openalex.git" = inputs.openalex;
"https://gitlab.iscpif.fr/gargantext/crawlers/pubmed.git" = inputs.crawlerPubMed;
"https://gitlab.iscpif.fr/gargantext/gargantext-graph.git" = inputs.gargantext-graph;
"https://gitlab.iscpif.fr/gargantext/haskell-gargantext-prelude" = removePackageYaml inputs.gargantext-prelude;
"https://gitlab.iscpif.fr/gargantext/haskell-igraph.git" = inputs.haskell-igraph;
"https://gitlab.iscpif.fr/gargantext/haskell-infomap.git" = inputs.hsinfomap;
"https://gitlab.iscpif.fr/gargantext/hlcm.git" = inputs.hlcm;
"https://gitlab.iscpif.fr/gargantext/iso639.git" = inputs.iso639;
"https://github.com/garganscript/nanomsg-haskell" = inputs.nanomsg-haskell;
"https://gitlab.iscpif.fr/gargantext/patches-class.git" = inputs.patches-class;
"https://gitlab.iscpif.fr/gargantext/servant-xml-conduit.git" = inputs.servant-xml-conduit;
# TODO tag used in cabal.project
#"https://github.com/glguy/toml-parser/toml-parser-2.0.1.0" = inputs.toml-parser;
};
# Default project configuration.
modules = [
({ pkgs, ... }: {
# Make the closure dependency significantly larger
# but avoid missing genprimopcode with compilerSelection = p: pkgs.haskell.compiler
reinstallableLibGhc = false;
packages.haskell-igraph.components.library = {
# The generated plan includes pkgs.igraph, giving access to libigraph.so,
# but pkgs.igraph.dev is also needed for igraph.h
libs = [ pkgs.igraph.dev ];
# Extra include needed because haskell-igraph's cbits
# use: #include <igraph.h>
# not: #include <igraph/igraph.h>
configureFlags = [ "--extra-include-dirs=${pkgs.igraph.dev}/include/igraph" ];
};
# Link with OpenBLAS optimized libraries.
# WARNING: OpenBLAS must only be used by trusted code
# it is inherently unsuitable for security-conscious applications.
# See nixpkgs/pkgs/development/libraries/science/math/openblas/default.nix
packages.hmatrix.flags.openblas = true;
# Not really necessary because nix builds in a sandbox by default anyway.
packages.hmatrix.flags.disable-default-paths = true;
})
];
# Shell configuration shared by the default shell
# and all shells from the flake.variants.
shell = {
# By default haskell.nix does not force cabal-install (by setting CABAL_CONFIG=)
# to use the packages selected by project.plan-nix and available in `ghc-pkg list`,
# leaving cabal-install in charge of provisioning Haskell packages,
# which gives more flexibility when developing.
#exactDeps = false;
#allToolDeps = true;
# haskell.nix provisions (in `ghc-pkg list`)
# the **dependencies** of the packages selected here,
# which are also **not** selected here.
#
# By default haskell.nix selects all _local_ packages here
# (packages from both the `packages` and the `source-repository-package` stanzas)
# which therefore excludes `source-repository-package`s from being provisioned,
#
# Note that it means `cabal update` must be run to get an index-state.
# and be able to download and build missing dependencies
# that depend on `source-repository-package`s.
# Eg. gargantext's dependency `hstatistics` depends on `hmatrix`,
# but hmatrix is a `source-repository-package`
# hence `hstatistics` is not provisioned by haskell.nix.
#packages = ps: lib.attrValues (haskellLib.selectLocalPackages ps);
# Add in this list any development tool needed
# that is not expected to come from the developers' own system.
nativeBuildInputs = [
pkgs.graphviz # for `dot`
pkgs.haskell.packages.${config.compiler-nix-name}.cabal-install
pkgs.haskell.packages.${config.compiler-nix-name}.ghcid
pkgs.haskell.packages.${config.compiler-nix-name}.haskell-language-server
pkgs.haskell.packages.${config.compiler-nix-name}.hlint
];
# On shell entry: point tests at the coreNLP app, write a
# cabal.project.local mirroring the igraph/openblas setup above
# (for cabal-install itself), and print a warning about `cabal update`.
shellHook =
''
export GARGANTEXT_CORENLP_SERVER="nix -L run .#coreNLP"
cp -f ${pkgs.buildPackages.writeText "cabal.project.local" ''
-- Same fix as in haskell.nix's packages.haskell-igraph.components.library,
-- but for cabal-install
package haskell-igraph
extra-include-dirs: ${pkgs.igraph.dev}/include/igraph
extra-lib-dirs: ${lib.concatMapStringsSep " " (p: "${lib.getLib p}/lib") [
pkgs.igraph
pkgs.openblas
]}
-- Enable openblas
constraints: hmatrix +openblas
package hmatrix
flags: +openblas
extra-lib-dirs: ${lib.concatMapStringsSep " " (p: "${lib.getLib p}/lib") [ pkgs.openblas ]}
''} cabal.project.local
cat >&2 ${pkgs.buildPackages.writeText "shellEnterMessage.txt" ''
**Warning**
This Nix development shell is not configured to provision
`cabal.project`'s `source-repository-package`s and their reverse dependencies,
therefore `cabal update` has to be run manually to fetch an `index-state`
before `cabal build`.
''}
'' +
self.checks.${system}.git-hooks-check.shellHook;
# When true, builds a Hoogle documentation index of all dependencies,
# and provides a "hoogle" command to search the index.
# FIXME: this currently does not respect `compilerSelection`,
# pulling in `haskell-nix.compiler` instead of `haskell.compiler`.
withHoogle = false;
};
# Variants to the default project configuration above.
# They're accessed in the flake's outputs with their name prefixed.
# $ nix -L build .#haskell-nix-ghc:gargantext:exe:gargantext-phylo-profile
# Or via `legacyPackages.${system}.project.projectVariants`:
# $ nix -L build .#project.projectVariants.haskell-nix-ghc.components.executables.gargantext-phylo-profile
flake.variants = {
# For using profiling versions of Haskell packages:
# $ nix develop .#profiling
profiling = {
modules = [
{
# Applies to all packages of the Haskell closure. For instance:
# $ nix eval .#project.hsPkgs.containers.components.library.config.enableProfiling
# false
# $ nix eval .#project.projectVariants.profiling.hsPkgs.containers.components.library.config.enableProfiling
# true
enableProfiling = true;
enableLibraryProfiling = true;
}
];
};
# For using haskell.nix's GHC:
# $ nix -L develop .#haskell-nix-ghc
# $ nix -L build .#haskell-nix-ghc:gargantext:exe:gargantext-phylo-profile
haskell-nix-ghc = {
compilerSelection = lib.mkForce (pkgs: pkgs.haskell-nix.compiler);
materialized = lib.mkForce null;
modules = [
{
# Revert to the default
reinstallableLibGhc = lib.mkForce true;
}
];
};
};
# Enable coverage report in `ciJobs` and `hydraJobs` flake outputs.
# For building the coverages:
# $ nix -L build .#ciJobs.x86_64-linux.coverage.gargantext
# Alas, coverage fails to find hpc when using Nixpkgs' GHC:
# gargantext> no such hpc command
# So for now the haskell-nix-ghc variant has to be used:
# $ nix -L build .#project.projectVariants.haskell-nix-ghc.flake"'".ciJobs.coverage.gargantext
# $ firefox result/share/hpc/vanilla/html/
flake.doCoverage = true;
# Defaults to haskellLib.selectProjectPackages which select cabal.project's `packages`
# but rather make all `source-repository-package`s also available in `ciJobs.coverage.*`
flake.packages = haskellLib.selectLocalPackages;
# FIXME: haskell.nix uses a `doCoverage = lib.mkDefault true` which causes conflicts.
flake.coverageProjectModule = {
modules = [
{
# Turn coverage on for every package selected by flake.packages above.
packages =
let packageNames = project: builtins.attrNames (config.flake.packages project.hsPkgs); in
lib.genAttrs (packageNames config) (_: { doCoverage = true; });
}
];
};
# Dead-code analysis
# $ nix -L build .#weeder-project-analysis
# $ bat result
# Note that there may be false positives
# and that some file location may be wrong.
# weeder = {
# packages = ps:
# haskellLib.selectProjectPackages ps //
# lib.getAttrs [
# "crawlerArxiv"
# "crawlerHAL"
# "crawlerIsidore"
# "crawlerPubMed"
# "epo-api-client"
# "gargantext-graph"
# "gargantext-prelude"
# ]
# ps;
# # See https://github.com/ocharles/weeder?tab=readme-ov-file#configuration-options
# settings = {
# roots = [
# "^Main.main$"
# # Automatically generated by Cabal
# "^Paths_.*"
# ];
# root-instances = [
# ];
# # Consider all instances of type classes as roots.
# type-class-roots = true;
# unused-types = true;
# };
# };
# Make some variables available to all project modules
_module.specialArgs = {
# Use specialArgs to avoid infinite recursion
# when `inputs` is used in `imports`.
inherit inputs;
};
_module.args = {
inherit system;
inherit (pkgs.haskell-nix) haskellLib;
};
})
# project modules
(import nix/haskell.nix/modules/gargantext.nix)
#(import nix/haskell.nix/modules/weeder.nix)
];
projectFlake = project.flake { };
in
{
# Expose the overlaid nixpkgs plus the haskell.nix project itself,
# mainly for interactive exploration in `nix repl`.
legacyPackages = pkgs // {
# For exploring the project:
# $ nix --extra-experimental-features 'flakes repl-flake' repl .
# nix-repl> :lf .
# nix-repl> legacyPackages.x86_64-linux.project.<TAB>
inherit project;
};
# For building a component of this project:
# $ nix -L build .#gargantext:exe:gargantext-phylo-profile
packages = projectFlake.packages // {
#weeder-analysis = project.args.weeder.analysis;
};
# For entering the default development shell:
# $ nix -L develop
# $ cabal build --disable-optimization
#
# For entering the development shell variant `profiling`:
# $ nix -L develop .#profiling
# $ cabal run --enable-profiling gargantext-phylo-profile
devShells = projectFlake.devShells;
apps = projectFlake.apps // {
# For updating nix/cache/haskell.nix/materialized:
# $ nix run .#update-nix-cache-haskell-nix-materialized
# It needs to be updated when cabal.freeze or any other input to the plan-nix changes.
# It's only OK to use it when the plan-nix does not depend on `system`.
# See https://github.com/input-output-hk/haskell.nix/blob/master/docs/tutorials/materialization.md#when-is-it-ok-to-materialize
update-nix-cache-haskell-nix-materialized = inputs.flake-utils.lib.mkApp {
drv = pkgs.writeShellApplication {
name = "update-nix-cache-haskell-nix-materialized";
text = ''
set -eux
git diff --exit-code
${(project.appendModule { materialized = lib.mkForce null; }).plan-nix.passthru.generateMaterialized} nix/cache/haskell.nix/materialized
git add --all nix/cache/haskell.nix/materialized
git commit -m "nix: update nix/cache/haskell.nix/materialized"
'';
};
};
# Register the default project's toolchain,
# to prevent nix-collect-garbage from removing them from the Nix store.
# Note that it does not register the roots of the `projectVariants`.
update-nix-cache-haskell-nix-gc-roots = inputs.flake-utils.lib.mkApp {
drv = pkgs.writeShellApplication {
name = "update-nix-cache-haskell-nix-gc-roots";
text = ''
set -eux
rm -rf nix/cache/haskell.nix/gc-roots
nix-store --add-root nix/cache/haskell.nix/gc-roots/default --indirect --realise ${project.roots}
nix-store --add-root nix/cache/haskell.nix/gc-roots/coreNLP --indirect --realise ${self.apps.${system}.coreNLP.program}
'';
};
};
# For garg-test-hspec
# NOTE(review): presumably ./nix/corenlp.nix yields an attrset with a
# `program` attribute (it is dereferenced by the gc-roots app above) — confirm.
coreNLP = pkgs.callPackage ./nix/corenlp.nix {};
# # Avoid recompiling openjdk due to any overlay used in the common pkgs.
# let pkgs = import inputs.nixpkgs { inherit system; }; in
# inputs.flake-utils.lib.mkApp {
# drv = pkgs.writeShellApplication {
# name = "coreNLP";
# text = ''
# set -x
# exec ${pkgs.openjdk}/bin/java -mx4g -cp '${inputs.coreNLP}/*' edu.stanford.nlp.pipeline.StanfordCoreNLPServer -port 9000 -timeout 15000
# '';
# };
# };
};
# For running all checks (very slow):
# $ nix -L flake check
#
# For building a specific check of the project:
# $ nix -L build .#project.hsPkgs.gargantext.components.tests.garg-test-tasty
# $ result/bin/garg-test-tasty
#
# Alternatively, but slower:
# $ nix -L build .#checks.x86_64-linux.gargantext:test:garg-test-tasty
# $ bat result/test-stdout
#
# See names from:
# $ nix -L flake show --allow-import-from-derivation
# Alas, currently coverage reports do not work (can't find hpc)
# with nixpkgs.haskellPackages' GHC, so haskell.nix's GHC has to be used:
# $ # nix -L build .#project.projectCoverageReport
# $ nix -L build .#project.projectVariants.haskell-nix-ghc.projectCoverageReport
# $ firefox result/share/hpc/vanilla/html/index.html
checks = projectFlake.checks // {
# Run the enabled pre-commit hooks as a flake check;
# its shellHook is also appended to the development shell's shellHook above.
git-hooks-check = inputs.git-hooks.lib.${system}.run {
src = ./.;
hooks = {
#cabal-fmt.enable = true;
#fourmolu.enable = true;
#hlint.enable = true;
nixpkgs-fmt.enable = true;
};
};
};
# Jobs for the Nix-based continuous integration system: Hydra
# https://nixos.wiki/wiki/Hydra
# Note that haskell.nix always set `checkMaterialization = true` in `hydraJobs`.
#hydraJobs = projectFlake.hydraJobs;
# `ciJobs` is like `hydraJobs` but with `${system}` first
# so that the IFDs will not have to run for systems
# we are not testing (placement of `${system}` is done by
# `flake-utils.eachSystem`, which treats `hydraJobs` differently
# from the other flake outputs).
# Note that haskell.nix always set `checkMaterialization = true` in `ciJobs`.
ciJobs = projectFlake.ciJobs;
}
);
# Ask users to set Nix config entries in ~/.local/share/nix/trusted-settings.json.
nixConfig = {
# This sets the flake to use the IOG nix cache.
# Only useful when using the haskell-nix-ghc variant.
extra-substituters = [
"https://cache.iog.io"
];
extra-trusted-public-keys = [
"hydra.iohk.io:f/Ea+s+dFdN+3Y/G+FDgSq+a5NEWhJGzdjvKNGv0/EQ="
];
# haskell.nix translates to Nix expressions:
# - the build plan usually generated in `dist-newstyle/cache/plan.json` by `cabal configure`
# - and the `.cabal`/`stack.yaml`/`package.yaml` files of projects.
#
# haskell.nix can either generate those Nix expressions on-demand
# by calling its nix-tools' make-install-plan and cabal-to-nix,
# hence importing them from a derivation (IFD).
# Or import pre-generated files whenever project's materialized= attribute is not null,
# and then no longer needs to allow IFD.
allow-import-from-derivation = "true";
};
}
{ lib, ... }:
{
modules = [
({ pkgs, ... }: {
# Start StanfordCoreNLPServer to pass the Hspec tests
# Use `nix run` instead of `inputs.coreNLP` directly
# to avoid downloading it if not running garg-test-hspec
packages.gargantext.components.tests.garg-test-hspec.preCheck = ''
export GARGANTEXT_CORENLP_SERVER="${pkgs.nixFlakes}/bin/nix run .#coreNLP"
'';
# Skip the test that needs a live CoreNLP server (see the trace below:
# it fails with "Connection refused" against localhost:9000).
packages.gargantext.components.tests.garg-test-hspec.testFlags = [
# FIXME:
# test/Test/Database/Operations.hs:64:7:
# 1) Database.Read/Writes, Corpus creation, Can add documents to a Corpus
# uncaught exception: HttpException
# HttpExceptionRequest Request {
# host = "localhost"
# port = 9000
# secure = False
# requestHeaders = [("Accept","application/json")]
# path = "/"
# queryString = "?properties=%7B%22annotators%22:%22tokenize,ssplit,pos,ner%22,%22outputFormat%22:%22json%22%7D"
# method = "POST"
# proxy = Nothing
# rawBody = False
# redirectCount = 10
# responseTimeout = ResponseTimeoutDefault
# requestVersion = HTTP/1.1
# proxySecureMode = ProxySecureWithConnect
# }
# (ConnectionFailure Network.Socket.connect: <socket: 18>: does not exist (Connection refused))
"--match"
"'!/Database/Read/Writes/Corpus creation/Can add documents to a Corpus/'"
];
# Beware those failures are non-deterministic.
# For reproducing it may help to use a project's variant like "coverage":
# $ nix -L build .#project.projectVariants.coverage.hsPkgs.gargantext.components.tests.garg-test-tasty
# $ for i in {1..100}; do result/bin/garg-test-tasty --hide-successes || break; done
# Note that testFlags are only applied on checks.*, not on tests.*
packages.gargantext.components.tests.garg-test-tasty = {
build-tools = [
pkgs.graphviz
];
testFlags = [
"--pattern"
("'" + lib.concatStringsSep " && " [
# FAIL: (non-deterministic)
# Graph Clustering
# Cross
# Partition test: FAIL (0.14s)
# uncaught exception: ErrorCall
# *** Internal error in package accelerate ***
# *** Please submit a bug report at https://github.com/AccelerateHS/accelerate/issues
# Encountered a duplicate 'Tag'
# 81: Tag 1, 77: Tag 1, 79: Tag 1, 84: Tag 1
# CallStack (from HasCallStack):
# internalError: Data.Array.Accelerate.Trafo.Sharing:2274:19
# buildInitialEnvExp: Data.Array.Accelerate.Trafo.Sharing:2702:17
# determineScopesExp: Data.Array.Accelerate.Trafo.Sharing:2608:17
# scopesExp: Data.Array.Accelerate.Trafo.Sharing:2636:26
# scopesFun1: Data.Array.Accelerate.Trafo.Sharing:2429:60
# scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2490:33
# travF2MEA: Data.Array.Accelerate.Trafo.Sharing:2406:38
# scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2464:33
# travEA: Data.Array.Accelerate.Trafo.Sharing:2397:38
# scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2509:34
# travF2A2: Data.Array.Accelerate.Trafo.Sharing:2405:38
# scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2490:33
# travF2MEA: Data.Array.Accelerate.Trafo.Sharing:2406:38
# scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2490:33
# travF2MEA: Data.Array.Accelerate.Trafo.Sharing:2406:38
# scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2331:46
# determineScopesSharingAcc: Data.Array.Accelerate.Trafo.Sharing:2718:17
# scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2818:33
# travAE: Data.Array.Accelerate.Trafo.Sharing:2767:36
# scopesExp: Data.Array.Accelerate.Trafo.Sharing:2790:32
# travE2: Data.Array.Accelerate.Trafo.Sharing:2749:36
# scopesExp: Data.Array.Accelerate.Trafo.Sharing:2780:30
# travE1: Data.Array.Accelerate.Trafo.Sharing:2766:36
# scopesExp: Data.Array.Accelerate.Trafo.Sharing:2789:32
# travE2: Data.Array.Accelerate.Trafo.Sharing:2749:36
# scopesExp: Data.Array.Accelerate.Trafo.Sharing:2780:30
# travE1: Data.Array.Accelerate.Trafo.Sharing:2751:36
# scopesExp: Data.Array.Accelerate.Trafo.Sharing:2800:32
# travE3: Data.Array.Accelerate.Trafo.Sharing:2760:36
# scopesExp: Data.Array.Accelerate.Trafo.Sharing:2712:56
# determineScopesSharingExp: Data.Array.Accelerate.Trafo.Sharing:2696:60
# determineScopesExp: Data.Array.Accelerate.Trafo.Sharing:2608:17
# scopesExp: Data.Array.Accelerate.Trafo.Sharing:2636:26
# scopesFun1: Data.Array.Accelerate.Trafo.Sharing:2400:60
# scopesAcc: Data.Array.Accelerate.Trafo.Sharing:2331:46
# determineScopesSharingAcc: Data.Array.Accelerate.Trafo.Sharing:2317:37
# determineScopesAcc: Data.Array.Accelerate.Trafo.Sharing:3101:5
# recoverSharingAcc: Data.Array.Accelerate.Trafo.Sharing:240:34
# convertOpenAcc: Data.Array.Accelerate.Trafo.Sharing:162:35
# convertAccWith: Data.Array.Accelerate.Trafo:71:37
"!/Partition test/"
# FIXME: FAIL (non-deterministic)
# Starting 1 job runners.
# Starting 1 job runners.
# expected: [Nothing,Just 10,Just 5]
# but got: [Nothing,Nothing,Nothing]
"!/can fetch the latest job status/"
# FIXME: FAIL (non-deterministic)
# expected: [Just 100]
# but got: [Nothing]
# expected: [Just 50]
# but got: [Nothing]
"!/can spin two separate jobs and track their status separately/"
# FIXME: FAIL
# -- | Build the coocurency matrix for 62 unit of time
# -- | Group 22 docs by 62 unit of time
# Exception:
# *** Internal error in package accelerate ***
# *** Please submit a bug report at https://github.com/AccelerateHS/accelerate/issues
# Encountered a duplicate 'Tag'
# 208: Tag 1, 212: Tag 1, 210: Tag 1, 205: Tag 1
"!/phyloCleopatre returns expected data/"
# FIXME: FAIL
# huge error diff
"!/phylo2dot2json.is deterministic/"
# FIXME: FAIL
# Exception: /nix/store/yx2cnkj4hq6zk867nkvpffvyd9qy2slp-gargantext-test-garg-test-tasty-0.0.7.1.1-data/share/ghc-9.4.7/x86_64-linux-ghc-9.4.7/gargantext-0.0.7.1.1/test-data/phylo/GarganText_NgramsList-187482.csv: openBinaryFile: does not exist (No such file or directory)
#"!/toPhylo.is deterministic/"
# FIXME: recursively call cabal
"!/test vector works/"
# FIXME: FAIL (deterministic when coverage is enabled):
# nix -L build .#project.projectVariants.haskell-nix-ghc.flake"'".ciJobs.coverage.gargantext
#
# check if similarities optimizations are well implemented
# uncaught exception: ErrorCall
# *** Internal error in package accelerate ***
# *** Please submit a bug report at https://github.com/AccelerateHS/accelerate/issues
# Encountered a duplicate 'Tag'
# 259: Tag 1, 261: Tag 1
"!/Conditional/"
] + "'")
];
};
})
];
}
{ config, pkgs, lib, haskellLib, ... }:
let
cfg = config.weeder;
toml = pkgs.formats.toml { };
in
{
options.weeder = {
# The weeder analysis derivation; set in `config` below, internal only.
analysis = lib.mkOption {
type = lib.types.package;
default = null;
internal = true;
};
# Predicate selecting which of the project's packages to analyse
# (same shape as haskellLib.selectProjectPackages).
packages = lib.mkOption {
type = lib.types.unspecified;
default = haskellLib.selectProjectPackages;
};
# TOML-typed weeder configuration (roots, type-class-roots, ...).
settings = lib.mkOption {
type = toml.type;
default = { };
};
};
config = {
modules = [
{
# Enable writeHieFiles by default.
# See https://github.com/input-output-hk/haskell.nix/issues/298#issuecomment-767936405
# for what's going on in this trick.
options.packages = lib.mkOption {
type = lib.types.attrsOf (
lib.types.submodule (
{ config, ... }: {
config = lib.mkIf
((cfg.packages { ${config.package.identifier.name} = config.package; })
? ${config.package.identifier.name})
{
writeHieFiles = lib.mkForce true;
};
}
)
);
};
}
];
weeder.analysis =
pkgs.runCommand "weeder-${config.name}"
{
buildInputs = [
pkgs.glibcLocales
pkgs.haskell.packages.${config.compiler-nix-name}.weeder
];
allHieFiles = pkgs.linkFarm "allHieFiles" (
lib.concatMap
(package:
let
lib-hies = lib.optional (package.components ? library) {
name = "${package.identifier.name}-library";
path = package.components.library.hie;
};
exe-hies = lib.concatMap
(exe:
lib.optional (package.components.exes.${exe} ? hie) {
name = "${package.identifier.name}-exe-${exe}";
path = package.components.exes.${exe}.hie;
})
(lib.attrNames package.components.exes);
test-hies = lib.concatMap
(test-name:
let
test = package.components.tests.${test-name};
is-doctest =
# doctest tests build _all_ components of a package.
# The GHC id of these packages will be different,
# which means that when we run weeder, all this code
# will be uncalled. These are false positives, so
# we don't include hie files from anything that
# depends on `doctest`.
lib.any (x: x.identifier.name or "" == "doctest") test.config.depends;
in
lib.optional (!is-doctest) {
name = "${package.identifier.name}-test-${test-name}";
path = test.hie;
})
(lib.attrNames package.components.tests);
in
lib-hies ++ exe-hies ++ test-hies
)
(builtins.attrValues (cfg.packages config.hsPkgs))
);
} ''
export LC_ALL=en_US.UTF-8 LANG=en_US.UTF-8 LANGUAGE=en_US.UTF-8
cd $allHieFiles
weeder >$out --no-default-fields --config ${toml.generate "weeder.toml" cfg.settings} ||
test $? = 228 # One or more weeds found
'';
};
}
finalPkgs: previousPkgs: {
  # FIXME: update to a version >= 10.0.1
  # to include 2a265497 from https://gitlab.com/graphviz/graphviz/-/merge_requests/3487
  # NOTE: `overrideAttrs` uses the single-argument form here; the
  # previous two-argument form bound a `finalAttrs` fixed point that
  # was never used.
  graphviz = previousPkgs.graphviz.overrideAttrs (previousAttrs: {
    patches = (previousAttrs.patches or [ ]) ++ [
      # Increase the YY_BUF_SIZE
      # See https://gitlab.iscpif.fr/gargantext/haskell-gargantext/issues/290#note_9015
      graphviz/graphviz-yy-buf-size.diff
    ];
  });
}
--- a/lib/cgraph/scan.l
+++ b/lib/cgraph/scan.l
@@ -32,7 +32,7 @@
#include <stdbool.h>
#include <stddef.h>
#include <string.h>
-// #define YY_BUF_SIZE 128000
+#define YY_BUF_SIZE 128000
#define GRAPH_EOF_TOKEN '@' /* lex class must be defined below */
/* this is a workaround for linux flex */
static int line_num = 1;
final: prev:
let
  # Append extra CMake flags to a package's existing `cmakeFlags`.
  addCmakeFlags = flags: pkg:
    pkg.overrideAttrs (old: { cmakeFlags = old.cmakeFlags ++ flags; });
in
{
  # WARNING: the nixpkgs pinned brings igraph-0.10.7 not igraph-0.10.4
  # as expected by haskell-igraph-0.10.4
  # Note that igraph uses the lapack and blas alternative selector packages
  # which default to using openblas.
  igraph = addCmakeFlags
    # Disable Open Multi-Processing, to let GHC handle the kernel threads.
    [ "-DIGRAPH_OPENMP_SUPPORT=OFF" ]
    (prev.igraph.override {
      # fop pulls openjdk, scipy, blas… just to build PDF docs,
      fop = null;
    });
  # Dependency of igraph.
  # Disable Open Multi-Processing, to let GHC handle the kernel threads.
  plfit = addCmakeFlags [ "-DPLFIT_USE_OPENMP=OFF" ] prev.plfit;
}
final: prev: {
  # Dependency of igraph and hmatrix.
  # Build a single-threaded OpenBLAS and let GHC handle the kernel threads.
  # See also https://github.com/OpenMathLib/OpenBLAS/issues/2543
  openblas = prev.openblas.override { singleThreaded = true; };
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment