gargantext / haskell-gargantext

Commit a0703fa1 authored Jul 10, 2023 by Alexandre Delanoë
Merge remote-tracking branch 'origin/adinapoli/improve-cabal-ci-caching' into dev
Parents: 933ee8ad, 58e19238
Pipeline #4377 passed with stages in 10 minutes and 14 seconds

Showing 11 changed files with 2452 additions and 281 deletions
.gitlab-ci.yml                          +37    -43
bin/update-cabal-project                +27    -3
cabal.project                           +1     -1
cabal.project.freeze                    +2322  -215
nix/pkgs.nix                            +14    -2
shell.nix                               +1     -1
src-test/Offline/JSON.hs                +4     -3
src-test/Utils/Jobs.hs                  +43    -12
src/Gargantext/Utils/Jobs/Monad.hs      +1     -0
src/Gargantext/Utils/Jobs/Settings.hs   +1     -0
src/Gargantext/Utils/Jobs/State.hs      +1     -1
.gitlab-ci.yml
# Thanks to:
# https://vadosware.io/post/zero-to-continuous-integrated-testing-a-haskell-project-with-gitlab/
#
#
image: adinapoli/gargantext:v2
# Optimising CI speed by using tips from https://blog.nimbleways.com/let-s-make-faster-gitlab-ci-cd-pipelines/
image: adinapoli/gargantext:v2.1

variables:
  STACK_ROOT: "${CI_PROJECT_DIR}/.stack-root"
  STACK_OPTS: "--system-ghc"
  #before_script:
  #- apt-get update
  #- apt-get install make xz-utils
  CABAL_STORE_DIR: "${CI_PROJECT_DIR}/.cabal"
  FF_USE_FASTZIP: "true"
  ARTIFACT_COMPRESSION_LEVEL: "fast"
  CACHE_COMPRESSION_LEVEL: "fast"

stages:
  - stack
...
@@ -21,58 +18,55 @@ stages:
stack:
  stage: stack
  cache:
    # cache per branch name
    # key: ${CI_COMMIT_REF_SLUG}
    key: stack.yaml
    paths:
      - .stack-root/
      - .stack-work/
      - target
  script:
    - echo "Building the project from '$CI_PROJECT_DIR'"
    - nix-shell --run "stack build --no-terminal --haddock --no-haddock-deps --only-dependencies --fast --dry-run"

docs:
  stage: docs
  cache:
    # cache per branch name
    # key: ${CI_COMMIT_REF_SLUG}
    paths:
      - .stack-root/
      - .stack-work/
      - target
  script:
    - nix-shell --run "stack build --no-terminal --haddock --no-haddock-deps --fast --dry-run"
    - cp -R "$(stack path --local-install-root)"/doc ./output
  artifacts:
    paths:
      - ./output
    expire_in: 1 week
  allow_failure: true
    - nix-shell --run "stack build --no-terminal --fast --dry-run"

cabal:
  stage: cabal
  cache:
    # cache per branch name
    # key: ${CI_COMMIT_REF_SLUG}
    key: cabal.project
    paths:
      - .stack-root/
      - .stack-work/
      - dist-newstyle/
      - target
      - .cabal/
    policy: pull-push
  script:
    - nix-shell --run "./bin/update-cabal-project && cabal v2-build"
    - nix-shell --run "./bin/update-cabal-project $CABAL_STORE_DIR && cabal --store-dir=$CABAL_STORE_DIR v2-build --ghc-options='-O0 -fclear-plugins'"
  allow_failure: false

test:
  stage: test
  cache:
    # cache per branch name
    # key: ${CI_COMMIT_REF_SLUG}
    key: cabal.project
    paths:
      - .stack-root/
      - .stack-work/
      - dist-newstyle/
      - target
      - .cabal/
    policy: pull-push
  script:
    - nix-shell --run "cabal v2-test --test-show-details=streaming"
    - nix-shell --run "./bin/update-cabal-project $CABAL_STORE_DIR && cabal --store-dir=$CABAL_STORE_DIR v2-test --test-show-details=streaming --ghc-options='-O0 -fclear-plugins'"

docs:
  stage: docs
  cache:
    key: stack.yaml
    paths:
      - .stack-root/
      - .stack-work/
    policy: pull
  script:
    - nix-shell --run "stack build --no-terminal --haddock --no-haddock-deps --fast --dry-run"
    - cp -R "$(stack path --local-install-root)"/doc ./output
  # FIXME(adinapoli) Currently Gitlab 11.x doesn't support the 'rules' keyword.
  # rules:
  #   - if: '$CI_MERGE_REQUEST_IID' # Run job on Merge Requests
  only:
    - merge_requests
  artifacts:
    paths:
      - ./output
    expire_in: 1 week
  allow_failure: true
bin/update-cabal-project
#!/usr/bin/env bash

cabal v2-update 'hackage.haskell.org,2023-06-25T00:00:00Z'
cabal v2-install stack2cabal-1.0.14 --overwrite-policy=always
stack2cabal --no-run-hpack -p '2023-06-25'

set -euxo pipefail

DEFAULT_STORE=$HOME/.cabal
STORE_DIR="${1:-$DEFAULT_STORE}"

# README!
# Every time you modify the `stack.yaml` and, as a result, the relevant `cabal.project`
# changes, you have to make sure to update the `expected_cabal_project_hash` with the
# `sha256sum` result calculated on the `cabal.project`. This ensures the `cabal.project`
# stays deterministic so that CI cache can kick in.
expected_cabal_project_hash="41b2a260acaa6252541612a43ef42789ce61cc544a11249a98fa148c7ffe0cb8"

cabal --store-dir=$STORE_DIR v2-update 'hackage.haskell.org,2023-06-24T21:28:46Z'

# Install stack2cabal if it can't be found.
if ! stack2cabal --help &> /dev/null
then
    echo "stack2cabal could not be found"
    cabal --store-dir=$STORE_DIR v2-install --index-state="2023-06-24T21:28:46Z" stack2cabal-1.0.14 --overwrite-policy=always
fi

stack2cabal --no-run-hpack -p '2023-06-24 21:28:46'

actual_cabal_project_hash=$(sha256sum cabal.project | awk '{printf "%s",$1}')

if [[ $actual_cabal_project_hash != $expected_cabal_project_hash ]]; then
    echo "ERROR! hash mismatch between expected cabal.project and the one computed by stack2cabal."
    exit 1
fi
cabal.project
-- Generated by stack2cabal
index-state: 2023-06-25T00:00:00Z
index-state: 2023-06-24T21:28:46Z

with-compiler: ghc-8.10.7
...
cabal.project.freeze
This source diff could not be displayed because it is too large. You can view the blob instead.
nix/pkgs.nix
...
@@ -2,7 +2,19 @@
rec {
  inherit pkgs;
  ghc = pkgs.haskell.compiler.ghc8107;
  # If we are on a Mac, in order to build successfully with cabal we need a bit more work.
  ghc = if pkgs.stdenv.isDarwin
        then haskell1.compiler.ghc8107.overrideAttrs (finalAttrs: previousAttrs: {
               # See https://github.com/NixOS/nixpkgs/pull/149942/files
               patches = previousAttrs.patches ++ [
                 # Reverts the linking behavior of GHC to not resolve `-libc++` to `c++`.
                 (pkgs.fetchpatch {
                   url = "https://raw.githubusercontent.com/input-output-hk/haskell.nix/613ec38dbd62ab7929178c9c7ffff71df9bb86be/overlays/patches/ghc/ghc-macOS-loadArchive-fix.patch";
                   sha256 = "0IUpuzjZb1G+gP3q6RnwQbW4mFzc/OZ/7QqZy+57kx0=";
                 })
               ];
             })
        else pkgs.haskell.compiler.ghc8107;
  haskell1 = pkgs.haskell // {
    packages = pkgs.haskell.packages // {
      ghc8107 = pkgs.haskell.packages.ghc8107.override {
...
shell.nix
...
@@ -8,7 +8,7 @@ let
in
pkgs.pkgs.mkShell {
  name = pkgs.shell.name;
  LOCALE_ARCHIVE = "${pkgs.pkgs.glibcLocales}/lib/locale/locale-archive";
  LOCALE_ARCHIVE = if pkgs.pkgs.stdenv.isLinux then "${pkgs.pkgs.glibcLocales}/lib/locale/locale-archive" else "";
  #home.sessionVariables.LOCALE_ARCHIVE = "${pkgs.glibcLocales}/lib/locale/locale-archive";
  shellHook = pkgs.shell.shellHook;
  buildInputs = pkgs.shell.buildInputs ++ myBuildInputs;
...
src-test/Offline/JSON.hs
...
@@ -44,14 +44,15 @@ tests = testGroup "JSON" [
testWithQueryFrontend :: Assertion
testWithQueryFrontend = do
  assertBool "JSON instance will break frontend!" (isRight $ eitherDecode @WithQuery (C8.pack cannedWithQueryPayload))
  case eitherDecode @WithQuery (C8.pack cannedWithQueryPayload) of
    Left err -> fail $ "JSON instance will break frontend!: JSON decoding returned: " <> err
    Right _  -> pure ()

-- The aim of this type is to catch regressions in the frontend serialisation; this
-- is what the frontend currently expects, and therefore if we were to change the JSON
-- instances, this test would fail, and we will be notified.
cannedWithQueryPayload :: String
cannedWithQueryPayload = [r| {"query":"Haskell","node_id":138,"lang":"EN","flowListWith":{"type":"MyListsFirst"},"datafield":"External Arxiv","databases":"Arxiv"} |]
cannedWithQueryPayload = [r| {"query":"Haskell","node_id":138,"lang":"EN","flowListWith":{"type":"MyListsFirst"},"datafield":{ "External": "Arxiv"},"databases":"Arxiv"} |]

testParseBpaPhylo :: Assertion
testParseBpaPhylo = do
...
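The canned-payload check above can be generalised into a reusable helper. The following is a minimal sketch, not part of this commit: the name decodesCanned, the Proxy argument, and the use of tasty-hunit's assertFailure are illustrative assumptions rather than code from the repository.

{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications    #-}
import Data.Aeson (FromJSON, eitherDecode)
import qualified Data.ByteString.Lazy.Char8 as C8L
import Data.Proxy (Proxy (..))
import Test.Tasty.HUnit (Assertion, assertFailure)

-- Decode a frozen JSON payload at the given type and fail with the parser
-- error if it no longer parses, so contract-breaking instance changes
-- surface in CI instead of in the frontend.
decodesCanned :: forall a. FromJSON a => Proxy a -> String -> Assertion
decodesCanned _ payload =
  case eitherDecode @a (C8L.pack payload) of
    Left err -> assertFailure ("canned payload no longer decodes: " <> err)
    Right _  -> pure ()

-- Hypothetical usage: decodesCanned (Proxy @WithQuery) cannedWithQueryPayload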
src-test/Utils/Jobs.hs
...
@@ -21,7 +21,7 @@ import Prelude
import System.IO.Unsafe
import Network.HTTP.Client.TLS (newTlsManager)
import Network.HTTP.Client (Manager)
import Test.Hspec
import Test.Hspec hiding (pending)
import qualified Servant.Job.Types as SJ
import qualified Servant.Job.Core as SJ
...
@@ -57,20 +57,45 @@ jobDuration, initialDelay :: Int
jobDuration = 100000
initialDelay = 20000

-- | Use in conjunction with 'registerDelay' to create an 'STM' transaction
-- that will simulate the duration of a job by waiting the timeout registered
-- by 'registerDelay' before continuing.
waitJobSTM :: TVar Bool -> STM ()
waitJobSTM tv = do
  v <- readTVar tv
  check v

-- | The aim of this test is to ensure that the \"max runners\" setting is
-- respected, i.e. we have no more than \"N\" jobs running at the same time.
testMaxRunners :: IO ()
testMaxRunners = do
  -- max runners = 2 with default settings
  let num_jobs = 4
  k <- genSecret
  let settings = defaultJobSettings 2 k
  st :: JobsState JobT [String] () <- newJobsState settings defaultPrios
  now <- getCurrentTime
  runningJs <- newTVarIO []
  let j num _jHandle _inp _l = do
        atomically $ modifyTVar runningJs (\xs -> ("Job #" ++ show num) : xs)
        threadDelay jobDuration
        atomically $ modifyTVar runningJs (\xs -> filter (/= ("Job #" ++ show num)) xs)
      jobs = [ j n | n <- [1..4 :: Int] ]
  _jids <- forM jobs $ \f -> pushJob A () f settings st
  threadDelay initialDelay
  remainingJs <- newTVarIO num_jobs
  let duration = 1_000_000
      j num _jHandle _inp _l = do
        durationTimer <- registerDelay duration
        atomically $ do
          modifyTVar runningJs (\xs -> ("Job #" ++ show num) : xs)
          waitJobSTM durationTimer
          modifyTVar runningJs (\xs -> filter (/= ("Job #" ++ show num)) xs)
          modifyTVar remainingJs pred
      jobs = [ (A, j n) | n <- [1..num_jobs :: Int] ]
  atomically $ forM_ jobs $ \(t, f) -> void $ pushJobWithTime now t () f settings st
  let waitFinished = atomically $ do
        x <- readTVar remainingJs
        check (x == 0)
  waitFinished
  r1 <- readTVarIO runningJs
  sort r1 `shouldBe` ["Job #1", "Job #2"]
  threadDelay jobDuration
...
@@ -348,20 +373,26 @@ testMarkProgress = do
      ]
    }

pending :: String -> IO () -> IO ()
pending reason act = act `catch` (\(e :: SomeException) -> do
  putStrLn $ "PENDING: " <> reason
  putStrLn (displayException e))

test :: Spec
test = do
  describe "job queue" $ do
    it "respects max runners limit" $
      testMaxRunners
      pending "Ticket #198" testMaxRunners
    it "respects priorities" $
      testPrios
    it "can handle exceptions" $
      testExceptions
      pending "Ticket #198" testExceptions
    it "fairly picks equal-priority-but-different-kind jobs" $
      testFairness
  describe "job status update and tracking" $ do
    it "can fetch the latest job status" $
      testFetchJobStatus
      pending "Ticket #198" testFetchJobStatus
    it "can spin two separate jobs and track their status separately" $
      testFetchJobStatusNoContention
    it "marking stuff behaves as expected" $
...
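For reference, here is a minimal sketch of how waitJobSTM from the diff above pairs with registerDelay, which is the combination the reworked testMaxRunners relies on. It is illustrative only: it assumes the standard stm API, and simulateJob is a hypothetical name, not something defined in the repository.

import Control.Concurrent.STM (STM, TVar, atomically, check, readTVar)
import Control.Concurrent.STM.TVar (registerDelay)

-- Local copy of the helper shown in the diff above, so the sketch is self-contained.
waitJobSTM :: TVar Bool -> STM ()
waitJobSTM tv = do
  v <- readTVar tv
  check v

-- Block until the TVar returned by 'registerDelay' flips to True, i.e. roughly
-- 'durationMicros' microseconds later; the transaction simply retries until
-- 'check' succeeds, which is how the tests simulate a job of fixed duration.
simulateJob :: Int -> IO ()
simulateJob durationMicros = do
  durationTimer <- registerDelay durationMicros
  atomically (waitJobSTM durationTimer)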
src/Gargantext/Utils/Jobs/Monad.hs
...
@@ -68,6 +68,7 @@ defaultJobSettings numRunners k = JobSettings
  , jsIDTimeout = 30 * 60  -- 30 minutes
  , jsGcPeriod  = 1 * 60   -- 1 minute
  , jsSecretKey = k
  , jsDebugLogs = False
  }

genSecret :: IO SJ.SecretKey
...
src/Gargantext/Utils/Jobs/Settings.hs
...
@@ -14,6 +14,7 @@ data JobSettings = JobSettings
  , jsIDTimeout :: Int  -- in seconds, how long a job ID is valid
  , jsGcPeriod  :: Int  -- in seconds, how long between each GC
  , jsSecretKey :: SJ.SecretKey
  , jsDebugLogs :: Bool -- if 'True', enable debug logs
  }

makeLensesFor [ ("jsJobTimeout", "l_jsJobTimeout")
...
src/Gargantext/Utils/Jobs/State.hs
...
@@ -53,7 +53,7 @@ newJobsState js prios = do
          (_res, _logs) <- waitJobDone jid rj jmap
          return ()
        _ -> return ()
  putStrLn $ "Starting " ++ show (jsNumRunners js) ++ " job runners."
  when (jsDebugLogs js) $ putStrLn $ "Starting " ++ show (jsNumRunners js) ++ " job runners."
  gcAsync <- async $ gcThread js jmap
  runnersAsyncs <- traverse async runners
  return (JobsState jmap q idgen gcAsync runnersAsyncs)
...
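Taken together, the three small changes above add an off-by-default jsDebugLogs flag to JobSettings and gate the runner start-up message behind it. A hedged usage sketch follows; the module paths and field names are taken from the diffs above, while the export lists and the helper name mkVerboseSettings are assumptions for illustration only.

import Gargantext.Utils.Jobs.Monad (defaultJobSettings, genSecret)
import Gargantext.Utils.Jobs.Settings (JobSettings (..))

-- Build default settings (2 runners, as in the tests above) and switch the
-- new debug-log flag on for a local run via a plain record update.
mkVerboseSettings :: IO JobSettings
mkVerboseSettings = do
  k <- genSecret
  let settings = defaultJobSettings 2 k
  pure settings { jsDebugLogs = True }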