Commit 5ac27a46 authored by Przemyslaw Kaminski

Merge branch 'dev' into dev-graph-explorer-gexf

parents d424a377 5b70b168
......@@ -37,7 +37,6 @@ import System.Environment (getArgs)
--import Gargantext.Text.Corpus.Parsers.GrandDebat (readFile, GrandDebatReference(..))
import Data.Text (Text)
import qualified Data.Text as Text
import Control.Monad.IO.Class (liftIO)
main :: IO ()
main = do
......
......@@ -22,9 +22,11 @@ Script to start gargantext with different modes (Dev, Prod, Mock).
module Main where
import Options.Generic
import Data.Version (showVersion)
import Data.Text (unpack)
import qualified Paths_gargantext as PG -- cabal magic build module
import Options.Generic
import System.Exit (exitSuccess)
import Gargantext.Prelude
import Gargantext.API (startGargantext) -- , startGargantextMock)
......@@ -51,6 +53,8 @@ data MyOptions w =
<?> "By default: 8008"
, ini :: w ::: Maybe Text
<?> "Ini-file path of gargantext.ini"
, version :: w ::: Bool
<?> "Show version number and exit"
}
deriving (Generic)
......@@ -60,9 +64,15 @@ deriving instance Show (MyOptions Unwrapped)
main :: IO ()
main = do
MyOptions myMode myPort myIniFile <- unwrapRecord
MyOptions myMode myPort myIniFile myVersion <- unwrapRecord
"Gargantext server"
if myVersion then do
putStrLn $ "Version: " <> showVersion PG.version
System.Exit.exitSuccess
else
return ()
let myPort' = case myPort of
Just p -> p
Nothing -> 8008
......
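Note: the new --version handling above uses an if/else that returns unit in the else branch. A minimal sketch (not part of this commit) of the same check written with Control.Monad.when, relying on the Cabal-generated Paths_gargantext module already imported above:

    import Control.Monad (when)
    import Data.Version (showVersion)
    import System.Exit (exitSuccess)
    import qualified Paths_gargantext as PG

    -- Print the package version and stop, only when the flag is set.
    printVersionAndExit :: Bool -> IO ()
    printVersionAndExit wanted =
      when wanted $ do
        putStrLn ("Version: " <> showVersion PG.version)
        exitSuccess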
{ghc}:
with (import ./pinned.nix {});
haskell.lib.buildStackProject {
inherit ghc;
name = "gargantext";
buildInputs = [
docker-compose
blas
{ ghc
, pkgs ? import ./pinned.nix {}
}:
let
buildInputs = with pkgs; [
bzip2
#gfortran
gfortran.cc.lib
glibc
gmp
gsl
igraph
liblapack
lzma
pcre
postgresql
#stack
xz
zlib
blas
gfortran7
gfortran7.cc.lib
];
libraryPaths = pkgs.lib.makeLibraryPath buildInputs;
in
pkgs.haskell.lib.buildStackProject rec {
inherit ghc;
inherit buildInputs;
name = "gargantext";
shellHook = ''
export LD_LIBRARY_PATH="${libraryPaths}"
export LIBRARY_PATH="${libraryPaths}"
'';
}
......@@ -26,7 +26,7 @@ tmux
# Open Stack only: attach volumes
# attach the volume created (OS interface or API)
sudo fdisk -l
sudo fisk /dev/vdb (n,p,t,83,w)
sudo fdisk /dev/vdb (n,p,t,83,w)
sudo mkfs.ext4 /dev/vdb1
sudo blkid
......
......@@ -3,6 +3,7 @@ version: '3'
services:
postgres:
image: 'postgres:latest'
network_mode: host
ports:
- 5432:5432
environment:
......
......@@ -10,6 +10,8 @@ copyright:
license: BSD3
homepage: https://gargantext.org
ghc-options: -Wall
extra-libraries:
- gfortran
dependencies:
- extra
- text
......@@ -82,13 +84,18 @@ library:
- Gargantext.Viz.Phylo.View.Export
- Gargantext.Viz.Phylo.View.ViewMaker
dependencies:
- array
- HSvm
- KMP
- MonadRandom
- QuickCheck
- SHA
- Unique
- accelerate
- aeson
- aeson-lens
- aeson-pretty
- argon2
- array
- async
- attoparsec
- auto-update
......@@ -101,53 +108,51 @@ library:
- bytestring
- case-insensitive
- cassava
#- charsetdetect-ae # detect charset
- cereal # (IGraph)
- clock
- clustering-louvain
- conduit
- conduit-extra
- containers
- contravariant
- crawlerPubMed
- crawlerIsidore
- crawlerHAL
- crawlerISTEX
- crawlerIsidore
- crawlerPubMed
- data-time-segment
- deepseq
- directory
- duckling
- exceptions
- filepath
- formatting
- fullstop
- fast-logger
- fclabels
- fgl
- fast-logger
- filelock
- filepath
- formatting
- full-text-search
- fullstop
- graphviz
- haskell-igraph
- hlcm
- hsparql
- hstatistics
- http-api-data
- http-client
- http-client-tls
- http-conduit
- http-media
- http-api-data
- http-types
- hsparql
- hstatistics
- HSvm
- hxt
- hlcm
- ini
- insert-ordered-containers
- jose
# - kmeans-vector
- json-stream
- KMP
- lens
- located-base
- logging-effect
- matrix
- monad-control
- monad-logger
- mtl
- natural-transformation
......@@ -166,36 +171,35 @@ library:
- profunctors
- protolude
- pureMD5
- random-shuffle
- MonadRandom
- SHA
- simple-reflect
- cereal # (IGraph)
- singletons # (IGraph)
- quickcheck-instances
- random
- rake
- random
- random-shuffle
- rdf4h
- regex-compat
- resource-pool
- resourcet
- rdf4h
- safe
- semigroups
- serialise
- servant
- servant-auth
- servant-auth-server >= 0.4.4.0
- servant-auth-swagger
- servant-blaze
- servant-cassava
- servant-client
- servant-flatten
- servant-job
- servant-mock
- servant-multipart
- servant-server
- servant-static-th
- servant-swagger
- servant-swagger-ui
- servant-static-th
- servant-cassava
- serialise
- servant-xml
- simple-reflect
- singletons # (IGraph)
- split
- stemmer
- string-conversions
......@@ -209,7 +213,6 @@ library:
- transformers
- transformers-base
- unordered-containers
- Unique
- uuid
- validity
- vector
......@@ -220,9 +223,12 @@ library:
- wreq
- xml-conduit
- xml-types
- xmlbf
- yaml
- zip
- zlib
# - kmeans-vector
#- charsetdetect-ae # detect charset
# - utc
# API external connections
......
# this version of nixpkgs contains liblapack at 3.8.0
# this version of nixpkgs contains gsl at 2.5.0
import (
builtins.fetchTarball {
url = "https://github.com/NixOS/nixpkgs/archive/19.09.tar.gz";
sha256 = "0mhqhq21y5vrr1f30qd2bvydv4bbbslvyzclhw0kdxmkgg3z4c92";
}
)
# this version of nixpkgs contains liblapack at 3.8.0
# this version of nixpkgs contains gsl at 2.5.0
import (
builtins.fetchTarball {
url = "https://github.com/nixos/nixpkgs/archive/ece829033b7b8f4e81261fef5427144df4147bc4.tar.gz";
sha256 = "07n91k3d9i9pym8npsszha9mnvg4d1r0l0ldnhk4g8sx15vv1br5";
url = "https://github.com/NixOS/nixpkgs/archive/18.09.tar.gz";
sha256 = "1ib96has10v5nr6bzf7v8kw7yzww8zanxgw2qi1ll1sbv6kj6zpd";
}
)
{ pkgs ? import ./pinned.nix {} }:
{ pkgs ? import ./pinned-19.09.nix {} }:
pkgs.mkShell {
buildInputs = with pkgs; [
......
......@@ -51,16 +51,17 @@ import Control.Concurrent (threadDelay)
import Control.Exception (finally)
import Control.Lens
import Control.Monad.Except (withExceptT, ExceptT)
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Reader (ReaderT, runReaderT)
import Data.Aeson.Encode.Pretty (encodePretty)
import Data.Swagger
import Data.Text (Text)
import Data.Validity
import Data.Version (showVersion)
import GHC.Generics (D1, Meta (..), Rep)
import GHC.TypeLits (AppendSymbol, Symbol)
import Network.Wai
import Network.Wai.Handler.Warp hiding (defaultSettings)
import qualified Paths_gargantext as PG -- cabal magic build module
import Servant
import Servant.Auth as SA
import Servant.Auth.Server (AuthResult(..))
......@@ -84,7 +85,7 @@ import Gargantext.API.Types
import Gargantext.Database.Node.Contact (HyperdataContact)
import Gargantext.Database.Types.Node
import Gargantext.Database.Types.Node (NodeId, CorpusId, AnnuaireId)
import Gargantext.Database.Utils (HasConnection)
import Gargantext.Database.Utils (HasConnectionPool)
import Gargantext.Prelude
import Gargantext.Viz.Graph.API
import Network.HTTP.Types hiding (Query)
......@@ -197,17 +198,25 @@ type GargAPI = "api" :> Summary "API " :> GargAPIVersion
-- | TODO :<|> Summary "Latest API" :> GargAPI'
type GargAPIVersion = "v1.0" :> Summary "v1.0: " :> GargAPI'
type GargAPIVersion = "v1.0"
:> Summary "Garg API Version "
:> GargAPI'
type GargVersion = "version"
:> Summary "Backend version"
:> Get '[JSON] Text
type GargAPI' =
-- Auth endpoint
"auth" :> Summary "AUTH API"
:> ReqBody '[JSON] AuthRequest
:> Post '[JSON] AuthResponse
-- TODO-ACCESS here we want to request a particular header for
:<|> GargVersion
-- TODO-ACCESS here we want to request a particular header for
-- auth and capabilities.
:<|> GargPrivateAPI
type GargPrivateAPI = SA.Auth '[SA.JWT, SA.Cookie] AuthenticatedUser :> GargPrivateAPI'
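Note: a minimal sketch (not part of this commit) of how a client could call the new version route with servant-client. The flattened VersionRoute below is an assumption mirroring "api" :> "v1.0" :> GargVersion, and localhost:8008 is the default port from bin/gargantext-server:

    {-# LANGUAGE DataKinds, TypeOperators #-}

    import Data.Proxy (Proxy (..))
    import Data.Text (Text)
    import Network.HTTP.Client (defaultManagerSettings, newManager)
    import Servant.API
    import Servant.Client

    -- Hypothetical flattened route, mirroring "api" :> "v1.0" :> GargVersion.
    type VersionRoute = "api" :> "v1.0" :> "version" :> Get '[JSON] Text

    getBackendVersion :: ClientM Text
    getBackendVersion = client (Proxy :: Proxy VersionRoute)

    main :: IO ()
    main = do
      mgr <- newManager defaultManagerSettings
      let env = mkClientEnv mgr (BaseUrl Http "localhost" 8008 "")
      runClientM getBackendVersion env >>= print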
type GargAdminAPI
......@@ -225,7 +234,7 @@ waitAPI :: Int -> GargServer WaitAPI
waitAPI n = do
let
m = (10 :: Int) ^ (6 :: Int)
_ <- liftIO $ threadDelay ( m * n)
_ <- liftBase $ threadDelay ( m * n)
pure $ "Waited: " <> (cs $ show n)
----------------------------------------
......@@ -293,7 +302,7 @@ type GargPrivateAPI' =
:<|> New.AddWithForm
:<|> New.AddWithQuery
:<|> Annuaire.AddWithForm
:<|> "annuaire" :> Annuaire.AddWithForm
-- :<|> New.AddWithFile
-- :<|> "scraper" :> WithCallbacks ScraperAPI
-- :<|> "new" :> New.Api
......@@ -324,7 +333,7 @@ type API = SwaggerAPI
type GargServerM env err = ReaderT env (ExceptT err IO)
type EnvC env =
( HasConnection env
( HasConnectionPool env
, HasRepo env
, HasSettings env
, HasJobEnv env ScraperStatus ScraperStatus
......@@ -337,7 +346,11 @@ server :: forall env. EnvC env => env -> IO (Server API)
server env = do
-- orchestrator <- scrapyOrchestrator env
pure $ schemaUiServer swaggerDoc
:<|> hoistServerWithContext (Proxy :: Proxy GargAPI) (Proxy :: Proxy AuthContext) transform serverGargAPI
:<|> hoistServerWithContext
(Proxy :: Proxy GargAPI)
(Proxy :: Proxy AuthContext)
transform
serverGargAPI
:<|> frontEndServer
where
transform :: forall a. GargServerM env GargError a -> Handler a
......@@ -345,12 +358,18 @@ server env = do
serverGargAPI :: GargServerT env err (GargServerM env err) GargAPI
serverGargAPI -- orchestrator
= auth :<|> serverPrivateGargAPI
= auth
:<|> gargVersion
:<|> serverPrivateGargAPI
-- :<|> orchestrator
where
gargVersion :: GargServer GargVersion
gargVersion = pure (cs $ showVersion PG.version)
serverPrivateGargAPI :: GargServerT env err (GargServerM env err) GargPrivateAPI
serverPrivateGargAPI (Authenticated auser) = serverPrivateGargAPI' auser
serverPrivateGargAPI _ = throwAll' (_ServerError # err401)
serverPrivateGargAPI :: GargServerT env err (GargServerM env err) GargPrivateAPI
serverPrivateGargAPI (Authenticated auser) = serverPrivateGargAPI' auser
serverPrivateGargAPI _ = throwAll' (_ServerError # err401)
-- Here throwAll' requires a concrete type for the monad.
-- TODO-SECURITY admin only: withAdmin
......@@ -386,7 +405,7 @@ serverPrivateGargAPI' (AuthenticatedUser (NodeId uid))
-- TODO access
-- :<|> addUpload
-- :<|> (\corpus -> addWithQuery corpus :<|> addWithFile corpus)
:<|> addCorpusWithForm
:<|> addCorpusWithForm "user1"
:<|> addCorpusWithQuery
:<|> addAnnuaireWithForm
......@@ -398,35 +417,40 @@ serverPrivateGargAPI' (AuthenticatedUser (NodeId uid))
{-
addUpload :: GargServer New.Upload
addUpload cId = (serveJobsAPI $ JobFunction (\i log -> New.addToCorpusJobFunction cid i (liftIO . log)))
:<|> (serveJobsAPI $ JobFunction (\i log -> New.addToCorpusWithForm cid i (liftIO . log)))
addUpload cId = (serveJobsAPI $ JobFunction (\i log -> New.addToCorpusJobFunction cid i (liftBase . log)))
:<|> (serveJobsAPI $ JobFunction (\i log -> New.addToCorpusWithForm cid i (liftBase . log)))
--}
addCorpusWithQuery :: GargServer New.AddWithQuery
addCorpusWithQuery cid =
serveJobsAPI $
JobFunction (\i log -> New.addToCorpusJobFunction cid i (liftIO . log))
JobFunction (\i log -> New.addToCorpusJobFunction cid i (liftBase . log))
addWithFile :: GargServer New.AddWithFile
addWithFile cid i f =
serveJobsAPI $
JobFunction (\_i log -> New.addToCorpusWithFile cid i f (liftIO . log))
JobFunction (\_i log -> New.addToCorpusWithFile cid i f (liftBase . log))
addCorpusWithForm :: GargServer New.AddWithForm
addCorpusWithForm cid =
addCorpusWithForm :: Text -> GargServer New.AddWithForm
addCorpusWithForm username cid =
serveJobsAPI $
JobFunction (\i log -> New.addToCorpusWithForm cid i (liftIO . log))
JobFunction (\i log ->
let
log' x = do
printDebug "addCorpusWithForm" x
liftBase $ log x
in New.addToCorpusWithForm username cid i log')
addAnnuaireWithForm :: GargServer Annuaire.AddWithForm
addAnnuaireWithForm cid =
serveJobsAPI $
JobFunction (\i log -> Annuaire.addToAnnuaireWithForm cid i (liftIO . log))
JobFunction (\i log -> Annuaire.addToAnnuaireWithForm cid i (liftBase . log))
{-
serverStatic :: Server (Get '[HTML] Html)
serverStatic = $(do
let path = "purescript-gargantext/dist/index.html"
Just s <- liftIO (fileTypeToFileTree (FileTypeFile path))
Just s <- liftBase (fileTypeToFileTree (FileTypeFile path))
fileTreeToServer s
)
-}
......
......@@ -88,7 +88,7 @@ addToAnnuaireWithForm _cid (WithForm ft _d _l) logStatus = do
-- WOS -> Parser.parseFormat Parser.WOS
-- PresseRIS -> Parser.parseFormat Parser.RisPresse
-- docs <- liftIO
-- docs <- liftBase
-- $ splitEvery 500
-- <$> take 1000000
-- <$> parse (cs d)
......
......@@ -33,7 +33,6 @@ module Gargantext.API.Auth
where
import Control.Lens (view)
import Control.Monad.IO.Class (liftIO)
import Data.Aeson.TH (deriveJSON)
import Data.List (elem)
import Data.Swagger
......@@ -50,7 +49,7 @@ import Gargantext.API.Types (HasJoseError(..), joseError, HasServerError, GargSe
import Gargantext.Database.Root (getRoot)
import Gargantext.Database.Tree (isDescendantOf, isIn)
import Gargantext.Database.Types.Node (NodePoly(_node_id), NodeId(..), UserId, ListId, DocId)
import Gargantext.Database.Utils (Cmd', CmdM, HasConnection)
import Gargantext.Database.Utils (Cmd', CmdM, HasConnectionPool)
import Gargantext.Prelude hiding (reverse)
import Test.QuickCheck (elements, oneof)
import Test.QuickCheck.Arbitrary (Arbitrary, arbitrary)
......@@ -91,12 +90,12 @@ makeTokenForUser :: (HasSettings env, HasJoseError err)
=> NodeId -> Cmd' env err Token
makeTokenForUser uid = do
jwtS <- view $ settings . jwtSettings
e <- liftIO $ makeJWT (AuthenticatedUser uid) jwtS Nothing
e <- liftBase $ makeJWT (AuthenticatedUser uid) jwtS Nothing
-- TODO-SECURITY here we can implement token expiration ^^.
either joseError (pure . toStrict . decodeUtf8) e
-- TODO not sure about the encoding...
checkAuthRequest :: (HasSettings env, HasConnection env, HasJoseError err)
checkAuthRequest :: (HasSettings env, HasConnectionPool env, HasJoseError err)
=> Username -> Password -> Cmd' env err CheckAuth
checkAuthRequest u p
| not (u `elem` arbitraryUsername) = pure InvalidUser
......@@ -109,7 +108,7 @@ checkAuthRequest u p
token <- makeTokenForUser uid
pure $ Valid token uid
auth :: (HasSettings env, HasConnection env, HasJoseError err)
auth :: (HasSettings env, HasConnectionPool env, HasJoseError err)
=> AuthRequest -> Cmd' env err AuthResponse
auth (AuthRequest u p) = do
checkAuthRequest' <- checkAuthRequest u p
......
......@@ -27,8 +27,6 @@ module Gargantext.API.Corpus.New
--import Gargantext.Text.Corpus.Parsers (parseFile, FileFormat(..))
import Control.Lens hiding (elements)
import Control.Monad.IO.Class (liftIO)
import Control.Concurrent
import Data.Aeson
import Data.Aeson.TH (deriveJSON)
import Data.Maybe (fromMaybe)
......@@ -67,7 +65,6 @@ data Query = Query { query_query :: Text
deriveJSON (unPrefix "query_") 'Query
instance Arbitrary Query where
arbitrary = elements [ Query q n fs
| q <- ["a","b"]
......@@ -97,7 +94,7 @@ api _uId (Query q _ as) = do
Nothing -> flowCorpusSearchInDatabase "user1" EN q
Just API.All -> flowCorpusSearchInDatabase "user1" EN q
Just a -> do
docs <- liftIO $ API.get a q (Just 1000)
docs <- liftBase $ API.get a q (Just 1000)
cId' <- flowCorpus "user1" (Left q) (Multi EN) [docs]
pure cId'
......@@ -157,7 +154,6 @@ type Upload = Summary "Corpus Upload endpoint"
:<|> "addWithquery" :> AsyncJobsAPI ScraperStatus WithQuery ScraperStatus
:<|> "addWithfile" :> AsyncJobs ScraperStatus '[FormUrlEncoded] WithForm ScraperStatus
type AddWithQuery = Summary "Add with Query to corpus endpoint"
:> "corpus"
:> Capture "corpus_id" CorpusId
......@@ -205,7 +201,6 @@ addToCorpusJobFunction _cid (WithQuery _q _dbs _l) logStatus = do
, _scst_events = Just []
}
addToCorpusWithFile :: FlowCmdM env err m
=> CorpusId
-> MultipartData Mem
......@@ -218,6 +213,7 @@ addToCorpusWithFile cid input filetype logStatus = do
, _scst_remaining = Just 138
, _scst_events = Just []
}
printDebug "addToCorpusWithFile" cid
_h <- postUpload cid filetype input
pure ScraperStatus { _scst_succeeded = Just 137
......@@ -237,20 +233,19 @@ addToCorpusWithForm' :: FlowCmdM env err m
-> (ScraperStatus -> m ())
-> m ScraperStatus
addToCorpusWithForm' cid (WithForm ft d l) logStatus = do
newStatus <- liftIO newEmptyMVar
newStatus <- liftBase newEmptyMVar
s <- addToCorpusWithForm cid (WithForm ft d l) logStatus
_ <- liftIO $ forkIO $ putMVar newStatus s
s' <- liftIO $ takeMVar newStatus
_ <- liftBase $ forkIO $ putMVar newStatus s
s' <- liftBase $ takeMVar newStatus
pure s'
-}
addToCorpusWithForm :: FlowCmdM env err m
=> CorpusId
=> Text
-> CorpusId
-> WithForm
-> (ScraperStatus -> m ())
-> m ScraperStatus
addToCorpusWithForm cid (WithForm ft d l _n) logStatus = do
printDebug "ft" ft
addToCorpusWithForm username cid (WithForm ft d l _n) logStatus = do
let
parse = case ft of
......@@ -259,29 +254,29 @@ addToCorpusWithForm cid (WithForm ft d l _n) logStatus = do
WOS -> Parser.parseFormat Parser.WOS
PresseRIS -> Parser.parseFormat Parser.RisPresse
newDocs <- liftIO newEmptyMVar
docs <- liftIO
$ splitEvery 500
<$> take 1000000
<$> parse (cs d)
_ <- liftIO $ forkIO $ putMVar newDocs docs
logStatus ScraperStatus { _scst_succeeded = Just 1
, _scst_failed = Just 0
, _scst_remaining = Just 1
, _scst_events = Just []
}
docs' <- liftIO $ takeMVar newDocs
newCid <- liftIO newEmptyMVar
cid' <- flowCorpus "user1"
printDebug "Parsing corpus: " cid
-- TODO granularity of the logStatus
docs <- liftBase $ splitEvery 500
<$> take 1000000
<$> parse (cs d)
printDebug "Parsing corpus finished : " cid
printDebug "Starting extraction : " cid
-- TODO granularity of the logStatus
_cid' <- flowCorpus username
(Right [cid])
(Multi $ fromMaybe EN l)
(map (map toHyperdataDocument) docs')
_ <- liftIO $ forkIO $ putMVar newCid cid'
(map (map toHyperdataDocument) docs)
cid'' <- liftIO $ takeMVar newCid
printDebug "cid'" cid''
printDebug "Extraction finished : " cid
pure ScraperStatus { _scst_succeeded = Just 2
, _scst_failed = Just 0
......
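Note: the form upload above parses the whole payload, then batches the documents before flowCorpus. A minimal sketch (not part of this commit) of that batching, assuming splitEvery behaves like Data.List.Split.chunksOf:

    import Data.List.Split (chunksOf)

    -- Mirrors `splitEvery 500 <$> take 1000000 <$> parse (cs d)`:
    -- at most one million parsed documents, in batches of 500.
    batchDocs :: [doc] -> [[doc]]
    batchDocs = chunksOf 500 . take 1000000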
......@@ -27,7 +27,6 @@ module Gargantext.API.Corpus.New.File
import Control.Lens ((.~), (?~))
import Control.Monad (forM)
import Control.Monad.IO.Class (liftIO)
import Data.Maybe
import Data.Aeson
import Data.Monoid (mempty)
......@@ -100,18 +99,18 @@ postUpload :: NodeId
-> Cmd err [Hash]
postUpload _ Nothing _ = panic "fileType is a required parameter"
postUpload _ (Just fileType) multipartData = do
putStrLn $ "File Type: " <> (show fileType)
is <- liftIO $ do
putStrLn ("Inputs:" :: Text)
printDebug "File Type: " fileType
is <- liftBase $ do
printDebug "Inputs:" ()
forM (inputs multipartData) $ \input -> do
putStrLn $ ("iName " :: Text) <> (iName input)
<> ("iValue " :: Text) <> (iValue input)
printDebug "iName " (iName input)
printDebug "iValue " (iValue input)
pure $ iName input
_ <- forM (files multipartData) $ \file -> do
let content = fdPayload file
putStrLn $ ("XXX " :: Text) <> (fdFileName file)
putStrLn $ ("YYY " :: Text) <> cs content
printDebug "XXX " (fdFileName file)
printDebug "YYY " content
--pure $ cs content
-- is <- inputs multipartData
......
......@@ -119,9 +119,11 @@ import qualified Data.Set as Set
import Control.Category ((>>>))
import Control.Concurrent
import Control.Lens (makeLenses, makePrisms, Getter, Iso', iso, from, (.~), (?=), (#), to, folded, {-withIndex, ifolded,-} view, use, (^.), (^..), (^?), (+~), (%~), (.~), (%=), sumOf, at, _Just, Each(..), itraverse_, both, forOf_, (%%~), (?~), mapped)
import Control.Monad.Base (MonadBase, liftBase)
import Control.Monad.Error.Class (MonadError)
import Control.Monad.Reader
import Control.Monad.State
import Control.Monad.Trans.Control (MonadBaseControl)
import Data.Aeson hiding ((.=))
import Data.Aeson.TH (deriveJSON)
import Data.Either(Either(Left))
......@@ -140,7 +142,7 @@ import Gargantext.Database.Config (userMaster)
import Gargantext.Database.Metrics.NgramsByNode (getOccByNgramsOnlyFast')
import Gargantext.Database.Schema.Ngrams (NgramsType)
import Gargantext.Database.Types.Node (NodeType(..))
import Gargantext.Database.Utils (fromField', HasConnection)
import Gargantext.Database.Utils (fromField', HasConnectionPool)
import Gargantext.Database.Node.Select
import Gargantext.Database.Ngrams
--import Gargantext.Database.Lists (listsWith)
......@@ -796,14 +798,14 @@ instance HasRepoSaver RepoEnv where
type RepoCmdM env err m =
( MonadReader env m
, MonadError err m
, MonadIO m
, MonadBaseControl IO m
, HasRepo env
)
------------------------------------------------------------------------
saveRepo :: ( MonadReader env m, MonadIO m, HasRepoSaver env )
saveRepo :: ( MonadReader env m, MonadBase IO m, HasRepoSaver env )
=> m ()
saveRepo = liftIO =<< view repoSaver
saveRepo = liftBase =<< view repoSaver
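Note: throughout this merge liftIO/MonadIO gives way to liftBase/MonadBase IO, with MonadBaseControl IO where control operators (such as Data.Pool.withResource) must run inside the stack. A minimal sketch (not part of this commit) of the lifting:

    import Control.Monad.Base (MonadBase, liftBase)

    -- liftBase lifts an action of the base monad (here IO) into any
    -- transformer stack built on top of it, much like liftIO but for any base.
    logLine :: MonadBase IO m => String -> m ()
    logLine = liftBase . putStrLn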
listTypeConflictResolution :: ListType -> ListType -> ListType
listTypeConflictResolution _ _ = undefined -- TODO Use Map User ListType
......@@ -834,7 +836,7 @@ copyListNgrams :: RepoCmdM env err m
-> m ()
copyListNgrams srcListId dstListId ngramsType = do
var <- view repoVar
liftIO $ modifyMVar_ var $
liftBase $ modifyMVar_ var $
pure . (r_state . at ngramsType %~ (Just . f . something))
saveRepo
where
......@@ -849,7 +851,7 @@ addListNgrams :: RepoCmdM env err m
-> [NgramsElement] -> m ()
addListNgrams listId ngramsType nes = do
var <- view repoVar
liftIO $ modifyMVar_ var $
liftBase $ modifyMVar_ var $
pure . (r_state . at ngramsType . _Just . at listId . _Just <>~ m)
saveRepo
where
......@@ -871,7 +873,7 @@ setListNgrams :: RepoCmdM env err m
-> m ()
setListNgrams listId ngramsType ns = do
var <- view repoVar
liftIO $ modifyMVar_ var $
liftBase $ modifyMVar_ var $
pure . ( r_state
. at ngramsType %~
(Just .
......@@ -899,7 +901,7 @@ putListNgrams' :: RepoCmdM env err m
putListNgrams' listId ngramsType ns = do
-- printDebug "putListNgrams" (length nes)
var <- view repoVar
liftIO $ modifyMVar_ var $
liftBase $ modifyMVar_ var $
pure . ( r_state
. at ngramsType %~
(Just .
......@@ -928,7 +930,7 @@ currentVersion :: RepoCmdM env err m
=> m Version
currentVersion = do
var <- view repoVar
r <- liftIO $ readMVar var
r <- liftBase $ readMVar var
pure $ r ^. r_version
tableNgramsPull :: RepoCmdM env err m
......@@ -937,7 +939,7 @@ tableNgramsPull :: RepoCmdM env err m
-> m (Versioned NgramsTablePatch)
tableNgramsPull listId ngramsType p_version = do
var <- view repoVar
r <- liftIO $ readMVar var
r <- liftBase $ readMVar var
let
q = mconcat $ take (r ^. r_version - p_version) (r ^. r_history)
......@@ -966,7 +968,7 @@ tableNgramsPut tabType listId (Versioned p_version p_table)
assertValid p_validity
var <- view repoVar
vq' <- liftIO $ modifyMVar var $ \r -> do
vq' <- liftBase $ modifyMVar var $ \r -> do
let
q = mconcat $ take (r ^. r_version - p_version) (r ^. r_history)
(p', q') = transformWith ngramsStatePatchConflictResolution p q
......@@ -1006,7 +1008,7 @@ getNgramsTableMap :: RepoCmdM env err m
-> m (Versioned NgramsTableMap)
getNgramsTableMap nodeId ngramsType = do
v <- view repoVar
repo <- liftIO $ readMVar v
repo <- liftBase $ readMVar v
pure $ Versioned (repo ^. r_version)
(repo ^. r_state . at ngramsType . _Just . at nodeId . _Just)
......@@ -1018,12 +1020,12 @@ type MaxSize = Int
-- | Table of Ngrams is a ListNgrams formatted (sorted and/or cut).
-- TODO: should take only one ListId
getTime' :: MonadIO m => m TimeSpec
getTime' = liftIO $ getTime ProcessCPUTime
getTime' :: MonadBase IO m => m TimeSpec
getTime' = liftBase $ getTime ProcessCPUTime
getTableNgrams :: forall env err m.
(RepoCmdM env err m, HasNodeError err, HasConnection env)
(RepoCmdM env err m, HasNodeError err, HasConnectionPool env)
=> NodeType -> NodeId -> TabType
-> ListId -> Limit -> Maybe Offset
-> Maybe ListType
......@@ -1085,7 +1087,7 @@ getTableNgrams _nType nId tabType listId limit_ offset
ngramsType
ngrams_terms
t2 <- getTime'
liftIO $ hprint stderr
liftBase $ hprint stderr
("getTableNgrams/setScores #ngrams=" % int % " time=" % timeSpecs % "\n")
(length ngrams_terms) t1 t2
{-
......@@ -1114,7 +1116,7 @@ getTableNgrams _nType nId tabType listId limit_ offset
. setScores (not scoresNeeded)
. selectAndPaginate
t3 <- getTime'
liftIO $ hprint stderr
liftBase $ hprint stderr
("getTableNgrams total=" % timeSpecs
% " map1=" % timeSpecs
% " map2=" % timeSpecs
......@@ -1184,7 +1186,7 @@ type TableNgramsApi = TableNgramsApiGet
:<|> TableNgramsApiPut
:<|> TableNgramsApiPost
getTableNgramsCorpus :: (RepoCmdM env err m, HasNodeError err, HasConnection env)
getTableNgramsCorpus :: (RepoCmdM env err m, HasNodeError err, HasConnectionPool env)
=> NodeId -> TabType
-> ListId -> Limit -> Maybe Offset
-> Maybe ListType
......@@ -1198,7 +1200,7 @@ getTableNgramsCorpus nId tabType listId limit_ offset listType minSize maxSize o
searchQuery = maybe (const True) isInfixOf mt
-- | Text search is deactivated for now for ngrams by doc only
getTableNgramsDoc :: (RepoCmdM env err m, HasNodeError err, HasConnection env)
getTableNgramsDoc :: (RepoCmdM env err m, HasNodeError err, HasConnectionPool env)
=> DocId -> TabType
-> ListId -> Limit -> Maybe Offset
-> Maybe ListType
......@@ -1218,7 +1220,7 @@ getTableNgramsDoc dId tabType listId limit_ offset listType minSize maxSize orde
apiNgramsTableCorpus :: ( RepoCmdM env err m
, HasNodeError err
, HasInvalidError err
, HasConnection env
, HasConnectionPool env
)
=> NodeId -> ServerT TableNgramsApi m
apiNgramsTableCorpus cId = getTableNgramsCorpus cId
......@@ -1229,7 +1231,7 @@ apiNgramsTableCorpus cId = getTableNgramsCorpus cId
apiNgramsTableDoc :: ( RepoCmdM env err m
, HasNodeError err
, HasInvalidError err
, HasConnection env
, HasConnectionPool env
)
=> DocId -> ServerT TableNgramsApi m
apiNgramsTableDoc dId = getTableNgramsDoc dId
......
......@@ -24,7 +24,6 @@ module Gargantext.API.Ngrams.List
where
import Control.Lens hiding (elements)
import Control.Monad.IO.Class (liftIO)
import Data.Aeson
import Data.List (zip)
import Data.Map (Map, toList, fromList)
......@@ -50,7 +49,7 @@ import Servant.Job.Utils (jsonOptions)
------------------------------------------------------------------------
type NgramsList = (Map NgramsType (Versioned NgramsTableMap))
------------------------------------------------------------------------
type API = Get '[HTML] (Headers '[Header "Content-Disposition" Text] NgramsList)
type API = Get '[JSON, HTML] (Headers '[Header "Content-Disposition" Text] NgramsList)
-- :<|> ReqBody '[JSON] NgramsList :> Post '[JSON] Bool
:<|> PostAPI
......@@ -111,7 +110,7 @@ type PostAPI = Summary "Update List"
postAsync :: ListId -> GargServer PostAPI
postAsync lId =
serveJobsAPI $
JobFunction (\f log' -> postAsync' lId f (liftIO . log'))
JobFunction (\f log' -> postAsync' lId f (liftBase . log'))
postAsync' :: FlowCmdM env err m
=> ListId
......
......@@ -9,6 +9,7 @@ Portability : POSIX
-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
......@@ -36,7 +37,7 @@ type RootTerm = Text
getRepo :: RepoCmdM env err m => m NgramsRepo
getRepo = do
v <- view repoVar
liftIO $ readMVar v
liftBase $ readMVar v
listNgramsFromRepo :: [ListId] -> NgramsType
-> NgramsRepo -> Map Text NgramsRepoElement
......
......@@ -85,7 +85,7 @@ data ScraperEvent = ScraperEvent
, _scev_level :: !(Maybe Text)
, _scev_date :: !(Maybe Text)
}
deriving Generic
deriving (Show, Generic)
instance Arbitrary ScraperEvent where
arbitrary = ScraperEvent <$> elements [Nothing, Just "test message"]
......@@ -104,7 +104,7 @@ data ScraperStatus = ScraperStatus
, _scst_remaining :: !(Maybe Int)
, _scst_events :: !(Maybe [ScraperEvent])
}
deriving Generic
deriving (Show, Generic)
instance Arbitrary ScraperStatus where
arbitrary = ScraperStatus
......
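Note: deriving Show here lets ScraperStatus and ScraperEvent values be traced with printDebug, as the new log wrapper in Gargantext.API does. A small sketch (not part of this commit), assuming Gargantext.Prelude is in scope:

    -- Trace a scraper status from any MonadBase IO stack.
    traceStatus :: MonadBase IO m => ScraperStatus -> m ()
    traceStatus s = printDebug "scraper status" s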
......@@ -35,13 +35,14 @@ import System.Environment (lookupEnv)
import System.IO (FilePath, hClose)
import System.IO.Temp (withTempFile)
import System.FileLock (tryLockFile, unlockFile, SharedExclusive(Exclusive))
import Database.PostgreSQL.Simple (Connection, connect)
import Database.PostgreSQL.Simple (Connection, connect, close, ConnectInfo)
import Network.HTTP.Client (Manager)
import Network.HTTP.Client.TLS (newTlsManager)
import Data.Aeson
import Data.Maybe (fromMaybe)
import Data.Either (either)
import Data.Pool (Pool, createPool)
import Data.Text
--import Data.Text.Encoding (encodeUtf8)
import Data.ByteString (ByteString)
......@@ -61,7 +62,7 @@ import Control.Monad.Logger
import Control.Monad.Reader
import Control.Lens
import Gargantext.Prelude
import Gargantext.Database.Utils (databaseParameters, HasConnection(..), Cmd', runCmd)
import Gargantext.Database.Utils (databaseParameters, HasConnectionPool(..), Cmd', runCmd)
import Gargantext.API.Ngrams (NgramsRepo, HasRepoVar(..), HasRepoSaver(..), HasRepo(..), RepoEnv(..), r_version, saveRepo, initRepo, renv_var, renv_lock)
import Gargantext.API.Orchestrator.Types
......@@ -141,7 +142,7 @@ data FireWall = FireWall { unFireWall :: Bool }
data Env = Env
{ _env_settings :: !Settings
, _env_logger :: !LoggerSet
, _env_conn :: !Connection
, _env_pool :: !(Pool Connection)
, _env_repo :: !RepoEnv
, _env_manager :: !Manager
, _env_self_url :: !BaseUrl
......@@ -151,8 +152,8 @@ data Env = Env
makeLenses ''Env
instance HasConnection Env where
connection = env_conn
instance HasConnectionPool Env where
connPool = env_pool
instance HasRepoVar Env where
repoVar = repoEnv . repoVar
......@@ -254,7 +255,7 @@ newEnv port file = do
self_url <- parseBaseUrl $ "http://0.0.0.0:" <> show port
param <- databaseParameters file
conn <- connect param
pool <- newPool param
repo <- readRepoEnv
scrapers_env <- newJobEnv defaultSettings manager
logger <- newStderrLoggerSet defaultBufSize
......@@ -262,23 +263,26 @@ newEnv port file = do
pure $ Env
{ _env_settings = settings
, _env_logger = logger
, _env_conn = conn
, _env_pool = pool
, _env_repo = repo
, _env_manager = manager
, _env_scrapers = scrapers_env
, _env_self_url = self_url
}
newPool :: ConnectInfo -> IO (Pool Connection)
newPool param = createPool (connect param) close 1 (60*60) 8
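Note: a minimal sketch (not part of this commit) spelling out the createPool arguments used by newPool above (Data.Pool from resource-pool):

    import Data.Pool (Pool, createPool)
    import Database.PostgreSQL.Simple (ConnectInfo, Connection, close, connect)

    mkPool :: ConnectInfo -> IO (Pool Connection)
    mkPool param =
      createPool
        (connect param)   -- how to open a connection
        close             -- how to release it
        1                 -- number of stripes (independent sub-pools)
        (60 * 60)         -- keep an idle connection for at most one hour
        8                 -- at most eight open connections per stripe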
data DevEnv = DevEnv
{ _dev_env_conn :: !Connection
{ _dev_env_pool :: !(Pool Connection)
, _dev_env_repo :: !RepoEnv
, _dev_env_settings :: !Settings
}
makeLenses ''DevEnv
instance HasConnection DevEnv where
connection = dev_env_conn
instance HasConnectionPool DevEnv where
connPool = dev_env_pool
instance HasRepoVar DevEnv where
repoVar = repoEnv . repoVar
......@@ -306,11 +310,11 @@ withDevEnv iniPath k = do
where
newDevEnv = do
param <- databaseParameters iniPath
conn <- connect param
pool <- newPool param
repo <- readRepoEnv
setts <- devSettings devJwkFile
pure $ DevEnv
{ _dev_env_conn = conn
{ _dev_env_pool = pool
, _dev_env_repo = repo
, _dev_env_settings = setts
}
......@@ -326,7 +330,7 @@ runCmdReplServantErr = runCmdRepl
-- In particular this writes the repo file after running
-- the command.
-- This function is constrained to the DevEnv rather than
-- using HasConnection and HasRepoVar.
-- using HasConnectionPool and HasRepoVar.
runCmdDev :: Show err => DevEnv -> Cmd' DevEnv err a -> IO a
runCmdDev env f =
(either (fail . show) pure =<< runCmd env f)
......
......@@ -10,6 +10,7 @@ Portability : POSIX
-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ConstrainedClassMethods #-}
......
......@@ -12,8 +12,8 @@ commentary with @some markup@.
-}
------------------------------------------------------------------------
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TemplateHaskell #-}
module Gargantext.Core.Types ( module Gargantext.Core.Types.Main
......@@ -139,7 +139,7 @@ class HasInvalidError e where
assertValid :: (MonadError e m, HasInvalidError e) => Validation -> m ()
assertValid v = when (not $ validationIsValid v) $ throwError $ _InvalidError # v
-- assertValid :: MonadBase IO m => Validation -> m ()
-- assertValid v = when (not $ validationIsValid v) $ fail $ show v
......
......@@ -44,7 +44,6 @@ import Data.Tuple.Extra (first, second)
import Data.Traversable (traverse)
import Debug.Trace (trace)
import Control.Lens ((^.), view, _Just)
import Control.Monad.IO.Class (liftIO)
import Data.List (concat)
import Data.Map (Map, lookup)
import Data.Maybe (Maybe(..), catMaybes)
......@@ -109,7 +108,7 @@ _flowCorpusApi :: ( FlowCmdM env err m)
-> ApiQuery
-> m CorpusId
_flowCorpusApi u n tt l q = do
docs <- liftIO $ splitEvery 500 <$> getDataApi (_tt_lang tt) l q
docs <- liftBase $ splitEvery 500 <$> getDataApi (_tt_lang tt) l q
flowCorpus u n tt docs
------------------------------------------------------------------------
......@@ -121,7 +120,7 @@ flowAnnuaire :: FlowCmdM env err m
-> FilePath
-> m AnnuaireId
flowAnnuaire u n l filePath = do
docs <- liftIO $ (( splitEvery 500 <$> deserialiseImtUsersFromFile filePath) :: IO [[HyperdataContact]])
docs <- liftBase $ (( splitEvery 500 <$> deserialiseImtUsersFromFile filePath) :: IO [[HyperdataContact]])
flow (Nothing :: Maybe HyperdataAnnuaire) u n l docs
-- UNUSED
......@@ -130,7 +129,7 @@ _flowCorpusDebat :: FlowCmdM env err m
-> Limit -> FilePath
-> m CorpusId
_flowCorpusDebat u n l fp = do
docs <- liftIO ( splitEvery 500
docs <- liftBase ( splitEvery 500
<$> take l
<$> readFile' fp
:: IO [[GD.GrandDebatReference ]]
......@@ -143,7 +142,7 @@ flowCorpusFile :: FlowCmdM env err m
-> TermType Lang -> FileFormat -> FilePath
-> m CorpusId
flowCorpusFile u n l la ff fp = do
docs <- liftIO ( splitEvery 500
docs <- liftBase ( splitEvery 500
<$> take l
<$> parseFile ff fp
)
......@@ -439,7 +438,7 @@ instance ExtractNgramsT HyperdataDocument
terms' <- map text2ngrams
<$> map (intercalate " " . _terms_label)
<$> concat
<$> liftIO (extractTerms lang' $ hasText doc)
<$> liftBase (extractTerms lang' $ hasText doc)
pure $ Map.fromList $ [(source, Map.singleton Sources 1)]
<> [(i', Map.singleton Institutes 1) | i' <- institutes ]
......
......@@ -29,7 +29,7 @@ import Gargantext.Database.Flow
flowAnnuaire :: FlowCmdM env ServantErr m => FilePath -> m ()
flowAnnuaire filePath = do
contacts <- liftIO $ deserialiseImtUsersFromFile filePath
contacts <- liftBase $ deserialiseImtUsersFromFile filePath
ps <- flowInsertAnnuaire "Annuaire"
$ map (\h-> ToDbContact h)
$ map addUniqIdsContact contacts
......
......@@ -9,6 +9,7 @@ Portability : POSIX
-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
......
......@@ -9,6 +9,7 @@ Portability : POSIX
-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
......
......@@ -11,6 +11,7 @@ Ngrams by node enable contextual metrics.
-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
......
......@@ -11,6 +11,7 @@ Ngrams by node enable contextual metrics.
-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
......
......@@ -13,6 +13,8 @@ Add Documents/Contact to a Corpus/Annuaire.
------------------------------------------------------------------------
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
......
......@@ -49,6 +49,7 @@ the concatenation of the parameters defined by @shaParameters@.
-}
------------------------------------------------------------------------
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
......
......@@ -9,6 +9,7 @@ Portability : POSIX
-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
......
......@@ -9,6 +9,7 @@ Portability : POSIX
-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
......
......@@ -13,6 +13,7 @@ Ngrams connection to the Database.
{-# LANGUAGE Arrows #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
......
......@@ -15,6 +15,7 @@ NodeNgrams register Context of Ngrams (named Cgrams then)
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE Arrows #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE QuasiQuotes #-}
......
......@@ -14,6 +14,7 @@ commentary with @some markup@.
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE Arrows #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE QuasiQuotes #-}
......
......@@ -12,6 +12,7 @@ Portability : POSIX
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE Arrows #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
......
......@@ -12,6 +12,7 @@ Portability : POSIX
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE Arrows #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
......
......@@ -22,6 +22,7 @@ Next Step benchmark:
-}
{-# LANGUAGE Arrows #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
......
......@@ -13,6 +13,7 @@ Portability : POSIX
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE Arrows #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
......
......@@ -14,6 +14,7 @@ Functions to deal with users, database side.
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
......
......@@ -9,6 +9,7 @@ Portability : POSIX
-}
{-# LANGUAGE Arrows #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
......
......@@ -12,6 +12,7 @@ Let a Root Node, return the Tree of the Node as a directed acyclic graph
-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RankNTypes #-}
......
......@@ -11,6 +11,7 @@ Triggers on NodeNodeNgrams table.
-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
......
......@@ -11,6 +11,7 @@ Triggers on Nodes table.
-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
......
......@@ -11,6 +11,7 @@ Triggers on NodesNodes table.
-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
......
......@@ -13,6 +13,7 @@ commentary with @some markup@.
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
......@@ -22,10 +23,11 @@ module Gargantext.Database.Utils where
import Data.ByteString.Char8 (hPutStrLn)
import System.IO (stderr)
import Control.Exception
import Control.Exception
import Control.Monad.Error.Class -- (MonadError(..), Error)
import Control.Lens (Getter, view)
import Control.Monad.Reader
import Control.Monad.Trans.Control (MonadBaseControl)
import Control.Monad.Except
import Data.Aeson (Result(Error,Success), fromJSON, FromJSON)
import Data.Either.Extra (Either(Left, Right))
......@@ -33,10 +35,12 @@ import Data.Ini (readIniFile, lookupValue)
import qualified Data.List as DL
import Data.Maybe (maybe)
import Data.Monoid ((<>))
import Data.Pool (Pool, withResource)
import Data.Profunctor.Product.Default (Default)
import Data.Text (unpack, pack)
import Data.Typeable (Typeable)
import Data.Word (Word16)
--import Database.PostgreSQL.Simple (Connection, Pool, connect, withPoolConnection)
import Database.PostgreSQL.Simple (Connection, connect)
import Database.PostgreSQL.Simple.FromField ( Conversion, ResultError(ConversionFailed), fromField, returnError)
import Database.PostgreSQL.Simple.Internal (Field)
......@@ -48,21 +52,21 @@ import Text.Read (read)
import qualified Data.ByteString as DB
import qualified Database.PostgreSQL.Simple as PGS
class HasConnection env where
connection :: Getter env Connection
class HasConnectionPool env where
connPool :: Getter env (Pool Connection)
instance HasConnection Connection where
connection = identity
instance HasConnectionPool (Pool Connection) where
connPool = identity
type CmdM' env err m =
( MonadReader env m
, MonadError err m
, MonadIO m
, MonadBaseControl IO m
)
type CmdM env err m =
( CmdM' env err m
, HasConnection env
, HasConnectionPool env
)
type Cmd' env err a = forall m. CmdM' env err m => m a
......@@ -75,10 +79,10 @@ fromInt64ToInt = fromIntegral
-- TODO: ideally there should be very few calls to this functions.
mkCmd :: (Connection -> IO a) -> Cmd err a
mkCmd k = do
conn <- view connection
liftIO $ k conn
pool <- view connPool
withResource pool (liftBase . k)
runCmd :: (HasConnection env)
runCmd :: (HasConnectionPool env)
=> env -> Cmd' env err a
-> IO (Either err a)
runCmd env m = runExceptT $ runReaderT m env
......@@ -100,8 +104,8 @@ formatPGSQuery q a = mkCmd $ \conn -> PGS.formatQuery conn q a
runPGSQuery' :: (PGS.ToRow a, PGS.FromRow b) => PGS.Query -> a -> Cmd err [b]
runPGSQuery' q a = mkCmd $ \conn -> PGS.query conn q a
runPGSQuery :: (MonadError err m, MonadReader env m,
PGS.FromRow r, PGS.ToRow q, MonadIO m, HasConnection env)
runPGSQuery :: (MonadError err m, MonadReader env m, MonadBaseControl IO m,
PGS.FromRow r, PGS.ToRow q, HasConnectionPool env)
=> PGS.Query -> q -> m [r]
runPGSQuery q a = mkCmd $ \conn -> catch (PGS.query conn q a) (printError conn)
where
......
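Note: a usage sketch (not part of this commit) of runPGSQuery under the new pool-backed constraints. The nodes table and its name column are illustrative assumptions, and OverloadedStrings provides the Query literal:

    import Data.Text (Text)
    import Database.PostgreSQL.Simple (Only (..))

    -- Hypothetical lookup: fetch a node's name by id, going through the pool.
    selectNodeName :: Int -> Cmd err [Only Text]
    selectNodeName nodeId =
      runPGSQuery "SELECT name FROM nodes WHERE id = ?" (Only nodeId)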
......@@ -12,6 +12,7 @@ Portability : POSIX
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
......@@ -28,13 +29,14 @@ module Gargantext.Prelude
, round
, sortWith
, module Prelude
, MonadBase(..)
)
where
import Control.Monad.Base (MonadBase(..))
import GHC.Exts (sortWith)
import GHC.Err.Located (undefined)
import GHC.Real (round)
import Control.Monad.IO.Class (MonadIO)
import Data.Maybe (isJust, fromJust, maybe)
import Data.Text (Text)
import Protolude ( Bool(True, False), Int, Int64, Double, Integer
......@@ -42,7 +44,6 @@ import Protolude ( Bool(True, False), Int, Int64, Double, Integer
, Enum, Bounded, Float
, Floating, Char, IO
, pure, (>>=), (=<<), (<*>), (<$>), (>>)
, putStrLn
, head, flip
, Ord, Integral, Foldable, RealFrac, Monad, filter
, reverse, map, mapM, zip, drop, take, zipWith
......@@ -63,7 +64,7 @@ import Protolude ( Bool(True, False), Int, Int64, Double, Integer
, panic
)
import Prelude (Enum, Bounded, minBound, maxBound)
import Prelude (Enum, Bounded, minBound, maxBound, putStrLn)
-- TODO import functions optimized in Utils.Count
-- import Protolude hiding (head, last, all, any, sum, product, length)
-- import Gargantext.Utils.Count
......@@ -81,8 +82,8 @@ import Text.Read (Read())
import Data.String.Conversions (cs)
printDebug :: (Show a, MonadIO m) => [Char] -> a -> m ()
printDebug msg x = putStrLn $ msg <> " " <> show x
printDebug :: (Show a, MonadBase IO m) => [Char] -> a -> m ()
printDebug msg x = liftBase . putStrLn $ msg <> " " <> show x
-- printDebug _ _ = pure ()
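Note: printDebug now only requires MonadBase IO, so it can be called directly from Cmd actions and servant-job log callbacks. A small usage sketch (not part of this commit):

    -- Works in any stack whose base monad is IO.
    debugLength :: MonadBase IO m => [a] -> m ()
    debugLength xs = printDebug "number of items" (length xs)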
......@@ -303,6 +304,3 @@ fib :: Int -> Int
fib 0 = 0
fib 1 = 1
fib n = fib (n-1) + fib (n-2)
......@@ -9,6 +9,7 @@ Portability : POSIX
-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
......@@ -17,7 +18,6 @@ module Gargantext.Prelude.Utils
import Control.Lens (view)
import Control.Monad.Reader (MonadReader)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Random.Class (MonadRandom)
import Data.Text (Text)
import Control.Monad.Reader (ask)
......@@ -84,23 +84,23 @@ class ReadFile a where
readFile' :: FilePath -> IO a
writeFile :: (MonadReader env m, MonadIO m, HasSettings env, SaveFile a)
writeFile :: (MonadReader env m, MonadBase IO m, HasSettings env, SaveFile a)
=> a -> m FilePath
writeFile a = do
dataPath <- view (settings . fileFolder) <$> ask
(fp,fn) <- liftIO $ (toPath 3) . sha . Text.pack . show <$> newStdGen
(fp,fn) <- liftBase $ (toPath 3) . sha . Text.pack . show <$> newStdGen
let foldPath = dataPath <> "/" <> fp
filePath = foldPath <> "/" <> fn
_ <- liftIO $ createDirectoryIfMissing True foldPath
_ <- liftIO $ saveFile' filePath a
_ <- liftBase $ createDirectoryIfMissing True foldPath
_ <- liftBase $ saveFile' filePath a
pure filePath
readFile :: (MonadReader env m, MonadIO m, HasSettings env, ReadFile a)
readFile :: (MonadReader env m, MonadBase IO m, HasSettings env, ReadFile a)
=> FilePath -> m a
readFile fp = do
dataPath <- view (settings . fileFolder) <$> ask
liftIO $ readFile' $ dataPath <> "/" <> fp
liftBase $ readFile' $ dataPath <> "/" <> fp
......@@ -9,6 +9,7 @@ Portability : POSIX
-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
......
......@@ -13,6 +13,7 @@ CSV parser for Gargantext corpus files.
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
......@@ -20,7 +21,6 @@ module Gargantext.Text.List.Learn
where
import Control.Monad.Reader (MonadReader)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Gargantext.API.Settings
import Data.Map (Map)
import Data.Maybe (maybe)
......@@ -87,18 +87,18 @@ type Tests = Map ListType [Vec.Vector Double]
type Score = Double
type Param = Double
grid :: (MonadReader env m, MonadIO m, HasSettings env)
grid :: (MonadReader env m, MonadBase IO m, HasSettings env)
=> Param -> Param -> Train -> [Tests] -> m (Maybe Model)
grid _ _ _ [] = panic "Gargantext.Text.List.Learn.grid : empty test data"
grid s e tr te = do
let
grid' :: (MonadReader env m, MonadIO m, HasSettings env)
grid' :: (MonadReader env m, MonadBase IO m, HasSettings env)
=> Double -> Double
-> Train
-> [Tests]
-> m (Score, Model)
grid' x y tr' te' = do
model'' <- liftIO $ trainList x y tr'
model'' <- liftBase $ trainList x y tr'
let
model' = ModelSVM model'' (Just x) (Just y)
......@@ -117,7 +117,7 @@ grid s e tr te = do
$ map (\(k,vs) -> zip (repeat k) vs)
$ Map.toList t
res' <- liftIO $ predictList m toGuess
res' <- liftBase $ predictList m toGuess
pure $ score'' $ score' $ List.zip res res'
score <- mapM (getScore model') te'
......
......@@ -48,8 +48,6 @@ import Gargantext.Prelude
import Network.HTTP.Simple
import Control.Monad.Catch (MonadThrow)
import Control.Monad.IO.Class (MonadIO)
import Data.String.Conversions (ConvertibleStrings)
------------------------------------------------------------------------
......@@ -116,9 +114,10 @@ $(deriveJSON (unPrefix "_") ''PosSentences)
--
corenlp' :: ( MonadThrow m, MonadIO m, FromJSON a
corenlp' :: ( FromJSON a
, ConvertibleStrings p ByteString) =>
Lang -> p -> m (Response a)
Lang -> p -> IO (Response a)
corenlp' lang txt = do
let properties = case lang of
EN -> "{\"annotators\": \"tokenize,ssplit,pos,ner\", \"outputFormat\": \"json\"}"
......
......@@ -10,6 +10,7 @@ Portability : POSIX
-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DeriveGeneric #-}
......@@ -18,7 +19,6 @@ module Gargantext.Viz.Graph
where
import Control.Lens (makeLenses)
import Control.Monad.IO.Class (MonadIO(liftIO))
import Data.Aeson.TH (deriveJSON)
import Data.ByteString.Lazy as DBL (readFile, writeFile)
import Data.Swagger
......@@ -189,7 +189,7 @@ graphV3ToGraphWithFiles g1 g2 = do
DBL.writeFile g2 (DA.encode $ graphV3ToGraph newGraph)
readGraphFromJson :: MonadIO m => FilePath -> m (Maybe Graph)
readGraphFromJson :: MonadBase IO m => FilePath -> m (Maybe Graph)
readGraphFromJson fp = do
graph <- liftIO $ DBL.readFile fp
graph <- liftBase $ DBL.readFile fp
pure $ DA.decode graph
......@@ -12,32 +12,37 @@ Portability : POSIX
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-} -- allows to write Text literals
{-# LANGUAGE OverloadedLists #-} -- allows to write Map and HashMap as lists
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeOperators #-}
module Gargantext.Viz.Graph.API
where
-- import Debug.Trace (trace)
import Control.Concurrent -- (forkIO)
import Control.Lens (set, (^.), _Just, (^?))
import Control.Monad.IO.Class (liftIO)
import Data.Aeson
import Debug.Trace (trace)
import qualified Data.HashMap.Lazy as HashMap
import qualified Data.Map as Map
import Data.Maybe (Maybe(..))
import Data.Swagger
import Data.Text
import GHC.Generics (Generic)
import Servant
import Servant.Job.Async
import Servant.XML
import qualified Xmlbf as Xmlbf
import Gargantext.API.Ngrams (NgramsRepo, r_version)
import Gargantext.API.Ngrams.Tools
import Gargantext.API.Orchestrator.Types
import Gargantext.API.Types
import Gargantext.Core.Types.Main
import Gargantext.Database.Config
......@@ -66,11 +71,11 @@ instance Xmlbf.ToXml Graph where
where
params = HashMap.fromList [ ("xmlns", "http://www.gexf.net/1.2draft")
, ("version", "1.2") ]
meta = Xmlbf.element "meta" params $ creator <> description
meta = Xmlbf.element "meta" params $ creator <> desc
where
params = HashMap.fromList [ ("lastmodifieddate", "2020-03-13") ]
creator = Xmlbf.element "creator" HashMap.empty $ Xmlbf.text "Gargantext.org"
description = Xmlbf.element "description" HashMap.empty $ Xmlbf.text "Gargantext gexf file"
desc = Xmlbf.element "description" HashMap.empty $ Xmlbf.text "Gargantext gexf file"
graph :: [G.Node] -> [G.Edge] -> [Xmlbf.Node]
graph gn ge = Xmlbf.element "graph" params $ (nodes gn) <> (edges ge)
where
......@@ -101,14 +106,24 @@ instance Xmlbf.ToXml Graph where
type GraphAPI = Get '[JSON] Graph
:<|> Post '[JSON] [GraphId]
:<|> Put '[JSON] Int
:<|> "gexf" :> Get '[XML] (Headers '[Header "Content-Disposition" Text] Graph)
:<|> "gexf" :> Get '[XML] (Headers '[Servant.Header "Content-Disposition" Text] Graph)
:<|> GraphAsyncAPI
:<|> "versions" :> GraphVersionsAPI
data GraphVersions = GraphVersions { gv_graph :: Maybe Int
, gv_repo :: Int } deriving (Show, Generic)
instance ToJSON GraphVersions
instance ToSchema GraphVersions
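Note: the empty ToJSON instance falls back to generic encoding, so the record field names are kept as written. A minimal sketch (not part of this commit) of the resulting JSON:

    import qualified Data.Aeson as Aeson
    import qualified Data.ByteString.Lazy.Char8 as BSL

    -- Roughly: {"gv_graph":3,"gv_repo":5}
    demoGraphVersions :: IO ()
    demoGraphVersions =
      BSL.putStrLn (Aeson.encode (GraphVersions { gv_graph = Just 3, gv_repo = 5 }))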
graphAPI :: UserId -> NodeId -> GargServer GraphAPI
graphAPI u n = getGraph u n
:<|> postGraph n
:<|> putGraph n
:<|> getGraphGexf u n
:<|> graphAsync u n
:<|> graphVersionsAPI u n
------------------------------------------------------------------------
......@@ -119,14 +134,56 @@ graphAPI u n = getGraph u n
-- Each process has to be tailored
getGraph' :: UserId -> NodeId -> GargServer (Get '[JSON] Graph)
getGraph' u n = do
newGraph <- liftIO newEmptyMVar
newGraph <- liftBase newEmptyMVar
g <- getGraph u n
_ <- liftIO $ forkIO $ putMVar newGraph g
g' <- liftIO $ takeMVar newGraph
_ <- liftBase $ forkIO $ putMVar newGraph g
g' <- liftBase $ takeMVar newGraph
pure g'
-}
getGraph :: UserId -> NodeId -> GargNoServer Graph
getGraph uId nId = do
nodeGraph <- getNodeWith nId HyperdataGraph
let graph = nodeGraph ^. node_hyperdata . hyperdataGraph
-- let listVersion = graph ^? _Just
-- . graph_metadata
-- . _Just
-- . gm_list
-- . lfg_version
repo <- getRepo
-- let v = repo ^. r_version
nodeUser <- getNodeUser (NodeId uId)
let uId' = nodeUser ^. node_userId
let cId = maybe (panic "[ERR:G.V.G.API] Node has no parent")
identity
$ nodeGraph ^. node_parentId
g <- case graph of
Nothing -> do
graph' <- computeGraph cId NgramsTerms repo
_ <- insertGraph cId uId' (HyperdataGraph $ Just graph')
pure $ trace "Graph empty, computing" $ graph'
Just graph' -> pure $ trace "Graph exists, returning" $ graph'
-- Just graph' -> if listVersion == Just v
-- then pure graph'
-- else do
-- graph'' <- computeGraph cId NgramsTerms repo
-- _ <- updateHyperdata nId (HyperdataGraph $ Just graph'')
-- pure graph''
newGraph <- liftBase newEmptyMVar
_ <- liftBase $ forkIO $ putMVar newGraph g
g' <- liftBase $ takeMVar newGraph
pure {- $ trace (show g) $ -} g'
recomputeGraph :: UserId -> NodeId -> GargNoServer Graph
recomputeGraph uId nId = do
nodeGraph <- getNodeWith nId HyperdataGraph
let graph = nodeGraph ^. node_hyperdata . hyperdataGraph
let listVersion = graph ^? _Just
......@@ -145,26 +202,39 @@ getGraph uId nId = do
identity
$ nodeGraph ^. node_parentId
newGraph <- liftIO newEmptyMVar
g <- case graph of
Nothing -> do
graph' <- computeGraph cId NgramsTerms repo
graph' <- computeGraphAsync cId NgramsTerms repo
_ <- insertGraph cId uId' (HyperdataGraph $ Just graph')
pure graph'
pure $ trace "[recomputeGraph] Graph empty, computing" $ graph'
Just graph' -> if listVersion == Just v
then pure graph'
else do
graph'' <- computeGraph cId NgramsTerms repo
graph'' <- computeGraphAsync cId NgramsTerms repo
_ <- updateHyperdata nId (HyperdataGraph $ Just graph'')
pure graph''
_ <- liftIO $ forkIO $ putMVar newGraph g
g' <- liftIO $ takeMVar newGraph
pure {- $ trace (show g) $ -} g'
pure $ trace "[recomputeGraph] Graph exists, recomputing" $ graph''
pure g
computeGraphAsync :: HasNodeError err
=> CorpusId
-> NgramsType
-> NgramsRepo
-> Cmd err Graph
computeGraphAsync cId nt repo = do
g <- liftBase newEmptyMVar
_ <- forkIO <$> putMVar g <$> computeGraph cId nt repo
g' <- liftBase $ takeMVar g
pure g'
-- TODO use Database Monad only here ?
computeGraph :: HasNodeError err => CorpusId -> NgramsType -> NgramsRepo -> Cmd err Graph
computeGraph :: HasNodeError err
=> CorpusId
-> NgramsType
-> NgramsRepo
-> Cmd err Graph
computeGraph cId nt repo = do
lId <- defaultList cId
......@@ -179,11 +249,11 @@ computeGraph cId nt repo = do
let ngs = filterListWithRoot GraphTerm $ mapTermListRoot [lId] nt repo
myCooc <- Map.filter (>1)
<$> getCoocByNgrams (Diagonal True)
<$> getCoocByNgrams (Diagonal False)
<$> groupNodesByNgrams ngs
<$> getNodesByNgramsOnlyUser cId (lIds <> [lId]) nt (Map.keys ngs)
graph <- liftIO $ cooc2graph 0 myCooc
graph <- liftBase $ cooc2graph 0 myCooc
let graph' = set graph_metadata (Just metadata) graph
pure graph'
......@@ -196,7 +266,69 @@ putGraph :: NodeId -> GargServer (Put '[JSON] Int)
putGraph = undefined
getGraphGexf :: UserId -> NodeId -> GargNoServer (Headers '[Header "Content-Disposition" Text] Graph)
------------------------------------------------------------
getGraphGexf :: UserId -> NodeId -> GargNoServer (Headers '[Servant.Header "Content-Disposition" Text] Graph)
getGraphGexf uId nId = do
graph <- getGraph uId nId
pure $ addHeader (concat [ "attachment; filename=graph.gexf" ]) graph
------------------------------------------------------------
type GraphAsyncAPI = Summary "Update graph"
:> "async"
:> AsyncJobsAPI ScraperStatus () ScraperStatus
graphAsync :: UserId -> NodeId -> GargServer GraphAsyncAPI
graphAsync u n =
serveJobsAPI $
JobFunction (\_ log' -> graphAsync' u n (liftBase . log'))
graphAsync' :: UserId
-> NodeId
-> (ScraperStatus -> GargNoServer ())
-> GargNoServer ScraperStatus
graphAsync' u n logStatus = do
logStatus ScraperStatus { _scst_succeeded = Just 0
, _scst_failed = Just 0
, _scst_remaining = Just 1
, _scst_events = Just []
}
_g <- trace (show u) $ recomputeGraph u n
pure ScraperStatus { _scst_succeeded = Just 1
, _scst_failed = Just 0
, _scst_remaining = Just 0
, _scst_events = Just []
}
------------------------------------------------------------
type GraphVersionsAPI = Summary "Graph versions"
:> Get '[JSON] GraphVersions
:<|> Summary "Recompute graph version"
:> Post '[JSON] Graph
graphVersionsAPI :: UserId -> NodeId -> GargServer GraphVersionsAPI
graphVersionsAPI u n =
graphVersions u n
:<|> recomputeVersions u n
graphVersions :: UserId -> NodeId -> GargNoServer GraphVersions
graphVersions _uId nId = do
nodeGraph <- getNodeWith nId HyperdataGraph
let graph = nodeGraph ^. node_hyperdata . hyperdataGraph
let listVersion = graph ^? _Just
. graph_metadata
. _Just
. gm_list
. lfg_version
repo <- getRepo
let v = repo ^. r_version
pure $ GraphVersions { gv_graph = listVersion
, gv_repo = v }
recomputeVersions :: UserId -> NodeId -> GargNoServer Graph
recomputeVersions uId nId = recomputeGraph uId nId
......@@ -25,7 +25,7 @@ import Data.Map (Map, fromListWith, lookup, fromList, delete, toList, mapKeys, m
import qualified Data.Map as DM
import Data.Maybe (fromJust)
import Data.List (concat, sortOn)
import Data.Graph.Clustering.Louvain.CplusPlus (LouvainNode(..))
import Data.Graph.Clustering.Louvain.Utils (LouvainNode(..))
-- TODO mv in Louvain Lib
......
......@@ -8,6 +8,9 @@ Portability : POSIX
Main IGraph funs/types to ease portability with FGL.
Reference:
* Gábor Csárdi, Tamás Nepusz: The igraph software package for complex network research. InterJournal Complex Systems, 1695, 2006.
-}
{-# LANGUAGE NoImplicitPrelude #-}
......
......@@ -15,10 +15,8 @@ Portability : POSIX
module Gargantext.Viz.Graph.Tools
where
import Control.Monad.IO.Class (liftIO)
import Control.Concurrent (newEmptyMVar, takeMVar, putMVar, forkIO)
import Debug.Trace (trace)
import Data.Graph.Clustering.Louvain.CplusPlus (LouvainNode(..))
import Data.Graph.Clustering.Louvain.Utils (LouvainNode(..))
-- import Data.Graph.Clustering.Louvain (hLouvain, {-iLouvainMap-})
import Data.Graph.Clustering.Louvain.CplusPlus (cLouvain)
import Data.Map (Map)
import qualified Data.Set as Set
......@@ -57,33 +55,33 @@ cooc2graph :: Threshold
-> (Map (Text, Text) Int)
-> IO Graph
cooc2graph threshold myCooc = do
let (ti, _) = createIndices myCooc
myCooc' = toIndex ti myCooc
matCooc = map2mat 0 (Map.size ti) $ Map.filter (> 1) myCooc'
distanceMat = measureConditional matCooc
distanceMap = Map.filter (> threshold) $ mat2map distanceMat
let
(ti, _) = createIndices myCooc
myCooc' = toIndex ti myCooc
matCooc = map2mat 0 (Map.size ti) $ Map.filter (> 1) myCooc'
distanceMat = measureConditional matCooc
distanceMap = Map.filter (> threshold) $ mat2map distanceMat
let nodesApprox :: Int
nodesApprox = n'
where
(as, bs) = List.unzip $ Map.keys distanceMap
n' = Set.size $ Set.fromList $ as <> bs
ClustersParams rivers level = {-trace ("nodesApprox: " <> show nodesApprox) $-} clustersParams nodesApprox
nodesApprox :: Int
nodesApprox = n'
where
(as, bs) = List.unzip $ Map.keys distanceMap
n' = Set.size $ Set.fromList $ as <> bs
ClustersParams rivers level = clustersParams nodesApprox
partitionsV <- liftIO newEmptyMVar
partitions' <- case Map.size distanceMap > 0 of
True -> trace ("level" <> show level) $ cLouvain level distanceMap
False -> panic "Text.Flow: DistanceMap is empty"
_ <- liftIO $ forkIO $ putMVar partitionsV partitions'
partitions <- liftIO $ takeMVar partitionsV
partitions <- if (Map.size distanceMap > 0)
--then iLouvainMap 100 10 distanceMap
-- then hLouvain distanceMap
then cLouvain level distanceMap
else panic "Text.Flow: DistanceMap is empty"
let bridgeness' = {-trace ("rivers: " <> show rivers) $-}
bridgeness rivers partitions distanceMap
let
bridgeness' = bridgeness rivers partitions distanceMap
confluence' = confluence (Map.keys bridgeness') 3 True False
let confluence' = confluence (Map.keys bridgeness') 3 True False
pure $ data2graph (Map.toList ti) myCooc' bridgeness' confluence' partitions
data2graph (Map.toList ti) myCooc' bridgeness' confluence' partitions
data ClustersParams = ClustersParams { bridgness :: Double
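nodesApprox estimates the node count by unzipping the keys of the thresholded distance map and counting the distinct endpoints. A self-contained sketch of that estimate (approxNodes is a hypothetical name):

import           Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set

-- Keep only edges above the threshold, then count distinct endpoints.
approxNodes :: Double -> Map (Int, Int) Double -> Int
approxNodes threshold distances = Set.size (Set.fromList (as ++ bs))
  where
    kept     = Map.filter (> threshold) distances
    (as, bs) = unzip (Map.keys kept)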
......@@ -107,12 +105,13 @@ data2graph :: [(Text, Int)]
-> Map (Int, Int) Double
-> Map (Int, Int) Double
-> [LouvainNode]
-> IO Graph
data2graph labels coocs bridge conf partitions = do
let community_id_by_node_id = Map.fromList [ (n, c) | LouvainNode n c <- partitions ]
-> Graph
data2graph labels coocs bridge conf partitions = Graph nodes edges Nothing
where
nodes <- mapM (setCoord ForceAtlas labels bridge)
community_id_by_node_id = Map.fromList [ (n, c) | LouvainNode n c <- partitions ]
nodes = map (setCoord ForceAtlas labels bridge)
[ (n, Node { node_size = maybe 0 identity (Map.lookup (n,n) coocs)
, node_type = Terms -- or Unknown
, node_id = cs (show n)
......@@ -130,7 +129,7 @@ data2graph labels coocs bridge conf partitions = do
$ Map.toList bridge
]
let edges = [ Edge { edge_source = cs (show s)
edges = [ Edge { edge_source = cs (show s)
, edge_target = cs (show t)
, edge_weight = d
, edge_confluence = maybe 0 identity $ Map.lookup (s,t) conf
......@@ -139,7 +138,6 @@ data2graph labels coocs bridge conf partitions = do
| (i, ((s,t), d)) <- zip ([0..]::[Integer]) (Map.toList bridge), s /= t, d > 0
]
pure $ Graph nodes edges Nothing
------------------------------------------------------------------------
......@@ -153,22 +151,23 @@ setCoord' f (i,n) = n { node_x_coord = x, node_y_coord = y }
-- | ACP
setCoord :: Ord a => Layout -> [(a, Int)] -> Map (Int, Int) Double -> (Int, Node) -> IO Node
setCoord l labels m (n,node) = getCoord l labels m n
>>= \(x,y) -> pure $ node { node_x_coord = x
, node_y_coord = y
}
setCoord :: Ord a => Layout -> [(a, Int)] -> Map (Int, Int) Double -> (Int, Node) -> Node
setCoord l labels m (n,node) = node { node_x_coord = x
, node_y_coord = y
}
where
(x,y) = getCoord l labels m n
getCoord :: Ord a => Layout
-> [(a, Int)] -> Map (Int, Int) Double -> Int -> IO (Double, Double)
getCoord KamadaKawai _ m n = layout m n
-> [(a, Int)] -> Map (Int, Int) Double -> Int -> (Double, Double)
getCoord KamadaKawai _ _m _n = undefined -- layout m n
getCoord ForceAtlas _ _ n = pure (sin d, cos d)
getCoord ForceAtlas _ _ n = (sin d, cos d)
where
d = fromIntegral n
getCoord ACP labels m n = pure $ to2d $ maybe (panic "Graph.Tools no coordinate") identity
getCoord ACP labels m n = to2d $ maybe (panic "Graph.Tools no coordinate") identity
$ Map.lookup n
$ pcaReduceTo (Dimension 2)
$ mapArray labels m
......
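setCoord and getCoord are now pure: ForceAtlas places node n at (sin n, cos n) on the unit circle, while the Kamada-Kawai branch is stubbed out with undefined. A simplified sketch, with a hypothetical SimpleLayout type covering just those two cases:

-- Simplified layout type; the real getCoord also has an ACP (PCA) case.
data SimpleLayout = Circle | KamadaKawai

simpleCoord :: SimpleLayout -> Int -> (Double, Double)
simpleCoord Circle n = (sin d, cos d)   -- spread nodes on the unit circle
  where d = fromIntegral n
simpleCoord KamadaKawai _ = error "Kamada-Kawai layout needs the igraph binding"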
......@@ -42,7 +42,6 @@ import Servant
import Test.QuickCheck (elements)
import Test.QuickCheck.Arbitrary (Arbitrary, arbitrary)
import Web.HttpApiData (parseUrlPiece, readTextData)
import Control.Monad.IO.Class (liftIO)
import Network.HTTP.Media ((//), (/:))
------------------------------------------------------------------------
......@@ -108,7 +107,7 @@ getPhylo phId _lId l msb = do
branc = maybe 2 identity msb
maybePhylo = hyperdataPhylo_data $ _node_hyperdata phNode
p <- liftIO $ viewPhylo2Svg $ viewPhylo level branc $ maybe phyloFromQuery identity maybePhylo
p <- liftBase $ viewPhylo2Svg $ viewPhylo level branc $ maybe phyloFromQuery identity maybePhylo
pure (SVG p)
------------------------------------------------------------------------
type PostPhylo = QueryParam "listId" ListId
......
......@@ -27,7 +27,7 @@ import Gargantext.Viz.Phylo
import Gargantext.Viz.Phylo.Tools
import Debug.Trace (trace)
import Data.List (partition, concat, nub, elem, sort, (++), null, union)
import Data.Map (Map, fromList, fromListWith, adjust, filterWithKey, elems, keys, unionWith, mapWithKey)
import Data.Set (size)
......
......@@ -19,6 +19,7 @@ module Gargantext.Viz.Phylo.Cluster
where
import Control.Parallel.Strategies
import Data.Graph.Clustering.Louvain.CplusPlus
import Data.Graph.Clustering.Louvain.Utils (LouvainNode(..))
import Data.List (null,concat,sort,intersect,(++), elemIndex, groupBy, nub, union, (\\), (!!))
import Data.Map (Map, fromList, mapKeys)
import Data.Tuple (fst)
......
......@@ -72,7 +72,7 @@ flowPhylo cId = do
docs = map ( (\(y,t) -> Document y t) . filterTerms patterns) docs'
--liftIO $ flowPhylo' (List.sortOn date docs) termList l m fp
--liftBase $ flowPhylo' (List.sortOn date docs) termList l m fp
pure $ buildPhylo (List.sortOn date docs) termList
......
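Several call sites in this commit swap liftIO for liftBase, lifting from the base monad of the transformer stack instead. A small sketch assuming transformers-base's Control.Monad.Base, with ReaderT FilePath IO standing in for the Cmd/GargNoServer stacks:

import Control.Monad.Base         (liftBase)
import Control.Monad.Trans.Reader (ReaderT, ask, runReaderT)

-- liftBase lifts from the base monad (IO here) exactly where liftIO was used.
readIniLength :: ReaderT FilePath IO Int
readIniLength = do
  path     <- ask
  contents <- liftBase (readFile path)
  pure (length contents)

-- usage: runReaderT readIniLength "gargantext.ini"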
......@@ -492,4 +492,4 @@ traceTemporalMatching groups =
traceGroupsProxi :: Map (PhyloGroupId,PhyloGroupId) Double -> Map (PhyloGroupId,PhyloGroupId) Double
traceGroupsProxi m =
trace ( "\n" <> "-- | " <> show(Map.size m) <> " computed pairs of groups proximity" <> "\n") m
\ No newline at end of file
trace ( "\n" <> "-- | " <> show(Map.size m) <> " computed pairs of groups proximity" <> "\n") m
......@@ -543,4 +543,4 @@ adaptativeTemporalMatching elevation phylo = updatePhyloGroups 1
(traceTemporalMatching $ getGroupsFromLevel 1 phylo)
--------------------------------------
thr :: Double
thr = toThreshold elevation (phylo ^. phylo_groupsProxi)
\ No newline at end of file
thr = toThreshold elevation (phylo ^. phylo_groupsProxi)
......@@ -170,4 +170,4 @@ traceView pv = trace ("------------\n--| View |--\n------------\n\n"
<> show (percentile 75 (VS.fromList lst)) <> " (75%) "
<> show (percentile 90 (VS.fromList lst)) <> " (90%)\n") pv
where
lst = sort $ map (fromIntegral . length . snd) $ getNodesByBranches pv
\ No newline at end of file
lst = sort $ map (fromIntegral . length . snd) $ getNodesByBranches pv
......@@ -4,6 +4,7 @@ extra-package-dbs: []
packages:
- .
#- 'deps/servant-job'
#- 'deps/clustering-louvain'
docker:
enable: false
......@@ -13,7 +14,6 @@ docker:
nix:
enable: false
#packages: [gmp]
add-gc-roots: true
shell-file: build-shell.nix
......@@ -47,9 +47,9 @@ extra-deps:
#- git: https://github.com/delanoe/servant-job.git
#commit: 7a7b7100e6d132adb4c11b25b2128e6309690ac0
- git: https://github.com/np/servant-job.git
commit: 4016c76398a56e1a352a45b3ee9d698dd0dd2597
commit: 5bf03696edad27285b0588aba92b34b48db16832
- git: https://gitlab.iscpif.fr/gargantext/clustering-louvain.git
commit: e5814cbfa71f43b0a453efb65f476240d7d51a53
commit: 7d74f96dfea8e51fbab1793cc0429b2fe741f73d
- git: https://github.com/np/patches-map
commit: 8c6f38c4844ead53e664cf9c82ba461715dbe445
- git: https://github.com/delanoe/haskell-opaleye.git #- opaleye-0.6.7002.0
......