Commit 242f56d2 authored by Alexandre Delanoë

[MERGE] Fix warnings.

parents 8913a00b ff00168b
...@@ -16,6 +16,7 @@ TAGS ...@@ -16,6 +16,7 @@ TAGS
# UI # UI
gui gui
purescript-gargantext purescript-gargantext
gargantext-rfc
# Docs and Deps # Docs and Deps
doc doc
...@@ -24,3 +25,7 @@ _darcs ...@@ -24,3 +25,7 @@ _darcs
*.pdf *.pdf
# Runtime # Runtime
# Repo
repo.json*
tmp*repo*json
...@@ -19,34 +19,57 @@ Import a corpus binary. ...@@ -19,34 +19,57 @@ Import a corpus binary.
module Main where module Main where
import Prelude (read)
import Control.Exception (finally) import Control.Exception (finally)
import Servant (ServantErr) import Servant (ServantErr)
import Gargantext.Prelude import Gargantext.Prelude
import Gargantext.Database.Flow (FlowCmdM, flowCorpus) import Gargantext.Database.Flow (FlowCmdM, flowCorpusFile)
import Gargantext.Text.Parsers (FileFormat(CsvHalFormat)) import Gargantext.Text.Parsers (FileFormat(CsvHalFormat))
import Gargantext.Database.Utils (Cmd, ) import Gargantext.Database.Utils (Cmd, )
import Gargantext.Database.Types.Node (CorpusId) import Gargantext.Database.Types.Node (CorpusId, toHyperdataDocument)
--import Gargantext.Database.Schema.User (insertUsers, gargantuaUser, simpleUser) import Gargantext.Database.Schema.User (insertUsersDemo)
import Gargantext.Text.Terms (TermType(..))
import Gargantext.Core (Lang(..))
import Gargantext.API.Node () -- instances import Gargantext.API.Node () -- instances
import Gargantext.API.Settings (newDevEnvWith, runCmdDev, DevEnv) import Gargantext.API.Settings (withDevEnv, runCmdDev, DevEnv)
import System.Environment (getArgs) import System.Environment (getArgs)
--import Gargantext.Text.Parsers.GrandDebat (readFile, GrandDebatReference(..))
import qualified Data.Text as Text
import Control.Monad.IO.Class (liftIO)
main :: IO () main :: IO ()
main = do main = do
[iniPath, name, corpusPath] <- getArgs [userCreate, user, name, iniPath, limit, corpusPath] <- getArgs
{-let createUsers :: Cmd ServantErr Int64 --{-
createUsers = insertUsers [gargantuaUser,simpleUser] let createUsers :: Cmd ServantErr Int64
-} createUsers = insertUsersDemo
let cmd :: forall m. FlowCmdM DevEnv ServantErr m => m CorpusId
cmd = flowCorpusFile (cs user) (cs name) (read limit :: Int) (Multi EN) CsvHalFormat corpusPath
{-
let debatCorpus :: forall m. FlowCmdM DevEnv ServantErr m => m CorpusId
debatCorpus = do
docs <- liftIO ( splitEvery 500
<$> take (read limit :: Int)
<$> readFile corpusPath
:: IO [[GrandDebatReference ]]
)
flowCorpus (Text.pack user) (Text.pack name) (Multi FR) (map (map toHyperdataDocument) docs)
--}
let cmdCorpus :: forall m. FlowCmdM DevEnv ServantErr m => m CorpusId
cmdCorpus = flowCorpus CsvHalFormat corpusPath (cs name)
-- cmd = {-createUsers >>-} cmdCorpus
env <- newDevEnvWith iniPath
-- Better if we keep only one call to runCmdDev.
_ <- runCmdDev env cmdCorpus
pure ()
withDevEnv iniPath $ \env -> do
_ <- if userCreate == "true"
then runCmdDev env createUsers
else pure 0 --(cs "false")
_ <- runCmdDev env cmd
{-
_ <- if corpusType == "csv"
then runCmdDev env csvCorpus
else if corpusType == "debat"
then runCmdDev env debatCorpus
else panic "corpusType unknown: try \"csv\" or \"debat\""
-}
pure ()
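A minimal sketch (not part of this commit; names are hypothetical): the use of read for the limit argument above is partial and crashes on malformed input, so a total variant of the limit parsing could look like this.
import Text.Read (readMaybe)
-- Parse the positional "limit" argument, reporting bad input instead of crashing.
parseLimit :: String -> Either String Int
parseLimit s = maybe (Left ("invalid limit: " <> s)) Right (readMaybe s)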
name: gargantext name: gargantext
version: '4.0.0.3' version: '4.0.0.4'
synopsis: Search, map, share synopsis: Search, map, share
description: Please see README.md description: Please see README.md
category: Data category: Data
...@@ -59,6 +59,7 @@ library: ...@@ -59,6 +59,7 @@ library:
- Gargantext.Text.Parsers.Date - Gargantext.Text.Parsers.Date
- Gargantext.Text.Parsers.Wikimedia - Gargantext.Text.Parsers.Wikimedia
- Gargantext.Text.Parsers.WOS - Gargantext.Text.Parsers.WOS
- Gargantext.Text.Parsers.GrandDebat
- Gargantext.Text.Search - Gargantext.Text.Search
- Gargantext.Text.Terms - Gargantext.Text.Terms
- Gargantext.Text.Terms.Stop - Gargantext.Text.Terms.Stop
...@@ -75,6 +76,7 @@ library: ...@@ -75,6 +76,7 @@ library:
- Gargantext.Viz.Phylo.Tools - Gargantext.Viz.Phylo.Tools
- Gargantext.Viz.Phylo.Example - Gargantext.Viz.Phylo.Example
dependencies: dependencies:
- array
- QuickCheck - QuickCheck
- accelerate - accelerate
- aeson - aeson
...@@ -82,6 +84,7 @@ library: ...@@ -82,6 +84,7 @@ library:
- aeson-pretty - aeson-pretty
- async - async
- attoparsec - attoparsec
- auto-update
- base >=4.7 && <5 - base >=4.7 && <5
- base16-bytestring - base16-bytestring
- blaze-html - blaze-html
...@@ -102,6 +105,7 @@ library: ...@@ -102,6 +105,7 @@ library:
- fullstop - fullstop
- fclabels - fclabels
- fast-logger - fast-logger
- filelock
- full-text-search - full-text-search
- http-client - http-client
- http-client-tls - http-client-tls
...@@ -109,13 +113,15 @@ library: ...@@ -109,13 +113,15 @@ library:
- http-api-data - http-api-data
- http-types - http-types
- hsparql - hsparql
- hstatistics
- HSvm
- hxt - hxt
- hlcm - hlcm
- ini - ini
- insert-ordered-containers - insert-ordered-containers
- jose-jwt - jose-jwt
- json-state
# - kmeans-vector # - kmeans-vector
- json-stream
- KMP - KMP
- lens - lens
- located-base - located-base
...@@ -139,6 +145,7 @@ library: ...@@ -139,6 +145,7 @@ library:
- protolude - protolude
- pureMD5 - pureMD5
- SHA - SHA
- random
- rake - rake
- regex-compat - regex-compat
- resourcet - resourcet
...@@ -162,10 +169,10 @@ library: ...@@ -162,10 +169,10 @@ library:
- string-conversions - string-conversions
- swagger2 - swagger2
- tagsoup - tagsoup
- temporary
- text-metrics - text-metrics
- time - time
- time-locale-compat - time-locale-compat
- time-units
- timezone-series - timezone-series
- transformers - transformers
- transformers-base - transformers-base
......
...@@ -30,6 +30,8 @@ Thanks @yannEsposito for this. ...@@ -30,6 +30,8 @@ Thanks @yannEsposito for this.
{-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-} {-# LANGUAGE TypeOperators #-}
{-# LANGUAGE KindSignatures #-} {-# LANGUAGE KindSignatures #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-} {-# LANGUAGE UndecidableInstances #-}
...@@ -45,14 +47,16 @@ import GHC.TypeLits (AppendSymbol, Symbol) ...@@ -45,14 +47,16 @@ import GHC.TypeLits (AppendSymbol, Symbol)
import Control.Lens import Control.Lens
import Control.Exception (finally) import Control.Exception (finally)
import Control.Monad.Except (withExceptT, ExceptT)
import Control.Monad.IO.Class (liftIO) import Control.Monad.IO.Class (liftIO)
import Control.Monad.Reader (runReaderT) import Control.Monad.Reader (ReaderT, runReaderT)
import Data.Aeson.Encode.Pretty (encodePretty) import Data.Aeson.Encode.Pretty (encodePretty)
import qualified Data.ByteString.Lazy.Char8 as BL8 import qualified Data.ByteString.Lazy.Char8 as BL8
import Data.Swagger import Data.Swagger
import Data.Text (Text) import Data.Text (Text)
import qualified Data.Text.IO as T import qualified Data.Text.IO as T
--import qualified Data.Set as Set --import qualified Data.Set as Set
import Data.Validity
import Network.Wai import Network.Wai
import Network.Wai.Handler.Warp hiding (defaultSettings) import Network.Wai.Handler.Warp hiding (defaultSettings)
...@@ -70,10 +74,11 @@ import Text.Blaze.Html (Html) ...@@ -70,10 +74,11 @@ import Text.Blaze.Html (Html)
--import Gargantext.API.Swagger --import Gargantext.API.Swagger
import Gargantext.Prelude import Gargantext.Prelude
import Gargantext.Core.Types (HasInvalidError(..))
import Gargantext.API.FrontEnd (FrontEndAPI, frontEndServer) import Gargantext.API.FrontEnd (FrontEndAPI, frontEndServer)
import Gargantext.API.Auth (AuthRequest, AuthResponse, auth) import Gargantext.API.Auth (AuthRequest, AuthResponse, auth)
import Gargantext.API.Ngrams (HasRepoVar(..), HasRepoSaver(..), saveRepo) import Gargantext.API.Ngrams (HasRepo(..), HasRepoSaver(..), saveRepo)
import Gargantext.API.Node ( GargServer import Gargantext.API.Node ( GargServer
, Roots , roots , Roots , roots
, NodeAPI , nodeAPI , NodeAPI , nodeAPI
...@@ -84,8 +89,10 @@ import Gargantext.API.Node ( GargServer ...@@ -84,8 +89,10 @@ import Gargantext.API.Node ( GargServer
, HyperdataCorpus , HyperdataCorpus
, HyperdataAnnuaire , HyperdataAnnuaire
) )
import Gargantext.Database.Schema.Node (HasNodeError(..), NodeError)
--import Gargantext.Database.Node.Contact (HyperdataContact) --import Gargantext.Database.Node.Contact (HyperdataContact)
import Gargantext.Database.Utils (HasConnection) import Gargantext.Database.Utils (HasConnection)
import Gargantext.Database.Tree (HasTreeError(..), TreeError)
import Gargantext.Database.Types.Node (NodeId, CorpusId, AnnuaireId) import Gargantext.Database.Types.Node (NodeId, CorpusId, AnnuaireId)
import Gargantext.API.Count ( CountAPI, count, Query) import Gargantext.API.Count ( CountAPI, count, Query)
import Gargantext.API.Search ( SearchAPI, search, SearchQuery) import Gargantext.API.Search ( SearchAPI, search, SearchQuery)
...@@ -115,6 +122,26 @@ import Network.HTTP.Types hiding (Query) ...@@ -115,6 +122,26 @@ import Network.HTTP.Types hiding (Query)
import Gargantext.API.Settings import Gargantext.API.Settings
data GargError
= GargNodeError NodeError
| GargTreeError TreeError
| GargInvalidError Validation
deriving (Show)
makePrisms ''GargError
instance HasNodeError GargError where
_NodeError = _GargNodeError
instance HasInvalidError GargError where
_InvalidError = _GargInvalidError
instance HasTreeError GargError where
_TreeError = _GargTreeError
showAsServantErr :: Show a => a -> ServantErr
showAsServantErr a = err500 { errBody = BL8.pack $ show a }
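A minimal usage sketch (not part of this commit; the helper is hypothetical): with the prisms generated above, a command can raise a typed GargError, which showAsServantErr later renders as an HTTP 500 at the Servant boundary.
import Control.Lens ((#))
import Control.Monad.Except (MonadError, throwError)
-- Raise a typed NodeError through the GargError prism when no node id is given.
failIfNoList :: MonadError GargError m => Maybe NodeId -> m NodeId
failIfNoList = maybe (throwError (_GargNodeError # NoListFound)) pure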
fireWall :: Applicative f => Request -> FireWall -> f Bool fireWall :: Applicative f => Request -> FireWall -> f Bool
fireWall req fw = do fireWall req fw = do
let origin = lookup "Origin" (requestHeaders req) let origin = lookup "Origin" (requestHeaders req)
...@@ -278,13 +305,16 @@ type API = SwaggerFrontAPI :<|> GargAPI :<|> Get '[HTML] Html ...@@ -278,13 +305,16 @@ type API = SwaggerFrontAPI :<|> GargAPI :<|> Get '[HTML] Html
--------------------------------------------------------------------- ---------------------------------------------------------------------
-- | Server declarations -- | Server declarations
server :: (HasConnection env, HasRepoVar env, HasRepoSaver env) server :: forall env. (HasConnection env, HasRepo env, HasSettings env)
=> env -> IO (Server API) => env -> IO (Server API)
server env = do server env = do
-- orchestrator <- scrapyOrchestrator env -- orchestrator <- scrapyOrchestrator env
pure $ swaggerFront pure $ swaggerFront
:<|> hoistServer (Proxy :: Proxy GargAPI) (`runReaderT` env) serverGargAPI :<|> hoistServer (Proxy :: Proxy GargAPI) transform serverGargAPI
:<|> serverStatic :<|> serverStatic
where
transform :: forall a. ReaderT env (ExceptT GargError IO) a -> Handler a
transform = Handler . withExceptT showAsServantErr . (`runReaderT` env)
serverGargAPI :: GargServer GargAPI serverGargAPI :: GargServer GargAPI
serverGargAPI -- orchestrator serverGargAPI -- orchestrator
...@@ -318,7 +348,7 @@ gargMock :: Server GargAPI ...@@ -318,7 +348,7 @@ gargMock :: Server GargAPI
gargMock = mock apiGarg Proxy gargMock = mock apiGarg Proxy
--------------------------------------------------------------------- ---------------------------------------------------------------------
makeApp :: (HasConnection env, HasRepoVar env, HasRepoSaver env) makeApp :: (HasConnection env, HasRepo env, HasSettings env)
=> env -> IO Application => env -> IO Application
makeApp = fmap (serve api) . server makeApp = fmap (serve api) . server
......
...@@ -40,13 +40,11 @@ import Gargantext.Database.Utils (Cmd) ...@@ -40,13 +40,11 @@ import Gargantext.Database.Utils (Cmd)
import Gargantext.Prelude hiding (reverse) import Gargantext.Prelude hiding (reverse)
import Test.QuickCheck (elements, oneof) import Test.QuickCheck (elements, oneof)
import Test.QuickCheck.Arbitrary (Arbitrary, arbitrary) import Test.QuickCheck.Arbitrary (Arbitrary, arbitrary)
import Gargantext.Core.Types.Individu (Username, Password, arbitraryUsername, arbitraryPassword)
--------------------------------------------------- ---------------------------------------------------
-- | Main types for AUTH API -- | Main types for AUTH API
type Username = Text
type Password = Text
data AuthRequest = AuthRequest { _authReq_username :: Username data AuthRequest = AuthRequest { _authReq_username :: Username
, _authReq_password :: Password , _authReq_password :: Password
} }
...@@ -76,18 +74,12 @@ type TreeId = NodeId ...@@ -76,18 +74,12 @@ type TreeId = NodeId
data CheckAuth = InvalidUser | InvalidPassword | Valid Token TreeId data CheckAuth = InvalidUser | InvalidPassword | Valid Token TreeId
deriving (Eq) deriving (Eq)
arbitraryUsername :: [Username]
arbitraryUsername = ["gargantua", "user1", "user2"]
arbitraryPassword :: [Password]
arbitraryPassword = map reverse arbitraryUsername
checkAuthRequest :: Username -> Password -> Cmd err CheckAuth checkAuthRequest :: Username -> Password -> Cmd err CheckAuth
checkAuthRequest u p checkAuthRequest u p
| not (u `elem` arbitraryUsername) = pure InvalidUser | not (u `elem` arbitraryUsername) = pure InvalidUser
| u /= reverse p = pure InvalidPassword | u /= reverse p = pure InvalidPassword
| otherwise = do | otherwise = do
muId <- getRoot u muId <- getRoot "user1"
pure $ maybe InvalidUser (Valid "token" . _node_id) $ head muId pure $ maybe InvalidUser (Valid "token" . _node_id) $ head muId
auth :: AuthRequest -> Cmd err AuthResponse auth :: AuthRequest -> Cmd err AuthResponse
......
{-|
Module : Gargantext.API.Metrics
Description : Server API
Copyright : (c) CNRS, 2017-Present
License : AGPL + CECILL v3
Maintainer : team@gargantext.org
Stability : experimental
Portability : POSIX
Metrics API
-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
module Gargantext.API.Metrics
where
import Data.Text (Text)
import GHC.Generics (Generic)
import Gargantext.Prelude
import Data.Aeson.TH (deriveJSON)
import Test.QuickCheck.Arbitrary (Arbitrary, arbitrary)
import Gargantext.Core.Types (ListType(..))
import Gargantext.Core.Utils.Prefix (unPrefix)
import Data.Swagger
data Metrics = Metrics
{ metrics_data :: [Metric]}
deriving (Generic, Show)
instance ToSchema Metrics
instance Arbitrary Metrics
where
arbitrary = Metrics <$> arbitrary
data Metric = Metric
{ m_label :: !Text
, m_x :: !Double
, m_y :: !Double
, m_cat :: !ListType
} deriving (Generic, Show)
instance ToSchema Metric
instance Arbitrary Metric
where
arbitrary = Metric <$> arbitrary
<*> arbitrary
<*> arbitrary
<*> arbitrary
deriveJSON (unPrefix "metrics_") ''Metrics
deriveJSON (unPrefix "m_") ''Metric
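An illustrative value (not part of this commit): with unPrefix "m_" the field prefix is stripped, so a Metric is expected to serialise roughly as in the comment below (assuming the generically derived ListType encoding is the bare constructor name).
exampleMetric :: Metric
exampleMetric = Metric "sport" 1.0 2.0 GraphTerm
-- encode exampleMetric is expected to yield:
--   {"label":"sport","x":1.0,"y":2.0,"cat":"GraphTerm"}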
{-|
Module : Gargantext.API.Ngrams.Tools
Description : Tools to manage Ngrams Elements (from the API)
Copyright : (c) CNRS, 2017-Present
License : AGPL + CECILL v3
Maintainer : team@gargantext.org
Stability : experimental
Portability : POSIX
-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
module Gargantext.API.Ngrams.Tools
where
import Control.Concurrent
import Control.Lens (_Just, (^.), at, view)
import Control.Monad.Reader
import Data.Map.Strict (Map)
import Data.Set (Set)
import Data.Text (Text)
import Data.Validity
import Gargantext.API.Ngrams
import Gargantext.Core.Types (ListType(..), NodeId, ListId)
import Gargantext.Database.Schema.Ngrams (NgramsType)
import Gargantext.Prelude
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
type RootTerm = Text
getListNgrams :: RepoCmdM env err m
=> [ListId] -> NgramsType
-> m (Map Text NgramsRepoElement)
getListNgrams nodeIds ngramsType = do
v <- view repoVar
repo <- liftIO $ readMVar v
let
ngramsMap = repo ^. r_state . at ngramsType . _Just
ngrams = Map.unionsWith mergeNgramsElement
[ ngramsMap ^. at nodeId . _Just | nodeId <- nodeIds ]
pure ngrams
mapTermListRoot :: RepoCmdM env err m
=> [ListId] -> NgramsType
-> m (Map Text (ListType, (Maybe Text)))
mapTermListRoot nodeIds ngramsType = do
ngrams <- getListNgrams nodeIds ngramsType
pure $ Map.fromList [(t, (_nre_list nre, _nre_root nre))
| (t, nre) <- Map.toList ngrams
]
filterListWithRoot :: ListType -> Map Text (ListType, Maybe Text)
-> Map Text (Maybe RootTerm)
filterListWithRoot lt m = Map.fromList
$ map (\(t,(_,r)) -> (t,r))
$ filter isGraphTerm (Map.toList m)
where
isGraphTerm (_t,(l, maybeRoot)) = case maybeRoot of
Nothing -> l == lt
Just r -> case Map.lookup r m of
Nothing -> panic $ "Garg.API.Ngrams.Tools: filterWithRoot, unknown key: " <> r
Just (l',_) -> l' == lt
groupNodesByNgrams :: Map Text (Maybe RootTerm)
-> Map Text (Set NodeId)
-> Map Text (Set NodeId)
groupNodesByNgrams syn occs = Map.fromListWith (<>) occs'
where
occs' = map toSyn (Map.toList occs)
toSyn (t,ns) = case Map.lookup t syn of
Nothing -> panic $ "[Garg.API.Ngrams.Tools.groupNodesByNgrams] unknown key: " <> t
Just r -> case r of
Nothing -> (t, ns)
Just r' -> (r',ns)
data Diagonal = Diagonal Bool
getCoocByNgrams :: Diagonal -> Map Text (Set NodeId) -> Map (Text, Text) Int
getCoocByNgrams (Diagonal diag) m =
Map.fromList [((t1,t2)
,maybe 0 Set.size $ Set.intersection
<$> Map.lookup t1 m
<*> Map.lookup t2 m
) | (t1,t2) <- case diag of
True -> [ (x,y) | x <- Map.keys m, y <- Map.keys m, x <= y]
False -> listToCombi identity (Map.keys m)
]
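A small worked example (not part of this commit), assuming the definitions above and that numeric literals can stand in for NodeId: with Diagonal False only unordered pairs of distinct terms are counted, and each count is the size of the intersection of the two node sets.
coocExample :: Map (Text, Text) Int
coocExample = getCoocByNgrams (Diagonal False)
                (Map.fromList [ ("a", Set.fromList [1, 2, 3])
                              , ("b", Set.fromList [2, 3]) ])
-- expected: fromList [(("a","b"),2)]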
...@@ -10,7 +10,7 @@ Portability : POSIX ...@@ -10,7 +10,7 @@ Portability : POSIX
Node API Node API
-} -}
{-# OPTIONS_GHC -fno-warn-name-shadowing -fno-warn-orphans #-} {-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE DeriveGeneric #-}
...@@ -21,7 +21,6 @@ Node API ...@@ -21,7 +21,6 @@ Node API
{-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-} {-# LANGUAGE TypeOperators #-}
-------------------------------------------------------------------
module Gargantext.API.Node module Gargantext.API.Node
( module Gargantext.API.Node ( module Gargantext.API.Node
, HyperdataAny(..) , HyperdataAny(..)
...@@ -32,49 +31,58 @@ module Gargantext.API.Node ...@@ -32,49 +31,58 @@ module Gargantext.API.Node
, HyperdataDocument(..) , HyperdataDocument(..)
, HyperdataDocumentV3(..) , HyperdataDocumentV3(..)
) where ) where
-------------------------------------------------------------------
import Control.Lens (prism', set) import Control.Lens (prism', set)
import Control.Monad.IO.Class (liftIO)
import Control.Monad ((>>)) import Control.Monad ((>>))
--import System.IO (putStrLn, readFile) import Control.Monad.IO.Class (liftIO)
import Data.Aeson (FromJSON, ToJSON) import Data.Aeson (FromJSON, ToJSON)
import Data.Text (Text())
import Data.Swagger import Data.Swagger
import Data.Text (Text())
import Data.Time (UTCTime) import Data.Time (UTCTime)
import GHC.Generics (Generic) import GHC.Generics (Generic)
import Servant import Gargantext.API.Metrics
import Gargantext.API.Ngrams (TabType(..), TableNgramsApi, TableNgramsApiGet, tableNgramsPatch, getTableNgrams, HasRepo, QueryParamR)
import Gargantext.API.Ngrams (TabType(..), TableNgramsApi, TableNgramsApiGet, tableNgramsPatch, getTableNgrams, HasRepoVar, HasRepoSaver) import Gargantext.API.Ngrams.Tools
import Gargantext.Prelude import Gargantext.API.Search ( SearchAPI, searchIn, SearchInQuery)
import Gargantext.Database.Types.Node import Gargantext.Core.Types (Offset, Limit, ListType(..), HasInvalidError)
import Gargantext.Database.Utils -- (Cmd, CmdM) import Gargantext.Core.Types.Main (Tree, NodeTree)
import Gargantext.Database.Schema.Node ( getNodesWithParentId, getNode, deleteNode, deleteNodes, mkNodeWithParent, JSONB, NodeError(..), HasNodeError(..))
import Gargantext.Database.Node.Children (getChildren)
import qualified Gargantext.Database.Node.Update as U (update, Update(..))
import Gargantext.Database.Facet (FacetDoc , runViewDocuments, OrderBy(..),FacetChart,runViewAuthorsDoc) import Gargantext.Database.Facet (FacetDoc , runViewDocuments, OrderBy(..),FacetChart,runViewAuthorsDoc)
import Gargantext.Database.Tree (treeDB, HasTreeError(..), TreeError(..)) import qualified Gargantext.Database.Metrics as Metrics
import Gargantext.Database.Metrics.Count (getCoocByDocDev) import Gargantext.Database.Metrics.NgramsByNode (getNodesByNgramsOnlyUser)
import Gargantext.Database.Node.Children (getChildren)
import Gargantext.Database.Schema.Ngrams (NgramsType(..))
import Gargantext.Database.Schema.Node ( getNodesWithParentId, getNode, deleteNode, deleteNodes, mkNodeWithParent, JSONB, NodeError(..), HasNodeError(..))
import Gargantext.Database.Schema.Node (defaultList) import Gargantext.Database.Schema.Node (defaultList)
import Gargantext.Database.Schema.NodeNode (nodesToFavorite, nodesToTrash) import Gargantext.Database.Schema.NodeNode (nodesToFavorite, nodesToTrash)
import Gargantext.API.Search ( SearchAPI, searchIn, SearchInQuery) import Gargantext.Database.Tree (treeDB, HasTreeError(..), TreeError(..))
import Gargantext.Database.Types.Node
-- Graph
import Gargantext.Text.Flow (cooc2graph)
import Gargantext.Viz.Graph hiding (Node)-- (Graph(_graph_metadata),LegendField(..), GraphMetadata(..),readGraphFromJson,defaultGraph)
-- import Gargantext.Core (Lang(..))
import Gargantext.Core.Types (Offset, Limit)
import Gargantext.Core.Types.Main (Tree, NodeTree)
import Gargantext.Database.Types.Node (CorpusId, ContactId) import Gargantext.Database.Types.Node (CorpusId, ContactId)
-- import Gargantext.Text.Terms (TermType(..)) import Gargantext.Database.Utils -- (Cmd, CmdM)
import Gargantext.Prelude
import Gargantext.API.Settings
import Gargantext.Text.Metrics (Scored(..))
import Gargantext.Viz.Graph hiding (Node)-- (Graph(_graph_metadata),LegendField(..), GraphMetadata(..),readGraphFromJson,defaultGraph)
import Gargantext.Viz.Graph.Tools (cooc2graph)
import Servant
import Test.QuickCheck (elements) import Test.QuickCheck (elements)
import Test.QuickCheck.Arbitrary (Arbitrary, arbitrary) import Test.QuickCheck.Arbitrary (Arbitrary, arbitrary)
import qualified Data.Map as Map
import qualified Gargantext.Database.Node.Update as U (update, Update(..))
{-
import qualified Gargantext.Text.List.Learn as Learn
import qualified Data.Vector as Vec
--}
type GargServer api = type GargServer api =
forall env m. forall env err m.
(CmdM env ServantErr m, HasRepoVar env, HasRepoSaver env) ( CmdM env err m
, HasNodeError err
, HasInvalidError err
, HasTreeError err
, HasRepo env
, HasSettings env
)
=> ServerT api m => ServerT api m
------------------------------------------------------------------- -------------------------------------------------------------------
...@@ -143,6 +151,7 @@ type NodeAPI a = Get '[JSON] (Node a) ...@@ -143,6 +151,7 @@ type NodeAPI a = Get '[JSON] (Node a)
:> QueryParam "limit" Int :> QueryParam "limit" Int
:> QueryParam "order" OrderBy :> QueryParam "order" OrderBy
:> SearchAPI :> SearchAPI
:<|> "metrics" :> MetricsAPI
-- TODO-ACCESS: check userId CanRenameNode nodeId -- TODO-ACCESS: check userId CanRenameNode nodeId
-- TODO-EVENTS: NodeRenamed RenameNode or re-use some more general NodeEdited... -- TODO-EVENTS: NodeRenamed RenameNode or re-use some more general NodeEdited...
...@@ -180,9 +189,11 @@ nodeAPI p uId id ...@@ -180,9 +189,11 @@ nodeAPI p uId id
:<|> favApi id :<|> favApi id
:<|> delDocs id :<|> delDocs id
:<|> searchIn id :<|> searchIn id
:<|> getMetrics id
-- Annuaire -- Annuaire
-- :<|> upload -- :<|> upload
-- :<|> query -- :<|> query
------------------------------------------------------------------------ ------------------------------------------------------------------------
data RenameNode = RenameNode { r_name :: Text } data RenameNode = RenameNode { r_name :: Text }
deriving (Generic) deriving (Generic)
...@@ -279,31 +290,29 @@ type GraphAPI = Get '[JSON] Graph ...@@ -279,31 +290,29 @@ type GraphAPI = Get '[JSON] Graph
graphAPI :: NodeId -> GargServer GraphAPI graphAPI :: NodeId -> GargServer GraphAPI
graphAPI nId = do graphAPI nId = do
nodeGraph <- getNode nId HyperdataGraph nodeGraph <- getNode nId HyperdataGraph
let title = "Title" let metadata = GraphMetadata "Title" [maybe 0 identity $ _node_parentId nodeGraph]
let metadata = GraphMetadata title [maybe 0 identity $ _node_parentId nodeGraph]
[ LegendField 1 "#FFF" "Cluster" [ LegendField 1 "#FFF" "Cluster"
, LegendField 2 "#FFF" "Cluster" , LegendField 2 "#FFF" "Cluster"
] ]
-- (map (\n -> LegendField n "#FFFFFF" (pack $ show n)) [1..10]) -- (map (\n -> LegendField n "#FFFFFF" (pack $ show n)) [1..10])
let cId = maybe (panic "no parentId") identity $ _node_parentId nodeGraph let cId = maybe (panic "no parentId") identity $ _node_parentId nodeGraph
lId <- defaultList cId lId <- defaultList cId
myCooc <- getCoocByDocDev cId lId ngs <- filterListWithRoot GraphTerm <$> mapTermListRoot [lId] NgramsTerms
liftIO $ set graph_metadata (Just metadata)
<$> cooc2graph myCooc myCooc <- Map.filter (>1) <$> getCoocByNgrams (Diagonal False)
<$> groupNodesByNgrams ngs
-- <$> maybe defaultGraph identity <$> getNodesByNgramsOnlyUser cId NgramsTerms (Map.keys ngs)
-- <$> readGraphFromJson "purescript-gargantext/dist/examples/imtNew.json"
-- t <- textFlow (Mono EN) (Contexts contextText) liftIO $ set graph_metadata (Just metadata) <$> cooc2graph myCooc
-- liftIO $ liftIO $ pure $ maybe t identity maybeGraph
-- TODO what do we get about the node? to replace contextText
instance HasNodeError ServantErr where instance HasNodeError ServantErr where
_NodeError = prism' mk (const Nothing) -- $ panic "HasNodeError ServantErr: not a prism") _NodeError = prism' mk (const Nothing) -- $ panic "HasNodeError ServantErr: not a prism")
where where
e = "NodeError: " e = "Gargantext NodeError: "
mk NoListFound = err404 { errBody = e <> "No list found" } mk NoListFound = err404 { errBody = e <> "No list found" }
mk NoRootFound = err404 { errBody = e <> "No Root found" } mk NoRootFound = err404 { errBody = e <> "No Root found" }
mk NoCorpusFound = err404 { errBody = e <> "No Corpus found" } mk NoCorpusFound = err404 { errBody = e <> "No Corpus found" }
...@@ -335,23 +344,25 @@ treeAPI = treeDB ...@@ -335,23 +344,25 @@ treeAPI = treeDB
------------------------------------------------------------------------ ------------------------------------------------------------------------
-- | Check if the name is less than 255 char -- | Check if the name is less than 255 char
rename :: NodeId -> RenameNode -> Cmd err [Int] rename :: NodeId -> RenameNode -> Cmd err [Int]
rename nId (RenameNode name) = U.update (U.Rename nId name) rename nId (RenameNode name') = U.update (U.Rename nId name')
getTable :: NodeId -> Maybe TabType getTable :: NodeId -> Maybe TabType
-> Maybe Offset -> Maybe Limit -> Maybe Offset -> Maybe Limit
-> Maybe OrderBy -> Cmd err [FacetDoc] -> Maybe OrderBy -> Cmd err [FacetDoc]
getTable cId ft o l order = case ft of getTable cId ft o l order =
(Just Docs) -> runViewDocuments cId False o l order case ft of
(Just Trash) -> runViewDocuments cId True o l order (Just Docs) -> runViewDocuments cId False o l order
_ -> panic "not implemented" (Just Trash) -> runViewDocuments cId True o l order
_ -> panic "not implemented"
getPairing :: ContactId -> Maybe TabType getPairing :: ContactId -> Maybe TabType
-> Maybe Offset -> Maybe Limit -> Maybe Offset -> Maybe Limit
-> Maybe OrderBy -> Cmd err [FacetDoc] -> Maybe OrderBy -> Cmd err [FacetDoc]
getPairing cId ft o l order = case ft of getPairing cId ft o l order =
(Just Docs) -> runViewAuthorsDoc cId False o l order case ft of
(Just Trash) -> runViewAuthorsDoc cId True o l order (Just Docs) -> runViewAuthorsDoc cId False o l order
_ -> panic "not implemented" (Just Trash) -> runViewAuthorsDoc cId True o l order
_ -> panic "not implemented"
getChart :: NodeId -> Maybe UTCTime -> Maybe UTCTime getChart :: NodeId -> Maybe UTCTime -> Maybe UTCTime
...@@ -359,7 +370,7 @@ getChart :: NodeId -> Maybe UTCTime -> Maybe UTCTime ...@@ -359,7 +370,7 @@ getChart :: NodeId -> Maybe UTCTime -> Maybe UTCTime
getChart _ _ _ = undefined -- TODO getChart _ _ _ = undefined -- TODO
postNode :: HasNodeError err => UserId -> NodeId -> PostNode -> Cmd err [NodeId] postNode :: HasNodeError err => UserId -> NodeId -> PostNode -> Cmd err [NodeId]
postNode uId pId (PostNode name nt) = mkNodeWithParent nt (Just pId) uId name postNode uId pId (PostNode nodeName nt) = mkNodeWithParent nt (Just pId) uId nodeName
putNode :: NodeId -> Cmd err Int putNode :: NodeId -> Cmd err Int
putNode = undefined -- TODO putNode = undefined -- TODO
...@@ -385,3 +396,25 @@ query s = pure s ...@@ -385,3 +396,25 @@ query s = pure s
-- putStrLn content -- putStrLn content
-- pure (pack "Data loaded") -- pure (pack "Data loaded")
-------------------------------------------------------------------------------
type MetricsAPI = Summary "SepGen IncExc metrics"
:> QueryParam "list" ListId
:> QueryParamR "ngramsType" TabType
:> QueryParam "limit" Int
:> Get '[JSON] Metrics
getMetrics :: NodeId -> GargServer MetricsAPI
getMetrics cId maybeListId tabType maybeLimit = do
(ngs', scores) <- Metrics.getMetrics' cId maybeListId tabType maybeLimit
let
metrics = map (\(Scored t s1 s2) -> Metric t s1 s2 (listType t ngs')) scores
listType t m = maybe (panic errorMsg) fst $ Map.lookup t m
errorMsg = "API.Node.metrics: key absent"
pure $ Metrics metrics
...@@ -8,18 +8,17 @@ Stability : experimental ...@@ -8,18 +8,17 @@ Stability : experimental
Portability : POSIX Portability : POSIX
-} -}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-} {-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DataKinds #-}
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE RankNTypes #-}
{-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RankNTypes #-} {-# LANGUAGE TemplateHaskell #-}
module Gargantext.API.Settings module Gargantext.API.Settings
where where
...@@ -30,7 +29,9 @@ import GHC.Enum ...@@ -30,7 +29,9 @@ import GHC.Enum
import GHC.Generics (Generic) import GHC.Generics (Generic)
import Prelude (Bounded(), fail) import Prelude (Bounded(), fail)
import System.Environment (lookupEnv) import System.Environment (lookupEnv)
import System.IO (FilePath) import System.IO (FilePath, hClose)
import System.IO.Temp (withTempFile)
import System.FileLock (tryLockFile, unlockFile, SharedExclusive(Exclusive))
import Database.PostgreSQL.Simple (Connection, connect) import Database.PostgreSQL.Simple (Connection, connect)
import Network.HTTP.Client (Manager) import Network.HTTP.Client (Manager)
import Network.HTTP.Client.TLS (newTlsManager) import Network.HTTP.Client.TLS (newTlsManager)
...@@ -38,11 +39,10 @@ import Network.HTTP.Client.TLS (newTlsManager) ...@@ -38,11 +39,10 @@ import Network.HTTP.Client.TLS (newTlsManager)
import Data.Aeson import Data.Aeson
import Data.Maybe (fromMaybe) import Data.Maybe (fromMaybe)
import Data.Either (either) import Data.Either (either)
import Data.JsonState (mkSaveState)
import Data.Text import Data.Text
import Data.Text.Encoding (encodeUtf8) import Data.Text.Encoding (encodeUtf8)
import Data.Time.Units import Data.ByteString (ByteString)
import Data.ByteString.Lazy.Internal import qualified Data.ByteString.Lazy as L
import Servant import Servant
import Servant.Client (BaseUrl, parseBaseUrl) import Servant.Client (BaseUrl, parseBaseUrl)
...@@ -52,13 +52,14 @@ import qualified Jose.Jwk as Jose ...@@ -52,13 +52,14 @@ import qualified Jose.Jwk as Jose
import qualified Jose.Jwa as Jose import qualified Jose.Jwa as Jose
import Control.Concurrent import Control.Concurrent
import Control.Debounce (mkDebounce, defaultDebounceSettings, debounceFreq, debounceAction)
import Control.Exception (finally) import Control.Exception (finally)
import Control.Monad.Logger import Control.Monad.Logger
import Control.Monad.Reader import Control.Monad.Reader
import Control.Lens import Control.Lens
import Gargantext.Prelude import Gargantext.Prelude
import Gargantext.Database.Utils (databaseParameters, HasConnection(..), Cmd', runCmd) import Gargantext.Database.Utils (databaseParameters, HasConnection(..), Cmd', runCmd)
import Gargantext.API.Ngrams (NgramsRepo, HasRepoVar(..), HasRepoSaver(..), initMockRepo, r_version, saveRepo) import Gargantext.API.Ngrams (NgramsRepo, HasRepoVar(..), HasRepoSaver(..), HasRepo(..), RepoEnv(..), r_version, saveRepo, initRepo, renv_var, renv_lock)
import Gargantext.API.Orchestrator.Types import Gargantext.API.Orchestrator.Types
type PortNumber = Int type PortNumber = Int
...@@ -79,10 +80,14 @@ data Settings = Settings ...@@ -79,10 +80,14 @@ data Settings = Settings
, _jwtSecret :: Jose.Jwk -- ^ key from the jose-jwt package , _jwtSecret :: Jose.Jwk -- ^ key from the jose-jwt package
, _sendLoginEmails :: SendEmailType , _sendLoginEmails :: SendEmailType
, _scrapydUrl :: BaseUrl , _scrapydUrl :: BaseUrl
, _fileFolder :: FilePath
} }
makeLenses ''Settings makeLenses ''Settings
class HasSettings env where
settings :: Getter env Settings
parseJwk :: Text -> Jose.Jwk parseJwk :: Text -> Jose.Jwk
parseJwk secretStr = jwk parseJwk secretStr = jwk
...@@ -106,6 +111,7 @@ devSettings = Settings ...@@ -106,6 +111,7 @@ devSettings = Settings
, _jwtSecret = parseJwk "MVg0YAPVSPiYQc/qIs/rV/X32EFR0zOJWfHFgMbszMw=" , _jwtSecret = parseJwk "MVg0YAPVSPiYQc/qIs/rV/X32EFR0zOJWfHFgMbszMw="
, _sendLoginEmails = LogEmailToConsole , _sendLoginEmails = LogEmailToConsole
, _scrapydUrl = fromMaybe (panic "Invalid scrapy URL") $ parseBaseUrl "http://localhost:6800" , _scrapydUrl = fromMaybe (panic "Invalid scrapy URL") $ parseBaseUrl "http://localhost:6800"
, _fileFolder = "data"
} }
...@@ -135,14 +141,13 @@ optSetting name d = do ...@@ -135,14 +141,13 @@ optSetting name d = do
data FireWall = FireWall { unFireWall :: Bool } data FireWall = FireWall { unFireWall :: Bool }
data Env = Env data Env = Env
{ _env_settings :: !Settings { _env_settings :: !Settings
, _env_logger :: !LoggerSet , _env_logger :: !LoggerSet
, _env_conn :: !Connection , _env_conn :: !Connection
, _env_repo_var :: !(MVar NgramsRepo) , _env_repo :: !RepoEnv
, _env_repo_saver :: !(IO ()) , _env_manager :: !Manager
, _env_manager :: !Manager , _env_self_url :: !BaseUrl
, _env_self_url :: !BaseUrl , _env_scrapers :: !ScrapersEnv
, _env_scrapers :: !ScrapersEnv
} }
deriving (Generic) deriving (Generic)
...@@ -152,10 +157,16 @@ instance HasConnection Env where ...@@ -152,10 +157,16 @@ instance HasConnection Env where
connection = env_conn connection = env_conn
instance HasRepoVar Env where instance HasRepoVar Env where
repoVar = env_repo_var repoVar = repoEnv . repoVar
instance HasRepoSaver Env where instance HasRepoSaver Env where
repoSaver = env_repo_saver repoSaver = repoEnv . repoSaver
instance HasRepo Env where
repoEnv = env_repo
instance HasSettings Env where
settings = env_settings
data MockEnv = MockEnv data MockEnv = MockEnv
{ _menv_firewall :: !FireWall { _menv_firewall :: !FireWall
...@@ -164,20 +175,50 @@ data MockEnv = MockEnv ...@@ -164,20 +175,50 @@ data MockEnv = MockEnv
makeLenses ''MockEnv makeLenses ''MockEnv
-- | TODO add this path in Settings
repoSnapshot :: FilePath repoSnapshot :: FilePath
repoSnapshot = "repo.json" repoSnapshot = "repo.json"
readRepo :: IO (MVar NgramsRepo) -- | TODO add hard coded file in Settings
readRepo = do -- This assumes we own the lock on repoSnapshot.
repoSaverAction :: ToJSON a => a -> IO ()
repoSaverAction a = do
withTempFile "." "tmp-repo.json" $ \fp h -> do
-- printDebug "repoSaverAction" fp
L.hPut h $ encode a
hClose h
renameFile fp repoSnapshot
mkRepoSaver :: MVar NgramsRepo -> IO (IO ())
mkRepoSaver repo_var = mkDebounce settings
where
settings = defaultDebounceSettings
{ debounceFreq = 1000000 -- 1 second
, debounceAction = withMVar repo_var repoSaverAction
-- ^ Here this not only `readMVar` but `takeMVar`.
-- Namely while repoSaverAction is saving no other change
-- can be made to the MVar.
-- This might not be efficient and thus be reconsidered later.
-- However this enables to safely perform a *final* save.
-- See `cleanEnv`.
-- Future work:
-- * Add a new MVar just for saving.
}
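A standalone sketch (not part of this commit) of the debounce behaviour relied on above: mkDebounce from Control.Debounce (auto-update) returns an action that coalesces bursts of calls, so the repo snapshot is not rewritten once per change.
import Control.Concurrent (threadDelay)
import Control.Debounce (mkDebounce, defaultDebounceSettings, debounceFreq, debounceAction)
import Control.Monad (replicateM_)
debounceDemo :: IO ()
debounceDemo = do
  save <- mkDebounce defaultDebounceSettings
            { debounceFreq   = 1000000                      -- 1 second
            , debounceAction = putStrLn "snapshot written"  -- stands in for repoSaverAction
            }
  replicateM_ 100 save  -- a burst of calls is coalesced into a few writes at most
  threadDelay 2000000   -- leave time for the debounced action to fire before exiting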
readRepoEnv :: IO RepoEnv
readRepoEnv = do
-- | Does file exist ? :: Bool -- | Does file exist ? :: Bool
repoFile <- doesFileExist repoSnapshot repoFile <- doesFileExist repoSnapshot
-- | Is file not empty ? :: Bool -- | Is file not empty ? :: Bool
repoExists <- if repoFile repoExists <- if repoFile
then (>0) <$> getFileSize repoSnapshot then (>0) <$> getFileSize repoSnapshot
else pure repoFile else pure False
newMVar =<< mlock <- tryLockFile repoSnapshot Exclusive
lock <- maybe (panic "Repo file already locked") pure mlock
mvar <- newMVar =<<
if repoExists if repoExists
then do then do
e_repo <- eitherDecodeFileStrict repoSnapshot e_repo <- eitherDecodeFileStrict repoSnapshot
...@@ -186,12 +227,10 @@ readRepo = do ...@@ -186,12 +227,10 @@ readRepo = do
copyFile repoSnapshot archive copyFile repoSnapshot archive
pure repo pure repo
else else
pure initMockRepo pure initRepo
mkRepoSaver :: MVar NgramsRepo -> IO (IO ()) saver <- mkRepoSaver mvar
mkRepoSaver repo_var = do pure $ RepoEnv { _renv_var = mvar, _renv_saver = saver, _renv_lock = lock }
saveAction <- mkSaveState (10 :: Second) repoSnapshot
pure $ readMVar repo_var >>= saveAction
newEnv :: PortNumber -> FilePath -> IO Env newEnv :: PortNumber -> FilePath -> IO Env
newEnv port file = do newEnv port file = do
...@@ -199,31 +238,28 @@ newEnv port file = do ...@@ -199,31 +238,28 @@ newEnv port file = do
settings <- pure (devSettings & appPort .~ port) -- TODO read from 'file' settings <- pure (devSettings & appPort .~ port) -- TODO read from 'file'
when (port /= settings ^. appPort) $ when (port /= settings ^. appPort) $
panic "TODO: conflicting settings of port" panic "TODO: conflicting settings of port"
self_url <- parseBaseUrl $ "http://0.0.0.0:" <> show port self_url <- parseBaseUrl $ "http://0.0.0.0:" <> show port
param <- databaseParameters file param <- databaseParameters file
conn <- connect param conn <- connect param
repo <- readRepoEnv
repo_var <- readRepo
repo_saver <- mkRepoSaver repo_var
scrapers_env <- newJobEnv defaultSettings manager scrapers_env <- newJobEnv defaultSettings manager
logger <- newStderrLoggerSet defaultBufSize logger <- newStderrLoggerSet defaultBufSize
pure $ Env pure $ Env
{ _env_settings = settings { _env_settings = settings
, _env_logger = logger , _env_logger = logger
, _env_conn = conn , _env_conn = conn
, _env_repo_var = repo_var , _env_repo = repo
, _env_repo_saver = repo_saver
, _env_manager = manager , _env_manager = manager
, _env_scrapers = scrapers_env , _env_scrapers = scrapers_env
, _env_self_url = self_url , _env_self_url = self_url
} }
data DevEnv = DevEnv data DevEnv = DevEnv
{ _dev_env_conn :: !Connection { _dev_env_conn :: !Connection
, _dev_env_repo_var :: !(MVar NgramsRepo) , _dev_env_repo :: !RepoEnv
, _dev_env_repo_saver :: !(IO ()) , _dev_env_settings :: !Settings
} }
makeLenses ''DevEnv makeLenses ''DevEnv
...@@ -232,25 +268,45 @@ instance HasConnection DevEnv where ...@@ -232,25 +268,45 @@ instance HasConnection DevEnv where
connection = dev_env_conn connection = dev_env_conn
instance HasRepoVar DevEnv where instance HasRepoVar DevEnv where
repoVar = dev_env_repo_var repoVar = repoEnv . repoVar
instance HasRepoSaver DevEnv where instance HasRepoSaver DevEnv where
repoSaver = dev_env_repo_saver repoSaver = repoEnv . repoSaver
newDevEnvWith :: FilePath -> IO DevEnv instance HasRepo DevEnv where
newDevEnvWith file = do repoEnv = dev_env_repo
param <- databaseParameters file
conn <- connect param instance HasSettings DevEnv where
repo_var <- newMVar initMockRepo settings = dev_env_settings
repo_saver <- mkRepoSaver repo_var
pure $ DevEnv cleanEnv :: HasRepo env => env -> IO ()
{ _dev_env_conn = conn cleanEnv env = do
, _dev_env_repo_var = repo_var r <- takeMVar (env ^. repoEnv . renv_var)
, _dev_env_repo_saver = repo_saver repoSaverAction r
} unlockFile (env ^. repoEnv . renv_lock)
newDevEnv :: IO DevEnv withDevEnv :: FilePath -> (DevEnv -> IO a) -> IO a
newDevEnv = newDevEnvWith "gargantext.ini" withDevEnv iniPath k = do
env <- newDevEnv
k env `finally` cleanEnv env
where
newDevEnv = do
param <- databaseParameters iniPath
conn <- connect param
repo <- readRepoEnv
pure $ DevEnv
{ _dev_env_conn = conn
, _dev_env_repo = repo
, _dev_env_settings = devSettings
}
-- | Run Cmd Sugar for the Repl (GHCI)
runCmdRepl :: Show err => Cmd' DevEnv err a -> IO a
runCmdRepl f = withDevEnv "gargantext.ini" $ \env -> runCmdDev env f
runCmdReplServantErr :: Cmd' DevEnv ServantErr a -> IO a
runCmdReplServantErr = runCmdRepl
-- Use only for dev -- Use only for dev
-- In particular this writes the repo file after running -- In particular this writes the repo file after running
......
...@@ -20,9 +20,12 @@ module Gargantext.Core.Types ( module Gargantext.Core.Types.Main ...@@ -20,9 +20,12 @@ module Gargantext.Core.Types ( module Gargantext.Core.Types.Main
, Term, Terms(..) , Term, Terms(..)
, TokenTag(..), POS(..), NER(..) , TokenTag(..), POS(..), NER(..)
, Label, Stems , Label, Stems
, HasInvalidError(..), assertValid
) where ) where
import GHC.Generics import Control.Lens (Prism', (#))
import Control.Monad.Error.Class (MonadError, throwError)
import Data.Aeson import Data.Aeson
import Data.Semigroup import Data.Semigroup
import Data.Monoid import Data.Monoid
...@@ -30,11 +33,13 @@ import Data.Set (Set, empty) ...@@ -30,11 +33,13 @@ import Data.Set (Set, empty)
--import qualified Data.Set as S --import qualified Data.Set as S
import Data.Text (Text, unpack) import Data.Text (Text, unpack)
import Data.Validity
import Gargantext.Core.Types.Main import Gargantext.Core.Types.Main
import Gargantext.Database.Types.Node import Gargantext.Database.Types.Node
import Gargantext.Prelude import Gargantext.Prelude
import GHC.Generics
------------------------------------------------------------------------ ------------------------------------------------------------------------
type Term = Text type Term = Text
...@@ -120,3 +125,11 @@ instance Monoid TokenTag where ...@@ -120,3 +125,11 @@ instance Monoid TokenTag where
mconcat = foldl mappend mempty mconcat = foldl mappend mempty
class HasInvalidError e where
_InvalidError :: Prism' e Validation
assertValid :: (MonadError e m, HasInvalidError e) => Validation -> m ()
assertValid v = when (not $ validationIsValid v) $ throwError $ _InvalidError # v
-- assertValid :: MonadIO m => Validation -> m ()
-- assertValid v = when (not $ validationIsValid v) $ fail $ show v
...@@ -11,13 +11,31 @@ Individu defintions ...@@ -11,13 +11,31 @@ Individu defintions
-} -}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-} {-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
module Gargantext.Core.Types.Individu module Gargantext.Core.Types.Individu
where where
import Data.Text (Text) import Gargantext.Prelude hiding (reverse)
import Data.Text (Text, pack, reverse)
type Username = Text type Username = Text
type Password = Text
type UsernameMaster = Username type UsernameMaster = Username
type UsernameSimple = Username type UsernameSimple = Username
arbitraryUsername :: [Username]
arbitraryUsername = ["gargantua"] <> users
where
users = zipWith (\a b -> a <> (pack . show) b)
(repeat "user") ([1..20]::[Int])
arbitraryPassword :: [Password]
arbitraryPassword = map reverse arbitraryUsername
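A small check (not part of this commit), assuming the definitions above: each demo password is simply the reversed username.
credentialsExample :: Bool
credentialsExample = take 2 arbitraryUsername == ["gargantua", "user1"]
                  && take 2 arbitraryPassword == ["autnagrag", "1resu"]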
...@@ -26,9 +26,10 @@ import Data.Aeson (FromJSON, ToJSON, toJSON) ...@@ -26,9 +26,10 @@ import Data.Aeson (FromJSON, ToJSON, toJSON)
import Data.Aeson as A import Data.Aeson as A
import Data.Aeson.TH (deriveJSON) import Data.Aeson.TH (deriveJSON)
import Data.Map (fromList, lookup) import Data.Map (fromList, lookup)
import Data.Either (Either(..))
import Data.Eq (Eq()) import Data.Eq (Eq())
import Data.Monoid ((<>)) import Data.Monoid ((<>))
import Data.Text (Text) import Data.Text (Text, unpack)
import Data.Swagger import Data.Swagger
import Gargantext.Database.Types.Node -- (NodeType(..), Node, Hyperdata(..)) import Gargantext.Database.Types.Node -- (NodeType(..), Node, Hyperdata(..))
...@@ -36,8 +37,10 @@ import Gargantext.Core.Utils.Prefix (unPrefix) ...@@ -36,8 +37,10 @@ import Gargantext.Core.Utils.Prefix (unPrefix)
import Gargantext.Prelude import Gargantext.Prelude
import GHC.Generics (Generic) import GHC.Generics (Generic)
import Servant.API (FromHttpApiData(..))
import Test.QuickCheck (elements) import Test.QuickCheck (elements)
import Test.QuickCheck.Arbitrary (Arbitrary, arbitrary) import Test.QuickCheck.Arbitrary (Arbitrary, arbitrary)
import Text.Read (read)
------------------------------------------------------------------------ ------------------------------------------------------------------------
data NodeTree = NodeTree { _nt_name :: Text data NodeTree = NodeTree { _nt_name :: Text
...@@ -84,21 +87,25 @@ type HashId = Text ...@@ -84,21 +87,25 @@ type HashId = Text
type TypeId = Int type TypeId = Int
-- TODO multiple ListType declaration, remove it -- TODO multiple ListType declaration, remove it
data ListType = StopList | CandidateList | GraphList data ListType = StopTerm | CandidateTerm | GraphTerm
deriving (Generic, Eq, Ord, Show, Enum, Bounded) deriving (Generic, Eq, Ord, Show, Read, Enum, Bounded)
instance ToJSON ListType instance ToJSON ListType
instance FromJSON ListType instance FromJSON ListType
instance ToSchema ListType instance ToSchema ListType
instance ToParamSchema ListType
instance Arbitrary ListType where instance Arbitrary ListType where
arbitrary = elements [minBound..maxBound] arbitrary = elements [minBound..maxBound]
instance FromHttpApiData ListType where
parseUrlPiece = Right . read . unpack
type ListTypeId = Int type ListTypeId = Int
listTypeId :: ListType -> ListTypeId listTypeId :: ListType -> ListTypeId
listTypeId StopList = 0 listTypeId StopTerm = 0
listTypeId CandidateList = 1 listTypeId CandidateTerm = 1
listTypeId GraphList = 2 listTypeId GraphTerm = 2
fromListTypeId :: ListTypeId -> Maybe ListType fromListTypeId :: ListTypeId -> Maybe ListType
fromListTypeId i = lookup i $ fromList [ (listTypeId l, l) | l <- [minBound..maxBound]] fromListTypeId i = lookup i $ fromList [ (listTypeId l, l) | l <- [minBound..maxBound]]
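A quick check (not part of this commit), following directly from the definitions above: the renamed constructors still round-trip through their numeric ids.
listTypeRoundTrip :: Bool
listTypeRoundTrip = all (\l -> fromListTypeId (listTypeId l) == Just l)
                        [minBound .. maxBound :: ListType]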
......
{-|
Module : Gargantext.Database.Access
Description : Access to Database
Copyright : (c) CNRS, 2017-Present
License : AGPL + CECILL v3
Maintainer : team@gargantext.org
Stability : experimental
Portability : POSIX
-}
{-# LANGUAGE NoImplicitPrelude #-}
module Gargantext.Database.Access where
data Action = Read | Write | Exec
data Roles = RoleUser | RoleMaster
...@@ -38,7 +38,6 @@ userMaster = "gargantua" ...@@ -38,7 +38,6 @@ userMaster = "gargantua"
userArbitrary :: Text userArbitrary :: Text
userArbitrary = "user1" userArbitrary = "user1"
nodeTypeId :: NodeType -> NodeTypeId nodeTypeId :: NodeType -> NodeTypeId
nodeTypeId n = nodeTypeId n =
case n of case n of
...@@ -52,7 +51,8 @@ nodeTypeId n = ...@@ -52,7 +51,8 @@ nodeTypeId n =
--NodeSwap -> 19 --NodeSwap -> 19
---- Lists ---- Lists
NodeList -> 5 NodeList -> 5
NodeListModel -> 10
---- Scores ---- Scores
-- NodeOccurrences -> 10 -- NodeOccurrences -> 10
......
...@@ -148,7 +148,7 @@ instance Arbitrary FacetDoc where ...@@ -148,7 +148,7 @@ instance Arbitrary FacetDoc where
| id' <- [1..10] | id' <- [1..10]
, year <- [1990..2000] , year <- [1990..2000]
, t <- ["title", "another title"] , t <- ["title", "another title"]
, hp <- hyperdataDocuments , hp <- arbitraryHyperdataDocuments
, fav <- [True, False] , fav <- [True, False]
, ngramCount <- [3..100] , ngramCount <- [3..100]
] ]
...@@ -215,8 +215,8 @@ viewAuthorsDoc cId _ nt = proc () -> do ...@@ -215,8 +215,8 @@ viewAuthorsDoc cId _ nt = proc () -> do
(doc,(_,(_,(_,contact)))) <- queryAuthorsDoc -< () (doc,(_,(_,(_,contact)))) <- queryAuthorsDoc -< ()
{-nn <- queryNodeNodeTable -< () {-nn <- queryNodeNodeTable -< ()
restrict -< nodeNode_node1_id nn .== _node_id doc restrict -< nn_node1_id nn .== _node_id doc
-- restrict -< nodeNode_delete nn .== (pgBool t) -- restrict -< nn_delete nn .== (pgBool t)
-} -}
restrict -< _node_id contact .== (toNullable $ pgNodeId cId) restrict -< _node_id contact .== (toNullable $ pgNodeId cId)
...@@ -229,17 +229,17 @@ queryAuthorsDoc = leftJoin5 queryNodeTable queryNodeNgramTable queryNgramsTable ...@@ -229,17 +229,17 @@ queryAuthorsDoc = leftJoin5 queryNodeTable queryNodeNgramTable queryNgramsTable
where where
cond12 :: (NodeNgramRead, NodeRead) -> Column PGBool cond12 :: (NodeNgramRead, NodeRead) -> Column PGBool
cond12 (nodeNgram, doc) = _node_id doc cond12 (nodeNgram, doc) = _node_id doc
.== _nn_node_id nodeNgram .== nng_node_id nodeNgram
cond23 :: (NgramsRead, (NodeNgramRead, NodeReadNull)) -> Column PGBool cond23 :: (NgramsRead, (NodeNgramRead, NodeReadNull)) -> Column PGBool
cond23 (ngrams, (nodeNgram, _)) = ngrams_id ngrams cond23 (ngrams, (nodeNgram, _)) = ngrams_id ngrams
.== _nn_ngrams_id nodeNgram .== nng_ngrams_id nodeNgram
cond34 :: (NodeNgramRead, (NgramsRead, (NodeNgramReadNull, NodeReadNull))) -> Column PGBool cond34 :: (NodeNgramRead, (NgramsRead, (NodeNgramReadNull, NodeReadNull))) -> Column PGBool
cond34 (nodeNgram2, (ngrams, (_,_)))= ngrams_id ngrams .== _nn_ngrams_id nodeNgram2 cond34 (nodeNgram2, (ngrams, (_,_)))= ngrams_id ngrams .== nng_ngrams_id nodeNgram2
cond45 :: (NodeRead, (NodeNgramRead, (NgramsReadNull, (NodeNgramReadNull, NodeReadNull)))) -> Column PGBool cond45 :: (NodeRead, (NodeNgramRead, (NgramsReadNull, (NodeNgramReadNull, NodeReadNull)))) -> Column PGBool
cond45 (contact, (nodeNgram2, (_, (_,_)))) = _node_id contact .== _nn_node_id nodeNgram2 cond45 (contact, (nodeNgram2, (_, (_,_)))) = _node_id contact .== nng_node_id nodeNgram2
------------------------------------------------------------------------ ------------------------------------------------------------------------
...@@ -254,11 +254,11 @@ viewDocuments :: CorpusId -> Trash -> NodeTypeId -> Query FacetDocRead ...@@ -254,11 +254,11 @@ viewDocuments :: CorpusId -> Trash -> NodeTypeId -> Query FacetDocRead
viewDocuments cId t ntId = proc () -> do viewDocuments cId t ntId = proc () -> do
n <- queryNodeTable -< () n <- queryNodeTable -< ()
nn <- queryNodeNodeTable -< () nn <- queryNodeNodeTable -< ()
restrict -< _node_id n .== nodeNode_node2_id nn restrict -< _node_id n .== nn_node2_id nn
restrict -< nodeNode_node1_id nn .== (pgNodeId cId) restrict -< nn_node1_id nn .== (pgNodeId cId)
restrict -< _node_typename n .== (pgInt4 ntId) restrict -< _node_typename n .== (pgInt4 ntId)
restrict -< nodeNode_delete nn .== (pgBool t) restrict -< nn_delete nn .== (pgBool t)
returnA -< FacetDoc (_node_id n) (_node_date n) (_node_name n) (_node_hyperdata n) (nodeNode_favorite nn) (pgInt4 1) returnA -< FacetDoc (_node_id n) (_node_date n) (_node_name n) (_node_hyperdata n) (nn_favorite nn) (pgInt4 1)
------------------------------------------------------------------------ ------------------------------------------------------------------------
......
{-|
Module : Gargantext.Database.Flow.Annuaire
Description : Database Flow Annuaire
Copyright : (c) CNRS, 2017-Present
License : AGPL + CECILL v3
Maintainer : team@gargantext.org
Stability : experimental
Portability : POSIX
-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE FlexibleContexts #-}
module Gargantext.Database.Flow.Annuaire
where
{-
import Gargantext.Prelude
import Gargantext.Database.Flow
-- | Annuaire
flowAnnuaire :: FlowCmdM env ServantErr m => FilePath -> m ()
flowAnnuaire filePath = do
contacts <- liftIO $ deserialiseImtUsersFromFile filePath
ps <- flowInsertAnnuaire "Annuaire"
$ map (\h-> ToDbContact h)
$ map addUniqIdsContact contacts
printDebug "length annuaire" ps
-}
...@@ -54,10 +54,10 @@ data DocumentIdWithNgrams a = ...@@ -54,10 +54,10 @@ data DocumentIdWithNgrams a =
, document_ngrams :: Map (NgramsT Ngrams) Int , document_ngrams :: Map (NgramsT Ngrams) Int
} deriving (Show) } deriving (Show)
-- | TODO for now, list Type is CandidateList because Graph Terms -- | TODO for now, list Type is CandidateTerm because Graph Terms
-- have to be detected in next step in the flow -- have to be detected in next step in the flow
insertToNodeNgrams :: Map NgramsIndexed (Map NgramsType (Map NodeId Int)) -> Cmd err Int insertToNodeNgrams :: Map NgramsIndexed (Map NgramsType (Map NodeId Int)) -> Cmd err Int
insertToNodeNgrams m = insertNodeNgrams [ NodeNgram n (_ngramsId ng) Nothing (ngramsTypeId t) (listTypeId CandidateList) (fromIntegral i) insertToNodeNgrams m = insertNodeNgrams [ NodeNgram n (_ngramsId ng) Nothing (ngramsTypeId t) (listTypeId CandidateTerm) (fromIntegral i)
| (ng, t2n2i) <- DM.toList m | (ng, t2n2i) <- DM.toList m
, (t, n2i) <- DM.toList t2n2i , (t, n2i) <- DM.toList t2n2i
, (n, i) <- DM.toList n2i , (n, i) <- DM.toList n2i
......
...@@ -25,41 +25,42 @@ Portability : POSIX ...@@ -25,41 +25,42 @@ Portability : POSIX
module Gargantext.Database.Lists where module Gargantext.Database.Lists where
import Control.Arrow (returnA) import Gargantext.API.Ngrams (TabType(..))
import Gargantext.Core.Types -- (NodePoly(..), NodeCorpus, ListId) import Gargantext.Core.Types -- (NodePoly(..), NodeCorpus, ListId)
import Gargantext.Core.Types.Individu (Username) import Gargantext.Database.Flow (FlowCmdM)
import Gargantext.Database.Config (nodeTypeId)
import Gargantext.Database.Schema.Node -- (HasNodeError, queryNodeTable)
import Gargantext.Database.Schema.User -- (queryUserTable)
import Gargantext.Database.Utils
import Gargantext.Prelude hiding (sum, head) import Gargantext.Prelude hiding (sum, head)
import Opaleye hiding (FromField) import Gargantext.Text.Metrics (Scored(..))
import Opaleye.Internal.QueryArr (Query)
import Prelude hiding (null, id, map, sum) import Prelude hiding (null, id, map, sum)
import qualified Data.Map as Map
import qualified Data.Vector as Vec
import qualified Gargantext.Database.Metrics as Metrics
-- | To get all lists of a user
-- /!\ lists of different types of corpora (Annuaire or Documents)
listsWith :: HasNodeError err => Username -> Cmd err [Maybe ListId]
listsWith u = runOpaQuery (selectLists u)
where
selectLists u = proc () -> do
(auth_user,nodes) <- listsWithJoin2 -< ()
restrict -< user_username auth_user .== (pgStrictText u)
restrict -< _node_typename nodes .== (toNullable $ pgInt4 $ nodeTypeId NodeList)
returnA -< _node_id nodes
listsWithJoin2 :: Query (UserRead, NodeReadNull)
listsWithJoin2 = leftJoin queryUserTable queryNodeTable cond12
where
cond12 (u,n) = user_id u .== _node_userId n
{- {-
listsWithJoin3 :: Query (NodeRead, (UserRead, NodeReadNull)) trainModel :: FlowCmdM env ServantErr m
listsWithJoin3 = leftJoin3 queryUserTable queryNodeTable queryNodeTable cond12 cond23 => Username -> m Score
where trainMode u = do
cond12 :: (NodeRead rootId <- _node_id <$> getRoot u
cond12 (u,n) = user_id u .== _node_userId n (id:ids) <- getCorporaWithParentId rootId
cond23 :: (NodeRead, (UserRead, NodeReadNull)) -> Column PGBool (s,_model) <- case length ids >0 of
cond23 (n1,(u,n2)) = (toNullable $ _node_id n1) .== _node_parentId n2 True -> grid 100 150 (getMetrics
--} False -> panic "Gargantext.Database.Lists.trainModel : not enough corpora"
--}
getMetrics :: FlowCmdM env err m
=> CorpusId -> Maybe ListId -> TabType -> Maybe Int
-> m (Map.Map ListType [Vec.Vector Double])
getMetrics cId maybeListId tabType maybeLimit = do
(ngs', scores) <- Metrics.getMetrics' cId maybeListId tabType maybeLimit
let
metrics = map (\(Scored t s1 s2) -> (listType t ngs', [Vec.fromList [s1,s2]])) scores
listType t m = maybe (panic errorMsg) fst $ Map.lookup t m
errorMsg = "API.Node.metrics: key absent"
{-
_ <- Learn.grid 100 110 metrics' metrics'
--}
pure $ Map.fromListWith (<>) metrics
{-|
Module : Gargantext.Database.Metrics
Description : Get Metrics from Storage (Database like)
Copyright : (c) CNRS, 2017-Present
License : AGPL + CECILL v3
Maintainer : team@gargantext.org
Stability : experimental
Portability : POSIX
Node API
-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE FlexibleContexts #-}
module Gargantext.Database.Metrics
where
import Data.Map (Map)
import Data.Text (Text)
import Gargantext.API.Ngrams (TabType(..), ngramsTypeFromTabType)
import Gargantext.API.Ngrams.Tools (filterListWithRoot, groupNodesByNgrams, Diagonal(..), getCoocByNgrams, mapTermListRoot, RootTerm)
import Gargantext.Core.Types (ListType(..), Limit)
import Gargantext.Database.Flow (FlowCmdM)
import Gargantext.Database.Metrics.NgramsByNode (getNodesByNgramsOnlyUser, getTficfWith)
import Gargantext.Database.Schema.Node (defaultList)
import Gargantext.Database.Types.Node (ListId, CorpusId, HyperdataCorpus)
import Gargantext.Database.Flow (getOrMkRootWithCorpus)
import Gargantext.Database.Config (userMaster)
import Gargantext.Prelude
import Gargantext.Text.Metrics (scored, Scored(..), localMetrics, toScored)
import qualified Data.Map as Map
import qualified Data.Vector.Storable as Vec
getMetrics' :: FlowCmdM env err m
=> CorpusId -> Maybe ListId -> TabType -> Maybe Limit
-> m (Map Text (ListType, Maybe Text), [Scored Text])
getMetrics' cId maybeListId tabType maybeLimit = do
(ngs, _, myCooc) <- getNgramsCooc cId maybeListId tabType maybeLimit
pure (ngs, scored myCooc)
getMetrics :: FlowCmdM env err m
=> CorpusId -> Maybe ListId -> TabType -> Maybe Limit
-> m (Map Text (ListType, Maybe Text), [Scored Text])
getMetrics cId maybeListId tabType maybeLimit = do
(ngs, ngs', metrics) <- getLocalMetrics cId maybeListId tabType maybeLimit
(_masterUserId, _masterRootId, masterCorpusId) <- getOrMkRootWithCorpus userMaster "" (Nothing :: Maybe HyperdataCorpus)
metrics' <- getTficfWith cId masterCorpusId (ngramsTypeFromTabType tabType) ngs'
pure (ngs , toScored [metrics, Map.fromList $ map (\(a,b) -> (a, Vec.fromList [fst b])) $ Map.toList metrics'])
getLocalMetrics :: (FlowCmdM env err m)
=> CorpusId -> Maybe ListId -> TabType -> Maybe Limit
-> m ( Map Text (ListType, Maybe Text)
, Map Text (Maybe RootTerm)
, Map Text (Vec.Vector Double)
)
getLocalMetrics cId maybeListId tabType maybeLimit = do
(ngs, ngs', myCooc) <- getNgramsCooc cId maybeListId tabType maybeLimit
pure (ngs, ngs', localMetrics myCooc)
getNgramsCooc :: (FlowCmdM env err m)
=> CorpusId -> Maybe ListId -> TabType -> Maybe Limit
-> m ( Map Text (ListType, Maybe Text)
, Map Text (Maybe RootTerm)
, Map (Text, Text) Int
)
getNgramsCooc cId maybeListId tabType maybeLimit = do
(ngs', ngs) <- getNgrams cId maybeListId tabType
let
take' Nothing xs = xs
take' (Just n) xs = take n xs
myCooc <- Map.filter (>1) <$> getCoocByNgrams (Diagonal True)
<$> groupNodesByNgrams ngs
<$> getNodesByNgramsOnlyUser cId (ngramsTypeFromTabType tabType)
(take' maybeLimit $ Map.keys ngs)
pure $ (ngs', ngs, myCooc)
getNgrams :: (FlowCmdM env err m)
=> CorpusId -> Maybe ListId -> TabType
-> m (Map Text (ListType, Maybe Text), Map Text (Maybe RootTerm))
getNgrams cId maybeListId tabType = do
lId <- case maybeListId of
Nothing -> defaultList cId
Just lId' -> pure lId'
lists <- mapTermListRoot [lId] (ngramsTypeFromTabType tabType)
let maybeSyn = Map.unions $ map (\t -> filterListWithRoot t lists)
[GraphTerm, StopTerm, CandidateTerm]
pure (lists, maybeSyn)
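Taken together, getNgrams feeds getNgramsCooc, which in turn feeds getLocalMetrics and getMetrics' above. A minimal usage sketch of the top of that pipeline from any FlowCmdM action (the Terms constructor of TabType is an assumption here, not confirmed by this diff):
-- Sketch only: score the default list of a corpus on its Terms tab.
scoredTerms :: FlowCmdM env err m => CorpusId -> m [Scored Text]
scoredTerms cId = snd <$> getMetrics' cId Nothing Terms Nothing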
...@@ -11,6 +11,8 @@ Count Ngrams by Context ...@@ -11,6 +11,8 @@ Count Ngrams by Context
-} -}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE Arrows #-}
{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE QuasiQuotes #-}
...@@ -18,18 +20,32 @@ Count Ngrams by Context ...@@ -18,18 +20,32 @@ Count Ngrams by Context
module Gargantext.Database.Metrics.Count where module Gargantext.Database.Metrics.Count where
import Control.Arrow (returnA)
import Control.Lens (view) import Control.Lens (view)
import Data.Map.Strict (Map, fromListWith, elems) import Data.Map.Strict (Map, fromListWith, elems)
import Data.Monoid (mempty)
import Data.Text (Text) import Data.Text (Text)
import Database.PostgreSQL.Simple.SqlQQ (sql) import Database.PostgreSQL.Simple.SqlQQ (sql)
import Gargantext.Database.Schema.Node (HasNodeError(..)) import Gargantext.API.Ngrams (NgramsElement, mkNgramsElement)
import Gargantext.Prelude
import Gargantext.Core.Types.Main (listTypeId, ListType(..)) import Gargantext.Core.Types.Main (listTypeId, ListType(..))
import Gargantext.Text.Metrics.Count (Coocs, coocOn) import Gargantext.Database.Access
import Gargantext.Database.Config (nodeTypeId)
import Gargantext.Database.Queries.Join (leftJoin4, leftJoin5, leftJoin3)
import Gargantext.Database.Schema.Ngrams
import Gargantext.Database.Schema.Ngrams (NgramsId, NgramsType(..), ngramsTypeId, Ngrams(..), NgramsIndexed(..), ngrams, ngramsTerms, fromNgramsTypeId)
import Gargantext.Database.Schema.Node
import Gargantext.Database.Schema.Node (HasNodeError(..))
import Gargantext.Database.Schema.NodeNgram
import Gargantext.Database.Schema.NodeNode
import Gargantext.Database.Schema.NodeNodeNgrams
import Gargantext.Database.Types.Node -- (ListId, CorpusId, NodeId)
import Gargantext.Database.Utils
import Gargantext.Database.Utils (Cmd, runPGSQuery) import Gargantext.Database.Utils (Cmd, runPGSQuery)
import Gargantext.Database.Types.Node (ListId, CorpusId) import Gargantext.Prelude hiding (sum)
import Gargantext.Database.Types.Node (NodeId) import Gargantext.Text.Metrics.Count (Coocs, coocOn)
import Gargantext.Database.Schema.Ngrams (NgramsId, NgramsType(..), ngramsTypeId, Ngrams(..), NgramsIndexed(..), ngrams, ngramsTerms) import Opaleye
import Safe (headMay)
import qualified Database.PostgreSQL.Simple as PGS
getCoocByDocDev :: HasNodeError err => CorpusId -> ListId -> Cmd err (Map ([Text], [Text]) Int) getCoocByDocDev :: HasNodeError err => CorpusId -> ListId -> Cmd err (Map ([Text], [Text]) Int)
getCoocByDocDev cId lId = coocOn (\n-> [ view ( ngrams . ngramsTerms) n]) <$> getNgramsByDoc cId lId getCoocByDocDev cId lId = coocOn (\n-> [ view ( ngrams . ngramsTerms) n]) <$> getNgramsByDoc cId lId
...@@ -49,7 +65,7 @@ getNgramsByDoc cId lId = ...@@ -49,7 +65,7 @@ getNgramsByDoc cId lId =
getNgramsByDocDb :: CorpusId -> ListId -> Cmd err [(NodeId, NgramsId, Text, Int)] getNgramsByDocDb :: CorpusId -> ListId -> Cmd err [(NodeId, NgramsId, Text, Int)]
getNgramsByDocDb cId lId = runPGSQuery query params getNgramsByDocDb cId lId = runPGSQuery query params
where where
params = (cId, lId, listTypeId GraphList, ngramsTypeId NgramsTerms) params = (cId, lId, listTypeId GraphTerm, ngramsTypeId NgramsTerms)
query = [sql| query = [sql|
-- TODO add CTE -- TODO add CTE
...@@ -65,3 +81,175 @@ getNgramsByDocDb cId lId = runPGSQuery query params ...@@ -65,3 +81,175 @@ getNgramsByDocDb cId lId = runPGSQuery query params
AND list.ngrams_type = ? -- NgramsTypeId AND list.ngrams_type = ? -- NgramsTypeId
|] |]
getNgramsByNode :: NodeId -> NgramsType -> Cmd err [[Text]]
getNgramsByNode nId nt = elems
<$> fromListWith (<>)
<$> map (\(i,t) -> (i,[t]))
<$> getNgramsByNodeNodeIndexed nId nt
-- | TODO add join with nodeNodeNgram (if it exists)
getNgramsByNodeNodeIndexed :: NodeId -> NgramsType -> Cmd err [(NodeId, Text)]
getNgramsByNodeNodeIndexed nId nt = runOpaQuery (select' nId)
where
select' nId' = proc () -> do
(ng,(nng,(nn,n))) <- getNgramsByNodeNodeIndexedJoin -< ()
restrict -< _node_id n .== toNullable (pgNodeId nId')
restrict -< nng_ngramsType nng .== toNullable (pgNgramsTypeId $ ngramsTypeId nt)
restrict -< nn_delete nn ./= (toNullable . pgBool) True
returnA -< (nng_node_id nng, ngrams_terms ng)
{-
getNgramsByNodeIndexed' :: NodeId -> NgramsType -> Cmd err [(NodeId, Maybe Text)]
getNgramsByNodeIndexed' nId nt = runOpaQuery (select' nId)
where
select' nId' = proc () -> do
(nnng,(ng,(nng,(_,n)))) <- getNgramsByNodeIndexedJoin5 -< ()
restrict -< _node_id n .== toNullable (pgNodeId nId')
restrict -< nng_ngramsType nng .== toNullable (pgNgramsTypeId $ ngramsTypeId nt)
let node_id' = ifThenElse (isNull $ toNullable $ nnng_node1_id nnng)
(nng_node_id nng)
(nnng_node2_id nng)
let t1 = ifThenElse (isNull $ toNullable $ nnng_node1_id nnng)
(ngrams_terms ng)
(nnng_terms nng)
returnA -< (n1, t1)
--}
getNgramsByNodeNodeIndexedJoin :: Query ( NgramsRead
, (NodeNgramReadNull
, (NodeNodeReadNull
, NodeReadNull
)
)
)
getNgramsByNodeNodeIndexedJoin = leftJoin4 queryNodeTable
queryNodeNodeTable
queryNodeNgramTable
queryNgramsTable
c1 c2 c3
where
c1 :: (NodeNodeRead, NodeRead) -> Column PGBool
c1 (nn,n) = nn_node1_id nn .== _node_id n
c2 :: ( NodeNgramRead
, (NodeNodeRead
, NodeReadNull
)
) -> Column PGBool
c2 (nng,(nn',_)) = (nng_node_id nng) .== nn_node2_id nn'
c3 :: ( NgramsRead
, ( NodeNgramRead
, ( NodeNodeReadNull
, NodeReadNull
)
)
) -> Column PGBool
c3 (ng,(nng',(_,_))) = (ngrams_id ng) .== nng_ngrams_id nng'
getNgramsByNodeNodeIndexedJoin5 :: Query ( NodeNodeNgramsRead
, (NgramsReadNull
, (NodeNgramReadNull
, (NodeNodeReadNull
, NodeReadNull
)
)
)
)
getNgramsByNodeNodeIndexedJoin5 = leftJoin5 queryNodeTable
queryNodeNodeTable
queryNodeNgramTable
queryNgramsTable
queryNodeNodeNgramsTable
c1 c2 c3 c4
where
c1 :: (NodeNodeRead, NodeRead) -> Column PGBool
c1 (nn,n) = nn_node1_id nn .== _node_id n
c2 :: ( NodeNgramRead
, (NodeNodeRead
, NodeReadNull
)
) -> Column PGBool
c2 (nng,(nn',_)) = (nng_node_id nng) .== nn_node2_id nn'
c3 :: ( NgramsRead
, ( NodeNgramRead
, ( NodeNodeReadNull
, NodeReadNull
)
)
) -> Column PGBool
c3 (ng,(nng',(_,_))) = (ngrams_id ng) .== nng_ngrams_id nng'
c4 :: ( NodeNodeNgramsRead
, (NgramsRead
, ( NodeNgramReadNull
, ( NodeNodeReadNull
, NodeReadNull
)
)
)
) -> Column PGBool
c4 (nnng,(_,(_,(nn,_)))) = (toNullable $ nnng_node1_id nnng) .== (nn_node1_id nn)
.&& (toNullable $ nnng_node2_id nnng) .== (nn_node2_id nn)
--}
--{-
getNgramsElementsWithParentNodeId :: NodeId -> Cmd err (Map NgramsType [NgramsElement])
getNgramsElementsWithParentNodeId nId = do
ns <- getNgramsWithParentNodeId nId
pure $ fromListWith (<>)
[ (maybe (panic "error") identity $ fromNgramsTypeId nt,
[mkNgramsElement ng CandidateTerm Nothing mempty])
| (_,(nt,ng)) <- ns
]
-------------------------------------------------------------------------
getNgramsWithParentNodeId :: NodeId -> Cmd err [(NodeId, (NgramsTypeId, Text))]
getNgramsWithParentNodeId nId = runOpaQuery (select nId)
where
select nId' = proc () -> do
(ng,(nng,n)) <- getNgramsWithParentNodeIdJoin -< ()
restrict -< _node_parentId n .== (toNullable $ pgNodeId nId')
restrict -< _node_typename n .== (toNullable $ pgInt4 $ nodeTypeId NodeDocument)
returnA -< (nng_node_id nng, (nng_ngramsType nng, ngrams_terms ng))
--}
getNgramsWithParentNodeIdJoin :: Query ( NgramsRead
, ( NodeNgramReadNull
, NodeReadNull
)
)
getNgramsWithParentNodeIdJoin = leftJoin3 queryNodeTable queryNodeNgramTable queryNgramsTable on1 on2
where
on1 :: (NodeNgramRead, NodeRead) -> Column PGBool
on1 (nng,n) = nng_node_id nng .== _node_id n
on2 :: (NgramsRead, (NodeNgramRead, NodeReadNull))-> Column PGBool
on2 (ng, (nng,_)) = ngrams_id ng .== nng_ngrams_id nng
countCorpusDocuments :: Roles -> Int -> Cmd err Int
countCorpusDocuments r cId = maybe 0 identity
<$> headMay
<$> map (\(PGS.Only n) -> n)
<$> runQuery' r cId
where
runQuery' RoleUser cId' = runPGSQuery
"SELECT count(*) from nodes_nodes nn WHERE nn.node1_id = ? AND nn.delete = False"
(PGS.Only cId')
runQuery' RoleMaster cId' = runPGSQuery
"SELECT count(*) from nodes n WHERE n.parent_id = ? AND n.typename = ?"
(cId', nodeTypeId NodeDocument)
{-|
Module : Gargantext.Database.Metrics.TFICF
Description : Ngram connection to the Database
Copyright : (c) CNRS, 2017-Present
License : AGPL + CECILL v3
Maintainer : team@gargantext.org
Stability : experimental
Portability : POSIX
TFICF, generalization of TFIDF
-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RankNTypes #-}
module Gargantext.Database.Metrics.TFICF where
import Database.PostgreSQL.Simple.SqlQQ (sql)
import qualified Database.PostgreSQL.Simple as DPS
import Safe (headMay)
import Gargantext.Text.Metrics.TFICF -- (tficf)
import Gargantext.Prelude
import Gargantext.Core.Types.Individu (UsernameMaster)
import Gargantext.Database.Utils (Cmd, runPGSQuery)
import Gargantext.Database.Types.Node (ListId, CorpusId, NodeType(..))
import Gargantext.Database.Config (nodeTypeId)
import Gargantext.Database.Schema.Ngrams (NgramsId, NgramsTerms, NgramsType, ngramsTypeId)
type OccGlobal = Double
type OccCorpus = Double
getTficf :: UsernameMaster -> CorpusId -> ListId -> NgramsType
-> Cmd err [Tficf]
getTficf u cId lId ngType = do
g <- getTficfGlobal u
c <- getTficfCorpus cId
ngs <- getTficfNgrams u cId lId ngType
pure $ map (\(nId, nTerms, wm, wn)
-> Tficf nId nTerms
(tficf (TficfCorpus wn (fromIntegral c))
(TficfLanguage wm (fromIntegral g))
)
) ngs
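getTficf above feeds a per-corpus weight and a global (reference-corpus) weight into tficf. A minimal sketch of the underlying ratio, assuming plain occurrence counts (the real tficf works on the TficfCorpus and TficfLanguage wrappers shown above):
-- Sketch only: relative frequency in the corpus divided by relative
-- frequency in the reference ("language") corpus.
tficfSketch :: Double -> Double -> Double -> Double -> Double
tficfSketch occCorpus sizeCorpus occGlobal sizeGlobal =
  (occCorpus / sizeCorpus) / (occGlobal / sizeGlobal)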
getTficfGlobal :: UsernameMaster -> Cmd err Int
getTficfGlobal u = maybe 0 identity <$> headMay
<$> map (\(DPS.Only n) -> n )
<$> runPGSQuery q p
where
p = (u, nodeTypeId NodeDocument)
q = [sql| SELECT count(*) from nodes n
JOIN auth_user a ON a.id = n.user_id
WHERE
a.username = ?
AND n.typename = ?
|]
getTficfCorpus :: CorpusId -> Cmd err Int
getTficfCorpus cId = maybe 0 identity <$> headMay
<$> map (\(DPS.Only n) -> n )
<$> runPGSQuery q p
where
p = (cId, nodeTypeId NodeDocument)
q = [sql| WITH input(corpusId, typename) AS ((VALUES(?::"int4",?::"int4")))
SELECT count(*) from nodes_nodes AS nn
JOIN nodes AS n ON n.id = nn.node2_id
JOIN input ON nn.node1_id = input.corpusId
WHERE n.typename = input.typename;
|]
getTficfNgrams :: UsernameMaster -> CorpusId -> ListId -> NgramsType
-> Cmd err [(NgramsId, NgramsTerms, OccGlobal, OccCorpus)]
getTficfNgrams u cId lId ngType = runPGSQuery queryTficf p
where
p = (u, nodeTypeId NodeList, nodeTypeId NodeDocument, ngramsTypeId ngType, cId, lId)
queryTficf :: DPS.Query
queryTficf = [sql|
-- TODO add CTE for groups
WITH input(masterUsername,typenameList,typenameDoc,ngramsTypeId,corpusId,listId)
AS ((VALUES(?::"text", ? :: "int4", ?::"int4", ?::"int4",?::"int4",?::"int4"))),
-- AS ((VALUES('gargantua'::"text", 5 :: "int4", 4::"int4", 4::"int4",1018::"int4",1019::"int4"))),
list_master AS (
SELECT n.id,n.name,n.user_id from nodes n
JOIN input ON n.typename = input.typenameList
JOIN auth_user a ON a.id = n.user_id
WHERE
a.username = input.masterUsername
),
ngrams_master AS (
SELECT ng.id, ng.terms, SUM(nng2.weight) AS weight FROM nodes_ngrams nng
JOIN list_master ON list_master.id = nng.node_id
JOIN nodes_ngrams nng2 ON nng2.ngrams_id = nng.ngrams_id
JOIN nodes n ON n.id = nng2.node_id
JOIN input ON input.typenameDoc = n.typename
JOIN ngrams ng ON ng.id = nng2.ngrams_id
WHERE
nng.ngrams_type = input.ngramsTypeId
-- AND n.hyperdata -> 'lang' = 'en'
GROUP BY ng.id,ng.terms
),
ngrams_user AS (
SELECT ng.id, ng.terms, SUM(nng2.weight) AS weight
FROM nodes_ngrams nng
JOIN list_master ON list_master.id = nng.node_id
JOIN nodes_ngrams nng2 ON nng2.ngrams_id = nng.ngrams_id
JOIN nodes_nodes nn ON nn.node2_id = nng2.node_id
JOIN ngrams ng ON ng.id = nng2.ngrams_id
JOIN input ON nn.node1_id = input.corpusId
WHERE
nng.ngrams_type = input.ngramsTypeId
-- AND n.hyperdata -> 'lang' = 'en'
GROUP BY ng.id,ng.terms
)
SELECT nu.id,nu.terms,SUM(nm.weight) wm,SUM(nu.weight) wu
FROM ngrams_user nu
JOIN ngrams_master nm ON nm.id = nu.id
WHERE
nm.weight > 1
AND
nu.weight > 1
GROUP BY nu.id,nu.terms
--ORDER BY wm DESC
--LIMIT 1000
|]
...@@ -48,7 +48,6 @@ the concatenation of the parameters defined by @hashParameters@. ...@@ -48,7 +48,6 @@ the concatenation of the parameters defined by @hashParameters@.
-} -}
------------------------------------------------------------------------ ------------------------------------------------------------------------
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE NoImplicitPrelude #-}
...@@ -62,15 +61,13 @@ module Gargantext.Database.Node.Document.Insert where ...@@ -62,15 +61,13 @@ module Gargantext.Database.Node.Document.Insert where
import Control.Lens (set, view) import Control.Lens (set, view)
import Control.Lens.Prism import Control.Lens.Prism
import Control.Lens.Cons import Control.Lens.Cons
import Data.Aeson (toJSON, Value) import Data.Aeson (toJSON)
import Data.Maybe (maybe) import Data.Maybe (maybe)
import Data.Text (Text) import Data.Text (Text)
import Data.Typeable (Typeable)
import Database.PostgreSQL.Simple (FromRow, Query, Only(..)) import Database.PostgreSQL.Simple (FromRow, Query, Only(..))
import Database.PostgreSQL.Simple.FromRow (fromRow, field) import Database.PostgreSQL.Simple.FromRow (fromRow, field)
import Database.PostgreSQL.Simple.SqlQQ import Database.PostgreSQL.Simple.SqlQQ
import Database.PostgreSQL.Simple.ToField (toField) import Database.PostgreSQL.Simple.ToField (toField, Action)
import Database.PostgreSQL.Simple.ToRow (ToRow(..))
import Database.PostgreSQL.Simple.Types (Values(..), QualifiedIdentifier(..)) import Database.PostgreSQL.Simple.Types (Values(..), QualifiedIdentifier(..))
import GHC.Generics (Generic) import GHC.Generics (Generic)
import Gargantext.Database.Config (nodeTypeId) import Gargantext.Database.Config (nodeTypeId)
...@@ -81,7 +78,7 @@ import Gargantext.Prelude ...@@ -81,7 +78,7 @@ import Gargantext.Prelude
import qualified Data.ByteString.Lazy.Char8 as DC (pack) import qualified Data.ByteString.Lazy.Char8 as DC (pack)
import qualified Data.Digest.Pure.SHA as SHA (sha256, showDigest) import qualified Data.Digest.Pure.SHA as SHA (sha256, showDigest)
import qualified Data.Text as DT (pack, unpack, concat, take) import qualified Data.Text as DT (pack, unpack, concat, take)
import Gargantext.Prelude.Utils (hash)
-- TODO : the import of Document constructor below does not work -- TODO : the import of Document constructor below does not work
-- import Gargantext.Database.Types.Node (Document) -- import Gargantext.Database.Types.Node (Document)
--import Gargantext.Database.Types.Node (docExample, hyperdataDocument, HyperdataDocument(..) --import Gargantext.Database.Types.Node (docExample, hyperdataDocument, HyperdataDocument(..)
...@@ -102,24 +99,39 @@ import Database.PostgreSQL.Simple (formatQuery) ...@@ -102,24 +99,39 @@ import Database.PostgreSQL.Simple (formatQuery)
--------------------------------------------------------------------------- ---------------------------------------------------------------------------
-- * Main Insert functions -- * Main Insert functions
-- ** Database configuration
-- The database administrator has to create a unique index with the following SQL command:
-- `create unique index on nodes (typename, parent_id, (hyperdata ->> 'uniqId'));`
-- | Insert Document main function -- | Insert Document main function
-- UserId : user who is inserting the documents -- UserId : user who is inserting the documents
-- ParentId : folder ID which is parent of the inserted documents -- ParentId : folder ID which is parent of the inserted documents
-- The database administrator has to create a unique index with the following SQL command:
-- `create unique index on nodes (typename, parent_id, (hyperdata ->> 'uniqId'));`
insertDb :: InsertDb a => UserId -> ParentId -> [a] -> Cmd err [ReturnId]
insertDb u p = runPGSQuery queryInsert . Only . Values fields . map (insertDb' u p)
where
fields = map (\t-> QualifiedIdentifier Nothing t) inputSqlTypes
class InsertDb a
where
insertDb' :: UserId -> ParentId -> a -> [Action]
data ToDbData = ToDbDocument HyperdataDocument | ToDbContact HyperdataContact
-- TODO-ACCESS: check uId CanInsertDoc pId && checkDocType nodeType instance InsertDb HyperdataDocument
-- TODO-EVENTS: InsertedNodes
insertDocuments :: UserId -> ParentId -> NodeType -> [ToDbData] -> Cmd err [ReturnId]
insertDocuments uId pId nodeType =
runPGSQuery queryInsert . Only . Values fields . prepare uId pId nodeType
where where
fields = map (\t-> QualifiedIdentifier Nothing t) inputSqlTypes insertDb' u p h = [ toField $ nodeTypeId NodeDocument
, toField u
, toField p
, toField $ maybe "No Title" (DT.take 255) (_hyperdataDocument_title h)
, (toField . toJSON) h
]
instance InsertDb HyperdataContact
where
insertDb' u p h = [ toField $ nodeTypeId NodeContact
, toField u
, toField p
, toField $ maybe "Contact" (DT.take 255) (Just "Name") -- (_hc_name h)
, (toField . toJSON) h
]
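A hedged sketch of a call site for the class-based insert above, combining it with the AddUniqId instances defined further down in this module (the helper name is hypothetical, not part of this commit):
-- Hypothetical helper: stamp the documents with their uniqueness hashes,
-- then insert them under the given user and parent folder.
insertDocs :: UserId -> ParentId -> [HyperdataDocument] -> Cmd err [ReturnId]
insertDocs uId pId docs = insertDb uId pId (map addUniqId docs)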
-- | Debug SQL function -- | Debug SQL function
-- --
...@@ -161,19 +173,6 @@ queryInsert = [sql| ...@@ -161,19 +173,6 @@ queryInsert = [sql|
JOIN nodes c USING (hyperdata); -- columns of unique index JOIN nodes c USING (hyperdata); -- columns of unique index
|] |]
prepare :: UserId -> ParentId -> NodeType -> [ToDbData] -> [InputData]
prepare uId pId nodeType = map (\h -> InputData tId uId pId (name h) (toJSON' h))
where
tId = nodeTypeId nodeType
toJSON' (ToDbDocument hd) = toJSON hd
toJSON' (ToDbContact hc) = toJSON hc
name h = DT.take 255 <$> maybe "No Title" identity $ f h
where
f (ToDbDocument hd) = _hyperdataDocument_title hd
f (ToDbContact _ ) = Just "Contact" -- TODO view FirstName . LastName
------------------------------------------------------------------------ ------------------------------------------------------------------------
-- * Main Types used -- * Main Types used
...@@ -190,67 +189,59 @@ data ReturnId = ReturnId { reInserted :: Bool -- ^ if the document is inserted ( ...@@ -190,67 +189,59 @@ data ReturnId = ReturnId { reInserted :: Bool -- ^ if the document is inserted (
instance FromRow ReturnId where instance FromRow ReturnId where
fromRow = ReturnId <$> field <*> field <*> field fromRow = ReturnId <$> field <*> field <*> field
-- ** Insert Types
data InputData = InputData { inTypenameId :: NodeTypeId
, inUserId :: UserId
, inParentId :: ParentId
, inName :: Text
, inHyper :: Value
} deriving (Show, Generic, Typeable)
instance ToRow InputData where
toRow inputData = [ toField (inTypenameId inputData)
, toField (inUserId inputData)
, toField (inParentId inputData)
, toField (inName inputData)
, toField (inHyper inputData)
]
--------------------------------------------------------------------------- ---------------------------------------------------------------------------
-- * Uniqueness of document definition -- * Uniqueness of document definition
addUniqIdsDoc :: HyperdataDocument -> HyperdataDocument class AddUniqId a
addUniqIdsDoc doc = set hyperdataDocument_uniqIdBdd (Just hashBdd)
$ set hyperdataDocument_uniqId (Just hash) doc
where where
hash = uniqId $ DT.concat $ map ($ doc) hashParametersDoc addUniqId :: a -> a
hashBdd = uniqId $ DT.concat $ map ($ doc) ([(\d -> maybe' (_hyperdataDocument_bdd d))] <> hashParametersDoc)
uniqId :: Text -> Text
uniqId = DT.pack . SHA.showDigest . SHA.sha256 . DC.pack . DT.unpack
hashParametersDoc :: [(HyperdataDocument -> Text)] instance AddUniqId HyperdataDocument
hashParametersDoc = [ \d -> maybe' (_hyperdataDocument_title d) where
, \d -> maybe' (_hyperdataDocument_abstract d) addUniqId = addUniqIdsDoc
, \d -> maybe' (_hyperdataDocument_source d) where
, \d -> maybe' (_hyperdataDocument_publication_date d) addUniqIdsDoc :: HyperdataDocument -> HyperdataDocument
] addUniqIdsDoc doc = set hyperdataDocument_uniqIdBdd (Just hashBdd)
--------------------------------------------------------------------------- $ set hyperdataDocument_uniqId (Just hashUni) doc
where
hashUni = hash $ DT.concat $ map ($ doc) hashParametersDoc
hashBdd = hash $ DT.concat $ map ($ doc) ([(\d -> maybeText (_hyperdataDocument_bdd d))] <> hashParametersDoc)
hashParametersDoc :: [(HyperdataDocument -> Text)]
hashParametersDoc = [ \d -> maybeText (_hyperdataDocument_title d)
, \d -> maybeText (_hyperdataDocument_abstract d)
, \d -> maybeText (_hyperdataDocument_source d)
, \d -> maybeText (_hyperdataDocument_publication_date d)
]
---------------------------------------------------------------------------
-- * Uniqueness of document definition -- * Uniqueness of document definition
-- TODO factorize with above (use the function below for tests) -- TODO factorize with above (use the function below for tests)
instance AddUniqId HyperdataContact
where
addUniqId = addUniqIdsContact
addUniqIdsContact :: HyperdataContact -> HyperdataContact addUniqIdsContact :: HyperdataContact -> HyperdataContact
addUniqIdsContact hc = set (hc_uniqIdBdd) (Just hashBdd) addUniqIdsContact hc = set (hc_uniqIdBdd) (Just hashBdd)
$ set (hc_uniqId) (Just hash) hc $ set (hc_uniqId ) (Just hashUni) hc
where where
hash = uniqId $ DT.concat $ map ($ hc) hashParametersContact hashUni = uniqId $ DT.concat $ map ($ hc) hashParametersContact
hashBdd = uniqId $ DT.concat $ map ($ hc) ([\d -> maybe' (view hc_bdd d)] <> hashParametersContact) hashBdd = uniqId $ DT.concat $ map ($ hc) ([\d -> maybeText (view hc_bdd d)] <> hashParametersContact)
uniqId :: Text -> Text uniqId :: Text -> Text
uniqId = DT.pack . SHA.showDigest . SHA.sha256 . DC.pack . DT.unpack uniqId = DT.pack . SHA.showDigest . SHA.sha256 . DC.pack . DT.unpack
-- | TODO add more hashparameters -- | TODO add more hashparameters
hashParametersContact :: [(HyperdataContact -> Text)] hashParametersContact :: [(HyperdataContact -> Text)]
hashParametersContact = [ \d -> maybe' $ view (hc_who . _Just . cw_firstName) d hashParametersContact = [ \d -> maybeText $ view (hc_who . _Just . cw_firstName) d
, \d -> maybe' $ view (hc_who . _Just . cw_lastName ) d , \d -> maybeText $ view (hc_who . _Just . cw_lastName ) d
, \d -> maybe' $ view (hc_where . _head . cw_touch . _Just . ct_mail) d , \d -> maybeText $ view (hc_where . _head . cw_touch . _Just . ct_mail) d
] ]
maybe' :: Maybe Text -> Text maybeText :: Maybe Text -> Text
maybe' = maybe (DT.pack "") identity maybeText = maybe (DT.pack "") identity
--------------------------------------------------------------------------- ---------------------------------------------------------------------------
...@@ -45,8 +45,9 @@ selectRoot :: Username -> Query NodeRead ...@@ -45,8 +45,9 @@ selectRoot :: Username -> Query NodeRead
selectRoot username = proc () -> do selectRoot username = proc () -> do
row <- queryNodeTable -< () row <- queryNodeTable -< ()
users <- queryUserTable -< () users <- queryUserTable -< ()
restrict -< _node_typename row .== (pgInt4 $ nodeTypeId NodeUser) restrict -< _node_typename row .== (pgInt4 $ nodeTypeId NodeUser)
restrict -< user_username users .== (pgStrictText username) restrict -< user_username users .== (pgStrictText username)
restrict -< _node_userId row .== (user_id users) restrict -< _node_userId row .== (user_id users)
returnA -< row returnA -< row
...@@ -25,15 +25,15 @@ Ngrams connection to the Database. ...@@ -25,15 +25,15 @@ Ngrams connection to the Database.
module Gargantext.Database.Schema.Ngrams where module Gargantext.Database.Schema.Ngrams where
import Data.Aeson (FromJSON, FromJSONKey)
import Control.Lens (makeLenses, view, over) import Control.Lens (makeLenses, view, over)
import Control.Monad (mzero) import Control.Monad (mzero)
import Data.Aeson import Data.Aeson
import Data.Aeson.Types (toJSONKeyText)
import Data.ByteString.Internal (ByteString) import Data.ByteString.Internal (ByteString)
import Data.Map (Map, fromList, lookup, fromListWith) import Data.Map (Map, fromList, lookup, fromListWith)
import Data.Profunctor.Product.TH (makeAdaptorAndInstance) import Data.Profunctor.Product.TH (makeAdaptorAndInstance)
import Data.Set (Set) import Data.Set (Set)
import Data.Text (Text, splitOn) import Data.Text (Text, splitOn, pack)
import Database.PostgreSQL.Simple ((:.)(..)) import Database.PostgreSQL.Simple ((:.)(..))
import Database.PostgreSQL.Simple.FromRow (fromRow, field) import Database.PostgreSQL.Simple.FromRow (fromRow, field)
import Database.PostgreSQL.Simple.SqlQQ (sql) import Database.PostgreSQL.Simple.SqlQQ (sql)
...@@ -56,8 +56,8 @@ import qualified Data.Set as DS ...@@ -56,8 +56,8 @@ import qualified Data.Set as DS
import qualified Database.PostgreSQL.Simple as PGS import qualified Database.PostgreSQL.Simple as PGS
type NgramsTerms = Text
type NgramsId = Int type NgramsId = Int
type NgramsTerms = Text
type Size = Int type Size = Int
data NgramsPoly id terms n = NgramsDb { ngrams_id :: id data NgramsPoly id terms n = NgramsDb { ngrams_id :: id
...@@ -105,9 +105,11 @@ data NgramsType = Authors | Institutes | Sources | NgramsTerms ...@@ -105,9 +105,11 @@ data NgramsType = Authors | Institutes | Sources | NgramsTerms
deriving (Eq, Show, Ord, Enum, Bounded, Generic) deriving (Eq, Show, Ord, Enum, Bounded, Generic)
instance FromJSON NgramsType instance FromJSON NgramsType
instance FromJSONKey NgramsType instance FromJSONKey NgramsType where
fromJSONKey = FromJSONKeyTextParser (parseJSON . String)
instance ToJSON NgramsType instance ToJSON NgramsType
instance ToJSONKey NgramsType instance ToJSONKey NgramsType where
toJSONKey = toJSONKeyText (pack . show)
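With these key instances, a Map keyed by NgramsType serialises its keys as the constructor names ("Authors", "Sources", ...). A standalone hedged sketch of the resulting round trip, assuming aeson and containers in scope:
-- Sketch only (not part of this module): keys survive a JSON round trip
-- as their constructor names, e.g. {"Authors":1}.
import qualified Data.Aeson as Aeson
import qualified Data.Map as Map

roundTripsNgramsKeys :: Bool
roundTripsNgramsKeys = Aeson.decode (Aeson.encode m) == Just m
  where
    m = Map.fromList [(Authors, 1 :: Int)]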
newtype NgramsTypeId = NgramsTypeId Int newtype NgramsTypeId = NgramsTypeId Int
deriving (Eq, Show, Ord, Num) deriving (Eq, Show, Ord, Num)
...@@ -121,6 +123,10 @@ instance FromField NgramsTypeId where ...@@ -121,6 +123,10 @@ instance FromField NgramsTypeId where
if (n :: Int) > 0 then return $ NgramsTypeId n if (n :: Int) > 0 then return $ NgramsTypeId n
else mzero else mzero
instance QueryRunnerColumnDefault (Nullable PGInt4) NgramsTypeId
where
queryRunnerColumnDefault = fieldQueryRunnerColumn
pgNgramsType :: NgramsType -> Column PGInt4 pgNgramsType :: NgramsType -> Column PGInt4
pgNgramsType = pgNgramsTypeId . ngramsTypeId pgNgramsType = pgNgramsTypeId . ngramsTypeId
...@@ -263,12 +269,12 @@ type NgramsTableParamMaster = NgramsTableParam ...@@ -263,12 +269,12 @@ type NgramsTableParamMaster = NgramsTableParam
data NgramsTableData = NgramsTableData { _ntd_id :: Int data NgramsTableData = NgramsTableData { _ntd_id :: Int
, _ntd_parent_id :: Maybe Int , _ntd_parent_id :: Maybe Int
, _ntd_terms :: Text , _ntd_terms :: Text
, _ntd_n :: Int , _ntd_n :: Int
, _ntd_listType :: Maybe ListType , _ntd_listType :: Maybe ListType
, _ntd_weight :: Double , _ntd_weight :: Double
} deriving (Show) } deriving (Show)
......
...@@ -95,6 +95,10 @@ instance FromField HyperdataList ...@@ -95,6 +95,10 @@ instance FromField HyperdataList
where where
fromField = fromField' fromField = fromField'
instance FromField HyperdataListModel
where
fromField = fromField'
instance FromField HyperdataGraph instance FromField HyperdataGraph
where where
fromField = fromField' fromField = fromField'
...@@ -102,6 +106,10 @@ instance FromField HyperdataGraph ...@@ -102,6 +106,10 @@ instance FromField HyperdataGraph
instance FromField HyperdataAnnuaire instance FromField HyperdataAnnuaire
where where
fromField = fromField' fromField = fromField'
instance FromField (NodeId, Text)
where
fromField = fromField'
------------------------------------------------------------------------ ------------------------------------------------------------------------
instance QueryRunnerColumnDefault PGJsonb HyperdataAny instance QueryRunnerColumnDefault PGJsonb HyperdataAny
where where
...@@ -127,6 +135,10 @@ instance QueryRunnerColumnDefault PGJsonb HyperdataList ...@@ -127,6 +135,10 @@ instance QueryRunnerColumnDefault PGJsonb HyperdataList
where where
queryRunnerColumnDefault = fieldQueryRunnerColumn queryRunnerColumnDefault = fieldQueryRunnerColumn
instance QueryRunnerColumnDefault PGJsonb HyperdataListModel
where
queryRunnerColumnDefault = fieldQueryRunnerColumn
instance QueryRunnerColumnDefault PGJsonb HyperdataGraph instance QueryRunnerColumnDefault PGJsonb HyperdataGraph
where where
queryRunnerColumnDefault = fieldQueryRunnerColumn queryRunnerColumnDefault = fieldQueryRunnerColumn
...@@ -147,6 +159,10 @@ instance QueryRunnerColumnDefault PGInt4 NodeId ...@@ -147,6 +159,10 @@ instance QueryRunnerColumnDefault PGInt4 NodeId
where where
queryRunnerColumnDefault = fieldQueryRunnerColumn queryRunnerColumnDefault = fieldQueryRunnerColumn
instance QueryRunnerColumnDefault (Nullable PGInt4) NodeId
where
queryRunnerColumnDefault = fieldQueryRunnerColumn
------------------------------------------------------------------------ ------------------------------------------------------------------------
-- WIP -- WIP
...@@ -323,6 +339,9 @@ getDocumentsWithParentId n = runOpaQuery $ selectNodesWith' n (Just NodeDocument ...@@ -323,6 +339,9 @@ getDocumentsWithParentId n = runOpaQuery $ selectNodesWith' n (Just NodeDocument
getListsWithParentId :: NodeId -> Cmd err [Node HyperdataList] getListsWithParentId :: NodeId -> Cmd err [Node HyperdataList]
getListsWithParentId n = runOpaQuery $ selectNodesWith' n (Just NodeList) getListsWithParentId n = runOpaQuery $ selectNodesWith' n (Just NodeList)
getListsModelWithParentId :: NodeId -> Cmd err [Node HyperdataListModel]
getListsModelWithParentId n = runOpaQuery $ selectNodesWith' n (Just NodeListModel)
getCorporaWithParentId :: NodeId -> Cmd err [Node HyperdataCorpus] getCorporaWithParentId :: NodeId -> Cmd err [Node HyperdataCorpus]
getCorporaWithParentId n = runOpaQuery $ selectNodesWith' n (Just NodeCorpus) getCorporaWithParentId n = runOpaQuery $ selectNodesWith' n (Just NodeCorpus)
...@@ -392,7 +411,6 @@ nodeAnnuaireW maybeName maybeAnnuaire pId = node NodeAnnuaire name annuaire (Jus ...@@ -392,7 +411,6 @@ nodeAnnuaireW maybeName maybeAnnuaire pId = node NodeAnnuaire name annuaire (Jus
where where
name = maybe "Annuaire" identity maybeName name = maybe "Annuaire" identity maybeName
annuaire = maybe defaultAnnuaire identity maybeAnnuaire annuaire = maybe defaultAnnuaire identity maybeAnnuaire
--------------------------
------------------------------------------------------------------------ ------------------------------------------------------------------------
arbitraryList :: HyperdataList arbitraryList :: HyperdataList
...@@ -404,6 +422,20 @@ nodeListW maybeName maybeList pId = node NodeList name list (Just pId) ...@@ -404,6 +422,20 @@ nodeListW maybeName maybeList pId = node NodeList name list (Just pId)
name = maybe "Listes" identity maybeName name = maybe "Listes" identity maybeName
list = maybe arbitraryList identity maybeList list = maybe arbitraryList identity maybeList
--------------------
arbitraryListModel :: HyperdataListModel
arbitraryListModel = HyperdataListModel (400,500) "data/models/test.model" (Just 0.83)
mkListModelNode :: HasNodeError err => ParentId -> UserId -> Cmd err [NodeId]
mkListModelNode p u = insertNodesR [nodeListModelW Nothing Nothing p u]
nodeListModelW :: Maybe Name -> Maybe HyperdataListModel -> ParentId -> UserId -> NodeWrite
nodeListModelW maybeName maybeListModel pId = node NodeListModel name list (Just pId)
where
name = maybe "List Model" identity maybeName
list = maybe arbitraryListModel identity maybeListModel
------------------------------------------------------------------------ ------------------------------------------------------------------------
arbitraryGraph :: HyperdataGraph arbitraryGraph :: HyperdataGraph
arbitraryGraph = HyperdataGraph (Just "Preferences") arbitraryGraph = HyperdataGraph (Just "Preferences")
...@@ -526,8 +558,26 @@ mkRoot uname uId = case uId > 0 of ...@@ -526,8 +558,26 @@ mkRoot uname uId = case uId > 0 of
False -> nodeError NegativeId False -> nodeError NegativeId
True -> mkNodeWithParent NodeUser Nothing uId uname True -> mkNodeWithParent NodeUser Nothing uId uname
mkCorpus :: Maybe Name -> Maybe HyperdataCorpus -> ParentId -> UserId -> Cmd err [CorpusId] -- |
mkCorpus n h p u = insertNodesR [nodeCorpusW n h p u] -- CorpusDocument is a corpus made from a set of documents
-- CorpusContact is a corpus made from a set of contacts (syn of Annuaire)
data CorpusType = CorpusDocument | CorpusContact
class MkCorpus a
where
mk :: Maybe Name -> Maybe a -> ParentId -> UserId -> Cmd err [NodeId]
instance MkCorpus HyperdataCorpus
where
mk n h p u = insertNodesR [nodeCorpusW n h p u]
instance MkCorpus HyperdataAnnuaire
where
mk n h p u = insertNodesR [nodeAnnuaireW n h p u]
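Both corpus flavours now go through the same entry point; a hedged usage sketch (the helper below is hypothetical and assumes OverloadedStrings for the names):
-- Hypothetical helper creating one corpus of each flavour under a parent node.
mkBothCorpora :: ParentId -> UserId -> Cmd err ([NodeId], [NodeId])
mkBothCorpora p u = (,)
  <$> mk (Just "My Corpus") (Nothing :: Maybe HyperdataCorpus)  p u
  <*> mk (Just "Contacts")  (Nothing :: Maybe HyperdataAnnuaire) p u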
getOrMkList :: HasNodeError err => ParentId -> UserId -> Cmd err ListId getOrMkList :: HasNodeError err => ParentId -> UserId -> Cmd err ListId
getOrMkList pId uId = getOrMkList pId uId =
...@@ -543,15 +593,13 @@ defaultList cId = ...@@ -543,15 +593,13 @@ defaultList cId =
mkList :: HasNodeError err => ParentId -> UserId -> Cmd err [NodeId] mkList :: HasNodeError err => ParentId -> UserId -> Cmd err [NodeId]
mkList p u = insertNodesR [nodeListW Nothing Nothing p u] mkList p u = insertNodesR [nodeListW Nothing Nothing p u]
mkGraph :: ParentId -> UserId -> Cmd err [GraphId] mkGraph :: ParentId -> UserId -> Cmd err [GraphId]
mkGraph p u = insertNodesR [nodeGraphW Nothing Nothing p u] mkGraph p u = insertNodesR [nodeGraphW Nothing Nothing p u]
mkDashboard :: ParentId -> UserId -> Cmd err [NodeId] mkDashboard :: ParentId -> UserId -> Cmd err [NodeId]
mkDashboard p u = insertNodesR [nodeDashboardW Nothing Nothing p u] mkDashboard p u = insertNodesR [nodeDashboardW Nothing Nothing p u]
mkAnnuaire :: ParentId -> UserId -> Cmd err [NodeId]
mkAnnuaire p u = insertNodesR [nodeAnnuaireW Nothing Nothing p u]
-- | Default CorpusId Master and ListId Master -- | Default CorpusId Master and ListId Master
pgNodeId :: NodeId -> Column PGInt4 pgNodeId :: NodeId -> Column PGInt4
......
...@@ -33,7 +33,6 @@ module Gargantext.Database.Schema.NodeNgram where ...@@ -33,7 +33,6 @@ module Gargantext.Database.Schema.NodeNgram where
import Data.ByteString (ByteString) import Data.ByteString (ByteString)
import Data.Text (Text) import Data.Text (Text)
import Debug.Trace (trace)
import Control.Lens.TH (makeLenses) import Control.Lens.TH (makeLenses)
import Control.Monad (void) import Control.Monad (void)
import Data.Profunctor.Product.TH (makeAdaptorAndInstance) import Data.Profunctor.Product.TH (makeAdaptorAndInstance)
...@@ -41,8 +40,7 @@ import Database.PostgreSQL.Simple.Types (Values(..), QualifiedIdentifier(..)) ...@@ -41,8 +40,7 @@ import Database.PostgreSQL.Simple.Types (Values(..), QualifiedIdentifier(..))
import Database.PostgreSQL.Simple.SqlQQ (sql) import Database.PostgreSQL.Simple.SqlQQ (sql)
import Gargantext.Database.Utils (mkCmd, Cmd, execPGSQuery) import Gargantext.Database.Utils (mkCmd, Cmd, execPGSQuery)
import Gargantext.Core.Types.Main (ListTypeId) import Gargantext.Core.Types.Main (ListTypeId)
import Gargantext.Database.Types.Node (NodeId, ListId, NodeType(..)) import Gargantext.Database.Types.Node (NodeId, ListId)
import Gargantext.Database.Config (nodeTypeId, userMaster)
import Gargantext.Database.Schema.Node (pgNodeId) import Gargantext.Database.Schema.Node (pgNodeId)
import Gargantext.Database.Schema.Ngrams (NgramsTypeId, pgNgramsTypeId) import Gargantext.Database.Schema.Ngrams (NgramsTypeId, pgNgramsTypeId)
import Gargantext.Prelude import Gargantext.Prelude
...@@ -52,13 +50,13 @@ import qualified Database.PostgreSQL.Simple as DPS ...@@ -52,13 +50,13 @@ import qualified Database.PostgreSQL.Simple as DPS
-- | TODO : remove id -- | TODO : remove id
data NodeNgramPoly node_id ngrams_id parent_id ngrams_type list_type weight data NodeNgramPoly node_id ngrams_id parent_id ngrams_type list_type weight
= NodeNgram { _nn_node_id :: node_id = NodeNgram { nng_node_id :: node_id
, _nn_ngrams_id :: ngrams_id , nng_ngrams_id :: ngrams_id
, _nn_parent_id :: parent_id , nng_parent_id :: parent_id
, _nn_ngramsType :: ngrams_type , nng_ngramsType :: ngrams_type
, _nn_listType :: list_type , nng_listType :: list_type
, _nn_weight :: weight , nng_weight :: weight
} deriving (Show) } deriving (Show)
type NodeNgramWrite = type NodeNgramWrite =
...@@ -106,12 +104,12 @@ makeLenses ''NodeNgramPoly ...@@ -106,12 +104,12 @@ makeLenses ''NodeNgramPoly
nodeNgramTable :: Table NodeNgramWrite NodeNgramRead nodeNgramTable :: Table NodeNgramWrite NodeNgramRead
nodeNgramTable = Table "nodes_ngrams" nodeNgramTable = Table "nodes_ngrams"
( pNodeNgram NodeNgram ( pNodeNgram NodeNgram
{ _nn_node_id = required "node_id" { nng_node_id = required "node_id"
, _nn_ngrams_id = required "ngrams_id" , nng_ngrams_id = required "ngrams_id"
, _nn_parent_id = optional "parent_id" , nng_parent_id = optional "parent_id"
, _nn_ngramsType = required "ngrams_type" , nng_ngramsType = required "ngrams_type"
, _nn_listType = required "list_type" , nng_listType = required "list_type"
, _nn_weight = required "weight" , nng_weight = required "weight"
} }
) )
...@@ -173,13 +171,14 @@ data Action = Del | Add ...@@ -173,13 +171,14 @@ data Action = Del | Add
type NgramsParent = Text type NgramsParent = Text
type NgramsChild = Text type NgramsChild = Text
{-
ngramsGroup :: Action -> ListId -> [(NgramsTypeId, NgramsParent, NgramsChild)] -> Cmd err () ngramsGroup :: Action -> ListId -> [(NgramsTypeId, NgramsParent, NgramsChild)] -> Cmd err ()
ngramsGroup _ _ [] = pure () ngramsGroup _ _ [] = pure ()
ngramsGroup a lid input = void $ trace (show input) $ execPGSQuery (ngramsGroupQuery a) (DPS.Only $ Values fields input') ngramsGroup a lid input = void $ trace (show input) $ execPGSQuery (ngramsGroupQuery a) (DPS.Only $ Values fields input')
where where
fields = map (\t-> QualifiedIdentifier Nothing t) ["int4","int4","text","int4","text","text"] fields = map (\t-> QualifiedIdentifier Nothing t) ["int4","int4","text","int4","text","text"]
input' = map (\(ntpid,p,c) -> (lid, nodeTypeId NodeList, userMaster, ntpid, p,c)) input input' = map (\(ntpid,p,c) -> (lid, nodeTypeId NodeList, userMaster, ntpid, p,c)) input
-}
ngramsGroupQuery :: Action -> DPS.Query ngramsGroupQuery :: Action -> DPS.Query
ngramsGroupQuery a = case a of ngramsGroupQuery a = case a of
...@@ -290,6 +289,8 @@ data NodeNgramsUpdate = NodeNgramsUpdate ...@@ -290,6 +289,8 @@ data NodeNgramsUpdate = NodeNgramsUpdate
-- TODO wrap these updates in a transaction. -- TODO wrap these updates in a transaction.
-- TODO-ACCESS: -- TODO-ACCESS:
-- * check userId CanUpdateNgrams userListId -- * check userId CanUpdateNgrams userListId
{-
updateNodeNgrams :: NodeNgramsUpdate -> Cmd err () updateNodeNgrams :: NodeNgramsUpdate -> Cmd err ()
updateNodeNgrams nnu = do updateNodeNgrams nnu = do
updateNodeNgrams' userListId $ _nnu_lists_update nnu updateNodeNgrams' userListId $ _nnu_lists_update nnu
...@@ -299,3 +300,4 @@ updateNodeNgrams nnu = do ...@@ -299,3 +300,4 @@ updateNodeNgrams nnu = do
ngramsGroup Add userListId $ _nnu_add_children nnu ngramsGroup Add userListId $ _nnu_add_children nnu
where where
userListId = _nnu_user_list_id nnu userListId = _nnu_user_list_id nnu
-}
...@@ -38,11 +38,11 @@ import Opaleye ...@@ -38,11 +38,11 @@ import Opaleye
data NodeNodePoly node1_id node2_id score fav del data NodeNodePoly node1_id node2_id score fav del
= NodeNode { nodeNode_node1_id :: node1_id = NodeNode { nn_node1_id :: node1_id
, nodeNode_node2_id :: node2_id , nn_node2_id :: node2_id
, nodeNode_score :: score , nn_score :: score
, nodeNode_favorite :: fav , nn_favorite :: fav
, nodeNode_delete :: del , nn_delete :: del
} deriving (Show) } deriving (Show)
type NodeNodeWrite = NodeNodePoly (Column (PGInt4)) type NodeNodeWrite = NodeNodePoly (Column (PGInt4))
...@@ -70,11 +70,11 @@ $(makeLensesWith abbreviatedFields ''NodeNodePoly) ...@@ -70,11 +70,11 @@ $(makeLensesWith abbreviatedFields ''NodeNodePoly)
nodeNodeTable :: Table NodeNodeWrite NodeNodeRead nodeNodeTable :: Table NodeNodeWrite NodeNodeRead
nodeNodeTable = Table "nodes_nodes" (pNodeNode nodeNodeTable = Table "nodes_nodes" (pNodeNode
NodeNode { nodeNode_node1_id = required "node1_id" NodeNode { nn_node1_id = required "node1_id"
, nodeNode_node2_id = required "node2_id" , nn_node2_id = required "node2_id"
, nodeNode_score = optional "score" , nn_score = optional "score"
, nodeNode_favorite = optional "favorite" , nn_favorite = optional "favorite"
, nodeNode_delete = optional "delete" , nn_delete = optional "delete"
} }
) )
......
{-| {-|
Module : Gargantext.Database.Schema.NodeNodeNgram Module : Gargantext.Database.Schema.NodeNodeNgrams
Description : TODO: remove this module and table in database Description : TODO: remove this module and table in database
Copyright : (c) CNRS, 2017-Present Copyright : (c) CNRS, 2017-Present
License : AGPL + CECILL v3 License : AGPL + CECILL v3
...@@ -18,7 +18,8 @@ Portability : POSIX ...@@ -18,7 +18,8 @@ Portability : POSIX
{-# OPTIONS_GHC -fno-warn-orphans #-} {-# OPTIONS_GHC -fno-warn-orphans #-}
module Gargantext.Database.Schema.NodeNodeNgram where module Gargantext.Database.Schema.NodeNodeNgrams
where
import Prelude import Prelude
import Data.Maybe (Maybe) import Data.Maybe (Maybe)
...@@ -29,55 +30,55 @@ import Gargantext.Database.Utils (Cmd, runOpaQuery) ...@@ -29,55 +30,55 @@ import Gargantext.Database.Utils (Cmd, runOpaQuery)
import Opaleye import Opaleye
data NodeNodeNgramPoly node1_id node2_id ngram_id score data NodeNodeNgramsPoly node1_id node2_id ngram_id score
= NodeNodeNgram { nodeNodeNgram_node1_id :: node1_id = NodeNodeNgrams { nnng_node1_id :: node1_id
, nodeNodeNgram_node2_id :: node2_id , nnng_node2_id :: node2_id
, nodeNodeNgram_ngram_id :: ngram_id , nnng_ngrams_id :: ngram_id
, nodeNodeNgram_score :: score , nnng_score :: score
} deriving (Show) } deriving (Show)
type NodeNodeNgramWrite = NodeNodeNgramPoly (Column PGInt4 ) type NodeNodeNgramsWrite = NodeNodeNgramsPoly (Column PGInt4 )
(Column PGInt4 ) (Column PGInt4 )
(Column PGInt4 ) (Column PGInt4 )
(Maybe (Column PGFloat8)) (Maybe (Column PGFloat8))
type NodeNodeNgramRead = NodeNodeNgramPoly (Column PGInt4 ) type NodeNodeNgramsRead = NodeNodeNgramsPoly (Column PGInt4 )
(Column PGInt4 ) (Column PGInt4 )
(Column PGInt4 ) (Column PGInt4 )
(Column PGFloat8) (Column PGFloat8)
type NodeNodeNgramReadNull = NodeNodeNgramPoly (Column (Nullable PGInt4 )) type NodeNodeNgramsReadNull = NodeNodeNgramsPoly (Column (Nullable PGInt4 ))
(Column (Nullable PGInt4 )) (Column (Nullable PGInt4 ))
(Column (Nullable PGInt4 )) (Column (Nullable PGInt4 ))
(Column (Nullable PGFloat8)) (Column (Nullable PGFloat8))
type NodeNodeNgram = NodeNodeNgramPoly Int type NodeNodeNgrams = NodeNodeNgramsPoly Int
Int Int
Int Int
(Maybe Double) (Maybe Double)
$(makeAdaptorAndInstance "pNodeNodeNgram" ''NodeNodeNgramPoly) $(makeAdaptorAndInstance "pNodeNodeNgrams" ''NodeNodeNgramsPoly)
$(makeLensesWith abbreviatedFields ''NodeNodeNgramPoly) $(makeLensesWith abbreviatedFields ''NodeNodeNgramsPoly)
nodeNodeNgramTable :: Table NodeNodeNgramWrite NodeNodeNgramRead nodeNodeNgramsTable :: Table NodeNodeNgramsWrite NodeNodeNgramsRead
nodeNodeNgramTable = Table "nodes_nodes_ngrams" nodeNodeNgramsTable = Table "nodes_nodes_ngrams"
( pNodeNodeNgram NodeNodeNgram ( pNodeNodeNgrams NodeNodeNgrams
{ nodeNodeNgram_node1_id = required "node1_id" { nnng_node1_id = required "node1_id"
, nodeNodeNgram_node2_id = required "node2_id" , nnng_node2_id = required "node2_id"
, nodeNodeNgram_ngram_id = required "ngram_id" , nnng_ngrams_id = required "ngram_id"
, nodeNodeNgram_score = optional "score" , nnng_score = optional "score"
} }
) )
queryNodeNodeNgramTable :: Query NodeNodeNgramRead queryNodeNodeNgramsTable :: Query NodeNodeNgramsRead
queryNodeNodeNgramTable = queryTable nodeNodeNgramTable queryNodeNodeNgramsTable = queryTable nodeNodeNgramsTable
-- | not optimized (get all ngrams without filters) -- | not optimized (get all ngrams without filters)
nodeNodeNgrams :: Cmd err [NodeNodeNgram] nodeNodeNgrams :: Cmd err [NodeNodeNgrams]
nodeNodeNgrams = runOpaQuery queryNodeNodeNgramTable nodeNodeNgrams = runOpaQuery queryNodeNodeNgramsTable
instance QueryRunnerColumnDefault PGFloat8 (Maybe Double) where instance QueryRunnerColumnDefault PGFloat8 (Maybe Double) where
queryRunnerColumnDefault = fieldQueryRunnerColumn queryRunnerColumnDefault = fieldQueryRunnerColumn
{-|
Module : Gargantext.Database.Schema.NodesNgramsRepo
Description : NodeNgram for Ngram indexation or Lists
Copyright : (c) CNRS, 2017-Present
License : AGPL + CECILL v3
Maintainer : team@gargantext.org
Stability : experimental
Portability : POSIX
-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE Arrows #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
module Gargantext.Database.Schema.NodesNgramsRepo where
import Control.Arrow (returnA)
import Control.Lens.TH (makeLenses)
import Data.Map.Strict.Patch (PatchMap)
import Data.Profunctor.Product.TH (makeAdaptorAndInstance)
import Gargantext.API.Ngrams (NgramsStatePatch, NgramsTablePatch)
import Gargantext.Database.Schema.Ngrams (NgramsType)
import Gargantext.Database.Types.Node (NodeId)
import Gargantext.Database.Utils (mkCmd, Cmd, runOpaQuery)
import Gargantext.Prelude
import Opaleye
data RepoDbPoly version patches
= RepoDbNgrams { _rdp_version :: version
, _rdp_patches :: patches
} deriving (Show)
type RepoDbWrite
= RepoDbPoly (Column PGInt4)
(Column PGJsonb)
type RepoDbRead
= RepoDbPoly (Column PGInt4)
(Column PGJsonb)
type RepoDbNgrams = RepoDbPoly Int NgramsStatePatch
$(makeAdaptorAndInstance "pRepoDbNgrams" ''RepoDbPoly)
makeLenses ''RepoDbPoly
instance QueryRunnerColumnDefault PGJsonb
(PatchMap NgramsType
(PatchMap NodeId NgramsTablePatch))
where
queryRunnerColumnDefault = fieldQueryRunnerColumn
-- type Re
repoTable :: Table RepoDbWrite RepoDbRead
repoTable = Table "nodes_ngrams_repo"
(pRepoDbNgrams RepoDbNgrams
{ _rdp_version = required "version"
, _rdp_patches = required "patches"
}
)
selectRepo :: Cmd err [RepoDbNgrams]
selectRepo = runOpaQuery selectPatches
selectPatches :: Query RepoDbRead
selectPatches = proc () -> do
repos <- queryTable repoTable -< ()
returnA -< repos
insertRepos :: [NgramsStatePatch] -> Cmd err Int64
insertRepos ns = mkCmd $ \conn -> runInsertMany conn repoTable (toWrite ns)
where
toWrite :: [NgramsStatePatch] -> [RepoDbWrite]
toWrite = undefined
--ns' = map (\(RepoDbNgrams v ps) -> RepoDbWrite (pgInt4 v) (pgJSONB ps)) ns
...@@ -33,11 +33,13 @@ import Data.Profunctor.Product.TH (makeAdaptorAndInstance) ...@@ -33,11 +33,13 @@ import Data.Profunctor.Product.TH (makeAdaptorAndInstance)
import Data.Text (Text) import Data.Text (Text)
import Data.Time (UTCTime) import Data.Time (UTCTime)
import GHC.Show(Show(..)) import GHC.Show(Show(..))
import Gargantext.Core.Types.Individu (Username) import Gargantext.Core.Types.Individu (Username, arbitraryUsername)
import Gargantext.Database.Utils import Gargantext.Database.Utils
import Gargantext.Prelude import Gargantext.Prelude
import Opaleye import Opaleye
------------------------------------------------------------------------
------------------------------------------------------------------------ ------------------------------------------------------------------------
type UserId = Int type UserId = Int
...@@ -106,21 +108,17 @@ userTable = Table "auth_user" (pUser User { user_id = optional "id" ...@@ -106,21 +108,17 @@ userTable = Table "auth_user" (pUser User { user_id = optional "id"
insertUsers :: [UserWrite] -> Cmd err Int64 insertUsers :: [UserWrite] -> Cmd err Int64
insertUsers us = mkCmd $ \c -> runInsertMany c userTable us insertUsers us = mkCmd $ \c -> runInsertMany c userTable us
gargantuaUser :: UserWrite
gargantuaUser = User (Nothing) (pgStrictText "password") gargantextUser :: Username -> UserWrite
(Nothing) (pgBool True) (pgStrictText "gargantua") gargantextUser u = User (Nothing) (pgStrictText "password")
(Nothing) (pgBool True) (pgStrictText u)
(pgStrictText "first_name") (pgStrictText "first_name")
(pgStrictText "last_name") (pgStrictText "last_name")
(pgStrictText "e@mail") (pgStrictText "e@mail")
(pgBool True) (pgBool True) (Nothing) (pgBool True) (pgBool True) (Nothing)
simpleUser :: UserWrite insertUsersDemo :: Cmd err Int64
simpleUser = User (Nothing) (pgStrictText "password") insertUsersDemo = insertUsers $ map (\u -> gargantextUser u) arbitraryUsername
(Nothing) (pgBool False) (pgStrictText "user1")
(pgStrictText "first_name")
(pgStrictText "last_name")
(pgStrictText "e@mail")
(pgBool False) (pgBool True) (Nothing)
------------------------------------------------------------------ ------------------------------------------------------------------
......
#!/bin/bash
DB="gargandbV5"
psql -c "drop database IF EXISTS \"${DB}\""
createdb "${DB}"
psql "${DB}" < schema.sql
CREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog; CREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog;
CREATE EXTENSION IF NOT EXISTS tsm_system_rows;
COMMENT ON EXTENSION plpgsql IS 'PL/pgSQL procedural language'; COMMENT ON EXTENSION plpgsql IS 'PL/pgSQL procedural language';
-- CREATE USER WITH ... -- CREATE USER WITH ...
...@@ -77,20 +79,20 @@ ALTER TABLE public.nodes_ngrams_repo OWNER TO gargantua; ...@@ -77,20 +79,20 @@ ALTER TABLE public.nodes_ngrams_repo OWNER TO gargantua;
-- --
-- --
-- TODO: delete this table -- TODO: delete this table
CREATE TABLE public.nodes_ngrams_ngrams ( --CREATE TABLE public.nodes_ngrams_ngrams (
node_id integer NOT NULL REFERENCES public.nodes(id) ON DELETE CASCADE, -- node_id integer NOT NULL REFERENCES public.nodes(id) ON DELETE CASCADE,
ngram1_id integer NOT NULL REFERENCES public.ngrams(id) ON DELETE CASCADE, -- ngram1_id integer NOT NULL REFERENCES public.ngrams(id) ON DELETE CASCADE,
ngram2_id integer NOT NULL REFERENCES public.ngrams(id) ON DELETE CASCADE, -- ngram2_id integer NOT NULL REFERENCES public.ngrams(id) ON DELETE CASCADE,
weight double precision, -- weight double precision,
PRIMARY KEY (node_id,ngram1_id,ngram2_id) -- PRIMARY KEY (node_id,ngram1_id,ngram2_id)
); --);
--
ALTER TABLE public.nodes_ngrams_ngrams OWNER TO gargantua; --ALTER TABLE public.nodes_ngrams_ngrams OWNER TO gargantua;
--------------------------------------------------------- ---------------------------------------------------------
CREATE TABLE public.nodes_nodes ( CREATE TABLE public.nodes_nodes (
node1_id integer NOT NULL, node1_id integer NOT NULL REFERENCES public.nodes(id) ON DELETE CASCADE,
node2_id integer NOT NULL, node2_id integer NOT NULL REFERENCES public.nodes(id) ON DELETE CASCADE,
score real, score real,
favorite boolean, favorite boolean,
delete boolean, delete boolean,
...@@ -110,33 +112,32 @@ CREATE TABLE public.rights ( ...@@ -110,33 +112,32 @@ CREATE TABLE public.rights (
); );
ALTER TABLE public.rights OWNER TO gargantua; ALTER TABLE public.rights OWNER TO gargantua;
CREATE INDEX rights_userId_nodeId ON public.rights USING btree (user_id,node_id);
------------------------------------------------------------ ------------------------------------------------------------
-- INDEXES -- INDEXES
CREATE UNIQUE INDEX ON public.auth_user(username);
CREATE INDEX auth_user_username_like ON public.auth_user USING btree (username varchar_pattern_ops); CREATE INDEX ON public.auth_user USING btree (username varchar_pattern_ops);
CREATE UNIQUE INDEX ON public.auth_user USING btree (username);
--CREATE INDEX ix_nodes_typename ON public.nodes USING btree (typename); CREATE INDEX ON public.rights USING btree (user_id,node_id);
--CREATE INDEX ngrams_n_idx ON public.ngrams USING btree (n);
CREATE INDEX nodes_hyperdata_idx ON public.nodes USING gin (hyperdata);
CREATE UNIQUE INDEX nodes_expr_idx ON public.nodes USING btree (((hyperdata ->> 'uniqId'::text)));
CREATE UNIQUE INDEX nodes_expr_idx2 ON public.nodes USING btree (((hyperdata ->> 'uniqIdBdd'::text))); CREATE INDEX ON public.nodes USING gin (hyperdata);
CREATE UNIQUE INDEX nodes_typename_parent_id_expr_idx ON public.nodes USING btree (typename, parent_id, ((hyperdata ->> 'uniqId'::text))); CREATE INDEX ON public.nodes USING btree (user_id, typename, parent_id);
CREATE INDEX nodes_user_id_typename_parent_id_idx ON public.nodes USING btree (user_id, typename, parent_id); CREATE UNIQUE INDEX ON public.nodes USING btree (((hyperdata ->> 'uniqId'::text)));
CREATE UNIQUE INDEX ON public.nodes USING btree (((hyperdata ->> 'uniqIdBdd'::text)));
CREATE UNIQUE INDEX ON public.nodes USING btree (typename, parent_id, ((hyperdata ->> 'uniqId'::text)));
CREATE UNIQUE INDEX ON public.ngrams(terms); CREATE UNIQUE INDEX ON public.ngrams (terms); -- TEST GIN
--CREATE UNIQUE INDEX ON public.ngrams(terms,n);
CREATE INDEX ON public.nodes_ngrams USING btree (ngrams_id);
CREATE UNIQUE INDEX ON public.nodes_ngrams USING btree (node_id,ngrams_id); CREATE UNIQUE INDEX ON public.nodes_ngrams USING btree (node_id,ngrams_id);
CREATE INDEX nodes_ngrams_ngrams_id_idx ON public.nodes_ngrams USING btree (ngrams_id);
CREATE INDEX nodes_ngrams_ngrams_node_id_idx ON public.nodes_ngrams_ngrams USING btree (node_id);
CREATE UNIQUE INDEX ON public.nodes_ngrams USING btree (node_id,ngrams_id,ngrams_type); CREATE UNIQUE INDEX ON public.nodes_ngrams USING btree (node_id,ngrams_id,ngrams_type);
CREATE INDEX nodes_nodes_delete ON public.nodes_nodes USING btree (node1_id, node2_id, delete);
CREATE UNIQUE INDEX nodes_nodes_node1_id_node2_id_idx ON public.nodes_nodes USING btree (node1_id, node2_id); CREATE INDEX ON public.nodes_nodes USING btree (node1_id, node2_id, delete);
CREATE UNIQUE INDEX ON public.nodes_nodes USING btree (node1_id, node2_id);
--CREATE INDEX ON public.nodes_nodes_ngrams USING btree (node1_id,nod2_id);
-- TRIGGERS -- TRIGGERS
-- TODO use haskell-postgresql-simple to create this function -- TODO use haskell-postgresql-simple to create this function
...@@ -166,7 +167,23 @@ ALTER FUNCTION public.search_update() OWNER TO gargantua; ...@@ -166,7 +167,23 @@ ALTER FUNCTION public.search_update() OWNER TO gargantua;
CREATE TRIGGER search_update_trigger BEFORE INSERT OR UPDATE ON nodes FOR EACH ROW EXECUTE PROCEDURE search_update(); CREATE TRIGGER search_update_trigger BEFORE INSERT OR UPDATE ON nodes FOR EACH ROW EXECUTE PROCEDURE search_update();
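The TODO above asks for this DDL to be issued from Haskell; a minimal sketch with postgresql-simple (the helper name and the OverloadedStrings query literal are assumptions, not part of this merge):

import Data.Int (Int64)
import qualified Database.PostgreSQL.Simple as PGS

-- Hypothetical helper: installs the trigger above from Haskell.
-- PGS.execute_ :: PGS.Connection -> PGS.Query -> IO Int64 (affected rows, 0 for DDL).
-- (OverloadedStrings assumed for the query literal.)
createSearchTrigger :: PGS.Connection -> IO Int64
createSearchTrigger conn = PGS.execute_ conn
  "CREATE TRIGGER search_update_trigger BEFORE INSERT OR UPDATE ON nodes \
  \FOR EACH ROW EXECUTE PROCEDURE search_update();"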
-- Ngrams Full DB Extraction Optim
-- TODO remove hard-coded parameter
CREATE OR REPLACE function node_pos(int, int) returns bigint
AS 'SELECT count(id) from nodes
WHERE id < $1
AND typename = $2
'
LANGUAGE SQL immutable;
--drop index node_by_pos;
create index node_by_pos on nodes using btree(node_pos(id,typename));
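For reference, a hedged sketch of how this positional index might be queried from the Haskell side, assuming the runPGSQuery helper from Gargantext.Database.Utils (changed later in this merge) and that 4 is the NodeDocument typename id:

import qualified Database.PostgreSQL.Simple as PGS
import Gargantext.Database.Utils (Cmd, runPGSQuery)

-- Hypothetical paging query over documents by position; the btree expression
-- index node_by_pos above lets the BETWEEN range avoid a sequential scan.
-- (OverloadedStrings assumed for the query literal.)
docIdsBetweenPos :: Int -> Int -> Cmd err [PGS.Only Int]
docIdsBetweenPos start end = runPGSQuery
  "SELECT id FROM nodes WHERE typename = ? AND node_pos(id, typename) BETWEEN ? AND ?"
  (4 :: Int, start, end)  -- 4: assumed NodeDocument typename id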
-- Initialize index with already existing data -- Initialize index with already existing data
UPDATE nodes SET hyperdata = hyperdata; UPDATE nodes SET hyperdata = hyperdata;
...@@ -64,7 +64,7 @@ searchInCorpus cId q o l order = runOpaQuery (filterWith o l order $ queryInCorp ...@@ -64,7 +64,7 @@ searchInCorpus cId q o l order = runOpaQuery (filterWith o l order $ queryInCorp
queryInCorpus :: CorpusId -> Text -> O.Query FacetDocRead queryInCorpus :: CorpusId -> Text -> O.Query FacetDocRead
queryInCorpus cId q = proc () -> do queryInCorpus cId q = proc () -> do
(n, nn) <- joinInCorpus -< () (n, nn) <- joinInCorpus -< ()
restrict -< ( nodeNode_node1_id nn) .== (toNullable $ pgNodeId cId) restrict -< ( nn_node1_id nn) .== (toNullable $ pgNodeId cId)
restrict -< (_ns_search n) @@ (pgTSQuery (unpack q)) restrict -< (_ns_search n) @@ (pgTSQuery (unpack q))
restrict -< (_ns_typename n) .== (pgInt4 $ nodeTypeId NodeDocument) restrict -< (_ns_typename n) .== (pgInt4 $ nodeTypeId NodeDocument)
returnA -< FacetDoc (_ns_id n) (_ns_date n) (_ns_name n) (_ns_hyperdata n) (pgBool True) (pgInt4 1) returnA -< FacetDoc (_ns_id n) (_ns_date n) (_ns_name n) (_ns_hyperdata n) (pgBool True) (pgInt4 1)
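A minimal sketch of running this arrow on its own, without the ordering and pagination applied by searchInCorpus above (types and helpers taken from this diff; the wrapper itself is hypothetical):

-- Hypothetical wrapper: run the Opaleye arrow directly and collect FacetDoc rows.
searchDocsIn :: CorpusId -> Text -> Cmd err [FacetDoc]
searchDocsIn cId q = runOpaQuery (queryInCorpus cId q)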
...@@ -73,7 +73,7 @@ joinInCorpus :: O.Query (NodeSearchRead, NodeNodeReadNull) ...@@ -73,7 +73,7 @@ joinInCorpus :: O.Query (NodeSearchRead, NodeNodeReadNull)
joinInCorpus = leftJoin queryNodeSearchTable queryNodeNodeTable cond joinInCorpus = leftJoin queryNodeSearchTable queryNodeNodeTable cond
where where
cond :: (NodeSearchRead, NodeNodeRead) -> Column PGBool cond :: (NodeSearchRead, NodeNodeRead) -> Column PGBool
cond (n, nn) = nodeNode_node2_id nn .== _ns_id n cond (n, nn) = nn_node2_id nn .== _ns_id n
------------------------------------------------------------------------ ------------------------------------------------------------------------
type AuthorName = Text type AuthorName = Text
...@@ -103,8 +103,8 @@ queryInCorpusWithContacts cId q _ _ _ = proc () -> do ...@@ -103,8 +103,8 @@ queryInCorpusWithContacts cId q _ _ _ = proc () -> do
(docs, (corpusDoc, (docNgrams, (ngrams', (_, contacts))))) <- joinInCorpusWithContacts -< () (docs, (corpusDoc, (docNgrams, (ngrams', (_, contacts))))) <- joinInCorpusWithContacts -< ()
restrict -< (_ns_search docs) @@ (pgTSQuery $ unpack q ) restrict -< (_ns_search docs) @@ (pgTSQuery $ unpack q )
restrict -< (_ns_typename docs) .== (pgInt4 $ nodeTypeId NodeDocument) restrict -< (_ns_typename docs) .== (pgInt4 $ nodeTypeId NodeDocument)
restrict -< (nodeNode_node1_id corpusDoc) .== (toNullable $ pgNodeId cId) restrict -< (nn_node1_id corpusDoc) .== (toNullable $ pgNodeId cId)
restrict -< (_nn_listType docNgrams) .== (toNullable $ pgNgramsType Authors) restrict -< (nng_listType docNgrams) .== (toNullable $ pgNgramsType Authors)
restrict -< (_node_typename contacts) .== (toNullable $ pgInt4 $ nodeTypeId NodeContact) restrict -< (_node_typename contacts) .== (toNullable $ pgInt4 $ nodeTypeId NodeContact)
-- let contact_id = ifThenElse (isNull $ _node_id contacts) (toNullable $ pgInt4 0) (_node_id contacts) -- let contact_id = ifThenElse (isNull $ _node_id contacts) (toNullable $ pgInt4 0) (_node_id contacts)
returnA -< FacetPaired (_ns_id docs) (_ns_date docs) (_ns_hyperdata docs) (pgInt4 0) (Pair (_node_id contacts) (ngrams_terms ngrams')) returnA -< FacetPaired (_ns_id docs) (_ns_date docs) (_ns_hyperdata docs) (pgInt4 0) (Pair (_node_id contacts) (ngrams_terms ngrams'))
...@@ -113,19 +113,19 @@ joinInCorpusWithContacts :: O.Query (NodeSearchRead, (NodeNodeReadNull, (NodeNgr ...@@ -113,19 +113,19 @@ joinInCorpusWithContacts :: O.Query (NodeSearchRead, (NodeNodeReadNull, (NodeNgr
joinInCorpusWithContacts = leftJoin6 queryNodeTable queryNodeNgramTable queryNgramsTable queryNodeNgramTable queryNodeNodeTable queryNodeSearchTable cond12 cond23 cond34 cond45 cond56 joinInCorpusWithContacts = leftJoin6 queryNodeTable queryNodeNgramTable queryNgramsTable queryNodeNgramTable queryNodeNodeTable queryNodeSearchTable cond12 cond23 cond34 cond45 cond56
where where
cond12 :: (NodeNgramRead, NodeRead) -> Column PGBool cond12 :: (NodeNgramRead, NodeRead) -> Column PGBool
cond12 (ng3, n2) = _node_id n2 .== _nn_node_id ng3 cond12 (ng3, n2) = _node_id n2 .== nng_node_id ng3
--------- ---------
cond23 :: (NgramsRead, (NodeNgramRead, NodeReadNull)) -> Column PGBool cond23 :: (NgramsRead, (NodeNgramRead, NodeReadNull)) -> Column PGBool
cond23 (ng2, (nng2, _)) = _nn_ngrams_id nng2 .== ngrams_id ng2 cond23 (ng2, (nng2, _)) = nng_ngrams_id nng2 .== ngrams_id ng2
cond34 :: (NodeNgramRead, (NgramsRead, (NodeNgramReadNull, NodeReadNull))) -> Column PGBool cond34 :: (NodeNgramRead, (NgramsRead, (NodeNgramReadNull, NodeReadNull))) -> Column PGBool
cond34 (nng, (ng, (_,_))) = ngrams_id ng .== _nn_ngrams_id nng cond34 (nng, (ng, (_,_))) = ngrams_id ng .== nng_ngrams_id nng
cond45 :: (NodeNodeRead, (NodeNgramRead, (NgramsReadNull, (NodeNgramReadNull, NodeReadNull)))) -> Column PGBool cond45 :: (NodeNodeRead, (NodeNgramRead, (NgramsReadNull, (NodeNgramReadNull, NodeReadNull)))) -> Column PGBool
cond45 (nn, (nng, (_,(_,_)))) = _nn_node_id nng .== nodeNode_node2_id nn cond45 (nn, (nng, (_,(_,_)))) = nng_node_id nng .== nn_node2_id nn
cond56 :: (NodeSearchRead, (NodeNodeRead, (NodeNgramReadNull, (NgramsReadNull, (NodeNgramReadNull, NodeReadNull))))) -> Column PGBool cond56 :: (NodeSearchRead, (NodeNodeRead, (NodeNgramReadNull, (NgramsReadNull, (NodeNgramReadNull, NodeReadNull))))) -> Column PGBool
cond56 (n, (nn, (_,(_,(_,_))))) = _ns_id n .== nodeNode_node2_id nn cond56 (n, (nn, (_,(_,(_,_))))) = _ns_id n .== nn_node2_id nn
{- {-
......
...@@ -98,7 +98,7 @@ dbTree rootId = map (\(nId, tId, pId, n) -> DbTreeNode nId tId pId n) <$> runPGS ...@@ -98,7 +98,7 @@ dbTree rootId = map (\(nId, tId, pId, n) -> DbTreeNode nId tId pId n) <$> runPGS
FROM nodes AS c FROM nodes AS c
INNER JOIN tree AS s ON c.parent_id = s.id INNER JOIN tree AS s ON c.parent_id = s.id
WHERE c.typename IN (2,3,30,31,5,7,9) WHERE c.typename IN (2,3,30,31,7,9)
) )
SELECT * from tree; SELECT * from tree;
|] (Only rootId) |] (Only rootId)
......
...@@ -20,6 +20,10 @@ commentary with @some markup@. ...@@ -20,6 +20,10 @@ commentary with @some markup@.
module Gargantext.Database.Utils where module Gargantext.Database.Utils where
import Data.ByteString.Char8 (hPutStrLn)
import System.IO (stderr)
import Control.Exception
import Control.Monad.Error.Class -- (MonadError(..), Error)
import Control.Lens (Getter, view) import Control.Lens (Getter, view)
import Control.Monad.Reader import Control.Monad.Reader
import Control.Monad.Except import Control.Monad.Except
...@@ -69,7 +73,7 @@ mkCmd k = do ...@@ -69,7 +73,7 @@ mkCmd k = do
conn <- view connection conn <- view connection
liftIO $ k conn liftIO $ k conn
runCmd :: HasConnection env => env runCmd :: (HasConnection env) => env
-> Cmd' env err a -> Cmd' env err a
-> IO (Either err a) -> IO (Either err a)
runCmd env m = runExceptT $ runReaderT m env runCmd env m = runExceptT $ runReaderT m env
...@@ -80,8 +84,20 @@ runOpaQuery q = mkCmd $ \c -> runQuery c q ...@@ -80,8 +84,20 @@ runOpaQuery q = mkCmd $ \c -> runQuery c q
formatPGSQuery :: PGS.ToRow a => PGS.Query -> a -> Cmd err DB.ByteString formatPGSQuery :: PGS.ToRow a => PGS.Query -> a -> Cmd err DB.ByteString
formatPGSQuery q a = mkCmd $ \conn -> PGS.formatQuery conn q a formatPGSQuery q a = mkCmd $ \conn -> PGS.formatQuery conn q a
runPGSQuery :: (PGS.ToRow a, PGS.FromRow b) => PGS.Query -> a -> Cmd err [b] -- TODO use runPGSQueryDebug everywhere
runPGSQuery q a = mkCmd $ \conn -> PGS.query conn q a runPGSQuery' :: (PGS.ToRow a, PGS.FromRow b) => PGS.Query -> a -> Cmd err [b]
runPGSQuery' q a = mkCmd $ \conn -> PGS.query conn q a
runPGSQuery :: (MonadError err m, MonadReader env m,
PGS.FromRow r, PGS.ToRow q, MonadIO m, HasConnection env)
=> PGS.Query -> q -> m [r]
runPGSQuery q a = mkCmd $ \conn -> catch (PGS.query conn q a) (printError conn)
where
printError c (SomeException e) = do
q' <- PGS.formatQuery c q a
hPutStrLn stderr q'
throw (SomeException e)
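Sketch of the intended effect: a call site like the one below (the query and typename value are illustrative assumptions) now prints the fully-formatted SQL to stderr before re-throwing whenever the query fails, instead of throwing without context:

-- Hypothetical call site; on failure the formatted statement
-- "SELECT count(*) FROM nodes WHERE typename = 4" is written to stderr
-- and the original exception is re-raised.
countDocs :: Cmd err [PGS.Only Int]
countDocs = runPGSQuery "SELECT count(*) FROM nodes WHERE typename = ?"
                        (PGS.Only (4 :: Int))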
execPGSQuery :: PGS.ToRow a => PGS.Query -> a -> Cmd err Int64 execPGSQuery :: PGS.ToRow a => PGS.Query -> a -> Cmd err Int64
execPGSQuery q a = mkCmd $ \conn -> PGS.execute conn q a execPGSQuery q a = mkCmd $ \conn -> PGS.execute conn q a
......
...@@ -16,6 +16,7 @@ commentary with @some markup@. ...@@ -16,6 +16,7 @@ commentary with @some markup@.
{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
module Gargantext.Prelude module Gargantext.Prelude
( module Gargantext.Prelude ( module Gargantext.Prelude
...@@ -127,28 +128,39 @@ type Grain = Int ...@@ -127,28 +128,39 @@ type Grain = Int
type Step = Int type Step = Int
-- | Function to split a range into chunks -- | Function to split a range into chunks
-- if step == grain then linearity -- if step == grain then linearity (splitEvery)
-- elif step < grain then overlapping -- elif step < grain then overlapping
-- else dotted with holes -- else dotted with holes
-- TODO FIX BUG if Steps*Grain /= length l
-- chunkAlong 10 10 [1..15] == [[1..10]]
-- BUG: what about the rest of (divMod 15 10)?
-- TODO: chunkAlongNoRest or chunkAlongWithRest
-- default behavior: NoRest
chunkAlong :: Eq a => Grain -> Step -> [a] -> [[a]] chunkAlong :: Eq a => Grain -> Step -> [a] -> [[a]]
chunkAlong a b l = case a > 0 && b > 0 of chunkAlong a b l = case a >= length l of
True -> chunkAlong_ a b l True -> [l]
False -> panic "ChunkAlong: Parameters should be > 0 and Grain > Step" False -> chunkAlong' a b l
chunkAlong_ :: Eq a => Int -> Int -> [a] -> [[a]] chunkAlong' :: Eq a => Grain -> Step -> [a] -> [[a]]
chunkAlong_ a b l = filter (/= []) $ only (while dropAlong) chunkAlong' a b l = case a > 0 && b > 0 of
where True -> chunkAlong'' a b l
only = map (take a) False -> panic "chunkAlong': Grain and Step should be > 0"
while = takeWhile (\x -> length x >= a)
dropAlong = L.scanl (\x _y -> drop b x) l ([1..] :: [Integer]) chunkAlong'' :: Eq a => Int -> Int -> [a] -> [[a]]
chunkAlong'' a b l = filter (/= []) $ only (while dropAlong)
where
only = map (take a)
while = takeWhile (\x -> length x >= a)
dropAlong = L.scanl (\x _y -> drop b x) l ([1..] :: [Integer])
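To make the grain/step regimes in the comment above concrete, a few worked values (checked against this definition; the list wrapper is only for illustration):

chunkAlongExamples :: [[[Int]]]
chunkAlongExamples =
  [ chunkAlong 3 3 [1..9]   -- [[1,2,3],[4,5,6],[7,8,9]]  step == grain: contiguous (splitEvery)
  , chunkAlong 3 1 [1..5]   -- [[1,2,3],[2,3,4],[3,4,5]]  step <  grain: overlapping windows
  , chunkAlong 2 3 [1..7]   -- [[1,2],[4,5]]              step >  grain: holes, trailing rest dropped
  ]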
-- | Optimized version (Vector) -- | Optimized version (Vector)
chunkAlong' :: Int -> Int -> V.Vector a -> V.Vector (V.Vector a) chunkAlongV :: Int -> Int -> V.Vector a -> V.Vector (V.Vector a)
chunkAlong' a b l = only (while dropAlong) chunkAlongV a b l = only (while dropAlong)
where where
only = V.map (V.take a) only = V.map (V.take a)
while = V.takeWhile (\x -> V.length x >= a) while = V.takeWhile (\x -> V.length x >= a)
dropAlong = V.scanl (\x _y -> V.drop b x) l (V.fromList [1..]) dropAlong = V.scanl (\x _y -> V.drop b x) l (V.fromList [1..])
-- | TODO Inverse of chunk ? unchunkAlong ? -- | TODO Inverse of chunk ? unchunkAlong ?
-- unchunkAlong :: Int -> Int -> [[a]] -> [a] -- unchunkAlong :: Int -> Int -> [[a]] -> [a]
...@@ -252,3 +264,8 @@ zipSnd f xs = zip xs (f xs) ...@@ -252,3 +264,8 @@ zipSnd f xs = zip xs (f xs)
maximumWith :: (Ord a1, Foldable t) => (a2 -> a1) -> t a2 -> a2 maximumWith :: (Ord a1, Foldable t) => (a2 -> a1) -> t a2 -> a2
maximumWith f = L.maximumBy (compare `on` f) maximumWith f = L.maximumBy (compare `on` f)
-- | All unordered pairs of a list (combinations without repetition), with a function applied to each element of the pair
listToCombi :: forall a b. (a -> b) -> [a] -> [(b,b)]
listToCombi f l = [ (f x, f y) | (x:rest) <- L.tails l, y <- rest ]
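For instance, a worked value (assuming the definition above; the binding name is only for illustration):

pairsExample :: [(Int, Int)]
pairsExample = listToCombi (\x -> x) [1, 2, 3]   -- [(1,2),(1,3),(2,3)]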