Commit 1b90c03a authored by Alexandre Delanoë

[DBFLOW] /api/v1.0/node/{id}/table ok

parent 51a7b876
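The route this commit wires up ("table" under the node API, below) takes view, offset, limit and order query parameters and returns a JSON list of FacetDoc. A request of roughly this shape should exercise it (the node id and parameter values are illustrative only):

    GET /api/v1.0/node/34/table?view=Docs&offset=0&limit=10&order=DateDesc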
......@@ -37,7 +37,8 @@ library:
- Gargantext.Core.Utils.Prefix
- Gargantext.Database
- Gargantext.Database.Bashql
- Gargantext.Database.Node.Document.Import
- Gargantext.Database.Node.Document.Insert
- Gargantext.Database.Node.Document.Add
- Gargantext.Database.Types.Node
- Gargantext.Database.User
- Gargantext.Database.Cooc
......
......@@ -30,12 +30,13 @@ module Gargantext.API.Node
, HyperdataDocumentV3(..)
) where
-------------------------------------------------------------------
import Prelude (Enum, Bounded, minBound, maxBound)
import Control.Lens (prism')
import Control.Monad.IO.Class (liftIO)
import Control.Monad ((>>))
--import System.IO (putStrLn, readFile)
import Data.Either(Either(Left))
import Data.Aeson (FromJSON, ToJSON, Value())
--import Data.Text (Text(), pack)
import Data.Text (Text())
......@@ -46,7 +47,6 @@ import Database.PostgreSQL.Simple (Connection)
import GHC.Generics (Generic)
import Servant
-- import Servant.Multipart
import Gargantext.Prelude
import Gargantext.Database.Types.Node
......@@ -55,7 +55,7 @@ import Gargantext.Database.Node ( runCmd
, getNode, getNodesWith
, deleteNode, deleteNodes, mk, JSONB)
import qualified Gargantext.Database.Node.Update as U (update, Update(..))
import Gargantext.Database.Facet (FacetDoc {-,getDocFacet-}
import Gargantext.Database.Facet (FacetDoc , runViewDocuments', OrderBy(..)
,FacetChart)
import Gargantext.Database.Tree (treeDB, HasTreeError(..), TreeError(..))
......@@ -63,6 +63,7 @@ import Gargantext.Database.Tree (treeDB, HasTreeError(..), TreeError(..))
import Gargantext.TextFlow
import Gargantext.Viz.Graph (Graph)
import Gargantext.Core (Lang(..))
import Gargantext.Core.Types (Offset, Limit)
import Gargantext.Core.Types.Main (Tree, NodeTree)
import Gargantext.Text.Terms (TermType(..))
......@@ -117,21 +118,35 @@ type NodeAPI a = Get '[JSON] (Node a)
:> QueryParam "offset" Int
:> QueryParam "limit" Int
:> Get '[JSON] [Node a]
:<|> "facet" :> Summary " Facet documents"
:> "documents" :> FacetDocAPI
-- :<|> "facet" :<|> "sources" :<|> FacetSourcesAPI
-- :<|> "facet" :<|> "authors" :<|> FacetAuthorsAPI
-- :<|> "facet" :<|> "terms" :<|> FacetTermsAPI
:<|> Summary " Tabs" :> FacetDocAPI
--data FacetFormat = Table | Chart
--data FacetType = Doc | Term | Source | Author
--data Facet = Facet Doc Format
data FacetType = Docs | Terms | Sources | Authors | Trash
deriving (Generic, Enum, Bounded)
instance FromHttpApiData FacetType
where
parseUrlPiece "Docs" = pure Docs
parseUrlPiece "Terms" = pure Terms
parseUrlPiece "Sources" = pure Sources
parseUrlPiece "Authors" = pure Authors
parseUrlPiece "Trash" = pure Trash
parseUrlPiece _ = Left "Unexpected value of FacetType"
instance ToParamSchema FacetType
instance ToJSON FacetType
instance FromJSON FacetType
instance ToSchema FacetType
instance Arbitrary FacetType
where
arbitrary = elements [minBound .. maxBound]
type FacetDocAPI = "table"
:> Summary " Table data"
:> QueryParam "view" FacetType
:> QueryParam "offset" Int
:> QueryParam "limit" Int
:> QueryParam "order" OrderBy
:> Get '[JSON] [FacetDoc]
:<|> "chart"
......@@ -183,7 +198,7 @@ nodeAPI conn p id
:<|> putNode conn id
:<|> deleteNode' conn id
:<|> getNodesWith' conn id p
:<|> getFacet conn id
:<|> getTable conn id
:<|> getChart conn id
-- :<|> upload
-- :<|> query
......@@ -195,6 +210,15 @@ rename c nId (RenameNode name) = liftIO $ U.update (U.Rename nId name) c
nodesAPI :: Connection -> [NodeId] -> Server NodesAPI
nodesAPI conn ids = deleteNodes' conn ids
getTable :: Connection -> NodeId -> Maybe FacetType -> Maybe Offset -> Maybe Limit -> Maybe OrderBy -> Handler [FacetDoc]
getTable c cId ft o l order = liftIO $ case ft of
(Just Docs) -> runViewDocuments' c cId False o l order
(Just Trash) -> runViewDocuments' c cId True o l order
_ -> panic "not implemented"
postNode :: Connection -> NodeId -> PostNode -> Handler [Int]
postNode c pId (PostNode name nt) = liftIO $ mk c nt (Just pId) name
......@@ -212,10 +236,6 @@ getNodesWith' :: JSONB a => Connection -> NodeId -> proxy a -> Maybe NodeType
getNodesWith' conn id p nodeType offset limit = liftIO (getNodesWith conn id p nodeType offset limit)
getFacet :: Connection -> NodeId -> Maybe Int -> Maybe Int
-> Handler [FacetDoc]
getFacet conn id offset limit = undefined -- liftIO (putStrLn ( "/facet" :: Text)) >> liftIO (getDocFacet conn NodeCorpus id (Just NodeDocument) offset limit)
getChart :: Connection -> NodeId -> Maybe UTCTime -> Maybe UTCTime
-> Handler [FacetChart]
getChart _ _ _ _ = undefined -- TODO
......
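In getTable above, the view parameter selects the trash flag handed to runViewDocuments': Docs queries the documents still attached to the corpus, Trash the deleted ones, and the remaining facets (Terms, Sources, Authors) are left unimplemented for now. An illustrative request for the trash view of the same node, ordered by favorite:

    GET /api/v1.0/node/34/table?view=Trash&order=FavDesc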
......@@ -11,6 +11,7 @@ Portability : POSIX
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# LANGUAGE Arrows #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
......@@ -19,41 +20,41 @@ Portability : POSIX
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
------------------------------------------------------------------------
module Gargantext.Database.Facet
where
------------------------------------------------------------------------
import Prelude hiding (null, id, map, sum, not)
import Prelude hiding (null, id, map, sum, not, read)
import Prelude (Enum, Bounded, minBound, maxBound)
import GHC.Generics (Generic)
-- import Data.Aeson (Value)
import Data.Aeson (FromJSON, ToJSON)
import Data.Either(Either(Left))
import Control.Arrow (returnA)
import Control.Lens.TH (makeLensesWith, abbreviatedFields)
import Data.Aeson.TH (deriveJSON)
import Data.Maybe (Maybe)
import Data.Profunctor.Product.Default (Default)
import Data.Profunctor.Product.TH (makeAdaptorAndInstance)
import Data.Text (Text)
import Data.Time (UTCTime)
import Data.Time.Segment (jour)
import Data.Swagger
import Database.PostgreSQL.Simple (Connection)
import Database.PostgreSQL.Simple (Connection)
import Opaleye
import Opaleye.Internal.Join (NullMaker)
import qualified Opaleye.Internal.Unpackspec()
import Servant.API
import Test.QuickCheck.Arbitrary
import Test.QuickCheck (elements)
import Gargantext.Core.Types
import Gargantext.Database.Types.Node (NodeType)
import Gargantext.Core.Utils.Prefix (unPrefix)
import Gargantext.Database.NodeNode
import Gargantext.Database.NodeNodeNgram
import Gargantext.Database.Node
import Gargantext.Database.Queries
import Gargantext.Database.Config (nodeTypeId)
......@@ -68,21 +69,23 @@ import Gargantext.Database.Config (nodeTypeId)
--instance ToJSON Facet
type Favorite = Bool
type Title = Text
type FacetDoc = Facet NodeId UTCTime HyperdataDocument Favorite Int
type FacetDoc = Facet NodeId UTCTime Title HyperdataDocument Favorite Int
type FacetSources = FacetDoc
type FacetAuthors = FacetDoc
type FacetTerms = FacetDoc
data Facet id created hyperdata favorite ngramCount =
data Facet id created title hyperdata favorite ngramCount =
FacetDoc { facetDoc_id :: id
, facetDoc_created :: created
, facetDoc_hyperdata :: hyperdata
, facetDoc_favorite :: favorite
, facetDoc_ngramCount :: ngramCount
} deriving (Show, Generic)
, facetDoc_created :: created
, facetDoc_title :: title
, facetDoc_hyperdata :: hyperdata
, facetDoc_favorite :: favorite
, facetDoc_ngramCount :: ngramCount
} deriving (Show, Generic)
-- | JSON instance
......@@ -94,9 +97,10 @@ instance ToSchema FacetDoc
-- | Mock and Quickcheck instances
instance Arbitrary FacetDoc where
arbitrary = elements [ FacetDoc id' (jour year 01 01) hp fav ngramCount
arbitrary = elements [ FacetDoc id' (jour year 01 01) t hp fav ngramCount
| id' <- [1..10]
, year <- [1990..2000]
, t <- ["title", "another title"]
, hp <- hyperdataDocuments
, fav <- [True, False]
, ngramCount <- [3..100]
......@@ -109,6 +113,7 @@ $(makeLensesWith abbreviatedFields ''Facet)
type FacetDocRead = Facet (Column PGInt4 )
(Column PGTimestamptz)
(Column PGText )
(Column PGJsonb )
(Column PGBool)
(Column PGInt4 )
......@@ -128,8 +133,28 @@ instance Arbitrary FacetChart where
-----------------------------------------------------------------------
type Trash = Bool
data OrderBy = DateAsc | DateDesc
-- | TitleAsc | TitleDesc
| FavDesc | FavAsc -- | NgramCount
| TitleAsc | TitleDesc
| FavDesc | FavAsc
deriving (Generic, Enum, Bounded, Read, Show)
-- | NgramCount
instance FromHttpApiData OrderBy
where
parseUrlPiece "DateAsc" = pure DateAsc
parseUrlPiece "DateDesc" = pure DateDesc
parseUrlPiece "TitleAsc" = pure TitleAsc
parseUrlPiece "TitleDesc" = pure TitleDesc
parseUrlPiece "FavAsc" = pure FavAsc
parseUrlPiece "FavDesc" = pure FavDesc
parseUrlPiece _ = Left "Unexpected value of OrderBy"
instance ToParamSchema OrderBy
instance FromJSON OrderBy
instance ToJSON OrderBy
instance ToSchema OrderBy
instance Arbitrary OrderBy
where
arbitrary = elements [minBound..maxBound]
viewDocuments :: CorpusId -> Trash -> NodeTypeId -> Query FacetDocRead
viewDocuments cId t ntId = proc () -> do
......@@ -139,35 +164,41 @@ viewDocuments cId t ntId = proc () -> do
restrict -< nodeNode_node1_id nn .== (pgInt4 cId)
restrict -< _node_typename n .== (pgInt4 ntId)
restrict -< nodeNode_delete nn .== (pgBool t)
returnA -< FacetDoc (_node_id n) (_node_date n) (_node_hyperdata n) (nodeNode_favorite nn) (pgInt4 1)
returnA -< FacetDoc (_node_id n) (_node_date n) (_node_name n) (_node_hyperdata n) (nodeNode_favorite nn) (pgInt4 1)
filterDocuments :: (PGOrd date, PGOrd favorite) =>
filterDocuments :: (PGOrd date, PGOrd title, PGOrd favorite) =>
Maybe Gargantext.Core.Types.Offset
-> Maybe Gargantext.Core.Types.Limit
-> OrderBy
-> Select (Facet id (Column date) hyperdata (Column favorite) ngramCount)
-> Query (Facet id (Column date) hyperdata (Column favorite) ngramCount)
-> Maybe OrderBy
-> Select (Facet id (Column date) (Column title) hyperdata (Column favorite) ngramCount)
-> Query (Facet id (Column date) (Column title) hyperdata (Column favorite) ngramCount)
filterDocuments o l order q = limit' l $ offset' o $ orderBy ordering q
where
ordering = case order of
DateAsc -> asc facetDoc_created
DateDesc -> desc facetDoc_created
(Just DateAsc) -> asc facetDoc_created
--TitleAsc -> asc facetDoc_hyperdata
--TitleDesc -> desc facetDoc_hyperdata
(Just TitleAsc) -> asc facetDoc_title
(Just TitleDesc) -> desc facetDoc_title
FavAsc -> asc facetDoc_favorite
FavDesc -> desc facetDoc_favorite
(Just FavAsc) -> asc facetDoc_favorite
(Just FavDesc) -> desc facetDoc_favorite
_ -> desc facetDoc_created
runViewDocuments :: CorpusId -> Trash -> Maybe Offset -> Maybe Limit -> Maybe OrderBy -> Cmd [FacetDoc]
runViewDocuments cId t o l order = mkCmd $ \c -> runViewDocuments' c cId t o l order
runViewDocuments :: CorpusId -> Trash -> Maybe Offset -> Maybe Limit -> OrderBy -> Cmd [FacetDoc]
runViewDocuments cId t o l order = mkCmd $ \c -> runQuery c ( filterDocuments o l order
-- | TODO use only Cmd with Reader and delete function below
runViewDocuments' :: Connection -> CorpusId -> Trash -> Maybe Offset -> Maybe Limit -> Maybe OrderBy -> IO [FacetDoc]
runViewDocuments' c cId t o l order = runQuery c ( filterDocuments o l order
$ viewDocuments cId t ntId)
where
ntId = nodeTypeId NodeDocument
{-
getDocFacet :: Connection -> NodeType -> Int -> Maybe NodeType
-> Maybe Offset -> Maybe Limit
......
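A minimal sketch of exercising the new plumbing from GHCi (assumes OverloadedStrings, an open Connection c and a corpus node id 34, all illustrative):

    -- the hand-written parser round-trips the constructor names
    -- (OrderBy derives Show, so the result prints)
    >>> parseUrlPiece "TitleDesc" :: Either Text OrderBy
    Right TitleDesc

    -- first ten non-trashed documents of the corpus, newest first
    >>> docs <- runViewDocuments' c 34 False (Just 0) (Just 10) (Just DateDesc)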
......@@ -24,12 +24,11 @@ authors
module Gargantext.Database.Flow
where
import System.FilePath (FilePath)
import GHC.Base ((>>))
import Data.Maybe (Maybe(..))
import Gargantext.Core.Types (NodePoly(..))
import Gargantext.Prelude
import Gargantext.Database.Bashql (runCmd', del)
import Gargantext.Database.Node (Cmd(..), getRoot, mkRoot, mkCorpus)
import Gargantext.Database.Node (getRoot, mkRoot, mkCorpus)
import Gargantext.Database.User (getUser, UserLight(..), Username)
import Gargantext.Database.Node.Document.Insert (insertDocuments, ReturnId(reId))
import Gargantext.Database.Node.Document.Add (add)
......@@ -64,7 +63,7 @@ subFlow username = do
pure (userId, rootId, corpusId)
-- flow :: FilePath -> IO ()
flow :: FilePath -> IO Int
flow fp = do
(masterUserId, _, corpusId) <- subFlow "gargantua"
......@@ -76,18 +75,10 @@ flow fp = do
idsRepeat <- runCmd' $ insertDocuments masterUserId corpusId docs
printDebug "Docs IDs : " idsRepeat
(userId, rootId, corpusId2) <- subFlow "alexandre"
(_, _, corpusId2) <- subFlow "alexandre"
inserted <- runCmd' $ add corpusId2 (map reId ids)
printDebug "Inserted : " inserted
-- runCmd' (del [corpusId2, corpusId])
{-
ids <- add (Documents corpusId) docs
user_id <- runCmd' (get RootUser "alexandre")
rootUser_id <- runCmd' (getRootUser $ userLight_id user_id
corpusId <- mk Corpus
-}
runCmd' (del [corpusId2, corpusId])
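flow takes a file path, inserts the resulting documents under the master user's corpus, attaches them by id to a second user's corpus with add, and ends by deleting both corpora, so a run is effectively a round-trip test; the returned Int is whatever that final del reports. A sketch of running it end to end (standard Prelude assumed; the path is illustrative):

    main :: IO ()
    main = flow "data/corpus.csv" >>= print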
......@@ -20,25 +20,18 @@ Add Documents/Contact to a Corpus/Annuaire.
------------------------------------------------------------------------
module Gargantext.Database.Node.Document.Add where
import Control.Lens (set)
import Data.Aeson (toJSON, Value)
import Data.ByteString.Internal (ByteString)
import Data.Maybe (maybe)
import Data.Typeable (Typeable)
import Database.PostgreSQL.Simple (Connection, FromRow, Query, formatQuery, query, Only(..))
import Database.PostgreSQL.Simple.FromRow (fromRow, field)
import Database.PostgreSQL.Simple (Query, formatQuery, query, Only(..))
import Database.PostgreSQL.Simple.SqlQQ
import Database.PostgreSQL.Simple.ToField (toField)
import Database.PostgreSQL.Simple.ToRow (ToRow(..))
import Database.PostgreSQL.Simple.Types (Values(..), QualifiedIdentifier(..))
import Data.Text (Text)
import qualified Data.Text as DT (pack, unpack, concat)
import qualified Data.Digest.Pure.SHA as SHA (sha256, showDigest)
import qualified Data.ByteString.Lazy.Char8 as DC (pack)
import qualified Data.Text as DT (pack)
import Gargantext.Database.Config (nodeTypeId)
import Gargantext.Database.Node (mkCmd, Cmd(..))
import Gargantext.Database.Types.Node
import Gargantext.Prelude
......
......@@ -49,11 +49,12 @@ the concatenation of the parameters defined by @hashParameters@.
-}
------------------------------------------------------------------------
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
------------------------------------------------------------------------
module Gargantext.Database.Node.Document.Insert where
......@@ -64,7 +65,7 @@ import Data.Aeson (toJSON, Value)
import Data.ByteString.Internal (ByteString)
import Data.Maybe (maybe)
import Data.Typeable (Typeable)
import Database.PostgreSQL.Simple (Connection, FromRow, Query, formatQuery, query, Only(..))
import Database.PostgreSQL.Simple (FromRow, Query, formatQuery, query, Only(..))
import Database.PostgreSQL.Simple.FromRow (fromRow, field)
import Database.PostgreSQL.Simple.SqlQQ
import Database.PostgreSQL.Simple.ToField (toField)
......@@ -149,9 +150,11 @@ queryInsert = [sql|
|]
prepare :: UserId -> ParentId -> [HyperdataDocument] -> [InputData]
prepare uId pId = map (\h -> InputData tId uId pId (DT.pack "Doc") (toJSON $ addUniqId h))
prepare uId pId = map (\h -> InputData tId uId pId (maybe "No Title of Document" identity $ _hyperdataDocument_title h)
(toJSON $ addUniqId h)
)
where
tId = nodeTypeId NodeDocument
tId = nodeTypeId NodeDocument
------------------------------------------------------------------------
-- * Main Types used
......@@ -192,15 +195,6 @@ instance ToRow InputData where
---------------------------------------------------------------------------
-- * Uniqueness of document definition
hashParameters :: [(HyperdataDocument -> Text)]
hashParameters = [ \d -> maybe' (_hyperdataDocument_title d)
, \d -> maybe' (_hyperdataDocument_abstract d)
, \d -> maybe' (_hyperdataDocument_source d)
, \d -> maybe' (_hyperdataDocument_publication_date d)
]
maybe' = maybe (DT.pack "") identity
addUniqId :: HyperdataDocument -> HyperdataDocument
addUniqId doc = set hyperdataDocument_uniqIdBdd (Just hashBdd)
$ set hyperdataDocument_uniqId (Just hash) doc
......@@ -211,5 +205,16 @@ addUniqId doc = set hyperdataDocument_uniqIdBdd (Just hashBdd)
uniqId :: Text -> Text
uniqId = DT.pack . SHA.showDigest . SHA.sha256 . DC.pack . DT.unpack
hashParameters :: [(HyperdataDocument -> Text)]
hashParameters = [ \d -> maybe' (_hyperdataDocument_title d)
, \d -> maybe' (_hyperdataDocument_abstract d)
, \d -> maybe' (_hyperdataDocument_source d)
, \d -> maybe' (_hyperdataDocument_publication_date d)
]
maybe' :: Maybe Text -> Text
maybe' = maybe (DT.pack "") identity
---------------------------------------------------------------------------
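As the module header says, the uniqueness id is the SHA-256 digest of the concatenation of the fields selected by hashParameters. A sketch of computing it by hand for one document (the helper name is ours, not part of the commit, and it assumes DT.concat is in scope):

    -- roughly what addUniqId stores in hyperdataDocument_uniqId
    hashOf :: HyperdataDocument -> Text
    hashOf d = uniqId (DT.concat (map ($ d) hashParameters))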
......@@ -27,7 +27,6 @@ import Gargantext.Prelude
import Data.Maybe (Maybe)
import Data.Profunctor.Product.TH (makeAdaptorAndInstance)
import Control.Lens.TH (makeLensesWith, abbreviatedFields)
import qualified Database.PostgreSQL.Simple as PGS
import Opaleye
......