Commit fd6f2c71 authored by Alexandre Delanoë's avatar Alexandre Delanoë

[API][FACET] chart and table added. Route simplified.

parent ed2d00af
......@@ -71,7 +71,6 @@ import Gargantext.Database.Utils (databaseParameters)
---------------------------------------------------------------------
import GHC.Base (Applicative)
-- import Control.Lens
......@@ -112,12 +111,12 @@ makeApp fw = do
let serverApp = appMock
-- logWare <- mkRequestLogger def { destination = RequestLogger.Logger $ env^.logger }
let checkOriginAndHost app req resp = do
blocking <- fireWall req fw
case blocking of
True -> app req resp
False -> resp ( responseLBS status401 [] "Invalid Origin or Host header" )
False -> resp ( responseLBS status401 []
"Invalid Origin or Host header")
let corsMiddleware = cors $ \_ -> Just CorsResourcePolicy
-- { corsOrigins = Just ([env^.settings.allowedOrigin], False)
......@@ -139,7 +138,6 @@ makeApp fw = do
pure $ checkOriginAndHost $ corsMiddleware $ serverApp
---------------------------------------------------------------------
type PortNumber = Int
---------------------------------------------------------------------
......@@ -269,4 +267,3 @@ startGargantextMock port = do
run port application
......@@ -30,6 +30,7 @@ import Control.Monad ((>>))
-- import Data.Aeson (Value())
--import Data.Text (Text(), pack)
import Data.Text (Text())
import Data.Time (UTCTime)
import Database.PostgreSQL.Simple (Connection)
......@@ -38,14 +39,14 @@ import Servant
import Gargantext.Prelude
import Gargantext.Types.Node
import Gargantext.Database.Node (getNodesWithParentId
import Gargantext.Database.Node ( getNodesWithParentId
, getNode, getNodesWith
, deleteNode, deleteNodes)
import Gargantext.Database.Facet (FacetDoc, getDocFacet)
import Gargantext.Database.Facet (FacetDoc, getDocFacet
,FacetChart)
-------------------------------------------------------------------
-------------------------------------------------------------------
-- | Node API Types management
type Roots = Get '[JSON] [Node HyperdataDocument]
:<|> Post '[JSON] Int
......@@ -56,20 +57,31 @@ type NodesAPI = Delete '[JSON] Int
type NodeAPI = Get '[JSON] (Node HyperdataDocument)
:<|> Delete '[JSON] Int
:<|> "children" :> Summary " Summary children"
:> QueryParam "type" NodeType
:> QueryParam "offset" Int
:> QueryParam "limit" Int
:> Get '[JSON] [Node HyperdataDocument]
:<|> "facet" :> "documents" :> FacetDocAPI
-- :<|> "facet" :<|> "sources" :<|> FacetSourcesAPI
-- :<|> "facet" :<|> "authors" :<|> FacetAuthorsAPI
-- :<|> "facet" :<|> "terms" :<|> FacetTermsAPI
--data FacetFormat = Table | Chart
--data FacetType = Doc | Term | Source | Author
--data Facet = Facet Doc Format
:<|> "facet" :> QueryParam "type" NodeType
:> QueryParam "offset" Int
:> QueryParam "limit" Int
:> Get '[JSON] [FacetDoc]
type FacetDocAPI = "table"
:> QueryParam "offset" Int
:> QueryParam "limit" Int
:> Get '[JSON] [FacetDoc]
:<|> "chart"
:> QueryParam "from" UTCTime
:> QueryParam "to" UTCTime
:> Get '[JSON] [FacetChart]
--
-- Depending on the Type of the Node, we could post
-- New documents for a corpus
-- New map list terms
......@@ -79,7 +91,6 @@ type NodeAPI = Get '[JSON] (Node HyperdataDocument)
-- :<|> "query" :> Capture "string" Text :> Get '[JSON] Text
-- | Node API functions
-- | Handler for the 'Roots' endpoint: logs a placeholder message, then
-- returns the nodes whose parent is node 0 (no 'NodeType' filter).
-- NOTE(review): the 'putStrLn "Log Needed"' is a temporary stand-in for
-- real logging — replace with the project's logger when available.
roots :: Connection -> Server Roots
roots conn = liftIO (putStrLn "Log Needed" >> getNodesWithParentId conn 0 Nothing)
......@@ -91,7 +102,8 @@ nodeAPI :: Connection -> NodeId -> Server NodeAPI
nodeAPI conn id = liftIO (putStrLn "getNode" >> getNode conn id )
:<|> deleteNode' conn id
:<|> getNodesWith' conn id
:<|> getDocFacet' conn id
:<|> getFacet conn id
:<|> getChart conn id
-- :<|> upload
-- :<|> query
......@@ -108,9 +120,15 @@ getNodesWith' :: Connection -> NodeId -> Maybe NodeType -> Maybe Int -> Maybe In
-> Handler [Node HyperdataDocument]
getNodesWith' conn id nodeType offset limit = liftIO (getNodesWith conn id nodeType offset limit)
getDocFacet' :: Connection -> NodeId -> Maybe NodeType -> Maybe Int -> Maybe Int
getFacet :: Connection -> NodeId -> Maybe Int -> Maybe Int
-> Handler [FacetDoc]
getDocFacet' conn id nodeType offset limit = liftIO (getDocFacet conn id nodeType offset limit)
getFacet conn id offset limit = liftIO (getDocFacet conn id (Just Document) offset limit)
-- | Handler for the @facet\/documents\/chart@ endpoint: document counts
-- bucketed over the optional @from@\/@to@ time range.
-- NOTE(review): stub — the body is 'undefined', so any request reaching
-- this handler will crash at runtime until it is implemented.
getChart :: Connection -> NodeId -> Maybe UTCTime -> Maybe UTCTime
-> Handler [FacetChart]
getChart _ _ _ _ = undefined
-- | Echo handler: returns the incoming query text to the caller unchanged.
query :: Text -> Handler Text
query = pure
......
......@@ -37,7 +37,7 @@ import Data.Maybe (Maybe)
import Data.Profunctor.Product.Default (Default)
import Data.Profunctor.Product.TH (makeAdaptorAndInstance)
import Data.Time (UTCTime)
import Data.Time.Segment (jour)
import Data.Time.Segment (jour, timesAfter, Granularity(D))
import Data.Swagger
import Database.PostgreSQL.Simple (Connection)
......@@ -59,7 +59,18 @@ import Gargantext.Utils.Prefix (unPrefix)
------------------------------------------------------------------------
-- | DocFacet
-- data Facet = FacetDoc | FacetSources | FacetAuthors | FacetTerms
-- deriving (Show, Generic)
--instance FromJSON Facet
--instance ToJSON Facet
type FacetDoc = Facet NodeId UTCTime HyperdataDocument Bool Int
type FacetSources = FacetDoc
type FacetAuthors = FacetDoc
type FacetTerms = FacetDoc
data Facet id created hyperdata favorite ngramCount =
FacetDoc { facetDoc_id :: id
......@@ -98,7 +109,23 @@ type FacetDocRead = Facet (Column PGInt4 )
(Column PGBool )
(Column PGInt4 )
------------------------------------------------------------------------
-----------------------------------------------------------------------
-- | Alias used solely so the 'Arbitrary' instance below reads as being
-- "for the chart's time axis".
-- NOTE(review): a 'type' alias creates no new type, so the instance
-- below is really an orphan @Arbitrary UTCTime@ instance — consider a
-- @newtype@ wrapper to avoid clashing with any other such instance.
type UTCTime' = UTCTime

-- | One point of the documents-over-time chart: a timestamp paired with
-- a (document) count.
data FacetChart = FacetChart { facetChart_time :: UTCTime'
, facetChart_count :: Double
}
deriving (Show, Generic)
-- Derives To/FromJSON with the "facetChart_" record prefix stripped.
$(deriveJSON (unPrefix "facetChart_") ''FacetChart)
instance ToSchema FacetChart

-- Generator draws from 100 daily timestamps starting 2000-01-01.
instance Arbitrary UTCTime' where
arbitrary = elements $ timesAfter 100 D (jour 2000 01 01)
instance Arbitrary FacetChart where
arbitrary = FacetChart <$> arbitrary <*> arbitrary
-----------------------------------------------------------------------
getDocFacet :: Connection -> Int -> Maybe NodeType
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment