Commit ffa3c28d authored by Przemyslaw Kaminski

[document-export] add CSV export (warning: does not compile yet)

parent d329decd
Pipeline #2303 failed in 6 minutes and 7 seconds
@@ -14,7 +14,7 @@ module Gargantext.API.Node.Document.Export
 import qualified Data.Text as T
 import Data.Version (showVersion)
 import Gargantext.API.Node.Document.Export.Types
-import Gargantext.API.Prelude (GargNoServer)
+import Gargantext.API.Prelude (GargNoServer, GargServer)
 import Gargantext.Core (toDBid)
 import Gargantext.Core.Types
 -- import Gargantext.Database.Admin.Types.Hyperdata (HyperdataDocument(..))
@@ -23,14 +23,18 @@ import Gargantext.Database.Query.Table.Node (getClosestParentIdByType)
 import Gargantext.Database.Schema.Node (NodePoly(..))
 import Gargantext.Prelude
 import qualified Paths_gargantext as PG -- cabal magic build module
 -- import Servant (Proxy(..))
+import Servant
 
+api :: UserId -> DocId -> GargServer API
+api uid dId = getDocumentsJSON uid dId
+        :<|> getDocumentsCSV uid dId
 --------------------------------------------------
 -- | Hashes are ordered by Set
-getDocuments :: UserId
-             -> DocId
-             -> GargNoServer DocumentExport
-getDocuments uId pId = do
+getDocumentsJSON :: UserId
+                 -> DocId
+                 -> GargNoServer DocumentExport
+getDocumentsJSON uId pId = do
   printDebug "[getDocuments] pId" pId
   mcId <- getClosestParentIdByType pId NodeCorpus
   let cId = maybe (panic "[G.A.N.D.Export] Node has no parent") identity mcId
@@ -59,3 +63,12 @@ getDocuments uId pId = do
        , _d_ngrams = Ngrams { _ng_ngrams = []
                             , _ng_hash = "" }
        , _d_hash = ""}
+
+getDocumentsCSV :: UserId
+                -> DocId
+                -> GargNoServer [Document]
+getDocumentsCSV uId pId = do
+  DocumentExport { _de_documents } <- getDocumentsJSON uId pId
+  pure $ _de_documents
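One educated guess at a compile failure the commit message warns about (not confirmed by the source): the pattern DocumentExport { _de_documents } is a field pun and needs the NamedFieldPuns extension. The extension-free spelling of the same handler would be:

getDocumentsCSV :: UserId -> DocId -> GargNoServer [Document]
getDocumentsCSV uId pId = do
  -- Reuse the JSON handler and project out the document list.
  DocumentExport { _de_documents = docs } <- getDocumentsJSON uId pId
  pure docs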
@@ -14,12 +14,15 @@ Portability : POSIX
 module Gargantext.API.Node.Document.Export.Types where
 
 import Data.Aeson.TH (deriveJSON)
+import Data.Csv (DefaultOrdered(..), ToNamedRecord(..), (.=), header, namedRecord)
 import Data.Swagger
 import Data.Text (Text)
 import GHC.Generics (Generic)
 import Gargantext.Core.Types
 import Gargantext.Core.Utils.Prefix (unPrefix, unPrefixSwagger)
 import Gargantext.Database.Admin.Types.Hyperdata (HyperdataDocument(..))
 import Gargantext.Database.Schema.Node (NodePoly(..))
+import Gargantext.Utils.Servant (CSV)
 import Protolude
+--import Protolude.Partial (read)
 import Servant
@@ -35,6 +38,16 @@ data Document =
          , _d_hash :: Hash
          } deriving (Generic)
 
+--instance Read Document where
+--  read "" = panic "not implemented"
+instance DefaultOrdered Document where
+  headerOrder _ = header ["id", "name"]
+
+instance ToNamedRecord Document where
+  toNamedRecord (Document { _d_document = Node { .. }}) =
+    namedRecord
+      [ "id" .= _node_id
+      , "name" .= _node_name ]
 data Ngrams =
   Ngrams { _ng_ngrams :: [Text]
          , _ng_hash :: Hash
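For readers new to cassava: headerOrder fixes the CSV column order, toNamedRecord supplies the row values, and encodeDefaultOrderedByName (used by the MimeRender CSV instance in the new Gargantext.Utils.Servant module below) combines the two. A minimal, self-contained sketch with a hypothetical Person type, not part of this commit:

{-# LANGUAGE OverloadedStrings #-}
import qualified Data.ByteString.Lazy.Char8 as BL
import Data.Csv ( DefaultOrdered(..), ToNamedRecord(..)
                , encodeDefaultOrderedByName, header, namedRecord, (.=) )
import Data.Text (Text)

data Person = Person { pId :: Int, pName :: Text }   -- hypothetical type

instance DefaultOrdered Person where
  headerOrder _ = header ["id", "name"]              -- column order

instance ToNamedRecord Person where
  toNamedRecord (Person i n) = namedRecord ["id" .= i, "name" .= n]

main :: IO ()
main = BL.putStr $ encodeDefaultOrderedByName [Person 1 "alice"]
-- prints a header row then one record:
--   id,name
--   1,alice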
@@ -63,7 +76,10 @@ instance ToParamSchema Ngrams where
 --------------------------------------------------
 type API = Summary "Document Export"
            :> "export"
-           :> Get '[JSON] DocumentExport
+           :> ( "json"
+                :> Get '[JSON] DocumentExport
+           :<|> "csv"
+                :> Get '[CSV] [Document])
 
 $(deriveJSON (unPrefix "_de_") ''DocumentExport)
 $(deriveJSON (unPrefix "_d_") ''Document)
...
@@ -230,7 +230,7 @@ serverPrivateGargAPI' (AuthenticatedUser (NodeId uid))
     :<|> withAccess (Proxy :: Proxy TableNgramsApi) Proxy uid
          <$> PathNode <*> apiNgramsTableDoc
-    :<|> DocumentExport.getDocuments uid
+    :<|> DocumentExport.api uid
     :<|> count -- TODO: undefined
...

@@ -359,7 +359,8 @@ pollDocumentNgramsTableAsyncJob :: Token -> DocId -> JobID 'Unsafe -> Maybe Limi
 waitDocumentNgramsTableAsyncJob :: Token -> DocId -> JobID 'Unsafe -> ClientM (JobOutput JobLog)
 
 -- document export API
-getDocumentExport :: Token -> DocId -> ClientM DocumentExport.DocumentExport
+getDocumentExportJSON :: Token -> DocId -> ClientM DocumentExport.DocumentExport
+getDocumentExportCSV :: Token -> DocId -> ClientM [DocumentExport.Document]
 
 -- count api
 postCountQuery :: Token -> Query -> ClientM Counts
@@ -656,7 +657,8 @@ postAuth
     :<|> killDocumentNgramsTableAsyncJob
     :<|> pollDocumentNgramsTableAsyncJob
     :<|> waitDocumentNgramsTableAsyncJob
-    :<|> getDocumentExport
+    :<|> getDocumentExportJSON
+    :<|> getDocumentExportCSV
     :<|> postCountQuery
     :<|> getGraphHyperdata
     :<|> postGraphAsync
...

@@ -23,6 +23,7 @@ import Codec.Serialise (Serialise())
 import Control.Monad (mzero)
 import Data.Aeson
 import Data.Aeson.TH (deriveJSON)
+import qualified Data.Csv as Csv
 import Data.Either
 import Data.Hashable (Hashable)
 import Data.Morpheus.Types (GQLType)
@@ -152,7 +153,7 @@ pgNodeId = O.sqlInt4 . id2int
 ------------------------------------------------------------------------
 newtype NodeId = NodeId Int
-  deriving (Read, Generic, Num, Eq, Ord, Enum, ToJSONKey, FromJSONKey, ToJSON, FromJSON, Hashable)
+  deriving (Read, Generic, Num, Eq, Ord, Enum, ToJSONKey, FromJSONKey, ToJSON, FromJSON, Hashable, Csv.ToField)
 
 instance GQLType NodeId
 instance Show NodeId where
   show (NodeId n) = "nodeId-" <> show n
@@ -166,6 +167,8 @@ instance FromField NodeId where
       then return $ NodeId n
       else mzero
 instance ToSchema NodeId
+--instance Csv.ToField NodeId where
+--  toField (NodeId nodeId) = Csv.toField nodeId
 
 unNodeId :: NodeId -> Int
 unNodeId (NodeId n) = n
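A note not part of the commit: adding Csv.ToField to the newtype deriving list above relies on GeneralizedNewtypeDeriving, which this module presumably already enables for Num and Enum; if that extension were missing, the manual instance kept in comments would be the equivalent spelling:

instance Csv.ToField NodeId where
  toField (NodeId n) = Csv.toField n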
...

module Gargantext.Utils.Servant where

import qualified Data.ByteString.Lazy.Char8 as BSC
import Data.Csv (encodeDefaultOrderedByName, DefaultOrdered, ToNamedRecord)
import Network.HTTP.Media ((//), (/:))
import qualified Prelude as Prelude
import Protolude
import Protolude.Partial (read)
import Servant

data CSV = CSV

instance Accept CSV where
  contentType _ = "text" // "csv" /: ("charset", "utf-8")

instance (DefaultOrdered a, ToNamedRecord a) => MimeRender CSV [a] where
  mimeRender _ val = encodeDefaultOrderedByName val

instance Read a => MimeUnrender CSV a where
  mimeUnrender _ bs = case BSC.take len bs of
      "text/csv" -> return . read . BSC.unpack $ BSC.drop len bs
      _          -> Left "didn't start with the magic incantation"
    where
      len :: Int64
      len = fromIntegral $ length ("text/csv" :: Prelude.String)

--instance ToNamedRecord a => MimeRender CSV [a] where
--  mimeRender _ val = encode val
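The MimeUnrender instance above reads like a stopgap: it requires the body to begin with the literal text/csv marker and then applies the partial read to the remainder, so it only round-trips Show/Read values rather than parsing real CSV. A more conventional decoder, sketched under the assumption that the payload type has a cassava FromNamedRecord instance (not part of this commit):

import Data.Csv (FromNamedRecord, decodeByName)

instance FromNamedRecord a => MimeUnrender CSV [a] where
  -- decodeByName parses a header row plus records; keep just the records.
  mimeUnrender _ bs = toList . snd <$> decodeByName bs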