Verified commit 3305c248 authored by Przemyslaw Kaminski

Merge branch '79-dev-rewrite-better-record-syntax' into dev-corpora-from-write-nodes

parents 112ea7af 477a7fdc
@@ -20,9 +20,11 @@ default-extensions:
   - FlexibleInstances
   - GeneralizedNewtypeDeriving
   - MultiParamTypeClasses
+  - NamedFieldPuns
   - NoImplicitPrelude
   - OverloadedStrings
   - RankNTypes
+  - RecordWildCards
 library:
   source-dirs: src
   ghc-options:
......
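The two added extensions drive every change in this merge. A minimal sketch of what each one buys (the Point type below is illustrative, not from the codebase):

    {-# LANGUAGE NamedFieldPuns  #-}
    {-# LANGUAGE RecordWildCards #-}

    data Point = Point { x :: Int, y :: Int }

    -- NamedFieldPuns: 'Point { x, y }' binds each listed field to a
    -- variable of the same name.
    norm1 :: Point -> Int
    norm1 (Point { x, y }) = abs x + abs y

    -- RecordWildCards: 'Point { .. }' in an expression fills every
    -- field from same-named variables already in scope.
    mkPoint :: Int -> Int -> Point
    mkPoint x y = Point { .. }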
@@ -111,7 +111,10 @@ updateScatter' cId maybeListId tabType maybeLimit = do
   (ngs', scores) <- Metrics.getMetrics cId maybeListId tabType maybeLimit
   let
-    metrics = fmap (\(Scored t s1 s2) -> Metric (unNgramsTerm t) s1 s2 (listType t ngs'))
+    metrics = fmap (\(Scored t s1 s2) -> Metric { m_label = unNgramsTerm t
+                                                , m_x = s1
+                                                , m_y = s2
+                                                , m_cat = listType t ngs' })
              $ fmap normalizeLocal scores
     listType t m = maybe (panic errorMsg) fst $ HashMap.lookup t m
     errorMsg = "API.Node.metrics: key absent"
......
@@ -315,7 +315,10 @@ type PairWith = Summary "Pair a Corpus with an Annuaire"
 pairWith :: CorpusId -> GargServer PairWith
 pairWith cId aId lId = do
   r <- pairing cId aId lId
-  _ <- insertNodeNode [ NodeNode cId aId Nothing Nothing]
+  _ <- insertNodeNode [ NodeNode { _nn_node1_id = cId
+                                 , _nn_node2_id = aId
+                                 , _nn_score    = Nothing
+                                 , _nn_category = Nothing }]
   pure r
......
...@@ -74,9 +74,9 @@ addToAnnuaireWithForm :: FlowCmdM env err m ...@@ -74,9 +74,9 @@ addToAnnuaireWithForm :: FlowCmdM env err m
-> AnnuaireWithForm -> AnnuaireWithForm
-> (JobLog -> m ()) -> (JobLog -> m ())
-> m JobLog -> m JobLog
addToAnnuaireWithForm _cid (AnnuaireWithForm ft _d _l) logStatus = do addToAnnuaireWithForm _cid (AnnuaireWithForm { _wf_filetype }) logStatus = do
printDebug "ft" ft printDebug "ft" _wf_filetype
logStatus JobLog { _scst_succeeded = Just 1 logStatus JobLog { _scst_succeeded = Just 1
, _scst_failed = Just 0 , _scst_failed = Just 0
......
@@ -63,15 +63,17 @@ getCorpus cId lId nt' = do
   repo <- getRepo' [fromMaybe (panic "[Gargantext.API.Node.Corpus.Export]") lId]
   ngs  <- getNodeNgrams cId lId nt repo
   let  -- uniqId is hash computed already for each document imported in database
-    r = Map.intersectionWith (\a b -> Document a (Ngrams (Set.toList b) (hash b)) (d_hash a b)
+    r = Map.intersectionWith
+          (\a b -> Document { _d_document = a
+                            , _d_ngrams   = Ngrams (Set.toList b) (hash b)
+                            , _d_hash     = d_hash a b }
          ) ns (Map.map (Set.map unNgramsTerm) ngs)
       where
        d_hash a b = hash [ fromMaybe "" (_hd_uniqId $ _node_hyperdata a)
                          , hash b
                          ]
-  pure $ Corpus (Map.elems r) (hash $ List.map _d_hash
-                                    $ Map.elems r
-                              )
+  pure $ Corpus { _c_corpus = Map.elems r
+                , _c_hash   = hash $ List.map _d_hash $ Map.elems r }

 getNodeNgrams :: HasNodeError err
               => CorpusId
......
+{-# LANGUAGE NamedFieldPuns  #-}
 {-# LANGUAGE TemplateHaskell #-}
 module Gargantext.API.Node.Corpus.Searx where
......
@@ -21,7 +21,7 @@ import Data.Maybe (catMaybes)
 import Data.Text (Text)
 import Data.List (replicate, null)
 import Data.Aeson
-import Data.Swagger
+import Data.Swagger hiding (title, url)
 import GHC.Generics (Generic)
 import Servant
 import Test.QuickCheck (elements)
@@ -107,13 +107,15 @@ publicNodes = do
 -- http://localhost:8008/api/v1.0/node/23543/file/download<Paste>
 -- http://localhost:8000/images/Gargantextuel-212x300.jpg
 toPublicData :: Text -> (Node HyperdataFolder, [NodeId]) -> Maybe PublicData
-toPublicData base (n , mn) = PublicData <$> (hd ^? (_Just . hf_data . cf_title))
-                                        <*> (hd ^? (_Just . hf_data . cf_desc ))
-                                        <*> (Just $ url' mn) -- "images/Gargantextuel-212x300.jpg"
-                                        <*> (Just $ url' mn)
-                                        <*> Just (cs $ show $ utc2year (n^.node_date))
-                                        <*> (hd ^? (_Just . hf_data . cf_query))
-                                        <*> (hd ^? (_Just . hf_data . cf_authors))
+toPublicData base (n , mn) = do
+  title    <- (hd ^? (_Just . hf_data . cf_title))
+  abstract <- (hd ^? (_Just . hf_data . cf_desc ))
+  img      <- (Just $ url' mn) -- "images/Gargantextuel-212x300.jpg"
+  url      <- (Just $ url' mn)
+  date     <- Just (cs $ show $ utc2year (n^.node_date))
+  database <- (hd ^? (_Just . hf_data . cf_query))
+  author   <- (hd ^? (_Just . hf_data . cf_authors))
+  pure $ PublicData { .. }
   where
     hd = head
        $ filter (\(HyperdataField cd _ _) -> cd == JSON)
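The do-block keeps the semantics of the old Applicative chain: each '<-' runs in Maybe, so a missing field still makes the whole result Nothing, and 'PublicData { .. }' is then filled from the bindings. The same pattern in miniature (illustrative type, not from the codebase):

    {-# LANGUAGE RecordWildCards #-}

    data User = User { name :: String, age :: Int }

    -- Each '<-' can fail in Maybe; 'User { .. }' picks up the
    -- bindings 'name' and 'age' by field name.
    mkUser :: Maybe String -> Maybe Int -> Maybe User
    mkUser mName mAge = do
      name <- mName
      age  <- mAge
      pure $ User { .. }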
@@ -150,13 +152,13 @@ instance Arbitrary PublicData where
 defaultPublicData :: PublicData
 defaultPublicData =
-  PublicData "Title"
-             (foldl (<>) "" $ replicate 100 "abstract ")
-             "images/Gargantextuel-212x300.jpg"
-             "https://.."
-             "YY/MM/DD"
-             "database"
-             "Author"
+  PublicData { title    = "Title"
+             , abstract = foldl (<>) "" $ replicate 100 "abstract "
+             , img      = "images/Gargantextuel-212x300.jpg"
+             , url      = "https://.."
+             , date     = "YY/MM/DD"
+             , database = "database"
+             , author   = "Author" }
......
@@ -10,7 +10,6 @@ Portability : POSIX
 Count API part of Gargantext.
 -}
-{-# LANGUAGE RecordWildCards #-}
 {-# LANGUAGE TemplateHaskell #-}
 {-# LANGUAGE TypeOperators   #-}
 {-# LANGUAGE DeriveAnyClass  #-}
......
@@ -86,7 +86,11 @@ instance ToSchema TableQuery where
   declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "tq_")
 instance Arbitrary TableQuery where
-  arbitrary = elements [TableQuery 0 10 DateAsc Docs "electrodes"]
+  arbitrary = elements [TableQuery { tq_offset  = 0
+                                   , tq_limit   = 10
+                                   , tq_orderBy = DateAsc
+                                   , tq_view    = Docs
+                                   , tq_query   = "electrodes" }]
 tableApi :: NodeId -> GargServer TableApi
......
@@ -33,73 +33,74 @@ data School = School { school_shortName :: Text
 schools :: [School]
 schools = [ School
-              ("Mines Albi-Carmaux")
-              ("Mines Albi-Carmaux - École nationale supérieure des Mines d'Albi‐Carmaux")
-              ("469216")
+              { school_shortName = "Mines Albi-Carmaux"
+              , school_longName  = "Mines Albi-Carmaux - École nationale supérieure des Mines d'Albi‐Carmaux"
+              , school_id        = "469216" }
           , School
-              ("Mines Alès")
-              ("EMA - École des Mines d'Alès")
-              ("6279")
+              { school_shortName = "Mines Alès"
+              , school_longName  = "EMA - École des Mines d'Alès"
+              , school_id        = "6279" }
           , School
-              ("Mines Douai")
-              ("Mines Douai EMD - École des Mines de Douai")
-              ("224096")
+              { school_shortName = "Mines Douai"
+              , school_longName  = "Mines Douai EMD - École des Mines de Douai"
+              , school_id        = "224096" }
           , School
-              ("Mines Lille")
-              ("Mines Lille - École des Mines de Lille")
-              ("144103")
+              { school_shortName = "Mines Lille"
+              , school_longName  = "Mines Lille - École des Mines de Lille"
+              , school_id        = "144103" }
           , School
-              ("IMT Lille Douai")
-              ("IMT Lille Douai")
-              ("497330")
+              { school_shortName = "IMT Lille Douai"
+              , school_longName  = "IMT Lille Douai"
+              , school_id        = "497330" }
           , School
-              ("Mines Nantes")
-              ("Mines Nantes - Mines Nantes")
-              ("84538")
+              { school_shortName = "Mines Nantes"
+              , school_longName  = "Mines Nantes - Mines Nantes"
+              , school_id        = "84538" }
           , School
-              ("Télécom Bretagne")
-              ("Télécom Bretagne")
-              ("301262")
+              { school_shortName = "Télécom Bretagne"
+              , school_longName  = "Télécom Bretagne"
+              , school_id        = "301262" }
           , School
-              ("IMT Atlantique")
-              ("IMT Atlantique - IMT Atlantique Bretagne-Pays de la Loire")
-              ("481355")
+              { school_shortName = "IMT Atlantique"
+              , school_longName  = "IMT Atlantique - IMT Atlantique Bretagne-Pays de la Loire"
+              , school_id        = "481355" }
           , School
-              ("Mines Saint-Étienne")
-              ("Mines Saint-Étienne MSE - École des Mines de Saint-Étienne")
-              ("29212")
+              { school_shortName = "Mines Saint-Étienne"
+              , school_longName  = "Mines Saint-Étienne MSE - École des Mines de Saint-Étienne"
+              , school_id        = "29212" }
           , School
-              ("Télécom École de Management")
-              ("TEM - Télécom Ecole de Management")
-              ("301442")
+              { school_shortName = "Télécom École de Management"
+              , school_longName  = "TEM - Télécom Ecole de Management"
+              , school_id        = "301442" }
           , School
-              ("IMT Business School")
-              ("IMT Business School")
-              ("542824")
+              { school_shortName = "IMT Business School"
+              , school_longName  = "IMT Business School"
+              , school_id        = "542824" }
           , School
-              ("Télécom ParisTech")
-              ("Télécom ParisTech")
-              ("300362")
+              { school_shortName = "Télécom ParisTech"
+              , school_longName  = "Télécom ParisTech"
+              , school_id        = "300362" }
           , School
-              ("Télécom SudParis")
-              ("TSP - Télécom SudParis")
-              ("352124")
+              { school_shortName = "Télécom SudParis"
+              , school_longName  = "TSP - Télécom SudParis"
+              , school_id        = "352124" }
           , School
-              ("ARMINES")
-              ("ARMINES")
-              ("300362")
+              { school_shortName = "ARMINES"
+              , school_longName  = "ARMINES"
+              , school_id        = "300362" }
           , School
-              ("Eurecom")
-              ("Eurecom")
-              ("421532")
+              { school_shortName = "Eurecom"
+              , school_longName  = "Eurecom"
+              , school_id        = "421532" }
           , School
-              ("Mines ParisTech")
-              ("MINES ParisTech - École nationale supérieure des mines de Paris")
-              ("301492")
+              { school_shortName = "Mines ParisTech"
+              , school_longName  = "MINES ParisTech - École nationale supérieure des mines de Paris"
+              , school_id        = "301492" }
           ]
 mapIdSchool :: Map Text Text
-mapIdSchool = M.fromList $ Gargantext.Prelude.map (\(School n _ i) -> (i,n)) schools
+mapIdSchool = M.fromList $ Gargantext.Prelude.map
+  (\(School { school_shortName, school_id }) -> (school_id, school_shortName)) schools
 hal_data :: IO (Either Prelude.String (DV.Vector CsvHal))
 hal_data = do
......
@@ -75,37 +75,39 @@ data IMTUser = IMTUser
 -- | CSV instance
 instance FromNamedRecord IMTUser where
-  parseNamedRecord r = IMTUser <$> r .: "id"
-                               <*> r .: "entite"
-                               <*> r .: "mail"
-                               <*> r .: "nom"
-                               <*> r .: "prenom"
-                               <*> r .: "fonction"
-                               <*> r .: "fonction2"
-                               <*> r .: "tel"
-                               <*> r .: "fax"
-                               <*> r .: "service"
-                               <*> r .: "groupe"
-                               <*> r .: "entite2"
-                               <*> r .: "service2"
-                               <*> r .: "groupe2"
-                               <*> r .: "bureau"
-                               <*> r .: "url"
-                               <*> r .: "pservice"
-                               <*> r .: "pfonction"
-                               <*> r .: "afonction"
-                               <*> r .: "afonction2"
-                               <*> r .: "grprech"
-                               <*> r .: "appellation"
-                               <*> r .: "lieu"
-                               <*> r .: "aprecision"
-                               <*> r .: "atel"
-                               <*> r .: "sexe"
-                               <*> r .: "statut"
-                               <*> r .: "idutilentite"
-                               <*> r .: "actif"
-                               <*> r .: "idutilsiecoles"
-                               <*> r .: "date_modification"
+  parseNamedRecord r = do
+    id                <- r .: "id"
+    entite            <- r .: "entite"
+    mail              <- r .: "mail"
+    nom               <- r .: "nom"
+    prenom            <- r .: "prenom"
+    fonction          <- r .: "fonction"
+    fonction2         <- r .: "fonction2"
+    tel               <- r .: "tel"
+    fax               <- r .: "fax"
+    service           <- r .: "service"
+    groupe            <- r .: "groupe"
+    entite2           <- r .: "entite2"
+    service2          <- r .: "service2"
+    groupe2           <- r .: "groupe2"
+    bureau            <- r .: "bureau"
+    url               <- r .: "url"
+    pservice          <- r .: "pservice"
+    pfonction         <- r .: "pfonction"
+    afonction         <- r .: "afonction"
+    afonction2        <- r .: "afonction2"
+    grprech           <- r .: "grprech"
+    appellation       <- r .: "appellation"
+    lieu              <- r .: "lieu"
+    aprecision        <- r .: "aprecision"
+    atel              <- r .: "atel"
+    sexe              <- r .: "sexe"
+    statut            <- r .: "statut"
+    idutilentite      <- r .: "idutilentite"
+    actif             <- r .: "actif"
+    idutilsiecoles    <- r .: "idutilsiecoles"
+    date_modification <- r .: "date_modification"
+    pure $ IMTUser {..}
 headerCSVannuaire :: Header
 headerCSVannuaire =
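The same rewrite works for any cassava FromNamedRecord instance: bind each column with '.:' to a variable named exactly like the record field, then close with 'pure $ T { .. }'. A reduced sketch, assuming an illustrative two-field type:

    {-# LANGUAGE OverloadedStrings #-}
    {-# LANGUAGE RecordWildCards   #-}

    import Data.Csv (FromNamedRecord(..), (.:))
    import Data.Text (Text)

    data Row = Row { row_name :: Text, row_count :: Int }

    instance FromNamedRecord Row where
      parseNamedRecord r = do
        row_name  <- r .: "name"   -- variable names must match the fields
        row_count <- r .: "count"
        pure $ Row { .. }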
@@ -136,15 +138,54 @@ deserialiseFromFile' filepath = deserialise <$> BL.readFile filepath
 ------------------------------------------------------------------------
 imtUser2gargContact :: IMTUser -> HyperdataContact
-imtUser2gargContact (IMTUser id' entite' mail' nom' prenom' fonction' _fonction2' tel' _fax'
-                     service' _groupe' _entite2 _service2 _group2 bureau' url' _pservice' _pfonction' _afonction' _afonction2'
-                     _grprech' _appellation' lieu' _aprecision' _atel' _sexe' _statut' _idutilentite'
-                     _actif' _idutilsiecoles' date_modification')
-  = HyperdataContact (Just "IMT Annuaire") (Just qui) [ou] ((<>) <$> (fmap (\p -> p <> " ") prenom') <*> nom') entite' date_modification' Nothing Nothing
+--imtUser2gargContact (IMTUser id' entite' mail' nom' prenom' fonction' _fonction2' tel' _fax'
+--                     service' _groupe' _entite2 _service2 _group2 bureau' url' _pservice' _pfonction' _afonction' _afonction2'
+--                     _grprech' _appellation' lieu' _aprecision' _atel' _sexe' _statut' _idutilentite'
+--                     _actif' _idutilsiecoles' date_modification')
+--  = HyperdataContact (Just "IMT Annuaire") (Just qui) [ou] ((<>) <$> (fmap (\p -> p <> " ") prenom') <*> nom') entite' date_modification' Nothing Nothing
+imtUser2gargContact (IMTUser { id
+                             , entite
+                             , mail
+                             , nom
+                             , prenom
+                             , fonction
+                             , tel
+                             , service
+                             , bureau
+                             , url
+                             , lieu
+                             , date_modification }) =
+  HyperdataContact { _hc_bdd            = Just "IMT Annuaire"
+                   , _hc_who            = Just qui
+                   , _hc_where          = [ou]
+                   , _hc_title          = title
+                   , _hc_source         = entite
+                   , _hc_lastValidation = date_modification
+                   , _hc_uniqIdBdd      = Nothing
+                   , _hc_uniqId         = Nothing }
   where
-    qui = ContactWho id' prenom' nom' (catMaybes [service']) []
-    ou  = ContactWhere (toList entite') (toList service') fonction' bureau' (Just "France") lieu' contact Nothing Nothing
-    contact = Just $ ContactTouch mail' tel' url'
+    title = (<>) <$> (fmap (\p -> p <> " ") prenom) <*> nom
+    qui = ContactWho { _cw_id        = id
+                     , _cw_firstName = prenom
+                     , _cw_lastName  = nom
+                     , _cw_keywords  = catMaybes [service]
+                     , _cw_freetags  = [] }
+    ou  = ContactWhere { _cw_organization = toList entite
+                       , _cw_labTeamDepts = toList service
+                       , _cw_role         = fonction
+                       , _cw_office       = bureau
+                       , _cw_country      = Just "France"
+                       , _cw_city         = lieu
+                       , _cw_touch        = contact
+                       , _cw_entry        = Nothing
+                       , _cw_exit         = Nothing }
+    contact = Just $ ContactTouch { _ct_mail  = mail
+                                  , _ct_phone = tel
+                                  , _ct_url   = url }
     -- meta = ContactMetaData (Just "IMT annuaire") date_modification'
     toList Nothing  = []
     toList (Just x) = [x]
@@ -50,3 +50,6 @@ instance Arbitrary GraphMetric where
   arbitrary = elements [ minBound .. maxBound ]
 ------------------------------------------------------------------------
@@ -82,7 +82,9 @@ readNodeStoryEnv :: NodeStoryDir -> IO NodeStoryEnv
 readNodeStoryEnv nsd = do
   mvar <- nodeStoryVar nsd Nothing [0]
   saver <- mkNodeStorySaver nsd mvar
-  pure $ NodeStoryEnv mvar saver (nodeStoryVar nsd (Just mvar))
+  pure $ NodeStoryEnv { _nse_var    = mvar
+                      , _nse_saver  = saver
+                      , _nse_getter = nodeStoryVar nsd (Just mvar) }
 ------------------------------------------------------------------------
 mkNodeStorySaver :: NodeStoryDir -> MVar NodeListStory -> IO (IO ())
@@ -212,7 +214,9 @@ repoToNodeListStory (Repo _v s h) = NodeStory $ Map.fromList ns
     h' = ngramsStatePatch_migration h
     ns = List.map (\(n,ns')
                     -> (n, let hs = fromMaybe [] (Map.lookup n h') in
-                             Archive (List.length hs) ns' hs
+                             Archive { _a_version = List.length hs
+                                     , _a_state   = ns'
+                                     , _a_history = hs }
                        )
                   ) $ Map.toList s'
@@ -276,10 +280,17 @@ instance Serialise NgramsStatePatch'
 -- TODO Semigroup instance for unions
 -- TODO check this
 instance (Semigroup s, Semigroup p) => Semigroup (Archive s p) where
-  (<>) (Archive _v _s p) (Archive v' s' p') = Archive v' s' (p' <> p)
+  (<>) (Archive { _a_history = p }) (Archive { _a_version = v'
+                                             , _a_state = s'
+                                             , _a_history = p' }) =
+    Archive { _a_version = v'
+            , _a_state   = s'
+            , _a_history = p' <> p }
 instance Monoid (Archive NgramsState' NgramsStatePatch') where
-  mempty = Archive 0 mempty []
+  mempty = Archive { _a_version = 0
+                   , _a_state   = mempty
+                   , _a_history = [] }
 instance (FromJSON s, FromJSON p) => FromJSON (Archive s p) where
   parseJSON = genericParseJSON $ unPrefix "_a_"
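Written with named fields, the (<>) above reads directly: keep the right operand's version and state, and prepend the right history to the left one. Spelled out on illustrative values (a reading of the instance as it stands, which the TODO above still flags for review):

    -- let a1 = Archive { _a_version = 1, _a_state = s1, _a_history = h1 }
    --     a2 = Archive { _a_version = 2, _a_state = s2, _a_history = h2 }
    -- then a1 <> a2
    --   == Archive { _a_version = 2, _a_state = s2, _a_history = h2 <> h1 }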
@@ -293,13 +304,17 @@ initNodeStory :: Monoid s => NodeId -> NodeStory s p
 initNodeStory ni = NodeStory $ Map.singleton ni initArchive
 initArchive :: Monoid s => Archive s p
-initArchive = Archive 0 mempty []
+initArchive = Archive { _a_version = 0
+                      , _a_state   = mempty
+                      , _a_history = [] }
 initNodeListStoryMock :: NodeListStory
 initNodeListStoryMock = NodeStory $ Map.singleton nodeListId archive
   where
     nodeListId = 0
-    archive = Archive 0 ngramsTableMap []
+    archive = Archive { _a_version = 0
+                      , _a_state   = ngramsTableMap
+                      , _a_history = [] }
     ngramsTableMap = Map.singleton TableNgrams.NgramsTerms
                    $ Map.fromList
                      [ (n ^. ne_ngrams, ngramsElementToRepo n)
......
@@ -31,23 +31,23 @@ get la q ml = do
 toDoc' :: Lang -> HAL.Corpus -> IO HyperdataDocument
 toDoc' la (HAL.Corpus i t ab d s aus affs struct_id) = do
   (utctime, (pub_year, pub_month, pub_day)) <- Date.dateSplit la (maybe (Just "2019") Just d)
-  pure $ HyperdataDocument (Just "Hal")
-                           (Just $ pack $ show i)
-                           Nothing
-                           Nothing
-                           Nothing
-                           Nothing
-                           (Just $ intercalate " " t)
-                           (Just $ foldl (\x y -> x <> ", " <> y) "" aus)
-                           (Just $ foldl (\x y -> x <> ", " <> y) "" $ affs <> map (cs . show) struct_id)
-                           (Just $ maybe "Nothing" identity s)
-                           (Just $ intercalate " " ab)
-                           (fmap (pack . show) utctime)
-                           pub_year
-                           pub_month
-                           pub_day
-                           Nothing
-                           Nothing
-                           Nothing
-                           (Just $ (pack . show) la)
+  pure $ HyperdataDocument { _hd_bdd = Just "Hal"
+                           , _hd_doi = Just $ pack $ show i
+                           , _hd_url = Nothing
+                           , _hd_uniqId = Nothing
+                           , _hd_uniqIdBdd = Nothing
+                           , _hd_page = Nothing
+                           , _hd_title = Just $ intercalate " " t
+                           , _hd_authors = Just $ foldl (\x y -> x <> ", " <> y) "" aus
+                           , _hd_institutes = Just $ foldl (\x y -> x <> ", " <> y) "" $ affs <> map (cs . show) struct_id
+                           , _hd_source = Just $ maybe "Nothing" identity s
+                           , _hd_abstract = Just $ intercalate " " ab
+                           , _hd_publication_date = fmap (pack . show) utctime
+                           , _hd_publication_year = pub_year
+                           , _hd_publication_month = pub_month
+                           , _hd_publication_day = pub_day
+                           , _hd_publication_hour = Nothing
+                           , _hd_publication_minute = Nothing
+                           , _hd_publication_second = Nothing
+                           , _hd_language_iso2 = Just $ (pack . show) la }
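HyperdataDocument takes nineteen positional arguments, most of them Maybe values of the same type, so two adjacent arguments can be swapped without any type error; the named-field form makes such a slip visible at the call site. A toy illustration (type not from the codebase):

    data Doc = Doc { d_year :: Maybe Int, d_month :: Maybe Int }

    -- Compiles fine, but year and month are silently swapped:
    bad :: Doc
    bad = Doc (Just 3) (Just 2021)

    -- The named form states each value's role explicitly:
    good :: Doc
    good = Doc { d_year = Just 2021, d_month = Just 3 }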
@@ -39,22 +39,23 @@ toDoc' la docs' = do
 toDoc :: Lang -> ISTEX.Document -> IO HyperdataDocument
 toDoc la (ISTEX.Document i t a ab d s) = do
   (utctime, (pub_year, pub_month, pub_day)) <- Date.dateSplit la (maybe (Just "2019") (Just . pack . show) d)
-  pure $ HyperdataDocument (Just "Istex")
-                           (Just i)
-                           Nothing
-                           Nothing
-                           Nothing
-                           Nothing
-                           t
-                           (Just $ foldl (\x y -> x <> ", " <> y) "" (map ISTEX._author_name a))
-                           (Just $ foldl (\x y -> x <> ", " <> y) "" (concat $ (map ISTEX._author_affiliations) a))
-                           (Just $ foldl (\x y -> x <> ", " <> y) "" (catMaybes $ map ISTEX._source_title s))
-                           ab
-                           (fmap (pack . show) utctime)
-                           pub_year
-                           pub_month
-                           pub_day
-                           Nothing
-                           Nothing
-                           Nothing
-                           (Just $ (pack . show) la)
+  pure $ HyperdataDocument { _hd_bdd = Just "Istex"
+                           , _hd_doi = Just i
+                           , _hd_url = Nothing
+                           , _hd_uniqId = Nothing
+                           , _hd_uniqIdBdd = Nothing
+                           , _hd_page = Nothing
+                           , _hd_title = t
+                           , _hd_authors = Just $ foldl (\x y -> x <> ", " <> y) "" (map ISTEX._author_name a)
+                           , _hd_institutes = Just $ foldl (\x y -> x <> ", " <> y) "" (concat $ (map ISTEX._author_affiliations) a)
+                           , _hd_source = Just $ foldl (\x y -> x <> ", " <> y) "" (catMaybes $ map ISTEX._source_title s)
+                           , _hd_abstract = ab
+                           , _hd_publication_date = fmap (pack . show) utctime
+                           , _hd_publication_year = pub_year
+                           , _hd_publication_month = pub_month
+                           , _hd_publication_day = pub_day
+                           , _hd_publication_hour = Nothing
+                           , _hd_publication_minute = Nothing
+                           , _hd_publication_second = Nothing
+                           , _hd_language_iso2 = Just $ (pack . show) la }
@@ -38,25 +38,25 @@ get q l = either (\e -> panic $ "CRAWL: PubMed" <> e) (map (toDoc EN))
 toDoc :: Lang -> PubMedDoc.PubMed -> HyperdataDocument
 toDoc l (PubMedDoc.PubMed (PubMedDoc.PubMedArticle t j as aus)
                           (PubMedDoc.PubMedDate a y m d)
-         ) = HyperdataDocument (Just "PubMed")
-                               Nothing
-                               Nothing
-                               Nothing
-                               Nothing
-                               Nothing
-                               t
-                               (authors aus)
-                               (institutes aus)
-                               j
-                               (abstract as)
-                               (Just $ Text.pack $ show a)
-                               (Just $ fromIntegral y)
-                               (Just m)
-                               (Just d)
-                               Nothing
-                               Nothing
-                               Nothing
-                               (Just $ (Text.pack . show) l)
+         ) = HyperdataDocument { _hd_bdd = Just "PubMed"
+                               , _hd_doi = Nothing
+                               , _hd_url = Nothing
+                               , _hd_uniqId = Nothing
+                               , _hd_uniqIdBdd = Nothing
+                               , _hd_page = Nothing
+                               , _hd_title = t
+                               , _hd_authors = authors aus
+                               , _hd_institutes = institutes aus
+                               , _hd_source = j
+                               , _hd_abstract = abstract as
+                               , _hd_publication_date = Just $ Text.pack $ show a
+                               , _hd_publication_year = Just $ fromIntegral y
+                               , _hd_publication_month = Just m
+                               , _hd_publication_day = Just d
+                               , _hd_publication_hour = Nothing
+                               , _hd_publication_minute = Nothing
+                               , _hd_publication_second = Nothing
+                               , _hd_language_iso2 = Just $ (Text.pack . show) l }
   where
     authors :: Maybe [PubMedDoc.Author] -> Maybe Text
     authors aus' = case aus' of
......
@@ -122,25 +122,25 @@ toDoc ff d = do
   (utcTime, (pub_year, pub_month, pub_day)) <- Date.dateSplit lang dateToParse
-  pure $ HyperdataDocument (Just $ DT.pack $ show ff)
-                           (lookup "doi" d)
-                           (lookup "URL" d)
-                           Nothing
-                           Nothing
-                           Nothing
-                           (lookup "title" d)
-                           Nothing
-                           (lookup "authors" d)
-                           (lookup "source" d)
-                           (lookup "abstract" d)
-                           (fmap (DT.pack . show) utcTime)
-                           (pub_year)
-                           (pub_month)
-                           (pub_day)
-                           Nothing
-                           Nothing
-                           Nothing
-                           (Just $ (DT.pack . show) lang)
+  pure $ HyperdataDocument { _hd_bdd = Just $ DT.pack $ show ff
+                           , _hd_doi = lookup "doi" d
+                           , _hd_url = lookup "URL" d
+                           , _hd_uniqId = Nothing
+                           , _hd_uniqIdBdd = Nothing
+                           , _hd_page = Nothing
+                           , _hd_title = lookup "title" d
+                           , _hd_authors = Nothing
+                           , _hd_institutes = lookup "authors" d
+                           , _hd_source = lookup "source" d
+                           , _hd_abstract = lookup "abstract" d
+                           , _hd_publication_date = fmap (DT.pack . show) utcTime
+                           , _hd_publication_year = pub_year
+                           , _hd_publication_month = pub_month
+                           , _hd_publication_day = pub_day
+                           , _hd_publication_hour = Nothing
+                           , _hd_publication_minute = Nothing
+                           , _hd_publication_second = Nothing
+                           , _hd_language_iso2 = Just $ (DT.pack . show) lang }
 enrichWith :: FileFormat
            -> (a, [[[(DB.ByteString, DB.ByteString)]]]) -> (a, [[(Text, Text)]])
......
@@ -62,34 +62,39 @@ data CsvGargV3 = CsvGargV3
 -- | Doc 2 HyperdataDocument
 toDoc :: CsvGargV3 -> HyperdataDocument
 toDoc (CsvGargV3 did dt _ dpy dpm dpd dab dau) =
-  HyperdataDocument (Just "CSV")
-                    (Just . pack . show $ did)
-                    Nothing
-                    Nothing
-                    Nothing
-                    Nothing
-                    (Just dt)
-                    Nothing
-                    (Just dau)
-                    (Just dab)
-                    (Nothing)
-                    Nothing
-                    (Just dpy)
-                    (Just dpm)
-                    (Just dpd)
-                    Nothing
-                    Nothing
-                    Nothing
-                    Nothing
+  HyperdataDocument { _hd_bdd = Just "CSV"
+                    , _hd_doi = Just . pack . show $ did
+                    , _hd_url = Nothing
+                    , _hd_uniqId = Nothing
+                    , _hd_uniqIdBdd = Nothing
+                    , _hd_page = Nothing
+                    , _hd_title = Just dt
+                    , _hd_authors = Nothing
+                    , _hd_institutes = Just dau
+                    , _hd_source = Just dab
+                    , _hd_abstract = Nothing
+                    , _hd_publication_date = Nothing
+                    , _hd_publication_year = Just dpy
+                    , _hd_publication_month = Just dpm
+                    , _hd_publication_day = Just dpd
+                    , _hd_publication_hour = Nothing
+                    , _hd_publication_minute = Nothing
+                    , _hd_publication_second = Nothing
+                    , _hd_language_iso2 = Nothing }
 ---------------------------------------------------------------
 -- | Types Conversions
 toDocs :: Vector CsvDoc -> [CsvGargV3]
 toDocs v = V.toList
-         $ V.zipWith (\nId (CsvDoc t s mPy pm pd abst auth)
-                       -> CsvGargV3 nId t s
-                          (fromMIntOrDec defaultYear mPy) (fromMaybe defaultMonth pm) (fromMaybe defaultDay pd)
-                          abst auth )
+         $ V.zipWith (\nId (CsvDoc { .. }) -- (CsvDoc t s mPy pm pd abst auth)
+                       -> CsvGargV3 { d_docId = nId
+                                    , d_title = csv_title
+                                    , d_source = csv_source
+                                    , d_publication_year = fromMIntOrDec defaultYear csv_publication_year
+                                    , d_publication_month = fromMaybe defaultMonth csv_publication_month
+                                    , d_publication_day = fromMaybe defaultDay csv_publication_day
+                                    , d_abstract = csv_abstract
+                                    , d_authors = csv_authors })
                      (V.enumFromN 1 (V.length v'')) v''
   where
     v'' = V.foldl (\v' sep -> V.concatMap (splitDoc (docsSize v') sep) v') v seps
@@ -99,7 +104,13 @@ toDocs v = V.toList
 fromDocs :: Vector CsvGargV3 -> Vector CsvDoc
 fromDocs docs = V.map fromDocs' docs
   where
-    fromDocs' (CsvGargV3 _ t s py pm pd abst auth) = (CsvDoc t s (Just $ IntOrDec py) (Just pm) (Just pd) abst auth)
+    fromDocs' (CsvGargV3 { .. }) = CsvDoc { csv_title = d_title
+                                          , csv_source = d_source
+                                          , csv_publication_year = Just $ IntOrDec d_publication_year
+                                          , csv_publication_month = Just d_publication_month
+                                          , csv_publication_day = Just d_publication_day
+                                          , csv_abstract = d_abstract
+                                          , csv_authors = d_authors }
 ---------------------------------------------------------------
 -- | Split a document in its context
@@ -117,19 +128,17 @@ splitDoc m splt doc = let docSize = (length $ csv_abstract doc) in
         V.fromList [doc]
   where
     splitDoc' :: SplitContext -> CsvDoc -> Vector CsvDoc
-    splitDoc' contextSize (CsvDoc t s py pm pd abst auth) = V.fromList $ [firstDoc] <> nextDocs
+    splitDoc' contextSize (CsvDoc { .. }) = V.fromList $ [firstDoc] <> nextDocs
       where
-        firstDoc = CsvDoc t s py pm pd firstAbstract auth
+        firstDoc = CsvDoc { csv_abstract = firstAbstract, .. }
         firstAbstract = head' "splitDoc'1" abstracts
-        nextDocs = map (\txt -> CsvDoc
-                                  (head' "splitDoc'2" $ sentences txt)
-                                  s py pm pd
-                                  (unsentences $ tail' "splitDoc'1" $ sentences txt)
-                                  auth
+        nextDocs = map (\txt -> CsvDoc { csv_title = head' "splitDoc'2" $ sentences txt
+                                       , csv_abstract = unsentences $ tail' "splitDoc'1" $ sentences txt
+                                       , .. }
                        ) (tail' "splitDoc'2" abstracts)
-        abstracts = (splitBy $ contextSize) abst
+        abstracts = (splitBy $ contextSize) csv_abstract
 ---------------------------------------------------------------
 ---------------------------------------------------------------
@@ -174,33 +183,35 @@ data CsvDoc = CsvDoc
   deriving (Show)
 instance FromNamedRecord CsvDoc where
-  parseNamedRecord r = CsvDoc <$> (r .: "title" <|> r .: "Title")
-                              <*> (r .: "source" <|> r .: "Source")
-                              <*> (r .: "publication_year" <|> r .: "Publication Year")
-                              <*> (r .: "publication_month" <|> r .: "Publication Month")
-                              <*> (r .: "publication_day" <|> r .: "Publication Day")
-                              <*> (r .: "abstract" <|> r .: "Abstract")
-                              <*> (r .: "authors" <|> r .: "Authors")
+  parseNamedRecord r = do
+    csv_title             <- r .: "title"             <|> r .: "Title"
+    csv_source            <- r .: "source"            <|> r .: "Source"
+    csv_publication_year  <- r .: "publication_year"  <|> r .: "Publication Year"
+    csv_publication_month <- r .: "publication_month" <|> r .: "Publication Month"
+    csv_publication_day   <- r .: "publication_day"   <|> r .: "Publication Day"
+    csv_abstract          <- r .: "abstract"          <|> r .: "Abstract"
+    csv_authors           <- r .: "authors"           <|> r .: "Authors"
+    pure $ CsvDoc { .. }
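Note that '(<|>)' still tries both header spellings per column before the result is bound; only the plumbing to the constructor changed. The idiom in isolation (sketch with an illustrative one-field type):

    {-# LANGUAGE OverloadedStrings #-}
    {-# LANGUAGE RecordWildCards   #-}

    import Control.Applicative ((<|>))
    import Data.Csv (FromNamedRecord(..), (.:))
    import Data.Text (Text)

    newtype Titled = Titled { t_title :: Text }

    instance FromNamedRecord Titled where
      parseNamedRecord r = do
        -- accept either a lowercase or a capitalized header
        t_title <- r .: "title" <|> r .: "Title"
        pure $ Titled { .. }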
 instance ToNamedRecord CsvDoc where
-  toNamedRecord (CsvDoc t s py pm pd abst aut) =
-    namedRecord [ "title"             .= t
-                , "source"            .= s
-                , "publication_year"  .= py
-                , "publication_month" .= pm
-                , "publication_day"   .= pd
-                , "abstract"          .= abst
-                , "authors"           .= aut
+  toNamedRecord (CsvDoc{ .. }) =
+    namedRecord [ "title"             .= csv_title
+                , "source"            .= csv_source
+                , "publication_year"  .= csv_publication_year
+                , "publication_month" .= csv_publication_month
+                , "publication_day"   .= csv_publication_day
+                , "abstract"          .= csv_abstract
+                , "authors"           .= csv_authors
                 ]
 hyperdataDocument2csvDoc :: HyperdataDocument -> CsvDoc
-hyperdataDocument2csvDoc h = CsvDoc (m $ _hd_title h)
-                                    (m $ _hd_source h)
-                                    (Just $ IntOrDec $ mI $ _hd_publication_year h)
-                                    (Just $ mI $ _hd_publication_month h)
-                                    (Just $ mI $ _hd_publication_day h)
-                                    (m $ _hd_abstract h)
-                                    (m $ _hd_authors h)
+hyperdataDocument2csvDoc h = CsvDoc { csv_title = m $ _hd_title h
+                                    , csv_source = m $ _hd_source h
+                                    , csv_publication_year = Just $ IntOrDec $ mI $ _hd_publication_year h
+                                    , csv_publication_month = Just $ mI $ _hd_publication_month h
+                                    , csv_publication_day = Just $ mI $ _hd_publication_day h
+                                    , csv_abstract = m $ _hd_abstract h
+                                    , csv_authors = m $ _hd_authors h }
   where
     m = maybe "" identity
@@ -300,110 +311,109 @@ data CsvHal = CsvHal
   deriving (Show)
 instance FromNamedRecord CsvHal where
-  parseNamedRecord r = CsvHal <$> r .: "title"
-                              <*> r .: "source"
-                              <*> r .: "publication_year"
-                              <*> r .: "publication_month"
-                              <*> r .: "publication_day"
-                              <*> r .: "abstract"
-                              <*> r .: "authors"
-
-                              <*> r .: "url"
-                              <*> r .: "isbn_s"
-                              <*> r .: "issue_s"
-                              <*> r .: "journalPublisher_s"
-                              <*> r .: "language_s"
-
-                              <*> r .: "doiId_s"
-                              <*> r .: "authId_i"
-                              <*> r .: "instStructId_i"
-                              <*> r .: "deptStructId_i"
-                              <*> r .: "labStructId_i"
-
-                              <*> r .: "rteamStructId_i"
-                              <*> r .: "docType_s"
+  parseNamedRecord r = do
+    csvHal_title              <- r .: "title"
+    csvHal_source             <- r .: "source"
+    csvHal_publication_year   <- r .: "publication_year"
+    csvHal_publication_month  <- r .: "publication_month"
+    csvHal_publication_day    <- r .: "publication_day"
+    csvHal_abstract           <- r .: "abstract"
+    csvHal_authors            <- r .: "authors"
+    csvHal_url                <- r .: "url"
+    csvHal_isbn_s             <- r .: "isbn_s"
+    csvHal_issue_s            <- r .: "issue_s"
+    csvHal_journalPublisher_s <- r .: "journalPublisher_s"
+    csvHal_language_s         <- r .: "language_s"
+    csvHal_doiId_s            <- r .: "doiId_s"
+    csvHal_authId_i           <- r .: "authId_i"
+    csvHal_instStructId_i     <- r .: "instStructId_i"
+    csvHal_deptStructId_i     <- r .: "deptStructId_i"
+    csvHal_labStructId_i      <- r .: "labStructId_i"
+    csvHal_rteamStructId_i    <- r .: "rteamStructId_i"
+    csvHal_docType_s          <- r .: "docType_s"
+    pure $ CsvHal { .. }
 instance ToNamedRecord CsvHal where
-  toNamedRecord (CsvHal t s py pm pd abst aut url isbn iss j lang doi auth inst dept lab team doct) =
-    namedRecord [ "title"  .= t
-                , "source" .= s
-
-                , "publication_year"  .= py
-                , "publication_month" .= pm
-                , "publication_day"   .= pd
-
-                , "abstract" .= abst
-                , "authors"  .= aut
-
-                , "url"                .= url
-                , "isbn_s"             .= isbn
-                , "issue_s"            .= iss
-                , "journalPublisher_s" .= j
-                , "language_s"         .= lang
-
-                , "doiId_s"        .= doi
-                , "authId_i"       .= auth
-                , "instStructId_i" .= inst
-                , "deptStructId_i" .= dept
-                , "labStructId_i"  .= lab
-
-                , "rteamStructId_i" .= team
-                , "docType_s"       .= doct
+  --toNamedRecord (CsvHal t s py pm pd abst aut url isbn iss j lang doi auth inst dept lab team doct) =
+  toNamedRecord (CsvHal { .. }) =
+    namedRecord [ "title"  .= csvHal_title
+                , "source" .= csvHal_source
+
+                , "publication_year"  .= csvHal_publication_year
+                , "publication_month" .= csvHal_publication_month
+                , "publication_day"   .= csvHal_publication_day
+
+                , "abstract" .= csvHal_abstract
+                , "authors"  .= csvHal_authors
+
+                , "url"                .= csvHal_url
+                , "isbn_s"             .= csvHal_isbn_s
+                , "issue_s"            .= csvHal_issue_s
+                , "journalPublisher_s" .= csvHal_journalPublisher_s
+                , "language_s"         .= csvHal_language_s
+
+                , "doiId_s"        .= csvHal_doiId_s
+                , "authId_i"       .= csvHal_authId_i
+                , "instStructId_i" .= csvHal_instStructId_i
+                , "deptStructId_i" .= csvHal_deptStructId_i
+                , "labStructId_i"  .= csvHal_labStructId_i
+
+                , "rteamStructId_i" .= csvHal_rteamStructId_i
+                , "docType_s"       .= csvHal_docType_s
                 ]
 csvHal2doc :: CsvHal -> HyperdataDocument
-csvHal2doc (CsvHal title source
-                   pub_year pub_month pub_day
-                   abstract authors
-                   url _ _ _ _
-                   doi _ inst _ _
-                   _ _ ) = HyperdataDocument (Just "CsvHal")
-                                             (Just doi)
-                                             (Just url)
-                                             Nothing
-                                             Nothing
-                                             Nothing
-                                             (Just title)
-                                             (Just authors)
-                                             (Just inst)
-                                             (Just source)
-                                             (Just abstract)
-                                             (Just $ pack . show $ jour pub_year pub_month pub_day)
-                                             (Just $ fromIntegral pub_year)
-                                             (Just pub_month)
-                                             (Just pub_day)
-                                             Nothing
-                                             Nothing
-                                             Nothing
-                                             Nothing
+csvHal2doc (CsvHal { .. }) =
+  HyperdataDocument { _hd_bdd = Just "CsvHal"
+                    , _hd_doi = Just csvHal_doiId_s
+                    , _hd_url = Just csvHal_url
+                    , _hd_uniqId = Nothing
+                    , _hd_uniqIdBdd = Nothing
+                    , _hd_page = Nothing
+                    , _hd_title = Just csvHal_title
+                    , _hd_authors = Just csvHal_authors
+                    , _hd_institutes = Just csvHal_instStructId_i
+                    , _hd_source = Just csvHal_source
+                    , _hd_abstract = Just csvHal_abstract
+                    , _hd_publication_date = Just $ pack . show $ jour csvHal_publication_year
+                                                                       csvHal_publication_month
+                                                                       csvHal_publication_day
+                    , _hd_publication_year = Just $ fromIntegral csvHal_publication_year
+                    , _hd_publication_month = Just csvHal_publication_month
+                    , _hd_publication_day = Just csvHal_publication_day
+                    , _hd_publication_hour = Nothing
+                    , _hd_publication_minute = Nothing
+                    , _hd_publication_second = Nothing
+                    , _hd_language_iso2 = Nothing }
 csv2doc :: CsvDoc -> HyperdataDocument
-csv2doc (CsvDoc title source
-                mPubYear mPubMonth mPubDay
-                abstract authors ) = HyperdataDocument (Just "CsvHal")
-                                                       Nothing
-                                                       Nothing
-                                                       Nothing
-                                                       Nothing
-                                                       Nothing
-                                                       (Just title)
-                                                       (Just authors)
-                                                       Nothing
-                                                       (Just source)
-                                                       (Just abstract)
-                                                       (Just $ pack . show $ jour (fromIntegral pubYear) pubMonth pubDay)
-                                                       (Just pubYear)
-                                                       (Just pubMonth)
-                                                       (Just pubDay)
-                                                       Nothing
-                                                       Nothing
-                                                       Nothing
-                                                       Nothing
+csv2doc (CsvDoc { .. })
+  = HyperdataDocument { _hd_bdd = Just "CsvHal"
+                      , _hd_doi = Nothing
+                      , _hd_url = Nothing
+                      , _hd_uniqId = Nothing
+                      , _hd_uniqIdBdd = Nothing
+                      , _hd_page = Nothing
+                      , _hd_title = Just csv_title
+                      , _hd_authors = Just csv_authors
+                      , _hd_institutes = Nothing
+                      , _hd_source = Just csv_source
+                      , _hd_abstract = Just csv_abstract
+                      , _hd_publication_date = Just $ pack . show $ jour (fromIntegral pubYear)
+                                                                         pubMonth
+                                                                         pubDay
+                      , _hd_publication_year = Just pubYear
+                      , _hd_publication_month = Just pubMonth
+                      , _hd_publication_day = Just pubDay
+                      , _hd_publication_hour = Nothing
+                      , _hd_publication_minute = Nothing
+                      , _hd_publication_second = Nothing
+                      , _hd_language_iso2 = Nothing }
   where
-    pubYear  = fromMIntOrDec defaultYear mPubYear
-    pubMonth = fromMaybe defaultMonth mPubMonth
-    pubDay   = fromMaybe defaultDay mPubDay
+    pubYear  = fromMIntOrDec defaultYear csv_publication_year
+    pubMonth = fromMaybe defaultMonth csv_publication_month
+    pubDay   = fromMaybe defaultDay csv_publication_day
 ------------------------------------------------------------------------
 parseHal :: FilePath -> IO (Either Prelude.String [HyperdataDocument])
@@ -438,14 +448,16 @@ data Csv' = Csv'
 instance FromNamedRecord Csv' where
-  parseNamedRecord r = Csv' <$> r .: "title"
-                            <*> r .: "source"
-                            <*> r .: "publication_year"
-                            <*> r .: "publication_month"
-                            <*> r .: "publication_day"
-                            <*> r .: "abstract"
-                            <*> r .: "authors"
-                            <*> r .: "weight"
+  parseNamedRecord r = do
+    csv'_title             <- r .: "title"
+    csv'_source            <- r .: "source"
+    csv'_publication_year  <- r .: "publication_year"
+    csv'_publication_month <- r .: "publication_month"
+    csv'_publication_day   <- r .: "publication_day"
+    csv'_abstract          <- r .: "abstract"
+    csv'_authors           <- r .: "authors"
+    csv'_weight            <- r .: "weight"
+    pure $ Csv' { .. }
 readWeightedCsv :: FilePath -> IO (Header, Vector Csv')
 readWeightedCsv fp =
......
@@ -75,18 +75,26 @@ instance ToJSON GrandDebatReference
 instance ToHyperdataDocument GrandDebatReference
   where
-    toHyperdataDocument (GrandDebatReference id' _ref title'
-                                             _createdAt' publishedAt' _updatedAt
-                                             _trashed _trashedStatus
-                                             _authorId authorType' authorZipCode'
-                                             responses') =
-      HyperdataDocument (Just "GrandDebat") id'
-                        Nothing Nothing Nothing Nothing
-                        title' authorType' authorType' authorZipCode'
-                        (toAbstract <$> responses')
-                        publishedAt'
-                        Nothing Nothing Nothing Nothing Nothing Nothing
-                        (Just $ Text.pack $ show FR)
+    toHyperdataDocument (GrandDebatReference { id, title, publishedAt, authorType, authorZipCode, responses }) =
+      HyperdataDocument { _hd_bdd = Just "GrandDebat"
+                        , _hd_doi = id
+                        , _hd_url = Nothing
+                        , _hd_uniqId = Nothing
+                        , _hd_uniqIdBdd = Nothing
+                        , _hd_page = Nothing
+                        , _hd_title = title
+                        , _hd_authors = authorType
+                        , _hd_institutes = authorType
+                        , _hd_source = authorZipCode
+                        , _hd_abstract = toAbstract <$> responses
+                        , _hd_publication_date = publishedAt
+                        , _hd_publication_year = Nothing
+                        , _hd_publication_month = Nothing
+                        , _hd_publication_day = Nothing
+                        , _hd_publication_hour = Nothing
+                        , _hd_publication_minute = Nothing
+                        , _hd_publication_second = Nothing
+                        , _hd_language_iso2 = Just $ Text.pack $ show FR }
       where
        toAbstract = (Text.intercalate " . ") . ((filter (/= "")) . (map toSentence))
        toSentence (GrandDebatResponse _id _qtitle _qvalue r) = case r of
......
@@ -119,17 +119,24 @@ unbound _ _ = Nothing
 bind2doc :: Lang -> [BindingValue] -> HyperdataDocument
 bind2doc l [ link, date, langDoc, authors, _source, publisher, title, abstract ] =
-  HyperdataDocument (Just "Isidore")
-                    Nothing
-                    (unbound l link)
-                    Nothing Nothing Nothing
-                    (unbound l title)
-                    (unbound l authors)
-                    Nothing
-                    (unbound l publisher)
-                    (unbound l abstract)
-                    (unbound l date)
-                    Nothing Nothing Nothing Nothing Nothing Nothing
-                    (unbound l langDoc)
+  HyperdataDocument { _hd_bdd = Just "Isidore"
+                    , _hd_doi = Nothing
+                    , _hd_url = unbound l link
+                    , _hd_uniqId = Nothing
+                    , _hd_uniqIdBdd = Nothing
+                    , _hd_page = Nothing
+                    , _hd_title = unbound l title
+                    , _hd_authors = unbound l authors
+                    , _hd_institutes = Nothing
+                    , _hd_source = unbound l publisher
+                    , _hd_abstract = unbound l abstract
+                    , _hd_publication_date = unbound l date
+                    , _hd_publication_year = Nothing
+                    , _hd_publication_month = Nothing
+                    , _hd_publication_day = Nothing
+                    , _hd_publication_hour = Nothing
+                    , _hd_publication_minute = Nothing
+                    , _hd_publication_second = Nothing
+                    , _hd_language_iso2 = unbound l langDoc }
 bind2doc _ _ = undefined
@@ -48,8 +48,14 @@ json2csv fin fout = do
   writeFile fout (headerCsvGargV3, fromList $ map patent2csvDoc patents)
 patent2csvDoc :: Patent -> CsvDoc
-patent2csvDoc (Patent title abstract year _) =
-  CsvDoc title "Source" (Just $ read (unpack year)) (Just 1) (Just 1) abstract "Authors"
+patent2csvDoc (Patent { .. }) =
+  CsvDoc { csv_title = _patent_title
+         , csv_source = "Source"
+         , csv_publication_year = Just $ read (unpack _patent_year)
+         , csv_publication_month = Just 1
+         , csv_publication_day = Just 1
+         , csv_abstract = _patent_abstract
+         , csv_authors = "Authors" }
......
@@ -70,3 +70,5 @@ onField :: ByteString -> (ByteString -> [(ByteString, ByteString)])
                       -> [(ByteString, ByteString)] -> [(ByteString, ByteString)]
 onField k f m = m <> ( maybe [] f (lookup k m) )
@@ -68,5 +68,3 @@ fixFields ns = map (first fixFields'') ns
       | champs == "UR" = "url"
       | champs == "N2" = abstract
       | otherwise      = champs
@@ -95,7 +95,9 @@ parsePage =
     revision <-
       parseRevision
     many_ $ ignoreAnyTreeContent
-    return $ Page Mediawiki title revision
+    return $ Page { _markupFormat = Mediawiki
+                  , _title = title
+                  , _text = revision }
 parseMediawiki :: MonadThrow m => ConduitT Event Page m (Maybe ())
 parseMediawiki =
@@ -108,7 +110,7 @@ mediawikiPageToPlain :: Page -> IO Page
 mediawikiPageToPlain page = do
   title <- mediaToPlain $ _title page
   revision <- mediaToPlain $ _text page
-  return $ Page Plaintext title revision
+  return $ Page { _markupFormat = Plaintext, _title = title, _text = revision }
   where mediaToPlain media =
           case media of
             (Nothing) -> return Nothing
......
@@ -86,7 +86,7 @@ buildNgramsLists user uCid mCid mfslw gp = do
 data MapListSize = MapListSize { unMapListSize :: !Int }
-buildNgramsOthersList ::( HasNodeError err
+buildNgramsOthersList :: ( HasNodeError err
                          , CmdM env err m
                          , HasNodeStory env err m
                          , HasTreeError err
......
@@ -72,7 +72,7 @@ groupWith :: GroupParams
           -> NgramsTerm
           -> NgramsTerm
 groupWith GroupIdentity t = identity t
-groupWith (GroupParams l _m _n _) t =
+groupWith (GroupParams { unGroupParams_lang = l }) t =
   NgramsTerm
   $ Text.intercalate " "
   $ map (stem l)
@@ -86,7 +86,7 @@ groupWith (GroupParams l _m _n _) t =
   $ unNgramsTerm t
 -- | This lemmatization group done with CoreNLP algo (or others)
-groupWith (GroupWithPosTag _ _ m) t =
+groupWith (GroupWithPosTag { _gwl_map = m }) t =
   case HashMap.lookup (unNgramsTerm t) m of
     Nothing -> clean t
     Just t' -> clean $ NgramsTerm t'
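Matching only the field that is used replaces the underscore padding of the positional patterns, and the pattern keeps compiling if the constructor later gains fields. A sketch of the idiom (illustrative type, not from the codebase):

    data Params = Params { p_lang :: String, p_min :: Int, p_max :: Int }

    -- Positional: every field must be spelled out or wildcarded.
    langOf :: Params -> String
    langOf (Params l _ _) = l

    -- Named: mention only what you use.
    langOf' :: Params -> String
    langOf' (Params { p_lang = l }) = l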
......
@@ -70,9 +70,11 @@ Children are not modified in this specific case.
 -- | Old list get -1 score
 -- New list get +1 score
 -- Hence others lists lay around 0 score
-addScorePatch fl (t, (NgramsPatch children' (Patch.Replace old_list new_list))) =
+addScorePatch fl (t, (NgramsPatch { _patch_children
+                                  , _patch_list = Patch.Replace old_list new_list })) =
   -- Adding new 'Children' score
-  addScorePatch fl' (t, NgramsPatch children' Patch.Keep)
+  addScorePatch fl' (t, NgramsPatch { _patch_children
+                                    , _patch_list = Patch.Keep })
   where
     -- | Adding new 'ListType' score
     fl' = fl & flc_scores . at t %~ (score fls_listType old_list (-1))
@@ -80,8 +82,9 @@ addScorePatch fl (t, (NgramsPatch children' (Patch.Replace old_list new_list)))
            & flc_cont %~ (HashMap.delete t)
 -- | Patching existing Ngrams with children
-addScorePatch fl (p, NgramsPatch children' Patch.Keep) =
-  foldl' addChild fl $ patchMSet_toList children'
+addScorePatch fl (p, NgramsPatch { _patch_children
+                                 , _patch_list = Patch.Keep }) =
+  foldl' addChild fl $ patchMSet_toList _patch_children
   where
     -- | Adding a child
     addChild fl' (t, Patch.Replace Nothing (Just _)) = doLink ( 1) p t fl'
@@ -92,20 +95,24 @@ addScorePatch fl (p, NgramsPatch children' Patch.Keep) =
     addChild fl' _ = fl'
 -- | Inserting a new Ngrams
-addScorePatch fl (t, NgramsReplace Nothing (Just nre)) =
+addScorePatch fl (t, NgramsReplace { _patch_old = Nothing
+                                   , _patch_new = Just nre }) =
   childrenScore 1 t (nre ^. nre_children)
   $ fl & flc_scores . at t %~ (score fls_listType $ nre ^. nre_list) 1
       & flc_cont %~ (HashMap.delete t)
-addScorePatch fl (t, NgramsReplace (Just old_nre) maybe_new_nre) =
+addScorePatch fl (t, NgramsReplace { _patch_old = Just old_nre
+                                   , _patch_new = maybe_new_nre }) =
   let fl' = childrenScore (-1) t (old_nre ^. nre_children)
           $ fl & flc_scores . at t %~ (score fls_listType $ old_nre ^. nre_list) (-1)
               & flc_cont %~ (HashMap.delete t)
   in case maybe_new_nre of
       Nothing      -> fl'
-      Just new_nre -> addScorePatch fl' (t, NgramsReplace Nothing (Just new_nre))
+      Just new_nre -> addScorePatch fl' (t, NgramsReplace { _patch_old = Nothing
+                                                          , _patch_new = Just new_nre })
-addScorePatch fl (_, NgramsReplace Nothing Nothing) = fl
+addScorePatch fl (_, NgramsReplace { _patch_old = Nothing
+                                   , _patch_new = Nothing }) = fl
 -------------------------------------------------------------------------------
 -- | Utils
......
@@ -13,8 +13,6 @@ Starting from this model, a specific Gargantext engine will be made
 (using more metrics scores/features).
 -}
-{-# LANGUAGE NamedFieldPuns #-}
 module Gargantext.Core.Text.Search where
 import Data.SearchEngine
......
@@ -82,11 +82,11 @@ makeLenses ''TermType
 --extractTerms :: Traversable t => TermType Lang -> t Text -> IO (t [Terms])
 extractTerms :: TermType Lang -> [Text] -> IO [[Terms]]
-extractTerms (Unsupervised l n s m) xs = mapM (terms (Unsupervised l n s (Just m'))) xs
+extractTerms (Unsupervised {..}) xs = mapM (terms (Unsupervised { _tt_model = Just m', .. })) xs
   where
-    m' = case m of
+    m' = case _tt_model of
       Just m''-> m''
-      Nothing -> newTries n (Text.intercalate " " xs)
+      Nothing -> newTries _tt_windowSize (Text.intercalate " " xs)
 extractTerms termTypeLang xs = mapM (terms termTypeLang) xs
@@ -96,11 +96,12 @@ withLang :: (Foldable t, Functor t, HasText h)
          => TermType Lang
          -> t h
          -> TermType Lang
-withLang (Unsupervised l n s m) ns = Unsupervised l n s m'
+withLang (Unsupervised {..}) ns = Unsupervised { _tt_model = m', .. }
   where
-    m' = case m of
+    m' = case _tt_model of
       Nothing -> -- trace ("buildTries here" :: String)
-               Just $ buildTries n $ fmap toToken
+               Just $ buildTries _tt_ngramsSize
+                    $ fmap toToken
                     $ uniText
                     $ Text.intercalate " . "
                     $ List.concat
@@ -171,9 +172,9 @@ terms :: TermType Lang -> Text -> IO [Terms]
 terms (Mono      lang) txt = pure $ monoTerms lang txt
 terms (Multi     lang) txt = multiterms lang txt
 terms (MonoMulti lang) txt = terms (Multi lang) txt
-terms (Unsupervised lang n s m) txt = termsUnsupervised (Unsupervised lang n s (Just m')) txt
+terms (Unsupervised { .. }) txt = termsUnsupervised (Unsupervised { _tt_model = Just m', .. }) txt
   where
-    m' = maybe (newTries n txt) identity m
+    m' = maybe (newTries _tt_ngramsSize txt) identity _tt_model
 -- terms (WithList  list) txt = pure . concat $ extractTermsWithList list txt
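'Unsupervised { .. }' on the left plus 'Unsupervised { _tt_model = Just m', .. }' on the right is the record-wildcard way to update one field while copying all the others. The shape in miniature (illustrative type, not from the codebase):

    {-# LANGUAGE RecordWildCards #-}

    data Cfg = Cfg { size :: Int, model :: Maybe String }

    -- '{ .. }' binds every field; the rebuild overrides 'model' and the
    -- trailing '..' copies each field not mentioned explicitly.
    ensureModel :: Cfg -> Cfg
    ensureModel (Cfg { .. }) = Cfg { model = Just m', .. }
      where
        m' = maybe ("default-" <> show size) id model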
......
@@ -93,7 +93,8 @@ flowList_DbRepo lId ngs = do
   let toInsert = catMaybes [ (,) <$> (getCgramsId mapCgramsId ntype <$> (unNgramsTerm <$> parent))
                                  <*> getCgramsId mapCgramsId ntype ngram
                            | (ntype, ngs') <- Map.toList ngs
-                           , NgramsElement (NgramsTerm ngram) _ _ _ _ parent _ <- ngs'
+                           , NgramsElement { _ne_ngrams = NgramsTerm ngram
+                                           , _ne_parent = parent } <- ngs'
                            ]
   -- Inserting groups of ngrams
   _r <- insert_Node_NodeNgrams_NodeNgrams
@@ -115,15 +116,37 @@ toNodeNgramsW l ngs = List.concat $ map (toNodeNgramsW'' l) ngs
                     -> (NgramsType, [NgramsElement])
                     -> [NodeNgramsW]
     toNodeNgramsW'' l' (ngrams_type, elms) =
-      [ NodeNgrams Nothing l' list_type ngrams_terms' ngrams_type Nothing Nothing Nothing 0 |
-        (NgramsElement (NgramsTerm ngrams_terms') _size list_type _occ _root _parent _children) <- elms
+      [ NodeNgrams { _nng_id = Nothing
+                   , _nng_node_id = l'
+                   , _nng_node_subtype = list_type
+                   , _nng_ngrams_id = ngrams_terms'
+                   , _nng_ngrams_type = ngrams_type
+                   , _nng_ngrams_field = Nothing
+                   , _nng_ngrams_tag = Nothing
+                   , _nng_ngrams_class = Nothing
+                   , _nng_ngrams_weight = 0 } |
+        (NgramsElement { _ne_ngrams = NgramsTerm ngrams_terms'
+                       , _ne_size = _size
+                       , _ne_list = list_type
+                       , _ne_occurrences = _occ
+                       , _ne_root = _root
+                       , _ne_parent = _parent
+                       , _ne_children = _children }) <- elms
       ]
     toNodeNgramsW' :: ListId
                    -> [(Text, [NgramsType])]
                    -> [NodeNgramsW]
-    toNodeNgramsW' l'' ngs = [ NodeNgrams Nothing l'' CandidateTerm terms ngrams_type Nothing Nothing Nothing 0
+    toNodeNgramsW' l'' ngs = [ NodeNgrams { _nng_id = Nothing
+                                          , _nng_node_id = l''
+                                          , _nng_node_subtype = CandidateTerm
+                                          , _nng_ngrams_id = terms
+                                          , _nng_ngrams_type = ngrams_type
+                                          , _nng_ngrams_field = Nothing
+                                          , _nng_ngrams_tag = Nothing
+                                          , _nng_ngrams_class = Nothing
+                                          , _nng_ngrams_weight = 0 }
                              | (terms, ngrams_types) <- ngs
                              , ngrams_type <- ngrams_types
                              ]
......
@@ -72,7 +72,10 @@ pairing a c l' = do
     Just l'' -> pure l''
   dataPaired <- dataPairing a (c,l,Authors) takeName takeName
   r <- insertDB $ prepareInsert dataPaired
-  _ <- insertNodeNode [ NodeNode c a Nothing Nothing]
+  _ <- insertNodeNode [ NodeNode { _nn_node1_id = c
+                                 , _nn_node2_id = a
+                                 , _nn_score = Nothing
+                                 , _nn_category = Nothing }]
   pure r
@@ -96,7 +99,10 @@ dataPairing aId (cId, lId, ngt) fc fa = do
 prepareInsert :: HashMap ContactId (Set DocId) -> [NodeNode]
-prepareInsert m = map (\(n1,n2) -> NodeNode n1 n2 Nothing Nothing)
+prepareInsert m = map (\(n1,n2) -> NodeNode { _nn_node1_id = n1
+                                            , _nn_node2_id = n2
+                                            , _nn_score = Nothing
+                                            , _nn_category = Nothing })
                 $ List.concat
                 $ map (\(contactId, setDocIds)
                        -> map (\setDocId
......
@@ -54,7 +54,10 @@ insertDocNgrams :: CorpusId
                 -> HashMap (Indexed Int Ngrams) (Map NgramsType (Map NodeId Int))
                 -> Cmd err Int
 insertDocNgrams cId m =
-  insertDocNgramsOn cId [ DocNgrams n (_index ng) (ngramsTypeId t) (fromIntegral i)
+  insertDocNgramsOn cId [ DocNgrams { dn_doc_id = n
+                                    , dn_ngrams_id = _index ng
+                                    , dn_ngrams_type = ngramsTypeId t
+                                    , dn_weight = fromIntegral i }
                         | (ng, t2n2i) <- HashMap.toList m
                         , (t, n2i) <- DM.toList t2n2i
                         , (n, i) <- DM.toList n2i
......
@@ -29,5 +29,6 @@ sendMail :: HasNodeError err => User -> Cmd err ()
 sendMail u = do
   server <- view $ hasConfig . gc_url
   userLight <- getUserLightDB u
-  liftBase $ mail server (MailInfo (userLight_username userLight) (userLight_email userLight))
+  liftBase $ mail server (MailInfo { mailInfo_username = userLight_username userLight
+                                   , mailInfo_address = userLight_email userLight })
@@ -92,13 +92,13 @@ queryInCorpus cId t q = proc () -> do
     else (nn^.nn_category) .>= (toNullable $ pgInt4 1)
   restrict -< (n ^. ns_search) @@ (pgTSQuery (unpack q))
   restrict -< (n ^. ns_typename ) .== (pgInt4 $ toDBid NodeDocument)
-  returnA  -< FacetDoc (n^.ns_id       )
-                       (n^.ns_date     )
-                       (n^.ns_name     )
-                       (n^.ns_hyperdata)
-                       (nn^.nn_category)
-                       (nn^.nn_score   )
-                       (nn^.nn_score   )
+  returnA  -< FacetDoc { facetDoc_id         = n^.ns_id
+                       , facetDoc_created    = n^.ns_date
+                       , facetDoc_title      = n^.ns_name
+                       , facetDoc_hyperdata  = n^.ns_hyperdata
+                       , facetDoc_category   = nn^.nn_category
+                       , facetDoc_ngramCount = nn^.nn_score
+                       , facetDoc_score      = nn^.nn_score }
 joinInCorpus :: O.Query (NodeSearchRead, NodeNodeReadNull)
 joinInCorpus = leftJoin queryNodeSearchTable queryNodeNodeTable cond
@@ -156,7 +156,10 @@ selectGroup :: HasDBid NodeType
 selectGroup cId aId q = proc () -> do
   (a, b, c, d) <- aggregate (p4 (groupBy, groupBy, groupBy, O.sum))
                             (selectContactViaDoc cId aId q) -< ()
-  returnA -< FacetPaired a b c d
+  returnA -< FacetPaired { _fp_id = a
+                         , _fp_date = b
+                         , _fp_hyperdata = c
+                         , _fp_score = d }
 queryContactViaDoc :: O.Query ( NodeSearchRead
@@ -270,7 +273,7 @@ textSearchQuery = "SELECT n.id, n.hyperdata->'publication_year' \
 textSearch :: HasDBid NodeType
            => TSQuery -> ParentId
            -> Limit -> Offset -> Order
-           -> Cmd err [(Int,Value,Value,Value, Value, Maybe Int)]
+           -> Cmd err [(Int, Value, Value, Value, Value, Maybe Int)]
 textSearch q p l o ord = runPGSQuery textSearchQuery (q,p,p,typeId,ord,o,l)
   where
     typeId = toDBid NodeDocument
......
@@ -53,7 +53,10 @@ shareNodeWith (ShareNodeWith_User NodeFolderShared u) n = do
     then errorWith "[G.D.A.S.shareNodeWith] Can share to others only"
     else do
       folderSharedId  <- getFolderId u NodeFolderShared
-      insertDB ([NodeNode folderSharedId n Nothing Nothing]:: [NodeNode])
+      insertDB ([NodeNode { _nn_node1_id = folderSharedId
+                          , _nn_node2_id = n
+                          , _nn_score = Nothing
+                          , _nn_category = Nothing }]:: [NodeNode])
 shareNodeWith (ShareNodeWith_Node NodeFolderPublic nId) n = do
   nodeToCheck <- getNode n
@@ -63,7 +66,10 @@ shareNodeWith (ShareNodeWith_Node NodeFolderPublic nId) n = do
     else do
       folderToCheck <- getNode nId
       if hasNodeType folderToCheck NodeFolderPublic
-        then insertDB ([NodeNode nId n Nothing Nothing] :: [NodeNode])
+        then insertDB ([NodeNode { _nn_node1_id = nId
+                                 , _nn_node2_id = n
+                                 , _nn_score = Nothing
+                                 , _nn_category = Nothing }] :: [NodeNode])
         else errorWith "[G.D.A.S.shareNodeWith] Can share NodeWith NodeFolderPublic only"
 shareNodeWith _ _ = errorWith "[G.D.A.S.shareNodeWith] Not implemented for this NodeType"
......
@@ -118,11 +118,11 @@ instance (Arbitrary i, Arbitrary l) => Arbitrary (Pair i l) where
   arbitrary = Pair <$> arbitrary <*> arbitrary
 data FacetPaired id date hyperdata score =
-  FacetPaired {_fp_id        :: id
-              ,_fp_date      :: date
-              ,_fp_hyperdata :: hyperdata
-              ,_fp_score     :: score
-              } deriving (Show, Generic)
+  FacetPaired { _fp_id        :: id
+              , _fp_date      :: date
+              , _fp_hyperdata :: hyperdata
+              , _fp_score     :: score }
+  deriving (Show, Generic)
 $(deriveJSON (unPrefix "_fp_") ''FacetPaired)
 $(makeAdaptorAndInstance "pFacetPaired" ''FacetPaired)
......