Commit 51a7b876 authored by Alexandre Delanoë

[DBFLOW] getViewDocument + uniqIdBdd (enables duplicates from different databases, but not within the same database).
parent 5efcb182
@@ -7,11 +7,10 @@ Maintainer : team@gargantext.org
 Stability   : experimental
 Portability : POSIX
-All Database related stuff here.
 Target: just import this module and nothing else to work with
 Gargantext's database.
+TODO: configure nodes table in Haskell (Config typenames etc.)
 -}
 {-# LANGUAGE NoImplicitPrelude #-}
......
@@ -126,18 +126,19 @@ instance Arbitrary FacetChart where
   arbitrary = FacetChart <$> arbitrary <*> arbitrary
 -----------------------------------------------------------------------
+type Trash = Bool
 data OrderBy = DateAsc | DateDesc
              -- | TitleAsc | TitleDesc
              | FavDesc | FavAsc -- | NgramCount
-viewDocuments :: CorpusId -> NodeTypeId -> Query FacetDocRead
-viewDocuments cId ntId = proc () -> do
+viewDocuments :: CorpusId -> Trash -> NodeTypeId -> Query FacetDocRead
+viewDocuments cId t ntId = proc () -> do
   n  <- queryNodeTable     -< ()
   nn <- queryNodeNodeTable -< ()
   restrict -< _node_id n .== nodeNode_node2_id nn
   restrict -< nodeNode_node1_id nn .== (pgInt4 cId)
   restrict -< _node_typename n .== (pgInt4 ntId)
+  restrict -< nodeNode_delete nn .== (pgBool t)
   returnA  -< FacetDoc (_node_id n) (_node_date n) (_node_hyperdata n) (nodeNode_favorite nn) (pgInt4 1)
@@ -160,9 +161,9 @@ filterDocuments o l order q = limit' l $ offset' o $ orderBy ordering q
             FavDesc -> desc facetDoc_favorite
-runViewDocuments :: CorpusId -> Maybe Offset -> Maybe Limit -> OrderBy -> Cmd [FacetDoc]
-runViewDocuments cId o l order = mkCmd $ \c -> runQuery c ( filterDocuments o l order
-                                               $ viewDocuments cId ntId)
+runViewDocuments :: CorpusId -> Trash -> Maybe Offset -> Maybe Limit -> OrderBy -> Cmd [FacetDoc]
+runViewDocuments cId t o l order = mkCmd $ \c -> runQuery c ( filterDocuments o l order
+                                                 $ viewDocuments cId t ntId)
   where
     ntId = nodeTypeId NodeDocument
......
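For orientation, a minimal usage sketch of the new Trash flag (illustration only, not part of the patch; listDocs and listTrash are hypothetical helpers, all other names come from the code above): the regular document view passes False, the trash view passes True.

    -- Hedged sketch: callers now choose the trash flag explicitly.
    listDocs, listTrash :: CorpusId -> Cmd [FacetDoc]
    listDocs  cId = runViewDocuments cId False Nothing Nothing DateDesc  -- documents still in the corpus
    listTrash cId = runViewDocuments cId True  Nothing Nothing DateDesc  -- documents marked as deleted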
@@ -64,14 +64,14 @@ add_debug pId ns = mkCmd $ \c -> formatQuery c queryAdd (Only $ Values fields in
 -- | Input Tables: types of the tables
 inputSqlTypes :: [Text]
-inputSqlTypes = map DT.pack ["int4","int4","bool"]
+inputSqlTypes = map DT.pack ["int4","int4","bool","bool"]
 -- | SQL query to add documents
 -- TODO return id of added documents only
 queryAdd :: Query
 queryAdd = [sql|
-    WITH input_rows(node1_id,node2_id, favorite) AS (?)
-    INSERT INTO nodes_nodes (node1_id, node2_id, favorite)
+    WITH input_rows(node1_id,node2_id, favorite, delete) AS (?)
+    INSERT INTO nodes_nodes (node1_id, node2_id, favorite, delete)
     SELECT * FROM input_rows
     ON CONFLICT (node1_id, node2_id) DO NOTHING -- on unique index
     RETURNING 1
@@ -79,7 +79,7 @@ queryAdd = [sql|
   |]
 prepare :: ParentId -> [NodeId] -> [InputData]
-prepare pId ns = map (\nId -> InputData pId nId False) ns
+prepare pId ns = map (\nId -> InputData pId nId False False) ns
 ------------------------------------------------------------------------
 -- * Main Types used
@@ -88,11 +88,13 @@ prepare pId ns = map (\nId -> InputData pId nId False) ns
 data InputData = InputData { inNode1_id :: NodeId
                            , inNode2_id :: NodeId
                            , inNode_fav :: Bool
+                           , inNode_del :: Bool
                            } deriving (Show, Generic, Typeable)
 instance ToRow InputData where
   toRow inputData = [ toField (inNode1_id inputData)
                     , toField (inNode2_id inputData)
                     , toField (inNode_fav inputData)
+                    , toField (inNode_del inputData)
                     ]
@@ -131,7 +131,8 @@ queryInsert = [sql|
   , ins AS (
     INSERT INTO nodes (typename,user_id,parent_id,name,hyperdata)
     SELECT * FROM input_rows
-    ON CONFLICT (typename, parent_id, (hyperdata ->> 'uniqId')) DO NOTHING -- on unique index
+    ON CONFLICT ((hyperdata ->> 'uniqIdBdd')) DO NOTHING -- on unique index
+    -- ON CONFLICT (typename, parent_id, (hyperdata ->> 'uniqId')) DO NOTHING -- on unique index
     RETURNING id,hyperdata
     )
@@ -197,13 +198,15 @@ hashParameters = [ \d -> maybe' (_hyperdataDocument_title d)
                  , \d -> maybe' (_hyperdataDocument_source d)
                  , \d -> maybe' (_hyperdataDocument_publication_date d)
                  ]
-  where
-    maybe' = maybe (DT.pack "") identity
+maybe' = maybe (DT.pack "") identity
 addUniqId :: HyperdataDocument -> HyperdataDocument
-addUniqId doc = set hyperdataDocument_uniqId (Just hash) doc
+addUniqId doc = set hyperdataDocument_uniqIdBdd (Just hashBdd)
+              $ set hyperdataDocument_uniqId    (Just hash) doc
   where
     hash    = uniqId $ DT.concat $ map ($ doc) hashParameters
+    hashBdd = uniqId $ DT.concat $ map ($ doc) ([(\d -> maybe' (_hyperdataDocument_bdd d))] <> hashParameters)
 uniqId :: Text -> Text
 uniqId = DT.pack . SHA.showDigest . SHA.sha256 . DC.pack . DT.unpack
......
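To make the intent of the two hashes concrete, here is a hedged sketch (someDoc is a hypothetical, fully populated HyperdataDocument; everything else comes from the patch above): documents sharing title, source and publication date get the same uniqId, but uniqIdBdd differs as soon as the source database field (_hyperdataDocument_bdd) differs, so the ON CONFLICT clause on uniqIdBdd only skips re-imports coming from the same database.

    -- Sketch only, not part of the commit: `someDoc` is an assumed existing document.
    sameContentTwoSources :: HyperdataDocument -> (Bool, Bool)
    sameContentTwoSources someDoc = (sameUniqId, sameUniqIdBdd)
      where
        doc1 = addUniqId (someDoc { _hyperdataDocument_bdd = Just (DT.pack "sourceA") })
        doc2 = addUniqId (someDoc { _hyperdataDocument_bdd = Just (DT.pack "sourceB") })
        sameUniqId    = _hyperdataDocument_uniqId    doc1 == _hyperdataDocument_uniqId    doc2  -- True
        sameUniqIdBdd = _hyperdataDocument_uniqIdBdd doc1 == _hyperdataDocument_uniqIdBdd doc2  -- False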
@@ -11,6 +11,7 @@ Portability : POSIX
 {-# OPTIONS_GHC -fno-warn-orphans #-}
+{-# LANGUAGE BangPatterns #-}
 {-# LANGUAGE DeriveGeneric #-}
 {-# LANGUAGE FlexibleInstances #-}
 {-# LANGUAGE NoImplicitPrelude #-}
@@ -78,23 +79,23 @@ $(deriveJSON (unPrefix "statusV3_") ''StatusV3)
 ------------------------------------------------------------------------
 ------------------------------------------------------------------------
-data HyperdataDocumentV3 = HyperdataDocumentV3 { hyperdataDocumentV3_publication_day    :: Maybe Int
-                                                , hyperdataDocumentV3_language_iso2      :: Maybe Text
-                                                , hyperdataDocumentV3_publication_second :: Maybe Int
-                                                , hyperdataDocumentV3_publication_minute :: Maybe Int
-                                                , hyperdataDocumentV3_publication_month  :: Maybe Int
-                                                , hyperdataDocumentV3_publication_hour   :: Maybe Int
-                                                , hyperdataDocumentV3_error              :: Maybe Text
-                                                , hyperdataDocumentV3_language_iso3      :: Maybe Text
-                                                , hyperdataDocumentV3_authors            :: Maybe Text
-                                                , hyperdataDocumentV3_publication_year   :: Maybe Int
-                                                , hyperdataDocumentV3_publication_date   :: Maybe Text
-                                                , hyperdataDocumentV3_language_name      :: Maybe Text
-                                                , hyperdataDocumentV3_statuses           :: Maybe [StatusV3]
-                                                , hyperdataDocumentV3_realdate_full_     :: Maybe Text
-                                                , hyperdataDocumentV3_source             :: Maybe Text
-                                                , hyperdataDocumentV3_abstract           :: Maybe Text
-                                                , hyperdataDocumentV3_title              :: Maybe Text
+data HyperdataDocumentV3 = HyperdataDocumentV3 { hyperdataDocumentV3_publication_day    :: !(Maybe Int)
+                                                , hyperdataDocumentV3_language_iso2      :: !(Maybe Text)
+                                                , hyperdataDocumentV3_publication_second :: !(Maybe Int)
+                                                , hyperdataDocumentV3_publication_minute :: !(Maybe Int)
+                                                , hyperdataDocumentV3_publication_month  :: !(Maybe Int)
+                                                , hyperdataDocumentV3_publication_hour   :: !(Maybe Int)
+                                                , hyperdataDocumentV3_error              :: !(Maybe Text)
+                                                , hyperdataDocumentV3_language_iso3      :: !(Maybe Text)
+                                                , hyperdataDocumentV3_authors            :: !(Maybe Text)
+                                                , hyperdataDocumentV3_publication_year   :: !(Maybe Int)
+                                                , hyperdataDocumentV3_publication_date   :: !(Maybe Text)
+                                                , hyperdataDocumentV3_language_name      :: !(Maybe Text)
+                                                , hyperdataDocumentV3_statuses           :: !(Maybe [StatusV3])
+                                                , hyperdataDocumentV3_realdate_full_     :: !(Maybe Text)
+                                                , hyperdataDocumentV3_source             :: !(Maybe Text)
+                                                , hyperdataDocumentV3_abstract           :: !(Maybe Text)
+                                                , hyperdataDocumentV3_title              :: !(Maybe Text)
                                                 } deriving (Show, Generic)
 $(deriveJSON (unPrefix "hyperdataDocumentV3_") ''HyperdataDocumentV3)
 ------------------------------------------------------------------------
@@ -103,6 +104,7 @@ data HyperdataDocument = HyperdataDocument { _hyperdataDocument_bdd
                      , _hyperdataDocument_doi       :: Maybe Text
                      , _hyperdataDocument_url       :: Maybe Text
                      , _hyperdataDocument_uniqId    :: Maybe Text
+                     , _hyperdataDocument_uniqIdBdd :: Maybe Text
                      , _hyperdataDocument_page      :: Maybe Int
                      , _hyperdataDocument_title     :: Maybe Text
                      , _hyperdataDocument_authors   :: Maybe Text
@@ -124,9 +126,9 @@ instance ToField HyperdataDocument where
   toField = toJSONField
 toHyperdataDocuments :: [(Text, Text)] -> [HyperdataDocument]
-toHyperdataDocuments ts = map (\(t1,t2) -> HyperdataDocument Nothing Nothing Nothing Nothing Nothing (Just t1)
-                                           Nothing (Just t2) Nothing Nothing Nothing
-                                           Nothing Nothing Nothing Nothing Nothing Nothing
+toHyperdataDocuments ts = map (\(t1,t2) -> HyperdataDocument Nothing Nothing Nothing Nothing Nothing Nothing (Just t1)
+                                           Nothing (Just t2) Nothing Nothing Nothing Nothing Nothing
+                                           Nothing Nothing Nothing Nothing
                           ) ts
 hyperdataDocuments :: [HyperdataDocument]
@@ -358,7 +360,7 @@ hyperdataDocument = case decode docExample of
                  Nothing Nothing
                  Nothing Nothing
                  Nothing Nothing
-                 Nothing
+                 Nothing Nothing
 docExample :: ByteString
 docExample = "{\"doi\":\"sdfds\",\"publication_day\":6,\"language_iso2\":\"en\",\"publication_minute\":0,\"publication_month\":7,\"language_iso3\":\"eng\",\"publication_second\":0,\"authors\":\"Nils Hovdenak, Kjell Haram\",\"publication_year\":2012,\"publication_date\":\"2012-07-06 00:00:00+00:00\",\"language_name\":\"English\",\"realdate_full_\":\"2012 01 12\",\"source\":\"European journal of obstetrics, gynecology, and reproductive biology\",\"abstract\":\"The literature was searched for publications on minerals and vitamins during pregnancy and the possible influence of supplements on pregnancy outcome.\",\"title\":\"Influence of mineral and vitamin supplements on pregnancy outcome.\",\"publication_hour\":0}"
......
@@ -120,6 +120,7 @@ toDoc format d = do
         (lookup "URL" d)
         Nothing
         Nothing
+        Nothing
         (lookup "title" d)
         (lookup "authors" d)
         (lookup "source" d)
......
@@ -59,6 +59,7 @@ doc2hyperdataDocument (Doc did dt _ dpy dpm dpd dab dau) =
   Nothing
   Nothing
   Nothing
+  Nothing
   (Just dt)
   (Just dau)
   (Just dab)
......