Commit 8d66d21e authored by Alfredo Di Napoli

WIP: Port DB operations to transactional API

parent c0f94390
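Every hunk below follows the same pattern: read-side helpers move from `DBCmd err a` to `DBQuery err x a` and write-side ones to `DBUpdate err a`; queries are built with `mkPGQuery` / `mkOpaQuery` / `mkPGUpdate` / `mkOpaInsert` / `mkOpaDelete` instead of being executed eagerly via `runPGSQuery` / `runOpaQuery` / `execPGSQuery`; and a whole block is executed in a single transaction at the call site with `runDBQuery`. As a minimal before/after sketch of that calling convention, lifted from the `dbCheck` change in `Gargantext.Database.Prelude` further down (only names that appear in this diff are used; the exact types of `DBQuery`, `mkPGQuery` and `runDBQuery` live in `Gargantext.Database.Transactional`, which is not part of this diff):

-- Before the port: the do-block talks to the database as soon as it runs.
dbCheck :: DBCmd err Bool
dbCheck = do
  r :: [PGS.Only Text] <- runPGSQuery_ "select username from public.auth_user"
  case r of
    [] -> pure False
    _ -> pure True

-- After the port: the do-block merely builds a DBQuery value, and runDBQuery
-- executes it inside one transaction.
dbCheck :: DBCmd err Bool
dbCheck = runDBQuery $ do
  r :: [PGS.Only Text] <- mkPGQuery "select username from public.auth_user" ()
  case r of
    [] -> pure False
    _ -> pure True

Helpers that only build queries (for example `getParentId` or `getNodeWith` below) now simply return `DBQuery err x a` and are composed inside such a block, as `flowPhylo` does in the first Haskell hunk.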
@@ -295,6 +295,7 @@ library
     Gargantext.Database.Admin.Types.Hyperdata.Document
     Gargantext.Database.Admin.Types.Hyperdata.Folder
     Gargantext.Database.Admin.Types.Node
+    Gargantext.Database.Class
     Gargantext.Database.Prelude
     Gargantext.Database.Query.Facet
     Gargantext.Database.Query.Table.Ngrams
...
@@ -37,6 +37,7 @@ import Gargantext.Database.Admin.Types.Hyperdata.Document ( HyperdataDocument(_h
 import Gargantext.Database.Query.Table.Node(defaultList, getNodeWith)
 import Gargantext.Database.Query.Table.NodeContext (selectDocs)
 import Gargantext.Database.Schema.Node ( node_hyperdata )
+import Gargantext.Database.Prelude
 import Gargantext.Prelude hiding (to)

 type MinSizeBranch = Int
@@ -44,7 +45,7 @@ type MinSizeBranch = Int
 flowPhylo :: (HasNodeStory env err m, HasDBid NodeType)
   => CorpusId
   -> m Phylo
-flowPhylo cId = do
+flowPhylo cId = runDBQuery $ do
   corpus_node <- getNodeWith cId (Proxy @HyperdataCorpus)
   let lang = withDefaultLanguage $ view (node_hyperdata . to _hc_lang) corpus_node
...
@@ -32,7 +32,7 @@ import Gargantext.Core.Text.Ngrams (NgramsType(..))
 import Gargantext.Data.HashMap.Strict.Utils as HM ( unionsWith )
 import Gargantext.Database.Admin.Types.Hyperdata.Document
 import Gargantext.Database.Admin.Types.Node (ListId, CorpusId, NodeId(..), ContextId (..), MasterCorpusId, NodeType(NodeDocument), UserCorpusId, DocId)
-import Gargantext.Database.Prelude (DBCmd, runPGSQuery)
+import Gargantext.Database.Prelude
 import Gargantext.Database.Schema.Ngrams () -- toDBid instance
 import Gargantext.Prelude
@@ -60,7 +60,7 @@ countContextsByNgramsWith f m = (total, m')
 getContextsByNgramsUser :: HasDBid NodeType
   => CorpusId
   -> NgramsType
-  -> DBCmd err (HashMap NgramsTerm (Set ContextId))
+  -> DBQuery err x (HashMap NgramsTerm (Set ContextId))
 getContextsByNgramsUser cId nt =
   HM.fromListWith (<>) <$> map (\(n,t) -> (NgramsTerm t, Set.singleton n))
     <$> selectNgramsByContextUser cId nt
@@ -69,9 +69,9 @@ getContextsByNgramsUser cId nt =
 selectNgramsByContextUser :: HasDBid NodeType
   => CorpusId
   -> NgramsType
-  -> DBCmd err [(ContextId, Text)]
+  -> DBQuery err x [(ContextId, Text)]
 selectNgramsByContextUser cId' nt' =
-  runPGSQuery queryNgramsByContextUser
+  mkPGQuery queryNgramsByContextUser
     ( cId'
    , toDBid NodeDocument
    , toDBid nt'
@@ -95,16 +95,16 @@ getContextsByNgramsUser cId nt =
 getTreeInstitutesUser :: HasDBid NodeType
   => CorpusId
   -> NgramsType
-  -> DBCmd err (HashMap Text [Text])
+  -> DBQuery err x (HashMap Text [Text])
 getTreeInstitutesUser cId nt =
   HM.unionsWith (++) . map (\(_, hd) -> HM.fromList $ Map.toList $ fromMaybe Map.empty (_hd_institutes_tree hd)) <$> selectHyperDataByContextUser cId nt

 selectHyperDataByContextUser :: HasDBid NodeType
   => CorpusId
   -> NgramsType
-  -> DBCmd err [(ContextId, HyperdataDocument)]
+  -> DBQuery err x [(ContextId, HyperdataDocument)]
 selectHyperDataByContextUser cId' nt' =
-  runPGSQuery queryHyperDataByContextUser
+  mkPGQuery queryHyperDataByContextUser
    ( cId'
    , toDBid nt'
    )
@@ -127,7 +127,7 @@ getOccByNgramsOnlyFast_withSample :: HasDBid NodeType
   -> Int
   -> NgramsType
   -> [NgramsTerm]
-  -> DBCmd err (HashMap NgramsTerm Int)
+  -> DBQuery err x (HashMap NgramsTerm Int)
 getOccByNgramsOnlyFast_withSample cId int nt ngs =
   HM.fromListWith (+) <$> selectNgramsOccurrencesOnlyByContextUser_withSample cId int nt ngs
@@ -135,7 +135,7 @@ getOccByNgramsOnlyFast_withSample cId int nt ngs =
 getOccByNgramsOnlyFast :: CorpusId
   -> ListId
   -> NgramsType
-  -> DBCmd err (HashMap NgramsTerm [ContextId])
+  -> DBQuery err x (HashMap NgramsTerm [ContextId])
 getOccByNgramsOnlyFast cId lId nt = do
   --HM.fromList <$> map (\(t,n) -> (NgramsTerm t, round n)) <$> run cId lId nt
   HM.fromList <$> map (\(t, ns) -> (NgramsTerm t, UnsafeMkContextId <$> DPST.fromPGArray ns)) <$> run cId lId nt
@@ -144,8 +144,8 @@ getOccByNgramsOnlyFast cId lId nt = do
   run :: CorpusId
     -> ListId
    -> NgramsType
-    -> DBCmd err [(Text, DPST.PGArray Int)]
+    -> DBQuery err x [(Text, DPST.PGArray Int)]
-  run cId' lId' nt' = runPGSQuery query
+  run cId' lId' nt' = mkPGQuery query
    ( cId'
    , lId'
    , toDBid nt'
@@ -208,10 +208,10 @@ selectNgramsOccurrencesOnlyByContextUser_withSample :: HasDBid NodeType
   -> Int
   -> NgramsType
   -> [NgramsTerm]
-  -> DBCmd err [(NgramsTerm, Int)]
+  -> DBQuery err x [(NgramsTerm, Int)]
 selectNgramsOccurrencesOnlyByContextUser_withSample cId int nt tms =
   fmap (first NgramsTerm) <$>
-    runPGSQuery queryNgramsOccurrencesOnlyByContextUser_withSample
+    mkPGQuery queryNgramsOccurrencesOnlyByContextUser_withSample
      ( int
      , toDBid NodeDocument
      , cId
@@ -269,10 +269,10 @@ selectNgramsOccurrencesOnlyByContextUser_withSample' :: HasDBid NodeType
   => CorpusId
   -> Int
   -> NgramsType
-  -> DBCmd err [(NgramsTerm, Int)]
+  -> DBQuery err x [(NgramsTerm, Int)]
 selectNgramsOccurrencesOnlyByContextUser_withSample' cId int nt =
   fmap (first NgramsTerm) <$>
-    runPGSQuery queryNgramsOccurrencesOnlyByContextUser_withSample
+    mkPGQuery queryNgramsOccurrencesOnlyByContextUser_withSample
      ( int
      , toDBid NodeDocument
      , cId
@@ -303,7 +303,7 @@ getContextsByNgramsOnlyUser :: HasDBid NodeType
   -> [ListId]
   -> NgramsType
   -> [NgramsTerm]
-  -> DBCmd err (HashMap NgramsTerm (Set ContextId))
+  -> DBQuery err x (HashMap NgramsTerm (Set ContextId))
 getContextsByNgramsOnlyUser cId ls nt ngs =
   HM.unionsWith (<>)
     . map (HM.fromListWith (<>)
@@ -316,7 +316,7 @@ getNgramsByContextOnlyUser :: HasDBid NodeType
   -> [ListId]
   -> NgramsType
   -> [NgramsTerm]
-  -> DBCmd err (Map ContextId (Set NgramsTerm))
+  -> DBQuery err x (Map ContextId (Set NgramsTerm))
 getNgramsByContextOnlyUser cId ls nt ngs =
   Map.unionsWith (<>)
     . map ( Map.fromListWith (<>)
@@ -332,10 +332,10 @@ selectNgramsOnlyByContextUser :: HasDBid NodeType
   -> [ListId]
   -> NgramsType
   -> [NgramsTerm]
-  -> DBCmd err [(NgramsTerm, ContextId)]
+  -> DBQuery err x [(NgramsTerm, ContextId)]
 selectNgramsOnlyByContextUser cId ls nt tms =
   fmap (first NgramsTerm) <$>
-    runPGSQuery queryNgramsOnlyByContextUser
+    mkPGQuery queryNgramsOnlyByContextUser
      ( Values fields ((DPS.Only . unNgramsTerm) <$> tms)
      , Values [QualifiedIdentifier Nothing "int4"]
        (DPS.Only <$> map DPS.toField ls)
@@ -367,7 +367,7 @@ getNgramsByDocOnlyUser :: DocId
   -> [ListId]
   -> NgramsType
   -> [NgramsTerm]
-  -> DBCmd err (HashMap NgramsTerm (Set NodeId))
+  -> DBQuery err x (HashMap NgramsTerm (Set NodeId))
 getNgramsByDocOnlyUser cId ls nt ngs =
   HM.unionsWith (<>)
     . map (HM.fromListWith (<>) . map (second Set.singleton))
@@ -378,10 +378,10 @@ selectNgramsOnlyByDocUser :: DocId
   -> [ListId]
   -> NgramsType
   -> [NgramsTerm]
-  -> DBCmd err [(NgramsTerm, NodeId)]
+  -> DBQuery err x [(NgramsTerm, NodeId)]
 selectNgramsOnlyByDocUser dId ls nt tms =
   fmap (first NgramsTerm) <$>
-    runPGSQuery queryNgramsOnlyByDocUser
+    mkPGQuery queryNgramsOnlyByDocUser
      ( Values fields ((DPS.Only . unNgramsTerm) <$> tms)
      , Values [QualifiedIdentifier Nothing "int4"]
        (DPS.Only <$> (map DPS.toField ls))
@@ -410,7 +410,7 @@ queryNgramsOnlyByDocUser = [sql|
 getContextsByNgramsMaster :: HasDBid NodeType
   => UserCorpusId
   -> MasterCorpusId
-  -> DBCmd err (HashMap Text (Set NodeId))
+  -> DBQuery err x (HashMap Text (Set NodeId))
 getContextsByNgramsMaster ucId mcId = unionsWith (<>)
   . map (HM.fromListWith (<>) . map (\(n,t) -> (t, Set.singleton n)))
   -- . takeWhile (not . List.null)
@@ -422,8 +422,8 @@ selectNgramsByContextMaster :: HasDBid NodeType
   -> UserCorpusId
   -> MasterCorpusId
   -> Int
-  -> DBCmd err [(NodeId, Text)]
+  -> DBQuery err x [(NodeId, Text)]
-selectNgramsByContextMaster n ucId mcId p = runPGSQuery
+selectNgramsByContextMaster n ucId mcId p = mkPGQuery
   queryNgramsByContextMaster'
    ( ucId
    , toDBid NgramsTerms
@@ -438,7 +438,7 @@ selectNgramsByContextMaster n ucId mcId p = runPGSQuery
    )

 -- | TODO fix context_node_ngrams relation
-queryNgramsByContextMaster' :: DPS.Query
+queryNgramsByContextMaster' :: DPST.Query
 queryNgramsByContextMaster' = [sql|
   WITH contextsByNgramsUser AS (
...
@@ -21,7 +21,7 @@ import Gargantext.Core.Text.Metrics.TFICF
 import Gargantext.Core.Text.Ngrams (NgramsType(..))
 import Gargantext.Database.Action.Metrics.NgramsByContext (getContextsByNgramsUser, {-getOccByNgramsOnlyFast,-} getOccByNgramsOnlyFast_withSample)
 import Gargantext.Database.Admin.Types.Node -- (ListId, CorpusId, NodeId)
-import Gargantext.Database.Prelude (DBCmd)
+import Gargantext.Database.Prelude
 import Gargantext.Database.Query.Table.NodeContext (selectCountDocs)
 import Gargantext.Prelude
@@ -56,7 +56,7 @@ getTficf_withSample :: HasDBid NodeType
   => UserCorpusId
   -> MasterCorpusId
   -> NgramsType
-  -> DBCmd err (HashMap NgramsTerm Double)
+  -> DBQuery err x (HashMap NgramsTerm Double)
 getTficf_withSample cId mId nt = do
   mapTextDoubleLocal <- HM.filter (> 1)
     <$> HM.map (fromIntegral . Set.size)
...
@@ -21,12 +21,12 @@ import Gargantext.Core
 -- import Gargantext.Core.Types.Main (ListType(CandidateTerm))
 import Gargantext.Database.Admin.Types.Node -- (ListId, CorpusId, NodeId)
 -- (ListId, CorpusId, NodeId)
-import Gargantext.Database.Prelude (execPGSQuery, DBCmd)
+import Gargantext.Database.Prelude
 import Gargantext.Prelude
 import qualified Database.PostgreSQL.Simple as DPS

-triggerCountInsert :: HasDBid NodeType => DBCmd err Int64
-triggerCountInsert = execPGSQuery query (toDBid NodeDocument, toDBid NodeList)
+triggerCountInsert :: HasDBid NodeType => DBUpdate err Int64
+triggerCountInsert = mkPGUpdate query (toDBid NodeDocument, toDBid NodeList)
   where
     query :: DPS.Query
     query = [sql|
@@ -61,11 +61,11 @@ triggerCountInsert = execPGSQuery query (toDBid NodeDocument, toDBid NodeList)
       EXECUTE PROCEDURE set_ngrams_global_count();
  |]

-triggerCountInsert2 :: HasDBid NodeType => DBCmd err Int64
-triggerCountInsert2 = execPGSQuery query ( toDBid NodeCorpus
+triggerCountInsert2 :: HasDBid NodeType => DBUpdate err Int64
+triggerCountInsert2 = mkPGUpdate query ( toDBid NodeCorpus
     , toDBid NodeDocument
     , toDBid NodeList
     )
   where
     query :: DPS.Query
     query = [sql|
...
@@ -20,15 +20,15 @@ import Database.PostgreSQL.Simple qualified as DPS
 import Database.PostgreSQL.Simple.SqlQQ (sql)
 import Gargantext.Core (HasDBid(..))
 import Gargantext.Database.Admin.Types.Node -- (ListId, CorpusId, NodeId)
-import Gargantext.Database.Prelude (execPGSQuery, DBCmd)
+import Gargantext.Database.Prelude
 import Gargantext.Prelude

-triggerSearchUpdate :: HasDBid NodeType => DBCmd err Int64
-triggerSearchUpdate = execPGSQuery query ( toDBid NodeDocument
+triggerSearchUpdate :: HasDBid NodeType => DBUpdate err Int64
+triggerSearchUpdate = mkPGUpdate query ( toDBid NodeDocument
     , toDBid NodeDocument
     , toDBid NodeContact
     )
   where
     query :: DPS.Query
     query = [sql|
@@ -68,16 +68,16 @@ triggerSearchUpdate = execPGSQuery query ( toDBid NodeDocument
 type Secret = Text

-triggerUpdateHash :: HasDBid NodeType => Secret -> DBCmd err Int64
-triggerUpdateHash secret = execPGSQuery query ( toDBid NodeDocument
+triggerUpdateHash :: HasDBid NodeType => Secret -> DBUpdate err Int64
+triggerUpdateHash secret = mkPGUpdate query ( toDBid NodeDocument
     , toDBid NodeContact
     , secret
     , secret
     , toDBid NodeDocument
     , toDBid NodeContact
     , secret
     , secret
     )
   where
     query :: DPS.Query
     query = [sql|
...
@@ -20,16 +20,16 @@ import Gargantext.Database.Admin.Trigger.ContextNodeNgrams (triggerCountInsert,
 import Gargantext.Database.Admin.Trigger.Contexts (triggerSearchUpdate, triggerUpdateHash)
 import Gargantext.Database.Admin.Trigger.NodesContexts ({-triggerDeleteCount,-} triggerInsertCount, triggerUpdateAdd, triggerUpdateDel, MasterListId) -- , triggerCoocInsert)
 -- , triggerCoocInsert)
-import Gargantext.Database.Prelude (DBCmd)
+import Gargantext.Database.Prelude
 import Gargantext.Prelude

 ------------------------------------------------------------------------
-initFirstTriggers :: Text -> DBCmd err [Int64]
+initFirstTriggers :: Text -> DBUpdate err [Int64]
 initFirstTriggers secret = do
   t0 <- triggerUpdateHash secret
   pure [t0]

-initLastTriggers :: MasterListId -> DBCmd err [Int64]
+initLastTriggers :: MasterListId -> DBUpdate err [Int64]
 initLastTriggers lId = do
   t0 <- triggerSearchUpdate
   t1 <- triggerCountInsert
...
@@ -21,14 +21,14 @@ import Database.PostgreSQL.Simple.SqlQQ (sql)
 import Gargantext.Core
 import Gargantext.Database.Admin.Config ()
 import Gargantext.Database.Admin.Types.Node -- (ListId, CorpusId, NodeId)
-import Gargantext.Database.Prelude (execPGSQuery, DBCmd)
+import Gargantext.Database.Prelude
 import Gargantext.Prelude
 import qualified Database.PostgreSQL.Simple as DPS

 type MasterListId = ListId

-triggerInsertCount :: MasterListId -> DBCmd err Int64
-triggerInsertCount lId = execPGSQuery query (lId, toDBid NodeList)
+triggerInsertCount :: MasterListId -> DBUpdate err Int64
+triggerInsertCount lId = mkPGUpdate query (lId, toDBid NodeList)
   where
     query :: DPS.Query
     query = [sql|
@@ -63,8 +63,8 @@ triggerInsertCount lId = execPGSQuery query (lId, toDBid NodeList)
  |]

-triggerUpdateAdd :: MasterListId -> DBCmd err Int64
-triggerUpdateAdd lId = execPGSQuery query (lId, toDBid NodeList)
+triggerUpdateAdd :: MasterListId -> DBUpdate err Int64
+triggerUpdateAdd lId = mkPGUpdate query (lId, toDBid NodeList)
   where
     query :: DPS.Query
     query = [sql|
@@ -103,8 +103,8 @@ triggerUpdateAdd lId = execPGSQuery query (lId, toDBid NodeList)
  |]

-triggerUpdateDel :: MasterListId -> DBCmd err Int64
-triggerUpdateDel lId = execPGSQuery query (lId, toDBid NodeList)
+triggerUpdateDel :: MasterListId -> DBUpdate err Int64
+triggerUpdateDel lId = mkPGUpdate query (lId, toDBid NodeList)
   where
     query :: DPS.Query
     query = [sql|
@@ -145,8 +145,8 @@ triggerUpdateDel lId = execPGSQuery query (lId, toDBid NodeList)
-triggerDeleteCount :: MasterListId -> DBCmd err Int64
-triggerDeleteCount lId = execPGSQuery query (lId, toDBid NodeList)
+triggerDeleteCount :: MasterListId -> DBUpdate err Int64
+triggerDeleteCount lId = mkPGUpdate query (lId, toDBid NodeList)
   where
     query :: DPS.Query
     query = [sql|
...
+{-# LANGUAGE ConstraintKinds #-}
+
+module Gargantext.Database.Class where
+
+import Control.Lens (Getter)
+import Control.Monad.Random ( MonadRandom )
+import Control.Monad.Trans.Control (MonadBaseControl)
+import Data.Pool (Pool)
+import Database.PostgreSQL.Simple (Connection)
+import Gargantext.Core.Notifications.CentralExchange.Types qualified as CET
+import Gargantext.Core.Config (HasConfig(..))
+import Gargantext.Core.Mail.Types (HasMail)
+import Gargantext.Core.NLP (HasNLPServer)
+import Gargantext.Prelude
+
+-- $typesAndConstraints
+--
+-- The names of the constraints and types in this module are chosen based on
+-- the following guidelines:
+-- * By default, constraints are relatively lenient. Stricter constraints are
+--   obtained by appending the `Extra` suffix to the minimal constraint name.
+-- * `IsDBEnv(Extra)` applies to the environment; the basic constraint allows
+--   access to the database, and the `Extra` variant offers some more
+--   capabilities such as access to mail.
+-- * `IsCmd` is the basic constraint for command monads. Append `DB` to it to get
+--   a monad of commands that can talk to the database. Append `Extra` to get
+--   the ability to send mail, make use of the NLP server and deal with central
+--   exchange notifications. Append `Random` to get access to randomness.
+-- * Existential versions of the constraints bear the same name as the constraint
+--   they are based on, but without the `Is` prefix.
+
+class HasConnectionPool env where
+  connPool :: Getter env (Pool Connection)
+
+instance HasConnectionPool (Pool Connection) where
+  connPool = identity
+
+-- | The most basic constraints for an environment with a database.
+-- If possible, try to not add more constraints here. When performing
+-- a query/update on the DB, one shouldn't need more than being able to
+-- fetch from the underlying 'env' the connection pool and access the
+-- 'GargConfig' for some sensible defaults to store into the DB.
+type IsDBEnv env =
+  ( HasConnectionPool env
+  , HasConfig env
+  )
+
+-- | Constraints for a full-fledged environment, with a database, mail exchange,
+-- NLP processing, notifications.
+type IsDBEnvExtra env =
+  ( IsDBEnv env
+  , HasMail env
+  , HasNLPServer env
+  , CET.HasCentralExchangeNotification env
+  )
+
+-- | The most general constraints for commands. To interact with the database,
+-- or access extra features (such as sending mail), you'll need to add some more
+-- constraints (see the rest of this module)
+type IsCmd env err m =
+  ( MonadReader env m
+  , MonadError err m
+  , MonadBaseControl IO m
+  )
+
+-- | Only the /minimum/ amount of class constraints required
+-- to use the Gargantext Database. It's important, to ease testability,
+-- that these constraints stay as few as possible.
+type IsDBCmd env err m =
+  ( IsCmd env err m
+  , IsDBEnv env
+  )
+
+-- | Full-fledged command class. Types in this class provide commands that can
+-- interact with the database, perform NLP processing, etc.
+type IsDBCmdExtra env err m =
+  ( IsCmd env err m
+  , IsDBEnvExtra env
+  )
+
+-- | Basic command with access to randomness. It feels a little ad hoc to have
+-- such a constraint instead of substituting it (and its counterpart existential
+-- type `CmdRandom`) with its definition every time it appears in the codebase,
+-- but I tried doing that substitution and it wasn't so easy.
+type IsCmdRandom env err m =
+  ( IsCmd env err m
+  , MonadRandom m
+  )
+
+-- | Barebones command type, without any built-in ability to interact with the
+-- database or do stuff like email exchanges.
+type Cmd env err a = forall m. IsCmd env err m => m a
+
+-- | Basic command type with access to randomness
+type CmdRandom env err a = forall m. IsCmdRandom env err m => m a
+
+-- | Command type that allows for interaction with the database.
+type DBCmd err a = forall m env. IsDBCmd env err m => m a
+
+-- | Command type that allows for interaction with the database. Similar to
+-- `DBCmd`, except you can constrain the environment type some more.
+type DBCmdWithEnv env err a = forall m. IsDBCmd env err m => m a
+
+-- | Full-fledged command types, with access to the database, mail, NLP
+-- processing and central exchange notifications.
+type DBCmdExtra err a = forall m env. IsDBCmdExtra env err m => m a
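A hedged usage sketch of the aliases above (`countAuthUsers` is illustrative and not from the codebase; its body mirrors the `dbCheck` change in `Gargantext.Database.Prelude` below, and the shape of `runDBQuery` / `mkPGQuery` is assumed from that change): a helper written against the existential `DBCmd` alias only demands `IsDBCmd env err m`, i.e. reader/error/`MonadBaseControl` capabilities plus a connection pool and a config in the environment, so the same body runs unchanged in any richer stack that also satisfies `IsDBCmdExtra`.

-- Illustrative only; needs ScopedTypeVariables and the qualified import
-- "Database.PostgreSQL.Simple qualified as PGS", as in Gargantext.Database.Prelude.
countAuthUsers :: DBCmd err Int
countAuthUsers = runDBQuery $ do
  rows :: [PGS.Only Text] <- mkPGQuery "select username from public.auth_user" ()
  pure (length rows)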
@@ -36,21 +36,14 @@ module Gargantext.Database.Prelude
   -- ** Miscellaneous Type(s)
   , JSONB
   -- * Functions
-  -- ** Executing DB Queries
-  -- *** PostgreSQL.Simple
-  , execPGSQuery
-  , runPGSQuery
-  , runPGSQuery_
-  -- *** Opaleye
-  , runOpaQuery
-  , runCountOpaQuery
+  -- ** Executing DB transactions
+  , module Gargantext.Database.Transactional
   -- ** Other Functions
   , runCmd
   , createDBIfNotExists
   , dbCheck
-  , formatPGSQuery
+  , debugFormatPGSQuery
   , fromField'
-  , mkCmd
   , restrictMaybe
   , createLargeObject
   , readLargeObject
@@ -60,28 +53,22 @@ module Gargantext.Database.Prelude
   where

 import Control.Exception.Safe qualified as CES
-import Control.Lens (Getter, view)
+import Control.Lens (view)
-import Control.Monad.Random ( MonadRandom )
-import Control.Monad.Trans.Control (MonadBaseControl)
 import Data.Aeson (Result(..))
 import Data.ByteString qualified as BS
 import Data.ByteString.Lazy qualified as BSL
 import Data.List qualified as DL
-import Data.Pool (Pool, withResource)
+import Data.Pool (withResource)
 import Data.Profunctor.Product.Default (Default)
 import Database.PostgreSQL.Simple (Connection)
 import Database.PostgreSQL.Simple qualified as PGS
 import Database.PostgreSQL.Simple.FromField ( Conversion, ResultError(ConversionFailed), fromField, returnError)
 import Database.PostgreSQL.Simple.Internal (Field)
 import Database.PostgreSQL.Simple.LargeObjects qualified as PSQL
-import Database.PostgreSQL.Simple.Types (Query(..))
-import Gargantext.Core.Notifications.CentralExchange.Types qualified as CET
-import Gargantext.Core.Config (HasConfig(..))
-import Gargantext.Core.Mail.Types (HasMail)
-import Gargantext.Core.NLP (HasNLPServer)
+import Gargantext.Database.Class
+import Gargantext.Database.Transactional
 import Gargantext.Prelude
-import Opaleye (FromFields, Select, runSelect, SqlJsonb, DefaultFromField, toFields, matchMaybe, MaybeFields)
+import Opaleye (SqlJsonb, DefaultFromField, toFields, matchMaybe, MaybeFields)
-import Opaleye.Aggregate (countRows)
 import Opaleye.Internal.Constant qualified
 import Opaleye.Internal.Operators qualified
 import Shelly qualified as SH
@@ -89,106 +76,16 @@ import System.Directory (removeFile)
 import System.IO.Temp (emptySystemTempFile)

--- $typesAndConstraints
---
--- The names of the constraints and types in this module are chosen based on
--- the following guidelines:
--- * By default, constraints are relatively lenient. Stricter constraints are
--- obtained by appending the `Extra` suffix to the minimal constraint name.
--- * `IsDBEnv(Extra)` applies to the environment; the basic constraint allows
--- access to the database, and the `Extra` variant offers some more
--- capabilities such as access to mail.
--- * `IsCmd` is the basic constraint for command monads. Append `DB` to it to get
--- a monad of commands that can talk to the database. Append `Extra` to get
--- the ability to send mail, make use of the NLP server and deal with central
--- exchange notifications. Append `Random` to get access to randomness.
--- * Existential versions of the constraints bear the same name as the constraint
--- they are based on, but without the `Is` prefix.
-
-class HasConnectionPool env where
-  connPool :: Getter env (Pool Connection)
-
-instance HasConnectionPool (Pool Connection) where
-  connPool = identity
-
--- | The most basic constraints for an environment with a database.
--- If possible, try to not add more constraints here. When performing
--- a query/update on the DB, one shouldn't need more than being able to
--- fetch from the underlying 'env' the connection pool and access the
--- 'GargConfig' for some sensible defaults to store into the DB.
-type IsDBEnv env =
-  ( HasConnectionPool env
-  , HasConfig env
-  )
-
--- | Constraints for a full-fledged environment, with a database, mail exchange,
--- NLP processing, notifications.
-type IsDBEnvExtra env =
-  ( IsDBEnv env
-  , HasMail env
-  , HasNLPServer env
-  , CET.HasCentralExchangeNotification env
-  )
-
--- | The most general constraints for commands. To interact with the database,
--- or access extra features (such as sending mail), you'll need to add some more
--- constraints (see the rest of this module)
-type IsCmd env err m =
-  ( MonadReader env m
-  , MonadError err m
-  , MonadBaseControl IO m
-  )
-
--- | Only the /minimum/ amount of class constraints required
--- to use the Gargantext Database. It's important, to ease testability,
--- that these constraints stays as few as possible.
-type IsDBCmd env err m =
-  ( IsCmd env err m
-  , IsDBEnv env
-  )
-
--- | Full-fledged command class. Types in this class provide commands that can
--- interact with the database, perform NLP processing, etc.
-type IsDBCmdExtra env err m =
-  ( IsCmd env err m
-  , IsDBEnvExtra env
-  )
-
--- | Basic command with access to randomness. It feels a little ad hoc to have
--- such a constraint instead of substituting it (and its counterpart existential
--- type `CmdRandom`) with its definition every time it appears in the codebase,
--- but I tried to doing that substitution and it wasn't so easy.
-type IsCmdRandom env err m =
-  ( IsCmd env err m
-  , MonadRandom m
-  )
-
--- | Barebones command type, without any built-in ability to interact with the
--- database or do stuff like email exchanges.
-type Cmd env err a = forall m. IsCmd env err m => m a
-
--- | Basic command type with access to randomness
-type CmdRandom env err a = forall m. IsCmdRandom env err m => m a
-
--- | Command type that allows for interaction with the database.
-type DBCmd err a = forall m env. IsDBCmd env err m => m a
-
--- | Command type that allows for interaction with the database. Similar to
--- `DBCmd`, except you can constraint the environment type some more.
-type DBCmdWithEnv env err a = forall m. IsDBCmd env err m => m a
-
--- | Full-fledged command types, with access to the database, mail, NLP
--- processing and central exchange notifications.
-type DBCmdExtra err a = forall m env. IsDBCmdExtra env err m => m a
 type JSONB = DefaultFromField SqlJsonb

-fromInt64ToInt :: Int64 -> Int
-fromInt64ToInt = fromIntegral
-
--- TODO: ideally there should be very few calls to this functions.
-mkCmd :: (Connection -> IO a) -> DBCmd err a
-mkCmd k = do
+-- FIXME(adinapoli): Using this function is dangerous and it should
+-- eventually be removed. This function allows embedding /any/ IO computation
+-- into a 'DBCmd' with a given 'Connection', which means we can completely
+-- bypass the transactional API. This function should /NOT/ be exported, but
+-- rather used here carefully on a case-by-case analysis, like the functions
+-- dealing with large objects.
+withConn :: (Connection -> IO a) -> DBCmd err a
+withConn k = do
   pool <- view connPool
   liftBase $ withResource pool (liftBase . k)
@@ -197,41 +94,9 @@ runCmd :: env
   -> IO (Either err a)
 runCmd env m = runExceptT $ runReaderT m env

-runOpaQuery :: Default FromFields fields haskells
-  => Select fields
-  -> DBCmd err [haskells]
-runOpaQuery q = mkCmd $ \c -> runSelect c q
-
-runCountOpaQuery :: Select a -> DBCmd err Int
-runCountOpaQuery q = do
-  counts <- mkCmd $ \c -> runSelect c $ countRows q
-  -- countRows is guaranteed to return a list with exactly one row so DL.head is safe here
-  pure $ fromInt64ToInt $ DL.head counts
-
-formatPGSQuery :: PGS.ToRow a => PGS.Query -> a -> DBCmd err BS.ByteString
-formatPGSQuery q a = mkCmd $ \conn -> PGS.formatQuery conn q a
-
-runPGSQuery :: ( PGS.FromRow r, PGS.ToRow q )
-  => PGS.Query -> q -> DBCmd err [r]
-runPGSQuery q a = mkCmd $ \conn -> catch (PGS.query conn q a) (printError conn)
+debugFormatPGSQuery :: PGS.ToRow a => PGS.Query -> a -> DBCmd err BS.ByteString
+debugFormatPGSQuery q a = withConn $ \conn -> PGS.formatQuery conn q a
   where
-    printError c (SomeException e) = do
-      q' <- PGS.formatQuery c q a
-      hPutStrLn stderr q'
-      CES.throw (SomeException e)
-
--- | TODO catch error
-runPGSQuery_ :: ( PGS.FromRow r )
-  => PGS.Query -> DBCmd err [r]
-runPGSQuery_ q = mkCmd $ \conn -> catch (PGS.query_ conn q) printError
-  where
-    printError (SomeException e) = do
-      hPutStrLn stderr (fromQuery q)
-      CES.throw (SomeException e)
-
-execPGSQuery :: PGS.ToRow a => PGS.Query -> a -> DBCmd err Int64
-execPGSQuery q a = mkCmd $ \conn -> PGS.execute conn q a

 fromField' :: (Typeable b, FromJSON b) => Field -> Maybe BS.ByteString -> Conversion b
 fromField' field mb = do
@@ -247,8 +112,8 @@ fromField' field mb = do
    ]

 dbCheck :: DBCmd err Bool
-dbCheck = do
-  r :: [PGS.Only Text] <- runPGSQuery_ "select username from public.auth_user"
+dbCheck = runDBQuery $ do
+  r :: [PGS.Only Text] <- mkPGQuery "select username from public.auth_user" ()
   case r of
     [] -> pure False
     _ -> pure True
@@ -286,7 +151,7 @@ createDBIfNotExists connStr dbName = do
 -- objects and if you do, make sure the tests run.
 createLargeObject :: BS.ByteString -> DBCmd err PSQL.Oid
-createLargeObject bs = mkCmd $ \c -> PGS.withTransaction c $ do
+createLargeObject bs = withConn $ \c -> PGS.withTransaction c $ do
   oId <- PSQL.loCreat c
   loFd <- PSQL.loOpen c oId PSQL.WriteMode
   _ <- PSQL.loWrite c loFd bs
@@ -296,7 +161,7 @@ createLargeObject bs = mkCmd $ \c -> PGS.withTransaction c $ do
 -- | Read a large object directly, given an oid. We read it in a
 -- single transaction, looping by given chunk size
 readLargeObject :: PSQL.Oid -> DBCmd err BS.ByteString
-readLargeObject oId = mkCmd $ \c -> PGS.withTransaction c $ do
+readLargeObject oId = withConn $ \c -> PGS.withTransaction c $ do
   loFd <- PSQL.loOpen c oId PSQL.ReadMode
   let chunkSize = 1024
   let readChunks tell = do
@@ -321,12 +186,12 @@ readLargeObjectViaTempFile oId = do
   CES.bracket (liftBase $ emptySystemTempFile "large-object")
     (liftBase . removeFile)
     (\fp -> do
-       mkCmd $ \c -> withTransaction c $ \_ -> PSQL.loExport c oId fp
+       withConn $ \c -> withTransaction c $ \_ -> PSQL.loExport c oId fp
       !contents <- liftBase $ BS.readFile fp
       pure contents)
   where
     withTransaction c = CES.bracket (PGS.begin c) (\_ -> PGS.rollback c)

 removeLargeObject :: Int -> DBCmd err ()
-removeLargeObject oId = mkCmd $ \c -> do
+removeLargeObject oId = withConn $ \c -> do
   PSQL.loUnlink c $ PSQL.Oid $ fromIntegral oId
@@ -21,7 +21,7 @@ import Gargantext.Core.Types
 import Gargantext.Core.Types.Query (Limit, Offset)
 import Gargantext.Database.Admin.Types.Hyperdata.Any ( HyperdataAny )
 import Gargantext.Database.Admin.Types.Hyperdata.Document ( HyperdataDocument, HyperdataDocumentV3 )
-import Gargantext.Database.Prelude (DBCmd, JSONB, runOpaQuery)
+import Gargantext.Database.Prelude
 import Gargantext.Database.Query.Filter (limit', offset')
 import Gargantext.Database.Query.Table.Node.Error ( HasNodeError, nodeError, NodeError(NoContextFound) )
 import Gargantext.Database.Schema.Context
@@ -31,9 +31,9 @@ import Prelude hiding (null, id, map, sum)
 getContextWith :: (HasNodeError err, JSONB a)
-  => ContextId -> proxy a -> DBCmd err (Node a)
+  => ContextId -> proxy a -> DBQuery err x (Node a)
 getContextWith cId _ = do
-  maybeContext <- headMay <$> runOpaQuery (selectContext (pgContextId cId))
+  maybeContext <- headMay <$> mkOpaQuery (selectContext (pgContextId cId))
   case maybeContext of
     Nothing -> nodeError (NoContextFound cId)
     Just r -> pure $ context2node r
@@ -47,8 +47,8 @@ selectContext id' = proc () -> do
   restrict -< _context_id row .== id'
   returnA -< row

-runGetContexts :: Select ContextRead -> DBCmd err [Context HyperdataAny]
-runGetContexts = runOpaQuery
+runGetContexts :: Select ContextRead -> DBQuery err x [Context HyperdataAny]
+runGetContexts = mkOpaQuery

 ------------------------------------------------------------------------
 ------------------------------------------------------------------------
@@ -80,12 +80,12 @@ selectContextsWith' parentId maybeContextType = proc () -> do
 ------------------------------------------------------------------------

-getDocumentsV3WithParentId :: HasDBid NodeType => NodeId -> DBCmd err [Context HyperdataDocumentV3]
-getDocumentsV3WithParentId n = runOpaQuery $ selectContextsWith' n (Just NodeDocument)
+getDocumentsV3WithParentId :: HasDBid NodeType => NodeId -> DBQuery err x [Context HyperdataDocumentV3]
+getDocumentsV3WithParentId n = mkOpaQuery $ selectContextsWith' n (Just NodeDocument)

 -- TODO: merge with getDocumentsWithParentId by having a class IsHyperdataDocument
-getDocumentsWithParentId :: HasDBid NodeType => NodeId -> DBCmd err [Context HyperdataDocument]
-getDocumentsWithParentId n = runOpaQuery $ selectContextsWith' n (Just NodeDocument)
+getDocumentsWithParentId :: HasDBid NodeType => NodeId -> DBQuery err x [Context HyperdataDocument]
+getDocumentsWithParentId n = mkOpaQuery $ selectContextsWith' n (Just NodeDocument)

 ------------------------------------------------------------------------
 selectContextsWithParentID :: NodeId -> Select ContextRead
@@ -99,8 +99,8 @@ selectContextsWithParentID n = proc () -> do
 -- | Example of use:
 -- runCmdReplEasy (getNodesWithType NodeList (Proxy :: Proxy HyperdataList))
 getContextsWithType :: (HasNodeError err, JSONB a, HasDBid NodeType)
-  => NodeType -> proxy a -> DBCmd err [Context a]
-getContextsWithType nt _ = runOpaQuery $ selectContextsWithType nt
+  => NodeType -> proxy a -> DBQuery err x [Context a]
+getContextsWithType nt _ = mkOpaQuery $ selectContextsWithType nt
   where
     selectContextsWithType :: HasDBid NodeType
       => NodeType -> Select ContextRead
@@ -110,9 +110,9 @@ getContextsWithType nt _ = runOpaQuery $ selectContextsWithType nt
   returnA -< row

 getContextsIdWithType :: (HasNodeError err, HasDBid NodeType)
-  => NodeType -> DBCmd err [ContextId]
+  => NodeType -> DBQuery err x [ContextId]
 getContextsIdWithType nt = do
-  ns <- runOpaQuery $ selectContextsIdWithType nt
+  ns <- mkOpaQuery $ selectContextsIdWithType nt
   pure (map UnsafeMkContextId ns)

 selectContextsIdWithType :: HasDBid NodeType
...
@@ -85,7 +85,7 @@ import Gargantext.Database.Admin.Types.Hyperdata.Default ( defaultHyperdata, Def
 import Gargantext.Database.Admin.Types.Hyperdata.Folder (HyperdataFolder)
 import Gargantext.Database.Admin.Types.Hyperdata.List ( HyperdataList )
 import Gargantext.Database.Admin.Types.Hyperdata.Prelude ( Hyperdata )
-import Gargantext.Database.Prelude (DBCmd, JSONB, mkCmd, execPGSQuery, runPGSQuery, runOpaQuery)
+import Gargantext.Database.Prelude
 import Gargantext.Database.Query.Filter (limit', offset')
 import Gargantext.Database.Query.Table.Node.Children (getChildrenByParentId)
 import Gargantext.Database.Query.Table.Node.Error
@@ -132,17 +132,17 @@ selectNodesWith' parentId maybeNodeType = proc () -> do
     returnA -< row ) -< ()
   returnA -< node'

-deleteNode :: NodeId -> DBCmd err Int
-deleteNode n = mkCmd $ \conn ->
-  fromIntegral <$> runDelete conn
+deleteNode :: NodeId -> DBUpdate err Int
+deleteNode n =
+  fromIntegral <$> mkOpaDelete
     (Delete nodeTable
       (\(Node n_id _ _ _ _ _ _ _) -> n_id .== pgNodeId n)
       rCount
    )

-deleteNodes :: [NodeId] -> DBCmd err Int
-deleteNodes ns = mkCmd $ \conn ->
-  fromIntegral <$> runDelete conn
+deleteNodes :: [NodeId] -> DBUpdate err Int
+deleteNodes ns =
+  fromIntegral <$> mkOpaDelete
     (Delete nodeTable
       (\(Node n_id _ _ _ _ _ _ _) -> in_ ((map pgNodeId ns)) n_id)
      rCount
@@ -150,25 +150,25 @@ deleteNodes ns = mkCmd $ \conn ->
 -- TODO: NodeType should match with `a'
 getNodesWith :: (JSONB a, HasDBid NodeType) => NodeId -> proxy a -> Maybe NodeType
-  -> Maybe Offset -> Maybe Limit -> DBCmd err [Node a]
+  -> Maybe Offset -> Maybe Limit -> DBQuery err x [Node a]
 getNodesWith parentId _ nodeType maybeOffset maybeLimit =
-  runOpaQuery $ selectNodesWith parentId nodeType maybeOffset maybeLimit
+  mkOpaQuery $ selectNodesWith parentId nodeType maybeOffset maybeLimit

 -- TODO: Why is the second parameter ignored?
 -- TODO: Why not use getNodesWith?
 getNodesWithParentId :: (Hyperdata a, JSONB a)
   => Maybe NodeId
-  -> DBCmd err [Node a]
-getNodesWithParentId n = runOpaQuery $ selectNodesWithParentID n'
+  -> DBQuery err x [Node a]
+getNodesWithParentId n = mkOpaQuery $ selectNodesWithParentID n'
   where
     n' = case n of
      Just n'' -> n''
      Nothing -> 0

 -- | Given a node id, find it's parent node id (if exists)
-getParentId :: NodeId -> DBCmd err (Maybe NodeId)
+getParentId :: NodeId -> DBQuery err x (Maybe NodeId)
 getParentId nId = do
-  result <- runPGSQuery query (PGS.Only nId)
+  result <- mkPGQuery query (PGS.Only nId)
   case result of
     [PGS.Only parentId] -> pure $ Just $ UnsafeMkNodeId parentId
     _ -> pure Nothing
@@ -186,9 +186,9 @@ getParentId nId = do
 getClosestParentIdByType :: HasDBid NodeType
   => NodeId
   -> NodeType
-  -> DBCmd err (Maybe NodeId)
+  -> DBQuery err x (Maybe NodeId)
 getClosestParentIdByType nId nType = do
-  result <- runPGSQuery query (PGS.Only nId)
+  result <- mkPGQuery query (PGS.Only nId)
   case result of
     [(_NodeId -> parentId, pTypename)] -> do
       if toDBid nType == pTypename then
@@ -210,9 +210,9 @@ getClosestParentIdByType nId nType = do
 getClosestParentIdByType' :: HasDBid NodeType
   => NodeId
   -> NodeType
-  -> DBCmd err (Maybe NodeId)
+  -> DBQuery err x (Maybe NodeId)
 getClosestParentIdByType' nId nType = do
-  result <- runPGSQuery query (PGS.Only nId)
+  result <- mkPGQuery query (PGS.Only nId)
   case result of
     [(_NodeId -> id, pTypename)] -> do
       if toDBid nType == pTypename then
@@ -233,7 +233,7 @@ getClosestParentIdByType' nId nType = do
 getChildrenByType :: HasDBid NodeType
   => NodeId
   -> NodeType
-  -> DBCmd err [NodeId]
+  -> DBQuery err x [NodeId]
 getChildrenByType nId nType = do
   childrenFirstLevel <- getClosestChildrenByType nId nType
   childrenLst <- mapM (\id -> getChildrenByType id nType) childrenFirstLevel
@@ -244,9 +244,9 @@ getChildrenByType nId nType = do
 getClosestChildrenByType :: HasDBid NodeType
   => NodeId
   -> NodeType
-  -> DBCmd err [NodeId]
+  -> DBQuery err x [NodeId]
 getClosestChildrenByType nId nType = do
-  results <- runPGSQuery query (nId, toDBid nType)
+  results <- mkPGQuery query (nId, toDBid nType)
   pure $ (\(PGS.Only nodeId) -> nodeId) <$> results
   where
     query :: PGS.Query
@@ -258,8 +258,8 @@ getClosestChildrenByType nId nType = do
  |]

 ------------------------------------------------------------------------
-getCorporaWithParentId :: HasDBid NodeType => NodeId -> DBCmd err [Node HyperdataCorpus]
-getCorporaWithParentId n = runOpaQuery $ selectNodesWith' n (Just NodeCorpus)
+getCorporaWithParentId :: HasDBid NodeType => NodeId -> DBQuery err x [Node HyperdataCorpus]
+getCorporaWithParentId n = mkOpaQuery $ selectNodesWith' n (Just NodeCorpus)

 ------------------------------------------------------------------------
 selectNodesWithParentID :: NodeId -> Select NodeRead
@@ -272,8 +272,8 @@ selectNodesWithParentID n = proc () -> do
 ------------------------------------------------------------------------
 -- | Example of use:
 -- runCmdReplEasy (getNodesWithType NodeList (Proxy :: Proxy HyperdataList))
-getNodesWithType :: (HasNodeError err, JSONB a, HasDBid NodeType) => NodeType -> proxy a -> DBCmd err [Node a]
-getNodesWithType nt _ = runOpaQuery $ selectNodesWithType nt
+getNodesWithType :: (HasNodeError err, JSONB a, HasDBid NodeType) => NodeType -> proxy a -> DBQuery err x [Node a]
+getNodesWithType nt _ = mkOpaQuery $ selectNodesWithType nt
   where
     selectNodesWithType :: HasDBid NodeType
       => NodeType -> Select NodeRead
@@ -286,8 +286,8 @@ getNodeWithType :: (HasNodeError err, JSONB a, HasDBid NodeType)
   => NodeId
   -> NodeType
   -> proxy a
-  -> DBCmd err [Node a]
-getNodeWithType nId nt _ = runOpaQuery $ selectNodeWithType nId nt
+  -> DBQuery err x [Node a]
+getNodeWithType nId nt _ = mkOpaQuery $ selectNodeWithType nId nt
   where
     selectNodeWithType :: HasDBid NodeType
      => NodeId -> NodeType -> Select NodeRead
@@ -297,9 +297,9 @@ getNodeWithType nId nt _ = runOpaQuery $ selectNodeWithType nId nt
   restrict -< tn .== sqlInt4 (toDBid nt')
   returnA -< row

-getNodesIdWithType :: (HasNodeError err, HasDBid NodeType) => NodeType -> DBCmd err [NodeId]
+getNodesIdWithType :: (HasNodeError err, HasDBid NodeType) => NodeType -> DBQuery err x [NodeId]
 getNodesIdWithType nt = do
-  ns <- runOpaQuery $ selectNodesIdWithType nt
+  ns <- mkOpaQuery $ selectNodesIdWithType nt
   pure (map UnsafeMkNodeId ns)

 selectNodesIdWithType :: HasDBid NodeType
@@ -310,15 +310,15 @@ selectNodesIdWithType nt = proc () -> do
   returnA -< _node_id row

 -- | Get node, Hyperdata is 'Aeson.Value'
-getNode :: HasNodeError err => NodeId -> DBCmd err (Node Value)
+getNode :: HasNodeError err => NodeId -> DBQuery err x (Node Value)
 getNode nId = do
-  maybeNode <- headMay <$> runOpaQuery (selectNode (pgNodeId nId))
+  maybeNode <- headMay <$> mkOpaQuery (selectNode (pgNodeId nId))
   case maybeNode of
     Nothing -> nodeError (DoesNotExist nId)
     Just r -> pure r

 -- | Get the nodes recursively, as a hierarchical tree.
-getNodes :: HasNodeError err => NodeId -> DBCmd err (Tree (Node Value))
+getNodes :: HasNodeError err => NodeId -> DBQuery err x (Tree (Node Value))
 getNodes nId = do
   root <- getNode nId
   children <- getChildrenByParentId nId
@@ -328,16 +328,16 @@ getNodes nId = do
 -- | Get the parent of a given 'Node', failing if this was called
 -- on a root node.
-getParent :: HasNodeError err => Node a -> DBCmd err (Node Value)
+getParent :: HasNodeError err => Node a -> DBQuery err x (Node Value)
 getParent n = do
   case n ^. node_parent_id of
     Nothing -> nodeError NoRootFound
     Just nId -> getNode nId

 getNodeWith :: (HasNodeError err, JSONB a)
-  => NodeId -> proxy a -> DBCmd err (Node a)
+  => NodeId -> proxy a -> DBQuery err x (Node a)
 getNodeWith nId _ = do
-  maybeNode <- headMay <$> runOpaQuery (selectNode (pgNodeId nId))
+  maybeNode <- headMay <$> mkOpaQuery (selectNode (pgNodeId nId))
   case maybeNode of
     Nothing -> nodeError (DoesNotExist nId)
     Just r -> pure r
@@ -346,11 +346,11 @@ getNodeWith nId _ = do
 ------------------------------------------------------------------------
 -- | Sugar to insert Node with NodeType in Database
 insertDefaultNode :: (HasDBid NodeType, HasNodeError err)
-  => NodeType -> ParentId -> UserId -> DBCmd err NodeId
+  => NodeType -> ParentId -> UserId -> DBUpdate err NodeId
 insertDefaultNode nt p u = insertNode nt Nothing Nothing p u

 insertDefaultNodeIfNotExists :: (HasDBid NodeType, HasNodeError err)
-  => NodeType -> ParentId -> UserId -> DBCmd err [NodeId]
+  => NodeType -> ParentId -> UserId -> DBUpdate err [NodeId]
 insertDefaultNodeIfNotExists nt p u = do
   children <- getChildrenByType p nt
   case children of
@@ -358,7 +358,7 @@ insertDefaultNodeIfNotExists nt p u = do
     xs -> pure xs

 insertNode :: (HasDBid NodeType, HasNodeError err)
-  => NodeType -> Maybe Name -> Maybe DefaultHyperdata -> ParentId -> UserId -> DBCmd err NodeId
+  => NodeType -> Maybe Name -> Maybe DefaultHyperdata -> ParentId -> UserId -> DBUpdate err NodeId
 insertNode nt n h p u = insertNodeWithHyperdata nt n' h' (Just p) u
   where
     n' = fromMaybe (defaultName nt) n
@@ -370,7 +370,7 @@ insertNodeWithHyperdata :: (ToJSON h, Hyperdata h, HasDBid NodeType, HasNodeErro
   -> h
   -> Maybe ParentId
   -> UserId
-  -> DBCmd err NodeId
+  -> DBUpdate err NodeId
 insertNodeWithHyperdata nt n h p u = do
   res <- insertNodesR [node nt n h p u]
   case res of
@@ -397,12 +397,12 @@ node nodeType name hyperData parentId userId =
     typeId = toDBid nodeType

 -------------------------------
-insertNodesR :: [NodeWrite] -> DBCmd err [NodeId]
-insertNodesR ns = mkCmd $ \conn ->
-  runInsert conn (Insert nodeTable ns (rReturning (\(Node i _ _ _ _ _ _ _) -> i)) Nothing)
+insertNodesR :: [NodeWrite] -> DBUpdate err [NodeId]
+insertNodesR ns = mkOpaInsert (Insert nodeTable ns (rReturning (\(Node i _ _ _ _ _ _ _) -> i)) Nothing)

-insertNodesWithParentR :: Maybe ParentId -> [NodeWrite] -> DBCmd err [NodeId]
+insertNodesWithParentR :: Maybe ParentId -> [NodeWrite] -> DBUpdate err [NodeId]
 insertNodesWithParentR pid ns = insertNodesR (set node_parent_id (pgNodeId <$> pid) <$> ns)

 ------------------------------------------------------------------------
 -- TODO
 -- currently this function removes the child relation
@@ -422,7 +422,12 @@ data CorpusType = CorpusDocument | CorpusContact
 class MkCorpus a
   where
-    mk :: (HasDBid NodeType, HasNodeError err) => Maybe Name -> Maybe a -> ParentId -> UserId -> DBCmd err [NodeId]
+    mk :: (HasDBid NodeType, HasNodeError err)
+       => Maybe Name
+       -> Maybe a
+       -> ParentId
+       -> UserId
+       -> DBUpdate err [NodeId]

 instance MkCorpus HyperdataCorpus
   where
@@ -439,44 +444,44 @@ instance MkCorpus HyperdataAnnuaire
getOrMkList :: (HasNodeError err, HasDBid NodeType) getOrMkList :: (HasNodeError err, HasDBid NodeType)
=> ParentId => ParentId
-> UserId -> UserId
-> DBCmd err ListId -> DBUpdate err ListId
getOrMkList pId uId = getOrMkList pId uId =
maybe (mkList' pId uId) (pure . view node_id) . headMay =<< getListsWithParentId pId maybe (mkList' pId uId) (pure . view node_id) . headMay =<< getListsWithParentId pId
where where
mkList' pId' uId' = insertDefaultNode NodeList pId' uId' mkList' pId' uId' = insertDefaultNode NodeList pId' uId'
-- | TODO remove defaultList -- | TODO remove defaultList
defaultList :: (HasNodeError err, HasDBid NodeType) => CorpusId -> DBCmd err ListId defaultList :: (HasNodeError err, HasDBid NodeType) => CorpusId -> DBQuery err x ListId
defaultList cId = defaultList cId =
maybe (nodeError (NoListFound cId)) (pure . view node_id) . headMay =<< getListsWithParentId cId maybe (nodeError (NoListFound cId)) (pure . view node_id) . headMay =<< getListsWithParentId cId
getListsWithParentId :: HasDBid NodeType => NodeId -> DBCmd err [Node HyperdataList] getListsWithParentId :: HasDBid NodeType => NodeId -> DBQuery err x [Node HyperdataList]
getListsWithParentId n = runOpaQuery $ selectNodesWith' n (Just NodeList) getListsWithParentId n = mkOpaQuery $ selectNodesWith' n (Just NodeList)
-- | Returns the /root/ public node for the input user. By root we mean that -- | Returns the /root/ public node for the input user. By root we mean that
-- if we were to traverse all the parents of the result, we wouldn't find any -- if we were to traverse all the parents of the result, we wouldn't find any
-- other parent whose 'NodeType' is 'NodeFolderPublic'. -- other parent whose 'NodeType' is 'NodeFolderPublic'.
getUserRootPublicNode :: (HasNodeError err, HasDBid NodeType) getUserRootPublicNode :: (HasNodeError err, HasDBid NodeType)
=> UserId => UserId
-> DBCmd err (Node HyperdataFolder) -> DBQuery err x (Node HyperdataFolder)
getUserRootPublicNode = get_user_root_node_folder NodeFolderPublic getUserRootPublicNode = get_user_root_node_folder NodeFolderPublic
getUserRootPrivateNode :: (HasNodeError err, HasDBid NodeType) getUserRootPrivateNode :: (HasNodeError err, HasDBid NodeType)
=> UserId => UserId
-> DBCmd err (Node HyperdataFolder) -> DBQuery err x (Node HyperdataFolder)
getUserRootPrivateNode = get_user_root_node_folder NodeFolderPrivate getUserRootPrivateNode = get_user_root_node_folder NodeFolderPrivate
getUserRootShareNode :: (HasNodeError err, HasDBid NodeType) getUserRootShareNode :: (HasNodeError err, HasDBid NodeType)
=> UserId => UserId
-> DBCmd err (Node HyperdataFolder) -> DBQuery err x (Node HyperdataFolder)
getUserRootShareNode = get_user_root_node_folder NodeFolderShared getUserRootShareNode = get_user_root_node_folder NodeFolderShared
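For illustration only (not part of this patch), the three getters above compose into a single read-only transaction, since 'DBQuery' is a monad:
-- Hypothetical helper, not in this changeset: fetch a user's three root folders
-- in one read-only transaction, reusing the getters defined above.
getUserRootFolders :: (HasNodeError err, HasDBid NodeType)
                   => UserId
                   -> DBQuery err x (Node HyperdataFolder, Node HyperdataFolder, Node HyperdataFolder)
getUserRootFolders uid = (,,)
  <$> getUserRootPublicNode  uid
  <*> getUserRootPrivateNode uid
  <*> getUserRootShareNode   uid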
get_user_root_node_folder :: (HasNodeError err, HasDBid NodeType) get_user_root_node_folder :: (HasNodeError err, HasDBid NodeType)
=> NodeType => NodeType
-> UserId -> UserId
-> DBCmd err (Node HyperdataFolder) -> DBQuery err x (Node HyperdataFolder)
get_user_root_node_folder nty userId = do get_user_root_node_folder nty userId = do
result <- runOpaQuery $ do result <- mkOpaQuery $ do
n <- queryNodeTable n <- queryNodeTable
where_ $ (n ^. node_typename .== sqlInt4 (toDBid nty)) .&& where_ $ (n ^. node_typename .== sqlInt4 (toDBid nty)) .&&
(n ^. node_user_id .== sqlInt4 (_UserId userId)) (n ^. node_user_id .== sqlInt4 (_UserId userId))
...@@ -491,9 +496,9 @@ get_user_root_node_folder nty userId = do ...@@ -491,9 +496,9 @@ get_user_root_node_folder nty userId = do
-> pure $ NE.head (NE.sortWith _node_id folders) -> pure $ NE.head (NE.sortWith _node_id folders)
-- | An input 'NodeId' identifies a user node if its typename is 'NodeUser' and it has no parent_id. -- | An input 'NodeId' identifies a user node if its typename is 'NodeUser' and it has no parent_id.
isUserNode :: HasDBid NodeType => NodeId -> DBCmd err Bool isUserNode :: HasDBid NodeType => NodeId -> DBQuery err x Bool
isUserNode userNodeId = (== [PGS.Only True]) isUserNode userNodeId = (== [PGS.Only True])
<$> runPGSQuery [sql| <$> mkPGQuery [sql|
SELECT EXISTS ( SELECT EXISTS (
SELECT 1 SELECT 1
FROM nodes FROM nodes
...@@ -506,7 +511,7 @@ copyNode :: Bool -- ^ Whether to copy whole subtree (`True`) or just the node ...@@ -506,7 +511,7 @@ copyNode :: Bool -- ^ Whether to copy whole subtree (`True`) or just the node
-> Bool -- ^ Whether to deal with ngrams and contexts (`True`) or just the data in the `nodes` table (`False`) -> Bool -- ^ Whether to deal with ngrams and contexts (`True`) or just the data in the `nodes` table (`False`)
-> NodeId -- ^ ID of the node to be copied -> NodeId -- ^ ID of the node to be copied
-> NodeId -- ^ ID of the node which will become the parent of the copied node -> NodeId -- ^ ID of the node which will become the parent of the copied node
-> DBCmd BackendInternalError NodeId -- ^ ID of the copied node -> DBUpdate BackendInternalError NodeId -- ^ ID of the copied node
copyNode copySubtree smart idToCopy newParentId = if copySubtree copyNode copySubtree smart idToCopy newParentId = if copySubtree
-- Recursive copy: -- Recursive copy:
then do then do
...@@ -517,26 +522,22 @@ copyNode copySubtree smart idToCopy newParentId = if copySubtree ...@@ -517,26 +522,22 @@ copyNode copySubtree smart idToCopy newParentId = if copySubtree
return copiedNode return copiedNode
-- Single-node (non-recursive) copy: -- Single-node (non-recursive) copy:
else do else do
newNodes <- runPGSQuery copiedNode <- mkPGUpdateReturningOne
-- Copy node. Should return exactly one ID, that of the new node: -- Copy node. Should return exactly one ID, that of the new node:
[sql| [sql|
INSERT INTO public.nodes (typename, user_id, parent_id, name, date, hyperdata) INSERT INTO public.nodes (typename, user_id, parent_id, name, date, hyperdata)
SELECT typename, user_id, ?, name, date, hyperdata FROM public.nodes WHERE id = ? SELECT typename, user_id, ?, name, date, hyperdata FROM public.nodes WHERE id = ?
RETURNING id; RETURNING id;
|] (newParentId, idToCopy) |] (newParentId, idToCopy)
case newNodes of -- Check that we got exactly one node back -- Copy node stories/contexts if applicable
[copiedNode] -> do when smart $ do
-- Copy node stories/contexts if applicable nodeToCopy <- getNode idToCopy
when smart $ do case nodeTypes !> view node_typename nodeToCopy of
nodeToCopy <- getNode idToCopy NodeList -> copyNodeStories idToCopy copiedNode
case nodeTypes !> view node_typename nodeToCopy of -- Contexts are attached to a corpus node, not to the docs node:
NodeList -> copyNodeStories idToCopy copiedNode NodeCorpus -> copyNodeContexts idToCopy copiedNode
-- Contexts are attached to a corpus node, not to the docs node: _ -> return ()
NodeCorpus -> copyNodeContexts idToCopy copiedNode return copiedNode
_ -> return ()
return copiedNode
_ -> throwError $ InternalUnexpectedError $ SomeException $ PatternMatchFail $
"SQL insert returned zero or more than one node"
-- | Given two IDs of terms nodes, copies the node stories of the first into -- | Given two IDs of terms nodes, copies the node stories of the first into
...@@ -545,8 +546,8 @@ copyNode copySubtree smart idToCopy newParentId = if copySubtree ...@@ -545,8 +546,8 @@ copyNode copySubtree smart idToCopy newParentId = if copySubtree
-- TODO add a check that we are looking at the right type of node? -- TODO add a check that we are looking at the right type of node?
copyNodeStories :: NodeId -- ^ The ID of the node whose stories are to be copied copyNodeStories :: NodeId -- ^ The ID of the node whose stories are to be copied
-> NodeId -- ^ The ID of the node under which to copy the stories -> NodeId -- ^ The ID of the node under which to copy the stories
-> DBCmd BackendInternalError () -> DBUpdate BackendInternalError ()
copyNodeStories oldNodeId newNodeId = void $ execPGSQuery copyNodeStories oldNodeId newNodeId = void $ mkPGUpdate
[sql| [sql|
INSERT INTO node_stories INSERT INTO node_stories
(node_id, version, ngrams_type_id, ngrams_id, ngrams_repo_element) (node_id, version, ngrams_type_id, ngrams_id, ngrams_repo_element)
...@@ -560,8 +561,8 @@ copyNodeStories oldNodeId newNodeId = void $ execPGSQuery ...@@ -560,8 +561,8 @@ copyNodeStories oldNodeId newNodeId = void $ execPGSQuery
-- second, although the contexts are not technically duplicated in the database. -- second, although the contexts are not technically duplicated in the database.
copyNodeContexts :: NodeId -- ^ The ID of the node whose contexts are to be "copied" copyNodeContexts :: NodeId -- ^ The ID of the node whose contexts are to be "copied"
-> NodeId -- ^ The ID of the node under which to "copy" the contexts -> NodeId -- ^ The ID of the node under which to "copy" the contexts
-> DBCmd BackendInternalError () -> DBUpdate BackendInternalError ()
copyNodeContexts oldNodeId newNodeId = void $ execPGSQuery copyNodeContexts oldNodeId newNodeId = void $ mkPGUpdate
[sql| [sql|
INSERT INTO nodes_contexts (node_id, context_id, score, category) INSERT INTO nodes_contexts (node_id, context_id, score, category)
SELECT ?, context_id, score, category FROM nodes_contexts WHERE node_id = ? SELECT ?, context_id, score, category FROM nodes_contexts WHERE node_id = ?
......
...@@ -23,7 +23,7 @@ import Gargantext.Core.Types ...@@ -23,7 +23,7 @@ import Gargantext.Core.Types
import Gargantext.Core.Types.Query (Limit, Offset) import Gargantext.Core.Types.Query (Limit, Offset)
import Gargantext.Database.Admin.Types.Hyperdata.Contact ( HyperdataContact ) import Gargantext.Database.Admin.Types.Hyperdata.Contact ( HyperdataContact )
import Gargantext.Database.Admin.Types.Hyperdata.Document ( HyperdataDocument ) import Gargantext.Database.Admin.Types.Hyperdata.Document ( HyperdataDocument )
import Gargantext.Database.Prelude (DBCmd, JSONB, runCountOpaQuery, runOpaQuery, runPGSQuery) import Gargantext.Database.Prelude
import Gargantext.Database.Query.Filter ( limit', offset' ) import Gargantext.Database.Query.Filter ( limit', offset' )
import Gargantext.Database.Query.Table.NodeContext ( NodeContextPoly(NodeContext), queryNodeContextTable ) import Gargantext.Database.Query.Table.NodeContext ( NodeContextPoly(NodeContext), queryNodeContextTable )
import Gargantext.Database.Schema.Context import Gargantext.Database.Schema.Context
...@@ -33,12 +33,12 @@ import Opaleye ...@@ -33,12 +33,12 @@ import Opaleye
-- TODO getAllTableDocuments -- TODO getAllTableDocuments
getAllDocuments :: HasDBid NodeType => ParentId -> DBCmd err (TableResult (Node HyperdataDocument)) getAllDocuments :: HasDBid NodeType => ParentId -> DBQuery err x (TableResult (Node HyperdataDocument))
getAllDocuments pId = getAllChildren pId (Proxy :: Proxy HyperdataDocument) getAllDocuments pId = getAllChildren pId (Proxy :: Proxy HyperdataDocument)
(Just NodeDocument) (Just NodeDocument)
-- TODO getAllTableContacts -- TODO getAllTableContacts
getAllContacts :: HasDBid NodeType => ParentId -> DBCmd err (TableResult (Node HyperdataContact)) getAllContacts :: HasDBid NodeType => ParentId -> DBQuery err x (TableResult (Node HyperdataContact))
getAllContacts pId = getAllChildren pId (Proxy :: Proxy HyperdataContact) getAllContacts pId = getAllChildren pId (Proxy :: Proxy HyperdataContact)
(Just NodeContact) (Just NodeContact)
...@@ -46,7 +46,7 @@ getAllChildren :: (JSONB a, HasDBid NodeType) ...@@ -46,7 +46,7 @@ getAllChildren :: (JSONB a, HasDBid NodeType)
=> ParentId => ParentId
-> proxy a -> proxy a
-> Maybe NodeType -> Maybe NodeType
-> DBCmd err (NodeTableResult a) -> DBQuery err x (NodeTableResult a)
getAllChildren pId p maybeNodeType = getChildren pId p maybeNodeType Nothing Nothing getAllChildren pId p maybeNodeType = getChildren pId p maybeNodeType Nothing Nothing
...@@ -56,7 +56,7 @@ getChildren :: (JSONB a, HasDBid NodeType) ...@@ -56,7 +56,7 @@ getChildren :: (JSONB a, HasDBid NodeType)
-> Maybe NodeType -> Maybe NodeType
-> Maybe Offset -> Maybe Offset
-> Maybe Limit -> Maybe Limit
-> DBCmd err (NodeTableResult a) -> DBQuery err x (NodeTableResult a)
getChildren pId p t@(Just NodeDocument) maybeOffset maybeLimit = getChildrenContext pId p t maybeOffset maybeLimit getChildren pId p t@(Just NodeDocument) maybeOffset maybeLimit = getChildrenContext pId p t maybeOffset maybeLimit
getChildren pId p t@(Just NodeContact ) maybeOffset maybeLimit = getChildrenContext pId p t maybeOffset maybeLimit getChildren pId p t@(Just NodeContact ) maybeOffset maybeLimit = getChildrenContext pId p t maybeOffset maybeLimit
getChildren a b c d e = getChildrenNode a b c d e getChildren a b c d e = getChildrenNode a b c d e
...@@ -64,8 +64,8 @@ getChildren a b c d e = getChildrenNode a b c d e ...@@ -64,8 +64,8 @@ getChildren a b c d e = getChildrenNode a b c d e
-- | Get the list of (IDs of) children of a given node (ID) -- | Get the list of (IDs of) children of a given node (ID)
getChildrenByParentId :: NodeId -- ^ ID of the parent node getChildrenByParentId :: NodeId -- ^ ID of the parent node
-> DBCmd err [NodeId] -- ^ List of IDs of the children nodes -> DBQuery err x [NodeId] -- ^ List of IDs of the children nodes
getChildrenByParentId parentId = runPGSQuery getChildrenByParentId parentId = mkPGQuery
[sql| SELECT id FROM public.nodes WHERE parent_id = ?; |] [sql| SELECT id FROM public.nodes WHERE parent_id = ?; |]
parentId parentId
...@@ -76,16 +76,16 @@ getChildrenNode :: (JSONB a, HasDBid NodeType) ...@@ -76,16 +76,16 @@ getChildrenNode :: (JSONB a, HasDBid NodeType)
-> Maybe NodeType -> Maybe NodeType
-> Maybe Offset -> Maybe Offset
-> Maybe Limit -> Maybe Limit
-> DBCmd err (NodeTableResult a) -> DBQuery err x (NodeTableResult a)
getChildrenNode pId _ maybeNodeType maybeOffset maybeLimit = do getChildrenNode pId _ maybeNodeType maybeOffset maybeLimit = do
-- printDebug "getChildrenNode" (pId, maybeNodeType) -- printDebug "getChildrenNode" (pId, maybeNodeType)
let query = selectChildrenNode pId maybeNodeType let query = selectChildrenNode pId maybeNodeType
docs <- runOpaQuery docs <- mkOpaQuery
$ limit' maybeLimit $ limit' maybeLimit
$ offset' maybeOffset $ offset' maybeOffset
$ orderBy (asc _node_id) $ orderBy (asc _node_id)
$ query $ query
docCount <- runCountOpaQuery query docCount <- mkOpaCountQuery query
pure $ TableResult { tr_docs = docs, tr_count = docCount } pure $ TableResult { tr_docs = docs, tr_count = docCount }
...@@ -107,18 +107,18 @@ getChildrenContext :: (JSONB a, HasDBid NodeType) ...@@ -107,18 +107,18 @@ getChildrenContext :: (JSONB a, HasDBid NodeType)
-> Maybe NodeType -> Maybe NodeType
-> Maybe Offset -> Maybe Offset
-> Maybe Limit -> Maybe Limit
-> DBCmd err (NodeTableResult a) -> DBQuery err x (NodeTableResult a)
getChildrenContext pId _ maybeNodeType maybeOffset maybeLimit = do getChildrenContext pId _ maybeNodeType maybeOffset maybeLimit = do
-- printDebug "getChildrenContext" (pId, maybeNodeType) -- printDebug "getChildrenContext" (pId, maybeNodeType)
let query = selectChildren' pId maybeNodeType let query = selectChildren' pId maybeNodeType
docs <- runOpaQuery docs <- mkOpaQuery
$ limit' maybeLimit $ limit' maybeLimit
$ offset' maybeOffset $ offset' maybeOffset
$ orderBy (asc _context_id) $ orderBy (asc _context_id)
$ query $ query
docCount <- runCountOpaQuery query docCount <- mkOpaCountQuery query
pure $ TableResult { tr_docs = map context2node docs, tr_count = docCount } pure $ TableResult { tr_docs = map context2node docs, tr_count = docCount }
......
...@@ -68,7 +68,7 @@ import Gargantext.Core (HasDBid(toDBid)) ...@@ -68,7 +68,7 @@ import Gargantext.Core (HasDBid(toDBid))
import Gargantext.Database.Admin.Types.Hyperdata.Contact ( HyperdataContact ) import Gargantext.Database.Admin.Types.Hyperdata.Contact ( HyperdataContact )
import Gargantext.Database.Admin.Types.Hyperdata.Document ( HyperdataDocument(..) ) import Gargantext.Database.Admin.Types.Hyperdata.Document ( HyperdataDocument(..) )
import Gargantext.Database.Admin.Types.Node import Gargantext.Database.Admin.Types.Node
import Gargantext.Database.Prelude (runPGSQuery, DBCmd{-, formatPGSQuery-}) import Gargantext.Database.Prelude
import Gargantext.Database.Schema.Node (NodePoly(..)) import Gargantext.Database.Schema.Node (NodePoly(..))
import Gargantext.Defaults qualified as Defaults import Gargantext.Defaults qualified as Defaults
import Gargantext.Prelude hiding (hash, toLower) import Gargantext.Prelude hiding (hash, toLower)
...@@ -87,8 +87,8 @@ import Database.PostgreSQL.Simple (formatQuery) ...@@ -87,8 +87,8 @@ import Database.PostgreSQL.Simple (formatQuery)
-- ParentId : folder ID which is parent of the inserted documents -- ParentId : folder ID which is parent of the inserted documents
-- Administrator of the database has to create a uniq index as following SQL command: -- Administrator of the database has to create a uniq index as following SQL command:
-- `create unique index on contexts table (typename, parent_id, (hyperdata ->> 'uniqId'));` -- `create unique index on contexts table (typename, parent_id, (hyperdata ->> 'uniqId'));`
insertDb :: (InsertDb a, HasDBid NodeType) => UserId -> Maybe ParentId -> [a] -> DBCmd err [ReturnId] insertDb :: (InsertDb a, HasDBid NodeType) => UserId -> Maybe ParentId -> [a] -> DBUpdate err [ReturnId]
insertDb u p = runPGSQuery queryInsert . Only . Values fields . map (insertDb' u p) insertDb u p as = mkPGUpdateReturningMany queryInsert (Only . Values fields $ map (insertDb' u p) as)
where where
fields = map (QualifiedIdentifier Nothing) inputSqlTypes fields = map (QualifiedIdentifier Nothing) inputSqlTypes
......
...@@ -135,15 +135,11 @@ instance ToJSON NodeError where ...@@ -135,15 +135,11 @@ instance ToJSON NodeError where
class HasNodeError e where class HasNodeError e where
_NodeError :: Prism' e NodeError _NodeError :: Prism' e NodeError
errorWith :: ( MonadError e m errorWith :: HasNodeError e => Text -> DBTx e r a
, HasNodeError e)
=> Text -> m a
errorWith x = nodeError (NodeError $ toException $ userError $ T.unpack x) errorWith x = nodeError (NodeError $ toException $ userError $ T.unpack x)
nodeError :: ( MonadError e m nodeError :: HasNodeError e => NodeError -> DBTx e r a
, HasNodeError e ) nodeError ne = dbFail $ _NodeError # ne
=> NodeError -> m a
nodeError ne = throwError $ _NodeError # ne
nodeCreationError :: ( MonadError e m, HasNodeError e) nodeCreationError :: ( MonadError e m, HasNodeError e)
=> NodeCreationError => NodeCreationError
......
...@@ -68,13 +68,13 @@ queryNodeContextTable :: Select NodeContextRead ...@@ -68,13 +68,13 @@ queryNodeContextTable :: Select NodeContextRead
queryNodeContextTable = selectTable nodeContextTable queryNodeContextTable = selectTable nodeContextTable
-- | not optimized (get all ngrams without filters) -- | not optimized (get all ngrams without filters)
_nodesContexts :: DBCmd err [NodeContext] _nodesContexts :: DBQuery err x [NodeContext]
_nodesContexts = runOpaQuery queryNodeContextTable _nodesContexts = mkOpaQuery queryNodeContextTable
------------------------------------------------------------------------ ------------------------------------------------------------------------
-- | Basic NodeContext tools -- | Basic NodeContext tools
getNodeContexts :: NodeId -> DBCmd err [NodeContext] getNodeContexts :: NodeId -> DBQuery err x [NodeContext]
getNodeContexts n = runOpaQuery (selectNodeContexts $ pgNodeId n) getNodeContexts n = mkOpaQuery (selectNodeContexts $ pgNodeId n)
where where
selectNodeContexts :: Field SqlInt4 -> Select NodeContextRead selectNodeContexts :: Field SqlInt4 -> Select NodeContextRead
selectNodeContexts n' = proc () -> do selectNodeContexts n' = proc () -> do
...@@ -83,9 +83,9 @@ getNodeContexts n = runOpaQuery (selectNodeContexts $ pgNodeId n) ...@@ -83,9 +83,9 @@ getNodeContexts n = runOpaQuery (selectNodeContexts $ pgNodeId n)
returnA -< ns returnA -< ns
getNodeContext :: HasNodeError err => ContextId -> NodeId -> DBCmd err NodeContext getNodeContext :: HasNodeError err => ContextId -> NodeId -> DBQuery err x NodeContext
getNodeContext c n = do getNodeContext c n = do
maybeNodeContext <- headMay <$> runOpaQuery (selectNodeContext (pgContextId c) (pgNodeId n)) maybeNodeContext <- headMay <$> mkOpaQuery (selectNodeContext (pgContextId c) (pgNodeId n))
case maybeNodeContext of case maybeNodeContext of
Nothing -> nodeError (NoContextFound c) Nothing -> nodeError (NoContextFound c)
Just r -> pure r Just r -> pure r
...@@ -97,9 +97,9 @@ getNodeContext c n = do ...@@ -97,9 +97,9 @@ getNodeContext c n = do
restrict -< _nc_node_id ns .== n' restrict -< _nc_node_id ns .== n'
returnA -< ns returnA -< ns
updateNodeContextCategory :: ContextId -> NodeId -> Int -> DBCmd err Int64 updateNodeContextCategory :: ContextId -> NodeId -> Int -> DBUpdate err Int64
updateNodeContextCategory cId nId cat = do updateNodeContextCategory cId nId cat = do
execPGSQuery upScore (cat, cId, nId) mkPGUpdate upScore (cat, cId, nId)
where where
upScore :: PGS.Query upScore :: PGS.Query
upScore = [sql| UPDATE nodes_contexts upScore = [sql| UPDATE nodes_contexts
...@@ -118,9 +118,9 @@ data ContextForNgrams = ...@@ -118,9 +118,9 @@ data ContextForNgrams =
getContextsForNgrams :: HasNodeError err getContextsForNgrams :: HasNodeError err
=> NodeId => NodeId
-> [Int] -> [Int]
-> DBCmd err [ContextForNgrams] -> DBQuery err x [ContextForNgrams]
getContextsForNgrams cId ngramsIds = do getContextsForNgrams cId ngramsIds = do
res <- runPGSQuery query (cId, PGS.In ngramsIds) res <- mkPGQuery query (cId, PGS.In ngramsIds)
pure $ (\( _cfn_nodeId pure $ (\( _cfn_nodeId
, _cfn_hash , _cfn_hash
, _cfn_userId , _cfn_userId
...@@ -152,10 +152,10 @@ getContextsForNgramsTerms :: HasNodeError err ...@@ -152,10 +152,10 @@ getContextsForNgramsTerms :: HasNodeError err
=> NodeId => NodeId
-> [Text] -> [Text]
-> Maybe Bool -> Maybe Bool
-> DBCmd err [ContextForNgramsTerms] -> DBQuery err x [ContextForNgramsTerms]
getContextsForNgramsTerms cId ngramsTerms (Just True) = do getContextsForNgramsTerms cId ngramsTerms (Just True) = do
let terms_length = length ngramsTerms let terms_length = length ngramsTerms
res <- runPGSQuery query (cId, PGS.In ngramsTerms, terms_length) res <- mkPGQuery query (cId, PGS.In ngramsTerms, terms_length)
pure $ (\( _cfnt_nodeId pure $ (\( _cfnt_nodeId
, _cfnt_hash , _cfnt_hash
, _cfnt_nodeTypeId , _cfnt_nodeTypeId
...@@ -198,7 +198,7 @@ getContextsForNgramsTerms cId ngramsTerms (Just True) = do ...@@ -198,7 +198,7 @@ getContextsForNgramsTerms cId ngramsTerms (Just True) = do
|] |]
getContextsForNgramsTerms cId ngramsTerms _ = do getContextsForNgramsTerms cId ngramsTerms _ = do
res <- runPGSQuery query (cId, PGS.In ngramsTerms) res <- mkPGQuery query (cId, PGS.In ngramsTerms)
pure $ (\( _cfnt_nodeId pure $ (\( _cfnt_nodeId
, _cfnt_hash , _cfnt_hash
, _cfnt_nodeTypeId , _cfnt_nodeTypeId
...@@ -246,9 +246,9 @@ getContextsForNgramsTerms cId ngramsTerms _ = do ...@@ -246,9 +246,9 @@ getContextsForNgramsTerms cId ngramsTerms _ = do
getContextNgrams :: HasNodeError err getContextNgrams :: HasNodeError err
=> NodeId => NodeId
-> NodeId -> NodeId
-> DBCmd err [Text] -> DBQuery err x [Text]
getContextNgrams contextId listId = do getContextNgrams contextId listId = do
res <- runPGSQuery query (contextId, listId) res <- mkPGQuery query (contextId, listId)
pure $ (\(PGS.Only term) -> term) <$> res pure $ (\(PGS.Only term) -> term) <$> res
where where
...@@ -270,9 +270,9 @@ getContextNgrams contextId listId = do ...@@ -270,9 +270,9 @@ getContextNgrams contextId listId = do
getContextNgramsMatchingFTS :: HasNodeError err getContextNgramsMatchingFTS :: HasNodeError err
=> ContextId => ContextId
-> NodeId -> NodeId
-> DBCmd err [Text] -> DBQuery err x [Text]
getContextNgramsMatchingFTS contextId listId = do getContextNgramsMatchingFTS contextId listId = do
res <- runPGSQuery query (listId, contextId) res <- mkPGQuery query (listId, contextId)
pure $ (\(PGS.Only term) -> term) <$> res pure $ (\(PGS.Only term) -> term) <$> res
where where
...@@ -299,9 +299,8 @@ getContextNgramsMatchingFTS contextId listId = do ...@@ -299,9 +299,8 @@ getContextNgramsMatchingFTS contextId listId = do
AND (contexts.search @@ plainto_tsquery(ngrams.terms) AND (contexts.search @@ plainto_tsquery(ngrams.terms)
OR contexts.search @@ plainto_tsquery('french', ngrams.terms)) |] OR contexts.search @@ plainto_tsquery('french', ngrams.terms)) |]
------------------------------------------------------------------------ ------------------------------------------------------------------------
insertNodeContext :: [NodeContext] -> DBCmd err Int insertNodeContext :: [NodeContext] -> DBUpdate err Int
insertNodeContext ns = mkCmd $ \conn -> fromIntegral <$> (runInsert conn insertNodeContext ns = fromIntegral <$> mkOpaInsert (Insert nodeContextTable ns' rCount (Just doNothing))
$ Insert nodeContextTable ns' rCount (Just doNothing))
where where
ns' :: [NodeContextWrite] ns' :: [NodeContextWrite]
ns' = map (\(NodeContext i n c x y) ns' = map (\(NodeContext i n c x y)
...@@ -317,9 +316,8 @@ insertNodeContext ns = mkCmd $ \conn -> fromIntegral <$> (runInsert conn ...@@ -317,9 +316,8 @@ insertNodeContext ns = mkCmd $ \conn -> fromIntegral <$> (runInsert conn
type Node_Id = NodeId type Node_Id = NodeId
type Context_Id = NodeId type Context_Id = NodeId
deleteNodeContext :: Node_Id -> Context_Id -> DBCmd err Int deleteNodeContext :: Node_Id -> Context_Id -> DBUpdate err Int64
deleteNodeContext n c = mkCmd $ \conn -> deleteNodeContext n c = mkOpaDelete $
fromIntegral <$> runDelete conn
(Delete nodeContextTable (Delete nodeContextTable
(\(NodeContext _ n_id c_id _ _) -> n_id .== pgNodeId n (\(NodeContext _ n_id c_id _ _) -> n_id .== pgNodeId n
.&& c_id .== pgNodeId c .&& c_id .== pgNodeId c
...@@ -329,9 +327,9 @@ deleteNodeContext n c = mkCmd $ \conn -> ...@@ -329,9 +327,9 @@ deleteNodeContext n c = mkCmd $ \conn ->
------------------------------------------------------------------------ ------------------------------------------------------------------------
-- | Favorite management -- | Favorite management
nodeContextsCategory :: [(CorpusId, DocId, Int)] -> DBCmd err [Int] nodeContextsCategory :: [(CorpusId, DocId, Int)] -> DBUpdate err [Int]
nodeContextsCategory inputData = map (\(PGS.Only a) -> a) nodeContextsCategory inputData = map (\(PGS.Only a) -> a)
<$> runPGSQuery catSelect (PGS.Only $ Values fields inputData) <$> mkPGUpdateReturningMany catSelect (PGS.Only $ Values fields inputData)
where where
fields = map (QualifiedIdentifier Nothing) ["int4","int4","int4"] fields = map (QualifiedIdentifier Nothing) ["int4","int4","int4"]
catSelect :: PGS.Query catSelect :: PGS.Query
...@@ -345,9 +343,9 @@ nodeContextsCategory inputData = map (\(PGS.Only a) -> a) ...@@ -345,9 +343,9 @@ nodeContextsCategory inputData = map (\(PGS.Only a) -> a)
------------------------------------------------------------------------ ------------------------------------------------------------------------
-- | Score management -- | Score management
nodeContextsScore :: [(CorpusId, DocId, Int)] -> DBCmd err [Int] nodeContextsScore :: [(CorpusId, DocId, Int)] -> DBUpdate err [Int]
nodeContextsScore inputData = map (\(PGS.Only a) -> a) nodeContextsScore inputData = map (\(PGS.Only a) -> a)
<$> runPGSQuery catScore (PGS.Only $ Values fields inputData) <$> mkPGUpdateReturningMany catScore (PGS.Only $ Values fields inputData)
where where
fields = map (QualifiedIdentifier Nothing) ["int4","int4","int4"] fields = map (QualifiedIdentifier Nothing) ["int4","int4","int4"]
catScore :: PGS.Query catScore :: PGS.Query
...@@ -370,8 +368,8 @@ nodeContextsScore inputData = map (\(PGS.Only a) -> a) ...@@ -370,8 +368,8 @@ nodeContextsScore inputData = map (\(PGS.Only a) -> a)
-- nc.node_id = 88 -- nc.node_id = 88
-- and nc.category >= 1 -- and nc.category >= 1
-- and c.typename = 4 -- and c.typename = 4
selectCountDocs :: HasDBid NodeType => CorpusId -> DBCmd err Int selectCountDocs :: HasDBid NodeType => CorpusId -> DBQuery err x Int
selectCountDocs cId = runCountOpaQuery (queryCountDocs cId) selectCountDocs cId = mkOpaCountQuery (queryCountDocs cId)
where where
queryCountDocs cId' = proc () -> do queryCountDocs cId' = proc () -> do
(c, nc) <- joinInCorpus -< () (c, nc) <- joinInCorpus -< ()
...@@ -382,14 +380,14 @@ selectCountDocs cId = runCountOpaQuery (queryCountDocs cId) ...@@ -382,14 +380,14 @@ selectCountDocs cId = runCountOpaQuery (queryCountDocs cId)
-- | TODO use UTCTime fast -- | TODO use UTCTime fast
selectDocsDates :: HasDBid NodeType => CorpusId -> DBCmd err [Text] selectDocsDates :: HasDBid NodeType => CorpusId -> DBQuery err x [Text]
selectDocsDates cId = map (head' "selectDocsDates" . splitOn "-") selectDocsDates cId = map (head' "selectDocsDates" . splitOn "-")
<$> catMaybes <$> catMaybes
<$> map (view hd_publication_date) <$> map (view hd_publication_date)
<$> selectDocs cId <$> selectDocs cId
selectDocs :: HasDBid NodeType => CorpusId -> DBCmd err [HyperdataDocument] selectDocs :: HasDBid NodeType => CorpusId -> DBQuery err x [HyperdataDocument]
selectDocs cId = runOpaQuery (queryDocs cId) selectDocs cId = mkOpaQuery (queryDocs cId)
queryDocs :: HasDBid NodeType => CorpusId -> O.Select (Field SqlJsonb) queryDocs :: HasDBid NodeType => CorpusId -> O.Select (Field SqlJsonb)
queryDocs cId = proc () -> do queryDocs cId = proc () -> do
...@@ -399,8 +397,8 @@ queryDocs cId = proc () -> do ...@@ -399,8 +397,8 @@ queryDocs cId = proc () -> do
restrict -< (c ^. context_typename) .== sqlInt4 (toDBid NodeDocument) restrict -< (c ^. context_typename) .== sqlInt4 (toDBid NodeDocument)
returnA -< view (context_hyperdata) c returnA -< view (context_hyperdata) c
selectDocNodes :: HasDBid NodeType => CorpusId -> DBCmd err [Context HyperdataDocument] selectDocNodes :: HasDBid NodeType => CorpusId -> DBQuery err x [Context HyperdataDocument]
selectDocNodes cId = runOpaQuery (queryDocNodes cId) selectDocNodes cId = mkOpaQuery (queryDocNodes cId)
queryDocNodes :: HasDBid NodeType => CorpusId -> O.Select ContextRead queryDocNodes :: HasDBid NodeType => CorpusId -> O.Select ContextRead
queryDocNodes cId = proc () -> do queryDocNodes cId = proc () -> do
...@@ -414,8 +412,8 @@ queryDocNodes cId = proc () -> do ...@@ -414,8 +412,8 @@ queryDocNodes cId = proc () -> do
restrict -< (c ^. context_typename) .== sqlInt4 (toDBid NodeDocument) restrict -< (c ^. context_typename) .== sqlInt4 (toDBid NodeDocument)
returnA -< c returnA -< c
selectDocNodesOnlyId :: HasDBid NodeType => CorpusId -> DBCmd err [ContextOnlyId HyperdataDocument] selectDocNodesOnlyId :: HasDBid NodeType => CorpusId -> DBQuery err x [ContextOnlyId HyperdataDocument]
selectDocNodesOnlyId cId = runOpaQuery (queryDocNodesOnlyId cId) selectDocNodesOnlyId cId = mkOpaQuery (queryDocNodesOnlyId cId)
queryDocNodesOnlyId :: HasDBid NodeType => CorpusId -> O.Select ContextOnlyIdRead queryDocNodesOnlyId :: HasDBid NodeType => CorpusId -> O.Select ContextOnlyIdRead
queryDocNodesOnlyId cId = proc () -> do queryDocNodesOnlyId cId = proc () -> do
...@@ -441,8 +439,8 @@ joinOn1 = proc () -> do ...@@ -441,8 +439,8 @@ joinOn1 = proc () -> do
------------------------------------------------------------------------ ------------------------------------------------------------------------
selectPublicContexts :: HasDBid NodeType => (Hyperdata a, DefaultFromField SqlJsonb a) selectPublicContexts :: HasDBid NodeType => (Hyperdata a, DefaultFromField SqlJsonb a)
=> DBCmd err [(Node a, Maybe Int)] => DBQuery err x [(Node a, Maybe Int)]
selectPublicContexts = runOpaQuery (queryWithType NodeFolderPublic) selectPublicContexts = mkOpaQuery (queryWithType NodeFolderPublic)
queryWithType :: HasDBid NodeType => NodeType -> O.Select (NodeRead, MaybeFields (Field SqlInt4)) queryWithType :: HasDBid NodeType => NodeType -> O.Select (NodeRead, MaybeFields (Field SqlInt4))
queryWithType nt = proc () -> do queryWithType nt = proc () -> do
......
{-# LANGUAGE KindSignatures #-} {-# LANGUAGE KindSignatures #-}
{-# LANGUAGE LambdaCase #-} {-# LANGUAGE LambdaCase #-}
{-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
module Gargantext.Database.Transactional ( module Gargantext.Database.Transactional (
DBOperation DBOperation
, DBTransactionOp -- opaque , DBTransactionOp -- opaque
...@@ -15,28 +16,32 @@ module Gargantext.Database.Transactional ( ...@@ -15,28 +16,32 @@ module Gargantext.Database.Transactional (
-- * Smart constructors -- * Smart constructors
, mkPGQuery , mkPGQuery
, mkPGUpdate , mkPGUpdate
, mkPGUpdateReturning , mkPGUpdateReturningOne
, mkPGUpdateReturningMany
, mkOpaQuery , mkOpaQuery
, mkOpaCountQuery
, mkOpaUpdate , mkOpaUpdate
, mkOpaInsert , mkOpaInsert
, mkOpaDelete
-- * Throwing errors (which allow rollbacks) -- * Throwing errors (which allow rollbacks)
, dbFail , dbFail
) where ) where
import Control.Exception.Safe qualified as Safe
import Control.Lens import Control.Lens
import Control.Monad.Base import Control.Monad.Base
import Control.Monad.Error.Class import Control.Monad.Error.Class
import Control.Monad.Free import Control.Monad.Free
import Control.Monad.Trans.Control (MonadBaseControl, control) import Control.Monad.Trans.Control (MonadBaseControl, control)
import Data.Int (Int64)
import Data.Pool (withResource, Pool) import Data.Pool (withResource, Pool)
import Data.Profunctor.Product.Default import Data.Profunctor.Product.Default
import Database.PostgreSQL.Simple qualified as PG import Database.PostgreSQL.Simple qualified as PG
import Database.PostgreSQL.Simple.Transaction qualified as PG import Database.PostgreSQL.Simple.Transaction qualified as PG
import Gargantext.Database.Prelude import Gargantext.Database.Class
import Opaleye import Opaleye
import Prelude import Prelude
import qualified Control.Exception.Safe as Safe
data DBOperation = DBRead | DBWrite data DBOperation = DBRead | DBWrite
...@@ -49,20 +54,24 @@ data DBTransactionOp err (r :: DBOperation) next where ...@@ -49,20 +54,24 @@ data DBTransactionOp err (r :: DBOperation) next where
PGQuery :: (PG.ToRow q, PG.FromRow a) => PG.Query -> q -> ([a] -> next) -> DBTransactionOp err r next PGQuery :: (PG.ToRow q, PG.FromRow a) => PG.Query -> q -> ([a] -> next) -> DBTransactionOp err r next
-- | A Postgres /write/, returning the number of affected rows. It can be used only in -- | A Postgres /write/, returning the number of affected rows. It can be used only in
-- 'DBWrite' transactions. -- 'DBWrite' transactions.
PGUpdate :: PG.ToRow a => PG.Query -> a -> (Int -> next) -> DBTransactionOp err DBWrite next PGUpdate :: PG.ToRow a => PG.Query -> a -> (Int64 -> next) -> DBTransactionOp err DBWrite next
-- | Unlike 'PGUpdate', which returns the number of affected rows, this can be used -- | Unlike 'PGUpdate', which returns the number of affected rows, this can be used
-- to write updates that return a value via the \"RETURNING\" directive. It's the programmer's -- to write updates that return a value via the \"RETURNING\" directive. It's the programmer's
-- responsibility to ensure that the SQL fragment contains it. -- responsibility to ensure that the SQL fragment contains it.
PGUpdateReturning :: (PG.ToRow q, PG.FromRow a) => PG.Query -> q -> (a -> next) -> DBTransactionOp err DBWrite next PGUpdateReturningMany :: (PG.ToRow q, PG.FromRow a) => PG.Query -> q -> ([a] -> next) -> DBTransactionOp err DBWrite next
-- | Like 'PGUpdateReturningMany', but the contract is that the query has to return /exactly one/ result.
PGUpdateReturningOne :: (PG.ToRow q, PG.FromRow a) => PG.Query -> q -> (a -> next) -> DBTransactionOp err DBWrite next
-- | An Opaleye /read/, returning a list of results. The 'r' in the result is polymorphic -- | An Opaleye /read/, returning a list of results. The 'r' in the result is polymorphic
-- so that reads can be embedded in update transactions. -- so that reads can be embedded in update transactions.
OpaQuery :: Default FromFields fields a => Select fields -> ([a] -> next) -> DBTransactionOp err r next OpaQuery :: Default FromFields fields a => Select fields -> ([a] -> next) -> DBTransactionOp err r next
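-- | An Opaleye /count/: the interpreter wraps the given 'Select' in 'countRows'
-- and returns the resulting row count. Like 'OpaQuery', it is polymorphic in 'r'.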
OpaCountQuery :: Select a -> (Int -> next) -> DBTransactionOp err r next
-- | An Opaleye /write/, returning a result depending on the input 'Insert'. It can be used only in -- | An Opaleye /write/, returning a result depending on the input 'Insert'. It can be used only in
-- 'DBWrite' transactions. -- 'DBWrite' transactions.
OpaInsert :: Insert a -> (a -> next) -> DBTransactionOp err DBWrite next OpaInsert :: Insert a -> (a -> next) -> DBTransactionOp err DBWrite next
-- | An Opaleye /write/, returning a result depending on the input 'Update'. It can be used only in -- | An Opaleye /write/, returning a result depending on the input 'Update'. It can be used only in
-- 'DBWrite' transactions. -- 'DBWrite' transactions.
OpaUpdate :: Update a -> (a -> next) -> DBTransactionOp err DBWrite next OpaUpdate :: Update a -> (a -> next) -> DBTransactionOp err DBWrite next
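-- | An Opaleye /write/, returning a result depending on the input 'Delete'. It can be used only in
-- 'DBWrite' transactions.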
OpaDelete :: Delete a -> (a -> next) -> DBTransactionOp err DBWrite next
-- | Monadic failure for DB transactions. -- | Monadic failure for DB transactions.
DBFail :: err -> DBTransactionOp err r next DBFail :: err -> DBTransactionOp err r next
...@@ -80,13 +89,16 @@ type DBTxCmd err a = forall m env. (IsCmd env err m, HasConnectionPool env) => m ...@@ -80,13 +89,16 @@ type DBTxCmd err a = forall m env. (IsCmd env err m, HasConnectionPool env) => m
instance Functor (DBTransactionOp err r) where instance Functor (DBTransactionOp err r) where
fmap f = \case fmap f = \case
PGQuery q params cont -> PGQuery q params (f . cont) PGQuery q params cont -> PGQuery q params (f . cont)
PGUpdate q a cont -> PGUpdate q a (f . cont) PGUpdate q a cont -> PGUpdate q a (f . cont)
PGUpdateReturning q a cont -> PGUpdateReturning q a (f . cont) PGUpdateReturningOne q a cont -> PGUpdateReturningOne q a (f . cont)
OpaQuery sel cont -> OpaQuery sel (f . cont) PGUpdateReturningMany q a cont -> PGUpdateReturningMany q a (f . cont)
OpaInsert ins cont -> OpaInsert ins (f . cont) OpaQuery sel cont -> OpaQuery sel (f . cont)
OpaUpdate upd cont -> OpaUpdate upd (f . cont) OpaCountQuery sel cont -> OpaCountQuery sel (f . cont)
DBFail err -> DBFail err OpaInsert ins cont -> OpaInsert ins (f . cont)
OpaUpdate upd cont -> OpaUpdate upd (f . cont)
OpaDelete del cont -> OpaDelete del (f . cont)
DBFail err -> DBFail err
-- | Generalised version of 'withResource' to work over any unlifted monad. -- | Generalised version of 'withResource' to work over any unlifted monad.
-- For some reason 'resource-pool' removed this from version 0.3.0.0 onwards. -- For some reason 'resource-pool' removed this from version 0.3.0.0 onwards.
...@@ -132,13 +144,22 @@ runDBQuery (DBTx m) = do ...@@ -132,13 +144,22 @@ runDBQuery (DBTx m) = do
-- 'DBCmd'. -- 'DBCmd'.
evalOp :: PG.Connection -> DBTransactionOp err r a -> DBTxCmd err a evalOp :: PG.Connection -> DBTransactionOp err r a -> DBTxCmd err a
evalOp conn = \case evalOp conn = \case
PGQuery qr q cc -> cc <$> liftBase (PG.query conn qr q) PGQuery qr q cc -> cc <$> liftBase (PG.query conn qr q)
PGUpdate qr a cc -> cc <$> liftBase (fromIntegral <$> PG.execute conn qr a) PGUpdate qr a cc -> cc <$> liftBase (PG.execute conn qr a)
PGUpdateReturning qr a cc -> cc <$> liftBase (queryOne conn qr a) PGUpdateReturningOne qr a cc -> cc <$> liftBase (queryOne conn qr a)
OpaQuery sel cc -> cc <$> liftBase (runSelect conn sel) PGUpdateReturningMany qr a cc -> cc <$> liftBase (PG.query conn qr a)
OpaInsert ins cc -> cc <$> liftBase (runInsert conn ins) OpaQuery sel cc -> cc <$> liftBase (runSelect conn sel)
OpaUpdate upd cc -> cc <$> liftBase (runUpdate conn upd) OpaCountQuery sel cc -> cc <$> liftBase (evalOpaCountQuery conn sel)
DBFail err -> throwError err OpaInsert ins cc -> cc <$> liftBase (runInsert conn ins)
OpaUpdate upd cc -> cc <$> liftBase (runUpdate conn upd)
OpaDelete del cc -> cc <$> liftBase (runDelete conn del)
DBFail err -> throwError err
evalOpaCountQuery :: PG.Connection -> Select a -> IO Int
evalOpaCountQuery conn sel = do
counts <- runSelect conn $ countRows sel
-- countRows is guaranteed to return a list with exactly one row, so 'head' is safe here
pure $ fromIntegral @Int64 @Int $ head counts
queryOne :: (PG.ToRow q, PG.FromRow r) => PG.Connection -> PG.Query -> q -> IO r queryOne :: (PG.ToRow q, PG.FromRow r) => PG.Connection -> PG.Query -> q -> IO r
queryOne conn q v = do queryOne conn q v = do
...@@ -161,19 +182,29 @@ mkPGQuery :: (PG.ToRow q, PG.FromRow a) ...@@ -161,19 +182,29 @@ mkPGQuery :: (PG.ToRow q, PG.FromRow a)
-> DBQuery err r [a] -> DBQuery err r [a]
mkPGQuery q a = DBTx $ liftF (PGQuery q a id) mkPGQuery q a = DBTx $ liftF (PGQuery q a id)
mkPGUpdate :: PG.ToRow a => PG.Query -> a -> DBUpdate err Int mkPGUpdate :: PG.ToRow a => PG.Query -> a -> DBUpdate err Int64
mkPGUpdate q a = DBTx $ liftF (PGUpdate q a id) mkPGUpdate q a = DBTx $ liftF (PGUpdate q a id)
mkPGUpdateReturning :: (PG.ToRow q, PG.FromRow a) => PG.Query -> q -> DBUpdate err a mkPGUpdateReturningOne :: (PG.ToRow q, PG.FromRow a) => PG.Query -> q -> DBUpdate err a
mkPGUpdateReturning q a = DBTx $ liftF (PGUpdateReturning q a id) mkPGUpdateReturningOne q a = DBTx $ liftF (PGUpdateReturningOne q a id)
mkPGUpdateReturningMany :: (PG.ToRow q, PG.FromRow a) => PG.Query -> q -> DBUpdate err [a]
mkPGUpdateReturningMany q a = DBTx $ liftF (PGUpdateReturningMany q a id)
mkOpaQuery :: Default FromFields fields a mkOpaQuery :: Default FromFields fields a
=> Select fields => Select fields
-> DBQuery err x [a] -> DBQuery err x [a]
mkOpaQuery s = DBTx $ liftF (OpaQuery s id) mkOpaQuery s = DBTx $ liftF (OpaQuery s id)
mkOpaCountQuery :: Select fields
-> DBQuery err x Int
mkOpaCountQuery s = DBTx $ liftF (OpaCountQuery s id)
mkOpaUpdate :: Update a -> DBUpdate err a mkOpaUpdate :: Update a -> DBUpdate err a
mkOpaUpdate a = DBTx $ liftF (OpaUpdate a id) mkOpaUpdate a = DBTx $ liftF (OpaUpdate a id)
mkOpaInsert :: Insert a -> DBUpdate err a mkOpaInsert :: Insert a -> DBUpdate err a
mkOpaInsert a = DBTx $ liftF (OpaInsert a id) mkOpaInsert a = DBTx $ liftF (OpaInsert a id)
mkOpaDelete :: Delete a -> DBUpdate err a
mkOpaDelete a = DBTx $ liftF (OpaDelete a id)
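Because reads are polymorphic in the operation kind, they can be interleaved with writes inside one atomic 'DBUpdate'. The following is a sketch, not part of this patch; it assumes the 'sql' quasi-quoter is in scope and a 'nodes' table with a 'date' column, as used elsewhere in this changeset:
-- Sketch only: read first, then write conditionally, inside a single transaction.
touchNodeIfExists :: Int -> DBUpdate err Int64
touchNodeIfExists nId = do
  found <- mkPGQuery [sql| SELECT id FROM nodes WHERE id = ? |] (PG.Only nId)
  case (found :: [PG.Only Int]) of
    []    -> pure 0
    (_:_) -> mkPGUpdate [sql| UPDATE nodes SET date = NOW() WHERE id = ? |] (PG.Only nId)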
...@@ -146,17 +146,17 @@ getCounterById (CounterId cid) = do ...@@ -146,17 +146,17 @@ getCounterById (CounterId cid) = do
insertCounter :: DBUpdate IOException Counter insertCounter :: DBUpdate IOException Counter
insertCounter = do insertCounter = do
mkPGUpdateReturning [sql| INSERT INTO public.ggtx_test_counter_table(counter_value) VALUES(0) RETURNING id, counter_value|] () mkPGUpdateReturningOne [sql| INSERT INTO public.ggtx_test_counter_table(counter_value) VALUES(0) RETURNING id, counter_value|] ()
updateCounter :: CounterId -> Int -> DBUpdate IOException Counter updateCounter :: CounterId -> Int -> DBUpdate IOException Counter
updateCounter cid x = do updateCounter cid x = do
mkPGUpdateReturning [sql| UPDATE public.ggtx_test_counter_table SET counter_value = ? WHERE id = ? RETURNING *|] (x, cid) mkPGUpdateReturningOne [sql| UPDATE public.ggtx_test_counter_table SET counter_value = ? WHERE id = ? RETURNING *|] (x, cid)
-- | We deliberately write this as a composite operation. -- | We deliberately write this as a composite operation.
stepCounter :: CounterId -> DBUpdate IOException Counter stepCounter :: CounterId -> DBUpdate IOException Counter
stepCounter cid = do stepCounter cid = do
Counter{..} <- getCounterById cid Counter{..} <- getCounterById cid
mkPGUpdateReturning [sql| UPDATE public.ggtx_test_counter_table SET counter_value = ? WHERE id = ? RETURNING *|] (counterValue + 1, cid) mkPGUpdateReturningOne [sql| UPDATE public.ggtx_test_counter_table SET counter_value = ? WHERE id = ? RETURNING *|] (counterValue + 1, cid)
-- --
-- MAIN TESTS -- MAIN TESTS
......