[graphql] fixes to the ngrams context endpoint

parent aeb9e7bd
@@ -11,7 +11,7 @@ module Gargantext.API.GraphQL where
 import Data.ByteString.Lazy.Char8
   ( ByteString
   )
-import Data.Map (Map)
+import Data.Map.Strict (Map)
 import Data.Morpheus
   ( App
   , deriveApp )
...
@@ -28,20 +28,20 @@ import Gargantext.Prelude
 import GHC.Generics (Generic)

 data AnnuaireContact = AnnuaireContact
-  { ac_title :: Maybe Text
-  , ac_source :: Maybe Text
-  , ac_id :: Int
-  , ac_firstName :: Maybe Text
-  , ac_lastName :: Maybe Text
-  , ac_labTeamDepts :: [Text]
-  , ac_organization :: [Text]
-  , ac_role :: Maybe Text
-  , ac_office :: Maybe Text
-  , ac_country :: Maybe Text
-  , ac_city :: Maybe Text
-  , ac_touchMail :: Maybe Text
-  , ac_touchPhone :: Maybe Text
-  , ac_touchUrl :: Maybe Text
+  { ac_title :: !(Maybe Text)
+  , ac_source :: !(Maybe Text)
+  , ac_id :: !Int
+  , ac_firstName :: !(Maybe Text)
+  , ac_lastName :: !(Maybe Text)
+  , ac_labTeamDepts :: ![Text]
+  , ac_organization :: ![Text]
+  , ac_role :: !(Maybe Text)
+  , ac_office :: !(Maybe Text)
+  , ac_country :: !(Maybe Text)
+  , ac_city :: !(Maybe Text)
+  , ac_touchMail :: !(Maybe Text)
+  , ac_touchPhone :: !(Maybe Text)
+  , ac_touchUrl :: !(Maybe Text)
   }
   deriving (Generic, GQLType, Show)
...
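Note on the `!` annotations added to `AnnuaireContact` above: a strict constructor field forces its argument to weak head normal form when the constructor is applied, so an undefined value surfaces at construction time instead of hiding in a thunk. A standalone illustration with toy types (not project code):

```haskell
import Control.Exception (SomeException, evaluate, try)

data Lazy   = Lazy   (Maybe Int)   -- field stored as a thunk
data Strict = Strict !(Maybe Int)  -- field forced at construction

main :: IO ()
main = do
  -- Fine: the lazy field is never demanded, so the error is never forced.
  _ <- evaluate (Lazy (error "never forced"))
  -- The strict field is forced as soon as the constructor is evaluated.
  r <- try (evaluate (Strict (error "forced now"))) :: IO (Either SomeException Strict)
  case r of
    Left _  -> putStrLn "strict field forced the error at construction"
    Right _ -> putStrLn "unreachable"
```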
@@ -13,16 +13,15 @@ import Data.Morpheus.Types
   , lift
   )
 import Data.Text (Text, pack)
-import Data.Time (UTCTime)
 import Data.Time.Format.ISO8601 (iso8601Show)
 import Gargantext.API.Admin.Types (HasSettings)
 import Gargantext.API.Prelude (GargM, GargError)
 import Gargantext.Core.Types.Search (HyperdataRow(..), toHyperdataRow)
 import Gargantext.Core.Mail.Types (HasMail)
 import Gargantext.Database.Admin.Types.Hyperdata (HyperdataDocument)
-import Gargantext.Database.Admin.Types.Node (ContextTitle, NodeId(..), NodeTypeId, ParentId, UserId, unNodeId)
+import Gargantext.Database.Admin.Types.Node (ContextTitle, NodeId(..), NodeTypeId, UserId, unNodeId)
 import Gargantext.Database.Prelude (HasConnectionPool, HasConfig)
-import Gargantext.Database.Query.Table.NodeContext (getNodeContext, getContextsForNgrams)
+import Gargantext.Database.Query.Table.NodeContext (getNodeContext, getContextsForNgramsTerms, ContextForNgramsTerms(..))
 import qualified Gargantext.Database.Query.Table.NodeContext as DNC
 import Gargantext.Database.Schema.NodeContext (NodeContext, NodeContextPoly(..))
 import Gargantext.Prelude
@@ -38,31 +37,33 @@ data ContextGQL = ContextGQL
   , c_name :: ContextTitle
   , c_date :: Text -- TODO UTCTime
   , c_hyperdata :: Maybe HyperdataRowDocumentGQL
+  , c_score :: Maybe Double
+  , c_category :: Maybe Int
   } deriving (Generic, GQLType, Show)

 -- We need this type instead of HyperdataRow(HyperdataRowDocument)
 -- because the latter is a sum type (of doc and contact) and we return
 -- docs here only. Without the union type, GraphQL endpoint is simpler.
 data HyperdataRowDocumentGQL =
-  HyperdataRowDocumentGQL { hrd_abstract :: !Text
-                          , hrd_authors :: !Text
-                          , hrd_bdd :: !Text
-                          , hrd_doi :: !Text
-                          , hrd_institutes :: !Text
-                          , hrd_language_iso2 :: !Text
-                          , hrd_page :: !Int
-                          , hrd_publication_date :: !Text
-                          , hrd_publication_day :: !Int
-                          , hrd_publication_hour :: !Int
-                          , hrd_publication_minute :: !Int
-                          , hrd_publication_month :: !Int
-                          , hrd_publication_second :: !Int
-                          , hrd_publication_year :: !Int
-                          , hrd_source :: !Text
-                          , hrd_title :: !Text
-                          , hrd_url :: !Text
-                          , hrd_uniqId :: !Text
-                          , hrd_uniqIdBdd :: !Text
+  HyperdataRowDocumentGQL { hrd_abstract :: Text
+                          , hrd_authors :: Text
+                          , hrd_bdd :: Text
+                          , hrd_doi :: Text
+                          , hrd_institutes :: Text
+                          , hrd_language_iso2 :: Text
+                          , hrd_page :: Int
+                          , hrd_publication_date :: Text
+                          , hrd_publication_day :: Int
+                          , hrd_publication_hour :: Int
+                          , hrd_publication_minute :: Int
+                          , hrd_publication_month :: Int
+                          , hrd_publication_second :: Int
+                          , hrd_publication_year :: Int
+                          , hrd_source :: Text
+                          , hrd_title :: Text
+                          , hrd_url :: Text
+                          , hrd_uniqId :: Text
+                          , hrd_uniqIdBdd :: Text
                           } deriving (Generic, GQLType, Show)

 data NodeContextGQL = NodeContextGQL
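The comment in the hunk above gives the rationale for `HyperdataRowDocumentGQL`: `HyperdataRow` is a sum of document and contact cases, and exposing it directly would force a GraphQL union even though this endpoint only returns documents. A toy sketch of that trade-off, with hypothetical `Row`/`RowDocGQL` types that are not the project's own:

```haskell
import Data.Text (Text)

-- A sum type like this would surface as a GraphQL union...
data Row
  = RowDoc     { row_title :: Text }
  | RowContact { row_name  :: Text }

-- ...whereas a document-only record maps to a single object type.
newtype RowDocGQL = RowDocGQL { rd_title :: Text }

-- Document-only conversion: the contact case simply yields Nothing.
rowToDocGQL :: Row -> Maybe RowDocGQL
rowToDocGQL (RowDoc t)     = Just (RowDocGQL t)
rowToDocGQL (RowContact _) = Nothing
```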
@@ -85,7 +86,7 @@ data NodeContextArgs
 data ContextsForNgramsArgs
   = ContextsForNgramsArgs
     { corpus_id :: Int
-    , ngrams_ids :: [Int]
+    , ngrams_terms :: [Text]
     } deriving (Generic, GQLType)

 data NodeContextCategoryMArgs = NodeContextCategoryMArgs
@@ -109,8 +110,8 @@ resolveNodeContext NodeContextArgs { context_id, node_id } =
 resolveContextsForNgrams
   :: (HasConnectionPool env, HasConfig env, HasMail env)
   => ContextsForNgramsArgs -> GqlM e env [ContextGQL]
-resolveContextsForNgrams ContextsForNgramsArgs { corpus_id, ngrams_ids } =
-  dbContextForNgrams corpus_id ngrams_ids
+resolveContextsForNgrams ContextsForNgramsArgs { corpus_id, ngrams_terms } =
+  dbContextForNgrams corpus_id ngrams_terms

 -- DB
@@ -128,11 +129,11 @@ dbNodeContext context_id node_id = do
 dbContextForNgrams
   :: (HasConnectionPool env, HasConfig env, HasMail env)
-  => Int -> [Int] -> GqlM e env [ContextGQL]
-dbContextForNgrams node_id ngrams_ids = do
-  contextTuples <- lift $ getContextsForNgrams (NodeId node_id) ngrams_ids
-  lift $ printDebug "[dbContextForNgrams] contextTuples" contextTuples
-  pure $ toContextGQL <$> contextTuples
+  => Int -> [Text] -> GqlM e env [ContextGQL]
+dbContextForNgrams node_id ngrams_terms = do
+  contextsForNgramsTerms <- lift $ getContextsForNgramsTerms (NodeId node_id) ngrams_terms
+  --lift $ printDebug "[dbContextForNgrams] contextsForNgramsTerms" contextsForNgramsTerms
+  pure $ toContextGQL <$> contextsForNgramsTerms

 -- Conversion functions
@@ -146,18 +147,23 @@ toNodeContextGQL (NodeContext { _nc_node_id = NodeId nc_node_id
                               , nc_score = _nc_score
                               , nc_category = _nc_category }

-toContextGQL :: (NodeId, Maybe Hash, NodeTypeId, UserId, Maybe ParentId, ContextTitle, UTCTime, HyperdataDocument) -> ContextGQL
-toContextGQL ( c_id
-             , c_hash_id
-             , c_typename
-             , c_user_id
-             , m_c_parent_id
-             , c_name
-             , c_date
-             , hyperdata ) = ContextGQL { c_id = unNodeId c_id
+toContextGQL :: ContextForNgramsTerms -> ContextGQL
+toContextGQL ContextForNgramsTerms { _cfnt_nodeId = c_id
+                                   , _cfnt_hash = c_hash_id
+                                   , _cfnt_nodeTypeId = c_typename
+                                   , _cfnt_userId = c_user_id
+                                   , _cfnt_parentId = m_c_parent_id
+                                   , _cfnt_c_title = c_name
+                                   , _cfnt_date = c_date
+                                   , _cfnt_hyperdata = hyperdata
+                                   , _cfnt_score = c_score
+                                   , _cfnt_category = c_category } =
+  ContextGQL { c_id = unNodeId c_id
              , c_parent_id = unNodeId <$> m_c_parent_id
             , c_date = pack $ iso8601Show c_date
             , c_hyperdata = toHyperdataRowDocumentGQL hyperdata
+             , c_score
+             , c_category
             , .. }

 toHyperdataRowDocumentGQL :: HyperdataDocument -> Maybe HyperdataRowDocumentGQL
...
@@ -42,7 +42,7 @@ resolveUsers UserArgs { user_id } = dbUsers user_id
 -- | Inner function to fetch the user from DB.
 dbUsers
   :: (HasConnectionPool env, HasConfig env, HasMail env)
-  => Int -> GqlM e env ([User (GqlM e env)])
+  => Int -> GqlM e env [User (GqlM e env)]
 dbUsers user_id = lift (map toUser <$> getUsersWithId user_id)

 toUser
...
@@ -153,7 +153,7 @@ updateUserInfo (UserInfoMArgs { ui_id, .. }) = do
   pure 1
   where
     uh _ Nothing u_hyperdata = u_hyperdata
-    uh lens' (Just val) u_hyperdata = u_hyperdata & lens' .~ Just val
+    uh lens' (Just val) u_hyperdata = u_hyperdata & lens' ?~ val
    uh' _ Nothing u_hyperdata = u_hyperdata
    uh' lens' (Just val) u_hyperdata = u_hyperdata & lens' .~ val
    nId Node {_node_id} = _node_id
...
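The `uh` helper above now uses `?~` instead of `.~ Just`: the two spellings are equivalent, since `l ?~ v` sets a `Maybe`-valued target to `Just v`. A self-contained check of that equivalence on a plain tuple (standalone sketch, not project code), using the same lens operators:

```haskell
import Control.Lens ((&), (.~), (?~), _1)

-- (?~) writes a Just: l ?~ v is the same as l .~ Just v.
same :: Bool
same = ((Nothing :: Maybe Int, "x") & _1 ?~ (3 :: Int))
    == ((Nothing :: Maybe Int, "x") & _1 .~ Just 3)
-- evaluates to True: both sides are (Just 3, "x")
```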
@@ -160,7 +160,3 @@ defaultPublicData =
     , date = "YY/MM/DD"
     , database = "database"
     , author = "Author" }
@@ -136,5 +136,3 @@ publisBySchool hal_data' = Gargantext.Prelude.map (\(i,n) -> (M.lookup i mapIdSc
       $ DV.toList
       $ DV.map (\n -> splitOn ( ", ") (csvHal_instStructId_i n) )
       $ DV.filter (\n -> csvHal_publication_year n == 2017) hal_data'
@@ -186,4 +186,3 @@ getHistory types listsId = do
   pure $ Map.map (Map.filterWithKey (\k _ -> List.elem k types))
        $ Map.filterWithKey (\k _ -> List.elem k listsId)
        $ Map.fromListWith (Map.unionWith (<>)) nsp
@@ -145,4 +145,3 @@ score field list n m = (Just mempty <> m)
        %~ (<> Just n)
 ------------------------------------------------------------------------
@@ -128,4 +128,3 @@ unPatchMapToMap = Map.fromList . PatchMap.toList
 unNgramsTablePatch :: NgramsTablePatch -> HashMap NgramsTerm NgramsPatch
 unNgramsTablePatch (NgramsTablePatch p) = unPatchMapToHashMap p
@@ -39,5 +39,3 @@ average x = L.sum x / L.genericLength x
 average' :: [Int] -> Double
 average' x = (L.sum y) / (L.genericLength y) where
     y = L.map fromIntegral x
@@ -142,7 +142,7 @@ recomputeGraph _uId nId partitionMethod bridgeMethod maybeSimilarity maybeStreng
     listVersion = graph ^? _Just . graph_metadata . _Just . gm_list . lfg_version
     graphMetric = case maybeSimilarity of
       Nothing -> graph ^? _Just . graph_metadata . _Just . gm_metric
-      _       -> maybeSimilarity
+      Just _  -> maybeSimilarity
     similarity = case graphMetric of
       Nothing -> withMetric Order1
       Just m  -> withMetric m
@@ -216,14 +216,12 @@ computeGraph corpusId partitionMethod bridgeMethod similarity strength (nt1,nt2)
          $ -} getCoocByNgrams'' (Diagonal True) (identity, identity) (m1,m2)

   -- TODO MultiPartite Here
-  graph <- liftBase
+  liftBase
     $ cooc2graphWith partitionMethod bridgeMethod (MultiPartite (Partite (HashMap.keysSet m1) nt1)
                                                                 (Partite (HashMap.keysSet m2) nt2)
                                                   )
                                                   similarity 0 strength myCooc
-  pure graph

 defaultGraphMetadata :: HasNodeError err
@@ -246,7 +244,7 @@ defaultGraphMetadata cId t repo gm str = do
                          , LegendField 3 "#FFF" "Cluster3"
                          , LegendField 4 "#FFF" "Cluster4"
                          ]
-      , _gm_list = (ListForGraph lId (repo ^. unNodeStory . at lId . _Just . a_version))
+      , _gm_list = ListForGraph lId (repo ^. unNodeStory . at lId . _Just . a_version)
      , _gm_startForceAtlas = True
      }
   -- (map (\n -> LegendField n "#FFFFFF" (pack $ show n)) [1..10])
...
@@ -131,6 +131,3 @@ testIndices = myMap == ( M.filter (>0) myMap')
     (ti,it) = createIndices myMap
     matrix = mat2map $ map2mat Square 0 (M.size ti) $ toIndex ti myMap
     myMap' = fromIndex it matrix
@@ -45,7 +45,3 @@ take 7 [(CommunityId, take 3 [Label])]
 -}
@@ -110,5 +110,3 @@ getMax (i,j) Nothing (Just d) = Just ((j,i), d)
 getMax ij (Just di) (Just dj) = if di >= dj then getMax ij (Just di) Nothing
                                             else getMax ij Nothing (Just dj)
 getMax _ _ _ = Nothing
@@ -567,4 +567,3 @@ instance ToSchema EdgeType
 ----------------------------
 -- | TODO XML instances | --
 ----------------------------
@@ -30,6 +30,9 @@ module Gargantext.Database.Query.Table.NodeContext
   , getNodeContext
   , updateNodeContextCategory
   , getContextsForNgrams
+  , ContextForNgrams(..)
+  , getContextsForNgramsTerms
+  , ContextForNgramsTerms(..)
   , insertNodeContext
   , deleteNodeContext
   , selectPublicContexts
@@ -103,8 +106,27 @@ updateNodeContextCategory cId nId cat = do
       WHERE context_id = ?
       AND node_id = ? |]

-getContextsForNgrams :: HasNodeError err => NodeId -> [Int] -> Cmd err [(NodeId, Maybe Hash, NodeTypeId, UserId, Maybe ParentId, ContextTitle, UTCTime, HyperdataDocument)]
-getContextsForNgrams cId ngramsIds = runPGSQuery query (cId, PGS.In ngramsIds)
+data ContextForNgrams =
+  ContextForNgrams { _cfn_nodeId :: NodeId
+                   , _cfn_hash :: Maybe Hash
+                   , _cfn_userId :: UserId
+                   , _cfn_parentId :: Maybe ParentId
+                   , _cfn_c_title :: ContextTitle
+                   , _cfn_date :: UTCTime
+                   , _cfn_hyperdata :: HyperdataDocument }
+getContextsForNgrams :: HasNodeError err
+                     => NodeId
+                     -> [Int]
+                     -> Cmd err [ContextForNgrams]
+getContextsForNgrams cId ngramsIds = do
+  res <- runPGSQuery query (cId, PGS.In ngramsIds)
+  pure $ (\( _cfn_nodeId
+           , _cfn_hash
+           , _cfn_userId
+           , _cfn_parentId
+           , _cfn_c_title
+           , _cfn_date
+           , _cfn_hyperdata) -> ContextForNgrams { .. }) <$> res
   where
     query :: PGS.Query
     query = [sql| SELECT contexts.id, hash_id, typename, user_id, parent_id, name, date, hyperdata
@@ -114,6 +136,46 @@ getContextsForNgrams cId ngramsIds = runPGSQuery query (cId, PGS.In ngramsIds)
       WHERE nodes_contexts.node_id = ?
       AND context_node_ngrams.ngrams_id IN ? |]

+data ContextForNgramsTerms =
+  ContextForNgramsTerms { _cfnt_nodeId :: NodeId
+                        , _cfnt_hash :: Maybe Hash
+                        , _cfnt_nodeTypeId :: NodeTypeId
+                        , _cfnt_userId :: UserId
+                        , _cfnt_parentId :: Maybe ParentId
+                        , _cfnt_c_title :: ContextTitle
+                        , _cfnt_date :: UTCTime
+                        , _cfnt_hyperdata :: HyperdataDocument
+                        , _cfnt_score :: Maybe Double
+                        , _cfnt_category :: Maybe Int }
+getContextsForNgramsTerms :: HasNodeError err
+                          => NodeId
+                          -> [Text]
+                          -> Cmd err [ContextForNgramsTerms]
+getContextsForNgramsTerms cId ngramsTerms = do
+  res <- runPGSQuery query (cId, PGS.In ngramsTerms)
+  pure $ (\( _cfnt_nodeId
+           , _cfnt_hash
+           , _cfnt_nodeTypeId
+           , _cfnt_userId
+           , _cfnt_parentId
+           , _cfnt_c_title
+           , _cfnt_date
+           , _cfnt_hyperdata
+           , _cfnt_score
+           , _cfnt_category) -> ContextForNgramsTerms { .. }) <$> res
+  where
+    query :: PGS.Query
+    query = [sql| SELECT t.id, t.hash_id, t.typename, t.user_id, t.parent_id, t.name, t.date, t.hyperdata, t.score, t.category
+              FROM (
+                SELECT DISTINCT ON (contexts.id) contexts.id AS id, hash_id, typename, user_id, parent_id, name, date, hyperdata, nodes_contexts.score AS score, nodes_contexts.category AS category, context_node_ngrams.doc_count AS doc_count
+                FROM contexts
+                JOIN context_node_ngrams ON contexts.id = context_node_ngrams.context_id
+                JOIN nodes_contexts ON contexts.id = nodes_contexts.context_id
+                JOIN ngrams ON context_node_ngrams.ngrams_id = ngrams.id
+                WHERE nodes_contexts.node_id = ?
+                AND ngrams.terms IN ?) t
+              ORDER BY t.doc_count DESC |]
 ------------------------------------------------------------------------
 insertNodeContext :: [NodeContext] -> Cmd err Int
 insertNodeContext ns = mkCmd $ \conn -> fromIntegral <$> (runInsert_ conn
...
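For reference, a minimal usage sketch of the new accessor added above, to be read in the context of this module's imports. The corpus id and the ngrams terms are made-up values, and string literals assume OverloadedStrings (otherwise wrap them with Data.Text.pack):

```haskell
-- Hypothetical example: contexts of corpus 42 that contain either term,
-- returned in descending doc_count order as the query above guarantees.
exampleContexts :: HasNodeError err => Cmd err [ContextForNgramsTerms]
exampleContexts =
  getContextsForNgramsTerms (NodeId 42) ["artificial intelligence", "neural networks"]
```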