[graphql] fixes to the ngrams context endpoint

parent aeb9e7bd
......@@ -11,7 +11,7 @@ module Gargantext.API.GraphQL where
import Data.ByteString.Lazy.Char8
( ByteString
)
import Data.Map (Map)
import Data.Map.Strict (Map)
import Data.Morpheus
( App
, deriveApp )
......
......@@ -28,20 +28,20 @@ import Gargantext.Prelude
import GHC.Generics (Generic)
data AnnuaireContact = AnnuaireContact
{ ac_title :: Maybe Text
, ac_source :: Maybe Text
, ac_id :: Int
, ac_firstName :: Maybe Text
, ac_lastName :: Maybe Text
, ac_labTeamDepts :: [Text]
, ac_organization :: [Text]
, ac_role :: Maybe Text
, ac_office :: Maybe Text
, ac_country :: Maybe Text
, ac_city :: Maybe Text
, ac_touchMail :: Maybe Text
, ac_touchPhone :: Maybe Text
, ac_touchUrl :: Maybe Text
{ ac_title :: !(Maybe Text)
, ac_source :: !(Maybe Text)
, ac_id :: !Int
, ac_firstName :: !(Maybe Text)
, ac_lastName :: !(Maybe Text)
, ac_labTeamDepts :: ![Text]
, ac_organization :: ![Text]
, ac_role :: !(Maybe Text)
, ac_office :: !(Maybe Text)
, ac_country :: !(Maybe Text)
, ac_city :: !(Maybe Text)
, ac_touchMail :: !(Maybe Text)
, ac_touchPhone :: !(Maybe Text)
, ac_touchUrl :: !(Maybe Text)
}
deriving (Generic, GQLType, Show)
......
......@@ -13,16 +13,15 @@ import Data.Morpheus.Types
, lift
)
import Data.Text (Text, pack)
import Data.Time (UTCTime)
import Data.Time.Format.ISO8601 (iso8601Show)
import Gargantext.API.Admin.Types (HasSettings)
import Gargantext.API.Prelude (GargM, GargError)
import Gargantext.Core.Types.Search (HyperdataRow(..), toHyperdataRow)
import Gargantext.Core.Mail.Types (HasMail)
import Gargantext.Database.Admin.Types.Hyperdata (HyperdataDocument)
import Gargantext.Database.Admin.Types.Node (ContextTitle, NodeId(..), NodeTypeId, ParentId, UserId, unNodeId)
import Gargantext.Database.Admin.Types.Node (ContextTitle, NodeId(..), NodeTypeId, UserId, unNodeId)
import Gargantext.Database.Prelude (HasConnectionPool, HasConfig)
import Gargantext.Database.Query.Table.NodeContext (getNodeContext, getContextsForNgrams)
import Gargantext.Database.Query.Table.NodeContext (getNodeContext, getContextsForNgramsTerms, ContextForNgramsTerms(..))
import qualified Gargantext.Database.Query.Table.NodeContext as DNC
import Gargantext.Database.Schema.NodeContext (NodeContext, NodeContextPoly(..))
import Gargantext.Prelude
......@@ -38,31 +37,33 @@ data ContextGQL = ContextGQL
, c_name :: ContextTitle
, c_date :: Text -- TODO UTCTime
, c_hyperdata :: Maybe HyperdataRowDocumentGQL
, c_score :: Maybe Double
, c_category :: Maybe Int
} deriving (Generic, GQLType, Show)
-- We need this type instead of HyperdataRow(HyperdataRowDocument)
-- because the latter is a sum type (of doc and contact) and we return
-- docs here only. Without the union type, the GraphQL endpoint is simpler.
data HyperdataRowDocumentGQL =
HyperdataRowDocumentGQL { hrd_abstract :: !Text
, hrd_authors :: !Text
, hrd_bdd :: !Text
, hrd_doi :: !Text
, hrd_institutes :: !Text
, hrd_language_iso2 :: !Text
, hrd_page :: !Int
, hrd_publication_date :: !Text
, hrd_publication_day :: !Int
, hrd_publication_hour :: !Int
, hrd_publication_minute :: !Int
, hrd_publication_month :: !Int
, hrd_publication_second :: !Int
, hrd_publication_year :: !Int
, hrd_source :: !Text
, hrd_title :: !Text
, hrd_url :: !Text
, hrd_uniqId :: !Text
, hrd_uniqIdBdd :: !Text
HyperdataRowDocumentGQL { hrd_abstract :: Text
, hrd_authors :: Text
, hrd_bdd :: Text
, hrd_doi :: Text
, hrd_institutes :: Text
, hrd_language_iso2 :: Text
, hrd_page :: Int
, hrd_publication_date :: Text
, hrd_publication_day :: Int
, hrd_publication_hour :: Int
, hrd_publication_minute :: Int
, hrd_publication_month :: Int
, hrd_publication_second :: Int
, hrd_publication_year :: Int
, hrd_source :: Text
, hrd_title :: Text
, hrd_url :: Text
, hrd_uniqId :: Text
, hrd_uniqIdBdd :: Text
} deriving (Generic, GQLType, Show)
data NodeContextGQL = NodeContextGQL
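Editor's aside on the comment above about avoiding the doc/contact union: the pattern is simply to narrow the sum type to its document branch before it reaches the schema. Below is a minimal sketch of that pattern using hypothetical toy types (Row, DocGQL, narrowToDoc are illustrative names, not from this repository):

import Data.Text (Text)

-- Toy stand-ins for the doc/contact sum type and the doc-only GraphQL record.
data Row
  = RowDoc     { rd_title :: Text }
  | RowContact { rc_name  :: Text }

newtype DocGQL = DocGQL { d_title :: Text }

-- Keep only the document branch; contacts map to Nothing, so the GraphQL
-- schema can expose a plain object instead of a union.
narrowToDoc :: Row -> Maybe DocGQL
narrowToDoc (RowDoc t)     = Just (DocGQL t)
narrowToDoc (RowContact _) = Nothing
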
......@@ -85,7 +86,7 @@ data NodeContextArgs
data ContextsForNgramsArgs
= ContextsForNgramsArgs
{ corpus_id :: Int
, ngrams_ids :: [Int]
, ngrams_terms :: [Text]
} deriving (Generic, GQLType)
data NodeContextCategoryMArgs = NodeContextCategoryMArgs
......@@ -109,8 +110,8 @@ resolveNodeContext NodeContextArgs { context_id, node_id } =
resolveContextsForNgrams
:: (HasConnectionPool env, HasConfig env, HasMail env)
=> ContextsForNgramsArgs -> GqlM e env [ContextGQL]
resolveContextsForNgrams ContextsForNgramsArgs { corpus_id, ngrams_ids } =
dbContextForNgrams corpus_id ngrams_ids
resolveContextsForNgrams ContextsForNgramsArgs { corpus_id, ngrams_terms } =
dbContextForNgrams corpus_id ngrams_terms
-- DB
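For illustration only (editor's sketch, not part of the commit): after this change the resolver is keyed on terms rather than ngrams ids, so its arguments take the shape below. ContextsForNgramsArgs is the record defined in the hunk above; the corpus id and the term are placeholders, and OverloadedStrings is assumed for the Text literal.

-- Placeholder values; builds the new terms-based argument record.
exampleArgs :: ContextsForNgramsArgs
exampleArgs = ContextsForNgramsArgs { corpus_id    = 42
                                    , ngrams_terms = ["big data"] }
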
......@@ -128,11 +129,11 @@ dbNodeContext context_id node_id = do
dbContextForNgrams
:: (HasConnectionPool env, HasConfig env, HasMail env)
=> Int -> [Int] -> GqlM e env [ContextGQL]
dbContextForNgrams node_id ngrams_ids = do
contextTuples <- lift $ getContextsForNgrams (NodeId node_id) ngrams_ids
lift $ printDebug "[dbContextForNgrams] contextTuples" contextTuples
pure $ toContextGQL <$> contextTuples
=> Int -> [Text] -> GqlM e env [ContextGQL]
dbContextForNgrams node_id ngrams_terms = do
contextsForNgramsTerms <- lift $ getContextsForNgramsTerms (NodeId node_id) ngrams_terms
--lift $ printDebug "[dbContextForNgrams] contextsForNgramsTerms" contextsForNgramsTerms
pure $ toContextGQL <$> contextsForNgramsTerms
-- Conversion functions
......@@ -146,18 +147,23 @@ toNodeContextGQL (NodeContext { _nc_node_id = NodeId nc_node_id
, nc_score = _nc_score
, nc_category = _nc_category }
toContextGQL :: (NodeId, Maybe Hash, NodeTypeId, UserId, Maybe ParentId, ContextTitle, UTCTime, HyperdataDocument) -> ContextGQL
toContextGQL ( c_id
, c_hash_id
, c_typename
, c_user_id
, m_c_parent_id
, c_name
, c_date
, hyperdata ) = ContextGQL { c_id = unNodeId c_id
toContextGQL :: ContextForNgramsTerms -> ContextGQL
toContextGQL ContextForNgramsTerms { _cfnt_nodeId = c_id
, _cfnt_hash = c_hash_id
, _cfnt_nodeTypeId = c_typename
, _cfnt_userId = c_user_id
, _cfnt_parentId = m_c_parent_id
, _cfnt_c_title = c_name
, _cfnt_date = c_date
, _cfnt_hyperdata = hyperdata
, _cfnt_score = c_score
, _cfnt_category = c_category } =
ContextGQL { c_id = unNodeId c_id
, c_parent_id = unNodeId <$> m_c_parent_id
, c_date = pack $ iso8601Show c_date
, c_hyperdata = toHyperdataRowDocumentGQL hyperdata
, c_score
, c_category
, .. }
toHyperdataRowDocumentGQL :: HyperdataDocument -> Maybe HyperdataRowDocumentGQL
......
......@@ -42,7 +42,7 @@ resolveUsers UserArgs { user_id } = dbUsers user_id
-- | Inner function to fetch the user from DB.
dbUsers
:: (HasConnectionPool env, HasConfig env, HasMail env)
=> Int -> GqlM e env ([User (GqlM e env)])
=> Int -> GqlM e env [User (GqlM e env)]
dbUsers user_id = lift (map toUser <$> getUsersWithId user_id)
toUser
......
......@@ -153,7 +153,7 @@ updateUserInfo (UserInfoMArgs { ui_id, .. }) = do
pure 1
where
uh _ Nothing u_hyperdata = u_hyperdata
uh lens' (Just val) u_hyperdata = u_hyperdata & lens' .~ Just val
uh lens' (Just val) u_hyperdata = u_hyperdata & lens' ?~ val
uh' _ Nothing u_hyperdata = u_hyperdata
uh' lens' (Just val) u_hyperdata = u_hyperdata & lens' .~ val
nId Node {_node_id} = _node_id
......
......@@ -160,7 +160,3 @@ defaultPublicData =
, date = "YY/MM/DD"
, database = "database"
, author = "Author" }
......@@ -136,5 +136,3 @@ publisBySchool hal_data' = Gargantext.Prelude.map (\(i,n) -> (M.lookup i mapIdSc
$ DV.toList
$ DV.map (\n -> splitOn ( ", ") (csvHal_instStructId_i n) )
$ DV.filter (\n -> csvHal_publication_year n == 2017) hal_data'
......@@ -186,4 +186,3 @@ getHistory types listsId = do
pure $ Map.map (Map.filterWithKey (\k _ -> List.elem k types))
$ Map.filterWithKey (\k _ -> List.elem k listsId)
$ Map.fromListWith (Map.unionWith (<>)) nsp
......@@ -145,4 +145,3 @@ score field list n m = (Just mempty <> m)
%~ (<> Just n)
------------------------------------------------------------------------
......@@ -128,4 +128,3 @@ unPatchMapToMap = Map.fromList . PatchMap.toList
unNgramsTablePatch :: NgramsTablePatch -> HashMap NgramsTerm NgramsPatch
unNgramsTablePatch (NgramsTablePatch p) = unPatchMapToHashMap p
......@@ -39,5 +39,3 @@ average x = L.sum x / L.genericLength x
average' :: [Int] -> Double
average' x = (L.sum y) / (L.genericLength y) where
y = L.map fromIntegral x
......@@ -142,7 +142,7 @@ recomputeGraph _uId nId partitionMethod bridgeMethod maybeSimilarity maybeStreng
listVersion = graph ^? _Just . graph_metadata . _Just . gm_list . lfg_version
graphMetric = case maybeSimilarity of
Nothing -> graph ^? _Just . graph_metadata . _Just . gm_metric
_ -> maybeSimilarity
Just _ -> maybeSimilarity
similarity = case graphMetric of
Nothing -> withMetric Order1
Just m -> withMetric m
......@@ -216,14 +216,12 @@ computeGraph corpusId partitionMethod bridgeMethod similarity strength (nt1,nt2)
$ -} getCoocByNgrams'' (Diagonal True) (identity, identity) (m1,m2)
-- TODO MultiPartite Here
graph <- liftBase
liftBase
$ cooc2graphWith partitionMethod bridgeMethod (MultiPartite (Partite (HashMap.keysSet m1) nt1)
(Partite (HashMap.keysSet m2) nt2)
)
similarity 0 strength myCooc
pure graph
defaultGraphMetadata :: HasNodeError err
......@@ -246,7 +244,7 @@ defaultGraphMetadata cId t repo gm str = do
, LegendField 3 "#FFF" "Cluster3"
, LegendField 4 "#FFF" "Cluster4"
]
, _gm_list = (ListForGraph lId (repo ^. unNodeStory . at lId . _Just . a_version))
, _gm_list = ListForGraph lId (repo ^. unNodeStory . at lId . _Just . a_version)
, _gm_startForceAtlas = True
}
-- (map (\n -> LegendField n "#FFFFFF" (pack $ show n)) [1..10])
......
......@@ -131,6 +131,3 @@ testIndices = myMap == ( M.filter (>0) myMap')
(ti,it) = createIndices myMap
matrix = mat2map $ map2mat Square 0 (M.size ti) $ toIndex ti myMap
myMap' = fromIndex it matrix
......@@ -45,7 +45,3 @@ take 7 [(CommunityId, take 3 [Label])]
-}
......@@ -110,5 +110,3 @@ getMax (i,j) Nothing (Just d) = Just ((j,i), d)
getMax ij (Just di) (Just dj) = if di >= dj then getMax ij (Just di) Nothing
else getMax ij Nothing (Just dj)
getMax _ _ _ = Nothing
......@@ -567,4 +567,3 @@ instance ToSchema EdgeType
----------------------------
-- | TODO XML instances | --
----------------------------
......@@ -30,6 +30,9 @@ module Gargantext.Database.Query.Table.NodeContext
, getNodeContext
, updateNodeContextCategory
, getContextsForNgrams
, ContextForNgrams(..)
, getContextsForNgramsTerms
, ContextForNgramsTerms(..)
, insertNodeContext
, deleteNodeContext
, selectPublicContexts
......@@ -103,8 +106,27 @@ updateNodeContextCategory cId nId cat = do
WHERE context_id = ?
AND node_id = ? |]
getContextsForNgrams :: HasNodeError err => NodeId -> [Int] -> Cmd err [(NodeId, Maybe Hash, NodeTypeId, UserId, Maybe ParentId, ContextTitle, UTCTime, HyperdataDocument)]
getContextsForNgrams cId ngramsIds = runPGSQuery query (cId, PGS.In ngramsIds)
data ContextForNgrams =
ContextForNgrams { _cfn_nodeId :: NodeId
, _cfn_hash :: Maybe Hash
, _cfn_userId :: UserId
, _cfn_parentId :: Maybe ParentId
, _cfn_c_title :: ContextTitle
, _cfn_date :: UTCTime
, _cfn_hyperdata :: HyperdataDocument }
getContextsForNgrams :: HasNodeError err
=> NodeId
-> [Int]
-> Cmd err [ContextForNgrams]
getContextsForNgrams cId ngramsIds = do
res <- runPGSQuery query (cId, PGS.In ngramsIds)
pure $ (\( _cfn_nodeId
, _cfn_hash
, _cfn_userId
, _cfn_parentId
, _cfn_c_title
, _cfn_date
, _cfn_hyperdata) -> ContextForNgrams { .. }) <$> res
where
query :: PGS.Query
query = [sql| SELECT contexts.id, hash_id, typename, user_id, parent_id, name, date, hyperdata
......@@ -114,6 +136,46 @@ getContextsForNgrams cId ngramsIds = runPGSQuery query (cId, PGS.In ngramsIds)
WHERE nodes_contexts.node_id = ?
AND context_node_ngrams.ngrams_id IN ? |]
data ContextForNgramsTerms =
ContextForNgramsTerms { _cfnt_nodeId :: NodeId
, _cfnt_hash :: Maybe Hash
, _cfnt_nodeTypeId :: NodeTypeId
, _cfnt_userId :: UserId
, _cfnt_parentId :: Maybe ParentId
, _cfnt_c_title :: ContextTitle
, _cfnt_date :: UTCTime
, _cfnt_hyperdata :: HyperdataDocument
, _cfnt_score :: Maybe Double
, _cfnt_category :: Maybe Int }
getContextsForNgramsTerms :: HasNodeError err
=> NodeId
-> [Text]
-> Cmd err [ContextForNgramsTerms]
getContextsForNgramsTerms cId ngramsTerms = do
res <- runPGSQuery query (cId, PGS.In ngramsTerms)
pure $ (\( _cfnt_nodeId
, _cfnt_hash
, _cfnt_nodeTypeId
, _cfnt_userId
, _cfnt_parentId
, _cfnt_c_title
, _cfnt_date
, _cfnt_hyperdata
, _cfnt_score
, _cfnt_category) -> ContextForNgramsTerms { .. }) <$> res
where
query :: PGS.Query
query = [sql| SELECT t.id, t.hash_id, t.typename, t.user_id, t.parent_id, t.name, t.date, t.hyperdata, t.score, t.category
FROM (
SELECT DISTINCT ON (contexts.id) contexts.id AS id, hash_id, typename, user_id, parent_id, name, date, hyperdata, nodes_contexts.score AS score, nodes_contexts.category AS category, context_node_ngrams.doc_count AS doc_count
FROM contexts
JOIN context_node_ngrams ON contexts.id = context_node_ngrams.context_id
JOIN nodes_contexts ON contexts.id = nodes_contexts.context_id
JOIN ngrams ON context_node_ngrams.ngrams_id = ngrams.id
WHERE nodes_contexts.node_id = ?
AND ngrams.terms IN ?) t
ORDER BY t.doc_count DESC |]
------------------------------------------------------------------------
insertNodeContext :: [NodeContext] -> Cmd err Int
insertNodeContext ns = mkCmd $ \conn -> fromIntegral <$> (runInsert_ conn
......
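
Finally, a usage sketch of the new DB query (editor's illustration, not part of the commit). The import paths for NodeId and the NodeContext table module follow this diff; the modules assumed for Cmd and HasNodeError, as well as the corpus id and the terms, are placeholders.

{-# LANGUAGE OverloadedStrings #-}

import Gargantext.Database.Admin.Types.Node (NodeId(..))
import Gargantext.Database.Prelude (Cmd)
import Gargantext.Database.Query.Table.Node.Error (HasNodeError)
import Gargantext.Database.Query.Table.NodeContext
  ( ContextForNgramsTerms(..), getContextsForNgramsTerms )

-- Fetch every context of corpus 42 matching either term; results come back
-- ranked by doc_count, per the ORDER BY in the query above.
contextsForTwoTerms :: HasNodeError err => Cmd err [ContextForNgramsTerms]
contextsForTwoTerms =
  getContextsForNgramsTerms (NodeId 42) ["climate change", "biodiversity"]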