[facet] use score for the score column, not doc_count

parent 5dcd1840
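For context, a minimal sketch (not the project code) of the idea behind this change: the Score facet column is now fed by the per-context `score` from `nodes_contexts`, carried through the GROUP BY and widened to int8, instead of re-using the summed ngram count. Because that score is constant per context, it can be grouped rather than summed, which is why the aggregator switches from `OAgg.sumInt4` to `OAgg.groupBy`. The table and field names below are simplified stand-ins, assuming a plain Opaleye setup:

```haskell
{-# LANGUAGE Arrows #-}
module ScoreFacetSketch where

import Control.Arrow (returnA)
import Data.Profunctor.Product (p2)
import Opaleye
import qualified Opaleye.Aggregate as OAgg

-- Hypothetical two-column view of nodes_contexts: (context_id, score).
nodesContexts :: Table (Field SqlInt4, Field SqlInt4)
                       (Field SqlInt4, Field SqlInt4)
nodesContexts =
  table "nodes_contexts" (p2 ( tableField "context_id"
                             , tableField "score" ))

-- The score is no longer summed; it is constant per context, so it
-- simply joins the GROUP BY (OAgg.groupBy) and is cast to int8 to
-- match the facet's SqlInt8 Score field.
scorePerContext :: Select (Field SqlInt4, Field SqlInt8)
scorePerContext =
  aggregate (p2 (OAgg.groupBy, OAgg.groupBy)) $ proc () -> do
    (cid, score) <- selectTable nodesContexts -< ()
    returnA -< (cid, unsafeCast "int8" score)
```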
@@ -133,7 +133,10 @@ getTableApi cId tabType mLimit mOffset mOrderBy mQuery mYear =
   t <- getTable cId tabType mOffset mLimit mOrderBy mQuery mYear
   pure $ constructHashedResponse t

-postTableApi :: (CmdM env err m, MonadLogger m, HasNodeError err) => NodeId -> TableQuery -> m FacetTableResult
+postTableApi :: (CmdM env err m, MonadLogger m, HasNodeError err)
+             => NodeId
+             -> TableQuery
+             -> m FacetTableResult
 postTableApi cId tq = case tq of
   TableQuery o l order ft "" -> do
     $(logLocM) DEBUG $ "New search with no query"
@@ -170,7 +173,8 @@ searchInCorpus' cId t q o l order = do
     Right boolQuery -> do
       docs <- searchInCorpus cId t boolQuery o l order
       countAllDocs <- searchCountInCorpus cId t boolQuery
-      pure $ TableResult { tr_docs = docs, tr_count = countAllDocs }
+      pure $ TableResult { tr_docs = docs
+                         , tr_count = countAllDocs }

 getTable :: HasNodeError err
@@ -39,15 +39,12 @@ module Gargantext.Database.Query.Facet
 import Control.Arrow (returnA)
 import Control.Lens ((^.))
-import qualified Data.Text as T
-import Opaleye
-import qualified Opaleye.Aggregate as OAgg
-import Protolude hiding (null, map, sum, not)
-import qualified Opaleye.Internal.Unpackspec()
+import Data.Text qualified as T
 import Gargantext.Core
 import Gargantext.Core.Types
 import Gargantext.Core.Types.Query (Limit, Offset, IsTrash)
-import Gargantext.Database.Prelude
-import Gargantext.Database.Query.Facet.Types
 import Gargantext.Database.Query.Filter
 import Gargantext.Database.Query.Table.Context
 import Gargantext.Database.Query.Table.ContextNodeNgrams
@@ -55,11 +52,13 @@ import Gargantext.Database.Query.Table.Ngrams
 import Gargantext.Database.Query.Table.Node (defaultList)
 import Gargantext.Database.Query.Table.Node.Error (HasNodeError)
 import Gargantext.Database.Query.Table.NodeContext (queryNodeContextTable)
+import Gargantext.Database.Query.Facet.Types
+import Gargantext.Database.Prelude
 import Gargantext.Database.Schema.Context
 import Gargantext.Database.Schema.Node
 import Gargantext.Database.Schema.NodeContext
+import Opaleye
+import Opaleye.Aggregate qualified as OAgg
+import Opaleye.Internal.Unpackspec ()
+import Protolude hiding (null, map, sum, not)
 ------------------------------------------------------------------------
@@ -162,7 +161,7 @@ viewDocuments cId lId t ntId mQuery mYear =
                 , facetDoc_hyperdata = OAgg.groupBy
                 , facetDoc_category = OAgg.groupBy
                 , facetDoc_ngramCount = OAgg.sumInt4
-                , facetDoc_score = OAgg.sumInt4 })
+                , facetDoc_score = OAgg.groupBy })
     (viewDocumentsAgg cId lId t ntId mQuery mYear)

 viewDocumentsAgg :: CorpusId
@@ -188,7 +187,7 @@ viewDocumentsAgg cId lId t ntId mQuery mYear = proc () -> do
                  -- currently it is all 0's in the DB and the
                  -- search functionality on the frontend orders
                  -- by Score.
-                 , facetDoc_score = ngramCount
+                 , facetDoc_score = unsafeCast "int8" $ nc ^. nc_score
                  }

 -- TODO Join with context_node_ngrams at context_id/node_id and sum by
@@ -152,7 +152,7 @@ type FacetDocAggPart = Facet (Field SqlInt4 )
                              (Field SqlJsonb )
                              (Field SqlInt4) -- Category
                              (Field SqlInt4) -- Ngrams Count
-                             (Field SqlInt4) -- Score
+                             (Field SqlInt8) -- Score
 -----------------------------------------------------------------------
 -----------------------------------------------------------------------
@@ -180,15 +180,17 @@ getContextsForNgramsTerms cId ngramsTerms = do
              date,
              hyperdata,
              nodes_contexts.score AS score,
-             nodes_contexts.category AS category,
-             context_node_ngrams.doc_count AS doc_count
+             nodes_contexts.category AS category --,
+             -- context_node_ngrams.doc_count AS doc_count
           FROM contexts
           JOIN context_node_ngrams ON contexts.id = context_node_ngrams.context_id
           JOIN nodes_contexts ON contexts.id = nodes_contexts.context_id
           JOIN ngrams ON context_node_ngrams.ngrams_id = ngrams.id
           WHERE nodes_contexts.node_id = ?
             AND ngrams.terms IN ?) t
-         ORDER BY t.doc_count DESC |]
+         -- ORDER BY t.doc_count DESC
+         ORDER BY t.score DESC
+         |]
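For illustration only, a hedged sketch of how the reshaped query could be exercised directly with postgresql-simple; the project's own `runPGSQuery` wrapper, the full column list, and the surrounding subquery are omitted, and the function name below is hypothetical:

```haskell
{-# LANGUAGE QuasiQuotes #-}
module ContextsByScoreSketch where

import Data.Text (Text)
import Database.PostgreSQL.Simple (Connection, In (..), query)
import Database.PostgreSQL.Simple.SqlQQ (sql)

-- Fetch (context id, ngram term) pairs for a corpus node, ordered by
-- the per-context score, mirroring the reshaped ORDER BY above.
contextsByScore :: Connection -> Int -> [Text] -> IO [(Int, Text)]
contextsByScore conn corpusId terms = query conn
  [sql| SELECT contexts.id, ngrams.terms
          FROM contexts
          JOIN context_node_ngrams ON contexts.id = context_node_ngrams.context_id
          JOIN nodes_contexts      ON contexts.id = nodes_contexts.context_id
          JOIN ngrams              ON context_node_ngrams.ngrams_id = ngrams.id
         WHERE nodes_contexts.node_id = ?
           AND ngrams.terms IN ?
         ORDER BY nodes_contexts.score DESC |]
  (corpusId, In terms)
```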