Commit 265f0c27 authored by Romain Loth

api: add total count of publications in tfidf relevancy view (for graphexplorer)

parent 0713e2c3
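This commit changes the view's JSON output from a bare list of documents to an envelope that also carries the total number of matching documents before the limit is applied. A hypothetical sketch of the two shapes as a consumer such as the graph explorer would see them (the record fields are illustrative placeholders, not the view's actual node_dict keys):

# Hypothetical sketch: the view's JSON output before and after this commit.
# Record fields ("id", "score") are illustrative placeholders only.

old_payload = [                      # before: a bare list of document dicts
    {"id": 1, "score": 0.42},
    {"id": 2, "score": 0.17},
]

new_payload = {                      # after: an envelope with the pre-limit total
    "count": 1842,                   # total matching documents (before .limit())
    "records": old_payload,          # the limited, TFIDF-ordered page
}

def unpack(payload):
    """Hypothetical client helper: accepts both shapes during the transition."""
    if isinstance(payload, dict):
        return payload["count"], payload["records"]
    return len(payload), payload     # old shape: only the page length is known

print(unpack(new_payload))           # -> (1842, [{'id': 1, ...}, {'id': 2, ...}])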
@@ -144,7 +144,7 @@ class NodeListHaving(APIView):
Takes IDs of corpus and ngram and returns list of relevant documents in json format
according to TFIDF score (order is decreasing).
2016-09: add total counts to output json
'''
def get(self, request, corpus_id):
parameters = get_parameters(request)
@@ -178,26 +178,18 @@ class NodeListHaving(APIView):
.filter(Node.typename == 'DOCUMENT', Node.parent_id== corpus.id)
.filter(or_(*[NodeNodeNgram.ngram_id==ngram_id for ngram_id in ngram_ids]))
.group_by(Node)
.order_by(func.sum(NodeNodeNgram.score).desc())
.limit(limit)
)
# print("\n")
# print("in TFIDF:")
# print("\tcorpus_id:",corpus_id)
# convert query result to a list of dicts
# if nodes_query is None:
# print("TFIDF error, juste take sums")
# nodes_query = (session
# .query(Node, func.sum(NodeNgram.weight))
# .join(NodeNgram, NodeNgram.node_id == Node.id)
# .filter(Node.parent_id == corpus_id)
# .filter(Node.typename == 'DOCUMENT')
# .filter(or_(*[NodeNgram.ngram_id==ngram_id for ngram_id in ngram_ids]))
# .group_by(Node)
# .order_by(func.sum(NodeNgram.weight).desc())
# .limit(limit)
# )
for node, score in nodes_query:
# get the total count before applying limit
nodes_count = nodes_query.count()
# now the query with the limit
nodes_results_query = (nodes_query
.order_by(func.sum(NodeNodeNgram.score).desc())
.limit(limit)
)
for node, score in nodes_results_query:
print(node,score)
print("\t corpus:",corpus_id,"\t",node.name)
node_dict = {
@@ -209,7 +201,10 @@ class NodeListHaving(APIView):
node_dict[key] = node.hyperdata[key]
nodes_list.append(node_dict)
return JsonHttpResponse(nodes_list)
return JsonHttpResponse({
'count': nodes_count,
'records': nodes_list
})
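The key step in the new code is calling .count() on the grouped query before order_by and limit are applied, so the endpoint can report the full result size while still returning at most limit records. Below is a minimal runnable sketch of that SQLAlchemy pattern, assuming an in-memory SQLite database and hypothetical stand-in models (Document, Score) rather than the repo's Node / NodeNodeNgram:

# Minimal sketch of the count-before-limit pattern, with an in-memory SQLite
# database and hypothetical stand-ins for Node / NodeNodeNgram.
from sqlalchemy import Column, Float, Integer, create_engine, func
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class Document(Base):                 # stand-in for Node rows with typename == 'DOCUMENT'
    __tablename__ = 'document'
    id = Column(Integer, primary_key=True)

class Score(Base):                    # stand-in for NodeNodeNgram (per-ngram scores)
    __tablename__ = 'score'
    id = Column(Integer, primary_key=True)
    document_id = Column(Integer)
    score = Column(Float)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

session.add_all([Document(id=i) for i in range(1, 6)])
session.add_all([Score(document_id=i, score=float(i)) for i in range(1, 6)])
session.commit()

# base query: one row per matching document, with its summed score
nodes_query = (session
    .query(Document, func.sum(Score.score))
    .join(Score, Score.document_id == Document.id)
    .group_by(Document.id)
)

nodes_count = nodes_query.count()     # total matching documents, counted before the limit

nodes_results_query = (nodes_query    # same query, now ordered and limited for the page
    .order_by(func.sum(Score.score).desc())
    .limit(2)
)

print({'count': nodes_count,
       'records': [{'id': doc.id, 'score': s} for doc, s in nodes_results_query]})
# -> {'count': 5, 'records': [{'id': 5, 'score': 5.0}, {'id': 4, 'score': 4.0}]}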