humanities / gargantext · Commits

Commit 32c3b66f, authored Jan 13, 2016 by delanoe (parent afc6583f)

    [FIX] Session issue with get_session() function.

Showing 24 changed files with 173 additions and 54 deletions (+173 -54).
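The fix follows one pattern throughout: gargantext_web.db no longer exports a shared, module-level SQLAlchemy session; it now exposes a get_session() factory that returns a fresh scoped_session. Every call site creates its own session at the point of use, and get_or_create_node() gains an optional session parameter so helpers can attach the nodes they create to the caller's session.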
analysis/cooccurrences.py          +11  -9
analysis/lists.py                   +9  -1
annotations/views.py                +4  -2
gargantext_web/celery.py            +4  -2
gargantext_web/db.py               +11  -5
gargantext_web/views.py            +18  -3
gargantext_web/views_optimized.py  +10  -2
ngram/cvalue.py                     +4  -1
ngram/group.py                      +7  -4
ngram/importExport.py               +3  -2
ngram/lists.py                     +12  -1
ngram/mapList.py                    +3  -2
ngram/occurrences.py                +2  -1
ngram/specificity.py                +6  -5
ngram/stemLem.py                    +3  -1
ngram/stop.py                       +6  -3
ngram/tfidf.py                      +2  -1
ngram/tools.py                      +3  -2
ngram/workflow.py                   +1  -1
parsing/corpustools.py              +6  -2
rest_v1_0/api.py                   +13  -1
rest_v1_0/graph.py                  +3  -1
rest_v1_0/ngrams.py                +13  -1
tests/ngramstable/views.py         +19  -1
analysis/cooccurrences.py

@@ -5,7 +5,7 @@ from sqlalchemy.sql import func
 from gargantext_web.db import Node, Ngram, NodeNgram, NodeNgramNgram, \
         NodeNodeNgram, NodeHyperdataNgram, NodeHyperdata, Hyperdata
-from gargantext_web.db import session, cache, get_or_create_node, bulk_insert
+from gargantext_web.db import get_session, cache, get_or_create_node, bulk_insert
 from analysis.lists import WeightedMatrix, UnweightedList, Translations
 import inspect
 import datetime

@@ -40,24 +40,26 @@ def do_cooc(corpus=None
     # Security test
     field1, field2 = str(field1), str(field2)
+    session = get_session()
     # Get node
     node_cooc = get_or_create_node(nodetype='Cooccurrence', corpus=corpus
         , name_str="Cooccurrences corpus " \
             + str(corpus.id) + "list_id: " + str(miam_id)
         #, hyperdata={'field1': field1, 'field2':field2}
-        )
+        , session=session)
     # BEGIN
     # Saving the parameters of the analysis in the Node JSONB hyperdata field
     args, _, _, parameters = inspect.getargvalues(inspect.currentframe())
-    hyperdata = dict()
-    for parameter in parameters.keys():
-        if parameter != 'corpus' and parameter != 'node_cooc':
-            hyperdata[parameter] = parameters[parameter]
-    node_cooc.hyperdata = hyperdata
+    # hyperdata = dict()
+    #
+    # for parameter in parameters.keys():
+    #     if parameter != 'corpus' and parameter != 'node_cooc':
+    #         hyperdata[parameter] = parameters[parameter]
+    #
+    # node_cooc.hyperdata = hyperdata
+    #
     session.add(node_cooc)
     session.commit()
     # END
analysis/lists.py

 from collections import defaultdict
 from math import sqrt
-from gargantext_web.db import session, NodeNgram, NodeNgramNgram, bulk_insert
+from gargantext_web.db import get_session, NodeNgram, NodeNgramNgram, bulk_insert

 class BaseClass:

@@ -67,6 +67,7 @@ class Translations(BaseClass):
             self.items = defaultdict(int)
             self.groups = defaultdict(set)
         elif isinstance(other, int):
+            session = get_session()
             query = (session
                 .query(NodeNgramNgram.ngramy_id, NodeNgramNgram.ngramx_id)
                 .filter(NodeNgramNgram.node_id == other)

@@ -118,6 +119,7 @@ class Translations(BaseClass):
     def save(self, node_id):
         # delete previous data
+        session = get_session()
         session.query(NodeNgramNgram).filter(NodeNgramNgram.node_id == node_id).delete()
         session.commit()
         # insert new data

@@ -134,6 +136,7 @@ class WeightedMatrix(BaseClass):
         if other is None:
             self.items = defaultdict(lambda: defaultdict(float))
         elif isinstance(other, int):
+            session = get_session()
             query = (session
                 .query(NodeNgramNgram.ngramx_id, NodeNgramNgram.ngramy_id, NodeNgramNgram.score)
                 .filter(NodeNgramNgram.node_id == other)

@@ -159,6 +162,7 @@ class WeightedMatrix(BaseClass):
     def save(self, node_id):
         # delete previous data
+        session = get_session()
         session.query(NodeNgramNgram).filter(NodeNgramNgram.node_id == node_id).delete()
         session.commit()
         # insert new data

@@ -243,6 +247,7 @@ class UnweightedList(BaseClass):
         if other is None:
             self.items = set()
         elif isinstance(other, int):
+            session = get_session()
             query = (session
                 .query(NodeNgram.ngram_id)
                 .filter(NodeNgram.node_id == other)

@@ -323,6 +328,7 @@ class UnweightedList(BaseClass):
     def save(self, node_id):
         # delete previous data
+        session = get_session()
         session.query(NodeNgram).filter(NodeNgram.node_id == node_id).delete()
         session.commit()
         # insert new data

@@ -339,6 +345,7 @@ class WeightedList(BaseClass):
         if other is None:
             self.items = defaultdict(float)
         elif isinstance(other, int):
+            session = get_session()
             query = (session
                 .query(NodeNgram.ngram_id, NodeNgram.weight)
                 .filter(NodeNgram.node_id == other)

@@ -435,6 +442,7 @@ class WeightedList(BaseClass):
     def save(self, node_id):
         # delete previous data
+        session = get_session()
         session.query(NodeNgram).filter(NodeNgram.node_id == node_id).delete()
         session.commit()
         # insert new data
annotations/views.py

@@ -13,7 +13,7 @@ from rest_framework.exceptions import APIException
 from rest_framework.authentication import SessionAuthentication, BasicAuthentication
 from node.models import Node
-from gargantext_web.db import session, cache, Node, NodeNgram, Ngram
+from gargantext_web.db import get_session, cache, Node, NodeNgram, Ngram
 from ngram.lists import listIds, listNgramIds
 from gargantext_web.db import get_or_create_node

@@ -63,6 +63,7 @@ class NgramEdit(APIView):
     """
     renderer_classes = (JSONRenderer,)
     authentication_classes = (SessionAuthentication, BasicAuthentication)
+    session = get_session()

     def post(self, request, list_id, ngram_ids):
         """

@@ -134,7 +135,8 @@ class NgramCreate(APIView):
     """
     renderer_classes = (JSONRenderer,)
     authentication_classes = (SessionAuthentication, BasicAuthentication)
+    session = get_session()

     def post(self, request, list_id):
         """
         create NGram in a given list
gargantext_web/celery.py

@@ -11,13 +11,14 @@ import cProfile
 def debug_task(request):
     print('Request: {0!r}'.format(request))

-from gargantext_web.db import session, cache, Node
+from gargantext_web.db import get_session, cache, Node
 from ngram.workflow import ngram_workflow

 @shared_task
 def apply_sum(x, y):
     print(x + y)
+    session = get_session()
     print(session.query(Node.name).first())

@@ -33,7 +34,8 @@ def apply_workflow(corpus_id):
     dbg.show('ALL WORKFLOW')
     update_state = WorkflowTracking()
+    session = get_session()
     corpus = session.query(Node).filter(Node.id == corpus_id).first()
     update_state.processing_(corpus, "Parsing")
gargantext_web/db.py

@@ -2,7 +2,7 @@ from django.conf import settings
 from node import models

-__all__ = ['literalquery', 'session', 'cache', 'Session', 'bulk_insert', 'engine', 'get_cursor', 'User']
+__all__ = ['literalquery', 'cache', 'Session', 'bulk_insert', 'engine', 'get_cursor', 'User']

 # initialize sqlalchemy

@@ -136,10 +136,11 @@ def get_sessionmaker():
     from sqlalchemy.orm import sessionmaker
     return sessionmaker(bind=engine)

-Session = get_sessionmaker()
-session = scoped_session(Session)
+def get_session():
+    Session = get_sessionmaker()
+    return scoped_session(Session)
+
+session = get_session()

 # SQLAlchemy model objects caching
 from sqlalchemy import or_

@@ -161,6 +162,7 @@ class ModelCache(dict):
             for column in self._columns
             if column.type.python_type == str or key.__class__ == column.type.python_type
         ]
+        session = get_session()
         element = session.query(self._model).filter(or_(*conditions)).first()
         if element is None:
             raise KeyError

@@ -169,6 +171,7 @@ class ModelCache(dict):
     def preload(self):
         self.clear()
+        session = get_session()
         for element in session.query(self._model).all():
             for column_name in self._columns_names:
                 key = getattr(element, column_name)

@@ -234,12 +237,15 @@ class bulk_insert:
     readline = read

-def get_or_create_node(nodetype=None, corpus=None, corpus_id=None, name_str=None, hyperdata=None):
+def get_or_create_node(nodetype=None, corpus=None, corpus_id=None, name_str=None, hyperdata=None, session=None):
     '''
     Should be a method of the object. __get_or_create__ ?
     name_str :: String
     hyperdata :: Dict
     '''
+    if session is None:
+        session = get_session()
     if nodetype is None:
         print("Need to give a type node")
     else:
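The hunk above is the core of the commit; every other file adapts its call sites to it. A minimal, self-contained sketch of the before/after pattern (the in-memory SQLite engine here is a stand-in for the project's configured engine, for illustration only):

    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker, scoped_session

    engine = create_engine('sqlite://')   # stand-in for gargantext's engine

    # Before: a single scoped_session built at import time, shared by every
    # module that did `from gargantext_web.db import session`.
    Session = sessionmaker(bind=engine)
    session = scoped_session(Session)     # module-level shared state

    # After: each call site asks for its own session registry when needed.
    def get_session():
        return scoped_session(sessionmaker(bind=engine))

    def some_view():
        session = get_session()           # fresh scoped_session per call site
        # ... session.query(...) / session.add(...) / session.commit() ...

The likely motivation, given the commit message: state on the shared session (pending objects, failed transactions, the identity map) could leak between otherwise unrelated requests and Celery tasks, while giving each call site its own session isolates them.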
gargantext_web/views.py

@@ -39,14 +39,14 @@ from django.contrib.auth import authenticate, login, logout
 from scrappers.scrap_pubmed.admin import Logger

-from gargantext_web.db import *
 from sqlalchemy import or_, func

 from gargantext_web import about
 from gargantext_web.celery import empty_trash
-from gargantext_web.db import cache, NodeNgram, NodeNgramNgram
+from gargantext_web.db import *
+from gargantext_web.db import get_session, cache, NodeNgram, NodeNgramNgram

 def login_user(request):
     logout(request)

@@ -230,7 +230,7 @@ def projects(request):
     date = datetime.datetime.now()
     # print(Logger.write("STATIC_ROOT"))
+    session = get_session()
     projects = session.query(Node).filter(Node.user_id == user_id, Node.type_id == project_type_id).order_by(Node.date).all()
     number = len(projects)

@@ -300,6 +300,7 @@ def update_nodes(request, project_id, corpus_id, view=None):
     if not request.user.is_authenticated():
         return redirect('/login/?next=%s' % request.path)
+    session = get_session()
     try:
         offset = int(project_id)
         offset = int(corpus_id)

@@ -376,6 +377,8 @@ def corpus(request, project_id, corpus_id):
     corpus = cache.Node[int(corpus_id)]
     type_doc_id = cache.NodeType['Document'].id
+    session = get_session()
     number = session.query(func.count(Node.id)).filter(Node.parent_id == corpus_id, Node.type_id == type_doc_id).all()[0][0]

@@ -410,6 +413,7 @@ def newpaginatorJSON(request , corpus_id):
     # t = get_template('tests/newpag/thetable.html')
     # project = session.query(Node).filter(Node.id==project_id).first()
+    session = get_session()
     corpus = session.query(Node).filter(Node.id == corpus_id).first()
     type_document_id = cache.NodeType['Document'].id
     user_id = request.user.id

@@ -464,6 +468,7 @@ def newpaginatorJSON(request , corpus_id):
 def move_to_trash(node_id):
     try:
+        session = get_session()
         node = session.query(Node).filter(Node.id == node_id).first()
         previous_type_id = node.type_id

@@ -493,6 +498,7 @@ def move_to_trash_multiple(request):
         nodes2trash = json.loads(request.POST["nodeids"])
         print("nodes to the trash:")
         print(nodes2trash)
+        session = get_session()
         nodes = session.query(Node).filter(Node.id.in_(nodes2trash)).all()
         for node in nodes:
             node.type_id = cache.NodeType['Trash'].id

@@ -508,6 +514,8 @@ def delete_node(request, node_id):
     # do we have a valid user?
     user = request.user
+    session = get_session()
     node = session.query(Node).filter(Node.id == node_id).first()
     if not user.is_authenticated():

@@ -549,6 +557,7 @@ def chart(request, project_id, corpus_id):
     user = request.user
     date = datetime.datetime.now()
+    session = get_session()
     project = session.query(Node).filter(Node.id == project_id).first()
     corpus = session.query(Node).filter(Node.id == corpus_id).first()

@@ -566,6 +575,7 @@ def sankey(request, corpus_id):
     user = request.user
     date = datetime.datetime.now()
+    session = get_session()
     corpus = session.query(Node).filter(Node.id == corpus_id).first()
     html = t.render(Context({ \

@@ -584,6 +594,7 @@ def matrix(request, project_id, corpus_id):
     user = request.user
     date = datetime.datetime.now()
+    session = get_session()
     project = session.query(Node).filter(Node.id == project_id).first()
     corpus = session.query(Node).filter(Node.id == corpus_id).first()

@@ -602,6 +613,7 @@ def graph(request, project_id, corpus_id, generic=100, specific=100):
     user = request.user
     date = datetime.datetime.now()
+    session = get_session()
     project = session.query(Node).filter(Node.id == project_id).first()
     corpus = session.query(Node).filter(Node.id == corpus_id).first()

@@ -665,6 +677,7 @@ def corpus_csv(request, project_id, corpus_id):
     writer = csv.writer(response)
+    session = get_session()
     corpus_id = session.query(Node.id).filter(Node.id == corpus_id).first()
     type_document_id = cache.NodeType['Document'].id
     documents = session.query(Node).filter(Node.parent_id == corpus_id, Node.type_id == type_document_id).all()

@@ -737,6 +750,7 @@ def node_link(request, corpus_id):
     '''
     data = []
+    session = get_session()
     corpus = session.query(Node).filter(Node.id == corpus_id).first()
     data = get_cooc(request=request, corpus=corpus, type="node_link")
     return JsonHttpResponse(data)

@@ -744,6 +758,7 @@ def node_link(request, corpus_id):
 def sankey_csv(request, corpus_id):
     data = []
+    session = get_session()
     corpus = session.query(Node).filter(Node.id == corpus_id).first()
     data = [["source", "target", "value"]
gargantext_web/views_optimized.py

@@ -15,7 +15,7 @@ from threading import Thread
 from node.admin import CustomForm
 from gargantext_web.db import *
-from gargantext_web.db import get_or_create_node
+from gargantext_web.db import get_or_create_node, get_session
 from gargantext_web.settings import DEBUG, MEDIA_ROOT
 from rest_v1_0.api import JsonHttpResponse
 from django.db import connection

@@ -39,6 +39,8 @@ def project(request, project_id):
         raise Http404()
     # do we have a valid project?
+    session = get_session()
     project = (session.query(Node).filter(Node.id == project_id)

@@ -144,6 +146,7 @@ def project(request, project_id):
                 language_id = None,
                 hyperdata = {'Processing': "Parsing documents",}
             )
+            session = get_session()
             session.add(corpus)
             session.commit()

@@ -205,8 +208,10 @@ def tfidf(request, corpus_id, ngram_ids):
     # filter input
     ngram_ids = ngram_ids.split('a')
     ngram_ids = [int(i) for i in ngram_ids]
+    session = get_session()
     corpus = session.query(Node).filter(Node.id == corpus_id).first()
     tfidf_id = get_or_create_node(corpus=corpus, nodetype='Tfidf').id
     print(tfidf_id)
     # request data

@@ -253,6 +258,8 @@ def tfidf(request, corpus_id, ngram_ids):
 def getCorpusIntersection(request, corpuses_ids):
     FinalDict = False
+    session = get_session()
     if request.method == 'POST' and "nodeids" in request.POST and len(request.POST["nodeids"]) > 0:
         import ast
         node_ids = [int(i) for i in (ast.literal_eval(request.POST["nodeids"]))]

@@ -304,6 +311,7 @@ def getUserPortfolio(request , project_id):
     corpus_type_id = cache.NodeType['Corpus'].id
     results = {}
+    session = get_session()
     projs = session.query(Node).filter(Node.user_id == user_id, Node.type_id == project_type_id).all()
ngram/cvalue.py

@@ -6,7 +6,8 @@ from admin.utils import PrintException,DebugTime
 from gargantext_web.db import NodeNgram, NodeNodeNgram
 from gargantext_web.db import *
-from gargantext_web.db import get_or_create_node
+from gargantext_web.db import get_or_create_node, get_session

 from parsing.corpustools import *

@@ -43,6 +44,7 @@ def getNgrams(corpus=None, limit=1000):
     terms = dict()
     tfidf_node = get_or_create_node(nodetype='Tfidf (global)', corpus=corpus)
+    session = get_session()
     #print(corpus.name)
     ngrams = (session.query(Ngram.id, Ngram.terms, func.sum(NodeNgram.weight), NodeNodeNgram.score)
         .join(NodeNgram, NodeNgram.ngram_id == Ngram.id)

@@ -122,6 +124,7 @@ def compute_cvalue(corpus=None, limit=1000):
     result = cvalueAll()
     #print([n for n in result])
+    session = get_session()
     session.query(NodeNodeNgram).filter(NodeNodeNgram.nodex_id == cvalue_node.id).delete()
     session.commit()
ngram/group.py

@@ -5,7 +5,7 @@ from admin.utils import PrintException,DebugTime
 from gargantext_web.db import NodeNgram, NodeNodeNgram
 from gargantext_web.db import *
-from gargantext_web.db import get_or_create_node
+from gargantext_web.db import get_or_create_node, get_session
 from analysis.lists import Translations, UnweightedList
 from parsing.corpustools import *

@@ -55,6 +55,7 @@ def compute_groups(corpus, limit_inf=None, limit_sup=None, how='Stem'):
     dbg = DebugTime('Corpus #%d - group' % corpus.id)
     dbg.show('Group')
+    session = get_session()
     #spec,cvalue = getNgrams(corpus, limit_inf=limit_inf, limit_sup=limit_sup)
     #list_to_check=cvalue.union(spec)

@@ -62,16 +63,18 @@ def compute_groups(corpus, limit_inf=None, limit_sup=None, how='Stem'):
     stemIt = getStemmer(corpus)

     group_to_insert = set()
-    node_group = get_or_create_node(nodetype='Group', corpus=corpus)
+    node_group = get_or_create_node(nodetype='Group', corpus=corpus, session=session)

     miam_to_insert = set()
-    miam_node = get_or_create_node(nodetype='MiamList', corpus=corpus)
+    miam_node = get_or_create_node(nodetype='MiamList', corpus=corpus, session=session)

-    stop_node = get_or_create_node(nodetype='StopList', corpus=corpus)
+    stop_node = get_or_create_node(nodetype='StopList', corpus=corpus, session=session)
     #stop_list = UnweightedList(stop_node.id)

     Stop = aliased(NodeNgram)
     frequency = sa.func.count(NodeNgram.weight)
     ngrams = (session.query(Ngram.id, Ngram.terms, frequency)
         .join(NodeNgram, NodeNgram.ngram_id == Ngram.id)
         .join(Node, Node.id == NodeNgram.node_id)
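Passing the function's own session into get_or_create_node(), as in the hunk above, keeps the returned nodes attached to the session the function later queries and commits with. A hypothetical call in the shape of this file's code:

    session = get_session()
    node_group = get_or_create_node(nodetype='Group', corpus=corpus, session=session)
    # node_group lives in this session's identity map, so subsequent
    # session.add()/session.commit() calls here operate on the same object.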
ngram/importExport.py

@@ -12,7 +12,7 @@ TODO : REFACTOR 2) improvements in ngram creation (?bulk like node_ngram links)
 """
 from gargantext_web.db import Ngram, NodeNgram, NodeNodeNgram, NodeNgramNgram
-from gargantext_web.db import cache, session, get_or_create_node, bulk_insert
+from gargantext_web.db import cache, get_session, get_or_create_node, bulk_insert
 # import sqlalchemy as sa
 from sqlalchemy.sql import func, exists

@@ -105,6 +105,7 @@ def exportNgramLists(node,filename,delimiter="\t"):
        2 <=> mapList
     """
     # récupérer d'un coup les objets Ngram (avec terme)
+    session = get_session()
     if len(ngram_ids):
         ng_objs = session.query(Ngram).filter(Ngram.id.in_(ngram_ids)).all()
     else:

@@ -187,7 +188,7 @@ def importNgramLists(node,filename,delimiter="\t", del_lists=[]):
     (and ideally add its logic to analysis.lists.Translations)
     '''
+    session = get_session()
     # the node arg has to be a corpus here
     if not hasattr(node, "type_id") or node.type_id != 4:
         raise TypeError("IMPORT: node argument must be a Corpus Node")
ngram/lists.py

 from admin.utils import PrintException
-from gargantext_web.db import NodeNgram
+from gargantext_web.db import NodeNgram, get_session
 from gargantext_web.db import *
 from parsing.corpustools import *

@@ -20,6 +20,9 @@ def listIds(typeList=None, user_id=None, corpus_id=None):
     typeList :: String, Type of the Node that should be created
     [Node] :: List of Int, returned or created by the function
     '''
+    session = get_session()
     if typeList is None:
         typeList = 'MiamList'

@@ -74,6 +77,9 @@ def listNgramIds(list_id=None, typeList=None,
     doc_id : to get specific ngrams related to a document with Node.id=doc_id
     user_id : needed to create list if it does not exist
     '''
+    session = get_session()
     if typeList is None:
         typeList = ['MiamList', 'StopList']
     elif isinstance(typeList, string):

@@ -124,6 +130,7 @@ def ngramList(do, list_id, ngram_ids=None) :
     list_id = Int : list id (Node.id)
     '''
     results = []
+    session = get_session()
     if do == 'create':
         terms = copy(ngram_ids)

@@ -174,6 +181,7 @@ def ngrams2miam(user_id=None, corpus_id=None):
     '''
     Create a Miam List only
     '''
+    session = get_session()
     miam_id = listIds(typeList='MiamList', user_id=user_id, corpus_id=corpus_id)[0][0]
     print(miam_id)

@@ -205,6 +213,8 @@ def ngrams2miamBis(corpus):
     miam_id = get_or_create_node(corpus=corpus, nodetype='MiamList')
     stop_id = get_or_create_node(corpus=corpus, nodetype='StopList')
+    session = get_session()
     query = (session.query(
         literal_column(str(miam_id)).label("node_id"),

@@ -247,6 +257,7 @@ def doList(
     lem = equivalent Words which are lemmatized (but the main form)
     cvalue = equivalent N-Words according to C-Value (but the main form)
     '''
+    session = get_session()
     if type_list not in ['MiamList', 'MainList']:
         raise Exception("Type List (%s) not supported, try: \'MiamList\' or \'MainList\'" % type_list)
ngram/mapList.py

@@ -5,7 +5,7 @@ from admin.env import *
 from admin.utils import PrintException, DebugTime
 from gargantext_web.db import NodeNgram, NodeNodeNgram, NodeNgramNgram
-from gargantext_web.db import get_or_create_node, session, bulk_insert
+from gargantext_web.db import get_or_create_node, get_session, bulk_insert
 from sqlalchemy.sql import func
 from sqlalchemy import desc, asc, or_, and_, Date, cast, select

@@ -19,6 +19,7 @@ def compute_mapList(corpus,limit=500,n=1):
     '''
     According to Specificities and stoplist,
     '''
+    session = get_session()
     monograms_part = 0.005
     monograms_limit = round(limit * monograms_part)

@@ -89,7 +90,7 @@ def compute_mapList(corpus,limit=500,n=1):
 def insert_miam(corpus, ngrams=None, path_file_csv=None):
     dbg = DebugTime('Corpus #%d - computing Miam' % corpus.id)
+    session = get_session()
     node_miam = get_or_create_node(nodetype='MiamList', corpus=corpus)
     session.query(NodeNgram).filter(NodeNgram.node_id == node_miam.id).delete()
     session.commit()
ngram/occurrences.py

-from gargantext_web.db import session, cache, get_cursor
+from gargantext_web.db import get_session, cache, get_cursor
 from gargantext_web.db import Node, NodeNgram, NodeNodeNgram
 from gargantext_web.db import get_or_create_node
 from admin.utils import DebugTime

@@ -9,6 +9,7 @@ def compute_occs(corpus):
     dbg.show('Calculate occurrences')
     occs_node = get_or_create_node(nodetype='Occurrences', corpus=corpus)
+    session = get_session()
     #print(occs_node.id)
     (session.query(NodeNodeNgram)
ngram/specificity.py

@@ -10,7 +10,7 @@ import numpy as np
 import pandas as pd
 from analysis.cooccurrences import do_cooc
-from gargantext_web.db import session, cache, get_or_create_node, bulk_insert
+from gargantext_web.db import get_session, cache, get_or_create_node, bulk_insert
 from gargantext_web.db import NodeNgramNgram, NodeNodeNgram
 from sqlalchemy import desc, asc, or_, and_, Date, cast, select

@@ -19,7 +19,7 @@ def specificity(cooc_id=None, corpus=None, limit=100):
     '''
     Compute the specificity, simple calculus.
     '''
+    session = get_session()
     cooccurrences = (session.query(NodeNgramNgram)
         .filter(NodeNgramNgram.node_id == cooc_id)
         .order_by(NodeNgramNgram.score)

@@ -41,7 +41,7 @@ def specificity(cooc_id=None, corpus=None, limit=100):
     m = (xs - ys) / (2 * (x.shape[0] - 1))
     m = m.sort(inplace=False)
-    node = get_or_create_node(nodetype='Specificity', corpus=corpus)
+    node = get_or_create_node(nodetype='Specificity', corpus=corpus, session=session)
     data = zip([node.id for i in range(1, m.shape[0])]
               , [corpus.id for i in range(1, m.shape[0])]

@@ -63,8 +63,9 @@ def compute_specificity(corpus,limit=100):
     2) Compute the specificity score, saving it in database, return its Node
     '''
     dbg = DebugTime('Corpus #%d - specificity' % corpus.id)
-    list_cvalue = get_or_create_node(nodetype='Cvalue', corpus=corpus)
+    session = get_session()
+    list_cvalue = get_or_create_node(nodetype='Cvalue', corpus=corpus, session=session)
     cooc_id = do_cooc(corpus=corpus, cvalue_id=list_cvalue.id, limit=limit)
     specificity(cooc_id=cooc_id, corpus=corpus, limit=limit)
ngram/stemLem.py

@@ -4,12 +4,13 @@ from parsing.corpustools import *
 from gargantext_web.db import NodeNgram
 from sqlalchemy import desc, asc, or_, and_, Date, cast, select
-from gargantext_web.db import get_cursor, bulk_insert
+from gargantext_web.db import get_cursor, bulk_insert, get_session

 def get_ngramogram(corpus, limit=None):
     """
     Ngram is a composition of ograms (ogram = 1gram)
     """
+    session = get_session()
     try:
         query = (session.query(Ngram.id, Ngram.terms)

@@ -303,6 +304,7 @@ def stem_corpus(corpus_id=None):
     Returns Int as id of the Stem Node
     stem_corpus :: Int
     '''
+    session = get_session()
     corpus = session.query(Node).filter(Node.id == corpus_id).first()
     #print('Number of new ngrams to stem:',
ngram/stop.py

@@ -2,7 +2,7 @@ import re
 from admin.utils import PrintException
 from gargantext_web.db import Node, Ngram, NodeNgram, NodeNodeNgram
-from gargantext_web.db import cache, session, get_or_create_node, bulk_insert
+from gargantext_web.db import cache, get_session, get_or_create_node, bulk_insert
 import sqlalchemy as sa
 from sqlalchemy.sql import func

@@ -14,6 +14,8 @@ from ngram.tools import insert_ngrams
 from analysis.lists import WeightedList, UnweightedList

 def importStopList(node, filename, language='fr'):
+    session = get_session()
     with open(filename, "r") as f:
         stop_list = f.read().splitlines()

@@ -76,9 +78,10 @@ def compute_stop(corpus,limit=2000,debug=False):
     '''
     do some statitics on all stop lists of database of the same type
     '''
-    stop_node = get_or_create_node(nodetype='StopList', corpus=corpus)
+    stop_node_id = get_or_create_node(nodetype='StopList', corpus=corpus).id
     # TODO do a function to get all stop words with social scores
+    session = get_session()
     root = session.query(Node).filter(Node.type_id == cache.NodeType['Root'].id).first()
     root_stop_id = get_or_create_node(nodetype='StopList', corpus=root).id

@@ -108,5 +111,5 @@ def compute_stop(corpus,limit=2000,debug=False):
     #print([n for n in ngrams_to_stop])
     stop = WeightedList({n[0]: -1 for n in ngrams_to_stop})
-    stop.save(stop_node.id)
+    stop.save(stop_node_id)
ngram/tfidf.py

 #from admin.env import *
 from math import log
 from gargantext_web.db import *
-from gargantext_web.db import get_or_create_node
+from gargantext_web.db import get_session, get_or_create_node
 from admin.utils import DebugTime

@@ -128,6 +128,7 @@ def compute_tfidf_global(corpus):
     dbg.show('calculate terms frequencies sums')
     tfidf_node = get_or_create_node(nodetype='Tfidf (global)', corpus=corpus)
+    session = get_session()
     # update would be better
     session.query(NodeNodeNgram).filter(NodeNodeNgram.nodex_id == tfidf_node.id).delete()
     session.commit()
ngram/tools.py

-from gargantext_web.db import session
 from gargantext_web.db import Ngram, NodeNgram, NodeNgramNgram
-from gargantext_web.db import get_cursor, bulk_insert, get_or_create_node
+from gargantext_web.db import get_cursor, bulk_insert, get_or_create_node, get_session

 def insert_ngrams_to_list(list_of_ngrams, corpus, list_type='MapList', erase=True):
     '''
     Works only for Stop and Map
     '''
+    session = get_session()
     list_node = get_or_create_node(corpus=corpus, nodetype=list_type)
     group_node = get_or_create_node(corpus=corpus, nodetype='GroupList')
     group_list = (session.query(NodeNgramNgram.ngramy_id)
ngram/workflow.py

@@ -8,7 +8,7 @@ from gargantext_web.db import get_or_create_node
 from ngram.mapList import compute_mapList
 from ngram.occurrences import compute_occs
-from gargantext_web.db import session, Node, NodeNgram
+from gargantext_web.db import Node, NodeNgram
 from admin.utils import WorkflowTracking
parsing/corpustools.py

@@ -7,6 +7,7 @@ from math import log
 from admin.utils import DebugTime

 from gargantext_web.db import *
+from gargantext_web.db import get_session
 from .parsers_config import parsers as _parsers
 from ngram.tools import insert_ngrams

@@ -30,7 +31,7 @@ parsers = Parsers()
 # resources management
 def add_resource(corpus, **kwargs):
     # only for tests
-    session = Session()
+    session = get_session()
     resource = Resource(guid=str(random()), **kwargs)
     # User
     if 'user_id' not in kwargs:

@@ -66,7 +67,9 @@ def add_resource(corpus, **kwargs):
 def parse_resources(corpus, user=None, user_id=None):
     dbg = DebugTime('Corpus #%d - parsing' % corpus.id)
-    session = Session()
+    session = get_session()
     corpus_id = corpus.id
     type_id = cache.NodeType['Document'].id
     if user_id is None and user is not None:

@@ -205,6 +208,7 @@ ngramsextractors = NgramsExtractors()
 def extract_ngrams(corpus, keys, nlp=True):
     dbg = DebugTime('Corpus #%d - ngrams' % corpus.id)
+    session = get_session()
     default_language_iso2 = None if corpus.language_id is None else cache.Language[corpus.language_id].iso2
     # query the hyperdata associated with the given keys
     columns = [Node.id, Node.language_id] + [Node.hyperdata[key] for key in keys]
rest_v1_0/api.py

@@ -11,7 +11,7 @@ import datetime
 import copy
 from gargantext_web.views import move_to_trash
-from gargantext_web.db import session, cache, Node, NodeNgram, NodeNgramNgram, NodeNodeNgram, Ngram, Hyperdata, Node_Ngram \
+from gargantext_web.db import get_session, cache, Node, NodeNgram, NodeNgramNgram, NodeNodeNgram, Ngram, Hyperdata, Node_Ngram \
     , NodeType, Node_Hyperdata
 from gargantext_web.validation import validate, ValidationException
 from node import models

@@ -65,6 +65,7 @@ class APIException(_APIException):
         self.status_code = code
         self.detail = message

+session = get_session()

 _operators_dict = {
     "=": lambda field, value: (field == value),

@@ -102,6 +103,7 @@ class NodesChildrenNgrams(APIView):
     def get(self, request, node_id):
         # query ngrams
         ParentNode = aliased(Node)
+        session = get_session()
         ngrams_query = (session.query(Ngram.terms, func.sum(Node_Ngram.weight).label('count'))
             .join(Node_Ngram, Node_Ngram.ngram_id == Ngram.id)

@@ -144,6 +146,7 @@ class NodesChildrenNgramsIds(APIView):
     def get(self, request, node_id):
         # query ngrams
         ParentNode = aliased(Node)
+        session = get_session()
         ngrams_query = (session.query(Node.id, func.sum(Node_Ngram.weight).label('count'))
             .join(Node_Ngram, Node_Ngram.node_id == Node.id)

@@ -189,6 +192,7 @@ class Ngrams(APIView):
     def get(self, request, node_id):
         # query ngrams
         ParentNode = aliased(Node)
+        session = get_session()
         corpus = session.query(Node).filter(Node.id == node_id).first()
         group_by = []
         results = ['id', 'terms']

@@ -315,6 +319,7 @@ class NodesChildrenDuplicates(APIView):
             raise APIException('Missing GET parameter: "keys"', 400)
         keys = request.GET['keys'].split(',')
         # hyperdata retrieval
+        session = get_session()
         hyperdata_query = (session.query(Hyperdata)
             .filter(Hyperdata.name.in_(keys))

@@ -398,6 +403,7 @@ def get_metadata(corpus_id_list):
     # query hyperdata keys
     ParentNode = aliased(Node)
+    session = get_session()
     hyperdata_query = (session.query(Hyperdata)
         .join(Node_Hyperdata, Node_Hyperdata.hyperdata_id == Hyperdata.id)

@@ -472,6 +478,7 @@ class ApiNgrams(APIView):
         # query ngrams
         ParentNode = aliased(Node)
+        session = get_session()
         ngrams_query = (session.query(Ngram.terms, func.sum(Node_Ngram.weight).label('count'))
             .join(Node_Ngram, Node_Ngram.ngram_id == Ngram.id)

@@ -696,6 +703,7 @@ class NodesList(APIView):
     def get(self, request):
         print("user id : " + str(request.user))
+        session = get_session()
         query = (session.query(Node.id, Node.name, NodeType.name.label('type'))
             .filter(Node.user_id == int(request.user.id))

@@ -713,6 +721,7 @@ class NodesList(APIView):
 class Nodes(APIView):
     def get(self, request, node_id):
+        session = get_session()
         node = session.query(Node).filter(Node.id == node_id).first()
         if node is None:
             raise APIException('This node does not exist', 404)

@@ -733,6 +742,7 @@ class Nodes(APIView):
     def delete(self, request, node_id):
         user = request.user
+        session = get_session()
         node = session.query(Node).filter(Node.id == node_id).first()
         msgres = str()

@@ -752,6 +762,7 @@ class CorpusController:
             corpus_id = int(corpus_id)
         except:
             raise ValidationError('Corpora are identified by an integer.', 400)
+        session = get_session()
         corpusQuery = session.query(Node).filter(Node.id == corpus_id).first()
         # print(str(corpusQuery))
         # raise Http404("404 error.")

@@ -773,6 +784,7 @@ class CorpusController:
         # build query
         ParentNode = aliased(Node)
+        session = get_session()
         query = (session.query(Ngram.terms, func.count('*'))
             .join(Node_Ngram, Node_Ngram.ngram_id == Ngram.id)
rest_v1_0/graph.py

 from rest_v1_0.api import APIView, APIException, JsonHttpResponse, CsvHttpResponse
 from rest_framework.authentication import SessionAuthentication, BasicAuthentication
-from gargantext_web.db import session, Node
+from gargantext_web.db import get_session, Node
 from analysis.functions import get_cooc

 class Graph(APIView):
     authentication_classes = (SessionAuthentication, BasicAuthentication)
+    session = get_session()

     def get(self, request, corpus_id):
         '''
         Graph.get :: Get graph data as REST api.
rest_v1_0/ngrams.py

@@ -16,7 +16,7 @@ from gargantext_web.db import cache
 from gargantext_web.validation import validate, ValidationException
-from gargantext_web.db import session, Node, NodeNgram, NodeNgramNgram \
+from gargantext_web.db import get_session, Node, NodeNgram, NodeNgramNgram \
     , NodeNodeNgram, Ngram, Hyperdata, Node_Ngram, get_or_create_node

@@ -84,6 +84,7 @@ class List(APIView):
         start_ = time.time()
+        session = get_session()
         nodes_ngrams = session.query(Ngram.id, Ngram.terms).filter(Ngram.id.in_(list(ngram_ids.keys()))).all()
         for node in nodes_ngrams:
             if node.id in ngram_ids:

@@ -125,6 +126,7 @@ class List(APIView):
     def get(self, request, corpus_id, list_name):
         if not request.user.is_authenticated():
             return JsonHttpResponse({"request": "forbidden"})
+        session = get_session()
         corpus = session.query(Node).filter(Node.id == corpus_id).first()
         # if corpus==None:
         #     return JsonHttpResponse( {"request" : "forbidden"} )

@@ -162,6 +164,7 @@ class Ngrams(APIView):
     def get(self, request, node_id):
         if not request.user.is_authenticated():
             return JsonHttpResponse({"request": "forbidden"})
+        session = get_session()
         corpus = session.query(Node).filter(Node.id == node_id).first()
         # if corpus==None:
         #     return JsonHttpResponse( {"request" : "forbidden"} )

@@ -340,6 +343,7 @@ class Group(APIView):
     def get_group_id(self, node_id, user_id):
         node_id = int(node_id)
+        session = get_session()
         corpus = session.query(Node).filter(Node.id == node_id).first()
         if corpus == None:
             return None
         group = get_or_create_node(corpus=corpus, nodetype='Group')

@@ -370,6 +374,8 @@ class Group(APIView):
         import networkx as nx
         G = nx.Graph()
         DG = nx.DiGraph()
+        session = get_session()
         ngrams_ngrams = (session.query(NodeNgramNgram)
             .filter(NodeNgramNgram.node_id == group_id)

@@ -416,6 +422,8 @@ class Group(APIView):
     def delete(self, request, corpus_id):
         # input validation
+        session = get_session()
         input = validate(request.DATA, {'data': {'source': int, 'target': list}})
         group_id = get_group_id(corpus_id, request.user.id)

@@ -434,6 +442,7 @@ class Group(APIView):
             raise APIException('Missing parameter: "{\'data\' : [\'source\' : Int, \'target\' : [Int]}"', 400)

     def put(self, request, corpus_id):
+        session = get_session()
         group_rawreq = dict(request.data)

@@ -448,6 +457,8 @@ class Group(APIView):
             gdict.append(subform)
             GDict.append(gdict)
         existing_group_id = self.get_group_id(corpus_id, request.user.id)
+        session = get_session()
         grouped_ngrams = (session.query(NodeNgramNgram)
             .filter(NodeNgramNgram.node_id == existing_group_id)

@@ -569,6 +580,7 @@ class Keep(APIView):
     """
     renderer_classes = (JSONRenderer,)
     authentication_classes = (SessionAuthentication, BasicAuthentication)
+    session = get_session()

     def get(self, request, corpus_id):
         # list_id = session.query(Node).filter(Node.id==list_id).first()
tests/ngramstable/views.py

@@ -42,7 +42,7 @@ from gargantext_web import settings
 # from gargantext_web.db import *
-from gargantext_web.db import session, cache, Node, NodeNgram
+from gargantext_web.db import get_session, cache, Node, NodeNgram
 from sqlalchemy import func
 from rest_v1_0.api import JsonHttpResponse

@@ -65,6 +65,8 @@ def get_ngrams(request , project_id , corpus_id ):
     project = cache.Node[int(project_id)]
     corpus = cache.Node[int(corpus_id)]
     type_doc_id = cache.NodeType['Document'].id
+    session = get_session()
     number = session.query(func.count(Node.id)).filter(Node.parent_id == corpus_id, Node.type_id == type_doc_id).all()[0][0]
     myamlist_type_id = cache.NodeType['MiamList'].id
     miamlist = session.query(Node).filter(Node.parent_id == corpus_id, Node.type_id == myamlist_type_id).first()

@@ -121,6 +123,8 @@ def get_journals(request , project_id , corpus_id ):
     project = cache.Node[int(project_id)]
     corpus = cache.Node[int(corpus_id)]
     type_doc_id = cache.NodeType['Document'].id
+    session = get_session()
     number = session.query(func.count(Node.id)).filter(Node.parent_id == corpus_id, Node.type_id == type_doc_id).all()[0][0]
     the_query = """ SELECT hyperdata FROM node_node WHERE id=%d """ % (int(corpus_id))

@@ -150,7 +154,10 @@ def get_journals_json(request , project_id, corpus_id ):
     user_id = request.user.id
     document_type_id = cache.NodeType['Document'].id
+    session = get_session()
     documents = session.query(Node).filter(Node.parent_id == corpus_id, Node.type_id == document_type_id).all()
     for doc in documents:
         if "journal" in doc.hyperdata:
             journal = doc.hyperdata["journal"]

@@ -164,6 +171,8 @@ def get_journals_json(request , project_id, corpus_id ):
 def get_corpuses(request, node_ids):
     ngrams = [int(i) for i in node_ids.split("+")]
+    session = get_session()
     results = session.query(Node.id, Node.hyperdata).filter(Node.id.in_(ngrams)).all()
     for r in results:
         print(r)

@@ -232,11 +241,15 @@ def graph_share(request, generic=100, specific=100):
     # resource_id = cache.ResourceType["Pubmed (xml format)"].id
     # corpus = session.query(Node).filter( Node.type_id==resource_id , Node.user_id==user_id , Node.id==corpus_id , Node.type_id == cache.NodeType['Corpus'].id ).first()
     # if corpus==None: return JsonHttpResponse( {"request" : "forbidden"} )
+    session = get_session()
     miamlist = session.query(Node).filter(Node.user_id == user_id, Node.parent_id == corpus_id, Node.type_id == cache.NodeType['MiamList'].id).first()
     if miamlist == None:
         return JsonHttpResponse({"request": "forbidden"})
     graphurl = "node_link_share.json?token=" + request.GET["token"]
     date = datetime.datetime.now()
     t = get_template('explorer_share.html')
     html = t.render(Context({ \
         'debug': settings.DEBUG, \
         'date': date, \

@@ -263,6 +276,8 @@ def node_link_share(request):
     from analysis.functions import get_cooc
     data = []
+    session = get_session()
     corpus = session.query(Node).filter(Node.user_id == user_id, Node.id == corpus_id).first()
     data = get_cooc(request=request, corpus=corpus, type="node_link")

@@ -296,7 +311,10 @@ def share_resource(request , resource_id , group_id) :
     # [ getting all childs ids of this project ]
     ids2changeowner = [project2share.id]
+    session = get_session()
     corpuses = session.query(Node.id).filter(Node.user_id == request.user.id, Node.parent_id == resource_id, Node.type_id == cache.NodeType["Corpus"].id).all()
     for corpus in corpuses:
         ids2changeowner.append(corpus.id)
         lists = session.query(Node.id, Node.name).filter(Node.user_id == request.user.id, Node.parent_id == corpus.id).all()