Commit f64dd8ce authored by delanoe

[FEAT] Graph Explorer as a module: only the template is working, the REST API still needs to be connected.

parent 1325bf54
"""URL Configuration of GarganText
Views are shared between these modules:
- `api`, for JSON and CSV interaction with data
- `pages`, to present HTML views to the user
- `contents`, for Python-generated contents
- `annotations`, to annotate local context of a corpus (as global context)
- `graph explorer`, to explore graphs
"""
from django.conf.urls import include, url
@@ -14,10 +16,15 @@ import gargantext.views.api.urls
import gargantext.views.generated.urls
import gargantext.views.pages.urls
# Module Annotation
## tempo: unchanged doc-annotations --
from annotations import urls as annotations_urls
from annotations.views import main as annotations_main_view
# Module "Graph Explorer"
#from graphExplorer import urls as graphExplorer_urls
from graphExplorer.rest import Graph
from graphExplorer.views import explorer
urlpatterns = [
url(r'^admin/', admin.site.urls),
@@ -25,7 +32,13 @@ urlpatterns = [
url(r'^api/', include(gargantext.views.api.urls)),
url(r'^', include(gargantext.views.pages.urls)),
    # Module Annotation
    # tempo: unchanged doc-annotations routes --
    url(r'^annotations/', include(annotations_urls)),
    url(r'^projects/(\d+)/corpora/(\d+)/documents/(\d+)/$', annotations_main_view),

    # Module "Graph Explorer"
    url(r'^projects/(\d+)/corpora/(\d+)/explorer$', explorer),
    #url(r'^projects/(\d+)/corpora/(\d+)/explorer$', include(graphExplorer.urls))
    #url(r'^projects/(\d+)/corpora/(\d+)/explorer$', include(graphExplorer_urls))
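    # Illustration (hypothetical ids): the explorer route above matches URLs such as /projects/1/corpora/2/explorer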
]
from gargantext.util.db import *
from gargantext.util.db_cache import *
from gargantext.constants import *
from gargantext.models.nodes import Node
from gargantext.models.ngrams import Ngram, NodeNgram, NodeNgramNgram, \
NodeHyperdataNgram, NodeHyperdata, Hyperdata
from sqlalchemy import desc, asc, or_, and_, Date, cast, select
from sqlalchemy import literal_column
from sqlalchemy.orm import aliased
from sqlalchemy.sql import func
import datetime
import inspect
def do_cooc(corpus=None
, field1='ngrams', field2='ngrams'
, main_id=None, stop_id=None, group_id=None
, cvalue_id=None
, n_min=1, n_max=None
, start=None, end=None
, limit=1000
, isMonopartite=True
, hapax = 3
, session=None):
'''
    Compute the cooccurrence matrix and save it, returning NodeNgramNgram.node_id
    For the moment, lists of parameters are not supported because lists need to
    be merged beforehand.
corpus :: Corpus
cvalue_id :: Int
main_id :: Int
stop_id :: Int
group_id :: Int
    For the moment, start and end are simple: only the year is handled yet
start :: TimeStamp -- example: '2010-05-30 02:00:00+02'
end :: TimeStamp
limit :: Int
'''
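    # Illustrative call (a sketch only; assumes a SQLAlchemy `session` and a
    # `corpus` Node are available, as elsewhere in this module):
    #   cooc_node_id = do_cooc(corpus=corpus, start='2010-01-01', end='2012-12-31',
    #                          hapax=3, session=session)
    #   # -> id of the COOCCURRENCES node holding the saved matrix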
# TODO : add hyperdata here
# Security test
field1,field2 = str(field1), str(field2)
# Get node
node_cooc = session.query(Node).filter(
Node.parent_id==corpus.id,
Node.typename == "COOCCURRENCES"
).first()
    if node_cooc is None:
node_cooc = Node(
name="Coccurrences node",
parent_id=corpus.id,
user_id=corpus.user_id,
typename="COOCCURRENCES")
session.add(node_cooc)
session.commit()
# BEGIN
# Saving the parameters of the analysis in the Node JSONB hyperdata field
args, _, _, parameters = inspect.getargvalues(inspect.currentframe())
# hyperdata = dict()
#
# for parameter in parameters.keys():
# if parameter != 'corpus' and parameter != 'node_cooc':
# hyperdata[parameter] = parameters[parameter]
#
# node_cooc.hyperdata = hyperdata
#
# session.add(node_cooc)
# session.commit()
# END
session.query(NodeNgramNgram).filter(NodeNgramNgram.node_id==node_cooc.id).delete()
session.commit()
NodeNgramX = aliased(NodeNgram)
cooc_score = func.count(NodeNgramX.node_id).label('cooc_score')
#cooc_score = func.sqrt(func.sum(NodeNgramX.weight * NodeNgramY.weight)).label('cooc_score')
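    # cooc_score counts, for each (ngram X, ngram Y) pair, the number of DOCUMENT
    # nodes of the corpus in which both ngrams occur (see the joins and group_by below)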
#print([n for n in test_query])
if isMonopartite :
NodeNgramY = aliased(NodeNgram)
cooc_query = (session.query(NodeNgramX.ngram_id, NodeNgramY.ngram_id, cooc_score)
.join(Node, Node.id == NodeNgramX.node_id)
.join(NodeNgramY, NodeNgramY.node_id == Node.id)
.filter(Node.parent_id==corpus.id, Node.typename=="DOCUMENT")
)
else :
NodeNgramY = aliased(NodeNgram)
cooc_query = (session.query(NodeHyperdataNgram.ngram_id, NodeNgramY.ngram_id, cooc_score)
.join(Node, Node.id == NodeHyperdataNgram.node_id)
.join(NodeNgramY, NodeNgramY.node_id == Node.id)
.join(Hyperdata, Hyperdata.id == NodeHyperdataNgram.hyperdata_id)
.filter(Node.parent_id == corpus.id, Node.typename == "DOCUMENT")
.filter(Hyperdata.name == field1)
)
#print(cooc_query)
# Size of the ngrams between n_min and n_max
if n_min is not None or n_max is not None:
if isMonopartite:
NgramX = aliased(Ngram)
cooc_query = cooc_query.join(NgramX, NgramX.id == NodeNgramX.ngram_id)
NgramY = aliased(Ngram)
cooc_query = (cooc_query
.join(NgramY, NgramY.id == NodeNgramY.ngram_id)
)
if n_min is not None:
cooc_query = (cooc_query
.filter(NgramY.n >= n_min)
)
if isMonopartite:
cooc_query = cooc_query.filter(NgramX.n >= n_min)
    if n_max is not None:
        cooc_query = (cooc_query
            .filter(NgramY.n <= n_max)
            )
        if isMonopartite:
            cooc_query = cooc_query.filter(NgramX.n <= n_max)
# Cooc between the dates start and end
if start is not None:
#date_start = datetime.datetime.strptime ("2001-2-3 10:11:12", "%Y-%m-%d %H:%M:%S")
        # TODO : handle more complex date formats here.
date_start = datetime.datetime.strptime (str(start), "%Y-%m-%d")
date_start_utc = date_start.strftime("%Y-%m-%d %H:%M:%S")
Start=aliased(NodeHyperdata)
StartFormat = aliased(Hyperdata)
cooc_query = (cooc_query.join(Start, Start.node_id == Node.id)
.join(StartFormat, StartFormat.id == Start.hyperdata_id)
.filter(StartFormat.name == 'publication_date')
.filter(Start.value_datetime >= date_start_utc)
)
if end is not None:
        # TODO : handle more complex date formats here.
date_end = datetime.datetime.strptime (str(end), "%Y-%m-%d")
date_end_utc = date_end.strftime("%Y-%m-%d %H:%M:%S")
End=aliased(NodeHyperdata)
EndFormat = aliased(Hyperdata)
cooc_query = (cooc_query.join(End, End.node_id == Node.id)
.join(EndFormat, EndFormat.id == End.hyperdata_id)
.filter(EndFormat.name == 'publication_date')
.filter(End.value_datetime <= date_end_utc)
)
if isMonopartite:
        # Cooc is symmetric, take only the main cooccurrences and cut at the limit
cooc_query = cooc_query.filter(NodeNgramX.ngram_id < NodeNgramY.ngram_id)
cooc_query = cooc_query.having(cooc_score > hapax)
if isMonopartite:
cooc_query = cooc_query.group_by(NodeNgramX.ngram_id, NodeNgramY.ngram_id)
else:
cooc_query = cooc_query.group_by(NodeHyperdataNgram.ngram_id, NodeNgramY.ngram_id)
cooc_query = cooc_query.order_by(desc('cooc_score'))
# END of the query
matrix = LISTTYPES["COOCCURRENCES"](cooc_query)
#print(matrix)
if isMonopartite:
if main_id is not None :
main_list = LISTTYPES["MAINLIST"](main_id)
if stop_id is not None :
stop_list = LISTTYPES["STOPLIST"](stop_id)
if group_id is not None :
group_list = LISTTYPES["GROUPLIST"](group_id)
if main_id is not None and stop_id is None and group_id is None :
cooc = matrix & main_list
elif main_id is not None and stop_id is not None and group_id is None :
cooc = matrix & (main_list - stop_list)
elif main_id is not None and stop_id is not None and group_id is not None :
print("main_id is not None and stop_id is not None and group_id is not None")
cooc = matrix & (main_list * group_list - stop_list)
#cooc = matrix & (main_list - stop_list)
elif main_id is not None and stop_id is None and group_id is not None :
cooc = matrix & (main_list * group_list)
else :
cooc = matrix
else:
cooc = matrix
cooc.save(node_cooc.id)
return(node_cooc.id)
@@ -6,9 +6,9 @@ from gargantext.util.db_cache import cache
from gargantext.constants import DEFAULT_COOC_THRESHOLD
from datetime import datetime
def compute_coocs( corpus,
overwrite_id = None,
threshold = DEFAULT_COOC_THRESHOLD,
mainlist_id = None,
stoplist_id = None,
start = None,
@@ -23,10 +23,10 @@ def compute_coocs(corpus,
node_id | ngram_id | weight ngram1_id | ngram2_id | score |
--------+----------+-------- ----------+-----------+-------+
MyDocA | 487 | 1 => 487 | 294 | 2 |
MyDocA | 294 | 3
MyDocB | 487 | 1
MyDocB | 294 | 4
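       (ngrams 487 and 294 co-occur in the two documents MyDocA and MyDocB, hence their score of 2)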
Fill that info in DB:
- a *new* COOCCURRENCES node
@@ -103,8 +103,8 @@ def compute_coocs(corpus,
coocs_query = (
session.query(x1.ngram_id, x2.ngram_id, ucooc)
.filter(x1.node_id == x2.node_id) # <- by definition of cooc
.filter(x1.ngram_id != x2.ngram_id) # <- b/c not with itself
.filter(x1.node_id.in_(docids_subquery)) # <- b/c within corpus
.group_by(x1.ngram_id, x2.ngram_id)
)
@@ -209,7 +209,7 @@ def compute_coocs(corpus,
# 5) SAVE
# --------
# saving the parameters of the analysis in the Node JSON
new_hyperdata = { 'corpus' : corpus.id,
'threshold': threshold }
if overwrite_id:
# overwrite pre-existing id
from django.conf.urls import url
from . import main, auth
from . import projects, corpora, terms, graph
from . import projects, corpora, terms
urlpatterns = [
@@ -29,7 +29,4 @@ urlpatterns = [
# terms table for the corpus
url(r'^projects/(\d+)/corpora/(\d+)/terms/?$', terms.ngramtable),
# graph explorer
url(r'^projects/(\d+)/corpora/(\d+)/graph/?$', graph.explorer),
]
from gargantext.util.db import session
from gargantext.models.ngrams import NodeNgramNgram
from collections import defaultdict
from operator import itemgetter
from django.db import connection, transaction
import math
from math import log,sqrt
import numpy as np
import pandas as pd
from copy import copy
import networkx as nx
from networkx.readwrite import json_graph
from graphExplorer.louvain import best_partition, generate_dendogram, partition_at_level
from sqlalchemy.orm import aliased
def do_distance(cooc_id, field1=None, field2=None, isMonopartite=True, distance='conditional'):
'''
do_distance :: Int -> (Graph, Partition, {ids}, {weight})
'''
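    # Illustrative call (a sketch; assumes `cooc_id` points to an existing COOCCURRENCES node):
    #   G, partition, ids, weight = do_distance(cooc_id, distance='conditional')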
# implicit global session
authorized = ['conditional', 'distributional', 'cosine']
if distance not in authorized:
distance = 'conditional'
matrix = defaultdict(lambda : defaultdict(float))
ids = defaultdict(lambda : defaultdict(int))
labels = dict()
weight = dict()
Cooc = aliased(NodeNgramNgram)
query = session.query(Cooc).filter(Cooc.node_id==cooc_id).all()
for cooc in query:
matrix[cooc.ngramx_id][cooc.ngramy_id] = cooc.score
matrix[cooc.ngramy_id][cooc.ngramx_id] = cooc.score
ids[cooc.ngramx_id] = (field1, cooc.ngramx_id)
ids[cooc.ngramy_id] = (field2, cooc.ngramy_id)
weight[cooc.ngramx_id] = weight.get(cooc.ngramx_id, 0) + cooc.score
weight[cooc.ngramy_id] = weight.get(cooc.ngramy_id, 0) + cooc.score
x = pd.DataFrame(matrix).fillna(0)
if distance == 'conditional':
x = x / x.sum(axis=1)
#y = y / y.sum(axis=0)
xs = x.sum(axis=1) - x
ys = x.sum(axis=0) - x
        # top included or excluded
n = ( xs + ys) / (2 * (x.shape[0] - 1))
# top generic or specific
m = ( xs - ys) / (2 * (x.shape[0] - 1))
n = n.sort(inplace=False)
m = m.sort(inplace=False)
nodes_included = 500 #int(round(size/20,0))
#nodes_excluded = int(round(size/10,0))
nodes_specific = 500 #int(round(size/10,0))
#nodes_generic = int(round(size/10,0))
# TODO use the included score for the node size
n_index = pd.Index.intersection(x.index, n.index[:nodes_included])
# Generic:
#m_index = pd.Index.intersection(x.index, m.index[:nodes_generic])
# Specific:
m_index = pd.Index.intersection(x.index, m.index[-nodes_specific:])
#m_index = pd.Index.intersection(x.index, n.index[:nodes_included])
x_index = pd.Index.union(n_index, m_index)
xx = x[list(x_index)].T[list(x_index)]
# Removing unconnected nodes
xxx = xx.values
threshold = min(xxx.max(axis=1))
matrix_filtered = np.where(xxx >= threshold, xxx, 0)
#matrix_filtered = matrix_filtered.resize((90,90))
G = nx.from_numpy_matrix(np.matrix(matrix_filtered))
G = nx.relabel_nodes(G, dict(enumerate([ ids[id_][1] for id_ in list(xx.columns)])))
elif distance == 'cosine':
scd = defaultdict(lambda : defaultdict(int))
for i in matrix.keys():
for j in matrix.keys():
numerator = sum(
[
matrix[i][k] * matrix[j][k]
for k in matrix.keys()
if i != j and k != i and k != j
]
)
denominator = sqrt(
sum([
matrix[i][k]
for k in matrix.keys()
if k != i and k != j #and matrix[i][k] > 0
])
*
sum([
matrix[i][k]
for k in matrix.keys()
if k != i and k != j #and matrix[i][k] > 0
])
)
try:
scd[i][j] = numerator / denominator
except Exception as error:
scd[i][j] = 0
minmax = min([ max([ scd[i][j] for i in scd.keys()]) for j in scd.keys()])
G = nx.DiGraph()
G.add_edges_from(
[
(i, j, {'weight': scd[i][j]})
for i in scd.keys() for j in scd.keys()
if i != j and scd[i][j] > minmax and scd[i][j] > scd[j][i]
]
)
elif distance == 'distributional':
mi = defaultdict(lambda : defaultdict(int))
total_cooc = x.sum().sum()
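        # The loop below computes a pointwise-mutual-information-style score:
        # mi[i][j] is the log of the observed cooccurrence of i and j over the
        # cooccurrence expected from their marginals si and sj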
for i in matrix.keys():
si = sum([matrix[i][j] for j in matrix[i].keys() if i != j])
for j in matrix[i].keys():
sj = sum([matrix[j][k] for k in matrix[j].keys() if j != k])
if i!=j :
mi[i][j] = log( matrix[i][j] / ((si * sj) / total_cooc) )
r = defaultdict(lambda : defaultdict(int))
for i in matrix.keys():
for j in matrix.keys():
sumMin = sum(
[
min(mi[i][k], mi[j][k])
for k in matrix.keys()
if i != j and k != i and k != j and mi[i][k] > 0
]
)
sumMi = sum(
[
mi[i][k]
for k in matrix.keys()
if k != i and k != j and mi[i][k] > 0
]
)
try:
r[i][j] = sumMin / sumMi
except Exception as error:
r[i][j] = 0
# Need to filter the weak links, automatic threshold here
minmax = min([ max([ r[i][j] for i in r.keys()]) for j in r.keys()])
G = nx.DiGraph()
G.add_edges_from(
[
(i, j, {'weight': r[i][j]})
for i in r.keys() for j in r.keys()
if i != j and r[i][j] > minmax and r[i][j] > r[j][i]
]
)
# degree_max = max([(n, d) for n,d in G.degree().items()], key=itemgetter(1))[1]
# nodes_to_remove = [n for (n,d) in G.degree().items() if d <= round(degree_max/2)]
# G.remove_nodes_from(nodes_to_remove)
# Removing too connected nodes (find automatic way to do it)
#edges_to_remove = [ e for e in G.edges_iter() if
# nodes_to_remove = [n for n in degree if degree[n] <= 1]
# G.remove_nodes_from(nodes_to_remove)
def getWeight(item):
return item[1]
#
# node_degree = sorted(G.degree().items(), key=getWeight, reverse=True)
# #print(node_degree)
# nodes_too_connected = [n[0] for n in node_degree[0:(round(len(node_degree)/5))]]
#
# for n in nodes_too_connected:
# n_edges = list()
# for v in nx.neighbors(G,n):
# #print((n, v), G[n][v]['weight'], ":", (v,n), G[v][n]['weight'])
# n_edges.append(((n, v), G[n][v]['weight']))
#
# n_edges_sorted = sorted(n_edges, key=getWeight, reverse=True)
# #G.remove_edges_from([ e[0] for e in n_edges_sorted[round(len(n_edges_sorted)/2):]])
# #G.remove_edges_from([ e[0] for e in n_edges_sorted[(round(len(nx.neighbors(G,n))/3)):]])
# G.remove_edges_from([ e[0] for e in n_edges_sorted[10:]])
G.remove_nodes_from(nx.isolates(G))
partition = best_partition(G.to_undirected())
return(G,partition,ids,weight)
# Prelude lib
from copy import copy, deepcopy
from collections import defaultdict
from sqlalchemy.orm import aliased
# Math/Graph lib
import math
import pandas as pd
import numpy as np
import networkx as nx
from math import log
from networkx.readwrite import json_graph
# Gargantext lib
from gargantext.util.http import JsonHttpResponse
from gargantext.util.toolchain.ngram_coocs import compute_coocs
from graphExplorer.distance import do_distance

# imports used by get_cooc / get_graphA below
from gargantext.util.db import session
from gargantext.util.db_cache import cache
from gargantext.models.ngrams import Ngram
from django.db import connection
def get_cooc( request=None, corpus=None
, field1='ngrams', field2='ngrams'
, cooc_id=None , type='node_link'
, start=None , end=None
, hapax=1
, distance='conditional'
, size=1000
, bridgeness=5
):
'''
    get_cooc : computes the graph.
'''
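    # Illustrative call (a sketch; assumes `corpus` is a corpus Node):
    #   data = get_cooc(corpus=corpus, distance='conditional', bridgeness=5)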
# implicit global session
data = {}
#if session.query(Node).filter(Node.type_id==type_cooc_id, Node.parent_id==corpus_id).first() is None:
print("Cooccurrences do not exist yet, creating it.")
miam_id = 1
stop_id = 2
group_id = 3
SamuelFlag = False
# if field1 == field2 == 'ngrams' :
# isMonopartite = True
# SamuelFlag = True
# else:
# isMonopartite = False
    isMonopartite = True # Always. So, compute graph B and, from these B-nodes, build graph A
# data deleted each time
#cooc_id = get_or_create_node(nodetype='Cooccurrence', corpus=corpus).id
cooc_id = compute_cooc(corpus, field1="ngrams", field2="ngrams"
, miam_id=miam_id, group_id=group_id, stop_id=stop_id, limit=size
, isMonopartite=True, start=start , end=end , hapax=hapax)
G, partition, ids, weight = do_distance(cooc_id, field1="ngrams", field2="ngrams"
, isMonopartite=True, distance=distance)
if type == "node_link":
nodesB_dict = {}
for node_id in G.nodes():
try:
#node,type(labels[node])
G.node[node_id]['pk'] = ids[node_id][1]
nodesB_dict [ ids[node_id][1] ] = True
                # TODO the query below is not optimized (do it in do_distance).
the_label = session.query(Ngram.terms).filter(Ngram.id==node_id).first()
the_label = ", ".join(the_label)
G.node[node_id]['label'] = the_label
G.node[node_id]['size'] = weight[node_id]
G.node[node_id]['type'] = ids[node_id][0].replace("ngrams","terms")
G.node[node_id]['attributes'] = { "clust_default": partition[node_id]} # new format
# G.add_edge(node, "cluster " + str(partition[node]), weight=3)
except Exception as error:
pass #PrintException()
#print("error01: ",error)
links = []
i=1
if bridgeness > 0:
com_link = defaultdict(lambda: defaultdict(list))
com_ids = defaultdict(list)
for k, v in partition.items():
com_ids[v].append(k)
for e in G.edges_iter():
s = e[0]
t = e[1]
weight = G[ids[s][1]][ids[t][1]]["weight"]
if bridgeness < 0:
info = {
"s": ids[s][1] ,
"t": ids[t][1] ,
"w": weight
}
links.append(info)
else:
if partition[s] == partition[t]:
info = {
"s": ids[s][1] ,
"t": ids[t][1] ,
"w": weight
}
links.append(info)
if bridgeness > 0:
if partition[s] < partition[t]:
com_link[partition[s]][partition[t]].append((s,t,weight))
if bridgeness > 0:
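            # keep only the strongest inter-community ("bridge") links: between two
            # communities c1 and c2, the number of links kept is proportional to
            # `bridgeness` and to the sizes of c1 and c2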
for c1 in com_link.keys():
for c2 in com_link[c1].keys():
index = round(bridgeness*len(com_link[c1][c2]) / (len(com_ids[c1]) + len(com_ids[c2])))
#print((c1,len(com_ids[c1])), (c2,len(com_ids[c2])), index)
if index > 0:
for link in sorted(com_link[c1][c2], key=lambda x: x[2], reverse=True)[:index]:
#print(c1, c2, link[2])
info = {"s": link[0], "t": link[1], "w": link[2]}
links.append(info)
B = json_graph.node_link_data(G)
B["links"] = []
B["links"] = links
if field1 == field2 == 'ngrams' :
data["nodes"] = B["nodes"]
data["links"] = B["links"]
else:
A = get_graphA( "journal" , nodesB_dict , B["links"] , corpus )
print("#nodesA:",len(A["nodes"]))
print("#linksAA + #linksAB:",len(A["links"]))
print("#nodesB:",len(B["nodes"]))
print("#linksBB:",len(B["links"]))
data["nodes"] = A["nodes"] + B["nodes"]
data["links"] = A["links"] + B["links"]
print(" total nodes :",len(data["nodes"]))
print(" total links :",len(data["links"]))
print("")
elif type == "adjacency":
for node in G.nodes():
try:
#node,type(labels[node])
#G.node[node]['label'] = node
G.node[node]['name'] = node
#G.node[node]['size'] = weight[node]
G.node[node]['group'] = partition[node]
#G.add_edge(node, partition[node], weight=3)
except Exception as error:
print("error02: ",error)
data = json_graph.node_link_data(G)
elif type == 'bestpartition':
return(partition)
return(data)
def get_graphA( nodeA_type , NodesB , links , corpus ):
from analysis.InterUnion import Utils
print(" = = = == = = = ")
print("In get_graphA(), corpus id:",corpus.id)
nodeA_type_id = cache.Hyperdata[nodeA_type].id
threshold_cotainf = 0.02
max_nodeid = -1
for nodeid in NodesB:
if nodeid > max_nodeid:
max_nodeid = nodeid
# = = = = [ 01. Getting ALL documents of the Corpus c ] = = = = #
Docs = {}
document_type_id = cache.NodeType['Document'].id
sql_query = 'select id from node_node where parent_id='+str(corpus.id)+' and type_id='+str(document_type_id)
cursor = connection.cursor()
cursor.execute(sql_query)
results = cursor.fetchall()
for i in results:
Docs[i[0]] = True
print("docs:",len(Docs.keys()))
# = = = = [ / 01. Getting ALL documents of the Corpus c ] = = = = #
    # = = = = [ 02. Getting ALL Documents related with Ngrams of the semantic map ] = = = = #
sql_query = 'select nodey_id,ngram_id from node_nodenodengram where ngram_id IN (' + ','.join(map(str, NodesB.keys())) + ")"
cursor = connection.cursor()
cursor.execute(sql_query)
results = cursor.fetchall()
    # = = = = [ / 02. Getting ALL Documents related with Ngrams of the semantic map ] = = = = #
# = = = = [ 03. Now we limit the retrieved Documents(step 02) to those belonging to the Corpus c ] = = = = ]
Docs_and_ = {
"nodesA":{},
"nodesB":{}
}
NodesB_and_Docs = {}
for i in results:
doc_id = i[0]
ngram_id = i[1]
if ngram_id in NodesB and doc_id in Docs:
if doc_id not in Docs_and_["nodesB"]:
Docs_and_["nodesB"][doc_id] = []
Docs_and_["nodesB"][doc_id].append( ngram_id )
if ngram_id not in NodesB_and_Docs:
NodesB_and_Docs[ngram_id] = []
NodesB_and_Docs[ngram_id].append( doc_id )
# = = = = [ / 03. Now we limit the retrieved Documents(step 02) to those belonging to the Corpus c ] = = = = ]
# # = = = = [ Getting Authors ] = = = = ]
# Authors = {}
# sql_query = 'select node_id,value_string from node_node_hyperdata where node_id IN (' + ','.join(map(str, Docs_and_["nodesB"].keys())) + ")"+' and hyperdata_id=10'# 10 -> authors
# cursor = connection.cursor()
# cursor.execute(sql_query)
# results = cursor.fetchall()
# for i in results:
# doc_id = i[0]
# authors = i[1].split(",")
# for a in authors:
# if a not in Authors:
# Authors[a] = 0
# Authors[a] += 1
# print("")
# print("#authors:")
# import pprint
# pprint.pprint(Authors)
# print("")
# # = = = = [ / Getting Authors ] = = = = ]
# = = = = [ 04. Getting A-elems and making the dictionaries] = = = = ]
sql_query = 'select node_id,value_string from node_node_hyperdata where node_id IN (' + \
','.join(map(str, Docs_and_["nodesB"].keys())) + ")"+' and hyperdata_id='+str(nodeA_type_id)
cursor = connection.cursor()
cursor.execute(sql_query)
results = cursor.fetchall()
A_Freq = {}
A_int2str = {}
A_str2int = {}
counter = max_nodeid+1
for i in results:
doc_id = i[0]
a = i[1]
if a not in A_str2int:
A_str2int[ a ] = counter
A_int2str[counter] = a
counter += 1
for i in results:
doc_id = i[0]
a = A_str2int[i[1]]
Docs_and_["nodesA"][doc_id] = a
if a not in A_Freq:
A_Freq[ a ] = 0
A_Freq[ a ] += 1
# = = = = [ / 04. Getting A-elems and making the dictionaries ] = = = = ]
# = = = = [ Filling graph-A ] = = = = ]
Graph_A = Utils()
for i in NodesB_and_Docs:
ngram = i
docs = NodesB_and_Docs[i]
k_A_clique = {}
for doc in docs:
k_A = Docs_and_["nodesA"][doc]
k_A_clique[k_A] = True
if len(k_A_clique.keys())>1:
Graph_A.addCompleteSubGraph( k_A_clique.keys() )
# = = = = [ / Filling graph-A ] = = = = ]
# = = = = [ graph-A to JSON ] = = = = ]
A = Graph_A.G
for node_id in A.nodes():
A.node[node_id]['label'] = A_int2str[node_id]
A.node[node_id]['size'] = A_Freq[node_id]
A.node[node_id]['type'] = nodeA_type
A.node[node_id]['attributes'] = { "clust_default": 1 }
A_links = []
min_weight = 999999
max_weight = -1
Weights_Dist = {}
for e in A.edges_iter():
s = e[0]
t = e[1]
w = A[s][t]["weight"]
if w not in Weights_Dist:
Weights_Dist[ w ] = { "freq": 0 , "deleted":0 }
Weights_Dist[ w ]["freq"] += 1
if min_weight > w:
min_weight = w
if max_weight < w:
max_weight = w
edges2remove = []
for e in A.edges_iter():
s = e[0]
t = e[1]
w = A[s][t]["weight"]
if Weights_Dist [ w ]["freq"] < ( len(A)*3 ): # weight-threshold
info = {
"s":s ,
"t":t ,
"w": w / max_weight # normalization
}
A_links.append(info)
else:
# if Weights_Dist [ w ]["deleted"] < round(Weights_Dist [ w ]["freq"]*0.95):
atuple = (s,t)
edges2remove.append(atuple)
Weights_Dist [ w ]["deleted"] += 1
A.remove_edges_from( edges2remove )
A.remove_nodes_from(nx.isolates(A))
data = json_graph.node_link_data(A) # saving nodesA
AB = nx.Graph()
for i in NodesB_and_Docs:
b = i
docs = NodesB_and_Docs[i]
for doc in docs:
a = Docs_and_["nodesA"][doc]
if A.has_node(a):
AB.add_edge( a , b )
AB_links = []
for e in AB.edges_iter():
info = { "s": e[0], "t": e[1], "w": 1 }
AB_links.append(info)
data["links"] = A_links + AB_links # saving AA-links and AB-links
# = = = = [ / graph-A to JSON ] = = = = ]
return data
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
This module implements community detection.
"""
__all__ = ["partition_at_level", "modularity", "best_partition", "generate_dendogram", "induced_graph"]
__author__ = """Thomas Aynaud (thomas.aynaud@lip6.fr)"""
# Copyright (C) 2009 by
# Thomas Aynaud <thomas.aynaud@lip6.fr>
# All rights reserved.
# BSD license.
# Adapted to python 3 by anoe
__PASS_MAX = -1
__MIN = 0.0000001
import networkx as nx
import sys
import types
import array
def partition_at_level(dendogram, level) :
"""Return the partition of the nodes at the given level
A dendogram is a tree and each level is a partition of the graph
nodes. Level 0 is the first partition, which contains the smallest
communities, and the best is len(dendogram) - 1. The higher the
level is, the bigger are the communities
Parameters
----------
dendogram : list of dict
       a list of partitions, i.e. dictionaries where the keys of level i+1 are the values of level i.
level : int
the level which belongs to [0..len(dendogram)-1]
Returns
-------
    partition : dictionary
       A dictionary where keys are the nodes and values the communities they belong to
Raises
------
KeyError
If the dendogram is not well formed or the level is too high
See Also
--------
best_partition which directly combines partition_at_level and
generate_dendogram to obtain the partition of highest modularity
Examples
--------
>>> G=nx.erdos_renyi_graph(100, 0.01)
>>> dendo = generate_dendogram(G)
>>> for level in range(len(dendo) - 1) :
>>> print("partition at level", level, "is", partition_at_level(dendo, level))
"""
partition = dendogram[0].copy()
for index in range(1, level + 1) :
for node, community in tuple(partition.items()) :
partition[node] = dendogram[index][community]
return(partition)
def modularity(partition, graph) :
"""Compute the modularity of a partition of a graph
Parameters
----------
partition : dict
       the partition of the nodes, i.e. a dictionary where keys are the nodes and values the communities
graph : networkx.Graph
the networkx graph which is decomposed
Returns
-------
modularity : float
The modularity
Raises
------
KeyError
If the partition is not a partition of all graph nodes
ValueError
If the graph has no link
TypeError
If graph is not a networkx.Graph
References
----------
.. 1. Newman, M.E.J. & Girvan, M. Finding and evaluating community structure in networks. Physical Review E 69, 26113(2004).
Examples
--------
>>> G=nx.erdos_renyi_graph(100, 0.01)
>>> part = best_partition(G)
>>> modularity(part, G)
"""
if type(graph) != nx.Graph :
raise TypeError("Bad graph type, use only non directed graph")
inc = dict([])
deg = dict([])
links = graph.size(weight='weight')
if links == 0 :
raise ValueError("A graph without link has an undefined modularity")
for node in graph :
com = partition[node]
deg[com] = deg.get(com, 0.) + graph.degree(node, weight = 'weight')
for neighbor, datas in tuple(graph[node].items()) :
weight = datas.get("weight", 1)
if partition[neighbor] == com :
if neighbor == node :
inc[com] = inc.get(com, 0.) + float(weight)
else :
inc[com] = inc.get(com, 0.) + float(weight) / 2.
res = 0.
for com in set(partition.values()) :
res += (inc.get(com, 0.) / links) - (deg.get(com, 0.) / (2.*links))**2
return res
def best_partition(graph, partition = None) :
"""Compute the partition of the graph nodes which maximises the modularity
    (or tries to) using the Louvain heuristics
This is the partition of highest modularity, i.e. the highest partition of the dendogram
generated by the Louvain algorithm.
Parameters
----------
graph : networkx.Graph
the networkx graph which is decomposed
    partition : dict, optional
       the algorithm will start using this partition of the nodes. It's a dictionary where keys are the nodes and values the communities
Returns
-------
    partition : dictionary
The partition, with communities numbered from 0 to number of communities
Raises
------
NetworkXError
If the graph is not Eulerian.
See Also
--------
generate_dendogram to obtain all the decompositions levels
Notes
-----
Uses Louvain algorithm
References
----------
.. 1. Blondel, V.D. et al. Fast unfolding of communities in large networks. J. Stat. Mech 10008, 1-12(2008).
Examples
--------
>>> #Basic usage
>>> G=nx.erdos_renyi_graph(100, 0.01)
>>> part = best_partition(G)
>>> #other example to display a graph with its community :
>>> #better with karate_graph() as defined in networkx examples
>>> #erdos renyi don't have true community structure
>>> G = nx.erdos_renyi_graph(30, 0.05)
>>> #first compute the best partition
    >>> partition = best_partition(G)
>>> #drawing
>>> size = float(len(set(partition.values())))
>>> pos = nx.spring_layout(G)
>>> count = 0.
>>> for com in set(partition.values()) :
>>> count = count + 1.
>>> list_nodes = [nodes for nodes in partition.keys()
>>> if partition[nodes] == com]
>>> nx.draw_networkx_nodes(G, pos, list_nodes, node_size = 20,
node_color = str(count / size))
>>> nx.draw_networkx_edges(G,pos, alpha=0.5)
>>> plt.show()
"""
dendo = generate_dendogram(graph, partition)
return partition_at_level(dendo, len(dendo) - 1 )
def generate_dendogram(graph, part_init = None) :
"""Find communities in the graph and return the associated dendogram
A dendogram is a tree and each level is a partition of the graph
nodes. Level 0 is the first partition, which contains the smallest
communities, and the best is len(dendogram) - 1. The higher the level
is, the bigger are the communities
Parameters
----------
graph : networkx.Graph
the networkx graph which will be decomposed
    part_init : dict, optional
       the algorithm will start using this partition of the nodes. It's a
       dictionary where keys are the nodes and values the communities
Returns
-------
dendogram : list of dictionaries
        a list of partitions, i.e. dictionaries where the keys of level i+1 are the
        values of level i, and where the keys of the first are the nodes of the graph
Raises
------
TypeError
If the graph is not a networkx.Graph
See Also
--------
best_partition
Notes
-----
Uses Louvain algorithm
References
----------
.. 1. Blondel, V.D. et al. Fast unfolding of communities in large networks. J. Stat. Mech 10008, 1-12(2008).
Examples
--------
>>> G=nx.erdos_renyi_graph(100, 0.01)
>>> dendo = generate_dendogram(G)
>>> for level in range(len(dendo) - 1) :
>>> print "partition at level", level, "is", partition_at_level(dendo, level)
"""
if type(graph) != nx.Graph :
raise TypeError("Bad graph type, use only non directed graph")
#special case, when there is no link
#the best partition is everyone in its community
if graph.number_of_edges() == 0 :
part = dict([])
for node in graph.nodes() :
part[node] = node
return part
current_graph = graph.copy()
status = Status()
status.init(current_graph, part_init)
mod = __modularity(status)
status_list = list()
__one_level(current_graph, status)
new_mod = __modularity(status)
partition = __renumber(status.node2com)
status_list.append(partition)
mod = new_mod
current_graph = induced_graph(partition, current_graph)
status.init(current_graph)
while True :
__one_level(current_graph, status)
new_mod = __modularity(status)
if new_mod - mod < __MIN :
break
partition = __renumber(status.node2com)
status_list.append(partition)
mod = new_mod
current_graph = induced_graph(partition, current_graph)
status.init(current_graph)
return status_list[:]
def induced_graph(partition, graph) :
"""Produce the graph where nodes are the communities
there is a link of weight w between communities if the sum of the
weights of the links between their elements is w
Parameters
----------
partition : dict
a dictionary where keys are graph nodes and values the part the node belongs to
graph : networkx.Graph
the initial graph
Returns
-------
g : networkx.Graph
a networkx graph where nodes are the parts
Examples
--------
>>> n = 5
>>> g = nx.complete_graph(2*n)
>>> part = dict([])
>>> for node in g.nodes() :
>>> part[node] = node % 2
>>> ind = induced_graph(part, g)
>>> goal = nx.Graph()
>>> goal.add_weighted_edges_from([(0,1,n*n),(0,0,n*(n-1)/2), (1, 1, n*(n-1)/2)])
    >>> nx.is_isomorphic(ind, goal)
True
"""
ret = nx.Graph()
ret.add_nodes_from(partition.values())
for node1, node2, datas in graph.edges_iter(data = True) :
weight = datas.get("weight", 1)
com1 = partition[node1]
com2 = partition[node2]
w_prec = ret.get_edge_data(com1, com2, {"weight":0}).get("weight", 1)
ret.add_edge(com1, com2, weight = w_prec + weight)
return ret
def __renumber(dictionary) :
"""Renumber the values of the dictionary from 0 to n
"""
count = 0
ret = dictionary.copy()
new_values = dict([])
for key in dictionary.keys() :
value = dictionary[key]
new_value = new_values.get(value, -1)
if new_value == -1 :
new_values[value] = count
new_value = count
count = count + 1
ret[key] = new_value
return ret
def __load_binary(data) :
"""Load binary graph as used by the cpp implementation of this algorithm
"""
    if isinstance(data, str) :
data = open(data, "rb")
reader = array.array("I")
reader.fromfile(data, 1)
num_nodes = reader.pop()
reader = array.array("I")
reader.fromfile(data, num_nodes)
cum_deg = reader.tolist()
num_links = reader.pop()
reader = array.array("I")
reader.fromfile(data, num_links)
links = reader.tolist()
graph = nx.Graph()
graph.add_nodes_from(range(num_nodes))
prec_deg = 0
for index in range(num_nodes) :
last_deg = cum_deg[index]
neighbors = links[prec_deg:last_deg]
graph.add_edges_from([(index, int(neigh)) for neigh in neighbors])
prec_deg = last_deg
return graph
def __one_level(graph, status) :
"""Compute one level of communities
"""
modif = True
nb_pass_done = 0
cur_mod = __modularity(status)
new_mod = cur_mod
while modif and nb_pass_done != __PASS_MAX :
cur_mod = new_mod
modif = False
nb_pass_done += 1
for node in graph.nodes() :
com_node = status.node2com[node]
degc_totw = status.gdegrees.get(node, 0.) / (status.total_weight*2.)
neigh_communities = __neighcom(node, graph, status)
__remove(node, com_node,
neigh_communities.get(com_node, 0.), status)
best_com = com_node
best_increase = 0
for com, dnc in tuple(neigh_communities.items()) :
incr = dnc - status.degrees.get(com, 0.) * degc_totw
if incr > best_increase :
best_increase = incr
best_com = com
__insert(node, best_com,
neigh_communities.get(best_com, 0.), status)
if best_com != com_node :
modif = True
new_mod = __modularity(status)
if new_mod - cur_mod < __MIN :
break
class Status :
"""
To handle several data in one struct.
Could be replaced by named tuple, but don't want to depend on python 2.6
"""
node2com = {}
total_weight = 0
internals = {}
degrees = {}
gdegrees = {}
def __init__(self) :
self.node2com = dict([])
self.total_weight = 0
self.degrees = dict([])
self.gdegrees = dict([])
self.internals = dict([])
self.loops = dict([])
def __str__(self) :
return ("node2com : " + str(self.node2com) + " degrees : "
+ str(self.degrees) + " internals : " + str(self.internals)
+ " total_weight : " + str(self.total_weight))
def copy(self) :
"""Perform a deep copy of status"""
new_status = Status()
new_status.node2com = self.node2com.copy()
new_status.internals = self.internals.copy()
new_status.degrees = self.degrees.copy()
new_status.gdegrees = self.gdegrees.copy()
        new_status.total_weight = self.total_weight
        new_status.loops = self.loops.copy()
        return new_status
def init(self, graph, part = None) :
"""Initialize the status of a graph with every node in one community"""
count = 0
self.node2com = dict([])
self.total_weight = 0
self.degrees = dict([])
self.gdegrees = dict([])
self.internals = dict([])
try:
self.total_weight = graph.size(weighted = True)
except:
self.total_weight = graph.size(weight='weight')
        if part is None :
for node in graph.nodes() :
self.node2com[node] = count
try:
deg = float(graph.degree(node, weighted = True))
except:
deg = float(graph.degree(node, weight = 'weight'))
if deg < 0 :
raise ValueError("Bad graph type, use positive weights")
self.degrees[count] = deg
self.gdegrees[node] = deg
self.loops[node] = float(graph.get_edge_data(node, node,
{"weight":0}).get("weight", 1))
self.internals[count] = self.loops[node]
count = count + 1
else :
for node in graph.nodes() :
com = part[node]
self.node2com[node] = com
                deg = float(graph.degree(node, weight = 'weight'))
self.degrees[com] = self.degrees.get(com, 0) + deg
self.gdegrees[node] = deg
inc = 0.
for neighbor, datas in tuple(graph[node].items()) :
weight = datas.get("weight", 1)
if weight <= 0 :
raise ValueError("Bad graph type, use positive weights")
if part[neighbor] == com :
if neighbor == node :
inc += float(weight)
else :
inc += float(weight) / 2.
self.internals[com] = self.internals.get(com, 0) + inc
def __neighcom(node, graph, status) :
"""
    Compute the communities in the neighborhood of node in the graph given
with the decomposition node2com
"""
weights = {}
for neighbor, datas in tuple(graph[node].items()):
if neighbor != node :
weight = datas.get("weight", 1)
neighborcom = status.node2com[neighbor]
weights[neighborcom] = weights.get(neighborcom, 0) + weight
return weights
def __remove(node, com, weight, status) :
""" Remove node from community com and modify status"""
status.degrees[com] = ( status.degrees.get(com, 0.)
- status.gdegrees.get(node, 0.) )
status.internals[com] = float( status.internals.get(com, 0.) -
weight - status.loops.get(node, 0.) )
status.node2com[node] = -1
def __insert(node, com, weight, status) :
""" Insert node into community and modify status"""
status.node2com[node] = com
status.degrees[com] = ( status.degrees.get(com, 0.) +
status.gdegrees.get(node, 0.) )
status.internals[com] = float( status.internals.get(com, 0.) +
weight + status.loops.get(node, 0.) )
def __modularity(status) :
"""
    Compute the modularity of the partition of the graph quickly, using the precomputed status
"""
links = float(status.total_weight)
result = 0.
for community in set(status.node2com.values()) :
in_degree = status.internals.get(community, 0.)
degree = status.degrees.get(community, 0.)
if links > 0 :
result = result + in_degree / links - ((degree / (2.*links))**2)
return result
def __main() :
"""Main function to mimic C++ version behavior"""
try :
filename = sys.argv[1]
graphfile = __load_binary(filename)
partition = best_partition(graphfile)
        print(str(modularity(partition, graphfile)), file=sys.stderr)
for elem, part in tuple(partition.items()) :
print(str(elem) + " " + str(part))
except (IndexError, IOError):
print("Usage : ./community filename")
print("find the communities in graph filename and display the dendogram")
print("Parameters:")
print("filename is a binary file as generated by the ")
print("convert utility distributed with the C implementation")
if __name__ == "__main__" :
__main()
from gargantext.util.http import APIView, APIException, JsonHttpResponse
#from rest_framework.authentication import SessionAuthentication, BasicAuthentication
from gargantext.util.db import session
from gargantext.models.nodes import Node
from graphExplorer.functions import get_cooc
class Graph(APIView):
#authentication_classes = (SessionAuthentication, BasicAuthentication)
def get(self, request, corpus_id):
'''
        Graph.get :: Get graph data via the REST API.
Get all the parameters first
graph?field1=ngrams&field2=ngrams&
graph?field1=ngrams&field2=ngrams&start=''&end=''
'''
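        # Example query string (an illustration only; the route wiring is still
        # work in progress, as noted in the commit message):
        #   ?field1=ngrams&field2=ngrams&distance=conditional&bridgeness=5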
# implicit global session
field1 = request.GET.get ('field1' , 'ngrams' )
field2 = request.GET.get ('field2' , 'ngrams' )
start = request.GET.get ('start' , None )
end = request.GET.get ('end' , None )
threshold = request.GET.get ('threshold' , 1 )
bridgeness = request.GET.get ('bridgeness', -1 )
format_ = request.GET.get ('format' , 'json' )
type_ = request.GET.get ('type' , 'node_link' )
distance = request.GET.get ('distance' , 'conditional')
corpus = session.query(Node).filter(Node.id==corpus_id).first()
accepted_field1 = ['ngrams', 'journal', 'source', 'authors']
accepted_field2 = ['ngrams',]
options = ['start', 'end', 'threshold', 'distance']
if field1 in accepted_field1 :
if field2 in accepted_field2 :
if start is not None and end is not None :
data = compute_cooc( corpus
#, field1=field1 , field2=field2
, start=start , end=end
, threshold=threshold
, distance=distance
)
else:
data = compute_cooc( corpus
#, field1=field1, field2=field2
, threshold = threshold
, distance = distance
, bridgeness = bridgeness)
if format_ == 'json':
return JsonHttpResponse(data)
else:
return JsonHttpResponse({
'Warning USAGE' : 'One field for each range:'
, 'field1' : accepted_field1
, 'field2' : accepted_field2
, 'options': options
})
mv /srv/gargantext/static/js/tina* .
from django.conf.urls import patterns, url
from graphExplorer import views
# /!\ urls patterns here are *without* the trailing slash
urlpatterns = patterns('',
url(r'^register/$', views.Register.as_view()), # Register
url(r'^login/$', views.Login.as_view()), # Login
)
@@ -26,7 +26,7 @@ def explorer(request, project_id, corpus_id):
# rendered page : journals.html
return render(
template_name = 'pages/graph.html',
template_name = 'graphExplorer/explorer.html',
request = request,
context = {
'debug' : settings.DEBUG,
@@ -27,3 +27,4 @@ ujson==1.35
umalqurra==0.2 # arabic calendars (?? why use ??)
wheel==0.29.0
pandas==0.18.0
networkx==1.11
<html>
<head>
{% load staticfiles %}
<link rel="stylesheet" href="{% static "css/bootstrap.css" %}">
<link rel="stylesheet" href="{% static "css/bootstrap-theme.min.css" %}">
<link rel="stylesheet" href="{% static "js/libs/jquery/jquery-ui.css" %}" media="screen">
<!-- <link rel="stylesheet" href="{% static "js/libs/bootstrap/css/bootstrap.css" %}" media="screen"> -->
<link rel="stylesheet" href="{% static "js/libs/css2/freshslider.css" %}" media="screen">
<link rel="stylesheet" href="{% static "js/libs/css2/custom.css" %}" media="screen">
<link rel="stylesheet" href="{% static "js/libs/css2/sidebar.css" %}" media="screen">
<style>
#leftcolumn {
font-size: 10px;
width:20%;
overflow: auto;
}
#ctlzoom {
width:7%;
}
#topPapers{
margin: 7px;
padding: 5px 0px 5px 5px;
}
#topPapers ul {
list-style-type: none;
}
#ctlzoom {
position: absolute; right: 16%; bottom: 1%; list-style: none; padding: 0; margin: 0;
}
#category0 ul li { margin: 0 12px 12px 0; }
#category1 ul li { margin: 0 12px 12px 0; }
</style>
<!--
<link rel="stylesheet" href="{% static "js/libs/bootswatch/css/bootswatch.css" %}">
<link rel="stylesheet" href="{% static "js/libs/css2/font.css" %}" type="text/css">
-->
</head>
<body>
<div class="navbar navbar-inverse" role="navigation">
<div class="container">
<div class="navbar-inner">
<button class="navbar-toggle" data-toggle="collapse" data-target=".navbar-collapse">
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" style="line-height:15px; height:10px; padding: 10px 10px;" href="/"><img src="/generated/img/logo.svg" title="Back to home."></a>
</div>
<div class="navbar-collapse collapse">
<ul class="nav navbar-nav">
<li><a href="/about/" title="More informations about the project, its sponsors and its authors.">About</a>
</li>
{% if user.is_authenticated %}
<li><a href="/projects/" title="All your projects are here.">Projects</a></li>
{% endif %}
{% if project %}
<li><a href="/projects/{{project.id}}">{{project.name}}</a></li>
{% endif %}
{% if corpus %}
<li><a href="/projects/{{project.id}}/corpora/{{corpus.id}}">{{corpus.name}}</a></li>
{% endif %}
</ul>
<ul class="nav pull-right">
<li class="dropdown">
<a href="#" role="button" class="dropdown-toggle" data-toggle="dropdown" title="That is your login"><i class="icon-user"></i> {{ user }}<i class="caret"></i>
</a>
<ul class="dropdown-menu">
<li><a tabindex="-1" href="http://www.iscpif.fr/tiki-index.php?page=gargantext_feedback" title="Send us a message (bug, thanks, congrats...)">Report Feedback</a></li>
<li class="divider"></li>
{% if user.is_authenticated %}
<li><a tabindex="-1" href="/auth/logout" title="Click here to logout especially on public devices">Logout</a></li>
{% else %}
<li><a tabindex="-1" href="/auth/">Login</a></li>
{% endif %}
</ul>
</li>
</ul>
</div>
</div>
</div>
<!-- this is the tweakbar -->
<div id="defaultop" class="navbar navbar-default">
<div class="navbar-collapse collapse navbar-responsive-collapse">
<div id="left" style="margin:0em 2em;">
<ul class="nav navbar-nav">
<!--
<li>
<a>
<select id="aselector" onchange="console.log('salut monde')" class="selectpicker" data-style="btn btn-success btn-sm" data-width="auto">
<option value="Document" selected>Scholars</option>
<option value="NGram">Keywords</option>
</select>
</a>
</li> -->
<li>
<a>
<button type="button" id="changetype" class="btn btn-success btn-sm">Change Type</button>
</a>
</li>
<li>
<a>
<button type="button" id="changelevel" class="btn btn-info btn-sm" disabled>Change Level</button>
</a>
</li>
<li>
<a>
<div style="margin:0em 2em;" id="unranged-value"></div>
<label style="margin:0em 2em;" for="unranged-value">selector size</label>
</a>
</li>
<li>
<a>
<div id="graphid" style="visibility: hidden;">{{graphfile}}</div>
<input type="hidden" id="list_id" value="{{ list_id }}"></input>
<div id="jquerytemplatenb" style="visibility: hidden;">{{user.id}}</div>
</a>
</li>
<!--
<li>
<a>
<button type="button" onclick="partialGraph.stopForceAtlas2();" class="btn btn-sm">wu</button>
</a>
</li>
-->
</ul>
<ul id="category0" class="nav navbar-nav navbar-right">
<li>
<ul style="list-style-type: none; margin:.5em 0em 1em 1em;">
<li><div id="slidercat0nodesweight"></div></li>
<li><div id="slidercat0edgesweight"></div></li>
<li><div id="slidercat0nodessize"></div></li>
</ul>
</li>
</ul>
<ul id="filterslegend" class="nav navbar-nav navbar-right">
<li>
<ul style="list-style-type: none;">
<li>Nodes</li>
<li>Edges</li>
<li>Size</li>
</ul>
</li>
</ul>
<ul id="category1" class="nav navbar-nav navbar-right">
<li>
<ul style="list-style-type: none; margin:0em 1em;">
<li><div id="slidercat1nodesweight"></div></li>
<li><div id="slidercat1edgesweight"></div></li>
<li><div id="slidercat1nodessize"></div></li>
</ul>
</li>
</ul>
<ul class="nav navbar-nav navbar-right">
<li><a>
<input type="checkbox" id="checkboxdiv" onclick="alertCheckBox(this);">Add</input>
</a></li>
</ul>
<ul class="nav navbar-nav navbar-right">
<li><a>
<input id="searchinput" autocomplete="off" class="form-control input-sm col-lg-8" placeholder="Search" type="text">
</a></li>
</ul>
<ul class="nav navbar-nav navbar-right">
<li>
<a>
<img width="17%" title="Compare with other corpus!" onclick="GetUserPortfolio(); $('#corpuses').modal('show');" src="{% static "js/libs/img2/INTER.png" %}"></img>
</a>
</li>
</ul>
<div class="colorgraph_div"></div>
<div class="sizegraph_div"></div>
<!---->
</div>
<!--
<ul class="nav navbar-nav navbar-right">
<li><a href="#">Link</a></li>
<li class="dropdown">
<a href="#" class="dropdown-toggle" data-toggle="dropdown">Dropdown <b class="caret"></b></a>
<ul class="dropdown-menu">
<li><a href="#">Action</a></li>
<li><a href="#">Another action</a></li>
<li><a href="#">Something else here</a></li>
<li class="divider"></li>
<li><a href="#">Separated link</a></li>
</ul>
</li>
</ul>
-->
</div><!-- /.nav-collapse -->
</div><!-- /.navbar -->
<div id="wrapper">
<div id="zonecentre">
<!-- Page content -->
<div id="sigma-example"></div>
<div style="visibility: hidden;" id="sigma-othergraph"></div>
<img id="semLoader" style="position:absolute; top:50%; left:40%; width:80px;" src="{% static "js/libs/img2/loading-bar.gif" %}"></img>
<ul id="ctlzoom">
<!-- <div class="content-header">
<button id="menu-toggle">X</button>
</div> -->
<!--
<li>
<a href="#" id="geomapicon" onclick="$('#geomapmodal').modal('show'); callGeomap();">
<img title="World Map Distribution" width="34px" src="{% static "js/libs/img2/world.png" %}" ></img>
</a>
</li>
-->
<li>
<a href="#" id="snapicon" onclick="saveGraphIMG();" >
<img title="Take a photo!" width="34px" src="{% static "js/libs/img2/camera.png" %}" ></img>
</a>
</li>
<li>
<a href="#" id="saveAs">
<img width="30px" title="Save As..." src="{% static "js/libs/img2/save.png" %}" ></img>
</a>
</li>
<li>
<a href="#" id="zoomPlusButton" title="S'approcher"> </a>
</li>
<li id="zoomSliderzone">
<div id="zoomSlider"></div>
</li>
<li>
<a href="#" id="zoomMinusButton" title="S'éloigner"> </a>
</li>
<li>
<a href="#" id="lensButton"> </a>
</li>
<li>
<a href="#" id="edgesButton"> </a>
</li>
</ul>
</div>
<!-- Sidebar -->
<div id="leftcolumn">
<div id="tips"></div>
<div id="names"></div>
<div id="ngrams_actions"></div>
<br>
<div id="tab-container" class='tab-container' style="display: none;">
<ul class='etabs'>
<li id="taboppos" class='tab'><a href="#tabs1">Opposite-Neighbors</a></li>
<li id="tabneigh" class='tab'><a href="#tabs2">Neighbors</a></li>
</ul>
<div class='panel-container'>
<div id="tabs1">
<div id="opossiteNodes"></div>
</div>
<div id="tabs2">
<div id="sameNodes"></div>
</div>
</div>
</div>
<!-- <div id="topPapers"></div> -->
<div id="tab-container-top" class='tab-container' style="display: none;">
<ul class='etabs'>
<li id="tabmed" class='tab active'><a href="#tabs3">Pubs</a></li>
<li id="tabgps" class='tab'><a href="#tabs3"></a></li>
</ul>
<div class='panel-container'>
<div id="tabs3">
<div id="topPapers"></div>
</div>
<div id="tabs4">
<div id="topProposals"></div>
</div>
</div>
</div>
<div id="information"></div>
</div>
</div>
<div id="savemodal" class="modal fade">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-header">
<button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
<h4 class="modal-title">Exporting GEXF file</h4>
</div>
<div class="modal-body form-horizontal">
What do you want to save?:
<div class="form-group">
<label class="col-lg-2 control-label"> </label>
<div class="col-lg-10">
<div class="radio">
<label>
<input type="radio" name="optionsRadios" id="fullgraph" value="option1" checked="true">
Full Graph
</label>
</div>
<div class="radio">
<label>
<input type="radio" name="optionsRadios" id="visgraph" value="option2">
Visible Graph
</label>
</div>
</div>
</div>
</div>
<div class="modal-body form-horizontal">
Which attributes do you want to keep?
<div class="form-group">
<label class="col-lg-2 control-label"> </label>
<div class="col-lg-10">
<div class="checkbox">
<label>
<input type="checkbox" name="optionsRadios" id="check_size" value="option1">
Size
</label>
</div>
<div class="checkbox">
<label>
<input type="checkbox" name="optionsRadios" id="check_color" value="option2">
Color
</label>
</div>
</div>
</div>
</div>
<div class="modal-footer">
<button id="closesavemodal" type="button" class="btn btn-default" data-dismiss="modal">Close</button>
<button type="button" class="btn btn-primary" onclick="saveGraph();">Save File</button>
</div>
</div>
</div>
</div>
<div id="corpuses" class="modal fade">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-header">
<button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
<h3 class="modal-title">Corpus Comparison Tool</h3>
</div>
<div class="modal-body form-horizontal">
<h4>Choose one corpus:</h4>
<div style="color:red;" id="selected_corpus"></div>
<div id="user_portfolio">
</div>
<div class="modal-footer">
<button id="closecorpuses" type="button" class="btn btn-default" data-dismiss="modal">Close</button>
<button id="add_corpus_tab" type="button" class="btn btn-primary" disabled onclick='printCorpuses();'>Add Tab</button>
</div>
</div>
</div>
</div>
<div id="modalloader" class="modal fade">
<div id="loader" class="loader">
<img src="{% static "js/libs/img2/loader.gif" %}" ></img>
</div>
<div id="closeloader" data-dismiss="modal"></div>
</div>
<script src="{% static "js/jquery/jquery.min.js" %}" type="text/javascript"></script>
<script src="{% static "js/libs/jquery/jquery-ui.js" %}" type="text/javascript"></script>
<script src="{% static "js/libs/jquery/jquery.ba-dotimeout.min.js" %}" type="text/javascript"></script>
<script src="{% static "js/libs/jquery/jquery.mousewheel.min.js" %}" type="text/javascript"></script>
<script type="text/javascript" src="{% static "js/libs/freshslider.1.0.js" %}"></script>
<script type="text/javascript" src="{% static "js/libs/readmore.js" %}"></script>
<script type="text/javascript" src="{% static "js/libs/jquery/jquery.easytabs.min.js" %}"></script>
<script src="{% static "js/libs/bootstrap/js/bootstrap.min.js" %}"></script>
<script src="{% static "js/libs/bootstrap/js/bootstrap-modal.js" %}" type="text/javascript"></script>
<script src="{% static "js/libs/bootstrap/js/bootstrap-hover-dropdown.min.js" %}" type="text/javascript"></script>
<script src="{% static "js/tinawebJS/globalUtils.js" %}" type="text/javascript"></script>
<script src="{% static "js/tinawebJS/plugins/jLouvain.js" %}" type="text/javascript"></script>
<script src="{% static "js/tinawebJS/sigma.min.js" %}" type="text/javascript" language="javascript"></script>
<script src="{% static "js/tinawebJS/sigma.forceatlas2.js" %}" type="text/javascript" language="javascript"></script>
<script src="{% static "js/settings_explorerjs.js" %}" type="text/javascript" language="javascript"></script>
<script src="{% static "js/tinawebJS/sigma.parseCustom.js" %}" type="text/javascript" language="javascript"></script>
<script src="{% static "js/extras_explorerjs.js" %}" type="text/javascript" language="javascript"></script>
<script src="{% static "js/tinawebJS/sigmaUtils.js" %}" type="text/javascript" language="javascript"></script>
<script src="{% static "js/tinawebJS/methods.js" %}" type="text/javascript" language="javascript"></script>
<script src="{% static "js/tinawebJS/minimap.js" %}" type="text/javascript" language="javascript"></script>
<script src="{% static "js/tinawebJS/enviroment.js" %}" type="text/javascript" language="javascript"></script>
<script src="{% static "js/tinawebJS/asyncFA2.js" %}" type="text/javascript" language="javascript"></script>
<script src="{% static "js/tinawebJS/Tinaweb.js" %}" type="text/javascript" language="javascript"></script>
<script src="{% static "js/tinawebJS/main.js" %}" type="text/javascript" language="javascript"></script>
<script type="text/javascript">
function newPopup(url) {
popupWindow = window.open(
url,'popUpWindow','height=700,width=800,left=10,top=10,resizable=yes,scrollbars=yes,toolbar=no,menubar=no,location=no,directories=no,status=no')
}
$('#tab-container').easytabs({updateHash:false});
// $('#tab-container-top').easytabs({updateHash:false});
</script>
</body>
</html>
@@ -66,9 +66,10 @@
<div class="col-md-12">
<div class="btn-group btn-group-justified">
<center>
<!--
<a type="button" class="btn btn-default
href="/projects/{{project.id}}/corpora/{{ corpus.id }}/">Export corpus</a>
-->
<!-- <li class="divider"></li> -->
<a type="button" class="btn btn-default
@@ -84,9 +85,11 @@
<!-- <li class="divider"></li> --!>
<!-- FIXME put a separator here --!>
<!--
<a type="button" class="btn btn-default
{% if view == "charts" %}active{%endif%}"
href="/projects/{{project.id}}/corpora/{{ corpus.id }}/terms">Advanced Charts</a>
-->
<!-- FIXME a pop-up for the advanced mode of graphs -->
<a type="button" class="btn btn-default
@@ -95,9 +98,11 @@
<a type="button" class="btn btn-default
{% if view == "distributional" %}active{%endif%}"
href="/projects/{{project.id}}/corpora/{{ corpus.id }}/graph">Graphs (Distributional)</a>
<!--
<a type="button" class="btn btn-default
{% if view == "journalTerms" %}active{%endif%}"
href="/projects/{{project.id}}/corpora/{{ corpus.id }}/graph">Graphs Journal/Terms</a>
-->
</center>
</div>
</div>