Commit 236873ae authored by PkSM3

geomap OK for soc/sem selection

parent 9e08ba53
@@ -8,27 +8,50 @@ function callGeomap(){
         jsonparams='["unique_id"]&unique_id='+getUrlParam.nodeidparam;
     } else {
-        N=0;
-        k=0;
-        cats=(categoriesIndex.length);
-        arr={};
-        if(cats==2 && swclickActual=="social") {
-            N=Object.keys(partialGraph._core.graph.nodes.filter(function(n){return n.type==catSoc})).length;
-            arr=nodes1;
+        N=getNodesByAtt(catSoc).length;
+        nodesA = []
+        nodesB = []
+        socneigh = []
+        for(var i in selections) {
+            if(Nodes[i].type==catSoc) nodesA.push(i);
+            if(Nodes[i].type==catSem) nodesB.push(i);
         }
-        if(cats==2 && swclickActual=="semantic") {
-            N=Object.keys(partialGraph._core.graph.nodes.filter(function(n){return n.type==catSem})).length;
-            arr=nodes2;
+        if(nodesA.length==0 && nodesB.length>0) socneigh = getArrSubkeys(opos,"key");
+        if(nodesA.length>0 && nodesB.length>0) socneigh = getNeighs(nodesB,bipartiteN2D);
+        kSels = {}
+        for(var i in nodesA) {
+            kSels[nodesA[i]] = 1;
         }
-        if(cats==1)
-            N=Object.keys(Nodes).length;
-        temp=getNeighs(selections,arr);
-        sel_plus_neigh=Object.keys(temp);
-        k=sel_plus_neigh.length;
-        // if(N==k) jsonparams='["all"]';
+        for(var i in socneigh) {
+            kSels[socneigh[i]] = 1;
+        }
+        k=Object.keys(kSels).length;
+        // cats=(categoriesIndex.length);
+        // arr={};
+        // if(cats==2 && swclickActual=="social") {
+        //     N=Object.keys(partialGraph._core.graph.nodes.filter(function(n){return n.type==catSoc})).length;
+        //     arr=nodes1;
+        // }
+        // if(cats==2 && swclickActual=="semantic") {
+        //     N=Object.keys(partialGraph._core.graph.nodes.filter(function(n){return n.type==catSem})).length;
+        //     arr=nodes2;
+        // }
+        // if(cats==1)
+        //     N=Object.keys(Nodes).length;
+        // temp=getNeighs(Object.keys(selections),arr);
+        // sel_plus_neigh=Object.keys(temp);
+        // k=sel_plus_neigh.length;
+        // // if(N==k) jsonparams='["all"]';
+        pr ("N: "+N+" - k: "+k)
         if(N==k) jsonparams='["unique_id"]&unique_id='+getUrlParam.nodeidparam;
-        else jsonparams=JSON.stringify(sel_plus_neigh);
+        else jsonparams=JSON.stringify(Object.keys(kSels));
         //jsonparams=JSON.stringify(getSelections());
         //jsonparams = jsonparams.split('&').join('__and__');
...
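For readers skimming the hunk above: the new branch splits the current selection into social (scholar) and semantic (keyword) nodes, expands the semantic picks to their social neighbours, and counts the resulting unique scholars (k) against the total number of social nodes (N, via the new getNodesByAtt helper later in this commit). The sketch below is a self-contained illustration of that computation; the function name countGeomapSelection and the sample data shapes are assumptions standing in for the page globals (selections, Nodes, bipartiteN2D, opos) used by the real code.

// Minimal sketch (assumed data shapes) of the soc/sem selection counting:
// `selections` is an object keyed by selected node id, `nodes` maps ids to
// {type: ...}, and `bipartite` maps a semantic (keyword) id to its social
// (scholar) neighbour ids, playing the role of bipartiteN2D in the diff.
function countGeomapSelection(selections, nodes, bipartite) {
    var nodesA = [];   // selected social ids
    var nodesB = [];   // selected semantic ids
    for (var id in selections) {
        if (nodes[id].type === "social")   nodesA.push(id);
        if (nodes[id].type === "semantic") nodesB.push(id);
    }
    // Semantic selections are expanded to their social neighbours.
    var socneigh = [];
    for (var j = 0; j < nodesB.length; j++) {
        socneigh = socneigh.concat(bipartite[nodesB[j]] || []);
    }
    // kSels is an object used as a set, so each scholar is counted once.
    var kSels = {};
    nodesA.forEach(function (id) { kSels[id] = 1; });
    socneigh.forEach(function (id) { kSels[id] = 1; });
    return Object.keys(kSels);
}

// One scholar selected directly, one reached through a selected keyword:
var sampleNodes = { "D::1": {type: "social"}, "D::2": {type: "social"}, "N::9": {type: "semantic"} };
console.log(countGeomapSelection({ "D::1": 1, "N::9": 1 }, sampleNodes, { "N::9": ["D::2"] }));
// -> [ 'D::1', 'D::2' ]

When every scholar ends up covered (N==k), callGeomap falls back to the '["unique_id"]' query string instead of serializing the whole id list, as the last lines of the hunk show.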
@@ -44,7 +44,7 @@ function getGeomapDiv(min,max){
     txt+=' </div>';
     txt+='</div>';
     txt+='<div class="therange">';
-    txt+=' <div>Range of Publications:</div>';
+    txt+=' <div>Range of Scholars:</div>';
     txt+=' <div class="min">'+min+'</div>&nbsp;';
     txt+=' <div class="distBar"></div>&nbsp;';
     txt+=' <div class="max">'+max+'</div>';
...
@@ -30,7 +30,7 @@
     <p>Press mouse and release here.</p>
+    <img src="img/descarga.png"></img>
 </div>
...
 # -*- coding: utf-8 -*-
 import sqlite3
-#import pprint
 import networkx as nx
 import random
 import math
 import cgi
+import json
 import sys
 reload(sys)
@@ -12,8 +12,8 @@ sys.setdefaultencoding('utf-8')
 class extract:
-    def __init__(self):
-        self.connection=sqlite3.connect('../community.db')
+    def __init__(self,dbpath):
+        self.connection=sqlite3.connect(dbpath)
         self.connection.row_factory = sqlite3.Row# Magic line!
         self.cursor=self.connection.cursor()
         self.scholars = {}
@@ -81,129 +81,6 @@ class extract:
             yield l[i:i+n]
-    def testSQLimit(self,scholar_array):
-        for scholar_id in scholar_array:
-            sql3='SELECT * FROM scholars where unique_id="'+scholar_id+'"'
-            try:
-                self.cursor.execute(sql3)
-                res3=self.cursor.fetchall()
-                n=len(res3)#in the DB, there are unique_ids duplicated
-                info = {};
-                #With (n-1) we're fetching only the last result.
-                ide="D::"+str(res3[n-1]['id']);
-                info['id'] = ide;
-                info['unique_id'] = res3[n-1]['unique_id'];
-                info['photo_url'] = res3[n-1]['photo_url'];
-                info['first_name'] = res3[n-1]['first_name'];
-                info['initials'] = res3[n-1]['initials'];
-                info['last_name'] = res3[n-1]['last_name'];
-                info['nb_keywords'] = res3[n-1]['nb_keywords'];
-                info['css_voter'] = res3[n-1]['css_voter'];
-                info['css_member'] = res3[n-1]['css_member'];
-                info['keywords_ids'] = res3[n-1]['keywords_ids'].split(',');
-                info['keywords'] = res3[n-1]['keywords'];
-                info['country'] = res3[n-1]['country'];
-                info['homepage'] = res3[n-1]['homepage'];
-                info['lab'] = res3[n-1]['lab'];
-                info['affiliation'] = res3[n-1]['affiliation'];
-                info['lab2'] = res3[n-1]['lab2'];
-                info['affiliation2'] = res3[n-1]['affiliation2'];
-                info['homepage'] = res3[n-1]['homepage'];
-                info['title'] = res3[n-1]['title'];
-                info['position'] = res3[n-1]['position'];
-                info['job_market'] = res3[n-1]['job_market'];
-                info['login'] = res3[n-1]['login'];
-                if info['nb_keywords']>0:
-                    self.scholars[ide] = info;
-            except Exception as error:
-                print "sql3:\t"+sql3
-                print error
-        # génère le gexf
-        # include('gexf_generator.php');
-        imsize=80;
-        termsMatrix = {};
-        scholarsMatrix = {};
-        scholarsIncluded = 0;
-        for i in self.scholars:
-            self.scholars_colors[self.scholars[i]['login'].strip()]=0;
-            scholar_keywords = self.scholars[i]['keywords_ids'];
-            for k in range(len(scholar_keywords)):
-                if scholar_keywords[k] != None and scholar_keywords[k]!="":
-                    #print scholar_keywords[k]
-                    if termsMatrix.has_key(scholar_keywords[k]):
-                        termsMatrix[scholar_keywords[k]]['occ'] = termsMatrix[scholar_keywords[k]]['occ'] + 1
-                        for l in range(len(scholar_keywords)):
-                            if termsMatrix[scholar_keywords[k]]['cooc'].has_key(scholar_keywords[l]):
-                                termsMatrix[scholar_keywords[k]]['cooc'][scholar_keywords[l]] += 1
-                            else:
-                                termsMatrix[scholar_keywords[k]]['cooc'][scholar_keywords[l]] = 1;
-                    else:
-                        termsMatrix[scholar_keywords[k]]={}
-                        termsMatrix[scholar_keywords[k]]['occ'] = 1;
-                        termsMatrix[scholar_keywords[k]]['cooc'] = {};
-                        for l in range(len(scholar_keywords)):
-                            if termsMatrix[scholar_keywords[k]]['cooc'].has_key(scholar_keywords[l]):
-                                termsMatrix[scholar_keywords[k]]['cooc'][scholar_keywords[l]] += 1
-                            else:
-                                termsMatrix[scholar_keywords[k]]['cooc'][scholar_keywords[l]] = 1;
-        sql='select login from jobs';
-        for res in self.cursor.execute(sql):
-            if res['login'].strip() in self.scholars_colors:
-                self.scholars_colors[res['login'].strip()]+=1;
-        sql="SELECT term,id,occurrences FROM terms"
-        #self.cursor.execute(sql)
-        cont=0
-        for t in termsMatrix:
-            if cont==0:
-                sql+=' where id='+t
-                cont+=1
-            else: sql+=' or id='+t
-        print "before crash"
-        print len(termsMatrix)
-        # sql="SELECT term,id,occurrences FROM terms where id=123 OR id=343 ... or id=978"
-        sqlarray = []
-        chunkedTerms = list(self.chunks(termsMatrix.keys(), 500))
-        for chunk_i in chunkedTerms:
-            if len(chunk_i)>0:
-                query = "SELECT term,id,occurrences FROM terms where id="
-                conditions = " or id=".join(chunk_i)
-                sqlarray.append(query+conditions)
-        # import pprint
-        # pprint.pprint(sqlarray)
-        for sql in sqlarray:
-            for res in self.cursor.execute(sql):
-                idT = res['id']
-                info = {}
-                info['id'] = idT
-                info['occurrences'] = res['occurrences']
-                info['term'] = res['term']
-                self.terms_array[idT] = info
-        count=1
     def extract(self,scholar_array):
         for scholar_id in scholar_array:
             sql3='SELECT * FROM scholars where unique_id="'+scholar_id+'"'
@@ -303,8 +180,6 @@ class extract:
                 conditions = " or id=".join(chunk_i)
                 sqlarray.append(query+conditions)
-        # import pprint
-        # pprint.pprint(sqlarray)
         for sql in sqlarray:
             for res in self.cursor.execute(sql):
@@ -404,10 +279,6 @@ class extract:
     def toHTML(self,string):
         return cgi.escape(string).encode("ascii", "xmlcharrefreplace")
-    def iterNodeAtts(self):
-        print "asdf"
     def buildJSON_sansfa2(self,graph,coordsRAW=None):
         nodesA=0
@@ -419,7 +290,6 @@ class extract:
         nodes = {}
        edges = {}
         if coordsRAW:
-            import json
             xy = coordsRAW #For FA2.java: json.loads(coordsRAW)
             #print xy
             coords = {}
@@ -570,129 +440,3 @@ class extract:
-    def buildSimpleJSONFinal(self,graph):
-        print "gonna build a mofo json in buildSimpleJSONFinal"
-        nodes = {}
-        nodes2 = []
-        edges = []
-        countnodes=0
-        #for n in graph.edges_iter():
-        #    print n[0] + "," + n[1]
-        #    pprint.pprint( graph[n[0]][n[1]] )
-        for idNode in graph.nodes_iter():
-            if idNode[0]=="N":#If it is NGram
-                numID=int(idNode.split("::")[1])
-                nodeLabel= self.terms_array[numID]['term'].replace("&"," and ")
-                colorg=max(0,180-(100*self.terms_colors[numID]))
-                term_occ = self.terms_array[numID]['occurrences']
-                node = {}
-                #node["nID"] = countnodes
-                #node["sID"] = idNode
-                #node["id"] = countnodes
-                node["id"] = idNode
-                #node["group"] = 1
-                #node["name"] = nodeLabel
-                #node["color"] = "green"
-                #node["occ"] = int(term_occ)
-                node["degree"] = 0
-                node["size"] = int(term_occ)
-                #node["x"] = str(coords[idNode][0])
-                #node["y"] = str(coords[idNode][1])
-                nodes2.append(node)
-                nodes[idNode] = countnodes
-            if idNode[0]=='D':#If it is Document
-                nodeLabel= self.scholars[idNode]['title']+" "+self.scholars[idNode]['first_name']+" "+self.scholars[idNode]['initials']+" "+self.scholars[idNode]['last_name']
-                color=""
-                if self.scholars_colors[self.scholars[idNode]['login']]==1:
-                    color='243,183,19'
-                elif self.scholars[idNode]['job_market'] == "Yes":
-                    color = '139,28,28'
-                else:
-                    color = '78,193,127'
-                content=""
-                photo_url=self.scholars[idNode]['photo_url']
-                if photo_url != "":
-                    content += '<img src=http://main.csregistry.org/' + photo_url + ' width=' + str(self.imsize) + 'px style=float:left;margin:5px>';
-                else:
-                    if len(self.scholars)<2000:
-                        im_id = int(math.floor(random.randint(0, 11)))
-                        content += '<img src=http://communityexplorer.csregistry.org/img/' + str(im_id) + '.png width=' + str(self.imsize) + 'px style=float:left;margin:5px>'
-                        #print '<img src=http://communityexplorer.csregistry.org/img/' + str(im_id) + '.png'
-                content += '<b>Country: </b>' + self.scholars[idNode]['country'] + '</br>'
-                if self.scholars[idNode]['position'] != "":
-                    content += '<b>Position: </b>' +self.scholars[idNode]['position'].replace("&"," and ")+ '</br>'
-                affiliation=""
-                if self.scholars[idNode]['lab'] != "":
-                    affiliation += self.scholars[idNode]['lab']+ ','
-                if self.scholars[idNode]['affiliation'] != "":
-                    affiliation += self.scholars[idNode]['affiliation']
-                if self.scholars[idNode]['affiliation'] != "" or self.scholars[idNode]['lab'] != "":
-                    content += '<b>Affiliation: </b>' + affiliation.replace("&"," and ") + '</br>'
-                if len(self.scholars[idNode]['keywords']) > 3:
-                    content += '<b>Keywords: </b>' + self.scholars[idNode]['keywords'][:-2].replace(",",", ")+'.</br>'
-                if self.scholars[idNode]['homepage'][0:3] == "www":
-                    content += '[ <a href=http://' +self.scholars[idNode]['homepage'].replace("&"," and ")+ ' target=blank > View homepage </a ><br/>]'
-                elif self.scholars[idNode]['homepage'][0:4] == "http":
-                    content += '[ <a href=' +self.scholars[idNode]['homepage'].replace("&"," and ")+ ' target=blank > View homepage </a ><br/>]'
-                node = {}
-                #node["nID"] = countnodes
-                #node["sID"] = idNode
-                #node["id"] = countnodes
-                #node["group"] = 2
-                #node["name"] = '"'+nodeLabel+'"'
-                #node["color"] = "orange"
-                #node["occ"] = 12
-                #node["x"] = str(coords[idNode][0])
-                #node["y"] = str(coords[idNode][1])
-                #node["content"] = self.toHTML(content)
-                node["id"] = idNode
-                node["degree"] = 0
-                node["size"] = 12
-                nodes2.append(node)
-                nodes[idNode] = countnodes
-            countnodes+=1
-        e = 0
-        for n in self.Graph.edges_iter():#Memory, what's wrong with you?
-            weight = str("%.2f" % self.Graph[n[0]][n[1]]['weight'])
-            edge = {}
-            edge["source"] = n[0].encode('utf-8')
-            edge["target"] = n[1].encode('utf-8')
-            nodes2[nodes[edge["source"]]]['degree']+=1 # incrementing the degree
-            nodes2[nodes[edge["target"]]]['degree']+=1
-            edge["weight"] = str(self.Graph[n[0]][n[1]]['weight'])
-            #edge["type"] = self.Graph[n[0]][n[1]]['type']
-            edges.append(edge)
-            e+=1
-            #if e%1000 == 0:
-            #    print e
-        graph = {}
-        graph["nodes"] = nodes2
-        graph["edges"] = edges
-        return graph
@@ -15,20 +15,8 @@ app = Flask(__name__)
 @app.route("/getJSON")
 def main():
-    #query = request.args['query']
-    #{"categorya"%3A"Keywords"%2C"categoryb"%3A"Scholars"%2C"keywords"%3A[]%2C"countries"%3A["Chile"]%2C"laboratories"%3A[]%2C"coloredby"%3A[]%2C"tags"%3A[]%2C"organizations"%3A[]}
-    # i=int(sys.argv[2])
-    # unique_id = sys.argv[1]
-    # db=SQLite(unique_id)
-    # db.extract()
-    # < Data Extraction > #
-    # i = int(request.args['it'])
-    db=SQLite()
+    db=SQLite('../community.db')
     if request.args.has_key("query"):
         filteredquery = request.args['query']
@@ -40,32 +28,11 @@ def main():
     db.extract(scholars)
     # < / Data Extraction > #
-    # Arnaud Banos network:
-    # Bruce Edmonds exists twice, but one of them has no keywords
-    # import pprint as p
-    # A=tempGraph["edges"]
-    # for j in A:
-    #     s=j["source"]
-    #     t=j["target"]
-    #     if s=="D::593" or t=="D::593":
-    #         print j
-    # spatialized = ForceAtlas2(tempGraph)
-    # spatialized.init()
-    # spatialized.getGraph()
-    # for i in range(0,i):
-    #     spatialized.atomicGo()
     graphArray = db.buildJSON_sansfa2(db.Graph)
     return json.dumps(graphArray)
 if __name__ == "__main__":
-    # main()
     app.run(port=8080)
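Since this diff only shows the server side of the /getJSON route, here is a hypothetical client call for context; the query-parameter encoding and the {nodes, edges} response shape are assumptions (the latter modeled on the JSON builders elsewhere in this file), not something this commit specifies.

// Hypothetical browser-side call to the Flask /getJSON route above.
var ids = ["D::1", "D::2"];                       // made-up scholar ids
var xhr = new XMLHttpRequest();
xhr.open("GET", "http://localhost:8080/getJSON?query=" + encodeURIComponent(JSON.stringify(ids)));
xhr.onload = function () {
    var graph = JSON.parse(xhr.responseText);     // assumed shape: {nodes: [...], edges: [...]}
    console.log(graph.nodes.length + " nodes, " + graph.edges.length + " edges");
};
xhr.send();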
@@ -1850,10 +1850,14 @@ function saveGraphIMG(){
     var edgesDiv = partialGraph._core.domElements.edges;
     var edgesCtx = edgesDiv.getContext("2d");
+    var hoverDiv = partialGraph._core.domElements.hover;
+    var hoverCtx = hoverDiv.getContext("2d");
     var labelsDiv = partialGraph._core.domElements.labels;
     var labelsCtx = labelsDiv.getContext("2d");
+    nodesCtx.drawImage(hoverDiv,0,0);
     nodesCtx.drawImage(labelsDiv,0,0);
     edgesCtx.drawImage(nodesDiv,0,0);
...
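The saveGraphIMG hunk above adds the hover canvas to the layers merged before export. As a rough, standalone illustration of that drawImage-based compositing (the element ids below are hypothetical, not the ones this repo uses):

// Merge stacked canvas layers into one exportable PNG, bottom to top.
function flattenLayers(layerIds) {
    var base = document.getElementById(layerIds[0]);
    var ctx = base.getContext("2d");
    for (var i = 1; i < layerIds.length; i++) {
        ctx.drawImage(document.getElementById(layerIds[i]), 0, 0);
    }
    return base.toDataURL("image/png");   // usable as the href of a download link
}

// e.g. flattenLayers(["edges_layer", "nodes_layer", "hover_layer", "labels_layer"]);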
@@ -66,6 +66,13 @@ function getVisibleNodes() {
     });
 }
+function getNodesByAtt(att) {
+    return partialGraph._core.graph.nodes.filter(function(n) {
+        return n['type']==att;
+    });
+}
 function getn(id){
     return partialGraph._core.graph.nodesIndex[id];
 }
@@ -123,19 +130,32 @@ function getSelections(){
     return params;
 }
-//i added an excpt... why
+//This receives an array not a dict!
+// i added an excpt... why
 function getNeighs(sels,arr){
     neighDict={};
     for(var i in sels) {
-        if(!isUndef(arr[i])) {
-            A=arr[i].neighbours;
+        id = sels[i]
+        if(!isUndef(arr[id])) {
+            A=arr[id].neighbours;
             for(var j in A){
                 neighDict[A[j]]=1
             }
-            neighDict[i]=1;
+            neighDict[id]=1;
         }
     }
-    return neighDict;
+    return Object.keys(neighDict);
+}//It returns an array not a dict!
+
+//to general utils
+function getArrSubkeys(arr,id) {
+    var result = []
+    for(var i in arr) {
+        result.push(arr[i][id])
+    }
+    return result;
 }
 //to general utils
...
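A short usage sketch of the two helpers above, reflecting the array-in/array-out contract that the new comments on getNeighs spell out; the sample ids are invented and the calls assume this file (including its isUndef helper) is loaded.

var neighbourMap = {
    "N::9": { neighbours: ["D::1", "D::2"] },
    "N::7": { neighbours: ["D::2"] }
};
// getNeighs now takes an ARRAY of ids and returns an ARRAY of those ids plus their neighbours:
console.log(getNeighs(["N::9", "N::7"], neighbourMap));            // ["D::1", "D::2", "N::9", "N::7"]
// getArrSubkeys plucks one field out of every record:
console.log(getArrSubkeys([{key: "D::1"}, {key: "D::5"}], "key")); // ["D::1", "D::5"]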