Commit 55a2c04d authored by PkSM3

Merge branch 'unstable' of ssh://delanoe.org:1979/gargantext into samuel

parents f751059f b4f15d03
...
@@ -120,7 +120,7 @@ def do_cooc(corpus=None
     if start is not None:
         #date_start = datetime.datetime.strptime ("2001-2-3 10:11:12", "%Y-%m-%d %H:%M:%S")
         # TODO : more complexe date format here.
-        date_start = datetime.datetime.strptime (str(start), "%Y")
+        date_start = datetime.datetime.strptime (str(start), "%Y-%m-%d")
         date_start_utc = date_start.strftime("%Y-%m-%d %H:%M:%S")
         Start=aliased(NodeHyperdata)
...
@@ -134,7 +134,7 @@ def do_cooc(corpus=None
     if end is not None:
         # TODO : more complexe date format here.
-        date_end = datetime.datetime.strptime (str(end), "%Y")
+        date_end = datetime.datetime.strptime (str(end), "%Y-%m-%d")
         date_end_utc = date_end.strftime("%Y-%m-%d %H:%M:%S")
         End=aliased(NodeHyperdata)
...
from django.http import HttpResponse, Http404
from django.core.exceptions import PermissionDenied, SuspiciousOperation
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from sqlalchemy import text, distinct, or_,not_
from sqlalchemy.sql import func
from sqlalchemy.orm import aliased
import datetime
import copy
import collections
from gargantext_web.views import move_to_trash
from gargantext_web.db import *
from gargantext_web.validation import validate, ValidationException
from node import models
def DebugHttpResponse(data):
    return HttpResponse('<html><body style="background:#000;color:#FFF"><pre>%s</pre></body></html>' % (str(data), ))


import json

class JSONEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, datetime.datetime):
            return obj.isoformat()[:19] + 'Z'
        else:
            return super(self.__class__, self).default(obj)

json_encoder = JSONEncoder(indent=4)

def JsonHttpResponse(data, status=200):
    return HttpResponse(
        content      = json_encoder.encode(data),
        content_type = 'application/json; charset=utf-8',
        status       = status
    )

Http400 = SuspiciousOperation
Http403 = PermissionDenied

import csv

def CsvHttpResponse(data, headers=None, status=200):
    response = HttpResponse(
        content_type = "text/csv",
        status       = status
    )
    writer = csv.writer(response, delimiter=',')
    if headers:
        writer.writerow(headers)
    for row in data:
        writer.writerow(row)
    return response
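Taken together, these helpers give every view a uniform way to emit JSON (with datetimes rendered as ISO-8601 "Z" strings by the encoder above) or CSV. A minimal usage sketch, not part of this commit (the view name and payload are illustrative):

def stats_view(request):
    # hypothetical view built on the helpers defined above
    data = {
        'generated_at': datetime.datetime(2015, 4, 1, 12, 0, 0),  # encoded as "2015-04-01T12:00:00Z"
        'rows': [('2014-01-01', 42), ('2014-02-01', 17)],
    }
    if request.GET.get('format') == 'csv':
        return CsvHttpResponse(data['rows'], headers=('date', 'value'))
    return JsonHttpResponse(data)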
from rest_framework.authentication import SessionAuthentication, BasicAuthentication
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.exceptions import APIException as _APIException

class APIException(_APIException):
    def __init__(self, message, code=500):
        self.status_code = code
        self.detail = message


class NodeNgramsQueries(APIView):

    _resolutions = {
        'second':  lambda d: d + datetime.timedelta(seconds=1),
        'minute':  lambda d: d + datetime.timedelta(minutes=1),
        'hour':    lambda d: d + datetime.timedelta(hours=1),
        'day':     lambda d: d + datetime.timedelta(days=1),
        'week':    lambda d: d + datetime.timedelta(days=7),
        'month':   lambda d: (d + datetime.timedelta(days=32)).replace(day=1),
        'year':    lambda d: (d + datetime.timedelta(days=367)).replace(day=1, month=1),
        'decade':  lambda d: (d + datetime.timedelta(days=3660)).replace(day=1, month=1),
        'century': lambda d: (d + datetime.timedelta(days=36600)).replace(day=1, month=1),
    }
    def post(self, request, project_id):
        # example only
        input = request.data or {
            'x': {
                'with_empty': True,
                'resolution': 'decade',
                'value': 'publication_date',
            },
            'y': {
                # 'divided_by': 'total_ngrams_count',
                # 'divided_by': 'total_documents_count',
            },
            'filter': {
                # 'ngrams': ['bees', 'bee', 'honeybee', 'honeybees', 'honey bee', 'honey bees'],
                # 'ngrams': ['insecticide', 'pesticide'],
                # 'corpora': [52633],
                # 'date': {'min': '1995-12-31'}
            },
            # 'format': 'csv',
        }
        # input validation
        input = validate(input, {'type': dict, 'default': {}, 'items': {
            'x': {'type': dict, 'default': {}, 'items': {
                # which hyperdata to choose for the date
                'value': {'type': str, 'default': 'publication_date', 'range': {'publication_date', }},
                # time resolution
                'resolution': {'type': str, 'range': self._resolutions.keys(), 'default': 'month'},
                # should we add zeroes for empty values?
                'with_empty': {'type': bool, 'default': False},
            }},
            'y': {'type': dict, 'default': {}, 'items': {
                # measured value
                'value': {'type': str, 'default': 'ngrams_count', 'range': {'ngrams_count', 'documents_count', 'ngrams_tfidf'}},
                # value by which we should normalize
                'divided_by': {'type': str, 'range': {'total_documents_count', 'documents_count', 'total_ngrams_count'}},
            }},
            # filtering
            'filter': {'type': dict, 'default': {}, 'items': {
                # filter by date
                'date': {'type': dict, 'items': {
                    'min': {'type': datetime.datetime},
                    'max': {'type': datetime.datetime},
                }, 'default': {}},
                # filter by corpora
                'corpora': {'type': list, 'default': [], 'items': {'type': int}},
                # filter by ngrams
                'ngrams': {'type': list, 'default': [], 'items': {'type': str}},
            }},
            # output format
            'format': {'type': str, 'default': 'json', 'range': {'json', 'csv'}},
        }})
        # build query: prepare columns
        column_x = func.date_trunc(input['x']['resolution'], Node_Hyperdata.value_datetime)
        column_y = {
            'documents_count': func.count(Node.id),
            'ngrams_count': func.sum(Node_Ngram.weight),
            # 'ngrams_tfidf': func.sum(Node_Node_Ngram.weight),
        }[input['y']['value']]
        # build query: base
        query_base = (session
            .query(column_x)
            .select_from(Node)
            .join(Node_Ngram, Node_Ngram.node_id == Node.id)
            .join(Node_Hyperdata, Node_Hyperdata.node_id == Node_Ngram.node_id)
            .join(Hyperdata, Hyperdata.id == Node_Hyperdata.hyperdata_id)
            .filter(Hyperdata.name == input['x']['value'])
            .group_by(column_x)
            .order_by(column_x)
        )
        # build query: base, filter by corpora or project
        if 'corpora' in input['filter'] and input['filter']['corpora']:
            query_base = (query_base
                .filter(Node.parent_id.in_(input['filter']['corpora']))
            )
        else:
            ParentNode = aliased(Node)
            query_base = (query_base
                .join(ParentNode, ParentNode.id == Node.parent_id)
                .filter(ParentNode.parent_id == project_id)
            )
        # build query: base, filter by date
        if 'date' in input['filter']:
            if 'min' in input['filter']['date']:
                query_base = query_base.filter(Node_Hyperdata.value_datetime >= input['filter']['date']['min'])
            if 'max' in input['filter']['date']:
                query_base = query_base.filter(Node_Hyperdata.value_datetime <= input['filter']['date']['max'])
        # build query: filter by ngrams
        query_result = query_base.add_columns(column_y)
        if 'ngrams' in input['filter'] and input['filter']['ngrams']:
            query_result = (query_result
                .join(Ngram, Ngram.id == Node_Ngram.ngram_id)
                .filter(Ngram.terms.in_(input['filter']['ngrams']))
            )
        # build result: prepare data
        date_value_list = query_result.all()
        if date_value_list:
            date_min = date_value_list[0][0].replace(tzinfo=None)
            date_max = date_value_list[-1][0].replace(tzinfo=None)
        # build result: prepare interval
        result = collections.OrderedDict()
        if input['x']['with_empty'] and date_value_list:
            compute_next_date = self._resolutions[input['x']['resolution']]
            date = date_min
            while date <= date_max:
                result[date] = 0.0
                date = compute_next_date(date)
        # build result: integrate
        for date, value in date_value_list:
            result[date.replace(tzinfo=None)] = value
        # build result: normalize
        query_normalize = None
        if date_value_list and 'divided_by' in input['y'] and input['y']['divided_by']:
            if input['y']['divided_by'] == 'total_documents_count':
                query_normalize = query_base.add_column(func.count(Node.id))
            elif input['y']['divided_by'] == 'total_ngrams_count':
                query_normalize = query_base.add_column(func.sum(Node_Ngram.weight))
        if query_normalize is not None:
            for date, value in query_normalize:
                date = date.replace(tzinfo=None)
                if date in result:
                    result[date] /= value
        # return result with proper formatting
        if input['format'] == 'json':
            return JsonHttpResponse({
                'query': input,
                'result': sorted(result.items()),
            }, 201)
        elif input['format'] == 'csv':
            return CsvHttpResponse(sorted(result.items()), ('date', 'value'), 201)
...
@@ -240,7 +240,13 @@ def get_or_create_node(nodetype=None,corpus=None,corpus_id=None,name_str=None,hy
     if nodetype is None:
         print("Need to give a type node")
     else:
-        ntype=cache.NodeType[nodetype]
+        try:
+            ntype = cache.NodeType[nodetype]
+        except KeyError:
+            ntype = cache.NodeType[nodetype] = NodeType()
+            ntype.name = nodetype
+            session.add(ntype)
+            session.commit()
     if corpus_id is not None and corpus is None:
         corpus = session.query(Node).filter(Node.id==corpus_id).first()
...
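The hunk above turns a bare cache lookup into a get-or-create: on a cache miss the NodeType row is built, cached and committed. A standalone sketch of the same pattern, assuming the dict-like cache, the NodeType model and the SQLAlchemy session from the diff:

def get_or_create_nodetype(name):
    # hypothetical helper mirroring the diff: look up first, create on miss
    try:
        ntype = cache.NodeType[name]
    except KeyError:
        ntype = cache.NodeType[name] = NodeType()  # NodeType is the ORM model used in the diff
        ntype.name = name
        session.add(ntype)
        session.commit()
    return ntype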
...
@@ -9,6 +9,7 @@ import gargantext_web.corpus_views as corpus_views
 from annotations import urls as annotations_urls
 from annotations.views import main as annotations_main_view
+import gargantext_web.api2
 import tests.ngramstable.views as samtest
...
@@ -75,6 +76,7 @@ urlpatterns = patterns('',
     url(r'^corpus/(\d+)/node_link.json$', views.node_link), # => api.analysis('type': 'node_link', 'format' : 'json')
     url(r'^corpus/(\d+)/adjacency.json$', views.adjacency), # => api.analysis('type': 'adjacency', 'format' : 'json')
+    url(r'^api2/nodes/(\d+)/histories$', gargantext_web.api2.NodeNgramsQueries.as_view()),
     url(r'^ngrams$', views.ngrams), # to be removed
     url(r'^nodeinfo/(\d+)$', views.nodeinfo), # to be removed ?
     url(r'^tfidf/(\d+)/(\w+)$', views_optimized.tfidf),
...
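With the api2 route registered above, the NodeNgramsQueries view can be exercised with a POST carrying the input shape documented in the view. A hedged example using the requests library (host, project id, corpus id and ngrams are placeholders; authentication is omitted):

import requests

payload = {
    'x': {'resolution': 'year', 'with_empty': True},
    'y': {'value': 'documents_count'},
    'filter': {'corpora': [52633], 'ngrams': ['bee', 'honeybee']},
    'format': 'json',
}
response = requests.post('http://localhost:8000/api2/nodes/1/histories', json=payload)
print(response.status_code)       # 201 on success
print(response.json()['result'])  # e.g. [['1995-01-01T00:00:00Z', 12.0], ...]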
...
@@ -40,7 +40,7 @@ case "$1" in
         ;;
     status)
-        status_of_proc "$DAEMON_NAME" "$DAEMON" && exit 0 || exit $?
+        ps -e | grep "`cat $DAEMON_PID` "
         ;;
     *)
         echo "Usage: $DAEMON_NAME {start|stop|restart|status}"
...
...
@@ -103,15 +103,13 @@ class NodesChildrenNgrams(APIView):
         # query ngrams
         ParentNode = aliased(Node)
         ngrams_query = (session
-            .query(Ngram.terms, func.count().label('count'))
-            # .query(Ngram.id, Ngram.terms, func.count().label('count'))
+            .query(Ngram.terms, func.sum(Node_Ngram.weight).label('count'))
             .join(Node_Ngram, Node_Ngram.ngram_id == Ngram.id)
             .join(Node, Node.id == Node_Ngram.node_id)
             .filter(Node.parent_id == node_id)
             .group_by(Ngram.terms)
             # .group_by(Ngram)
-            .order_by(func.count().desc(), Ngram.terms)
-            # .order_by(func.count().desc(), Ngram.id)
+            .order_by(func.sum(Node_Ngram.weight).desc(), Ngram.terms)
         )
         # filters
         if 'startwith' in request.GET:
...
...
@@ -349,8 +347,8 @@ class NodesChildrenDuplicates(APIView):
             'deleted': count
         })
 
-class NodesChildrenMetatadata(APIView):
-    def get(self, request, node_id):
+# retrieve metadata from a given list of parent node
+def get_metadata(corpus_id_list):
         # query hyperdata keys
         ParentNode = aliased(Node)
...
@@ -358,7 +356,7 @@ class NodesChildrenMetatadata(APIView):
             .query(Hyperdata)
             .join(Node_Hyperdata, Node_Hyperdata.hyperdata_id == Hyperdata.id)
             .join(Node, Node.id == Node_Hyperdata.node_id)
-            .filter(Node.parent_id == node_id)
+            .filter(Node.parent_id.in_(corpus_id_list))
             .group_by(Hyperdata)
         )
...
@@ -377,7 +375,7 @@ class NodesChildrenMetatadata(APIView):
         node_hyperdata_query = (session
             .query(value_column)
             .join(Node, Node.id == Node_Hyperdata.node_id)
-            .filter(Node.parent_id == node_id)
+            .filter(Node.parent_id.in_(corpus_id_list))
             .filter(Node_Hyperdata.hyperdata_id == hyperdata.id)
             .group_by(value_column)
             .order_by(value_column)
...
@@ -403,8 +401,68 @@ class NodesChildrenMetatadata(APIView):
                 'valuesCount': values_count,
             })
 
-        return JsonHttpResponse({
-            'data': collection,
-        })
+        # give the result back
+        return collection
+
+
+class ApiHyperdata(APIView):
+
+    def get(self, request):
+        corpus_id_list = list(map(int, request.GET['corpus_id'].split(',')))
+        return JsonHttpResponse({
+            'data': get_metadata(corpus_id_list),
+        })
+
+
+# retrieve ngrams from a given list of parent node
+def get_ngrams(corpus_id_list):
+    pass
+
+
+class ApiNgrams(APIView):
+
+    def get(self, request):
+        # parameters retrieval and validation
+        startwith = request.GET.get('startwith', '').replace("'", "\\'")
+        # query ngrams
+        ParentNode = aliased(Node)
+        ngrams_query = (session
+            .query(Ngram.terms, func.sum(Node_Ngram.weight).label('count'))
+            .join(Node_Ngram, Node_Ngram.ngram_id == Ngram.id)
+            .join(Node, Node.id == Node_Ngram.node_id)
+            .group_by(Ngram.terms)
+            # .group_by(Ngram)
+            .order_by(func.sum(Node_Ngram.weight).desc(), Ngram.terms)
+        )
+        # filters
+        if 'startwith' in request.GET:
+            ngrams_query = ngrams_query.filter(Ngram.terms.startswith(request.GET['startwith']))
+        if 'contain' in request.GET:
+            ngrams_query = ngrams_query.filter(Ngram.terms.contains(request.GET['contain']))
+        if 'corpus_id' in request.GET:
+            corpus_id_list = list(map(int, request.GET.get('corpus_id', '').split(',')))
+            if corpus_id_list and corpus_id_list[0]:
+                ngrams_query = ngrams_query.filter(Node.parent_id.in_(corpus_id_list))
+        # pagination
+        offset = int(request.GET.get('offset', 0))
+        limit = int(request.GET.get('limit', 20))
+        total = ngrams_query.count()
+        # return formatted result
+        return JsonHttpResponse({
+            'pagination': {
+                'offset': offset,
+                'limit': limit,
+                'total': total,
+            },
+            'data': [
+                {
+                    'terms': ngram.terms,
+                    'count': ngram.count,
+                }
+                for ngram in ngrams_query[offset : offset+limit]
+            ],
+        })
 
 class NodesChildrenQueries(APIView):
...
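Once the r'ngrams$' route below is in place, the new ApiNgrams view answers plain GET queries. A hedged example (host and corpus ids are placeholders; the /api prefix depends on how these urlpatterns are mounted):

import requests

params = {'corpus_id': '52633,52634', 'startwith': 'bee', 'limit': 5}
response = requests.get('http://localhost:8000/api/ngrams', params=params)
for ngram in response.json()['data']:
    print(ngram['terms'], ngram['count'])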
...
@@ -22,8 +22,8 @@ urlpatterns = patterns('',
     url(r'node/(\d+)/ngrams$', ngrams.Ngrams.as_view()),
     url(r'node/(\d+)/ngrams$', ngrams.Ngrams.as_view()),
-    url(r'nodes/(\d+)/children/hyperdata$', api.NodesChildrenMetatadata.as_view()),
-    url(r'nodes/(\d+)/children/hyperdata$', api.NodesChildrenMetatadata.as_view()),
+    #url(r'nodes/(\d+)/children/hyperdata$', api.NodesChildrenMetatadata.as_view()),
+    #url(r'nodes/(\d+)/children/hyperdata$', api.NodesChildrenMetatadata.as_view()),
     url(r'nodes/(\d+)/children/queries$', api.NodesChildrenQueries.as_view()),
     url(r'nodes/(\d+)/children/queries$', api.NodesChildrenQueries.as_view()),
...
@@ -37,6 +37,9 @@ urlpatterns = patterns('',
     url(r'nodes/(\d+)/graph$', graph.Graph.as_view()),
     url(r'corpus/(\d+)/graph$', graph.Graph.as_view()),
+    url(r'hyperdata$', api.ApiHyperdata.as_view()),
+    url(r'ngrams$', api.ApiNgrams.as_view()),
+
     url(r'tfidf/(\d+)/(\w+)$', views_optimized.tfidf),
 )
d3.sankey = function() {
var sankey = {},
nodeWidth = 24,
nodePadding = 8,
size = [1, 1],
nodes = [],
links = [];
sankey.nodeWidth = function(_) {
if (!arguments.length) return nodeWidth;
nodeWidth = +_;
return sankey;
};
sankey.nodePadding = function(_) {
if (!arguments.length) return nodePadding;
nodePadding = +_;
return sankey;
};
sankey.nodes = function(_) {
if (!arguments.length) return nodes;
nodes = _;
return sankey;
};
sankey.links = function(_) {
if (!arguments.length) return links;
links = _;
return sankey;
};
sankey.size = function(_) {
if (!arguments.length) return size;
size = _;
return sankey;
};
sankey.layout = function(iterations) {
computeNodeLinks();
computeNodeValues();
computeNodeBreadths();
computeNodeDepths(iterations);
computeLinkDepths();
return sankey;
};
sankey.relayout = function() {
computeLinkDepths();
return sankey;
};
sankey.link = function() {
var curvature = .5;
function link(d) {
var x0 = d.source.x + d.source.dx,
x1 = d.target.x,
xi = d3.interpolateNumber(x0, x1),
x2 = xi(curvature),
x3 = xi(1 - curvature),
y0 = d.source.y + d.sy + d.dy / 2,
y1 = d.target.y + d.ty + d.dy / 2;
return "M" + x0 + "," + y0
+ "C" + x2 + "," + y0
+ " " + x3 + "," + y1
+ " " + x1 + "," + y1;
}
link.curvature = function(_) {
if (!arguments.length) return curvature;
curvature = +_;
return link;
};
return link;
};
// Populate the sourceLinks and targetLinks for each node.
// Also, if the source and target are not objects, assume they are indices.
function computeNodeLinks() {
nodes.forEach(function(node) {
node.sourceLinks = [];
node.targetLinks = [];
});
links.forEach(function(link) {
var source = link.source,
target = link.target;
if (typeof source === "number") source = link.source = nodes[link.source];
if (typeof target === "number") target = link.target = nodes[link.target];
source.sourceLinks.push(link);
target.targetLinks.push(link);
});
}
// Compute the value (size) of each node by summing the associated links.
function computeNodeValues() {
nodes.forEach(function(node) {
node.value = Math.max(
d3.sum(node.sourceLinks, value),
d3.sum(node.targetLinks, value)
);
});
}
// Iteratively assign the breadth (x-position) for each node.
// Nodes are assigned the maximum breadth of incoming neighbors plus one;
// nodes with no incoming links are assigned breadth zero, while
// nodes with no outgoing links are assigned the maximum breadth.
function computeNodeBreadths() {
var remainingNodes = nodes,
nextNodes,
x = 0;
while (remainingNodes.length) {
nextNodes = [];
remainingNodes.forEach(function(node) {
node.x = x;
node.dx = nodeWidth;
node.sourceLinks.forEach(function(link) {
nextNodes.push(link.target);
});
});
remainingNodes = nextNodes;
++x;
}
//
moveSinksRight(x);
scaleNodeBreadths((size[0] - nodeWidth) / (x - 1)); // scale x-positions to the layout's own width
}
function moveSourcesRight() {
nodes.forEach(function(node) {
if (!node.targetLinks.length) {
node.x = d3.min(node.sourceLinks, function(d) { return d.target.x; }) - 1;
}
});
}
function moveSinksRight(x) {
nodes.forEach(function(node) {
if (!node.sourceLinks.length) {
node.x = x - 1;
}
});
}
function scaleNodeBreadths(kx) {
nodes.forEach(function(node) {
node.x *= kx;
});
}
function computeNodeDepths(iterations) {
var nodesByBreadth = d3.nest()
.key(function(d) { return d.x; })
.sortKeys(d3.ascending)
.entries(nodes)
.map(function(d) { return d.values; });
//
initializeNodeDepth();
resolveCollisions();
for (var alpha = 1; iterations > 0; --iterations) {
relaxRightToLeft(alpha *= .99);
resolveCollisions();
relaxLeftToRight(alpha);
resolveCollisions();
}
function initializeNodeDepth() {
var ky = d3.min(nodesByBreadth, function(nodes) {
return (size[1] - (nodes.length - 1) * nodePadding) / d3.sum(nodes, value);
});
nodesByBreadth.forEach(function(nodes) {
nodes.forEach(function(node, i) {
node.y = i;
node.dy = node.value * ky;
});
});
links.forEach(function(link) {
link.dy = link.value * ky;
});
}
function relaxLeftToRight(alpha) {
nodesByBreadth.forEach(function(nodes, breadth) {
nodes.forEach(function(node) {
if (node.targetLinks.length) {
var y = d3.sum(node.targetLinks, weightedSource) / d3.sum(node.targetLinks, value);
node.y += (y - center(node)) * alpha;
}
});
});
function weightedSource(link) {
return center(link.source) * link.value;
}
}
function relaxRightToLeft(alpha) {
nodesByBreadth.slice().reverse().forEach(function(nodes) {
nodes.forEach(function(node) {
if (node.sourceLinks.length) {
var y = d3.sum(node.sourceLinks, weightedTarget) / d3.sum(node.sourceLinks, value);
node.y += (y - center(node)) * alpha;
}
});
});
function weightedTarget(link) {
return center(link.target) * link.value;
}
}
function resolveCollisions() {
nodesByBreadth.forEach(function(nodes) {
var node,
dy,
y0 = 0,
n = nodes.length,
i;
// Push any overlapping nodes down.
nodes.sort(ascendingDepth);
for (i = 0; i < n; ++i) {
node = nodes[i];
dy = y0 - node.y;
if (dy > 0) node.y += dy;
y0 = node.y + node.dy + nodePadding;
}
// If the bottommost node goes outside the bounds, push it back up.
dy = y0 - nodePadding - size[1];
if (dy > 0) {
y0 = node.y -= dy;
// Push any overlapping nodes back up.
for (i = n - 2; i >= 0; --i) {
node = nodes[i];
dy = node.y + node.dy + nodePadding - y0;
if (dy > 0) node.y -= dy;
y0 = node.y;
}
}
});
}
function ascendingDepth(a, b) {
return a.y - b.y;
}
}
function computeLinkDepths() {
nodes.forEach(function(node) {
node.sourceLinks.sort(ascendingTargetDepth);
node.targetLinks.sort(ascendingSourceDepth);
});
nodes.forEach(function(node) {
var sy = 0, ty = 0;
node.sourceLinks.forEach(function(link) {
link.sy = sy;
sy += link.dy;
});
node.targetLinks.forEach(function(link) {
link.ty = ty;
ty += link.dy;
});
});
function ascendingSourceDepth(a, b) {
return a.source.y - b.source.y;
}
function ascendingTargetDepth(a, b) {
return a.target.y - b.target.y;
}
}
function center(node) {
return node.y + node.dy / 2;
}
function value(link) {
return link.value;
}
return sankey;
};
{% load staticfiles %}
<style>
.node rect {
cursor: move;
fill-opacity: .9;
shape-rendering: crispEdges;
}
.node text {
pointer-events: none;
text-shadow: 0 1px 0 #fff;
}
.link {
fill: none;
stroke: #000;
stroke-opacity: .2;
}
.link:hover {
stroke-opacity: .5;
}
</style>
<body>
<p id="chart">
<script src="http://d3js.org/d3.v3.min.js"></script>
<script src="{% static "js/sankey.js" %}"></script>
<script>
var units = "Widgets";
var margin = {top: 10, right: 10, bottom: 10, left: 10},
width = 700 - margin.left - margin.right,
height = 300 - margin.top - margin.bottom;
var formatNumber = d3.format(",.0f"), // zero decimal places
format = function(d) { return formatNumber(d) + " " + units; },
color = d3.scale.category20();
// append the svg canvas to the page
var svg = d3.select("#chart").append("svg")
.attr("width", width + margin.left + margin.right)
.attr("height", height + margin.top + margin.bottom)
.append("g")
.attr("transform",
"translate(" + margin.left + "," + margin.top + ")");
// Set the sankey diagram properties
var sankey = d3.sankey()
.nodeWidth(36)
.nodePadding(40)
.size([width, height]);
var path = sankey.link();
// load the data (using the timelyportfolio csv method)
d3.csv("/corpus/{{corpus.id}}/sankey.csv", function(error, data) {
//set up graph in same style as original example but empty
graph = {"nodes" : [], "links" : []};
data.forEach(function (d) {
graph.nodes.push({ "name": d.source });
graph.nodes.push({ "name": d.target });
graph.links.push({ "source": d.source,
"target": d.target,
"value": +d.value });
});
// return only the distinct / unique nodes
graph.nodes = d3.keys(d3.nest()
.key(function (d) { return d.name; })
.map(graph.nodes));
// loop through each link replacing the text with its index from node
graph.links.forEach(function (d, i) {
graph.links[i].source = graph.nodes.indexOf(graph.links[i].source);
graph.links[i].target = graph.nodes.indexOf(graph.links[i].target);
});
//now loop through each nodes to make nodes an array of objects
// rather than an array of strings
graph.nodes.forEach(function (d, i) {
graph.nodes[i] = { "name": d };
});
sankey
.nodes(graph.nodes)
.links(graph.links)
.layout(32);
// add in the links
var link = svg.append("g").selectAll(".link")
.data(graph.links)
.enter().append("path")
.attr("class", "link")
.attr("d", path)
.style("stroke-width", function(d) { return Math.max(1, d.dy); })
.sort(function(a, b) { return b.dy - a.dy; });
// add the link titles
link.append("title")
.text(function(d) {
return d.source.name + " → " +
d.target.name + "\n" + format(d.value); });
// add in the nodes
var node = svg.append("g").selectAll(".node")
.data(graph.nodes)
.enter().append("g")
.attr("class", "node")
.attr("transform", function(d) {
return "translate(" + d.x + "," + d.y + ")"; })
.call(d3.behavior.drag()
.origin(function(d) { return d; })
.on("dragstart", function() {
this.parentNode.appendChild(this); })
.on("drag", dragmove));
// add the rectangles for the nodes
node.append("rect")
.attr("height", function(d) { return d.dy; })
.attr("width", sankey.nodeWidth())
.style("fill", function(d) {
return d.color = color(d.name.replace(/ .*/, "")); })
.style("stroke", function(d) {
return d3.rgb(d.color).darker(2); })
.append("title")
.text(function(d) {
return d.name + "\n" + format(d.value); });
// add in the title for the nodes
node.append("text")
.attr("x", -6)
.attr("y", function(d) { return d.dy / 2; })
.attr("dy", ".35em")
.attr("text-anchor", "end")
.attr("transform", null)
.text(function(d) { return d.name; })
.filter(function(d) { return d.x < width / 2; })
.attr("x", 6 + sankey.nodeWidth())
.attr("text-anchor", "start");
// the function for moving the nodes
function dragmove(d) {
d3.select(this).attr("transform",
"translate(" + d.x + "," + (
d.y = Math.max(0, Math.min(height - d.dy, d3.event.y))
) + ")");
sankey.relayout();
link.attr("d", path);
}
});
</script>
</body>
</html>
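The template's d3.csv call above expects /corpus/{{corpus.id}}/sankey.csv to serve rows with source, target and value columns, for example (illustrative values, not from this commit):

source,target,value
corpus A,theme X,12
corpus A,theme Y,5
theme X,theme Z,7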