Commit abf79e21 authored by Romain Loth

Merge commit '35df7c66900a3366b3f74e7ccf046f08bd2c8c68'

parents 6f225e23 b0f4aeaa
......@@ -9,7 +9,18 @@ This package is:
The app can be tested by simply opening explorerjs.html and providing a graph in `json` or `gexf` format (see examples in the `data/` dir).
#### Basic integration policy
#### Input file setup and advanced usage
ProjectExplorer allows 4 main input modes:
- **localfile**: a local file from the client machine
- **serverfile**: a static file from the remote server
- **servermenu**: a list of static files from the remote server
- **api**: a dataset from a remote server API
To set up the desired mode, change the `TW.conf.sourcemode` value in settings_explorerjs.js or add `sourcemode=xxx` as a URL argument.
See [the developer's manual](https://github.com/moma/ProjectExplorer/blob/master/00.DOCUMENTATION/C-advanced/developer_manual.md) for more information.
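For example, a minimal sketch of the settings-file way (the value shown is just one of the four modes listed above):
```js
// settings_explorerjs.js: pick one of "localfile" | "serverfile" | "servermenu" | "api"
TW.conf.sourcemode = "servermenu"
```
The same choice can be made per session with a URL argument, e.g. `explorerjs.html?sourcemode=serverfile&file=data/Somemap.gexf`; when both are present, the URL argument takes precedence.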
#### Integration policy
As a client-side lib, **tinawebJS can entirely reside in the `static` directory of your app**.
For the rest of the examples we assume you cloned the code into a directory called `path/to/yourapp/static/ProjectExplorer`.
......@@ -68,22 +79,3 @@ The two exceptions are:
- while part of the distribution, they are **standalone micro-servers**
- they contain their own readme as to how to run them on a server
- once they are configured and running, communication between them and the main tinaweb client module happens via XHR requests (sketched below): they can therefore reside in any location convenient for your deployment.
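A hedged illustration of such an XHR call from the client side: the route comes from `TW.conf.relatedDocsAPIS` (shown later in this commit) and the GET parameters are the ones the php backend reads via `$_GET`; the exact script name under the route and the query value are placeholders.
```js
// illustrative sketch only, not the actual client code
var route = TW.conf.relatedDocsAPIS["csv"]    // e.g. "twbackends/phpAPI" (script name omitted here)
var params = "?gexf=" + encodeURIComponent(TW.File)
           + "&ndtype=0&dbtype=csv"
           + "&query=" + encodeURIComponent('["some term"]')  // the backend json_decodes this value
var xhr = new XMLHttpRequest()
xhr.open("GET", route + params)
xhr.onload = function () { console.log(xhr.responseText) }
xhr.send()
```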
#### Advanced usage
cf. developer_manual.md
#### old TODO update
- "JS Mode": TinawebJS est utilisé juste a niveau Javascript (HTML+CSS+JS), sans aide des modules php/python. C'est la version standalone, ça veut dire "lecture d'un fichier GEXF ou JSON".
- Graph uni-partite. http://localhost/TinawebJS_2015/explorerjs.html?file=data/0-terms-terms-MainNodes.gexf
- Graph bi-partite. http://localhost/TinawebJS_2015/explorerjs.html?file=data/cnrsfolder/2015-11-06T15:15:02.121958_graph.json
- "JS+PHP Mode": Quand il y a des GEXF|JSON et en plus une BD en sqlite en format CorText (manager.cortext.net). En ce mode on dois declarer une db.json avec l'info necessaire.
- Graph bi-partite. http://localhost/TinawebJS_2015/explorerjs.html
......@@ -13,9 +13,9 @@ After the commits of the week of 26-30 June 2017, an easier structure for the
│   └── (merged with ./doc)
├── data
│   └── (graphs by sub-project)
├── db.json
├── explorerjs.html <= launch entry point
├── settings_explorerjs.js <= config entry point
├── settings_explorerjs.js <= general config
├── db.json <= additional per-source config for gexf/json sources
├── favicon.ico
├── LICENSE
├── README.md
......@@ -34,7 +34,6 @@ After the commits of the week of 26-30 June 2017, an easier structure for the
│   ├── jquery-3
│   ├── readmore.js
│   ├── sigma_v1.2
│   ├── sigma_v1.5
│   └── tweets
|
├── twmain <= former tinawebJS directory
......
......@@ -64,55 +64,103 @@ Having a node0.name entry and optionally a node1.name is enough to display the g
The servermenu file also allows configuration of associated queries for selected node(s): **relatedDocs**
To enable it, you need to add to your node entry the `reldbfile` key:
To enable it, add the `reldbs` key to your node entry, with at least a db type:
```json
"node0": {
"name": "$$blabla",
"reldbfile": "$$relpath/to/csv/or/sqlite"
"reldbs": {
"$$myType" : {}
}
}
```
The presence of this property `reldbfile` makes the API usable in db.json.
The presence of this property "reldbs" makes the API usable in db.json.
##### More relatedDocs settings
In addition, for full configuration, the following entries can be set under node0 or node1.
###### => for a CSV doc-by-doc table
Expected type is `"csv"` and you should fill the columns to search in.
Expected type is `"csv"` and you should fill the columns to search in and the template to use to render hits
```json
"reldbtype": "csv",
"reldbqcols": ["list", "of", "columns", "to", "search", "in", "for", "node0"]
"reldbs": {
"csv" : {
"file": "$$relpath/to/some.csv",
"qcols": ["list", "of", "cols", "to", "search", "in", "for", "node0"],
"template": "bib_details"
}
}
```
###### Real life example
###### => for a cortext sql base
Expected type is `"CortextDB"` and you should fill the tables to search in.
```json
"reldbs": {
"CortextDB": {
"file": "$$relpath/to/some.db",
"qtable": "$$tableNameToSearchIn",
"template": "cortext_with_link"
}
}
```
###### => for twitter queries
Expected type is `"twitter"` and no additional conf is needed (POSS for the future: add twitter query context, ex: "Présidentielles 2017 AND (query)").
```json
"reldbs": {
"twitter": {}
}
```
###### Real life examples
```json
"data/gargistex": {
"first": "shale_and_ice.gexf",
"graphs": {
"shale_and_ice.gexf": {
"node0": {
"name": "terms",
"reldbtype": "csv",
"reldbfile": "shale_and_ice.csv",
"reldbqcols": ["title", "abstract"]
"graphs":{
"model_calibration.gexf": {
"node0": {
"name": "terms",
"reldbs": {
"csv": {
"file": "model_calibration.csv",
"qcols": ["title"],
"template": "bib_details"
},
"twitter": {}
}
}
}
}
},
"data/test": {
"first" : "mini_for_csv.gexf",
"graphs": {
"mini_for_csv.gexf": {
"node0": {
"name": "terms",
"reldbs": {
"csv": {
"file": "mini_for_csv.csv",
"qcols": ["title","keywords","text"],
"template": "bib_details"
},
"twitter": {}
}
},
"model_calibration.gexf": {
"node0": {
"name": "terms",
"reldbtype": "csv",
"reldbfile": "model_calibration.csv",
"reldbqcols": ["title", "abstract"]
"node1": {
"name": "authors",
"reldbs": {
"csv": {
"file": "mini_for_csv.csv",
"qcols": ["author"],
"template": "bib_details"
}
}
}
}
}
}
```
###### => for CortextDB SQL tables
Expected type is `"CortextDB"` and you should fill the table to search in.
```json
"reldbtype": "CortextDB",
"reldbqtable": []
```
In the last example, there are two nodetypes:
- node0 allows both CSV and twitter relatedDocs tabs.
- node1 allows only the CSV relatedDocs tab.
This is a stub for future developer documentation.
#### About settings
- system-wide settings are in `settings_explorerjs.js`
- source-by-source settings (nodetypes, relatedDocs APIs) are in `db.json` (see the sketch below)
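A hedged sketch of the system-wide level (these names appear in the settings_explorerjs.js diff of this commit; the values are only illustrative):
```js
// settings_explorerjs.js (system-wide settings)
TW.conf.sourcemode      = "servermenu"  // graph input mode
TW.conf.getRelatedDocs  = true          // enable relatedDocs queries
TW.conf.relatedDocsMax  = 10            // max hits per query
TW.conf.relatedDocsType = "csv"         // fallback dbtype when db.json gives no per-source conf
// nodetype names and reldbs search backends are declared per source in db.json
```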
## Graph input choices
......@@ -13,7 +16,7 @@ Tina allows 3 main ways of input:
The `sourcemode` value is by default the one in settings_explorerjs.js (`TW.conf.sourcemode`), unless a URL argument of the same name is present.
The `serverfile` option has an extended version called `servermenu`. It opens a list of files called `db.json` on the server, providing a menu to choose from it.
The `serverfile` option has an extended version called `servermenu`. It reads the list of files from `db.json` on the server and provides a menu to choose from.
The detailed implementation of these choices can be found in the function `syncRemoteGraphData()` in main.js.
......@@ -32,7 +35,8 @@ This will still evolve but the main steps for any graph initialization messily u
4. [`main.js`] mainStartGraph() function runs all the rest
1. precomputes display properties (grey color, etc.)
2. calls [`sigmaUtils`], where the function `FillGraph()` used to be a central point for filtering and preparing properties; now, with 2 and 3, it just copies a filtered set of the nodes and edges of the currently active types into a new structure that groups them together (POSSIBLE: remove this extra step)
3. back in [`main.js`], finally all sigma settings (user + defaults) are merged and we initialize the sigma instance (`new sigma` etc.)
at this point, any additional conf located in db.json is used for nodeTypes and relatedDocsTypes
4. finally, a call to [`TinawebJS`] initializes the action listeners; crucially, this phase should initialize the items that need the sigma instance (because they may depend on the displayed categories, the number of displayed nodes, etc.); see the call sketch below
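The calling convention behind step 4 can be seen in the two call sites touched by this commit; a condensed sketch (both calls appear verbatim further down in the diff, and the surrounding variables belong to their respective functions):
```js
// from jsActionOnGexfSelector(): graph picked in the server menu, with a db.json conf entry
mainStartGraph(newDataRes["format"], newDataRes["data"], TW.File, TW.instance)
// from createFilechooserEl(): local client file, 3rd arg null = no additional conf available
mainStartGraph(theFormat, rdr.result, null, TW.instance)
```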
......
Thanks for using TinawebJS
# ProjectExplorer: a graph client-side engine
This work is led by the Complex Systems Institute of Paris Ile-de-France (ISC-PIF, http://iscpif.fr) and the Centre D'analyse et de Mathématiques Sociale, both CNRS entities.
### Presentation
HOMEPAGE
http://tinasoft.eu/
Thank you for using ProjectExplorer/TinawebJS.
SOURCE CODE REPOSITORY
This work is led by the Complex Systems Institute of Paris Ile-de-France ([ISCPIF](http://iscpif.fr)) and the [Centre d'Analyse et de Mathématique Sociales](http://cams.ehess.fr/), both [CNRS](http://www.cnrs.fr/) entities.
https://github.com/moma/explorerjs
###### Source code repository
https://github.com/moma/ProjectExplorer
AUTHORS
###### Authors
- Researchers and engineers of the ISC-PIF
David Chavalarias <david dot chavalarias at iscpif ... fr>
Samuel Castillo
Researchers and engineers of the [ISCPIF/CNRS - UPS 3611](http://iscpif.fr)
- Dr. David Chavalarias
- Samuel Castillo
- Romain Loth
Acknowledgements
You can contact the authors by email (<firstname.lastname@iscpif.fr>).
Former Tina developers (Java-based software from which tinawebJS is adapted)
elias showk <elishowk_at-nonutc.fr>
julian bilcke <julian.bilcke_at-iscpif.fr>
###### Acknowledgements
- TinawebJS is built on top of Alexis Jacomy and Guillaume Plique's [sigmaJS](http://sigmajs.org)
- This work is the continuation of the TINA project, a European Union FP7 project (FP7-ICT-2009-C)
- Former Tina developers (the Java-based software from which tinawebJS is adapted)
- [Elias Showk](https://github.com/elishowk)
- [Julian Bilcke](https://github.com/jbilcke)
TinawebJS is built on Alexis Jacomy's sigmaJS (http://sigmajs.org)
This work is the continuation of the TINA project, a European Union FP7 project - FP7-ICT-2009-C
REQUIREMENTS
WEB BROWSER compatible with JavaScript: we recommend Chrome/Chromium or Firefox
### Usage
HOW TO USE:
ProjectExplorer is a versatile app that can be used standalone or as a client library. The documentation covering the different setup cases is being updated after a major refactoring and will grow over time.
- Put a mono or bipartite gexf, e.g.: Somemap.gexf, inside "data/" folder.
Here are the main points.
- And then see it in your http://localhost/../../explorerjs.html?file=data/Somemap.gexf
###### Getting started
In the simplest setup, just clone the repository and open explorerjs.html in a modern browser.
```
git clone https://github.com/moma/ProjectExplorer.git
cd ProjectExplorer
firefox explorerjs.html
```
=> An input field in the upper right allows you to open any gexf file.
###### Usage on a web server
To activate all features, you should:
1. configure a web server like apache or nginx, for instance on your localhost
2. define a new "location" in your apache or nginx conf, pointing to the directory you cloned
COPYRIGHT AND LICENSE
Copyright (C) 2013-2016 Institut des Systèmes Complexes de Paris Ile-de-France
CAMS - Centre National de la Recherche Scientifique
Now you can already use ProjectExplorer as a showcase for a given file:
- Put a mono or bipartite gexf, e.g.: `Somemap.gexf`, inside the `data/` folder.
- And then see it in your browser:
http://localhost/explorerjs.html?sourcemode=serverfile&file=data/Somemap.gexf
This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by
Once you have this webserver running and some source data files, you may also configure a "sources list":
- it will be shown as a **menu** to select graphs in the interface
- it allows you to define associated **node types** for each source
- it allows you to define associated **search backends** for each source
- to use this, follow the guidelines in the **[Servermenu HOWTO](https://github.com/moma/ProjectExplorer/blob/master/00.DOCUMENTATION/A-Introduction/servermenu_config.md)**
the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
###### Integration in a larger app
To integrate ProjectExplorer in a larger web application, you may have several locations with subdirectories defined on your server. In this case, you'll need to use the provided path modification tool (see this [integration procedure example](https://github.com/moma/ProjectExplorer/tree/master/00.DOCUMENTATION/A-Introduction#integration-policy))
###### Advanced settings
For more information about other ProjectExplorer's settings (settings file, input modes, attribute processing options), please refer to the [developer's manual](https://github.com/moma/ProjectExplorer/blob/master/00.DOCUMENTATION/C-advanced/developer_manual.md).
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
### Copyright and license
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/gpl.html>.
Copyright (c) 2013-2017 **ISCPIF** --
**CAMS** -- **Centre National de la Recherche Scientifique**
This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with this program. If not, see http://www.gnu.org/licenses/gpl.html.
{
"data/ClimateChange": {
"graphs": {
"Maps_S_800.gexf": {
"node0": {
"name": "ISItermsWhitelistV2Oct_5 &amp; ISItermsWhitelistV2Oct_5",
"reldbs": {
"CortextDB": {
"file": "wos_climate-change_title_2014-2015.db",
"qtable": "ISItermsWhitelistV2Oct_5",
"template": "cortext_with_link"
},
"twitter": {}
}
}
}
}
},
"data/gargistex": {
"first" : "shale_and_ice.gexf",
"graphs":{
"shale_and_ice.gexf": {
"node0": {
"name": "terms",
"reldbtype": "csv",
"reldbfile": "shale_and_ice.csv",
"reldbqcols": ["title"],
"reltemplate": "bib_details"
"reldbs": {
"csv": {
"file": "shale_and_ice.csv",
"qcols": ["title"],
"template": "bib_details"
},
"twitter": {}
}
}
},
"model_calibration.gexf": {
"node0": {
"name": "terms",
"reldbtype": "csv",
"reldbfile": "model_calibration.csv",
"reldbqcols": ["title"],
"reltemplate": "bib_details"
"reldbs": {
"csv": {
"file": "model_calibration.csv",
"qcols": ["title"],
"template": "bib_details"
},
"twitter": {}
}
}
}
}
......@@ -37,17 +62,24 @@
"mini_for_csv.gexf": {
"node0": {
"name": "term",
"reldbtype": "csv",
"reldbfile": "mini_for_csv.csv",
"reldbqcols": ["title","keywords","text"],
"reltemplate": "bib_details"
"reldbs": {
"csv": {
"file": "mini_for_csv.csv",
"qcols": ["title","keywords","text"],
"template": "bib_details"
},
"twitter": {}
}
},
"node1": {
"name": "person",
"reldbtype": "csv",
"reldbfile": "mini_for_csv.csv",
"reldbqcols": ["author"],
"reltemplate": "bib_details"
"reldbs": {
"csv": {
"file": "mini_for_csv.csv",
"qcols": ["author"],
"template": "bib_details"
}
}
}
},
"test_with_various_atts.gexf": {}
......@@ -59,23 +91,21 @@
"first" : "ProgrammeDesCandidats.enrichi.gexf",
"graphs": {
"ProgrammeDesCandidats.enrichi.gexf": {
"node0": { "name": "terms" }
"node0": { "name": "terms", "reldbs": { "twitter": {} } }
},
"ProgrammeDesCandidats.gexf": {
"node0": { "name": "terms" }
"node0": { "name": "terms", "reldbs": { "twitter": {} } }
}
}
},
"data/ClimateChange": {
"graphapi": {
"__comment__": "special subproject for api sourcemode",
"__comment__": "allows setting nodetypes and reldocs for api graph sources",
"__comment__": "POSS can be avoided if 2 files; filemenu.json, nodeconfs.json",
"graphs": {
"Maps_S_800.gexf": {
"node0": {
"name": "ISItermsWhitelistV2Oct_5 &amp; ISItermsWhitelistV2Oct_5",
"reldbqtable": "ISItermsWhitelistV2Oct_5",
"reldbfile" : "wos_climate-change_title_2014-2015.db",
"reldbtype": "CortextDB",
"reltemplate": "cortext_with_link"
}
"default": {
"node0": { "name": "NGram", "reldbs": { "twitter": {} } },
"node1": { "name": "Document" }
}
}
}
......
......@@ -475,9 +475,13 @@
<!-- One tab pane
(only one with topPapers, we change content ourselves)
-->
<div class="tab-content">
<div id="topPapers" role="tabpanel" class="tab-pane active">
</div>
<div id="reldocs-boxes" class="tab-content">
<!--
example:
<div id="rd-0-csv" role="tabpanel" class="topPapers tab-pane active">
</div>
-->
</div>
</div>
......@@ -764,7 +768,7 @@
<script src="twlibs3/freshslider/freshslider.1.0.js" type="text/javascript" ></script>
<script src="twlibs3/readmore.js" type="text/javascript"></script>
<script src="twlibs3/tweets/widgets.js" type="text/javascript" language="javascript"></script>
<script src="twlibs3/bootstrap-native/bootstrap-native.min.js"></script>
<script src="twlibs3/bootstrap-native/bootstrap-native.js"></script>
<!-- new sigma 1.2 imports -->
<!-- <script src="twlibs3/sigma_v1.2/sigma.min.js" type="text/javascript" language="javascript"></script> -->
<script src="twlibs3/sigma_v1.2/sigma.noIndexes.js" type="text/javascript" language="javascript"></script>
......
......@@ -31,13 +31,15 @@ TW.conf = (function(TW){
TWConf.getRelatedDocs = false
TWConf.relatedDocsMax = 10
TWConf.relatedDocsType = "twitter" // accepted: "twitter" | "LocalDB"
// fallback type (if no detailed source-by-source conf from db.json)
TWConf.relatedDocsType = "csv" // accepted: "twitter" | "csv" | "CortextDB"
// POSSible: "elastic"
// routes by corresponding type
TWConf.relatedDocsAPIS = {
// routes by corresponding type
"LocalDB": "twbackends/phpAPI",
"twitter": "http://127.0.0.1:5000/twitter_search"
"twitter": "http://127.0.0.1:5000/twitter_search",
"CortextDB": "twbackends/phpAPI",
"csv": "twbackends/phpAPI"
}
// fallback topPapers API if none found by type
......
......@@ -6,7 +6,7 @@ $elems = json_decode($query);
// the table used as search perimeter is from db.json conf
$table = $my_conf[$ntid]['reldbqtable'] ;
$table = $my_conf["node".$ntid][$dbtype]['qtable'] ;
// values for CortextDB that seem to never change: /!\ hardcoded here /!\
// the column accessors
......@@ -100,7 +100,7 @@ foreach ($wos_ids as $id => $score) {
foreach ($base->query($sql) as $row) {
$external_link="<a href=http://google.com/webhp?#q=".urlencode('"'.$row['data'].'"')." target=blank>".' <img width=15px src="'.$our_libs_root.'/img/google.png"></a>';
$link = 'JavaScript:newPopup(\''.$our_php_root.'/default_doc_details.php?gexf='.urlencode($gexf).'&index='.$table.'&query='.urlencode($query).'&type='.urlencode($_GET['type']).'&id='.$id.' \')';
$link = 'JavaScript:newPopup(\''.$our_php_root.'/default_doc_details.php?gexf='.urlencode($gexf).'&dbtype='.$dbtype.'&query='.urlencode($query).'&ndtype='.$ntid.'&id='.$id.'\')';
if ($output_mode == "html") {
$htmlout.="<li title='".$score."'>";
......
......@@ -6,6 +6,9 @@ $base = new PDO("sqlite:" .$mainpath.$graphdb);
$query = str_replace( '__and__', '&', $_GET["query"] );
$terms_of_query = json_decode($query);
// the table used as search perimeter is from db.json conf
$table = $my_conf["node".$ntid][$dbtype]['qtable'] ;
// echo "mainpath: ".$mainpath."<br>";
// echo "thedb: ".$mainpath.$graphdb."<br>";
// echo "thequery: ".var_dump($terms_of_query);
......@@ -32,7 +35,7 @@ echo '
<div id="tabs">
<ul>
<li><a href="#tabs-1">Selected Document</a></li>
<li><a href="full_doc_list.php?'.'gexf='.urlencode($gexf).'&query='.urlencode($_GET["query"]).'&index='.$_GET["index"].'&type='.urlencode($_GET["type"]).'">Full list</a></li>';
<li><a href="full_doc_list.php?'.'gexf='.urlencode($gexf).'&query='.urlencode($_GET["query"]).'&ndtype='.$ntid.'&dbtype='.$dbtype.'">Full list</a></li>';
echo '</ul>';
echo '<div id="tabs-1">';
......@@ -81,7 +84,7 @@ $id=$_GET["id"];
// }
// // get the date
if(strpos($_GET["index"],'terms') ) $sql = 'SELECT data FROM '.$_GET["index"].' WHERE id='.$id;
if(strpos($table,'terms') ) $sql = "SELECT data FROM $table WHERE id=".$id;
else $sql = 'SELECT data FROM ISItermsListV1 WHERE id='.$id;
$output.='<br/><b>Keywords: </b>';
$terms=array();
......
......@@ -8,7 +8,7 @@ $base = new PDO("sqlite:" .$mainpath.$graphdb);
$output = "<ul>"; // string sent to the javascript for display
$type = $_GET["type"];
$type = $_GET["ndtype"];
$query = str_replace( '__and__', '&', $_GET["query"] );
$terms_of_query=json_decode($_GET["query"]);
$elems = json_decode($query);
......@@ -19,25 +19,20 @@ foreach ($base->query($sql) as $row) {
$table_size=$row['COUNT(*)'];
}
$table = "";
$column = "";
$id="";
// the table used as search perimeter is from db.json conf
$table = $my_conf["node".$ntid][$dbtype]['qtable'] ;
if($type=="social"){
$table = "ISIAUTHOR";
$column = "data";
$id = "id";
$restriction='';
$factor=10;// factor for normalisation of stars
}
// values for CortextDB that seem to never change: /!\ hardcoded here /!\
// the column accessors
$column = "data";
$id = "id";
if($type=="semantic"){
$table = $_GET["index"];
$column = "data";
$id = "id";
$restriction='';
$factor=10;
}
// the output tables
$author_table = "ISIAUTHOR";
$titles_table = "ISITITLE";
$factor=10;// factor for normalisation of stars
$restriction='';
$sql = 'SELECT count(*),'.$id.'
......@@ -88,7 +83,7 @@ foreach ($wos_ids as $id => $score) {
$count+=1;
$output.="<li title='".$score."'>";
$output.=imagestar($score,$factor,'./').' ';
$sql = 'SELECT data FROM ISITITLE WHERE id='.$id." group by data";
$sql = "SELECT data FROM $titles_table WHERE id=".$id." group by data";
foreach ($base->query($sql) as $row) {
$output.='<a href="default_doc_details.php?gexf='.urlencode($gexf).'&type='.urlencode($_GET["type"]).'&query='.urlencode($query).'&id='.$id.'">'.$row['data']." </a> ";
......@@ -96,7 +91,7 @@ foreach ($wos_ids as $id => $score) {
}
// get the authors
$sql = 'SELECT data FROM ISIAUTHOR WHERE id='.$id;
$sql = "SELECT data FROM $author_table WHERE id=".$id;
foreach ($base->query($sql) as $row) {
$output.=strtoupper($row['data']).', ';
}
......
......@@ -14,25 +14,6 @@ if ($output_mode == "json") {
header('Content-Type: application/json');
}
$dbtype = null;
if (array_key_exists('reldbtype', $my_conf[$ntid])) {
$dbtype = $my_conf[$ntid]['reldbtype'];
}
else {
$guess_src = '';
if (array_key_exists('dbtype', $_GET)) {
$dbtype = $_GET['dbtype'];
$guess_src = "via url parameters";
}
else {
$dbtype = 'csv'; // new default
$guess_src = "by default";
}
errmsg("not filled", "$gexf -> node$ntid -> 'reldbtype'", "...Assuming dbtype is $dbtype ($guess_src).");
}
if ($dbtype == "CortextDB") {
$base = new PDO("sqlite:".$mainpath.$graphdb);
include('default_div.php');
......@@ -42,13 +23,20 @@ else {
// to index: the union of "searchable columns" qcols for all nodetypes
$idxcolsbytype = [];
for ($i = 0; $i < $ntypes ; $i++) {
if ($my_conf[$i]['active']) {
$idxcolsbytype[$i] = [];
$idxcolsbytype[$i] = $my_conf[$i]['reldbqcols'];
// if nodetype is active
if (count($my_conf["node".$i])) {
// ... and well-formed
if (array_key_exists('qcols', $my_conf["node".$i][$dbtype])) {
$idxcolsbytype[$i] = $my_conf["node".$i][$dbtype]['qcols'];
}
else {
echo("<p>Your settings for relatedDocsType are set on a local database,
but your servermenu file does not provide any information about
the CSV or DB table to query for related documents
(on nodetypeId ".$i.")</p>");
}
}
// else {
// echo("no nodetype ".$i."<br>");
// }
}
if (! $idxcolsbytype) {
......@@ -98,7 +86,7 @@ else {
// DO THE SEARCH
// -------------
$searchcols = $my_conf[$ntid]['reldbqcols'];
$searchcols = $my_conf["node".$ntid][$dbtype]['qcols'];
// a - split the query
$qtokens = preg_split('/\W/', $_GET["query"]);
......
......@@ -21,7 +21,10 @@ $mainpath=dirname(dirname(getcwd()))."/"; // default fs path to ProjectExplorer
$project_menu_path = "db.json";
// 3 - others
$ntypes = 2; // max node types
$ntypes = 2; // max node types (node0 & node1)
// accepted entries in db.json -> source -> reldbs -> dbtype
$supported_dbtypes = ['csv', 'CortextDB'];
// number of docs to display setting
$max_item_displayed = 7;
......@@ -38,30 +41,39 @@ $memport = 11211;
// CONFIGURATION PARAMS
// --------------------
// parse db.json project menu and create a conf by file
$conf = read_conf($mainpath.$project_menu_path, $ntypes);
$conf = read_conf($mainpath.$project_menu_path, $ntypes, $supported_dbtypes);
// =======================================
// echodump("== READ CONF ==<br>", $conf);
// =======================================
$gexf= str_replace('"','',$_GET["gexf"]);
$ndtype = $_GET["type"];
$ntid = null;
$ntid = $_GET["ndtype"];
$dbtype = $_GET["dbtype"];
$ndtype = null;
$my_conf = null;
// legacy types => generic types with 0 as default
if ($ndtype == 'social') { $ntid = 1; }
else { $ntid = 0; }
// new types => legacy types (with semantic as default)
if ($ntid == 0) { $ndtype = 'social' ; }
else { $ndtype = 'semantic'; }
// echodump("params: node type id", $ntid);
if (! $conf[$gexf][$ntid]['active']) {
errmsg("not active", "your graph ($gexf)");
if (! count($conf[$gexf]['node'.$ntid])) {
errmsg("has no php reldbs configured for nodetype $ntid", "your graph ($gexf)");
exit(1);
}
else if (! array_key_exists($dbtype, $conf[$gexf]['node'.$ntid])) {
errmsg("reldbs isn't configured for nodes of type $ntid and dbtype $dbtype", "your graph ($gexf)");
exit(1);
}
else if (! array_key_exists('file', $conf[$gexf]['node'.$ntid][$dbtype])) {
errmsg("reldb has no DB file for nodes of type $ntid and dbtype $dbtype", "your graph ($gexf)");
exit(1);
}
else {
$my_conf = $conf[$gexf];
$graphdb = $my_conf[$ntid]['dir'].'/'.$my_conf[$ntid]['reldbfile'];
$graphdb = $my_conf['node'.$ntid][$dbtype]['file'];
}
// echodump("params: reldb", $graphdb);
......
......@@ -11,13 +11,15 @@ function echodump($title, $anyObj) {
function errmsg($message, $context, $more = "") {
echo "<p class='micromessage'>The relatedDocs DB conf for $context is $message
echo "<p class='micromessage'>The relatedDocs DB conf for $context $message
(please read A-Introduction/servermenu_config.md).<br>$more</p>";
}
// reading db.json associations
// source graph file <=> (db, dbtype, cols) as relatedDocs php API
function read_conf($filepath, $ntypes) {
// 1) we filter db.json entries by active/inactive nodetypes
// 2) we filter db.json entries by supported dbtypes
function read_conf($filepath, $ntypes, $our_dbtypes) {
$project_menu_fh = fopen($filepath, "r");
$json_st = '';
while (!feof($project_menu_fh)) {
......@@ -45,21 +47,40 @@ function read_conf($filepath, $ntypes) {
// node0 <=> classic type 'semantic'
// node1 <=> classic type 'social'
$conf[$gpath] = array($ntypes);
// NB2: now, additionally, each nodetype can have several dbs configured!
// $conf[$gpath] = array($ntypes);
for ($i = 0 ; $i < $ntypes ; $i++) {
// check node0, node1, etc to see if they at least have a reldbfile
if (! property_exists($graph_conf, 'node'.$i)
|| ! property_exists($graph_conf->{'node'.$i}, 'reldbfile') ) {
$conf[$gpath][$i] = array('active' => false);
continue;
$conf[$gpath]['node'.$i] = array();
// check node0, node1, etc to see if they at least have a reldb conf
if (property_exists($graph_conf, 'node'.$i)
&& property_exists($graph_conf->{'node'.$i}, 'reldbs') ) {
// check for each configured db that is listed under reldbs
$dbinfos = $graph_conf->{'node'.$i}->reldbs;
foreach ($dbinfos as $dbtype => $dbconf) {
// filter: supported and valid conf cases
if (in_array($dbtype, $our_dbtypes) && $dbconf->file) {
// we have a file for this nodetype and dbtype: copy entire conf
$conf[$gpath]['node'.$i][$dbtype] = (array)$dbconf ;
// update files path with dirpath
if (array_key_exists('file', $conf[$gpath]['node'.$i][$dbtype])) {
$relpath = $conf[$gpath]['node'.$i][$dbtype]['file'];
$conf[$gpath]['node'.$i][$dbtype]['file'] = $project_dir.'/'.$relpath;
}
}
}
// echodump("got conf", $conf[$gpath]['node'.$i]);
}
else {
// we have a file for this type: copy entire conf
$conf[$gpath][$i] = (array)$graph_conf->{'node'.$i};
$conf[$gpath][$i]['active'] = true;
$conf[$gpath][$i]['dir'] = $project_dir;
else {
// empty array <=> inactive nodetype or no supported dbs
$conf[$gpath]['node'.$i] = array ();
}
// POSS here info on higher level may be propagated for lower ones
// (ex: if dbtype is on the project level, its value should count
......@@ -67,6 +88,8 @@ function read_conf($filepath, $ntypes) {
}
}
}
// echodump("full conf", $conf);
return $conf;
}
......
......@@ -26,7 +26,7 @@
background-color: #BBB;
}
#topPapers {
.topPapers {
-moz-box-shadow: none ;
-webkit-box-shadow: none ;
box-shadow: none;
......
......@@ -325,12 +325,14 @@ ul.infoitems {
border-right: 1px solid #222;
}
#topPapers{
.topPapers{
display: none;
color:black;
}
.tab-pane {
transition: height 0.5s ease-out;
}
......
......@@ -369,53 +369,6 @@ var TinaWebJS = function ( sigmacanvas ) {
defaultTab: 'li#tabneigh'
});
// initialize reldocs tabs
if (TW.conf.getRelatedDocs) {
// POSSible: create them on a settings list (currently in the HTML)
let ul = document.getElementById('reldocs-tabs')
let tabEls = []
for (var possibleAPI in TW.conf.relatedDocsAPIS) {
// create valid tabs
let newLi = document.createElement('li')
newLi.setAttribute("role", "presentation")
let newRDTab = document.createElement('a')
newRDTab.text = possibleAPI
newRDTab.href = '#topPapers'
newRDTab.setAttribute("role", "tab")
newRDTab.dataset.toggle = 'tab'
newRDTab.dataset.reldocstype = possibleAPI
if (possibleAPI == TW.conf.relatedDocsType) {
newLi.setAttribute("class", "active")
}
// add to DOM
ul.append(newLi)
newLi.append(newRDTab)
// keep access
TW.gui.reldocTabs[possibleAPI] = newRDTab
}
// afterwards to get all types and the active type
for (let rdtype in TW.gui.reldocTabs) {
let tab = TW.gui.reldocTabs[rdtype]
// init toggle mecanisms (bootstrap.native/#componentTab)
// (just used for the tabs active/inactive handling,
// content is *always* topPapers and we modify it ourselves)
new Tab(tab);
// add handler to switch relatedDocsType
tab.addEventListener('click', function(){
TW.conf.relatedDocsType = this.dataset.reldocstype
getTopPapers()
})
}
}
// show any already existing panel
document.getElementById("graph-panels").style.display = "block"
......@@ -745,11 +698,6 @@ var TinaWebJS = function ( sigmacanvas ) {
}
})
// select currently preferred reldoc tab
if (TW.conf.getRelatedDocs && document.getElementById('reldocs-tabs')) {
TW.gui.reldocTabs[TW.conf.relatedDocsType].Tab.show()
}
$("#tips").html(getTips());
// we start with no selection
......@@ -915,7 +863,7 @@ var TinaWebJS = function ( sigmacanvas ) {
// to init local, instance-related listeners (need to run at new sigma instance)
// args: @partialGraph = a sigma instance
this.initSigmaListeners = function(partialGraph, initialActivetypes, initialActivereltypes) {
this.initSigmaListeners = function(partialGraph, initialActivetypes, initialActivereltypes, optionalConfEntry) {
// console.log("initSigmaListeners TW.categories / types array / reltypeskeys array: ", TW.categories, initialActivetypes, initialActivereltypes)
......@@ -1105,10 +1053,18 @@ var TinaWebJS = function ( sigmacanvas ) {
}
});
if (TW.conf.filterSliders) {
// initialize reldocs tabs if declared in additionalConf
if (TW.conf.getRelatedDocs) {
let moreConfKey = optionalConfEntry || TW.File
// the index of the first cat to be active (e.g. '1')
let activeId = initialActivetypes.indexOf(true)
resetTabs(initialActivetypes, TW.gmenuInfos[moreConfKey])
}
// select currently active sliders
if (TW.conf.filterSliders) {
// also for all active cats
for (let activeId in initialActivetypes) {
if (initialActivetypes[activeId]) {
// args: for display: target div ,
......
......@@ -25,7 +25,7 @@ TW.gui.foldedSide=false;
TW.gui.manuallyChecked = false;
TW.gui.handpickedcolor = false; // <= changes edge rendering strategy
TW.gui.lastFilters = {}
TW.gui.reldocTabs = []
TW.gui.reldocTabs = [{}, {}] // <= by nodetype and then dbtype
TW.gui.sizeRatios = [1,1] // sizeRatios per nodetype
......@@ -248,7 +248,9 @@ function createFilechooserEl () {
TW.resetGraph()
// run
mainStartGraph(theFormat, rdr.result, TW.instance)
mainStartGraph(theFormat, rdr.result, null, TW.instance)
// NB 3rd arg null = we got no additional conf for this "unknown" file
writeLabel(`Local file: ${clientLocalGraphFile.name}`)
}
......@@ -1126,6 +1128,120 @@ function createWaitIcon(idname, width) {
return icon
}
activateRDTab = function(elTgt) {
let relDbType = elTgt.dataset.reldocstype
let ndTypeId = elTgt.dataset.nodetype
let tabs = document.querySelectorAll('ul#reldocs-tabs > li')
for (var tabLi of tabs) {
if (tabLi != elTgt.parentNode)
tabLi.classList.remove("active")
else
tabLi.classList.add("active")
}
let divs = document.querySelectorAll("div#reldocs-boxes > div.tab-pane")
let theId = `rd-${ndTypeId}-${relDbType}`
// POSS: animate with transitions here
for (var tabDiv of divs) {
if (tabDiv.id != theId)
tabDiv.classList.remove("active", "in")
else
tabDiv.classList.add("active", "in")
}
}
// set up tabs for a given activetypes state and db.json entry
function resetTabs(activetypes, dbconf) {
let ul = document.getElementById('reldocs-tabs')
let divs = document.getElementById('reldocs-boxes')
// remove any previous tabs
ul.innerHTML = ""
divs.innerHTML = ""
TW.gui.reldocTabs = [{},{}]
// used with no args for full reset
if (!activetypes || !dbconf) {
return
}
console.log("dbconf for this source", dbconf)
// for all active nodetypes
for (let nodetypeId in activetypes) {
if (activetypes[nodetypeId]) {
let additionalConf = dbconf[nodetypeId]
if (TW.conf.debug.logSettings)
console.log ("additionalConf for this source", additionalConf)
let possibleAPIs = []
if (additionalConf.reldbs) {
possibleAPIs = additionalConf.reldbs
// 3 vars to know which one to activate
let nAPIs = Object.keys(possibleAPIs).length
let iAPI = 0
let didActiveFlag = false
for (var possibleAPI in possibleAPIs){
// the tab's id
let tabref = `rd-${nodetypeId}-${possibleAPI}`
// create valid tabs
let newLi = document.createElement('li')
newLi.setAttribute("role", "presentation")
let newRDTab = document.createElement('a')
newRDTab.text = `${possibleAPI} (${nodetypeId==0?'sem':'soc'})`
newRDTab.setAttribute("role", "tab")
newRDTab.dataset.reldocstype = possibleAPI
newRDTab.dataset.nodetype = nodetypeId
newRDTab.setAttribute("class", `for-nodecategory-${nodetypeId}`)
// newRDTab.dataset.toggle = 'tab' // only needed if using bootstrap
// keep access
TW.gui.reldocTabs[nodetypeId][possibleAPI] = newRDTab
// create corresponding content box
let newContentDiv = document.createElement('div')
newContentDiv.setAttribute("role", "tabpanel")
newContentDiv.setAttribute("class", "topPapers tab-pane")
newContentDiv.id = tabref
// add to DOM
ul.append(newLi)
newLi.append(newRDTab)
divs.append(newContentDiv)
// select currently preferred reldoc tabs
// (we activate if favorite or if no matching favorite and last)
if (possibleAPI == TW.conf.relatedDocsType
|| (!didActiveFlag && iAPI == nAPIs - 1)) {
newLi.classList.add("active")
newContentDiv.classList.add("active", "in")
didActiveFlag = true
}
// add handler to switch relatedDocsType
newRDTab.addEventListener('click', function(e){
// tab mechanism
activateRDTab(e.target)
// no need to run associated query:
// (updateRelatedNodesPanel did it at selection time)
})
iAPI++
}
}
}
}
}
function jsActionOnGexfSelector(graphBasename){
let graphPath = TW.gmenuPaths[graphBasename] || graphBasename+".gexf"
......@@ -1141,20 +1257,8 @@ function jsActionOnGexfSelector(graphBasename){
// remove any previous instance and flags
TW.resetGraph()
// override default categories with the ones from db.json
if (TW.gmenuInfos[graphPath]) {
if (TW.gmenuInfos[graphPath][0] && TW.gmenuInfos[graphPath][0].name) {
TW.conf.catSem = TW.gmenuInfos[graphPath][0].name
console.log("new catSem:", TW.conf.catSem)
}
if (TW.gmenuInfos[graphPath][1] && TW.gmenuInfos[graphPath][1].name) {
TW.conf.catSoc = TW.gmenuInfos[graphPath][1].name
console.log("new catSoc:", TW.conf.catSoc)
}
}
mainStartGraph(newDataRes["format"], newDataRes["data"], TW.instance)
writeLabel(graphBasename)
TW.File = graphPath
mainStartGraph(newDataRes["format"], newDataRes["data"], TW.File, TW.instance)
writeLabel(graphBasename)
}
//============================= </OTHER ACTIONS > =============================//
......@@ -298,7 +298,6 @@ saferString = function(string) {
}
/**
* function to test if file exists
* via XHR, enhanced from http://stackoverflow.com/questions/5115141
......
......@@ -60,6 +60,16 @@ TW.pushGUIState = function( args ) {
}
}
// recreate tabs after type changes
// db.json conf entry (£TODO unify s/(?:TW.File|inConfKey)/TW.sourceId/g)
let inConfKey = (sourcemode != "api") ? TW.File : 'graphapi/default'
if (TW.conf.getRelatedDocs
&& !isUndef(args.activetypes)
&& TW.gmenuInfos[inConfKey]) {
resetTabs(newState.activetypes, TW.gmenuInfos[inConfKey])
}
// 4) store it in TW.states
TW.states.push(newState)
......@@ -78,16 +88,12 @@ TW.resetGraph = function() {
// remove the selection
cancelSelection(false, {norender: true})
// and set tabs to none
resetTabs()
// call the sigma graph clearing
TW.instance.clearSigma()
// TW.categories, TW.Nodes and TW.Edges will be reset by mainStartGraph
// reset remaining global vars
TW.labels = []
TW.Relations = {}
TW.states = [TW.initialSystemState]
// reset rendering gui flags
TW.gui.selectionActive = false
TW.gui.handpickedcolor = false
......@@ -99,6 +105,90 @@ TW.resetGraph = function() {
// reset other gui flags
TW.gui.checkBox=false
TW.gui.lastFilters = {}
// remaining global vars will be reset by new graph mainStartGraph
}
// read all sources' detailed confs
// -> list of source paths available
// -> declared nodetypes
// -> declared rDocs conf
function readMenu(infofile) {
// example entry
// --------------
// "data/gargistex": {
// "first" : "shale_and_ice.gexf",
// "graphs":{
// "shale_and_ice.gexf": {
// "node0": {
// "name": "terms",
// "reldbs": {
// "csv": {
// "file": "shale_and_ice.csv",
// "qcols": ["title"],
// "template": "bib_details"
// },
// "twitter": {}
// }
// }
// }
// }
// }
if (TW.conf.debug.logFetchers) console.info(`attempting to load filemenu ${infofile}`)
var preRES = AjaxSync({ url: infofile, datatype:"json" });
if (preRES['OK'] && preRES.data) {
if (TW.conf.debug.logFetchers) console.log('initial AjaxSync result preRES', preRES)
}
// 1 - store the first one (b/c we'll lose order)
var first_file = "", first_dir = "" , first_path = ""
for( var path in preRES.data ) {
if (TW.conf.debug.logFetchers) console.log("db.json path", path)
first_file = preRES.data[path]["first"] || Object.keys(preRES.data[path]["graphs"])[0]
first_dir = path
break;
}
first_path = first_dir+"/"+first_file
// 2 - process all the paths and associated confs
let paths = {}
let details = {}
for( var path in preRES.data ) {
var theGraphs = preRES.data[path]["graphs"]
for(var aGraph in theGraphs) {
var graphBasename = aGraph.replace(/\.gexf$/, "") // more human-readable in the menu
paths[graphBasename] = path+"/"+aGraph
// ex : "RiskV2PageRank1000.gexf":data/AXA/RiskV2PageRank1000.gexf
// (we assume there's no duplicate basenames)
if (TW.conf.debug.logSettings)
console.log("db conf entry: "+graphBasename)
// for associated LocalDB php queries: CSV (or CortextDBs sql)
if (theGraphs[aGraph]) {
let gSrcEntry = theGraphs[aGraph]
details[path+"/"+aGraph] = new Array(2)
if (gSrcEntry.node0) {
details[path+"/"+aGraph][0] = gSrcEntry.node0
}
if (gSrcEntry.node1) {
details[path+"/"+aGraph][1] = gSrcEntry.node1
}
}
else {
details[path+"/"+aGraph] = null
}
}
}
return [paths, details, first_path]
}
......@@ -183,20 +273,6 @@ function getNActive(someState) {
return TW.SystemState().activetypes.filter(function(bool){return bool}).length
}
// transitional function:
// ----------------------
// Goal: determine if a single nodetype or global activetype is semantic or social
// Explanation: some older functions (eg topPapers) used this distinction
// (via semi-deprecated global swclickActual),
// but the specification changed twice since then:
// - 1st change: types described as type 0 and type 1 and possible default type
// - 2nd change default type of monopartite case changed from document to semantic
function swActual(aNodetype) {
return (aNodetype == TW.categories[0]) ? 'semantic' : 'social'
}
// changes attributes of nodes and edges to remove active, highlight and activeEdge flags
// NB: "low-level" <=> by design, does NOT change the state, gui nor global flag
......@@ -307,6 +383,7 @@ function clearHover() {
// nodes information div
// POSS: merge with hit_templates from additional conf
function htmlfied_nodesatts(elems){
var socnodes=[]
......@@ -321,7 +398,7 @@ function htmlfied_nodesatts(elems){
var id=elems[i]
var node = TW.Nodes[id]
if(swActual(node.type) == 'social'){
if(TW.catDict[node.type] == 1){
information += '<li><b>' + node.label + '</b></li>';
if(node.htmlCont==""){
if (!isUndef(node.level)) {
......@@ -332,14 +409,13 @@ function htmlfied_nodesatts(elems){
}
socnodes.push(information)
}
if(swActual(node.type) == 'semantic'){
information += '<li><b>' + node.label + '</b></li>';
let google='<a href=http://www.google.com/#hl=en&source=hp&q=%20'+node.label.replace(" ","+")+'%20><img src="'+TW.conf.paths.ourlibs+'/img/google.png"></img></a>';
let wiki = '<a href=http://en.wikipedia.org/wiki/'+node.label.replace(" ","_")+'><img src="'+TW.conf.paths.ourlibs+'/img/wikipedia.png"></img></a>';
let flickr= '<a href=http://www.flickr.com/search/?w=all&q='+node.label.replace(" ","+")+'><img src="'+TW.conf.paths.ourlibs+'/img/flickr.png"></img></a>';
information += '<li>'+google+"&nbsp;"+wiki+"&nbsp;"+flickr+'</li><br>';
semnodes.push(information)
else {
information += '<li><b>' + node.label + '</b></li>';
let google='<a href=http://www.google.com/#hl=en&source=hp&q=%20'+node.label.replace(" ","+")+'%20><img src="'+TW.conf.paths.ourlibs+'/img/google.png"></img></a>';
let wiki = '<a href=http://en.wikipedia.org/wiki/'+node.label.replace(" ","_")+'><img src="'+TW.conf.paths.ourlibs+'/img/wikipedia.png"></img></a>';
let flickr= '<a href=http://www.flickr.com/search/?w=all&q='+node.label.replace(" ","+")+'><img src="'+TW.conf.paths.ourlibs+'/img/flickr.png"></img></a>';
information += '<li>'+google+"&nbsp;"+wiki+"&nbsp;"+flickr+'</li><br>';
semnodes.push(information)
}
}
......@@ -383,6 +459,11 @@ function htmlProportionalLabels(elems , limit, selectableFlag) {
fontSize = 1
}
// normalize sizes by special attribute "normfactor" if present
if (TW.Nodes[id].attributes.normfactor) {
fontSize = fontSize * TW.Nodes[id].attributes.normfactor
}
// debug
// console.log('htmlfied_tagcloud (',id, TW.Nodes[id].label,') freq',frec,' fontSize', fontSize)
......@@ -442,12 +523,30 @@ function updateRelatedNodesPanel( sels , same, oppos ) {
$("#information").html(informationDIV);
if (TW.conf.getRelatedDocs) {
$("#reldocs-tabs-wrapper").show();
$("#topPapers").show();
getTopPapers()
let rdTabCount = 0
// update all related docs tabs
for (let ntId in TW.SystemState().activetypes) {
if (TW.SystemState().activetypes[ntId]) {
let qWords = queryForType(ntId)
// console.log("available topPapers tabs:", TW.gui.reldocTabs[ntId])
for (let relDbType in TW.gui.reldocTabs[ntId]) {
let tabId = `rd-${ntId}-${relDbType}`
rdTabCount ++
// if not already done
if (! TW.lastRelDocQueries[tabId]
|| TW.lastRelDocQueries[tabId] != qWords) {
getTopPapers(qWords, ntId, relDbType, tabId)
// memoize
TW.lastRelDocQueries[tabId] = qWords
}
}
}
}
if (rdTabCount > 0) $("#reldocs-tabs-wrapper").show();
}
else {
$("#topPapers").hide()
$("#reldocs-tabs-wrapper").hide();
}
}
......
......@@ -3,7 +3,7 @@
// (for instance loop on full gexf in scanGexf then again in dictfyGexf)
// Level-01
var ParseCustom = function ( format , data ) {
var ParseCustom = function ( format , data, optionalConf ) {
if (format == 'gexf') {
this.data = $.parseXML(data)
......@@ -14,9 +14,13 @@ var ParseCustom = function ( format , data ) {
this.format = format;
this.nbCats = 0;
this.additionalConf = optionalConf
// input = GEXFstring
this.getGEXFCategories = function() {
return scanGexf( this.data );
let observedCategories = scanGexf(this.data)
let finalCategories = sortNodeTypes(observedCategories, this.additionalConf)
return finalCategories;
}// output = {'cats':[ "cat1" , "cat2" , ...], 'rev': {cat1: 0, cat2: 1...}}
......@@ -29,7 +33,9 @@ var ParseCustom = function ( format , data ) {
// input = JSONstring
this.getJSONCategories = function(json) {
return scanJSON( this.data );
let observedCategories = scanJSON(this.data)
let finalCategories = sortNodeTypes(observedCategories, this.additionalConf)
return finalCategories;
}// output = {'cats':[ "cat1" , "cat2" , ...], 'rev': {cat1: 0, cat2: 1...}}
......@@ -201,8 +207,7 @@ function scanGexf(gexfContent) {
}
}
// sorting observed json node types into Sem (=> 1)/Soc (=> 0)
return sortNodeTypes(categoriesDict)
return categoriesDict
}
// sorting observed node types into Sem/Soc
......@@ -212,10 +217,25 @@ function scanGexf(gexfContent) {
// expected content: usually just a few cats over all nodes
// ex: terms
// ex: ISItermsriskV2_140 & ISItermsriskV2_140
function sortNodeTypes(observedTypesDict) {
function sortNodeTypes(observedTypesDict, optConf) {
var observedTypes = Object.keys(observedTypesDict)
observedTypes.sort(function(a,b) {return observedTypesDict[b] - observedTypesDict[a]})
let nbNodeTypes = 2
var declaredTypes = []
for (var i = 0 ; i < nbNodeTypes ; i++ ) {
if (optConf[i] && optConf[i].name) {
declaredTypes[i] = optConf[i].name
if (TW.conf.debug.logSettings)
console.log("expected cat (from db.json addtional conf)", i, declaredTypes[i])
}
else {
declaredTypes[i] = TW.conf[i == 0 ? 'catSem' : 'catSoc']
if (TW.conf.debug.logSettings)
console.log("expected cat (from settings_explorer defaults)", i, declaredTypes[i])
}
}
var newcats = []
var catDict = {}
......@@ -236,13 +256,14 @@ function sortNodeTypes(observedTypesDict) {
// allows multiple node types, with an "all the rest" node1
// try stipulated cats, then fallbacks
if (observedTypesDict[TW.conf.catSem]) {
newcats[0] = TW.conf.catSem;
catDict[TW.conf.catSem] = 0;
// possible: loop
if (observedTypesDict[declaredTypes[0]]) {
newcats[0] = declaredTypes[0];
catDict[declaredTypes[0]] = 0;
}
if (observedTypesDict[TW.conf.catSoc]) {
newcats[1] = TW.conf.catSoc;
catDict[TW.conf.catSoc] = 1;
if (observedTypesDict[declaredTypes[1]]) {
newcats[1] = declaredTypes[1];
catDict[declaredTypes[1]] = 1;
}
// NB: type for nodes0 will be the majority one by default, unless taken
......@@ -630,10 +651,16 @@ function dictfyGexf( gexf , categories ){
// NB nodesByType lists arrays of ids per nodetype
// (equivalent to TW.partialGraph.graph.getNodesByType but on full nodeset)
for(var i in categories) {
catDict[categories[i]] = i
nodesByType[i] = []
let subCats = categories[i].split(/\//g)
for (var j in subCats) {
catDict[subCats[j]] = i
}
}
var elsNodes = gexf.getElementsByTagName('nodes') // The list of xml nodes 'nodes' (plural)
TW.labels = [];
......@@ -1018,8 +1045,7 @@ function scanJSON( data ) {
}
}
// sorting observed json node types into Sem (=> 1)/Soc (=> 0)
return sortNodeTypes(categoriesDict);
return categoriesDict
}
// Level-00
......@@ -1037,8 +1063,13 @@ function dictfyJSON( data , categories ) {
// NB nodesByType lists arrays of ids per nodetype
// (equivalent to TW.partialGraph.graph.getNodesByType but on full nodeset)
for(var i in categories) {
catDict[categories[i]] = i
nodesByType[i] = []
let subCats = categories[i].split(/\//g)
for (var j in subCats) {
catDict[subCats[j]] = i
}
}
// normalization, same as parseGexf
......@@ -1091,6 +1122,7 @@ function dictfyJSON( data , categories ) {
// record
nodes[node.id] = node;
if (!nodesByType[catDict[node.type]]) {
console.warn("unrecognized type:", node.type)
}
......
......@@ -97,11 +97,10 @@ var SigmaUtils = function () {
settings('labelSizeRatio') * size;
// apply type-specific size slider ratios
var typeId = TW.categories.indexOf(node.type) || 0
var typeId = TW.catDict[node.type] || 0
size *= TW.gui.sizeRatios[typeId]
fontSize *= TW.gui.sizeRatios[typeId]
if (!activeFlag && fontSize < settings('labelThreshold') * settings('labelSizeRatio'))
return;
......@@ -292,7 +291,7 @@ var SigmaUtils = function () {
// NB cost of this condition seems small:
// - without: [11 - 30] ms for 23 nodes
// - with : [11 - 33] ms for 23 nodes
var typeId = TW.categories.indexOf(node.type) || 0
var typeId = TW.catDict[node.type] || 0
// apply type-specific size slider ratios
nodeSize *= TW.gui.sizeRatios[typeId]
......@@ -421,7 +420,7 @@ var SigmaUtils = function () {
// largerall: our customized size boosts
// apply type-specific size slider ratios
var typeId = TW.categories.indexOf(node.type) || 0
var typeId = TW.catDict[node.type] || 0
size *= TW.gui.sizeRatios[typeId]
fontSize *= TW.gui.sizeRatios[typeId]
......