Commit f3d96c2e authored by Romain Loth

WIP separated reldb entries in json for tabs by db kind AND nodetype

parent dba5a3c1
@@ -5,19 +5,25 @@
     "shale_and_ice.gexf": {
       "node0": {
         "name": "terms",
-        "reldbtype": "csv",
-        "reldbfile": "shale_and_ice.csv",
-        "reldbqcols": ["title"],
-        "reltemplate": "bib_details"
+        "reldbs": {
+          "csv": {
+            "file": "shale_and_ice.csv",
+            "qcols": ["title"],
+            "template": "bib_details"
+          }
+        }
       }
     },
     "model_calibration.gexf": {
       "node0": {
         "name": "terms",
-        "reldbtype": "csv",
-        "reldbfile": "model_calibration.csv",
-        "reldbqcols": ["title"],
-        "reltemplate": "bib_details"
+        "reldbs": {
+          "csv": {
+            "file": "model_calibration.csv",
+            "qcols": ["title"],
+            "template": "bib_details"
+          }
+        }
       }
     }
   }
@@ -37,17 +43,23 @@
     "mini_for_csv.gexf": {
      "node0": {
         "name": "term",
-        "reldbtype": "csv",
-        "reldbfile": "mini_for_csv.csv",
-        "reldbqcols": ["title","keywords","text"],
-        "reltemplate": "bib_details"
+        "reldbs": {
+          "csv": {
+            "file": "mini_for_csv.csv",
+            "qcols": ["title","keywords","text"],
+            "template": "bib_details"
+          }
+        }
       },
       "node1": {
         "name": "person",
-        "reldbtype": "csv",
-        "reldbfile": "mini_for_csv.csv",
-        "reldbqcols": ["author"],
-        "reltemplate": "bib_details"
+        "reldbs": {
+          "csv": {
+            "file": "mini_for_csv.csv",
+            "qcols": ["author"],
+            "template": "bib_details"
+          }
+        }
       }
     },
     "test_with_various_atts.gexf": {}
@@ -71,12 +83,15 @@
     "Maps_S_800.gexf": {
       "node0": {
         "name": "ISItermsWhitelistV2Oct_5 & ISItermsWhitelistV2Oct_5",
-        "reldbqtable": "ISItermsWhitelistV2Oct_5",
-        "reldbfile" : "wos_climate-change_title_2014-2015.db",
-        "reldbtype": "CortextDB",
+        "reldbs": {
+          "CortextDB": {
+            "file": "wos_climate-change_title_2014-2015.db",
+            "qtable": "ISItermsWhitelistV2Oct_5",
             "reltemplate": "cortext_with_link"
+          }
+        }
       }
     }
   }
 }
 }
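The commit title says the related-db entries are now split by db kind AND nodetype, but each db.json hunk above still configures a single db kind per nodetype. The sketch below is therefore hypothetical (file names and values invented, not taken from the diff): it shows one nodetype carrying both a csv and a CortextDB entry under the new reldbs key, decoded into objects the way read_conf below appears to do it.

```php
<?php
// Hypothetical db.json fragment (not from the diff above): one nodetype
// carrying both a "csv" and a "CortextDB" entry under the new "reldbs" key.
$json_st = <<<'JSON'
{
  "node0": {
    "name": "terms",
    "reldbs": {
      "csv":       { "file": "example.csv", "qcols": ["title"], "template": "bib_details" },
      "CortextDB": { "file": "example.db", "qtable": "ISIterms_example",
                     "reltemplate": "cortext_with_link" }
    }
  }
}
JSON;

$graph_conf = json_decode($json_st);

// each db kind is now addressed by its own key under reldbs
foreach ($graph_conf->node0->reldbs as $dbtype => $dbconf) {
    echo "$dbtype => {$dbconf->file}\n";   // csv => example.csv, CortextDB => example.db
}
```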
@@ -21,7 +21,10 @@ $mainpath=dirname(dirname(getcwd()))."/"; // default fs path to ProjectExplorer
 $project_menu_path = "db.json";

 // 3 - others
-$ntypes = 2; // max node types
+$ntypes = 2; // max node types (node0 & node1)
+
+// accepted entries in db.json -> source -> reldbs -> dbtype
+$supported_dbtypes = ['csv', 'CortextDB'];

 // number of docs to display setting
 $max_item_displayed = 7;
@@ -38,7 +41,7 @@ $memport = 11211;

 // CONFIGURATION PARAMS
 // --------------------
 // parse db.json project menu and create a conf by file
-$conf = read_conf($mainpath.$project_menu_path, $ntypes);
+$conf = read_conf($mainpath.$project_menu_path, $ntypes, $supported_dbtypes);
 // =======================================
 // echodump("== READ CONF ==<br>", $conf);
...
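For reference, a minimal sketch of the kind of whitelist check that $supported_dbtypes enables; the $dbtype value here is invented, and the real filtering happens in read_conf below via in_array.

```php
<?php
$supported_dbtypes = ['csv', 'CortextDB'];

// hypothetical dbtype key read from a db.json "reldbs" block
$dbtype = 'sqlite';

if (!in_array($dbtype, $supported_dbtypes)) {
    // unknown kinds are skipped instead of producing a broken conf entry
    error_log("relatedDocs: ignoring unsupported dbtype '$dbtype'");
}
```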
@@ -17,7 +17,7 @@ function errmsg($message, $context, $more = "") {

 // reading db.json associations
 // source graph file <=> (db, dbtype, cols) as relatedDocs php API
-function read_conf($filepath, $ntypes) {
+function read_conf($filepath, $ntypes, $our_dbtypes) {
   $project_menu_fh = fopen($filepath, "r");
   $json_st = '';
   while (!feof($project_menu_fh)) {
@@ -37,7 +37,7 @@ function read_conf($filepath, $ntypes) {
       continue;
     }

     foreach ($dir_items->graphs as $graph_file => $graph_conf){
-      // echodump("== $graph_file ==", $graph_conf);
+      echodump("== $graph_file ==", $graph_conf);

       $gpath = $project_dir.'/'.$graph_file;
@@ -45,21 +45,44 @@ function read_conf($filepath, $ntypes) {
       // node0 <=> classic type 'semantic'
       // node1 <=> classic type 'social'
+      // NB2 now additionnally, each nodetype can have several dbs configured !
       $conf[$gpath] = array($ntypes);

       for ($i = 0 ; $i < $ntypes ; $i++) {
-        // check node0, node1, etc to see if they at least have a reldbfile
+        // check node0, node1, etc to see if they at least have a reldb conf
         if (! property_exists($graph_conf, 'node'.$i)
-          || ! property_exists($graph_conf->{'node'.$i}, 'reldbfile') ) {
-          $conf[$gpath][$i] = array('active' => false);
+          || ! property_exists($graph_conf->{'node'.$i}, 'reldbs') ) {
+          // all dbtypes inactive on this file and nodetype
+          foreach ($our_dbtypes as $dbtype) {
+            // $conf[$gpath][$i][$dbtype] = array('active' => false);
+            $conf[$gpath][$i] = array('active' => false);
+          }
           continue;
         }
         else {
-          // we have a file for this type: copy entire conf
-          $conf[$gpath][$i] = (array)$graph_conf->{'node'.$i};
-          $conf[$gpath][$i]['active'] = true;
-          $conf[$gpath][$i]['dir'] = $project_dir;
+          // further check for each configured db that is listed under reldbs
+          $dbinfos = $graph_conf->{'node'.$i}->reldbs;
+          foreach ($dbinfos as $dbtype => $dbconf) {
+            echodump("reldbtype", $dbtype);
+            echodump("reldbconf", $dbconf);
+            // valid conf cases
+            if (in_array($dbtype, $our_dbtypes) && $dbconf->file) {
+              // we have a file for this nodetype and dbtype: copy entire conf
+              $conf[$gpath][$i][$dbtype] = array();
+              $test = (array)$dbconf;
+              echodump("conf copy", $test);
+              // $conf[$gpath][$i][$dbtype] = (array)$dbconf;
+            }
+          }
+          echodump("got conf", $conf[$gpath][$i]);
+          // $conf[$gpath][$i]['active'] = true;
+          // $conf[$gpath][$i]['dir'] = $project_dir;
         }

         // POSS here info on higher level may be propagated for lower ones
         // (ex: if dbtype is on the project level, its value should count
...
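Once the commented-out copy line is restored, read_conf would presumably leave one sub-array per supported db kind under each nodetype. Below is a hypothetical sketch of the resulting $conf for the mini_for_csv.gexf entry above; the project path is invented, and the fate of the old 'active'/'dir' keys is still open in this WIP.

```php
<?php
// Hypothetical shape of $conf after read_conf(), assuming the
// "$conf[$gpath][$i][$dbtype] = (array)$dbconf;" line is re-enabled.
// The graph path below is illustrative only.
$conf = [
    'some_project_dir/mini_for_csv.gexf' => [
        0 => [                      // node0 (semantic type)
            'csv' => [
                'file'     => 'mini_for_csv.csv',
                'qcols'    => ['title', 'keywords', 'text'],
                'template' => 'bib_details',
            ],
        ],
        1 => [                      // node1 (social type)
            'csv' => [
                'file'     => 'mini_for_csv.csv',
                'qcols'    => ['author'],
                'template' => 'bib_details',
            ],
        ],
    ],
];

// a consumer can then pick the backend per graph, nodetype and db kind
foreach ($conf as $gpath => $bynode) {
    foreach ($bynode as $i => $dbkinds) {
        foreach ($dbkinds as $dbtype => $dbconf) {
            echo "$gpath node$i uses $dbtype -> {$dbconf['file']}\n";
        }
    }
}
```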
@@ -339,7 +339,7 @@ function set_ClustersLegend ( daclass, groupedByTicks ) {
 //     myFetcher('START', 'hello', function(aStr) {myFetcher(aStr, "world", displayFun)})
 // }

-function getTopPapers(){
+function getTopPapers(choosenAPI){
   // waiting image
   let image='<img style="display:block; margin: 0px auto;" src="twlibs/img/loader.gif"></img>';
   $("#topPapers").html(image);
@@ -371,13 +371,15 @@ function getTopPapers(){

   // do the first then the nested call
   topPapersFetcher(
-    swNodetypes[0],
-    qWordsbySwType[swNodetypes[0]],
-    [[],[]],
-    function(priorJsonHits) {
+    swNodetypes[0],                   // <= the queried nodetype
+    qWordsbySwType[swNodetypes[0]],   // <= the query as array of words
+    choosenAPI,                       // <= the API backend from db.json
+    [[],[]],                          // <= json hit arrays by nodetype
+    function(priorJsonHits) {         // <= the callback
       topPapersFetcher(
         swNodetypes[1],
         qWordsbySwType[swNodetypes[1]],
+        choosenAPI,
         priorJsonHits,
         displayTopPapers
       )
...