mirror of https://gitee.com/bigwinds/arangodb
Merge branch 'devel' of https://github.com/triAGENS/ArangoDB into devel
commit 638a0cb05f
@@ -1,6 +1,8 @@
v1.4
------

* issue #547: Javascript error in the web interface

* issue #550: Make AQL graph functions support key in addition to id

* issue #526: Unable to escape when an errorneous command is entered into the js shell

@@ -15,6 +17,8 @@ v1.4
v1.3.2 (2013-XX-XX)
-------------------

* issue #545: AQL FILTER unnecessary (?) loop

* issue #549: wrong return code with --daemon

@@ -10,7 +10,7 @@ Introduction to Hash Indexes{#IndexHashIntro}
This is an introduction to ArangoDB's hash indexes.

It is possible to define a hash index on one or more attributes (or paths) of a
documents. This hash is then used in queries to locate documents in O(1)
document. This hash index is then used in queries to locate documents in O(1)
operations. If the hash is unique, then no two documents are allowed to have the
same set of attribute values.

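As a quick, hedged illustration of the hash index described above (not part of this change; the collection and attribute names are invented), the index would be created and used from arangosh roughly like this:

    var users = db._create("users");          // invented example collection
    users.ensureHashIndex("username");        // non-unique hash index on one attribute path
    users.save({ username: "alice" });
    // equality lookups such as
    //   FOR u IN users FILTER u.username == "alice" RETURN u
    // can then locate matching documents in O(1) via the hash index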
@@ -2350,6 +2350,32 @@ static TRI_aql_attribute_name_t* GetAttributeName (TRI_aql_context_t* const cont
  return NULL;
}

////////////////////////////////////////////////////////////////////////////////
/// @brief check if two attribute access nodes refer to the same base variable
/// e.g. FILTER a.x == a.y
////////////////////////////////////////////////////////////////////////////////

static bool IsSameAttributeAccess (const TRI_aql_node_t* const lhs,
                                   const TRI_aql_node_t* const rhs) {
  assert(lhs != NULL);
  assert(rhs != NULL);

  if (lhs->_type == TRI_AQL_NODE_ATTRIBUTE_ACCESS &&
      rhs->_type == TRI_AQL_NODE_ATTRIBUTE_ACCESS) {

    TRI_aql_node_t* lNode = TRI_AQL_NODE_MEMBER(lhs, 0);
    TRI_aql_node_t* rNode = TRI_AQL_NODE_MEMBER(rhs, 0);

    if (lNode->_type == TRI_AQL_NODE_REFERENCE &&
        rNode->_type == TRI_AQL_NODE_REFERENCE &&
        TRI_EqualString(TRI_AQL_NODE_STRING(lNode), TRI_AQL_NODE_STRING(rNode))) {
      return true;
    }
  }

  return false;
}

////////////////////////////////////////////////////////////////////////////////
/// @brief process a condition node and recurse into its subnodes
///

@@ -2437,6 +2463,11 @@ static TRI_vector_pointer_t* ProcessNode (TRI_aql_context_t* const context,
        node2 = rhs;
        operator = node->_type;

        if (IsSameAttributeAccess(lhs, rhs)) {
          // we must not optimise something like FILTER a.x == a.x
          return NULL;
        }

        if (rhs->_type == TRI_AQL_NODE_REFERENCE || rhs->_type == TRI_AQL_NODE_ATTRIBUTE_ACCESS || rhs->_type == TRI_AQL_NODE_FCALL) {
          // expression of type reference|attribute access|fcall operator reference|attribute access|fcall
          useBoth = true;

@@ -2448,6 +2479,12 @@ static TRI_vector_pointer_t* ProcessNode (TRI_aql_context_t* const context,
        node1 = rhs;
        node2 = lhs;
        operator = TRI_ReverseOperatorRelationalAql(node->_type);

        if (IsSameAttributeAccess(lhs, rhs)) {
          // we must not optimise something like FILTER a.x == a.x
          return NULL;
        }

      TRI_ASSERT_MAINTAINER(operator != TRI_AQL_NODE_NOP);

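The IsSameAttributeAccess guard added above makes ProcessNode skip conditions whose two sides are attribute accesses on the same base variable, so the optimiser no longer treats them as indexable access conditions (cf. issue #545). A hypothetical query that exercises the new check; the collection name is invented:

    // FILTER compares two attributes of the same document (same base variable "doc"),
    // so it must be evaluated per document and cannot become an index lookup
    var stmt = db._createStatement({
      query: "FOR doc IN mycollection FILTER doc.a == doc.b RETURN doc"
    });
    var result = stmt.execute().toArray();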
@@ -195,14 +195,6 @@ static uint64_t HashElement (TRI_hash_array_t* array,
/// @{
////////////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////////////
/// @brief size of a cache line, in bytes
/// the memory acquired for the hash table is aligned to a multiple of this
/// value
////////////////////////////////////////////////////////////////////////////////

#define CACHE_LINE_SIZE (64)

////////////////////////////////////////////////////////////////////////////////
/// @brief initial preallocation size of the hash table when the table is
/// first created

@@ -273,7 +265,7 @@ static int AllocateTable (TRI_hash_array_t* array, size_t numElements) {
  TRI_hash_index_element_t* table;

  table = TRI_Allocate(TRI_UNKNOWN_MEM_ZONE,
                       CACHE_LINE_SIZE + (sizeof(TRI_hash_index_element_t) * numElements),
                       sizeof(TRI_hash_index_element_t) * numElements,
                       true);

  if (table == NULL) {

@@ -4218,7 +4218,7 @@ static v8::Handle<v8::Value> JS_EnsureGeoConstraintVocbaseCol (v8::Arguments con
/// uniqueness is violated. If any attribute value is null for a document, this
/// document is ignored by the index.
///
/// Note that non-existing attribute paths in a document are treat as if the
/// Note that non-existing attribute paths in a document are treated as if the
/// value were @LIT{null}.
///
/// In case that the index was successfully created, the index identifier is

@@ -4246,10 +4246,10 @@ static v8::Handle<v8::Value> JS_LookupUniqueConstraintVocbaseCol (v8::Arguments
///
/// @FUN{ensureHashIndex(@FA{field1}, @FA{field2}, ...,@FA{fieldn})}
///
/// Creates a unique hash index on all documents using @FA{field1}, @FA{field2},
/// Creates a non-unique hash index on all documents using @FA{field1}, @FA{field2},
/// ... as attribute paths. At least one attribute path must be given.
///
/// Note that non-existing attribute paths in a document are treat as if the
/// Note that non-existing attribute paths in a document are treated as if the
/// value were @LIT{null}.
///
/// In case that the index was successfully created, the index identifier

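The wording fixes above concern how these index types treat missing attributes. A hedged arangosh sketch of the documented behaviour, assuming the collection API names shown in the doc comments; collection and attribute names are invented:

    var c = db._create("accounts");              // invented example collection
    c.ensureUniqueConstraint("email");           // unique hash index on "email"
    c.save({ email: "a@example.org" });          // indexed normally
    c.save({ name: "no email" });                // missing path is treated as null ...
    c.save({ name: "still no email" });          // ... and such documents are ignored by the unique index
    c.ensureHashIndex("name");                   // non-unique hash index, as the corrected text says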
@@ -1,3 +1,5 @@
/*jslint indent: 2, nomen: true, maxlen: 100, sloppy: true, vars: true, white: true, plusplus: true */
/*global window, Backbone */
window.FoxxCollection = Backbone.Collection.extend({
  model: window.Foxx,

@ -0,0 +1,387 @@
|
|||
/*jslint indent: 2, nomen: true, maxlen: 100, white: true plusplus: true */
|
||||
/*global $, _ */
|
||||
/*global NodeReducer */
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief Graph functionality
|
||||
///
|
||||
/// @file
|
||||
///
|
||||
/// DISCLAIMER
|
||||
///
|
||||
/// Copyright 2010-2012 triagens GmbH, Cologne, Germany
|
||||
///
|
||||
/// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
/// you may not use this file except in compliance with the License.
|
||||
/// You may obtain a copy of the License at
|
||||
///
|
||||
/// http://www.apache.org/licenses/LICENSE-2.0
|
||||
///
|
||||
/// Unless required by applicable law or agreed to in writing, software
|
||||
/// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
/// See the License for the specific language governing permissions and
|
||||
/// limitations under the License.
|
||||
///
|
||||
/// Copyright holder is triAGENS GmbH, Cologne, Germany
|
||||
///
|
||||
/// @author Michael Hackstein
|
||||
/// @author Copyright 2011-2013, triAGENS GmbH, Cologne, Germany
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
|
||||
function AbstractAdapter(nodes, edges) {
|
||||
"use strict";
|
||||
|
||||
if (nodes === undefined) {
|
||||
throw "The nodes have to be given.";
|
||||
}
|
||||
if (edges === undefined) {
|
||||
throw "The edges have to be given.";
|
||||
}
|
||||
|
||||
var self = this,
|
||||
initialX = {},
|
||||
initialY = {},
|
||||
cachedCommunities = {},
|
||||
joinedInCommunities = {},
|
||||
limit,
|
||||
reducer,
|
||||
childLimit,
|
||||
exports = {},
|
||||
|
||||
setWidth = function(w) {
|
||||
initialX.range = w / 2;
|
||||
initialX.start = w / 4;
|
||||
initialX.getStart = function () {
|
||||
return this.start + Math.random() * this.range;
|
||||
};
|
||||
},
|
||||
|
||||
setHeight = function(h) {
|
||||
initialY.range = h / 2;
|
||||
initialY.start = h / 4;
|
||||
initialY.getStart = function () {
|
||||
return this.start + Math.random() * this.range;
|
||||
};
|
||||
},
|
||||
|
||||
findNode = function(id) {
|
||||
var intId = joinedInCommunities[id] || id,
|
||||
res = $.grep(nodes, function(e){
|
||||
return e._id === intId;
|
||||
});
|
||||
if (res.length === 0) {
|
||||
return false;
|
||||
}
|
||||
if (res.length === 1) {
|
||||
return res[0];
|
||||
}
|
||||
throw "Too many nodes with the same ID, should never happen";
|
||||
},
|
||||
|
||||
findEdge = function(id) {
|
||||
var res = $.grep(edges, function(e){
|
||||
return e._id === id;
|
||||
});
|
||||
if (res.length === 0) {
|
||||
return false;
|
||||
}
|
||||
if (res.length === 1) {
|
||||
return res[0];
|
||||
}
|
||||
throw "Too many edges with the same ID, should never happen";
|
||||
},
|
||||
|
||||
insertNode = function(data) {
|
||||
var node = {
|
||||
_data: data,
|
||||
_id: data._id
|
||||
},
|
||||
n = findNode(node._id);
|
||||
if (n) {
|
||||
return n;
|
||||
}
|
||||
node.x = initialX.getStart();
|
||||
node.y = initialY.getStart();
|
||||
nodes.push(node);
|
||||
node._outboundCounter = 0;
|
||||
node._inboundCounter = 0;
|
||||
return node;
|
||||
},
|
||||
|
||||
insertEdge = function(data) {
|
||||
var source,
|
||||
target,
|
||||
edge = {
|
||||
_data: data,
|
||||
_id: data._id
|
||||
},
|
||||
e = findEdge(edge._id),
|
||||
edgeToPush;
|
||||
if (e) {
|
||||
return e;
|
||||
}
|
||||
source = findNode(data._from);
|
||||
target = findNode(data._to);
|
||||
if (!source) {
|
||||
throw "Unable to insert Edge, source node not existing " + edge._from;
|
||||
}
|
||||
if (!target) {
|
||||
throw "Unable to insert Edge, target node not existing " + edge._to;
|
||||
}
|
||||
edge.source = source;
|
||||
edge.target = target;
|
||||
edges.push(edge);
|
||||
|
||||
|
||||
if (cachedCommunities[source._id] !== undefined) {
|
||||
edgeToPush = {};
|
||||
edgeToPush.type = "s";
|
||||
edgeToPush.id = edge._id;
|
||||
edgeToPush.source = $.grep(cachedCommunities[source._id].nodes, function(e){
|
||||
return e._id === data._from;
|
||||
})[0];
|
||||
edgeToPush.source._outboundCounter++;
|
||||
cachedCommunities[source._id].edges.push(edgeToPush);
|
||||
} else {
|
||||
source._outboundCounter++;
|
||||
}
|
||||
if (cachedCommunities[target._id] !== undefined) {
|
||||
edgeToPush = {};
|
||||
edgeToPush.type = "t";
|
||||
edgeToPush.id = edge._id;
|
||||
edgeToPush.target = $.grep(cachedCommunities[target._id].nodes, function(e){
|
||||
return e._id === data._to;
|
||||
})[0];
|
||||
edgeToPush.target._inboundCounter++;
|
||||
cachedCommunities[target._id].edges.push(edgeToPush);
|
||||
} else {
|
||||
target._inboundCounter++;
|
||||
}
|
||||
return edge;
|
||||
},
|
||||
|
||||
removeNode = function (node) {
|
||||
var i;
|
||||
for ( i = 0; i < nodes.length; i++ ) {
|
||||
if ( nodes[i] === node ) {
|
||||
nodes.splice( i, 1 );
|
||||
return;
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
removeEdge = function (edge) {
|
||||
var i;
|
||||
for ( i = 0; i < edges.length; i++ ) {
|
||||
if ( edges[i] === edge ) {
|
||||
edges.splice( i, 1 );
|
||||
return;
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
removeEdgesForNode = function (node) {
|
||||
var i;
|
||||
for (i = 0; i < edges.length; i++ ) {
|
||||
if (edges[i].source === node) {
|
||||
node._outboundCounter--;
|
||||
edges[i].target._inboundCounter--;
|
||||
edges.splice( i, 1 );
|
||||
i--;
|
||||
} else if (edges[i].target === node) {
|
||||
node._inboundCounter--;
|
||||
edges[i].source._outboundCounter--;
|
||||
edges.splice( i, 1 );
|
||||
i--;
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
combineCommunityEdges = function (nodes, commNode) {
|
||||
var i, j, s, t,
|
||||
cachedCommEdges = cachedCommunities[commNode._id].edges,
|
||||
edgeToPush;
|
||||
for (i = 0; i < edges.length; i++ ) {
|
||||
edgeToPush = {};
|
||||
// s and t keep old values yay!
|
||||
s = edges[i].source;
|
||||
t = edges[i].target;
|
||||
for (j = 0; j < nodes.length; j++) {
|
||||
if (s === nodes[j]) {
|
||||
if (edgeToPush.type !== undefined) {
|
||||
edges[i].target = edgeToPush.target;
|
||||
delete edgeToPush.target;
|
||||
edgeToPush.type = "b";
|
||||
edgeToPush.edge = edges[i];
|
||||
edges.splice( i, 1 );
|
||||
i--;
|
||||
break;
|
||||
}
|
||||
edges[i].source = commNode;
|
||||
edgeToPush.type = "s";
|
||||
edgeToPush.id = edges[i]._id;
|
||||
edgeToPush.source = s;
|
||||
}
|
||||
if (t === nodes[j]) {
|
||||
if (edgeToPush.type !== undefined) {
|
||||
edges[i].source = edgeToPush.source;
|
||||
delete edgeToPush.source;
|
||||
edgeToPush.type = "b";
|
||||
edgeToPush.edge = edges[i];
|
||||
edges.splice( i, 1 );
|
||||
i--;
|
||||
break;
|
||||
}
|
||||
edges[i].target = commNode;
|
||||
edgeToPush.type = "t";
|
||||
edgeToPush.id = edges[i]._id;
|
||||
edgeToPush.target = t;
|
||||
}
|
||||
}
|
||||
if (edgeToPush.type !== undefined) {
|
||||
cachedCommEdges.push(edgeToPush);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
// Helper function to easily remove all outbound edges for one node
|
||||
removeOutboundEdgesFromNode = function ( node ) {
|
||||
if (node._outboundCounter > 0) {
|
||||
var removed = [],
|
||||
i;
|
||||
for ( i = 0; i < edges.length; i++ ) {
|
||||
if ( edges[i].source === node ) {
|
||||
removed.push(edges[i]);
|
||||
node._outboundCounter--;
|
||||
edges[i].target._inboundCounter--;
|
||||
edges.splice( i, 1 );
|
||||
if (node._outboundCounter === 0) {
|
||||
break;
|
||||
}
|
||||
i--;
|
||||
}
|
||||
}
|
||||
return removed;
|
||||
}
|
||||
},
|
||||
|
||||
collapseCommunity = function (community) {
|
||||
var commId = "*community_" + Math.floor(Math.random()* 1000000),
|
||||
commNode = {
|
||||
_id: commId,
|
||||
edges: []
|
||||
},
|
||||
nodesToRemove = _.map(community, function(id) {
|
||||
return findNode(id);
|
||||
});
|
||||
commNode.x = nodesToRemove[0].x;
|
||||
commNode.y = nodesToRemove[0].y;
|
||||
cachedCommunities[commId] = {};
|
||||
cachedCommunities[commId].nodes = nodesToRemove;
|
||||
cachedCommunities[commId].edges = [];
|
||||
|
||||
combineCommunityEdges(nodesToRemove, commNode);
|
||||
_.each(nodesToRemove, function(n) {
|
||||
joinedInCommunities[n._id] = commId;
|
||||
removeNode(n);
|
||||
});
|
||||
nodes.push(commNode);
|
||||
},
|
||||
|
||||
expandCommunity = function (commNode) {
|
||||
var commId = commNode._id,
|
||||
nodesToAdd = cachedCommunities[commId].nodes,
|
||||
edgesToChange = cachedCommunities[commId].edges,
|
||||
com;
|
||||
removeNode(commNode);
|
||||
if (limit < nodes.length + nodesToAdd.length) {
|
||||
com = reducer.getCommunity(limit);
|
||||
collapseCommunity(com);
|
||||
}
|
||||
_.each(nodesToAdd, function(n) {
|
||||
delete joinedInCommunities[n._id];
|
||||
nodes.push(n);
|
||||
});
|
||||
_.each(edgesToChange, function(e) {
|
||||
var edge;
|
||||
switch(e.type) {
|
||||
case "t":
|
||||
edge = findEdge(e.id);
|
||||
edge.target = e.target;
|
||||
break;
|
||||
case "s":
|
||||
edge = findEdge(e.id);
|
||||
edge.source = e.source;
|
||||
break;
|
||||
case "b":
|
||||
edges.push(e.edge);
|
||||
break;
|
||||
}
|
||||
});
|
||||
delete cachedCommunities[commId];
|
||||
},
|
||||
|
||||
checkSizeOfInserted = function (inserted) {
|
||||
var buckets;
|
||||
if (_.size(inserted) > childLimit) {
|
||||
buckets = reducer.bucketNodes(_.values(inserted), childLimit);
|
||||
_.each(buckets, function(b) {
|
||||
if (b.length > 1) {
|
||||
var ids = _.map(b, function(n) {
|
||||
return n._id;
|
||||
});
|
||||
collapseCommunity(ids);
|
||||
}
|
||||
});
|
||||
}
|
||||
},
|
||||
|
||||
checkNodeLimit = function (focus) {
|
||||
if (limit < nodes.length) {
|
||||
var com = reducer.getCommunity(limit, focus);
|
||||
collapseCommunity(com);
|
||||
}
|
||||
},
|
||||
|
||||
setNodeLimit = function (pLimit, callback) {
|
||||
limit = pLimit;
|
||||
if (limit < nodes.length) {
|
||||
var com = reducer.getCommunity(limit);
|
||||
collapseCommunity(com);
|
||||
if (callback !== undefined) {
|
||||
callback();
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
setChildLimit = function (pLimit) {
|
||||
childLimit = pLimit;
|
||||
};
|
||||
|
||||
childLimit = Number.POSITIVE_INFINITY;
|
||||
|
||||
reducer = new NodeReducer(nodes, edges);
|
||||
|
||||
initialX.getStart = function() {return 0;};
|
||||
initialY.getStart = function() {return 0;};
|
||||
|
||||
exports.setWidth = setWidth;
|
||||
exports.setHeight = setHeight;
|
||||
exports.insertNode = insertNode;
|
||||
exports.insertEdge = insertEdge;
|
||||
|
||||
exports.removeNode = removeNode;
|
||||
exports.removeEdge = removeEdge;
|
||||
exports.removeEdgesForNode = removeEdgesForNode;
|
||||
|
||||
exports.expandCommunity = expandCommunity;
|
||||
|
||||
exports.setNodeLimit = setNodeLimit;
|
||||
exports.setChildLimit = setChildLimit;
|
||||
|
||||
exports.checkSizeOfInserted = checkSizeOfInserted;
|
||||
exports.checkNodeLimit = checkNodeLimit;
|
||||
|
||||
return exports;
|
||||
}
|
|
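The new abstractAdapter.js added above centralises node/edge bookkeeping, community collapsing and the node/child limits so that concrete adapters can delegate to it. A minimal usage sketch, assuming NodeReducer is already loaded; the IDs are invented and this is not part of the diff:

    var nodes = [], edges = [];
    var adapter = new AbstractAdapter(nodes, edges);
    adapter.setWidth(940);
    adapter.setHeight(680);
    var a = adapter.insertNode({_id: "v/1"}),
        b = adapter.insertNode({_id: "v/2"});
    adapter.insertEdge({_id: "e/1", _from: "v/1", _to: "v/2"});  // both endpoints must already exist
    adapter.setNodeLimit(100);    // collapses nodes into a community (via NodeReducer) once exceeded
    adapter.setChildLimit(10);    // threshold used by checkSizeOfInserted to bucket large child sets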
@ -1,6 +1,6 @@
|
|||
/*jslint indent: 2, nomen: true, maxlen: 100, white: true plusplus: true */
|
||||
/*global $, d3, _, console, document*/
|
||||
/*global NodeReducer*/
|
||||
/*global AbstractAdapter*/
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief Graph functionality
|
||||
///
|
||||
|
@ -48,41 +48,15 @@ function ArangoAdapter(nodes, edges, config) {
|
|||
}
|
||||
|
||||
var self = this,
|
||||
initialX = {},
|
||||
initialY = {},
|
||||
absAdapter = new AbstractAdapter(nodes, edges),
|
||||
api = {},
|
||||
queries = {},
|
||||
cachedCommunities = {},
|
||||
joinedInCommunities = {},
|
||||
nodeCollection,
|
||||
edgeCollection,
|
||||
limit,
|
||||
childLimit,
|
||||
reducer,
|
||||
arangodb,
|
||||
width,
|
||||
height,
|
||||
direction,
|
||||
|
||||
setWidth = function(w) {
|
||||
initialX.range = w / 2;
|
||||
initialX.start = w / 4;
|
||||
initialX.getStart = function () {
|
||||
return this.start + Math.random() * this.range;
|
||||
};
|
||||
},
|
||||
|
||||
setHeight = function(h) {
|
||||
initialY.range = h / 2;
|
||||
initialY.start = h / 4;
|
||||
initialY.getStart = function () {
|
||||
return this.start + Math.random() * this.range;
|
||||
};
|
||||
},
|
||||
|
||||
|
||||
parseConfig = function(config) {
|
||||
initialX.getStart = function() {return 0;};
|
||||
initialY.getStart = function() {return 0;};
|
||||
nodeCollection = config.nodeCollection;
|
||||
edgeCollection = config.edgeCollection;
|
||||
if (config.host === undefined) {
|
||||
|
@ -91,10 +65,10 @@ function ArangoAdapter(nodes, edges, config) {
|
|||
arangodb = config.host;
|
||||
}
|
||||
if (config.width !== undefined) {
|
||||
setWidth(config.width);
|
||||
absAdapter.setWidth(config.width);
|
||||
}
|
||||
if (config.height !== undefined) {
|
||||
setHeight(config.height);
|
||||
absAdapter.setHeight(config.height);
|
||||
}
|
||||
if (config.undirected !== undefined) {
|
||||
if (config.undirected === true) {
|
||||
|
@ -107,208 +81,6 @@ function ArangoAdapter(nodes, edges, config) {
|
|||
}
|
||||
},
|
||||
|
||||
findNode = function(id) {
|
||||
var intId = joinedInCommunities[id] || id,
|
||||
res = $.grep(nodes, function(e){
|
||||
return e._id === intId;
|
||||
});
|
||||
if (res.length === 0) {
|
||||
return false;
|
||||
}
|
||||
if (res.length === 1) {
|
||||
return res[0];
|
||||
}
|
||||
throw "Too many nodes with the same ID, should never happen";
|
||||
},
|
||||
|
||||
findEdge = function(id) {
|
||||
var res = $.grep(edges, function(e){
|
||||
return e._id === id;
|
||||
});
|
||||
if (res.length === 0) {
|
||||
return false;
|
||||
}
|
||||
if (res.length === 1) {
|
||||
return res[0];
|
||||
}
|
||||
throw "Too many edges with the same ID, should never happen";
|
||||
},
|
||||
|
||||
insertNode = function(data) {
|
||||
var node = {
|
||||
_data: data,
|
||||
_id: data._id
|
||||
},
|
||||
n = findNode(node._id);
|
||||
if (n) {
|
||||
return n;
|
||||
}
|
||||
node.x = initialX.getStart();
|
||||
node.y = initialY.getStart();
|
||||
nodes.push(node);
|
||||
node._outboundCounter = 0;
|
||||
node._inboundCounter = 0;
|
||||
return node;
|
||||
},
|
||||
|
||||
insertEdge = function(data) {
|
||||
var source,
|
||||
target,
|
||||
edge = {
|
||||
_data: data,
|
||||
_id: data._id
|
||||
},
|
||||
e = findEdge(edge._id),
|
||||
edgeToPush;
|
||||
if (e) {
|
||||
return e;
|
||||
}
|
||||
source = findNode(data._from);
|
||||
target = findNode(data._to);
|
||||
if (!source) {
|
||||
throw "Unable to insert Edge, source node not existing " + edge._from;
|
||||
}
|
||||
if (!target) {
|
||||
throw "Unable to insert Edge, target node not existing " + edge._to;
|
||||
}
|
||||
edge.source = source;
|
||||
edge.target = target;
|
||||
edges.push(edge);
|
||||
|
||||
|
||||
if (cachedCommunities[source._id] !== undefined) {
|
||||
edgeToPush = {};
|
||||
edgeToPush.type = "s";
|
||||
edgeToPush.id = edge._id;
|
||||
edgeToPush.source = $.grep(cachedCommunities[source._id].nodes, function(e){
|
||||
return e._id === data._from;
|
||||
})[0];
|
||||
edgeToPush.source._outboundCounter++;
|
||||
cachedCommunities[source._id].edges.push(edgeToPush);
|
||||
} else {
|
||||
source._outboundCounter++;
|
||||
}
|
||||
if (cachedCommunities[target._id] !== undefined) {
|
||||
edgeToPush = {};
|
||||
edgeToPush.type = "t";
|
||||
edgeToPush.id = edge._id;
|
||||
edgeToPush.target = $.grep(cachedCommunities[target._id].nodes, function(e){
|
||||
return e._id === data._to;
|
||||
})[0];
|
||||
edgeToPush.target._inboundCounter++;
|
||||
cachedCommunities[target._id].edges.push(edgeToPush);
|
||||
} else {
|
||||
target._inboundCounter++;
|
||||
}
|
||||
return edge;
|
||||
},
|
||||
|
||||
removeNode = function (node) {
|
||||
var i;
|
||||
for ( i = 0; i < nodes.length; i++ ) {
|
||||
if ( nodes[i] === node ) {
|
||||
nodes.splice( i, 1 );
|
||||
return;
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
removeEdge = function (edge) {
|
||||
var i;
|
||||
for ( i = 0; i < edges.length; i++ ) {
|
||||
if ( edges[i] === edge ) {
|
||||
edges.splice( i, 1 );
|
||||
return;
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
removeEdgesForNode = function (node) {
|
||||
var i;
|
||||
for (i = 0; i < edges.length; i++ ) {
|
||||
if (edges[i].source === node) {
|
||||
node._outboundCounter--;
|
||||
edges[i].target._inboundCounter--;
|
||||
edges.splice( i, 1 );
|
||||
i--;
|
||||
} else if (edges[i].target === node) {
|
||||
node._inboundCounter--;
|
||||
edges[i].source._outboundCounter--;
|
||||
edges.splice( i, 1 );
|
||||
i--;
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
combineCommunityEdges = function (nodes, commNode) {
|
||||
var i, j, s, t,
|
||||
cachedCommEdges = cachedCommunities[commNode._id].edges,
|
||||
edgeToPush;
|
||||
for (i = 0; i < edges.length; i++ ) {
|
||||
edgeToPush = {};
|
||||
// s and t keep old values yay!
|
||||
s = edges[i].source;
|
||||
t = edges[i].target;
|
||||
for (j = 0; j < nodes.length; j++) {
|
||||
if (s === nodes[j]) {
|
||||
if (edgeToPush.type !== undefined) {
|
||||
edges[i].target = edgeToPush.target;
|
||||
delete edgeToPush.target;
|
||||
edgeToPush.type = "b";
|
||||
edgeToPush.edge = edges[i];
|
||||
edges.splice( i, 1 );
|
||||
i--;
|
||||
break;
|
||||
}
|
||||
edges[i].source = commNode;
|
||||
edgeToPush.type = "s";
|
||||
edgeToPush.id = edges[i]._id;
|
||||
edgeToPush.source = s;
|
||||
}
|
||||
if (t === nodes[j]) {
|
||||
if (edgeToPush.type !== undefined) {
|
||||
edges[i].source = edgeToPush.source;
|
||||
delete edgeToPush.source;
|
||||
edgeToPush.type = "b";
|
||||
edgeToPush.edge = edges[i];
|
||||
edges.splice( i, 1 );
|
||||
i--;
|
||||
break;
|
||||
}
|
||||
edges[i].target = commNode;
|
||||
edgeToPush.type = "t";
|
||||
edgeToPush.id = edges[i]._id;
|
||||
edgeToPush.target = t;
|
||||
}
|
||||
}
|
||||
if (edgeToPush.type !== undefined) {
|
||||
cachedCommEdges.push(edgeToPush);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
// Helper function to easily remove all outbound edges for one node
|
||||
removeOutboundEdgesFromNode = function ( node ) {
|
||||
if (node._outboundCounter > 0) {
|
||||
var removed = [],
|
||||
i;
|
||||
for ( i = 0; i < edges.length; i++ ) {
|
||||
if ( edges[i].source === node ) {
|
||||
removed.push(edges[i]);
|
||||
node._outboundCounter--;
|
||||
edges[i].target._inboundCounter--;
|
||||
edges.splice( i, 1 );
|
||||
if (node._outboundCounter === 0) {
|
||||
break;
|
||||
}
|
||||
i--;
|
||||
}
|
||||
}
|
||||
return removed;
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
sendQuery = function(query, bindVars, onSuccess) {
|
||||
if (query !== queries.connectedEdges) {
|
||||
bindVars["@nodes"] = nodeCollection;
|
||||
|
@ -343,106 +115,37 @@ function ArangoAdapter(nodes, edges, config) {
|
|||
}
|
||||
});
|
||||
},
|
||||
|
||||
collapseCommunity = function (community) {
|
||||
var commId = "*community_" + Math.floor(Math.random()* 1000000),
|
||||
commNode = {
|
||||
_id: commId,
|
||||
edges: []
|
||||
},
|
||||
nodesToRemove = _.map(community, function(id) {
|
||||
return findNode(id);
|
||||
});
|
||||
commNode.x = nodesToRemove[0].x;
|
||||
commNode.y = nodesToRemove[0].y;
|
||||
cachedCommunities[commId] = {};
|
||||
cachedCommunities[commId].nodes = nodesToRemove;
|
||||
cachedCommunities[commId].edges = [];
|
||||
|
||||
combineCommunityEdges(nodesToRemove, commNode);
|
||||
_.each(nodesToRemove, function(n) {
|
||||
joinedInCommunities[n._id] = commId;
|
||||
removeNode(n);
|
||||
});
|
||||
nodes.push(commNode);
|
||||
},
|
||||
|
||||
expandCommunity = function (commNode) {
|
||||
var commId = commNode._id,
|
||||
nodesToAdd = cachedCommunities[commId].nodes,
|
||||
edgesToChange = cachedCommunities[commId].edges,
|
||||
com;
|
||||
removeNode(commNode);
|
||||
if (limit < nodes.length + nodesToAdd.length) {
|
||||
com = reducer.getCommunity(limit);
|
||||
collapseCommunity(com);
|
||||
}
|
||||
_.each(nodesToAdd, function(n) {
|
||||
delete joinedInCommunities[n._id];
|
||||
nodes.push(n);
|
||||
});
|
||||
_.each(edgesToChange, function(e) {
|
||||
var edge;
|
||||
switch(e.type) {
|
||||
case "t":
|
||||
edge = findEdge(e.id);
|
||||
edge.target = e.target;
|
||||
break;
|
||||
case "s":
|
||||
edge = findEdge(e.id);
|
||||
edge.source = e.source;
|
||||
break;
|
||||
case "b":
|
||||
edges.push(e.edge);
|
||||
break;
|
||||
}
|
||||
});
|
||||
delete cachedCommunities[commId];
|
||||
},
|
||||
|
||||
parseResultOfTraversal = function (result, callback) {
|
||||
result = result[0];
|
||||
var inserted = {},
|
||||
n = insertNode(result[0].vertex),
|
||||
n = absAdapter.insertNode(result[0].vertex),
|
||||
com, buckets;
|
||||
_.each(result, function(visited) {
|
||||
var node = insertNode(visited.vertex),
|
||||
var node = absAdapter.insertNode(visited.vertex),
|
||||
path = visited.path;
|
||||
inserted[node._id] = node;
|
||||
_.each(path.vertices, function(connectedNode) {
|
||||
var ins = insertNode(connectedNode);
|
||||
var ins = absAdapter.insertNode(connectedNode);
|
||||
inserted[ins._id] = ins;
|
||||
});
|
||||
_.each(path.edges, function(edge) {
|
||||
insertEdge(edge);
|
||||
absAdapter.insertEdge(edge);
|
||||
});
|
||||
});
|
||||
delete inserted[n._id];
|
||||
if (_.size(inserted) > childLimit) {
|
||||
buckets = reducer.bucketNodes(_.values(inserted), childLimit);
|
||||
_.each(buckets, function(b) {
|
||||
if (b.length > 1) {
|
||||
var ids = _.map(b, function(n) {
|
||||
return n._id;
|
||||
});
|
||||
collapseCommunity(ids);
|
||||
}
|
||||
});
|
||||
}
|
||||
if (limit < nodes.length) {
|
||||
com = reducer.getCommunity(limit, n);
|
||||
collapseCommunity(com);
|
||||
}
|
||||
absAdapter.checkSizeOfInserted(inserted);
|
||||
absAdapter.checkNodeLimit(n);
|
||||
if (callback) {
|
||||
callback(n);
|
||||
}
|
||||
},
|
||||
|
||||
/* Archive
|
||||
parseResultOfQuery = function (result, callback) {
|
||||
_.each(result, function (node) {
|
||||
var n = findNode(node._id);
|
||||
if (!n) {
|
||||
insertNode(node);
|
||||
absAdapter.insertNode(node);
|
||||
n = node;
|
||||
} else {
|
||||
n.children = node.children;
|
||||
|
@ -454,14 +157,14 @@ function ArangoAdapter(nodes, edges, config) {
|
|||
var check = findNode(id),
|
||||
newnode;
|
||||
if (check) {
|
||||
insertEdge(n, check);
|
||||
absAdapter.insertEdge(n, check);
|
||||
self.requestCentralityChildren(id, function(c) {
|
||||
n._centrality = c;
|
||||
});
|
||||
} else {
|
||||
newnode = {_id: id};
|
||||
insertNode(newnode);
|
||||
insertEdge(n, newnode);
|
||||
absAdapter.insertNode(newnode);
|
||||
absAdapter.insertEdge(n, newnode);
|
||||
self.requestCentralityChildren(id, function(c) {
|
||||
newnode._centrality = c;
|
||||
});
|
||||
|
@ -472,7 +175,7 @@ function ArangoAdapter(nodes, edges, config) {
|
|||
}
|
||||
});
|
||||
},
|
||||
|
||||
*/
|
||||
permanentlyRemoveEdgesOfNode = function (nodeId) {
|
||||
sendQuery(queries.connectedEdges, {
|
||||
id: nodeId
|
||||
|
@ -536,11 +239,7 @@ function ArangoAdapter(nodes, edges, config) {
|
|||
+ " FILTER e._to == @id"
|
||||
+ " || e._from == @id"
|
||||
+ " RETURN e";
|
||||
|
||||
childLimit = Number.POSITIVE_INFINITY;
|
||||
|
||||
reducer = new NodeReducer(nodes, edges);
|
||||
|
||||
/* Archive
|
||||
self.oldLoadNodeFromTreeById = function(nodeId, callback) {
|
||||
sendQuery(queries.nodeById, {
|
||||
id: nodeId
|
||||
|
@ -548,7 +247,7 @@ function ArangoAdapter(nodes, edges, config) {
|
|||
parseResultOfQuery(res, callback);
|
||||
});
|
||||
};
|
||||
|
||||
*/
|
||||
self.loadNode = function(nodeId, callback) {
|
||||
self.loadNodeFromTreeById(nodeId, callback);
|
||||
};
|
||||
|
@ -590,7 +289,7 @@ function ArangoAdapter(nodes, edges, config) {
|
|||
data._from = edgeToAdd.source._id;
|
||||
data._to = edgeToAdd.target._id;
|
||||
delete data.error;
|
||||
var edge = insertEdge(data);
|
||||
var edge = absAdapter.insertEdge(data);
|
||||
callback(edge);
|
||||
},
|
||||
error: function(data) {
|
||||
|
@ -608,7 +307,7 @@ function ArangoAdapter(nodes, edges, config) {
|
|||
dataType: "json",
|
||||
processData: false,
|
||||
success: function() {
|
||||
removeEdge(edgeToRemove);
|
||||
absAdapter.removeEdge(edgeToRemove);
|
||||
if (callback !== undefined && _.isFunction(callback)) {
|
||||
callback();
|
||||
}
|
||||
|
@ -649,7 +348,7 @@ function ArangoAdapter(nodes, edges, config) {
|
|||
contentType: "application/json",
|
||||
processData: false,
|
||||
success: function(data) {
|
||||
insertNode(data);
|
||||
absAdapter.insertNode(data);
|
||||
callback(data);
|
||||
},
|
||||
error: function(data) {
|
||||
|
@ -667,9 +366,9 @@ function ArangoAdapter(nodes, edges, config) {
|
|||
contentType: "application/json",
|
||||
processData: false,
|
||||
success: function() {
|
||||
removeEdgesForNode(nodeToRemove);
|
||||
absAdapter.removeEdgesForNode(nodeToRemove);
|
||||
permanentlyRemoveEdgesOfNode(nodeToRemove._id);
|
||||
removeNode(nodeToRemove);
|
||||
absAdapter.removeNode(nodeToRemove);
|
||||
if (callback !== undefined && _.isFunction(callback)) {
|
||||
callback();
|
||||
}
|
||||
|
@ -714,22 +413,15 @@ function ArangoAdapter(nodes, edges, config) {
|
|||
};
|
||||
|
||||
self.setNodeLimit = function (pLimit, callback) {
|
||||
limit = pLimit;
|
||||
if (limit < nodes.length) {
|
||||
var com = reducer.getCommunity(limit);
|
||||
collapseCommunity(com);
|
||||
if (callback !== undefined) {
|
||||
callback();
|
||||
}
|
||||
}
|
||||
absAdapter.setNodeLimit(pLimit, callback);
|
||||
};
|
||||
|
||||
self.setChildLimit = function (pLimit) {
|
||||
childLimit = pLimit;
|
||||
absAdapter.setChildLimit(pLimit);
|
||||
};
|
||||
|
||||
self.expandCommunity = function (commNode, callback) {
|
||||
expandCommunity(commNode);
|
||||
absAdapter.expandCommunity(commNode);
|
||||
if (callback !== undefined) {
|
||||
callback();
|
||||
}
|
||||
|
|
|
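The hunks above strip the duplicated node/edge handling out of arangoAdapter.js and delegate to the shared AbstractAdapter instance (absAdapter). A hedged construction example; the config keys mirror what parseConfig reads in the diff, while the collection names and vertex ID are invented:

    var nodes = [], edges = [];
    var adapter = new ArangoAdapter(nodes, edges, {
      nodeCollection: "vertices",     // invented collection names
      edgeCollection: "connections",
      width: 940,
      height: 680
    });
    adapter.loadNode("vertices/123", function (centralNode) {
      // nodes/edges are now filled through absAdapter.insertNode / insertEdge
    });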
@ -558,7 +558,7 @@
|
|||
|
||||
waitsFor(function() {
|
||||
return callbackCheck;
|
||||
});
|
||||
}, 1000);
|
||||
|
||||
runs(function() {
|
||||
existNodes([c0, c1, c2, c3, c4]);
|
||||
|
@ -619,7 +619,7 @@
|
|||
|
||||
waitsFor(function() {
|
||||
return callbackCheck;
|
||||
});
|
||||
}, 1000);
|
||||
|
||||
runs(function() {
|
||||
expect(nodes[0]._data).toEqual({
|
||||
|
@ -793,7 +793,7 @@
|
|||
|
||||
waitsFor(function() {
|
||||
return callbackCheck;
|
||||
});
|
||||
}, 1000);
|
||||
|
||||
runs(function() {
|
||||
var callNodesIds = _.map(callNodes, function(n) {
|
||||
|
@ -858,7 +858,7 @@
|
|||
|
||||
waitsFor(function() {
|
||||
return callbackCheck;
|
||||
});
|
||||
}, 1000);
|
||||
|
||||
runs(function() {
|
||||
var callNodesIds = _.map(callNodes, function(n) {
|
||||
|
@ -1005,7 +1005,7 @@
|
|||
|
||||
waitsFor(function() {
|
||||
return callbackCheck;
|
||||
});
|
||||
}, 1000);
|
||||
|
||||
runs(function() {
|
||||
existNodes([c0, c1, c2, c3, c4, c5, c6, c7]);
|
||||
|
@ -1027,7 +1027,7 @@
|
|||
|
||||
waitsFor(function() {
|
||||
return callbackCheck;
|
||||
});
|
||||
}, 1000);
|
||||
|
||||
runs(function() {
|
||||
expect(toPatch._data.hello).toEqual("world");
|
||||
|
@ -1050,7 +1050,7 @@
|
|||
|
||||
waitsFor(function() {
|
||||
return callbackCheck;
|
||||
});
|
||||
}, 1000);
|
||||
|
||||
runs(function() {
|
||||
expect(toPatch._data.hello).toEqual("world");
|
||||
|
@ -1073,7 +1073,7 @@
|
|||
|
||||
waitsFor(function() {
|
||||
return callbackCheck;
|
||||
});
|
||||
}, 1000);
|
||||
|
||||
runs(function() {
|
||||
expect($.ajax).toHaveBeenCalledWith(
|
||||
|
@ -1097,7 +1097,7 @@
|
|||
|
||||
waitsFor(function() {
|
||||
return callbackCheck;
|
||||
});
|
||||
}, 1000);
|
||||
|
||||
runs(function() {
|
||||
expect($.ajax).toHaveBeenCalledWith(
|
||||
|
@ -1152,7 +1152,7 @@
|
|||
|
||||
waitsFor(function() {
|
||||
return callbackCheck;
|
||||
});
|
||||
}, 1000);
|
||||
|
||||
runs(function() {
|
||||
var commId = getCommunityNodesIds()[0];
|
||||
|
@ -1177,7 +1177,7 @@
|
|||
|
||||
waitsFor(function() {
|
||||
return callbackCheck;
|
||||
});
|
||||
}, 1000);
|
||||
|
||||
runs(function() {
|
||||
var commId = getCommunityNodesIds()[0];
|
||||
|
@ -1236,7 +1236,7 @@
|
|||
|
||||
waitsFor(function() {
|
||||
return called === 2;
|
||||
});
|
||||
}, 1000);
|
||||
|
||||
runs(function() {
|
||||
adapter.loadNode(v2, counterCallback);
|
||||
|
@ -1245,7 +1245,7 @@
|
|||
|
||||
waitsFor(function() {
|
||||
return called === 3;
|
||||
});
|
||||
}, 1000);
|
||||
|
||||
runs(function() {
|
||||
var commId = commNode._id;
|
||||
|
@ -1264,7 +1264,7 @@
|
|||
|
||||
waitsFor(function() {
|
||||
return called === 4;
|
||||
});
|
||||
}, 1000);
|
||||
|
||||
runs(function() {
|
||||
existNodes([v0, v1, v2, v3, v4]);
|
||||
|
@ -1320,7 +1320,7 @@
|
|||
|
||||
waitsFor(function() {
|
||||
return called === 2;
|
||||
});
|
||||
}, 1000);
|
||||
|
||||
runs(function() {
|
||||
adapter.loadNode(v2, counterCallback);
|
||||
|
@ -1329,7 +1329,7 @@
|
|||
|
||||
waitsFor(function() {
|
||||
return called === 3;
|
||||
});
|
||||
}, 1000);
|
||||
|
||||
runs(function() {
|
||||
adapter.setNodeLimit(20);
|
||||
|
@ -1338,7 +1338,7 @@
|
|||
|
||||
waitsFor(function() {
|
||||
return called === 4;
|
||||
});
|
||||
}, 1000);
|
||||
|
||||
runs(function() {
|
||||
var checkNodeWithInAndOut = function(id, inbound, outbound) {
|
||||
|
@ -1413,7 +1413,7 @@
|
|||
|
||||
waitsFor(function() {
|
||||
return callbackCheck;
|
||||
});
|
||||
}, 1000);
|
||||
|
||||
runs(function() {
|
||||
var newCommId = getCommunityNodesIds()[0];
|
||||
|
@ -1476,7 +1476,7 @@
|
|||
|
||||
waitsFor(function() {
|
||||
return callbackCheck;
|
||||
});
|
||||
}, 1000);
|
||||
|
||||
runs(function() {
|
||||
existEdge(c3, firstCommId);
|
||||
|
@ -1523,7 +1523,7 @@
|
|||
|
||||
waitsFor(function() {
|
||||
return callbackCheck;
|
||||
});
|
||||
}, 1000);
|
||||
|
||||
runs(function() {
|
||||
existNodes([c0, c1, c2, c3, c4, c8, c9]);
|
||||
|
@ -1557,7 +1557,7 @@
|
|||
|
||||
waitsFor(function() {
|
||||
return callbackCheck;
|
||||
});
|
||||
}, 1000);
|
||||
|
||||
runs(function() {
|
||||
expect($.ajax).toHaveBeenCalledWith(
|
||||
|
@ -1666,7 +1666,7 @@
|
|||
|
||||
waitsFor(function() {
|
||||
return callbackCheck;
|
||||
});
|
||||
}, 1000);
|
||||
|
||||
runs(function() {
|
||||
callbackCheck = false;
|
||||
|
|
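The spec hunks above all make the same change: each Jasmine waitsFor latch now passes an explicit 1000 ms timeout, so the updated pattern reads:

    waitsFor(function() {
      return callbackCheck;
    }, 1000);   // presumably so a never-satisfied latch fails after one second instead of the default wait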
File diff suppressed because it is too large
@@ -1,5 +1,5 @@
/*jslint indent: 2, nomen: true, maxlen: 100, sloppy: true, vars: true, white: true, plusplus: true */
/*global require, exports, Backbone, EJS, $*/
/*global require, exports, Backbone, EJS, window, SwaggerUi, hljs, document, $*/

window.AppDocumentationView = Backbone.View.extend({

@@ -15,7 +15,7 @@ window.AppDocumentationView = Backbone.View.extend({
      supportHeaderParams: true,
      supportedSubmitMethods: ['get', 'post', 'put', 'delete', 'patch', 'head'],
      onComplete: function(swaggerApi, swaggerUi){
        $('pre code').each(function(i, e) {hljs.highlightBlock(e)});
        $('pre code').each(function(i, e) {hljs.highlightBlock(e);});
      },
      onFailure: function(data) {
        var div = document.createElement("div"),

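The change above only adds a missing semicolon inside the SwaggerUi onComplete handler and extends the /*global*/ list accordingly. For orientation, a hedged sketch of how such options are usually passed to a SwaggerUi instance; the option names mirror the diff, while url and dom_id are assumptions:

    var swaggerUi = new SwaggerUi({
      url: "/api-docs.json",                 // invented endpoint
      dom_id: "swagger-ui-container",        // invented container id
      supportHeaderParams: true,
      supportedSubmitMethods: ['get', 'post', 'put', 'delete', 'patch', 'head'],
      onComplete: function(swaggerApi, swaggerUi) {
        // highlight every rendered code sample once the UI has finished loading
        $('pre code').each(function(i, e) { hljs.highlightBlock(e); });
      },
      onFailure: function(data) {
        // error handling elided
      }
    });
    swaggerUi.load();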
@@ -87,6 +87,7 @@ var documentSourceView = Backbone.View.extend({
    });
    var editor = ace.edit("sourceEditor");
    editor.setValue(arangoHelper.FormatJSON(data));
    editor.clearSelection();
  },
  stateReplace: function (value) {
    var inString = false;

@@ -35,7 +35,7 @@ var documentsView = Backbone.View.extend({

  returnPressedHandler: function(event) {
    if (event.keyCode === 13) {
      if (!!$("#confirmDeleteBtn").attr("disabled") === false) {
      if ($("#confirmDeleteBtn").attr("disabled") === false) {
        this.confirmDelete();
      }
    }

@@ -142,14 +142,20 @@ var documentsView = Backbone.View.extend({

  },
  confirmDelete: function () {
    $("#confirmDeleteBtn").attr("disabled", true);
    this.reallyDelete();
    $("#confirmDeleteBtn").attr("disabled", true);
    var hash = window.location.hash.split("/");
    var check = hash[3];
    //todo - find wrong event handler
    if (check !== 'source') {
      this.reallyDelete();
    }
  },
  reallyDelete: function () {
    var self = this;
    var row = $(self.target).closest("tr").get(0);
    var hash = window.location.hash.split("/");
    var page = hash[3];

    var deleted = false;
    this.docid = $(self.idelement).next().text();

@ -1,3 +1,6 @@
|
|||
/*jslint indent: 2, nomen: true, maxlen: 100, sloppy: true, vars: true, white: true, plusplus: true */
|
||||
/*global Backbone, EJS, $, window, _ */
|
||||
|
||||
var FoxxActiveListView = Backbone.View.extend({
|
||||
el: '#content',
|
||||
template: new EJS({url: 'js/templates/foxxListView.ejs'}),
|
||||
|
|
|
@ -1,27 +1,40 @@
|
|||
/*jslint indent: 2, nomen: true, maxlen: 100, sloppy: true, vars: true, white: true, plusplus: true */
|
||||
/*global Backbone, $, window, EJS, _ */
|
||||
|
||||
window.FoxxActiveView = Backbone.View.extend({
|
||||
tagName: 'li',
|
||||
className: "span3",
|
||||
template: new EJS({url: 'js/templates/foxxActiveView.ejs'}),
|
||||
|
||||
|
||||
events: {
|
||||
'click .icon-edit': 'editFoxx',
|
||||
'click' : 'showDocu'
|
||||
},
|
||||
|
||||
|
||||
initialize: function(){
|
||||
_.bindAll(this, 'render');
|
||||
},
|
||||
|
||||
|
||||
editFoxx: function(event) {
|
||||
event.stopPropagation();
|
||||
window.App.navigate("application/installed/" + encodeURIComponent(this.model.get("_key")), {trigger: true});
|
||||
window.App.navigate(
|
||||
"application/installed/" + encodeURIComponent(this.model.get("_key")),
|
||||
{
|
||||
trigger: true
|
||||
}
|
||||
);
|
||||
},
|
||||
|
||||
|
||||
showDocu: function(event) {
|
||||
event.stopPropagation();
|
||||
window.App.navigate("application/documentation/" + encodeURIComponent(this.model.get("_key")), {trigger: true});
|
||||
window.App.navigate(
|
||||
"application/documentation/" + encodeURIComponent(this.model.get("_key")),
|
||||
{
|
||||
trigger: true
|
||||
}
|
||||
);
|
||||
},
|
||||
|
||||
|
||||
render: function(){
|
||||
$(this.el).html(this.template.render(this.model));
|
||||
return $(this.el);
|
||||
|
|
|
@ -1,3 +1,6 @@
|
|||
/*jslint indent: 2, nomen: true, maxlen: 100, sloppy: true, vars: true, white: true, plusplus: true */
|
||||
/*global Backbone, EJS, $, window, _ */
|
||||
|
||||
window.foxxEditView = Backbone.View.extend({
|
||||
el: '#modalPlaceholder',
|
||||
initialize: function () {
|
||||
|
|
|
@ -1,3 +1,6 @@
|
|||
/*jslint indent: 2, nomen: true, maxlen: 100, sloppy: true, vars: true, white: true, plusplus: true */
|
||||
/*global Backbone, EJS, $, window, _ */
|
||||
|
||||
var FoxxInstalledListView = Backbone.View.extend({
|
||||
el: '#content',
|
||||
template: new EJS({url: 'js/templates/foxxListView.ejs'}),
|
||||
|
|
|
@ -1,21 +1,29 @@
|
|||
/*jslint indent: 2, nomen: true, maxlen: 100, sloppy: true, vars: true, white: true, plusplus: true */
|
||||
/*global Backbone, EJS, $, window, _ */
|
||||
|
||||
window.FoxxInstalledView = Backbone.View.extend({
|
||||
tagName: 'li',
|
||||
className: "span3",
|
||||
template: new EJS({url: 'js/templates/foxxInstalledView.ejs'}),
|
||||
|
||||
|
||||
events: {
|
||||
'click #install': 'installFoxx'
|
||||
},
|
||||
|
||||
|
||||
initialize: function(){
|
||||
_.bindAll(this, 'render');
|
||||
},
|
||||
|
||||
|
||||
installFoxx: function(event) {
|
||||
event.stopPropagation();
|
||||
window.App.navigate("application/available/" + encodeURIComponent(this.model.get("_key")), {trigger: true});
|
||||
window.App.navigate(
|
||||
"application/available/" + encodeURIComponent(this.model.get("_key")),
|
||||
{
|
||||
trigger: true
|
||||
}
|
||||
);
|
||||
},
|
||||
|
||||
|
||||
render: function(){
|
||||
$(this.el).html(this.template.render(this.model));
|
||||
return $(this.el);
|
||||
|
|
|
@ -1,10 +1,12 @@
|
|||
/*jslint indent: 2, nomen: true, maxlen: 100, sloppy: true, vars: true, white: true, plusplus: true, forin: true, regexp: true */
|
||||
/*global alert, Backbone, EJS, $, window */
|
||||
|
||||
window.foxxMountView = Backbone.View.extend({
|
||||
el: '#modalPlaceholder',
|
||||
m: {},
|
||||
|
||||
initialize: function () {
|
||||
this.m = this.model.attributes;
|
||||
console.log(this.m);
|
||||
},
|
||||
template: new EJS({url: 'js/templates/foxxMountView.ejs'}),
|
||||
|
||||
|
|
|
@@ -1,3 +1,6 @@
/*jslint indent: 2, nomen: true, maxlen: 100, sloppy: true, vars: true, white: true, plusplus: true, forin: true */
/*global Backbone, $, window, EJS, GraphViewerUI */

window.graphView = Backbone.View.extend({
  el: '#content',

@@ -38,11 +41,11 @@ window.graphView = Backbone.View.extend({
        nodeShaper: {
          label: label
        }
      }
    };
    }

    $("#background").remove();
    ui = new GraphViewerUI(document.getElementById("content"), aaconfig, 940, 680, config);
    var ui = new GraphViewerUI($("#content"), aaconfig, 940, 680, config);
  },

@@ -49,6 +49,9 @@ JAVASCRIPT_JSLINT = \
  `find @srcdir@/js/client/modules -name "*.js"` \
  `find @srcdir@/js/server/modules -name "*.js"` \
  `find @srcdir@/html/admin/js/models -name "*.js"` \
  `find @srcdir@/html/admin/js/views -name "*.js"` \
  `find @srcdir@/html/admin/js/collections -name "*.js"` \
  `find @srcdir@/html/admin/js/routers -name "*.js"` \
  \
  @srcdir@/js/client/client.js \
  @srcdir@/js/server/server.js \

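The added find lines pull the admin front-end's models, views, collections and routers into the JAVASCRIPT_JSLINT list, so the Backbone code touched in this commit is linted together with the server-side JavaScript. Assuming the build system exposes the usual lint target, the check would be run with something like:

    make jslint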
@@ -329,7 +329,7 @@ function processGithubRepository (source) {
  var tempFile = fs.getTempFile("downloads", false);

  try {
    var result = internal.download(url, "get", tempFile);
    var result = internal.download(url, "", { method: "get", followRedirects: true, timeout: 30 }, tempFile);

    if (result.code >= 200 && result.code <= 299) {
      source.filename = tempFile;

@@ -471,7 +471,7 @@ function updateFishbowl () {
  var path = fs.getTempFile("zip", false);

  try {
    var result = internal.download(url, "get", filename);
    var result = internal.download(url, "", { method: "get", followRedirects: true, timeout: 30 }, filename);

    if (result.code < 200 || result.code > 299) {
      throw "github download failed";

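Both call sites switch from the old internal.download(url, "get", outfile) form to the richer call used above, which takes an options object so that redirects can be followed and a timeout set. A hedged sketch of the same call pattern; the URL is made up and the second argument is presumably the request body (unused for GET):

    var internal = require("internal");
    var fs = require("fs");
    var tempFile = fs.getTempFile("downloads", false);
    var result = internal.download(
      "https://example.org/archive.zip",                       // invented download source
      "",                                                      // presumably the request body
      { method: "get", followRedirects: true, timeout: 30 },   // options as in the diff
      tempFile                                                 // write the response to this file
    );
    if (result.code < 200 || result.code > 299) {
      throw "download failed";
    }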
@@ -27,6 +27,7 @@

var internal = require("internal");
var jsunity = require("jsunity");
var EXPLAIN = internal.AQL_EXPLAIN;
var QUERY = internal.AQL_QUERY;

////////////////////////////////////////////////////////////////////////////////

@@ -46,6 +47,14 @@ function ahuacatlHashTestSuite () {
    return cursor;
  }

  ////////////////////////////////////////////////////////////////////////////////
  /// @brief explain a given query
  ////////////////////////////////////////////////////////////////////////////////

  function explainQuery (query, bindVars) {
    return EXPLAIN(query, bindVars);
  }

  ////////////////////////////////////////////////////////////////////////////////
  /// @brief execute a given query and return the results as an array
  ////////////////////////////////////////////////////////////////////////////////

@ -101,10 +110,15 @@ function ahuacatlHashTestSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testEqSingle1 : function () {
|
||||
var query = "FOR v IN " + hash.name() + " FILTER v.c == 1 SORT v.b RETURN [ v.b ]";
|
||||
var expected = [ [ 1 ], [ 2 ], [ 3 ], [ 4 ], [ 5 ] ];
|
||||
var actual = getQueryResults("FOR v IN " + hash.name() + " FILTER v.c == 1 SORT v.b RETURN [ v.b ]");
|
||||
var actual = getQueryResults(query);
|
||||
|
||||
assertEqual(expected, actual);
|
||||
|
||||
var explain = explainQuery(query);
|
||||
assertEqual("for", explain[0].type);
|
||||
assertEqual("index", explain[0].expression.extra.accessType);
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -112,10 +126,15 @@ function ahuacatlHashTestSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testEqSingle2 : function () {
|
||||
var query = "FOR v IN " + hash.name() + " FILTER 1 == v.c SORT v.b RETURN [ v.b ]";
|
||||
var expected = [ [ 1 ], [ 2 ], [ 3 ], [ 4 ], [ 5 ] ];
|
||||
var actual = getQueryResults("FOR v IN " + hash.name() + " FILTER 1 == v.c SORT v.b RETURN [ v.b ]");
|
||||
var actual = getQueryResults(query);
|
||||
|
||||
assertEqual(expected, actual);
|
||||
|
||||
var explain = explainQuery(query);
|
||||
assertEqual("for", explain[0].type);
|
||||
assertEqual("index", explain[0].expression.extra.accessType);
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -147,10 +166,15 @@ function ahuacatlHashTestSuite () {
|
|||
testEqMultiAll1 : function () {
|
||||
for (var i = 1; i <= 5; ++i) {
|
||||
for (var j = 1; j <=5; ++j) {
|
||||
var query = "FOR v IN " + hash.name() + " FILTER v.a == @a && v.b == @b RETURN [ v.a, v.b ]";
|
||||
var expected = [ [ i, j ] ];
|
||||
var actual = getQueryResults("FOR v IN " + hash.name() + " FILTER v.a == @a && v.b == @b RETURN [ v.a, v.b ]", { "a" : i, "b" : j });
|
||||
var actual = getQueryResults(query, { "a": i, "b": j });
|
||||
|
||||
assertEqual(expected, actual);
|
||||
|
||||
var explain = explainQuery(query, { "a": i, "b": j });
|
||||
assertEqual("for", explain[0].type);
|
||||
assertEqual("index", explain[0].expression.extra.accessType);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -162,10 +186,15 @@ function ahuacatlHashTestSuite () {
|
|||
testEqMultiAll2 : function () {
|
||||
for (var i = 1; i <= 5; ++i) {
|
||||
for (var j = 1; j <=5; ++j) {
|
||||
var query = "FOR v IN " + hash.name() + " FILTER @a == v.a && @b == v.b RETURN [ v.a, v.b ]";
|
||||
var expected = [ [ i, j ] ];
|
||||
var actual = getQueryResults("FOR v IN " + hash.name() + " FILTER @a == v.a && @b == v.b RETURN [ v.a, v.b ]", { "a" : i, "b" : j });
|
||||
var actual = getQueryResults(query, { "a": i, "b": j });
|
||||
|
||||
assertEqual(expected, actual);
|
||||
|
||||
var explain = explainQuery(query, { "a": i, "b": j });
|
||||
assertEqual("for", explain[0].type);
|
||||
assertEqual("index", explain[0].expression.extra.accessType);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -175,10 +204,16 @@ function ahuacatlHashTestSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testRefConst1 : function () {
|
||||
var query = "LET x = 4 FOR v IN " + hash.name() + " FILTER v.c == x SORT v.b RETURN [ v.b, v.c ]";
|
||||
var expected = [ [ 1, 4 ], [ 2, 4 ], [ 3, 4 ], [ 4, 4 ], [ 5, 4 ] ];
|
||||
var actual = getQueryResults("LET x = 4 FOR v IN " + hash.name() + " FILTER v.c == x SORT v.b RETURN [ v.b, v.c ]");
|
||||
var actual = getQueryResults(query);
|
||||
|
||||
assertEqual(expected, actual);
|
||||
|
||||
var explain = explainQuery(query);
|
||||
assertEqual("let", explain[0].type);
|
||||
assertEqual("for", explain[1].type);
|
||||
assertEqual("index", explain[1].expression.extra.accessType);
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -255,7 +290,103 @@ function ahuacatlHashTestSuite () {
|
|||
|
||||
assertEqual(expected, actual);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief test ref access
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testRefMulti3 : function () {
|
||||
var query = "FOR v1 IN " + hash.name() + " FILTER @a == v1.a && @b == v1.b RETURN [ v1.a, v1.b ]";
|
||||
var expected = [ [ 2, 3 ] ];
|
||||
var actual = getQueryResults(query, { "a": 2, "b": 3 });
|
||||
|
||||
assertEqual(expected, actual);
|
||||
|
||||
var explain = explainQuery(query, { "a": 2, "b": 3 });
|
||||
assertEqual("for", explain[0].type);
|
||||
assertEqual("index", explain[0].expression.extra.accessType);
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief test ref access with filters on the same attribute
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testRefFilterSame1 : function () {
|
||||
var query = "FOR a IN " + hash.name() + " FILTER a.a == a.a SORT a.a RETURN a.a";
|
||||
var expected = [ 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5 ];
|
||||
var actual = getQueryResults(query);
|
||||
|
||||
assertEqual(expected, actual);
|
||||
|
||||
var explain = explainQuery(query);
|
||||
assertEqual("for", explain[0].type);
|
||||
assertEqual("all", explain[0].expression.extra.accessType);
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief test ref access with filters on the same attribute
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testRefFilterSame2 : function () {
|
||||
var query = "FOR a IN " + hash.name() + " FILTER a.a == a.c SORT a.a RETURN a.a";
|
||||
var expected = [ 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5 ];
|
||||
var actual = getQueryResults(query);
|
||||
|
||||
assertEqual(expected, actual);
|
||||
|
||||
var explain = explainQuery(query);
|
||||
assertEqual("for", explain[0].type);
|
||||
assertEqual("all", explain[0].expression.extra.accessType);
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief test ref access with filters on the same attribute
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testRefNon1 : function () {
|
||||
var query = "FOR a IN " + hash.name() + " FILTER a.a == 1 RETURN a.a";
|
||||
var expected = [ 1, 1, 1, 1, 1 ];
|
||||
var actual = getQueryResults(query);
|
||||
|
||||
assertEqual(expected, actual);
|
||||
|
||||
var explain = explainQuery(query);
|
||||
assertEqual("for", explain[0].type);
|
||||
assertEqual("all", explain[0].expression.extra.accessType);
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief test ref access with filters on the same attribute
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testRefNon2 : function () {
|
||||
var query = "FOR a IN " + hash.name() + " FILTER a.d == a.a SORT a.a RETURN a.a";
|
||||
var expected = [ ];
|
||||
var actual = getQueryResults(query);
|
||||
|
||||
assertEqual(expected, actual);
|
||||
|
||||
var explain = explainQuery(query);
|
||||
assertEqual("for", explain[0].type);
|
||||
assertEqual("all", explain[0].expression.extra.accessType);
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief test ref access with filters on the same attribute
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testRefNon3 : function () {
|
||||
var query = "FOR a IN " + hash.name() + " FILTER a.d == 1 SORT a.a RETURN a.a";
|
||||
var expected = [ ];
|
||||
var actual = getQueryResults(query);
|
||||
|
||||
assertEqual(expected, actual);
|
||||
|
||||
var explain = explainQuery(query);
|
||||
assertEqual("for", explain[0].type);
|
||||
assertEqual("all", explain[0].expression.extra.accessType);
|
||||
},
|
||||
|
||||
};
|
||||
}
|
||||
|
|
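The new assertions in the hash-index suite inspect the output of internal.AQL_EXPLAIN to verify whether a FILTER is answered via the index ("index") or via a full scan ("all"). A hedged interactive check along the same lines; the collection name is invented and the structure mirrors what the tests assert:

    var internal = require("internal");
    var explain = internal.AQL_EXPLAIN("FOR v IN mycollection FILTER v.a == 1 RETURN v");
    // explain is a list of execution steps; the tests above check the first one:
    //   explain[0].type                        -> "for"
    //   explain[0].expression.extra.accessType -> "index" when the hash index is used,
    //                                             "all" when a full collection scan is needed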
|
@ -27,6 +27,7 @@
|
|||
|
||||
var internal = require("internal");
|
||||
var jsunity = require("jsunity");
|
||||
var EXPLAIN = internal.AQL_EXPLAIN;
|
||||
var QUERY = internal.AQL_QUERY;
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -46,6 +47,14 @@ function ahuacatlSkiplistTestSuite () {
|
|||
return cursor;
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief explain a given query
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function explainQuery (query) {
|
||||
return EXPLAIN(query);
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// @brief execute a given query and return the results as an array
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -99,10 +108,15 @@ function ahuacatlSkiplistTestSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testEqSingleVoid1 : function () {
|
||||
var query = "FOR v IN " + skiplist.name() + " FILTER v.a == 99 RETURN v";
|
||||
var expected = [ ];
|
||||
var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a == 99 RETURN v");
|
||||
var actual = getQueryResults(query);
|
||||
|
||||
assertEqual(expected, actual);
|
||||
|
||||
var explain = explainQuery(query);
|
||||
assertEqual("for", explain[0].type);
|
||||
assertEqual("index", explain[0].expression.extra.accessType);
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -110,10 +124,15 @@ function ahuacatlSkiplistTestSuite () {
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
testEqSingleVoid2 : function () {
|
||||
var query = "FOR v IN " + skiplist.name() + " FILTER 99 == v.a RETURN v";
|
||||
var expected = [ ];
|
||||
var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 99 == v.a RETURN v");
|
||||
var actual = getQueryResults(query);
|
||||
|
||||
assertEqual(expected, actual);
|
||||
|
||||
var explain = explainQuery(query);
|
||||
assertEqual("for", explain[0].type);
|
||||
assertEqual("index", explain[0].expression.extra.accessType);
|
||||
},
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
@@ -121,10 +140,15 @@ function ahuacatlSkiplistTestSuite () {
////////////////////////////////////////////////////////////////////////////////

    testEqSingle1 : function () {
      var query = "FOR v IN " + skiplist.name() + " FILTER v.a == 1 SORT v.b RETURN [ v.a, v.b ]";
      var expected = [ [ 1, 1 ], [ 1, 2 ], [ 1, 3 ], [ 1, 4 ], [ 1, 5 ] ];
      var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a == 1 SORT v.b RETURN [ v.a, v.b ]");
      var actual = getQueryResults(query);

      assertEqual(expected, actual);

      var explain = explainQuery(query);
      assertEqual("for", explain[0].type);
      assertEqual("index", explain[0].expression.extra.accessType);
    },

////////////////////////////////////////////////////////////////////////////////
@@ -132,10 +156,15 @@ function ahuacatlSkiplistTestSuite () {
////////////////////////////////////////////////////////////////////////////////

    testEqSingle2 : function () {
      var query = "FOR v IN " + skiplist.name() + " FILTER 1 == v.a SORT v.b RETURN [ v.a, v.b ]";
      var expected = [ [ 1, 1 ], [ 1, 2 ], [ 1, 3 ], [ 1, 4 ], [ 1, 5 ] ];
      var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 1 == v.a SORT v.b RETURN [ v.a, v.b ]");
      var actual = getQueryResults(query);

      assertEqual(expected, actual);

      var explain = explainQuery(query);
      assertEqual("for", explain[0].type);
      assertEqual("index", explain[0].expression.extra.accessType);
    },

////////////////////////////////////////////////////////////////////////////////
@@ -187,10 +216,15 @@ function ahuacatlSkiplistTestSuite () {
////////////////////////////////////////////////////////////////////////////////

    testGeSingle1 : function () {
      var query = "FOR v IN " + skiplist.name() + " FILTER v.a >= 5 SORT v.b RETURN [ v.a, v.b ]";
      var expected = [ [ 5, 1 ], [ 5, 2 ], [ 5, 3 ], [ 5, 4 ], [ 5, 5 ] ];
      var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a >= 5 SORT v.b RETURN [ v.a, v.b ]");
      var actual = getQueryResults(query);

      assertEqual(expected, actual);

      var explain = explainQuery(query);
      assertEqual("for", explain[0].type);
      assertEqual("index", explain[0].expression.extra.accessType);
    },

////////////////////////////////////////////////////////////////////////////////
@@ -1012,6 +1046,49 @@ function ahuacatlSkiplistTestSuite () {
      var actual = getQueryResults("FOR v1 IN " + skiplist.name() + " FOR v2 IN " + skiplist.name() + " FILTER 1 == v1.a && v1.a == v2.a && 1 == v1.b SORT v1.a, v2.b RETURN [ v1.a, v2.b ]");

      assertEqual(expected, actual);
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief test ref access with filters on the same attribute
////////////////////////////////////////////////////////////////////////////////

    testRefFilterSame : function () {
      skiplist.ensureSkiplist("c");
      skiplist.ensureSkiplist("d");

      skiplist.truncate();

      for (var i = 1; i <= 5; ++i) {
        for (var j = 1; j <= 5; ++j) {
          skiplist.save({ "c" : i, "d": j });
        }
      }

      var query = "FOR a IN " + skiplist.name() + " FILTER a.c == a.d SORT a.c RETURN [ a.c, a.d ]";
      var expected = [ [ 1, 1 ], [ 2, 2 ], [ 3, 3 ], [ 4, 4 ], [ 5, 5 ] ];
      var actual = getQueryResults(query);

      assertEqual(expected, actual);

      var explain = explainQuery(query);
      assertEqual("for", explain[0].type);
      assertEqual("all", explain[0].expression.extra.accessType);
    },

////////////////////////////////////////////////////////////////////////////////
/// @brief test ref access with filters on the same attribute
////////////////////////////////////////////////////////////////////////////////

    testRefFilterNonExisting : function () {
      var query = "FOR a IN " + skiplist.name() + " FILTER a.e == a.f SORT a.a, a.b RETURN [ a.a, a.b ]";
      var expected = [ [ 1, 1 ], [ 1, 2 ], [ 1, 3 ], [ 1, 4 ], [ 1, 5 ], [ 2, 1 ], [ 2, 2 ], [ 2, 3 ], [ 2, 4 ], [ 2, 5 ], [ 3, 1 ], [ 3, 2 ], [ 3, 3 ], [ 3, 4 ], [ 3, 5 ], [ 4, 1 ], [ 4, 2 ], [ 4, 3 ], [ 4, 4 ], [ 4, 5 ], [ 5, 1 ], [ 5, 2 ], [ 5, 3 ], [ 5, 4 ], [ 5, 5 ] ];
      var actual = getQueryResults(query);

      assertEqual(expected, actual);

      var explain = explainQuery(query);
      assertEqual("for", explain[0].type);
      assertEqual("all", explain[0].expression.extra.accessType);
    }

  };
@@ -144,39 +144,10 @@ char const* HttpRequest::requestPath () const {
////////////////////////////////////////////////////////////////////////////////

void HttpRequest::write (TRI_string_buffer_t* buffer) const {
  switch (_type) {
    case HTTP_REQUEST_GET:
      TRI_AppendString2StringBuffer(buffer, "GET ", 4);
      break;
  const string& method = translateMethod(_type);

    case HTTP_REQUEST_POST:
      TRI_AppendString2StringBuffer(buffer, "POST ", 5);
      break;

    case HTTP_REQUEST_PUT:
      TRI_AppendString2StringBuffer(buffer, "PUT ", 4);
      break;

    case HTTP_REQUEST_DELETE:
      TRI_AppendString2StringBuffer(buffer, "DELETE ", 7);
      break;

    case HTTP_REQUEST_HEAD:
      TRI_AppendString2StringBuffer(buffer, "HEAD ", 5);
      break;

    case HTTP_REQUEST_OPTIONS:
      TRI_AppendString2StringBuffer(buffer, "OPTIONS ", 8);
      break;

    case HTTP_REQUEST_PATCH:
      TRI_AppendString2StringBuffer(buffer, "PATCH ", 6);
      break;

    default:
      TRI_AppendString2StringBuffer(buffer, "UNKNOWN ", 8);
      break;
  }
  TRI_AppendString2StringBuffer(buffer, method.c_str(), method.size());
  TRI_AppendCharStringBuffer(buffer, ' ');

  // do NOT url-encode the path, we need to distingush between
  // "/document/a/b" and "/document/a%2fb"
@@ -1194,38 +1165,76 @@ void HttpRequest::addSuffix (char const* part) {
// --SECTION-- public static methods
// -----------------------------------------------------------------------------

////////////////////////////////////////////////////////////////////////////////
/// @brief translate an enum value into an HTTP method string
////////////////////////////////////////////////////////////////////////////////

string HttpRequest::translateMethod (const HttpRequestType method) {
  if (method == HTTP_REQUEST_DELETE) {
    return "DELETE";
  }
  else if (method == HTTP_REQUEST_GET) {
    return "GET";
  }
  else if (method == HTTP_REQUEST_HEAD) {
    return "HEAD";
  }
  else if (method == HTTP_REQUEST_OPTIONS) {
    return "OPTIONS";
  }
  else if (method == HTTP_REQUEST_PATCH) {
    return "PATCH";
  }
  else if (method == HTTP_REQUEST_POST) {
    return "POST";
  }
  else if (method == HTTP_REQUEST_PUT) {
    return "PUT";
  }

  LOGGER_WARNING("illegal http request method encountered in switch");
  return "UNKNOWN";
}

////////////////////////////////////////////////////////////////////////////////
/// @brief translate an HTTP method string into an enum value
////////////////////////////////////////////////////////////////////////////////

HttpRequest::HttpRequestType HttpRequest::translateMethod (const string& method) {
  const string methodString = StringUtils::toupper(method);

  if (methodString == "DELETE") {
    return HTTP_REQUEST_DELETE;
  }
  else if (methodString == "GET") {
    return HTTP_REQUEST_GET;
  }
  else if (methodString == "HEAD") {
    return HTTP_REQUEST_HEAD;
  }
  else if (methodString == "OPTIONS") {
    return HTTP_REQUEST_OPTIONS;
  }
  else if (methodString == "PATCH") {
    return HTTP_REQUEST_PATCH;
  }
  else if (methodString == "POST") {
    return HTTP_REQUEST_POST;
  }
  else if (methodString == "PUT") {
    return HTTP_REQUEST_PUT;
  }

  return HTTP_REQUEST_ILLEGAL;
}

////////////////////////////////////////////////////////////////////////////////
/// @brief append the request method string to a string buffer
////////////////////////////////////////////////////////////////////////////////

void HttpRequest::appendMethod (HttpRequestType method, StringBuffer* buffer) {
  switch (method) {
    case HTTP_REQUEST_GET:
      buffer->appendText("GET ");
      break;
    case HTTP_REQUEST_POST:
      buffer->appendText("POST ");
      break;
    case HTTP_REQUEST_PUT:
      buffer->appendText("PUT ");
      break;
    case HTTP_REQUEST_DELETE:
      buffer->appendText("DELETE ");
      break;
    case HTTP_REQUEST_OPTIONS:
      buffer->appendText("OPTIONS ");
      break;
    case HTTP_REQUEST_PATCH:
      buffer->appendText("PATCH ");
      break;
    case HTTP_REQUEST_HEAD:
      buffer->appendText("HEAD ");
      break;
    case HTTP_REQUEST_ILLEGAL:
      buffer->appendText("UNKNOWN ");
      LOGGER_WARNING("illegal http request method encountered in switch");
      break;
  }
  buffer->appendText(translateMethod(method));
  buffer->appendChar(' ');
}

////////////////////////////////////////////////////////////////////////////////
@@ -417,6 +417,18 @@ namespace triagens {
// --SECTION-- public static methods
// -----------------------------------------------------------------------------

////////////////////////////////////////////////////////////////////////////////
/// @brief translate an enum value into an HTTP method string
////////////////////////////////////////////////////////////////////////////////

static string translateMethod (const HttpRequestType);

////////////////////////////////////////////////////////////////////////////////
/// @brief translate an HTTP method string into an enum value
////////////////////////////////////////////////////////////////////////////////

static HttpRequestType translateMethod (const string&);

////////////////////////////////////////////////////////////////////////////////
/// @brief append the request method string to a string buffer
////////////////////////////////////////////////////////////////////////////////
@@ -332,45 +332,126 @@ static v8::Handle<v8::Value> JS_Parse (v8::Arguments const& argv) {
////////////////////////////////////////////////////////////////////////////////
/// @brief downloads data from a URL
///
/// @FUN{internal.download(@FA{url}, @FA{method}, @FA{outfile}, @FA{timeout})}
/// @FUN{internal.download(@FA{url}, @FA{body}, @FA{options}, @FA{outfile})}
///
/// Downloads the data from the URL specified by @FA{url} and saves the
/// response body to @FA{outfile}.
/// response body to @FA{outfile}. The following @FA{options} are supported:
///
/// - @LIT{method}: the HTTP method to be used. The supported HTTP methods are
///   @LIT{DELETE}, @LIT{GET}, @LIT{HEAD}, @LIT{POST}, @LIT{PUT}, @LIT{PATCH}
///
/// - @LIT{timeout}: a timeout value for the connection
///
/// - @LIT{followRedirects}: whether or not to follow redirects
///
/// - @LIT{headers}: an optional array of headers to be sent for the first
///   (non-redirect) request.
///
/// Up to 5 redirects will be followed. Any user-defined headers will only be
/// sent for the first request. If no timeout is given, a default timeout will
/// be used.
///
/// If @FA{outfile} is specified, the result body will be saved in a file
/// specified by @FA{outfile}. If @FA{outfile} already exists, an error will
/// be thrown.
///
/// If @FA{outfile} is not specified, the result body will be returned in the
/// @LIT{body} attribute of the result object.
////////////////////////////////////////////////////////////////////////////////
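///
/// For illustration only (not part of the original commit), the new
/// options-based signature documented above might be invoked from the
/// ArangoDB shell roughly as follows; the URL, header value, and output
/// file name are hypothetical:
///
///   var internal = require("internal");
///
///   // GET with default options; the response body ends up in result.body
///   var result = internal.download("http://example.org/data.json", "", { });
///
///   // POST with a body, a custom header, a 20 second timeout, no redirect
///   // following, and the response body written to /tmp/out.json
///   internal.download("http://example.org/submit",
///                     JSON.stringify({ value: 42 }),
///                     { method: "POST",
///                       headers: { "X-Custom": "demo" },
///                       timeout: 20,
///                       followRedirects: false },
///                     "/tmp/out.json");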

static v8::Handle<v8::Value> JS_Download (v8::Arguments const& argv) {
  v8::HandleScope scope;

  const string signature = "download(<url>, <body>, <options>, <outfile>)";

  if (argv.Length() < 3) {
  TRI_V8_EXCEPTION_USAGE(scope, "download(<url>, <method>, <outfile>, <timeout>)");
    TRI_V8_EXCEPTION_USAGE(scope, signature);
  }

  string url = TRI_ObjectToString(argv[0]);

  string body;
  if (argv[1]->IsString() || argv[1]->IsStringObject()) {
    body = TRI_ObjectToString(argv[1]);
  }

  // options
  // ------------------------------------------------------------------------

  if (! argv[2]->IsObject()) {
    TRI_V8_EXCEPTION_USAGE(scope, signature);
  }

  v8::Handle<v8::Array> options = v8::Handle<v8::Array>::Cast(argv[2]);
  if (options.IsEmpty()) {
    TRI_V8_EXCEPTION_USAGE(scope, signature);
  }

  // method
  HttpRequest::HttpRequestType method = HttpRequest::HTTP_REQUEST_GET;
  const string methodString = TRI_ObjectToString(argv[1]);

  if (methodString == "head") {
    method = HttpRequest::HTTP_REQUEST_HEAD;
  }
  else if (methodString == "delete") {
    method = HttpRequest::HTTP_REQUEST_DELETE;
  if (options->Has(TRI_V8_SYMBOL("method"))) {
    string methodString = TRI_ObjectToString(options->Get(TRI_V8_SYMBOL("method")));

    method = HttpRequest::translateMethod(methodString);
  }

  const string outfile = TRI_ObjectToString(argv[2]);
  // headers
  map<string, string> headerFields;
  if (options->Has(TRI_V8_SYMBOL("headers"))) {
    v8::Handle<v8::Object> v8Headers = options->Get(TRI_V8_SYMBOL("headers")).As<v8::Object> ();
    if (v8Headers->IsObject()) {
      v8::Handle<v8::Array> props = v8Headers->GetPropertyNames();

      for (uint32_t i = 0; i < props->Length(); i++) {
        v8::Handle<v8::Value> key = props->Get(v8::Integer::New(i));
        headerFields[TRI_ObjectToString(key)] = TRI_ObjectToString(v8Headers->Get(key));
      }
    }
  }

  // timeout
  double timeout = 10.0;
  if (argv.Length() > 3) {
    timeout = TRI_ObjectToDouble(argv[3]);
  if (options->Has(TRI_V8_SYMBOL("timeout"))) {
    if (! options->Get(TRI_V8_SYMBOL("timeout"))->IsNumber()) {
      TRI_V8_EXCEPTION_MESSAGE(scope, TRI_ERROR_BAD_PARAMETER, "invalid option value for timeout");
    }

    timeout = TRI_ObjectToDouble(options->Get(TRI_V8_SYMBOL("timeout")));
  }

  if (TRI_ExistsFile(outfile.c_str())) {
    TRI_V8_EXCEPTION(scope, TRI_ERROR_CANNOT_OVERWRITE_FILE);
  // follow redirects
  bool followRedirects = true;
  if (options->Has(TRI_V8_SYMBOL("followRedirects"))) {
    followRedirects = TRI_ObjectToBoolean(options->Get(TRI_V8_SYMBOL("followRedirects")));
  }

  if (body.size() > 0 &&
      (method == HttpRequest::HTTP_REQUEST_GET ||
       method == HttpRequest::HTTP_REQUEST_HEAD)) {
    TRI_V8_EXCEPTION_MESSAGE(scope, TRI_ERROR_BAD_PARAMETER, "should not provide a body value for this request method");
  }

  // outfile
  string outfile;
  if (argv.Length() == 4) {
    if (argv[3]->IsString() || argv[3]->IsStringObject()) {
      outfile = TRI_ObjectToString(argv[3]);
    }

    if (outfile == "") {
      TRI_V8_EXCEPTION_MESSAGE(scope, TRI_ERROR_BAD_PARAMETER, "invalid value provided for outfile");
    }

    if (TRI_ExistsFile(outfile.c_str())) {
      TRI_V8_EXCEPTION(scope, TRI_ERROR_CANNOT_OVERWRITE_FILE);
    }
  }

  int numRedirects = 0;

  while (numRedirects++ < 5) {
  while (numRedirects < 5) {
    string endpoint;
    string relative;
@@ -413,9 +494,13 @@ static v8::Handle<v8::Value> JS_Download (v8::Arguments const& argv) {
    SimpleHttpClient client(connection, timeout, false);

    v8::Handle<v8::Object> result = v8::Object::New();

    if (numRedirects > 0) {
      // do not send extra headers now
      headerFields.clear();
    }

    // connect to server and get version number
    map<string, string> headerFields;
    // send the actual request
    SimpleHttpResult* response = client.request(method, relative, 0, 0, headerFields);

    int returnCode;
@@ -434,7 +519,9 @@ static v8::Handle<v8::Value> JS_Download (v8::Arguments const& argv) {
      returnMessage = response->getHttpReturnMessage();
      returnCode = response->getHttpReturnCode();

      if (returnCode == 301 || returnCode == 302) {
      // follow redirects?
      if (followRedirects &&
          (returnCode == 301 || returnCode == 302)) {
        bool found;
        url = response->getHeaderField(string("location"), found);
@@ -445,12 +532,14 @@ static v8::Handle<v8::Value> JS_Download (v8::Arguments const& argv) {
          TRI_V8_EXCEPTION_INTERNAL(scope, "caught invalid redirect URL");
        }

        numRedirects++;
        continue;
      }

      result->Set(v8::String::New("code"), v8::Number::New(returnCode));
      result->Set(v8::String::New("message"), v8::String::New(returnMessage.c_str()));

      // process response headers
      const map<string, string> responseHeaders = response->getHeaderFields();
      map<string, string>::const_iterator it;
@@ -460,9 +549,17 @@ static v8::Handle<v8::Value> JS_Download (v8::Arguments const& argv) {
      }
      result->Set(v8::String::New("headers"), headers);

      if (returnCode >= 200 && returnCode <= 299) {
        try {
          FileUtils::spit(outfile, response->getBody().str());
          if (outfile.size() > 0) {
            // save outfile
            FileUtils::spit(outfile, response->getBody().str());
          }
          else {
            // set "body" attribute in result
            result->Set(v8::String::New("body"), v8::String::New(response->getBody().str().c_str(), response->getBody().str().length()));
          }
        }
        catch (...) {