1
0
Fork 0

replication tests

This commit is contained in:
Jan Steemann 2013-07-24 15:22:45 +02:00
parent 33f1bd87dd
commit 4f14062d10
14 changed files with 1322 additions and 159 deletions

View File

@ -0,0 +1,854 @@
# coding: utf-8
require 'rspec'
require 'json'
require './arangodb.rb'
describe ArangoDB do
api = "/_api/replication"
prefix = "api-replication"
context "dealing with the replication interface:" do
################################################################################
## logger
################################################################################
context "dealing with the logger" do
before do
end
# stop the replication logger after each test so a running logger
# cannot leak into the next test
after do
ArangoDB.put(api + "/logger-stop", :body => "")
end
################################################################################
## state
################################################################################
it "checks the state" do
  # query the current replication logger state
  doc = ArangoDB.log_get("#{prefix}-logger-state", "#{api}/logger-state", :body => "")
  doc.code.should eq(200)

  response = doc.parsed_response
  [ 'state', 'server', 'clients' ].each do |key|
    response.should have_key(key)
  end

  # logger has not been started, so it must report "not running"
  logger_state = response['state']
  logger_state['running'].should eq(false)
  logger_state['lastLogTick'].should match(/^\d+$/)
  logger_state['time'].should match(/^\d+-\d+-\d+T\d+:\d+:\d+Z$/)

  server_info = response['server']
  server_info['serverId'].should match(/^\d+$/)
  server_info.should have_key('version')
end
################################################################################
## start
################################################################################
it "starting the logger" do
  start_cmd = "#{api}/logger-start"

  # initial start
  doc = ArangoDB.log_put("#{prefix}-logger-start", start_cmd, :body => "")
  doc.code.should eq(200)
  doc.parsed_response['running'].should eq(true)

  # starting an already-running logger must succeed as well
  doc = ArangoDB.log_put("#{prefix}-logger-start", start_cmd, :body => "")
  doc.code.should eq(200)
  doc.parsed_response['running'].should eq(true)

  # the state must now report a running logger
  doc = ArangoDB.log_get("#{prefix}-logger-start", "#{api}/logger-state", :body => "")
  doc.code.should eq(200)

  response = doc.parsed_response
  [ 'state', 'server', 'clients' ].each do |key|
    response.should have_key(key)
  end

  logger_state = response['state']
  logger_state['running'].should eq(true)
  logger_state['lastLogTick'].should match(/^\d+$/)
  logger_state['time'].should match(/^\d+-\d+-\d+T\d+:\d+:\d+Z$/)

  server_info = response['server']
  server_info['serverId'].should match(/^\d+$/)
  server_info.should have_key('version')
end
################################################################################
## start / stop
################################################################################
it "starting and stopping the logger" do
  start_cmd = "#{api}/logger-start"
  stop_cmd  = "#{api}/logger-stop"

  # start
  doc = ArangoDB.log_put("#{prefix}-logger-startstop", start_cmd, :body => "")
  doc.code.should eq(200)
  doc.parsed_response['running'].should eq(true)

  # stop
  doc = ArangoDB.log_put("#{prefix}-logger-startstop", stop_cmd, :body => "")
  doc.code.should eq(200)
  doc.parsed_response['running'].should eq(false)

  # the state must now report a stopped logger
  doc = ArangoDB.log_get("#{prefix}-logger-startstop", "#{api}/logger-state", :body => "")
  doc.code.should eq(200)

  response = doc.parsed_response
  [ 'state', 'server', 'clients' ].each do |key|
    response.should have_key(key)
  end

  logger_state = response['state']
  logger_state['running'].should eq(false)
  logger_state['lastLogTick'].should match(/^\d+$/)
  logger_state['time'].should match(/^\d+-\d+-\d+T\d+:\d+:\d+Z$/)

  server_info = response['server']
  server_info['serverId'].should match(/^\d+$/)
  server_info.should have_key('version')

  # stopping an already-stopped logger must succeed as well
  doc = ArangoDB.log_put("#{prefix}-logger-startstop", stop_cmd, :body => "")
  doc.code.should eq(200)
  doc.parsed_response['running'].should eq(false)

  # restarting after a stop must work
  doc = ArangoDB.log_put("#{prefix}-logger-startstop", start_cmd, :body => "")
  doc.code.should eq(200)
  doc.parsed_response['running'].should eq(true)
end
################################################################################
## follow
################################################################################
it "fetches the empty follow log" do
  # make sure the logger is running
  doc = ArangoDB.log_put("#{prefix}-follow-empty", "#{api}/logger-start", :body => "")
  doc.code.should eq(200)

  # determine the tick to start following from
  doc = ArangoDB.log_get("#{prefix}-follow-empty", "#{api}/logger-state", :body => "")
  doc.code.should eq(200)
  doc.parsed_response["state"]["running"].should eq(true)
  fromTick = doc.parsed_response["state"]["lastLogTick"]

  # nothing was logged after fromTick, so the follow result must be empty
  doc = ArangoDB.log_get("#{prefix}-follow-empty", "#{api}/logger-follow?from=#{fromTick}", :body => "", :format => :plain)
  doc.code.should eq(200)

  headers = doc.headers
  headers["x-arango-replication-checkmore"].should eq("false")
  headers["x-arango-replication-lastincluded"].should match(/^\d+$/)
  headers["x-arango-replication-lastincluded"].should eq("0")
  headers["content-type"].should eq("application/x-arango-dump; charset=utf-8")
  doc.response.body.should eq("")
end
it "fetches a create collection action from the follow log" do
# start from a clean slate so the create operation below is actually logged
ArangoDB.drop_collection("UnitTestsReplication")
# start the replication logger
cmd = api + "/logger-start"
doc = ArangoDB.log_put("#{prefix}-follow-create-collection", cmd, :body => "")
doc.code.should eq(200)
# fetch the logger state to determine the tick to follow from
cmd = api + "/logger-state"
doc = ArangoDB.log_get("#{prefix}-follow-create-collection", cmd, :body => "")
doc.code.should eq(200)
doc.parsed_response["state"]["running"].should eq(true)
fromTick = doc.parsed_response["state"]["lastLogTick"]
# this create operation must show up in the follow log
cid = ArangoDB.create_collection("UnitTestsReplication")
# follow the log from the saved tick
cmd = api + "/logger-follow?from=" + fromTick
doc = ArangoDB.log_get("#{prefix}-follow-create-collection", cmd, :body => "", :format => :plain)
doc.code.should eq(200)
doc.headers["x-arango-replication-checkmore"].should eq("false")
doc.headers["x-arango-replication-lastincluded"].should match(/^\d+$/)
doc.headers["x-arango-replication-lastincluded"].should_not eq("0")
doc.headers["content-type"].should eq("application/x-arango-dump; charset=utf-8")
# the body holds a single JSON log entry describing the create operation
body = doc.response.body
document = JSON.parse(body)
document.should have_key("tick")
document.should have_key("type")
document.should have_key("cid")
document.should have_key("collection")
document["tick"].should match(/^\d+$/)
document["tick"].to_i.should >= fromTick.to_i
# type 2000 is used for "create collection" markers throughout this file
document["type"].should eq(2000)
document["cid"].should eq(cid)
c = document["collection"]
c.should have_key("version")
c["type"].should eq(2)
c["cid"].should eq(cid)
c["deleted"].should eq(false)
c["doCompact"].should eq(true)
c.should have_key("maximalSize")
c["maximalSize"].should be_kind_of(Integer)
c["name"].should eq("UnitTestsReplication")
c["isVolatile"].should eq(false)
c["waitForSync"].should eq(true)
end
it "fetches some collection operations the follow log" do
  ArangoDB.drop_collection("UnitTestsReplication")

  # start the replication logger
  cmd = api + "/logger-start"
  doc = ArangoDB.log_put("#{prefix}-follow-collection", cmd, :body => "")
  doc.code.should eq(200)

  # determine the tick to start following from
  cmd = api + "/logger-state"
  doc = ArangoDB.log_get("#{prefix}-follow-collection", cmd, :body => "")
  doc.code.should eq(200)
  doc.parsed_response["state"]["running"].should eq(true)
  fromTick = doc.parsed_response["state"]["lastLogTick"]

  # create collection
  cid = ArangoDB.create_collection("UnitTestsReplication")

  # create document
  cmd = "/_api/document?collection=UnitTestsReplication"
  body = "{ \"_key\" : \"test\", \"test\" : false }"
  doc = ArangoDB.log_post("#{prefix}-follow-collection", cmd, :body => body)
  doc.code.should eq(201)

  # delete document
  cmd = "/_api/document/UnitTestsReplication/test"
  doc = ArangoDB.log_delete("#{prefix}-follow-collection", cmd)
  doc.code.should eq(200)

  # drop collection
  cmd = "/_api/collection/UnitTestsReplication"
  doc = ArangoDB.log_delete("#{prefix}-follow-collection", cmd)
  doc.code.should eq(200)

  # follow the log from the saved tick; it must contain the four operations
  # performed above, in order
  cmd = api + "/logger-follow?from=" + fromTick
  # fix: use this test's own log label; it previously logged under
  # "-follow-create-collection", which belongs to a different test
  doc = ArangoDB.log_get("#{prefix}-follow-collection", cmd, :body => "", :format => :plain)
  doc.code.should eq(200)
  doc.headers["x-arango-replication-checkmore"].should eq("false")
  doc.headers["x-arango-replication-lastincluded"].should match(/^\d+$/)
  doc.headers["x-arango-replication-lastincluded"].should_not eq("0")
  doc.headers["content-type"].should eq("application/x-arango-dump; charset=utf-8")

  body = doc.response.body
  i = 0
  # revision of the created document; referenced again by the deletion marker.
  # initialized here so a missing i == 1 entry fails an expectation instead of
  # raising a NameError in the i == 2 branch
  rev = nil

  # the body is a newline-separated list of JSON log entries
  while 1
    position = body.index("\n")
    break if position == nil
    part = body.slice(0, position)
    document = JSON.parse(part)

    if i == 0
      # create collection (type 2000)
      document.should have_key("tick")
      document.should have_key("type")
      document.should have_key("cid")
      document.should have_key("collection")
      document["tick"].should match(/^\d+$/)
      document["tick"].to_i.should >= fromTick.to_i
      document["type"].should eq(2000)
      document["cid"].should eq(cid)
      c = document["collection"]
      c.should have_key("version")
      c["type"].should eq(2)
      c["cid"].should eq(cid)
      c["deleted"].should eq(false)
      c["doCompact"].should eq(true)
      c.should have_key("maximalSize")
      c["maximalSize"].should be_kind_of(Integer)
      c["name"].should eq("UnitTestsReplication")
      c["isVolatile"].should eq(false)
      c["waitForSync"].should eq(true)
    elsif i == 1
      # create document (type 2300)
      document.should have_key("tick")
      document.should have_key("type")
      document.should have_key("cid")
      document.should have_key("key")
      document.should have_key("rev")
      document["tick"].should match(/^\d+$/)
      document["tick"].to_i.should >= fromTick.to_i
      document["type"].should eq(2300)
      document["cid"].should eq(cid)
      document["key"].should eq("test")
      rev = document["rev"]
      rev.should match(/^\d+$/)
      rev.should_not eq("0")
      document["data"]["_key"].should eq("test")
      document["data"]["_rev"].should eq(rev)
      document["data"]["test"].should eq(false)
    elsif i == 2
      # delete document (type 2302); oldRev must reference the created revision
      document.should have_key("tick")
      document.should have_key("type")
      document.should have_key("cid")
      document.should have_key("key")
      document.should have_key("rev")
      document["tick"].should match(/^\d+$/)
      document["tick"].to_i.should >= fromTick.to_i
      document["type"].should eq(2302)
      document["cid"].should eq(cid)
      document["key"].should eq("test")
      document["oldRev"].should eq(rev)
      document["rev"].should match(/^\d+$/)
      document["rev"].should_not eq(rev)
    elsif i == 3
      # drop collection (type 2001)
      document.should have_key("tick")
      document.should have_key("type")
      document.should have_key("cid")
      document["tick"].should match(/^\d+$/)
      document["tick"].to_i.should >= fromTick.to_i
      document["type"].should eq(2001)
      document["cid"].should eq(cid)
    end

    body = body.slice(position + 1, body.length)
    i = i + 1
  end
end
end
################################################################################
## inventory / dump
################################################################################
context "dealing with the initial dump" do
before do
# start each test without the test collections
ArangoDB.drop_collection("UnitTestsReplication")
ArangoDB.drop_collection("UnitTestsReplication2")
end
after do
# stop the logger and remove the test collections again
ArangoDB.put(api + "/logger-stop", :body => "")
ArangoDB.drop_collection("UnitTestsReplication")
ArangoDB.drop_collection("UnitTestsReplication2")
end
################################################################################
## inventory
################################################################################
it "checks the initial inventory" do
  # no collections exist yet, so the inventory must list none
  doc = ArangoDB.log_get("#{prefix}-inventory", "#{api}/inventory", :body => "")
  doc.code.should eq(200)

  response = doc.parsed_response
  response.should have_key('collections')
  response.should have_key('state')
  response['collections'].should eq([ ])

  inventory_state = response['state']
  inventory_state['running'].should eq(false)
  inventory_state['lastLogTick'].should match(/^\d+$/)
  inventory_state['time'].should match(/^\d+-\d+-\d+T\d+:\d+:\d+Z$/)
end
it "checks the inventory after creating collections" do
  cid = ArangoDB.create_collection("UnitTestsReplication", false)
  cid2 = ArangoDB.create_collection("UnitTestsReplication2", true, 3)

  doc = ArangoDB.log_get("#{prefix}-inventory", "#{api}/inventory", :body => "")
  doc.code.should eq(200)

  response = doc.parsed_response
  response.should have_key('collections')
  response.should have_key('state')

  inventory_state = response['state']
  inventory_state['running'].should eq(false)
  inventory_state['lastLogTick'].should match(/^\d+$/)
  inventory_state['time'].should match(/^\d+-\d+-\d+T\d+:\d+:\d+Z$/)

  collections = response['collections']
  collections.length.should eq(2)

  # shared expectations for one collection entry in the inventory
  check_collection = lambda do |entry, type, expected_cid, name, wait_for_sync|
    entry.should have_key("parameters")
    entry.should have_key("indexes")

    parameters = entry['parameters']
    parameters.should have_key("version")
    parameters["version"].should be_kind_of(Integer)
    parameters["type"].should be_kind_of(Integer)
    parameters["type"].should eq(type)
    parameters["cid"].should eq(expected_cid)
    parameters["deleted"].should eq(false)
    parameters["doCompact"].should eq(true)
    parameters.should have_key("maximalSize")
    parameters["maximalSize"].should be_kind_of(Integer)
    parameters["name"].should eq(name)
    parameters["isVolatile"].should eq(false)
    parameters["waitForSync"].should eq(wait_for_sync)

    # no indexes were created
    entry['indexes'].should eq([ ])
  end

  # first collection: document collection (type 2), waitForSync off
  check_collection.call(collections[0], 2, cid, "UnitTestsReplication", false)
  # second collection: edge collection (type 3), waitForSync on
  check_collection.call(collections[1], 3, cid2, "UnitTestsReplication2", true)
end
it "checks the inventory with indexes" do
cid = ArangoDB.create_collection("UnitTestsReplication", false)
cid2 = ArangoDB.create_collection("UnitTestsReplication2", false)
# create indexes for first collection
body = "{ \"type\" : \"cap\", \"size\" : 9991 }"
doc = ArangoDB.log_post("#{prefix}-inventory2", "/_api/index?collection=UnitTestsReplication", :body => body)
doc.code.should eq(201)
body = "{ \"type\" : \"hash\", \"unique\" : false, \"fields\" : [ \"a\", \"b\" ] }"
doc = ArangoDB.log_post("#{prefix}-inventory2", "/_api/index?collection=UnitTestsReplication", :body => body)
doc.code.should eq(201)
body = "{ \"type\" : \"skiplist\", \"unique\" : false, \"fields\" : [ \"c\" ] }"
doc = ArangoDB.log_post("#{prefix}-inventory2", "/_api/index?collection=UnitTestsReplication", :body => body)
doc.code.should eq(201)
# create indexes for second collection
body = "{ \"type\" : \"geo\", \"unique\" : false, \"fields\" : [ \"a\", \"b\" ] }"
doc = ArangoDB.log_post("#{prefix}-inventory2", "/_api/index?collection=UnitTestsReplication2", :body => body)
doc.code.should eq(201)
body = "{ \"type\" : \"skiplist\", \"unique\" : true, \"fields\" : [ \"d\" ] }"
doc = ArangoDB.log_post("#{prefix}-inventory2", "/_api/index?collection=UnitTestsReplication2", :body => body)
doc.code.should eq(201)
body = "{ \"type\" : \"fulltext\", \"minLength\" : 8, \"fields\" : [ \"ff\" ] }"
doc = ArangoDB.log_post("#{prefix}-inventory2", "/_api/index?collection=UnitTestsReplication2", :body => body)
doc.code.should eq(201)
body = "{ \"type\" : \"cap\", \"byteSize\" : 1048576 }"
doc = ArangoDB.log_post("#{prefix}-inventory2", "/_api/index?collection=UnitTestsReplication2", :body => body)
doc.code.should eq(201)
# fetch the inventory; it must report both collections and their indexes
cmd = api + "/inventory"
doc = ArangoDB.log_get("#{prefix}-inventory2", cmd, :body => "")
doc.code.should eq(200)
all = doc.parsed_response
all.should have_key('collections')
all.should have_key('state')
state = all['state']
state['running'].should eq(false)
state['lastLogTick'].should match(/^\d+$/)
state['time'].should match(/^\d+-\d+-\d+T\d+:\d+:\d+Z$/)
collections = all['collections']
collections.length.should eq(2)
# first collection
c = collections[0]
c.should have_key("parameters")
c.should have_key("indexes")
parameters = c['parameters']
parameters.should have_key("version")
parameters["version"].should be_kind_of(Integer)
parameters["type"].should be_kind_of(Integer)
parameters["type"].should eq(2)
parameters["cid"].should eq(cid)
parameters["deleted"].should eq(false)
parameters["doCompact"].should eq(true)
parameters.should have_key("maximalSize")
parameters["maximalSize"].should be_kind_of(Integer)
parameters["name"].should eq("UnitTestsReplication")
parameters["isVolatile"].should eq(false)
parameters["waitForSync"].should eq(false)
# indexes must be reported in creation order
indexes = c['indexes']
indexes.length.should eq(3)
idx = indexes[0]
idx["id"].should match(/^\d+$/)
idx["type"].should eq("cap")
idx["size"].should eq(9991)
# size-constrained cap index reports byteSize 0
idx["byteSize"].should eq(0)
idx = indexes[1]
idx["id"].should match(/^\d+$/)
idx["type"].should eq("hash")
idx["unique"].should eq(false)
idx["fields"].should eq([ "a", "b" ])
idx = indexes[2]
idx["id"].should match(/^\d+$/)
idx["type"].should eq("skiplist")
idx["unique"].should eq(false)
idx["fields"].should eq([ "c" ])
# second collection
c = collections[1]
c.should have_key("parameters")
c.should have_key("indexes")
parameters = c['parameters']
parameters.should have_key("version")
parameters["version"].should be_kind_of(Integer)
parameters["type"].should be_kind_of(Integer)
parameters["type"].should eq(2)
parameters["cid"].should eq(cid2)
parameters["deleted"].should eq(false)
parameters["doCompact"].should eq(true)
parameters.should have_key("maximalSize")
parameters["maximalSize"].should be_kind_of(Integer)
parameters["name"].should eq("UnitTestsReplication2")
parameters["isVolatile"].should eq(false)
parameters["waitForSync"].should eq(false)
indexes = c['indexes']
indexes.length.should eq(4)
idx = indexes[0]
idx["id"].should match(/^\d+$/)
# NOTE(review): the index was created with type "geo" but is expected back as
# "geo2" — presumably the server's name for a two-field geo index; confirm
idx["type"].should eq("geo2")
idx["unique"].should eq(false)
idx["fields"].should eq([ "a", "b" ])
idx = indexes[1]
idx["id"].should match(/^\d+$/)
idx["type"].should eq("skiplist")
idx["unique"].should eq(true)
idx["fields"].should eq([ "d" ])
idx = indexes[2]
idx["id"].should match(/^\d+$/)
idx["type"].should eq("fulltext")
idx["unique"].should eq(false)
idx["minLength"].should eq(8)
idx["fields"].should eq([ "ff" ])
idx = indexes[3]
idx["id"].should match(/^\d+$/)
idx["type"].should eq("cap")
# byteSize-constrained cap index reports size 0
idx["size"].should eq(0)
idx["byteSize"].should eq(1048576)
end
################################################################################
## dump
################################################################################
it "checks the dump for an empty collection" do
  cid = ArangoDB.create_collection("UnitTestsReplication", false)

  # dumping a collection without documents must return an empty body
  doc = ArangoDB.log_get("#{prefix}-dump-empty", "#{api}/dump?collection=UnitTestsReplication", :body => "")
  doc.code.should eq(200)

  headers = doc.headers
  headers["x-arango-replication-checkmore"].should eq("false")
  headers["x-arango-replication-lastincluded"].should eq("0")
  headers["content-type"].should eq("application/x-arango-dump; charset=utf-8")
  doc.response.body.should eq("")
end
it "checks the dump for a non-empty collection" do
  cid = ArangoDB.create_collection("UnitTestsReplication", false)

  # insert 100 documents with predictable keys and values
  (0...100).each{|i|
    body = "{ \"_key\" : \"test" + i.to_s + "\", \"test\" : " + i.to_s + " }"
    doc = ArangoDB.post("/_api/document?collection=UnitTestsReplication", :body => body)
    doc.code.should eq(202)
  }

  cmd = api + "/dump?collection=UnitTestsReplication"
  doc = ArangoDB.log_get("#{prefix}-dump-non-empty", cmd, :body => "", :format => :plain)
  doc.code.should eq(200)
  doc.headers["x-arango-replication-checkmore"].should eq("false")
  doc.headers["x-arango-replication-lastincluded"].should match(/^\d+$/)
  doc.headers["x-arango-replication-lastincluded"].should_not eq("0")
  doc.headers["content-type"].should eq("application/x-arango-dump; charset=utf-8")

  body = doc.response.body
  i = 0
  # the dump body is a newline-separated list of JSON markers, one per document.
  # parse into "document" (not "doc") so the HTTP response is not clobbered,
  # consistent with the other dump tests in this file
  while 1
    position = body.index("\n")
    break if position == nil
    part = body.slice(0, position)
    document = JSON.parse(part)
    document['type'].should eq(2300)
    document['key'].should eq("test" + i.to_s)
    document['rev'].should match(/^\d+$/)
    document['data']['_key'].should eq("test" + i.to_s)
    document['data']['_rev'].should match(/^\d+$/)
    document['data']['test'].should eq(i)
    body = body.slice(position + 1, body.length)
    i = i + 1
  end
  # one marker per inserted document
  i.should eq(100)
end
it "checks the dump for an edge collection" do
# the vertex collection referenced by the edges
cid = ArangoDB.create_collection("UnitTestsReplication", false)
# type 3 = edge collection
cid2 = ArangoDB.create_collection("UnitTestsReplication2", false, 3)
# insert 500 edges with predictable keys and attribute values
(0...500).each{|i|
body = "{ \"_key\" : \"test" + i.to_s + "\", \"test1\" : " + i.to_s + ", \"test2\" : false, \"test3\" : [ ], \"test4\" : { } }"
doc = ArangoDB.post("/_api/edge?collection=UnitTestsReplication2&from=UnitTestsReplication/foo&to=UnitTestsReplication/bar", :body => body)
doc.code.should eq(202)
}
# dump the edge collection
cmd = api + "/dump?collection=UnitTestsReplication2"
doc = ArangoDB.log_get("#{prefix}-dump-edge", cmd, :body => "", :format => :plain)
doc.code.should eq(200)
doc.headers["x-arango-replication-checkmore"].should eq("false")
doc.headers["x-arango-replication-lastincluded"].should match(/^\d+$/)
doc.headers["x-arango-replication-lastincluded"].should_not eq("0")
doc.headers["content-type"].should eq("application/x-arango-dump; charset=utf-8")
body = doc.response.body
i = 0
# the dump body is a newline-separated list of JSON markers, in insertion order
while 1
position = body.index("\n")
break if position == nil
part = body.slice(0, position)
document = JSON.parse(part)
# type 2301 = edge marker
document['type'].should eq(2301)
document['key'].should eq("test" + i.to_s)
document['rev'].should match(/^\d+$/)
document['data']['_key'].should eq("test" + i.to_s)
document['data']['_rev'].should match(/^\d+$/)
# _from/_to come back with the vertex collection's id, not its name
document['data']['_from'].should eq(cid + "/foo")
document['data']['_to'].should eq(cid + "/bar")
document['data']['test1'].should eq(i)
document['data']['test2'].should eq(false)
document['data']['test3'].should eq([ ])
document['data']['test4'].should eq({ })
body = body.slice(position + 1, body.length)
i = i + 1
end
# one marker per inserted edge
i.should eq(500)
end
it "checks the dump for a collection with deleted documents" do
cid = ArangoDB.create_collection("UnitTestsReplication", false)
# insert and immediately delete 10 documents
(0...10).each{|i|
body = "{ \"_key\" : \"test" + i.to_s + "\", \"test\" : " + i.to_s + " }"
doc = ArangoDB.post("/_api/document?collection=UnitTestsReplication", :body => body)
doc.code.should eq(202)
doc = ArangoDB.delete("/_api/document/UnitTestsReplication/test" + i.to_s, :body => body)
doc.code.should eq(202)
}
cmd = api + "/dump?collection=UnitTestsReplication"
doc = ArangoDB.log_get("#{prefix}-deleted", cmd, :body => "", :format => :plain)
doc.code.should eq(200)
doc.headers["x-arango-replication-checkmore"].should eq("false")
doc.headers["x-arango-replication-lastincluded"].should match(/^\d+$/)
doc.headers["x-arango-replication-lastincluded"].should_not eq("0")
doc.headers["content-type"].should eq("application/x-arango-dump; charset=utf-8")
body = doc.response.body
i = 0
# markers alternate: even i = document marker (2300), odd i = deletion marker (2302)
while 1
position = body.index("\n")
break if position == nil
part = body.slice(0, position)
document = JSON.parse(part)
if i % 2 == 1
document['type'].should eq(2302)
document['key'].should eq("test" + (i / 2).floor.to_s)
document['rev'].should match(/^\d+$/)
else
document['type'].should eq(2300)
document['key'].should eq("test" + (i / 2).floor.to_s)
document['rev'].should match(/^\d+$/)
document['data']['_key'].should eq("test" + (i / 2).floor.to_s)
document['data']['_rev'].should match(/^\d+$/)
document['data']['test'].should eq((i / 2).floor)
end
body = body.slice(position + 1, body.length)
i = i + 1
end
# 10 insertions + 10 deletions = 20 markers
i.should eq(20)
end
it "checks the dump for a truncated collection" do
cid = ArangoDB.create_collection("UnitTestsReplication", false)
# insert 10 documents
(0...10).each{|i|
body = "{ \"_key\" : \"test" + i.to_s + "\", \"test\" : " + i.to_s + " }"
doc = ArangoDB.post("/_api/document?collection=UnitTestsReplication", :body => body)
doc.code.should eq(202)
}
# truncate
cmd = "/_api/collection/UnitTestsReplication/truncate"
doc = ArangoDB.log_put("#{prefix}-truncated", cmd, :body => "", :format => :plain)
doc.code.should eq(200)
cmd = api + "/dump?collection=UnitTestsReplication"
doc = ArangoDB.log_get("#{prefix}-truncated", cmd, :body => "", :format => :plain)
doc.code.should eq(200)
doc.headers["x-arango-replication-checkmore"].should eq("false")
doc.headers["x-arango-replication-lastincluded"].should match(/^\d+$/)
doc.headers["x-arango-replication-lastincluded"].should_not eq("0")
doc.headers["content-type"].should eq("application/x-arango-dump; charset=utf-8")
body = doc.response.body
i = 0
# expected: 10 document markers in insertion order, then 10 deletion markers
# produced by the truncate
while 1
position = body.index("\n")
break if position == nil
part = body.slice(0, position)
document = JSON.parse(part)
if i >= 10
# deletion marker from the truncate
document['type'].should eq(2302)
# truncate order is undefined
document['key'].should match(/^test\d+$/)
document['rev'].should match(/^\d+$/)
else
document['type'].should eq(2300)
document['key'].should eq("test" + i.to_s)
document['rev'].should match(/^\d+$/)
document['data']['_key'].should eq("test" + i.to_s)
document['data']['_rev'].should match(/^\d+$/)
document['data']['test'].should eq(i)
end
body = body.slice(position + 1, body.length)
i = i + 1
end
# 10 insertions + 10 deletions = 20 markers
i.should eq(20)
end
it "fetches incremental parts of a collection dump" do
cid = ArangoDB.create_collection("UnitTestsReplication", false)
# insert 10 documents
(0...10).each{|i|
body = "{ \"_key\" : \"test" + i.to_s + "\", \"test\" : " + i.to_s + " }"
doc = ArangoDB.post("/_api/document?collection=UnitTestsReplication", :body => body)
doc.code.should eq(202)
}
# fetch the documents one at a time using chunkSize=1, advancing the
# "from" tick with each response
fromTick = "0"
(0...10).each{|i|
cmd = api + "/dump?collection=UnitTestsReplication&from=" + fromTick + "&chunkSize=1"
doc = ArangoDB.log_get("#{prefix}-incremental", cmd, :body => "", :format => :plain)
doc.code.should eq(200)
if i == 9
# last chunk: the server must signal that no more data is available
doc.headers["x-arango-replication-checkmore"].should eq("false")
else
doc.headers["x-arango-replication-checkmore"].should eq("true")
end
doc.headers["x-arango-replication-lastincluded"].should match(/^\d+$/)
doc.headers["x-arango-replication-lastincluded"].should_not eq("0")
# ticks must be monotonically non-decreasing across chunks
doc.headers["x-arango-replication-lastincluded"].to_i.should >= fromTick.to_i
doc.headers["content-type"].should eq("application/x-arango-dump; charset=utf-8")
# continue the next request from the last included tick
fromTick = doc.headers["x-arango-replication-lastincluded"]
body = doc.response.body
# each chunk holds exactly one JSON document marker
document = JSON.parse(body)
document['type'].should eq(2300)
document['key'].should eq("test" + i.to_s)
document['rev'].should match(/^\d+$/)
document['data']['_key'].should eq("test" + i.to_s)
document['data']['_rev'].should match(/^\d+$/)
document['data']['test'].should eq(i)
}
end
end
end
end

View File

@ -1265,7 +1265,7 @@ int ReplicationFetcher::getLocalState (string& errorMsg) {
int ReplicationFetcher::getMasterState (string& errorMsg) {
map<string, string> headers;
static const string url = BaseUrl +
"/log-state" +
"/logger-state" +
"?serverId=" + _localServerIdString;
// send request
@ -2009,7 +2009,7 @@ int ReplicationFetcher::followMasterLog (string& errorMsg,
bool& worked,
bool& masterActive) {
const string baseUrl = BaseUrl +
"/log-follow?chunkSize=" + StringUtils::itoa(getChunkSize());
"/logger-follow?chunkSize=" + StringUtils::itoa(getChunkSize());
map<string, string> headers;

View File

@ -120,25 +120,25 @@ Handler::status_e RestReplicationHandler::execute() {
if (len == 1) {
const string& command = suffix[0];
if (command == "log-start") {
if (command == "logger-start") {
if (type != HttpRequest::HTTP_REQUEST_PUT) {
goto BAD_CALL;
}
handleCommandLoggerStart();
}
else if (command == "log-stop") {
else if (command == "logger-stop") {
if (type != HttpRequest::HTTP_REQUEST_PUT) {
goto BAD_CALL;
}
handleCommandLoggerStop();
}
else if (command == "log-state") {
else if (command == "logger-state") {
if (type != HttpRequest::HTTP_REQUEST_GET) {
goto BAD_CALL;
}
handleCommandLoggerState();
}
else if (command == "log-follow") {
else if (command == "logger-follow") {
if (type != HttpRequest::HTTP_REQUEST_GET) {
goto BAD_CALL;
}
@ -156,7 +156,7 @@ Handler::status_e RestReplicationHandler::execute() {
}
handleCommandDump();
}
else if (command == "apply-config") {
else if (command == "applier-config") {
if (type == HttpRequest::HTTP_REQUEST_GET) {
handleCommandApplierGetConfig();
}
@ -167,19 +167,19 @@ Handler::status_e RestReplicationHandler::execute() {
handleCommandApplierSetConfig();
}
}
else if (command == "apply-start") {
else if (command == "applier-start") {
if (type != HttpRequest::HTTP_REQUEST_PUT) {
goto BAD_CALL;
}
handleCommandApplierStart();
}
else if (command == "apply-stop") {
else if (command == "applier-stop") {
if (type != HttpRequest::HTTP_REQUEST_PUT) {
goto BAD_CALL;
}
handleCommandApplierStop();
}
else if (command == "apply-state") {
else if (command == "applier-state") {
if (type == HttpRequest::HTTP_REQUEST_DELETE) {
handleCommandApplierDeleteState();
}
@ -290,9 +290,11 @@ uint64_t RestReplicationHandler::determineChunkSize () const {
const char* value = _request->value("chunkSize", found);
if (found) {
// url parameter "chunkSize" specified
chunkSize = (uint64_t) StringUtils::uint64(value);
}
if (chunkSize < minChunkSize) {
else {
// not specified, use default
chunkSize = minChunkSize;
}
@ -342,7 +344,6 @@ void RestReplicationHandler::handleCommandLoggerStop () {
TRI_Insert3ArrayJson(TRI_CORE_MEM_ZONE, &result, "running", TRI_CreateBooleanJson(TRI_CORE_MEM_ZONE, false));
generateResult(&result);
TRI_DestroyJson(TRI_CORE_MEM_ZONE, &result);
}

View File

@ -284,7 +284,7 @@ static int DatafileComparator (const void* lhs, const void* rhs) {
////////////////////////////////////////////////////////////////////////////////
static void SortFilenames (TRI_vector_string_t* files) {
if (files->_length < 1) {
if (files->_length <= 1) {
return;
}
@ -296,7 +296,7 @@ static void SortFilenames (TRI_vector_string_t* files) {
////////////////////////////////////////////////////////////////////////////////
static void SortDatafiles (TRI_vector_pointer_t* files) {
if (files->_length < 1) {
if (files->_length <= 1) {
return;
}
@ -1181,7 +1181,7 @@ TRI_json_t* TRI_ReadJsonCollectionInfo (TRI_vocbase_col_t* collection) {
/// @brief iterate over the index (JSON) files of a collection, using a callback
/// function for each.
/// This function does not require the collection to be loaded.
/// The caller must make sure that the files is not modified while this
/// The caller must make sure that the files are not modified while this
/// function is called.
////////////////////////////////////////////////////////////////////////////////
@ -1202,13 +1202,16 @@ int TRI_IterateJsonIndexesCollectionInfo (TRI_vocbase_col_t* collection,
files = TRI_FilesDirectory(collection->_path);
n = files._length;
res = TRI_ERROR_NO_ERROR;
// sort by index id
SortFilenames(&files);
for (i = 0; i < n; ++i) {
char const* file = files._buffer[i];
if (regexec(&re, file, (size_t) 0, NULL, 0) == 0) {
char* fqn = TRI_Concatenate2File(collection->_path, file);
res = filter(collection, fqn, data);
TRI_FreeString(TRI_CORE_MEM_ZONE, fqn);

View File

@ -57,11 +57,12 @@
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/// @brief set flag to terminate the apply start
/// @brief set flag to terminate the applier thread
////////////////////////////////////////////////////////////////////////////////
static void SetTerminateFlag (TRI_replication_applier_t* applier,
bool value) {
TRI_LockCondition(&applier->_runStateChangeCondition);
applier->_terminateThread = value;
TRI_UnlockCondition(&applier->_runStateChangeCondition);
@ -82,7 +83,7 @@ static bool CheckTerminateFlag (TRI_replication_applier_t* applier) {
}
////////////////////////////////////////////////////////////////////////////////
/// @brief stringify an apply phase name
/// @brief stringify an applier phase name
////////////////////////////////////////////////////////////////////////////////
static const char* StringifyPhase (TRI_replication_apply_phase_e phase) {
@ -682,8 +683,8 @@ TRI_replication_applier_t* TRI_CreateReplicationApplier (TRI_vocbase_t* vocbase)
TRI_InitSpin(&applier->_threadLock);
TRI_InitCondition(&applier->_runStateChangeCondition);
applier->_vocbase = vocbase;
applier->_databaseName = TRI_DuplicateStringZ(TRI_CORE_MEM_ZONE, vocbase->_name);
applier->_vocbase = vocbase;
applier->_databaseName = TRI_DuplicateStringZ(TRI_CORE_MEM_ZONE, vocbase->_name);
SetTerminateFlag(applier, false);

View File

@ -116,6 +116,7 @@ typedef struct {
TRI_datafile_t* _data;
TRI_voc_tick_t _dataMin;
TRI_voc_tick_t _dataMax;
TRI_voc_tick_t _tickMax;
bool _isJournal;
}
df_entry_t;
@ -158,13 +159,15 @@ static int IterateDatafiles (TRI_vector_pointer_t const* datafiles,
df,
df->_dataMin,
df->_dataMax,
df->_tickMax,
isJournal
};
LOG_TRACE("checking datafile %llu with data range %llu - %llu",
LOG_TRACE("checking datafile %llu with data range %llu - %llu, tick max: %llu",
(unsigned long long) df->_fid,
(unsigned long long) df->_dataMin,
(unsigned long long) df->_dataMax);
(unsigned long long) df->_dataMax,
(unsigned long long) df->_tickMax);
if (df->_dataMin == 0 || df->_dataMax == 0) {
// datafile doesn't have any data
@ -548,7 +551,7 @@ static int DumpCollection (TRI_replication_dump_t* dump,
TRI_string_buffer_t* buffer;
TRI_voc_tick_t lastFoundTick;
TRI_voc_tid_t lastTid;
size_t i;
size_t i, n;
int res;
bool hasMore;
bool bufferFull;
@ -572,7 +575,9 @@ static int DumpCollection (TRI_replication_dump_t* dump,
bufferFull = false;
ignoreMarkers = false;
for (i = 0; i < datafiles._length; ++i) {
n = datafiles._length;
for (i = 0; i < n; ++i) {
df_entry_t* e = (df_entry_t*) TRI_AtVector(&datafiles, i);
TRI_datafile_t* datafile = e->_data;
TRI_vector_t* failedList;
@ -631,12 +636,6 @@ static int DumpCollection (TRI_replication_dump_t* dump,
ptr += TRI_DF_ALIGN_BLOCK(marker->_size);
if (marker->_type != TRI_DOC_MARKER_KEY_DOCUMENT &&
marker->_type != TRI_DOC_MARKER_KEY_EDGE &&
marker->_type != TRI_DOC_MARKER_KEY_DELETION) {
continue;
}
// get the marker's tick and check whether we should include it
foundTick = marker->_tick;
@ -650,6 +649,23 @@ static int DumpCollection (TRI_replication_dump_t* dump,
hasMore = false;
goto NEXT_DF;
}
if (marker->_type != TRI_DOC_MARKER_KEY_DOCUMENT &&
marker->_type != TRI_DOC_MARKER_KEY_EDGE &&
marker->_type != TRI_DOC_MARKER_KEY_DELETION) {
// found a non-data marker...
// check if we can abort searching
if (foundTick >= dataMax ||
(foundTick >= e->_tickMax && i == (n - 1))) {
// fetched the last available marker
hasMore = false;
goto NEXT_DF;
}
continue;
}
// note the last tick we processed
lastFoundTick = foundTick;
@ -684,11 +700,19 @@ static int DumpCollection (TRI_replication_dump_t* dump,
}
}
if (! StringifyMarkerDump(buffer, document, marker)) {
res = TRI_ERROR_INTERNAL;
goto NEXT_DF;
}
if (foundTick >= dataMax ||
(foundTick >= e->_tickMax && i == (n - 1))) {
// fetched the last available marker
hasMore = false;
goto NEXT_DF;
}
if ((uint64_t) TRI_LengthStringBuffer(buffer) > chunkSize) {
// abort the iteration
@ -748,7 +772,7 @@ static int DumpLog (TRI_replication_dump_t* dump,
TRI_document_collection_t* document;
TRI_string_buffer_t* buffer;
TRI_voc_tick_t lastFoundTick;
size_t i;
size_t i, n;
int res;
bool hasMore;
bool bufferFull;
@ -769,7 +793,9 @@ static int DumpLog (TRI_replication_dump_t* dump,
hasMore = true;
bufferFull = false;
for (i = 0; i < datafiles._length; ++i) {
n = datafiles._length;
for (i = 0; i < n; ++i) {
df_entry_t* e = (df_entry_t*) TRI_AtVector(&datafiles, i);
TRI_datafile_t* datafile = e->_data;
char const* ptr;
@ -805,15 +831,7 @@ static int DumpLog (TRI_replication_dump_t* dump,
}
ptr += TRI_DF_ALIGN_BLOCK(marker->_size);
if (marker->_type != TRI_DOC_MARKER_KEY_DOCUMENT) {
// we're only interested in document markers here
// the replication collection does not contain any edge markers
// and deletion markers in the replication collection
// will not be replicated
continue;
}
// get the marker's tick and check whether we should include it
foundTick = marker->_tick;
@ -821,12 +839,29 @@ static int DumpLog (TRI_replication_dump_t* dump,
// marker too old
continue;
}
if (foundTick > dataMax) {
// marker too new
hasMore = false;
goto NEXT_DF;
}
if (marker->_type != TRI_DOC_MARKER_KEY_DOCUMENT) {
// we're only interested in document markers here
// the replication collection does not contain any edge markers
// and deletion markers in the replication collection
// will not be replicated
// check if we can abort searching
if (foundTick >= dataMax ||
(foundTick >= e->_tickMax && i == (n - 1))) {
// fetched the last available marker
hasMore = false;
goto NEXT_DF;
}
continue;
}
// note the last tick we processed
lastFoundTick = foundTick;
@ -837,6 +872,13 @@ static int DumpLog (TRI_replication_dump_t* dump,
goto NEXT_DF;
}
if (foundTick >= dataMax ||
(foundTick >= e->_dataMax && i == (n - 1))) {
// fetched the last available marker
hasMore = false;
goto NEXT_DF;
}
if ((uint64_t) TRI_LengthStringBuffer(buffer) > chunkSize) {
// abort the iteration
bufferFull = true;

View File

@ -47,6 +47,116 @@
// --SECTION-- REPLICATION LOGGER
// -----------------------------------------------------------------------------
// -----------------------------------------------------------------------------
// --SECTION-- CLIENT HANDLING
// -----------------------------------------------------------------------------
// -----------------------------------------------------------------------------
// --SECTION-- private types
// -----------------------------------------------------------------------------
////////////////////////////////////////////////////////////////////////////////
/// @addtogroup VocBase
/// @{
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/// @brief struct to hold a client action
////////////////////////////////////////////////////////////////////////////////
typedef struct logger_client_s {
TRI_server_id_t _serverId;
char* _url;
char _stamp[24];
}
logger_client_t;
////////////////////////////////////////////////////////////////////////////////
/// @}
////////////////////////////////////////////////////////////////////////////////
// -----------------------------------------------------------------------------
// --SECTION-- private functions
// -----------------------------------------------------------------------------
////////////////////////////////////////////////////////////////////////////////
/// @addtogroup VocBase
/// @{
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/// @brief hashes a client id
////////////////////////////////////////////////////////////////////////////////
static uint64_t HashKeyClient (TRI_associative_pointer_t* array,
void const* key) {
TRI_server_id_t const* k = key;
// the server id is already a unique integer, so it is used as the hash directly
return (uint64_t) *k;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief hashes a client struct
////////////////////////////////////////////////////////////////////////////////
static uint64_t HashElementClient (TRI_associative_pointer_t* array,
void const* element) {
logger_client_t const* e = element;
// hash by server id, consistent with HashKeyClient so lookups by key find elements
return (uint64_t) e->_serverId;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief compares a client key and a client struct
////////////////////////////////////////////////////////////////////////////////
static bool IsEqualKeyClient (TRI_associative_pointer_t* array,
void const* key,
void const* element) {
TRI_server_id_t const* k = key;
logger_client_t const* e = element;
// clients are identified solely by their server id
return *k == e->_serverId;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief free a single registered client
////////////////////////////////////////////////////////////////////////////////
static void FreeClient (logger_client_t* client) {
// _url may be NULL if the client struct was only partially initialized
if (client->_url != NULL) {
TRI_Free(TRI_UNKNOWN_MEM_ZONE, client->_url);
}
TRI_Free(TRI_UNKNOWN_MEM_ZONE, client);
}
////////////////////////////////////////////////////////////////////////////////
/// @brief free registered clients
////////////////////////////////////////////////////////////////////////////////
static void FreeClients (TRI_replication_logger_t* logger) {
uint32_t i, n;
// iterate over all allocated slots of the associative array; unused slots are NULL
n = logger->_clients._nrAlloc;
for (i = 0; i < n; ++i) {
logger_client_t* client = logger->_clients._table[i];
if (client != NULL) {
FreeClient(client);
}
}
}
////////////////////////////////////////////////////////////////////////////////
/// @}
////////////////////////////////////////////////////////////////////////////////
// -----------------------------------------------------------------------------
// --SECTION-- LOGGING
// -----------------------------------------------------------------------------
// -----------------------------------------------------------------------------
// --SECTION-- private defines
// -----------------------------------------------------------------------------
@ -112,7 +222,7 @@
/// @brief number of pre-allocated string buffers for logging
////////////////////////////////////////////////////////////////////////////////
static size_t NumBuffers = 8;
static size_t NumBuffers = 16;
////////////////////////////////////////////////////////////////////////////////
/// @brief pre-allocated size for each log buffer
@ -124,30 +234,6 @@ static size_t BufferSize = 256;
/// @}
////////////////////////////////////////////////////////////////////////////////
// -----------------------------------------------------------------------------
// --SECTION-- private types
// -----------------------------------------------------------------------------
////////////////////////////////////////////////////////////////////////////////
/// @addtogroup VocBase
/// @{
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/// @brief struct to hold a client action
////////////////////////////////////////////////////////////////////////////////
typedef struct logger_client_s {
TRI_server_id_t _serverId;
char* _url;
char _stamp[24];
}
logger_client_t;
////////////////////////////////////////////////////////////////////////////////
/// @}
////////////////////////////////////////////////////////////////////////////////
// -----------------------------------------------------------------------------
// --SECTION-- private functions
// -----------------------------------------------------------------------------
@ -163,10 +249,10 @@ logger_client_t;
static TRI_replication_operation_e TranslateDocumentOperation (TRI_voc_document_operation_e type,
TRI_document_collection_t const* document) {
const bool isEdge = (document->base.base._info._type == TRI_COL_TYPE_EDGE);
if (type == TRI_VOC_DOCUMENT_OPERATION_INSERT || type == TRI_VOC_DOCUMENT_OPERATION_UPDATE) {
return isEdge ? MARKER_EDGE : MARKER_DOCUMENT;
const bool isEdgeCollection = (document->base.base._info._type == TRI_COL_TYPE_EDGE);
return isEdgeCollection ? MARKER_EDGE : MARKER_DOCUMENT;
}
else if (type == TRI_VOC_DOCUMENT_OPERATION_REMOVE) {
return MARKER_REMOVE;
@ -315,7 +401,7 @@ static int LogEvent (TRI_replication_logger_t* logger,
true);
}
LOG_TRACE("replication event, type: %d, tid: %llu, sync: %d, data: %s",
LOG_TRACE("logging replication event, type: %d, tid: %llu, sync: %d, data: %s",
(int) type,
(unsigned long long) tid,
(int) forceSync,
@ -347,9 +433,10 @@ static int LogEvent (TRI_replication_logger_t* logger,
return res;
}
// assert the write was successful
assert(mptr._data != NULL);
// note the last id that we've logged
// update the last tick that we've logged
TRI_LockSpin(&logger->_idLock);
logger->_state._lastLogTick = ((TRI_df_marker_t*) mptr._data)->_tick;
TRI_UnlockSpin(&logger->_idLock);
@ -398,6 +485,10 @@ static bool StringifyTickReplication (TRI_string_buffer_t* buffer,
static bool StringifyCreateCollection (TRI_string_buffer_t* buffer,
TRI_voc_cid_t cid,
TRI_json_t const* json) {
if (buffer == NULL) {
return false;
}
APPEND_STRING(buffer, "{\"cid\":\"");
APPEND_UINT64(buffer, (uint64_t) cid);
APPEND_STRING(buffer, "\",\"collection\":");
@ -413,6 +504,10 @@ static bool StringifyCreateCollection (TRI_string_buffer_t* buffer,
static bool StringifyDropCollection (TRI_string_buffer_t* buffer,
TRI_voc_cid_t cid) {
if (buffer == NULL) {
return false;
}
APPEND_CHAR(buffer, '{');
if (! StringifyCollection(buffer, cid)) {
@ -432,6 +527,10 @@ static bool StringifyRenameCollection (TRI_string_buffer_t* buffer,
TRI_voc_cid_t cid,
char const* name) {
if (buffer == NULL) {
return false;
}
APPEND_CHAR(buffer, '{');
if (! StringifyCollection(buffer, cid)) {
@ -453,6 +552,10 @@ static bool StringifyRenameCollection (TRI_string_buffer_t* buffer,
static bool StringifyCreateIndex (TRI_string_buffer_t* buffer,
TRI_voc_cid_t cid,
TRI_json_t const* json) {
if (buffer == NULL) {
return false;
}
APPEND_CHAR(buffer, '{');
if (! StringifyCollection(buffer, cid)) {
@ -473,6 +576,10 @@ static bool StringifyCreateIndex (TRI_string_buffer_t* buffer,
static bool StringifyDropIndex (TRI_string_buffer_t* buffer,
TRI_voc_cid_t cid,
TRI_idx_iid_t iid) {
if (buffer == NULL) {
return false;
}
APPEND_CHAR(buffer, '{');
if (! StringifyCollection(buffer, cid)) {
@ -499,6 +606,10 @@ static bool StringifyDocumentOperation (TRI_string_buffer_t* buffer,
TRI_voc_key_t key;
TRI_voc_rid_t oldRev;
TRI_voc_rid_t rid;
if (buffer == NULL) {
return false;
}
if (TRI_ReserveStringBuffer(buffer, 256) != TRI_ERROR_NO_ERROR) {
return false;
@ -614,6 +725,10 @@ static bool StringifyMetaTransaction (TRI_string_buffer_t* buffer,
TRI_transaction_t const* trx) {
size_t i, n;
bool printed;
if (buffer == NULL) {
return false;
}
APPEND_STRING(buffer, "{\"collections\":[");
@ -1049,71 +1164,6 @@ static int HandleTransaction (TRI_replication_logger_t* logger,
return res;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief hashes a client id
////////////////////////////////////////////////////////////////////////////////
static uint64_t HashKeyClient (TRI_associative_pointer_t* array,
void const* key) {
TRI_server_id_t const* k = key;
return (uint64_t) *k;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief hashes a client struct
////////////////////////////////////////////////////////////////////////////////
static uint64_t HashElementClient (TRI_associative_pointer_t* array,
void const* element) {
logger_client_t const* e = element;
return (uint64_t) e->_serverId;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief compares a client key and a client struct
////////////////////////////////////////////////////////////////////////////////
static bool IsEqualKeyClient (TRI_associative_pointer_t* array,
void const* key,
void const* element) {
TRI_server_id_t const* k = key;
logger_client_t const* e = element;
return *k == e->_serverId;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief free a single registered client
////////////////////////////////////////////////////////////////////////////////
static void FreeClient (logger_client_t* client) {
if (client->_url != NULL) {
TRI_Free(TRI_UNKNOWN_MEM_ZONE, client->_url);
}
TRI_Free(TRI_UNKNOWN_MEM_ZONE, client);
}
////////////////////////////////////////////////////////////////////////////////
/// @brief free registered clients
////////////////////////////////////////////////////////////////////////////////
static void FreeClients (TRI_replication_logger_t* logger) {
uint32_t i, n;
n = logger->_clients._nrAlloc;
for (i = 0; i < n; ++i) {
logger_client_t* client = logger->_clients._table[i];
if (client != NULL) {
FreeClient(client);
}
}
}
////////////////////////////////////////////////////////////////////////////////
/// @}
////////////////////////////////////////////////////////////////////////////////

View File

@ -1270,7 +1270,7 @@ static int FilterCollectionIndex (TRI_vocbase_col_t* collection,
if (id != NULL && id->_type == TRI_JSON_NUMBER) {
uint64_t iid = (uint64_t) id->_value._number;
if (iid >= (uint64_t) ij->_maxTick) {
if (iid > (uint64_t) ij->_maxTick) {
// index too new
TRI_FreeJson(TRI_CORE_MEM_ZONE, indexJson);
}
@ -1286,7 +1286,7 @@ static int FilterCollectionIndex (TRI_vocbase_col_t* collection,
else if (TRI_IsStringJson(id)) {
uint64_t iid = TRI_UInt64String2(id->_value._string.data, id->_value._string.length - 1);
if (iid >= (uint64_t) ij->_maxTick) {
if (iid > (uint64_t) ij->_maxTick) {
// index too new
TRI_FreeJson(TRI_CORE_MEM_ZONE, indexJson);
}
@ -1947,6 +1947,7 @@ TRI_json_t* TRI_InventoryCollectionsVocBase (TRI_vocbase_t* vocbase,
TRI_Insert3ArrayJson(TRI_CORE_MEM_ZONE, result, "parameters", collectionInfo);
indexesInfo = TRI_CreateListJson(TRI_CORE_MEM_ZONE);
if (indexesInfo != NULL) {
index_json_helper_t ij;
ij._list = indexesInfo;

View File

@ -144,6 +144,7 @@
<script src="js/modules/org/arangodb/graph-common.js"></script>
<script src="js/modules/org/arangodb/graph.js"></script>
<script src="js/modules/org/arangodb/mimetypes.js"></script>
<script src="js/modules/org/arangodb/replication.js"></script>
<script src="js/modules/org/arangodb/simple-query-common.js"></script>
<script src="js/modules/org/arangodb/simple-query.js"></script>
<script src="js/modules/org/arangodb/aql/functions.js"></script>

View File

@ -11,7 +11,7 @@ window.ArangoReplication = Backbone.Collection.extend({
$.ajax({
type: "GET",
cache: false,
url: "/_api/replication/log-state",
url: "/_api/replication/logger-state",
contentType: "application/json",
processData: false,
async: false,
@ -29,7 +29,7 @@ window.ArangoReplication = Backbone.Collection.extend({
$.ajax({
type: "GET",
cache: false,
url: "/_api/replication/apply-state",
url: "/_api/replication/applier-state",
contentType: "application/json",
processData: false,
async: false,

View File

@ -0,0 +1,209 @@
module.define("org/arangodb/replication", function(exports, module) {
/*jslint indent: 2, nomen: true, maxlen: 100, sloppy: true, vars: true, white: true, plusplus: true */
/*global require, exports */
////////////////////////////////////////////////////////////////////////////////
/// @brief Replication management
///
/// @file
///
/// DISCLAIMER
///
/// Copyright 2012 triagens GmbH, Cologne, Germany
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
/// http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///
/// Copyright holder is triAGENS GmbH, Cologne, Germany
///
/// @author Jan Steemann
/// @author Copyright 2013, triAGENS GmbH, Cologne, Germany
////////////////////////////////////////////////////////////////////////////////
var internal = require("internal");
var arangosh = require("org/arangodb/arangosh");
// -----------------------------------------------------------------------------
// --SECTION-- module "org/arangodb/replication"
// -----------------------------------------------------------------------------
// -----------------------------------------------------------------------------
// --SECTION-- private functions
// -----------------------------------------------------------------------------
////////////////////////////////////////////////////////////////////////////////
/// @addtogroup ArangoShell
/// @{
////////////////////////////////////////////////////////////////////////////////
// namespace objects bundling the logger and applier control functions below
var logger = { };
var applier = { };
////////////////////////////////////////////////////////////////////////////////
/// @brief starts the replication logger
////////////////////////////////////////////////////////////////////////////////
logger.start = function () {
'use strict';
var db = internal.db;
var requestResult = db._connection.PUT("_api/replication/logger-start", "");
// throws an ArangoError if the server reported an error
arangosh.checkRequestResult(requestResult);
return requestResult;
};
////////////////////////////////////////////////////////////////////////////////
/// @brief stops the replication logger
////////////////////////////////////////////////////////////////////////////////
logger.stop = function () {
'use strict';
var db = internal.db;
var requestResult = db._connection.PUT("_api/replication/logger-stop", "");
arangosh.checkRequestResult(requestResult);
return requestResult;
};
////////////////////////////////////////////////////////////////////////////////
/// @brief return the replication logger state
////////////////////////////////////////////////////////////////////////////////
logger.state = function () {
'use strict';
var db = internal.db;
var requestResult = db._connection.GET("_api/replication/logger-state");
arangosh.checkRequestResult(requestResult);
return requestResult;
};
////////////////////////////////////////////////////////////////////////////////
/// @brief starts the replication applier
////////////////////////////////////////////////////////////////////////////////
applier.start = function (forceFullSynchronisation) {
'use strict';
var db = internal.db;
// optionally request a full (re-)synchronisation before the applier starts
var append = (forceFullSynchronisation ? "?fullSync=true" : "");
var requestResult = db._connection.PUT("_api/replication/applier-start" + append, "");
arangosh.checkRequestResult(requestResult);
return requestResult;
};
////////////////////////////////////////////////////////////////////////////////
/// @brief stops the replication applier
////////////////////////////////////////////////////////////////////////////////
applier.stop = function () {
'use strict';
var db = internal.db;
var requestResult = db._connection.PUT("_api/replication/applier-stop", "");
arangosh.checkRequestResult(requestResult);
return requestResult;
};
////////////////////////////////////////////////////////////////////////////////
/// @brief return the replication applier state
////////////////////////////////////////////////////////////////////////////////
applier.state = function () {
'use strict';
var db = internal.db;
var requestResult = db._connection.GET("_api/replication/applier-state");
arangosh.checkRequestResult(requestResult);
return requestResult;
};
////////////////////////////////////////////////////////////////////////////////
/// @brief stops the replication applier and removes ("forgets") all applier state
////////////////////////////////////////////////////////////////////////////////
applier.forget = function () {
'use strict';
var db = internal.db;
var requestResult = db._connection.DELETE("_api/replication/applier-state");
arangosh.checkRequestResult(requestResult);
return requestResult;
};
////////////////////////////////////////////////////////////////////////////////
/// @brief configures the replication applier
////////////////////////////////////////////////////////////////////////////////
applier.properties = function (config) {
'use strict';
var db = internal.db;
var requestResult;
// without an argument: fetch the current configuration; with one: update it
if (config === undefined) {
requestResult = db._connection.GET("_api/replication/applier-config");
}
else {
requestResult = db._connection.PUT("_api/replication/applier-config",
JSON.stringify(config));
}
arangosh.checkRequestResult(requestResult);
return requestResult;
};
////////////////////////////////////////////////////////////////////////////////
/// @}
////////////////////////////////////////////////////////////////////////////////
// -----------------------------------------------------------------------------
// --SECTION-- module exports
// -----------------------------------------------------------------------------
////////////////////////////////////////////////////////////////////////////////
/// @addtogroup ArangoShell
/// @{
////////////////////////////////////////////////////////////////////////////////
exports.logger = logger;
exports.applier = applier;
////////////////////////////////////////////////////////////////////////////////
/// @}
////////////////////////////////////////////////////////////////////////////////
// -----------------------------------------------------------------------------
// --SECTION-- END-OF-FILE
// -----------------------------------------------------------------------------
// Local Variables:
// mode: outline-minor
// outline-regexp: "/// @brief\\|/// @addtogroup\\|// --SECTION--\\|/// @page\\|/// @}\\|/\\*jslint"
// End:
});

View File

@ -25,6 +25,7 @@ JAVASCRIPT_BROWSER = \
html/admin/js/modules/org/arangodb/graph/traversal.js \
html/admin/js/modules/org/arangodb/aql/functions.js \
html/admin/js/modules/org/arangodb/mimetypes.js \
html/admin/js/modules/org/arangodb/replication.js \
html/admin/js/modules/org/arangodb/simple-query-common.js \
\
html/admin/js/bootstrap/errors.js \

View File

@ -56,7 +56,7 @@ logger.start = function () {
var db = internal.db;
var requestResult = db._connection.PUT("_api/replication/log-start", "");
var requestResult = db._connection.PUT("_api/replication/logger-start", "");
arangosh.checkRequestResult(requestResult);
return requestResult;
@ -71,7 +71,7 @@ logger.stop = function () {
var db = internal.db;
var requestResult = db._connection.PUT("_api/replication/log-stop", "");
var requestResult = db._connection.PUT("_api/replication/logger-stop", "");
arangosh.checkRequestResult(requestResult);
return requestResult;
@ -86,7 +86,7 @@ logger.state = function () {
var db = internal.db;
var requestResult = db._connection.GET("_api/replication/log-state");
var requestResult = db._connection.GET("_api/replication/logger-state");
arangosh.checkRequestResult(requestResult);
return requestResult;
@ -102,7 +102,7 @@ applier.start = function (forceFullSynchronisation) {
var db = internal.db;
var append = (forceFullSynchronisation ? "?fullSync=true" : "");
var requestResult = db._connection.PUT("_api/replication/apply-start" + append, "");
var requestResult = db._connection.PUT("_api/replication/applier-start" + append, "");
arangosh.checkRequestResult(requestResult);
return requestResult;
@ -117,7 +117,7 @@ applier.stop = function () {
var db = internal.db;
var requestResult = db._connection.PUT("_api/replication/apply-stop", "");
var requestResult = db._connection.PUT("_api/replication/applier-stop", "");
arangosh.checkRequestResult(requestResult);
return requestResult;
@ -132,7 +132,7 @@ applier.state = function () {
var db = internal.db;
var requestResult = db._connection.GET("_api/replication/apply-state");
var requestResult = db._connection.GET("_api/replication/applier-state");
arangosh.checkRequestResult(requestResult);
return requestResult;
@ -147,7 +147,7 @@ applier.forget = function () {
var db = internal.db;
var requestResult = db._connection.DELETE("_api/replication/apply-state");
var requestResult = db._connection.DELETE("_api/replication/applier-state");
arangosh.checkRequestResult(requestResult);
return requestResult;
@ -164,10 +164,10 @@ applier.properties = function (config) {
var requestResult;
if (config === undefined) {
requestResult = db._connection.GET("_api/replication/apply-config");
requestResult = db._connection.GET("_api/replication/applier-config");
}
else {
requestResult = db._connection.PUT("_api/replication/apply-config",
requestResult = db._connection.PUT("_api/replication/applier-config",
JSON.stringify(config));
}

View File

@ -2269,7 +2269,7 @@ function ReplicationApplierSuite () {
assertFalse(state.state.running);
// configure && start
replication.applier.properties({ endpoint: "unix:///tmp/non-existing-socket1234" });
replication.applier.properties({ endpoint: "tcp://9.9.9.9:9999" });
replication.applier.start();
state = replication.applier.state();
@ -2294,7 +2294,7 @@ function ReplicationApplierSuite () {
assertFalse(state.state.running);
// configure && start
replication.applier.properties({ endpoint: "unix:///tmp/non-existing-socket1234" });
replication.applier.properties({ endpoint: "tcp://9.9.9.9:9999" });
replication.applier.start();
state = replication.applier.state();
@ -2341,11 +2341,11 @@ function ReplicationApplierSuite () {
}
replication.applier.properties({
endpoint: "unix:///tmp/non-existing-socket1234"
endpoint: "tcp://9.9.9.9:9999"
});
properties = replication.applier.properties();
assertEqual(properties.endpoint, "unix:///tmp/non-existing-socket1234");
assertEqual(properties.endpoint, "tcp://9.9.9.9:9999");
assertEqual(300, properties.requestTimeout);
assertEqual(10, properties.connectTimeout);
assertEqual(10, properties.maxConnectRetries);
@ -2353,7 +2353,7 @@ function ReplicationApplierSuite () {
assertTrue(properties.adaptivePolling);
replication.applier.properties({
endpoint: "unix:///tmp/non-existing-socket5678",
endpoint: "tcp://9.9.9.9:9998",
autoStart: true,
adaptivePolling: false,
requestTimeout: 5,
@ -2362,7 +2362,7 @@ function ReplicationApplierSuite () {
});
properties = replication.applier.properties();
assertEqual(properties.endpoint, "unix:///tmp/non-existing-socket5678");
assertEqual(properties.endpoint, "tcp://9.9.9.9:9998");
assertEqual(5, properties.requestTimeout);
assertEqual(9, properties.connectTimeout);
assertEqual(4, properties.maxConnectRetries);
@ -2377,14 +2377,14 @@ function ReplicationApplierSuite () {
testApplierPropertiesChange : function () {
var state;
replication.applier.properties({ endpoint: "unix:///tmp/non-existing-socket1234" });
replication.applier.properties({ endpoint: "tcp://9.9.9.9:9999" });
replication.applier.start();
state = replication.applier.state();
assertTrue(state.state.running);
try {
replication.applier.properties({ endpoint: "unix:///tmp/non-existing-socket5678" });
replication.applier.properties({ endpoint: "tcp://9.9.9.9:9998" });
}
catch (err) {
assertEqual(errors.ERROR_REPLICATION_RUNNING.code, err.errorNum);