本文整理汇总了Python中membase.helper.spatial_helper.SpatialHelper类的典型用法代码示例。如果您正苦于以下问题:Python SpatialHelper类的具体用法?Python SpatialHelper怎么用?Python SpatialHelper使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了SpatialHelper类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: SpatialCompactionTests
class SpatialCompactionTests(BaseTestCase):
    """Manual-compaction tests for spatial indexes.

    Optionally grows the cluster in setUp and rebalances nodes in/out
    while the compaction test runs.
    """

    def setUp(self):
        super(SpatialCompactionTests, self).setUp()
        # Cluster size to start from; defaults to all available servers.
        self.start_cluster = self.input.param('start-cluster', len(self.servers))
        self.servers_in = self.input.param('servers_in', 0)
        self.servers_out = self.input.param('servers_out', 0)
        self.bucket_name = "default"
        if self.standard_buckets:
            self.bucket_name = "standard_bucket0"
        if self.sasl_buckets:
            self.bucket_name = "bucket0"
        self.helper = SpatialHelper(self, self.bucket_name)
        if self.start_cluster > 1:
            # Fix: the original referenced the unbound name `start_cluster`
            # (NameError at runtime); the parameter is stored on self.
            rebalance = self.cluster.async_rebalance(
                self.servers[:1], self.servers[1:self.start_cluster], [])
            rebalance.result()

    def tearDown(self):
        super(SpatialCompactionTests, self).tearDown()

    def test_spatial_compaction(self):
        """Build up a spatial index, compact it manually and verify the
        on-disk size shrank; optionally rebalance in/out concurrently."""
        self.log.info(
            "description : test manual compaction for spatial indexes")
        prefix = str(uuid.uuid4())[:7]
        design_name = "dev_test_spatial_compaction"
        self.helper.create_index_fun(design_name, prefix)
        # Insert (resp. update, as they have the same prefix) and query
        # the spatial index several times so that the compaction makes sense.
        for i in range(0, 8):
            self.helper.insert_docs(2000, prefix)
            self.helper.get_results(design_name)
        # Get the index size prior to compaction.
        status, info = self.helper.info(design_name)
        disk_size = info["spatial_index"]["disk_size"]
        if self.servers_in or self.servers_out:
            servs_in = []
            servs_out = []
            if self.servers_in:
                servs_in = self.servers[self.start_cluster:self.servers_in + 1]
            if self.servers_out:
                servs_out = self.servers[-self.servers_out:]
            rebalance = self.cluster.async_rebalance(self.servers, servs_in, servs_out)
        # Do the compaction.
        self.helper.compact(design_name)
        # Check if the index size got smaller.
        status, info = self.helper.info(design_name)
        self.assertTrue(info["spatial_index"]["disk_size"] < disk_size,
                        "The file size ({0}) isn't smaller than the "
                        "pre compaction size ({1})."
                        .format(info["spatial_index"]["disk_size"],
                                disk_size))
        if self.servers_in or self.servers_out:
            rebalance.result()
示例2: setUp
def setUp(self):
    """Prepare the cluster, pick the bucket under test, load sample
    documents and (for update/delete ops) pre-create the design docs."""
    super(SpatialViewsTests, self).setUp()
    self.thread_crashed = Event()
    self.thread_stopped = Event()
    self.skip_rebalance = self.input.param("skip_rebalance", False)
    self.use_dev_views = self.input.param("use-dev-views", False)
    self.default_map = "function (doc) {emit(doc.geometry, doc.age);}"
    self.map_updated = "function (doc) {emit(doc.geometry, doc.name);}"
    self.default_ddoc_name = self.input.param("default_ddoc_name", "test-ddoc")
    self.default_view_name = self.input.param("default_view_name", "test-view")
    self.ddoc_op = self.input.param("ddoc-ops", "create")  # create\update\delete
    # sasl bucket takes precedence over standard bucket, default otherwise
    if self.sasl_buckets:
        self.bucket_name = "bucket0"
    elif self.standard_buckets:
        self.bucket_name = "standard_bucket0"
    else:
        self.bucket_name = "default"
    self.helper = SpatialHelper(self, self.bucket_name)
    if not self.skip_rebalance:
        self.cluster.rebalance(self.servers[:], self.servers[1:], [])
    # Load some items to verify against later.
    self.docs = self.helper.insert_docs(self.num_items, 'spatial-doc',
                                        return_docs=True)
    self.num_ddoc = self.input.param('num-ddoc', 1)
    self.views_per_ddoc = self.input.param('views-per-ddoc', 1)
    self.non_spatial_views_per_ddoc = self.input.param('non-spatial-views-per-ddoc', 0)
    if self.ddoc_op in ('update', 'delete'):
        existing = self.make_ddocs(self.num_ddoc, self.views_per_ddoc,
                                   self.non_spatial_views_per_ddoc)
        self.create_ddocs(existing)
示例3: setUp
def setUp(self):
    # Whole setup is wrapped: any failure flags the run as stop-on-failure
    # so the remaining tests in the suite are skipped.
    try:
        if 'first_case' not in TestInputSingleton.input.test_params:
            # Subsequent cases reuse the bucket created by the first case,
            # so skip bucket creation and cleanup for them.  These params
            # must be set BEFORE super().setUp() reads them.
            TestInputSingleton.input.test_params['default_bucket'] = False
            TestInputSingleton.input.test_params['skip_cleanup'] = True
            TestInputSingleton.input.test_params['skip_buckets_handle'] = True
        self.default_bucket_name = 'default'
        super(SpatialQueryErrorsTests, self).setUp()
        if 'first_case' in TestInputSingleton.input.test_params:
            self.cluster.rebalance(self.servers[:], self.servers[1:], [])
        # We use only one bucket in this test suite
        self.rest = RestConnection(self.master)
        self.bucket = self.rest.get_bucket(Bucket(name=self.default_bucket_name))
        # num_docs must be a multiple of the number of vbuckets
        self.num_docs = self.input.param("num_docs", 2000)
        # `testname` is used for the design document name as well as the
        # spatial function name
        self.testname = 'query-errors'
        self.helper = SpatialHelper(self, "default")
        if 'first_case' in TestInputSingleton.input.test_params:
            self.create_ddoc()
            self.helper.insert_docs(self.num_docs, self.testname)
    except Exception as ex:
        self.input.test_params["stop-on-failure"] = True
        self.log.error("SETUP WAS FAILED. ALL TESTS WILL BE SKIPPED")
        self.fail(ex)
示例4: setUp
def setUp(self):
    # NOTE(review): the helper is constructed BEFORE super().setUp() runs;
    # SpatialHelper apparently only needs the test instance at this point —
    # confirm against SpatialHelper.__init__ before reordering.
    self.helper = SpatialHelper(self, "default")
    super(SpatialQueryTests, self).setUp()
    self.log = logger.Logger.get_logger()
    self.helper.setup_cluster()
    self.cluster = Cluster()
    # Adopt the helper's server list after cluster setup.
    self.servers = self.helper.servers
示例5: setUp
def setUp(self):
    """Read rebalance parameters, pick the bucket under test and
    optionally grow the cluster before the compaction test runs."""
    super(SpatialCompactionTests, self).setUp()
    self.start_cluster = self.input.param('start-cluster', len(self.servers))
    self.servers_in = self.input.param('servers_in', 0)
    self.servers_out = self.input.param('servers_out', 0)
    self.bucket_name = "default"
    if self.standard_buckets:
        self.bucket_name = "standard_bucket0"
    if self.sasl_buckets:
        self.bucket_name = "bucket0"
    self.helper = SpatialHelper(self, self.bucket_name)
    if self.start_cluster > 1:
        # Fix: `start_cluster` was an unbound name (NameError at runtime);
        # the parameter is stored as self.start_cluster.
        rebalance = self.cluster.async_rebalance(
            self.servers[:1], self.servers[1:self.start_cluster], [])
        rebalance.result()
示例6: IBRSpatialTests
class IBRSpatialTests(SpatialQueryTests):
    """Backup/restore tests that involve spatial views."""

    def setUp(self):
        # Manual setup (does not call super().setUp()): wires cluster,
        # bucket flags and backup parameters directly from the test input.
        self.input = TestInputSingleton.input
        self.servers = self.input.servers
        self.master = self.servers[0]
        self.log = logger.Logger.get_logger()
        self.helper = SpatialHelper(self, "default")
        self.helper.setup_cluster()
        self.cluster = Cluster()
        self.default_bucket = self.input.param("default_bucket", True)
        self.sasl_buckets = self.input.param("sasl_buckets", 0)
        self.standard_buckets = self.input.param("standard_buckets", 0)
        self.memcached_buckets = self.input.param("memcached_buckets", 0)
        # Adopt the helper's server list after cluster setup.
        self.servers = self.helper.servers
        self.shell = RemoteMachineShellConnection(self.master)
        info = self.shell.extract_remote_info()
        self.os = info.type.lower()
        self.couchbase_login_info = "%s:%s" % (self.input.membase_settings.rest_username,
                                               self.input.membase_settings.rest_password)
        self.backup_location = self.input.param("backup_location", "/tmp/backup")
        self.command_options = self.input.param("command_options", '')

    def tearDown(self):
        # NOTE(review): no super().tearDown() call — cleanup is delegated
        # entirely to the helper; confirm that is intentional.
        self.helper.cleanup_cluster()

    def test_backup_with_spatial_data(self):
        """Load data and spatial views, take a full backup, delete all
        buckets, restore the backup, recreate the views and re-query."""
        num_docs = self.helper.input.param("num-docs", 5000)
        self.log.info("description : Make limit queries on a simple "
                      "dataset with {0} docs".format(num_docs))
        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_limit_queries()
        self._query_test_init(data_set)
        if not self.command_options:
            self.command_options = []
        options = self.command_options + [' -m full']
        self.total_backups = 1
        self.shell.execute_cluster_backup(self.couchbase_login_info, self.backup_location, options)
        time.sleep(2)
        self.buckets = RestConnection(self.master).get_buckets()
        bucket_names = [bucket.name for bucket in self.buckets]
        BucketOperationHelper.delete_all_buckets_or_assert(self.servers, self)
        # presumably releases lingering connections/objects before the
        # bucket is recreated — confirm why this is needed
        gc.collect()
        self.helper._create_default_bucket()
        self.shell.restore_backupFile(self.couchbase_login_info, self.backup_location, bucket_names)
        # Design docs are recreated explicitly after the restore.
        SimpleDataSet(self.helper, num_docs)._create_views()
        self._query_test_init(data_set)
示例7: setUp
def setUp(self):
    """Prepare the cluster, pick the bucket under test and load
    persisted sample documents for verification."""
    super(SpatialViewsTests, self).setUp()
    self.skip_rebalance = self.input.param("skip_rebalance", False)
    self.use_dev_views = self.input.param("use-dev-views", False)
    self.default_map = "function (doc) {emit(doc.geometry, doc.age);}"
    self.default_ddoc_name = self.input.param("default_ddoc_name", "test-ddoc")
    self.default_view_name = self.input.param("default_view_name", "test-view")
    # sasl bucket takes precedence over standard bucket, default otherwise
    if self.sasl_buckets:
        self.bucket_name = "bucket0"
    elif self.standard_buckets:
        self.bucket_name = "standard_bucket0"
    else:
        self.bucket_name = "default"
    self.helper = SpatialHelper(self, self.bucket_name)
    if not self.skip_rebalance:
        self.cluster.rebalance(self.servers[:], self.servers[1:], [])
    # Load items to verify against later; wait until they are persisted.
    self.docs = self.helper.insert_docs(self.num_items, 'spatial-doc',
                                        wait_for_persistence=True,
                                        return_docs=True)
示例8: setUp
def setUp(self):
    """Manual setup: cluster wiring plus backup-related parameters."""
    self.input = TestInputSingleton.input
    self.servers = self.input.servers
    self.master = self.servers[0]
    self.log = logger.Logger.get_logger()
    self.helper = SpatialHelper(self, "default")
    self.helper.setup_cluster()
    self.cluster = Cluster()
    param = self.input.param
    self.default_bucket = param("default_bucket", True)
    self.sasl_buckets = param("sasl_buckets", 0)
    self.standard_buckets = param("standard_buckets", 0)
    self.memcached_buckets = param("memcached_buckets", 0)
    # Adopt the helper's server list after cluster setup.
    self.servers = self.helper.servers
    self.shell = RemoteMachineShellConnection(self.master)
    remote_info = self.shell.extract_remote_info()
    self.os = remote_info.type.lower()
    settings = self.input.membase_settings
    self.couchbase_login_info = "%s:%s" % (settings.rest_username,
                                           settings.rest_password)
    self.backup_location = param("backup_location", "/tmp/backup")
    self.command_options = param("command_options", '')
示例9: SpatialViewQueriesTests
class SpatialViewQueriesTests(BaseTestCase):
def setUp(self):
    """Prepare the cluster, sample documents and the default design
    documents used by the query tests."""
    super(SpatialViewQueriesTests, self).setUp()
    self.thread_crashed = Event()
    self.thread_stopped = Event()
    self.skip_rebalance = self.input.param("skip_rebalance", False)
    self.use_dev_views = self.input.param("use-dev-views", False)
    self.all_view_one_ddoc = self.input.param("all-view-one-ddoc", False)
    self.default_ddoc_name = "test-ddoc-query"
    self.default_view_name = "test-view-query"
    self.params = self.get_query_params()
    # sasl bucket takes precedence over standard bucket, default otherwise
    if self.sasl_buckets:
        self.bucket_name = "bucket0"
    elif self.standard_buckets:
        self.bucket_name = "standard_bucket0"
    else:
        self.bucket_name = "default"
    self.helper = SpatialHelper(self, self.bucket_name)
    if not self.skip_rebalance:
        self.cluster.rebalance(self.servers[:], self.servers[1:], [])
    # Load items to verify query results against.
    self.docs = self.helper.insert_docs(self.num_items, 'spatial-doc',
                                        return_docs=True)
    self.ddocs = self.helper.create_default_views(
        is_one_ddoc=self.all_view_one_ddoc)
def tearDown(self):
    """Standard teardown; all cleanup is handled by the base class."""
    super(SpatialViewQueriesTests, self).tearDown()
def test_spatial_view_queries(self):
    """Run the default queries; when an `error` input param is given,
    exactly that error is expected and anything else fails the test."""
    expected_error = self.input.param('error', None)
    try:
        self.query_and_verify_result(self.docs, self.params)
    except Exception as ex:
        if expected_error and str(ex).find(expected_error) != -1:
            self.log.info("Error caught as expected %s" % expected_error)
            return
        self.fail("Unexpected error appeared during run %s" % ex)
    if expected_error:
        self.fail("Expected error '%s' didn't appear" % expected_error)
def test_add_spatial_view_queries_threads(self):
    # Query once synchronously and once more in a background thread per
    # server; with diff-nodes unset, all queries go through the master.
    diff_nodes = self.input.param("diff-nodes", False)
    query_threads = []
    for i in xrange(len(self.servers)):
        # Tuple indexed by bool: master when False, i-th server when True.
        node = (self.master, self.servers[i])[diff_nodes]
        self.query_and_verify_result(self.docs, self.params, node=node)
        q_thread = Thread(target=self.query_and_verify_result,
                          name="query_thread" + str(i),
                          args=([self.docs, self.params, node]))
        query_threads.append(q_thread)
        q_thread.start()
    for q_thread in query_threads:
        q_thread.join()
    # Worker threads report failures through this shared event.
    if self.thread_crashed.is_set():
        self.fail("Error occured during run")
def test_view_queries_during_rebalance(self):
    # Optionally grow the cluster first, then run the queries while a
    # second rebalance (nodes in/out) is still in flight.
    start_cluster = self.input.param('start-cluster', 1)
    servers_in = self.input.param('servers_in', 0)
    servers_out = self.input.param('servers_out', 0)
    if start_cluster > 1:
        rebalance = self.cluster.async_rebalance(self.servers[:1],
                                                 self.servers[1:start_cluster], [])
        rebalance.result()
    servs_in = []
    servs_out = []
    if servers_in:
        servs_in = self.servers[start_cluster:servers_in + 1]
    if servers_out:
        if start_cluster > 1:
            # Only nodes that actually joined the cluster can be removed.
            servs_out = self.servers[1:start_cluster]
            servs_out = servs_out[-servers_out:]
        else:
            servs_out = self.servers[-servers_out:]
    rebalance = self.cluster.async_rebalance(self.servers, servs_in, servs_out)
    # Queries must succeed while the rebalance is still running.
    self.query_and_verify_result(self.docs, self.params)
    rebalance.result()
def test_view_queries_node_pending_state(self):
    # Put one node into a pending state (added-but-not-rebalanced, or
    # failed over) and verify spatial queries still succeed.
    operation = self.input.param('operation', 'add_node')
    rest = RestConnection(self.master)
    if operation == 'add_node':
        self.log.info("adding the node %s:%s" % (
            self.servers[1].ip, self.servers[1].port))
        otpNode = rest.add_node(self.master.rest_username, self.master.rest_password,
                                self.servers[1].ip, self.servers[1].port)
    elif operation == 'failover':
        nodes = rest.node_statuses()
        # Never fail over the master node we are querying through.
        nodes = [node for node in nodes
                 if node.ip != self.master.ip or node.port != self.master.port]
        rest.fail_over(nodes[0].id)
    else:
        self.fail("There is no operation %s" % operation)
    self.query_and_verify_result(self.docs, self.params)
def test_view_queries_failover(self):
num_nodes = self.input.param('num-nodes', 1)
self.cluster.failover(self.servers,
#.........这里部分代码省略.........
示例10: SpatialViewsTests
class SpatialViewsTests(BaseTestCase):
def setUp(self):
    """Prepare cluster, bucket, sample documents and (for update/delete
    ops) pre-created design documents."""
    super(SpatialViewsTests, self).setUp()
    self.thread_crashed = Event()
    self.thread_stopped = Event()
    self.skip_rebalance = self.input.param("skip_rebalance", False)
    self.use_dev_views = self.input.param("use-dev-views", False)
    self.default_map = "function (doc) {emit(doc.geometry, doc.age);}"
    self.map_updated = "function (doc) {emit(doc.geometry, doc.name);}"
    self.default_ddoc_name = self.input.param("default_ddoc_name", "test-ddoc")
    self.default_view_name = self.input.param("default_view_name", "test-view")
    self.ddoc_op = self.input.param("ddoc-ops", "create")  # create\update\delete
    # Last matching flag wins: sasl overrides standard overrides default.
    bucket = "default"
    if self.standard_buckets:
        bucket = "standard_bucket0"
    if self.sasl_buckets:
        bucket = "bucket0"
    self.bucket_name = bucket
    self.helper = SpatialHelper(self, self.bucket_name)
    if not self.skip_rebalance:
        self.cluster.rebalance(self.servers[:], self.servers[1:], [])
    # Load some items to verify against later.
    self.docs = self.helper.insert_docs(self.num_items, 'spatial-doc',
                                        return_docs=True)
    self.num_ddoc = self.input.param('num-ddoc', 1)
    self.views_per_ddoc = self.input.param('views-per-ddoc', 1)
    self.non_spatial_views_per_ddoc = self.input.param('non-spatial-views-per-ddoc', 0)
    if self.ddoc_op in ('update', 'delete'):
        prepared = self.make_ddocs(self.num_ddoc, self.views_per_ddoc,
                                   self.non_spatial_views_per_ddoc)
        self.create_ddocs(prepared)
def tearDown(self):
    """Standard teardown; all cleanup is handled by the base class."""
    super(SpatialViewsTests, self).tearDown()
def test_add_spatial_views(self):
    """Build the configured number of design docs and apply the
    configured ddoc operation to them."""
    ddocs = self.make_ddocs(self.num_ddoc,
                            self.views_per_ddoc,
                            self.non_spatial_views_per_ddoc)
    self.perform_ddoc_ops(ddocs)
def test_add_spatial_views_case_sensative(self):
    """Two views whose names differ only in case must coexist in one
    design document (view names are case sensitive)."""
    lower_view = View(self.default_view_name, self.default_map,
                      dev_view=self.use_dev_views, is_spatial=True)
    upper_view = View(self.default_view_name.upper(), self.default_map,
                      dev_view=self.use_dev_views, is_spatial=True)
    ddoc = DesignDocument(self.default_ddoc_name, [],
                          spatial_views=[lower_view, upper_view])
    self.create_ddocs([ddoc])
def test_add_single_spatial_view(self):
    # Optionally generate a random lowercase view name of the requested
    # length (input key 'name_lenght' is kept as-is for compatibility).
    name_lenght = self.input.param('name_lenght', None)
    view_name = self.input.param('view_name', self.default_view_name)
    if name_lenght:
        view_name = ''.join(random.choice(string.lowercase) for x in xrange(name_lenght))
    not_compilable = self.input.param('not_compilable', False)
    error = self.input.param('error', None)
    # The second variant intentionally misses the closing '}' so that the
    # map function does not compile on the server.
    map_fn = (self.default_map, 'function (doc) {emit(doc.geometry, doc.age);')[not_compilable]
    ddoc = DesignDocument(self.default_ddoc_name, [], spatial_views=[
        View(view_name, map_fn,
             dev_view=self.use_dev_views, is_spatial=True)])
    try:
        self.create_ddocs([ddoc])
    except Exception as ex:
        # When an error is expected, only that error may occur.
        if error and str(ex).find(error) != -1:
            self.log.info("Error caught as expected %s" % error)
            return
        else:
            self.fail("Unexpected error appeared during run %s" % ex)
    if error:
        self.fail("Expected error '%s' didn't appear" % error)
def test_add_views_to_1_ddoc(self):
    # Concurrently add several views to ONE design document; with
    # same-name=True every thread writes the same view name.
    same_names = self.input.param('same-name', False)
    error = self.input.param('error', None)
    num_views_per_ddoc = 10
    create_threads = []
    try:
        for i in xrange(num_views_per_ddoc):
            # Tuple indexed by bool: unique suffix when False, "" when True.
            ddoc = DesignDocument(self.default_ddoc_name, [], spatial_views=[
                View(self.default_view_name + (str(i), "")[same_names],
                     self.default_map,
                     dev_view=self.use_dev_views, is_spatial=True)])
            create_thread = Thread(target=self.create_ddocs,
                                   name="create_thread" + str(i),
                                   args=([ddoc,],))
            create_threads.append(create_thread)
            create_thread.start()
        for create_thread in create_threads:
            create_thread.join()
    except Exception as ex:
        # When an error is expected, only that error may occur.
        if error and str(ex).find(error) != -1:
            self.log.info("Error caught as expected %s" % error)
            return
        else:
            self.fail("Unexpected error appeared during run %s" % ex)
    if error:
        self.fail("Expected error '%s' didn't appear" % error)
def test_add_spatial_views_threads(self):
same_names = self.input.param('same-name', False)
num_views_per_ddoc = 10
#.........这里部分代码省略.........
示例11: SpatialQueryTests
class SpatialQueryTests(unittest.TestCase):
    """Limit/skip/bbox query tests against a simple spatial dataset,
    executed repeatedly while the dataset is still loading."""

    def setUp(self):
        self.log = logger.Logger.get_logger()
        self.helper = SpatialHelper(self, "default")
        self.helper.setup_cluster()

    def tearDown(self):
        self.helper.cleanup_cluster()

    def test_simple_dataset_limit_queries(self):
        # `num-docs` controls the dataset size (default 1000).
        num_docs = self.helper.input.param("num-docs", 1000)
        self.log.info("description : Make limit queries on a simple " "dataset with {0} docs".format(num_docs))
        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_limit_queries()
        self._query_test_init(data_set)

    def test_simple_dataset_skip_queries(self):
        num_docs = self.helper.input.param("num-docs", 1000)
        self.log.info(
            "description : Make skip (and limit) queries on a " "simple dataset with {0} docs".format(num_docs)
        )
        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_skip_queries()
        self._query_test_init(data_set)

    def test_simple_dataset_bbox_queries(self):
        num_docs = self.helper.input.param("num-docs", 1000)
        self.log.info("description : Make bounding box queries on a simple " "dataset with {0} docs".format(num_docs))
        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_bbox_queries()
        self._query_test_init(data_set)

    ###
    # load the data defined for this dataset.
    # create views and query the data as it loads.
    # verification is optional, and best practice is to
    # set to False if you plan on running _query_all_views()
    # later in the test case
    ###
    def _query_test_init(self, data_set, verify_results=True):
        views = data_set.views
        # start loading data in a background thread
        t = Thread(target=data_set.load, name="load_data_set", args=())
        t.start()
        # run queries while loading data
        while t.is_alive():
            self._query_all_views(views, False)
            time.sleep(5)
        t.join()
        # results will be verified if verify_results set
        if verify_results:
            self._query_all_views(views, verify_results)
        else:
            self._check_view_intergrity(views)

    ##
    # run all queries for all views in parallel
    ##
    def _query_all_views(self, views, verify_results=True):
        query_threads = []
        for view in views:
            t = RunQueriesThread(view, verify_results)
            query_threads.append(t)
            t.start()
        [t.join() for t in query_threads]
        # each query thread records its own errors/failures; inspect them
        self._check_view_intergrity(query_threads)

    ##
    # If an error occurred loading or querying data for a view
    # it is queued and checked here. Fail on the first one that
    # occurs.
    ##
    def _check_view_intergrity(self, thread_results):
        # NOTE: the misspelled name ("intergrity") is kept — other classes
        # in this file inherit from this class and rely on it.
        for result in thread_results:
            if result.test_results.errors:
                self.fail(result.test_results.errors[0][1])
            if result.test_results.failures:
                self.fail(result.test_results.failures[0][1])
示例12: SpatialViewTests
class SpatialViewTests(unittest.TestCase):
def setUp(self):
    # The helper drives cluster setup against the "default" bucket.
    self.log = logger.Logger.get_logger()
    self.helper = SpatialHelper(self, "default")
    self.helper.setup_cluster()
def tearDown(self):
    # Tear down everything the helper created in setUp.
    self.helper.cleanup_cluster()
def test_create_x_design_docs(self):
    """Create `num-design-docs` spatial design docs without querying."""
    num_design_docs = self.helper.input.param("num-design-docs", 5)
    self.log.info("description : create {0} spatial views without "
                  "running any spatial view query".format(num_design_docs))
    prefix = str(uuid.uuid4())
    map_fun = "function (doc) {emit(doc.geometry, doc);}"
    self._insert_x_design_docs(num_design_docs, prefix, map_fun)
def test_update_x_design_docs(self):
    """Create `num-design-docs` spatial design docs, then overwrite each
    with a different map function (no queries are run)."""
    num_design_docs = self.helper.input.param("num-design-docs", 5)
    self.log.info("description : update {0} spatial views without "
                  "running any spatial view query".format(num_design_docs))
    prefix = str(uuid.uuid4())
    initial_fun = "function (doc) {emit(doc.geometry, doc);}"
    self._insert_x_design_docs(num_design_docs, prefix, initial_fun)
    # Update the design docs with a different function
    updated_fun = "function (doc) {emit(doc.geometry, null);}"
    self._insert_x_design_docs(num_design_docs, prefix, updated_fun)
def _insert_x_design_docs(self, num_design_docs, prefix, fun):
    """Create (or overwrite) `num_design_docs` spatial design documents
    using map function `fun`, verifying each was stored verbatim."""
    rest = self.helper.rest
    bucket = self.helper.bucket
    name = "dev_test_multiple_design_docs"
    for i in range(0, num_design_docs):
        design_name = "{0}-{1}-{2}".format(name, i, prefix)
        self.helper.create_index_fun(design_name, prefix, fun)
        # Verify that the function was really stored
        response, meta = rest.get_spatial(bucket, design_name)
        self.assertTrue(response)
        # Fix: assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(meta["id"],
                         "_design/{0}".format(design_name))
        self.assertEqual(
            response["spatial"][design_name].encode("ascii",
                                                    "ignore"),
            fun)
def test_insert_x_docs(self):
    """Create a spatial view over `num-docs` documents and query it."""
    num_docs = self.helper.input.param("num-docs", 100)
    message = "description : create a spatial view on {0} documents"
    self.log.info(message.format(num_docs))
    design_name = "dev_test_insert_{0}_docs".format(num_docs)
    self._insert_x_docs_and_query(num_docs, design_name)
# Does verify the full docs and not only the keys
def test_insert_x_docs_full_verification(self):
    """Like test_insert_x_docs, but verifies the full documents and not
    only the keys."""
    num_docs = self.helper.input.param("num-docs", 100)
    self.log.info("description : create a spatial view with {0} docs"
                  " and verify the full documents".format(num_docs))
    design_name = "dev_test_insert_{0}_docs_full_verification".format(num_docs)
    prefix = str(uuid.uuid4())[:7]
    self.helper.create_index_fun(design_name, prefix)
    docs = self.helper.insert_docs(num_docs, prefix, return_docs=True)
    self.helper.query_index_for_verification(design_name, docs,
                                             full_docs=True)
def test_insert_x_delete_y_docs(self):
    # Insert num-docs documents, delete num-deleted-docs of them and
    # verify the spatial view returns exactly the surviving keys.
    num_docs = self.helper.input.param("num-docs", 15000)
    num_deleted_docs = self.helper.input.param("num-deleted-docs", 10000)
    self.log.info("description : create spatial view with {0} docs "
                  " and delete {1} docs".format(num_docs,
                                                num_deleted_docs))
    design_name = "dev_test_insert_{0}_delete_{1}_docs"\
        .format(num_docs, num_deleted_docs)
    prefix = str(uuid.uuid4())[:7]
    inserted_keys = self._setup_index(design_name, num_docs, prefix)
    # Delete documents and verify that the documents got deleted
    deleted_keys = self.helper.delete_docs(num_deleted_docs, prefix)
    num_expected = num_docs - len(deleted_keys)
    # 2 * num_docs — presumably an upper bound on rows fetched while
    # waiting for num_expected results; confirm against the helper.
    results = self.helper.get_results(design_name, 2 * num_docs,
                                      num_expected=num_expected)
    result_keys = self.helper.get_keys(results)
    self.assertEqual(len(result_keys), num_expected)
    # Inserted keys must equal deleted keys plus the keys still indexed.
    self.helper.verify_result(inserted_keys, deleted_keys + result_keys)
#.........这里部分代码省略.........
示例13: SpatialCompactionTests
class SpatialCompactionTests(unittest.TestCase):
    """Manual-compaction test for spatial indexes (standalone variant)."""

    def setUp(self):
        self.log = logger.Logger.get_logger()
        self.helper = SpatialHelper(self, "default")
        self.helper.setup_cluster()

    def tearDown(self):
        self.helper.cleanup_cluster()

    def test_spatial_compaction(self):
        """Grow a spatial index through repeated inserts/queries, compact
        it manually and assert the on-disk size shrank."""
        self.log.info(
            "description : test manual compaction for spatial indexes")
        # (Removed the unused `rest` and `doc_names` locals.)
        prefix = str(uuid.uuid4())[:7]
        design_name = "dev_test_spatial_compaction"
        self.helper.create_index_fun(design_name, prefix)
        # Insert (resp. update, as they have the same prefix) and query
        # the spatial index several times so that the compaction makes sense
        for i in range(0, 8):
            self.helper.insert_docs(2000, prefix)
            self.helper.get_results(design_name)
        # Get the index size prior to compaction
        status, info = self.helper.info(design_name)
        disk_size = info["spatial_index"]["disk_size"]
        # Do the compaction
        self.helper.compact(design_name)
        # Check if the index size got smaller
        status, info = self.helper.info(design_name)
        self.assertTrue(info["spatial_index"]["disk_size"] < disk_size,
                        "The file size ({0}) isn't smaller than the "
                        "pre compaction size ({1})."
                        .format(info["spatial_index"]["disk_size"],
                                disk_size))
示例14: SpatialQueryErrorsTests
class SpatialQueryErrorsTests(BaseTestCase):
    """Negative tests: malformed spatial query parameters must make the
    server reject the query with the configured error."""

    def setUp(self):
        try:
            if 'first_case' not in TestInputSingleton.input.test_params:
                # Later cases reuse the bucket created by the first case;
                # these flags must be set before super().setUp() reads them.
                TestInputSingleton.input.test_params['default_bucket'] = False
                TestInputSingleton.input.test_params['skip_cleanup'] = True
                TestInputSingleton.input.test_params['skip_buckets_handle'] = True
            self.default_bucket_name = 'default'
            super(SpatialQueryErrorsTests, self).setUp()
            if 'first_case' in TestInputSingleton.input.test_params:
                self.cluster.rebalance(self.servers[:], self.servers[1:], [])
            # We use only one bucket in this test suite
            self.rest = RestConnection(self.master)
            self.bucket = self.rest.get_bucket(Bucket(name=self.default_bucket_name))
            # num_docs must be a multiple of the number of vbuckets
            self.num_docs = self.input.param("num_docs", 2000)
            # `testname` is used for the design document name as well as
            # the spatial function name
            self.testname = 'query-errors'
            self.helper = SpatialHelper(self, "default")
            if 'first_case' in TestInputSingleton.input.test_params:
                self.create_ddoc()
                self.helper.insert_docs(self.num_docs, self.testname)
        except Exception as ex:
            self.input.test_params["stop-on-failure"] = True
            self.log.error("SETUP WAS FAILED. ALL TESTS WILL BE SKIPPED")
            self.fail(ex)

    def tearDown(self):
        # clean up will only performed on the last run
        if 'last_case' in TestInputSingleton.input.test_params:
            TestInputSingleton.input.test_params['skip_cleanup'] = False
            TestInputSingleton.input.test_params['skip_buckets_handle'] = False
            super(SpatialQueryErrorsTests, self).tearDown()
        else:
            self.cluster.shutdown(force=True)
            self._log_finish(self)

    def test_query_errors(self):
        """Forward whitelisted query params from the test input and expect
        the server to fail the query with the configured error."""
        all_params = ['skip', 'limit', 'stale', 'bbox', 'start_range',
                      'end_range']
        query_params = {}
        for key in self.input.test_params:
            if key in all_params:
                query_params[key] = str(self.input.test_params[key])
        try:
            self.spatial_query(query_params)
        except QueryViewException as ex:
            # Fix: assertEquals is a deprecated alias; use assertEqual.
            self.assertEqual(self.input.test_params['error'],
                             json.loads(ex.reason)['error'])
        else:
            self.fail("Query did not fail, but should have. "
                      "Query parameters were: {0}".format(query_params))

    def create_ddoc(self):
        """Create the spatial view used by all error-case queries."""
        view_fn = '''function (doc) {
 if (doc.age !== undefined || doc.height !== undefined ||
 doc.bloom !== undefined || doc.shed_leaves !== undefined) {
 emit([doc.age, doc.height, [doc.bloom, doc.shed_leaves]], doc.name);
 }}'''
        self.helper.create_index_fun(self.testname, view_fn)

    def spatial_query(self, params=None, ddoc='test'):
        """Run the spatial query with the given params.

        Fix: `params` used a mutable default dict that was then mutated
        (params['stale'] = 'false'), leaking state across calls; use a
        None sentinel and create a fresh dict per call instead.
        """
        bucket = self.default_bucket_name
        if params is None:
            params = {}
        if not 'stale' in params:
            params['stale'] = 'false'
        return self.rest.query_view(self.testname, self.testname, bucket,
                                    params, type="spatial")
示例15: SpatialViewsTests
class SpatialViewsTests(BaseTestCase):
    """Creation tests for spatial (and mixed spatial/regular) design
    documents."""

    def setUp(self):
        super(SpatialViewsTests, self).setUp()
        self.skip_rebalance = self.input.param("skip_rebalance", False)
        self.use_dev_views = self.input.param("use-dev-views", False)
        self.default_map = "function (doc) {emit(doc.geometry, doc.age);}"
        self.default_ddoc_name = self.input.param("default_ddoc_name", "test-ddoc")
        self.default_view_name = self.input.param("default_view_name", "test-view")
        self.bucket_name = "default"
        if self.standard_buckets:
            self.bucket_name = "standard_bucket0"
        if self.sasl_buckets:
            self.bucket_name = "bucket0"
        self.helper = SpatialHelper(self, self.bucket_name)
        if not self.skip_rebalance:
            self.cluster.rebalance(self.servers[:], self.servers[1:], [])
        # load some items to verify
        self.docs = self.helper.insert_docs(self.num_items, 'spatial-doc',
                                            wait_for_persistence=True,
                                            return_docs=True)

    def tearDown(self):
        super(SpatialViewsTests, self).tearDown()

    def test_add_spatial_views(self):
        """Create the configured number of design docs and views."""
        num_ddoc = self.input.param('num-ddoc', 1)
        views_per_ddoc = self.input.param('views-per-ddoc', 1)
        non_spatial_views_per_ddoc = self.input.param('non-spatial-views-per-ddoc', 0)
        ddocs = self.make_ddocs(num_ddoc, views_per_ddoc, non_spatial_views_per_ddoc)
        self.create_ddocs(ddocs)

    def test_add_spatial_views_case_sensative(self):
        """Views whose names differ only in case must coexist."""
        ddoc = DesignDocument(self.default_ddoc_name, [], spatial_views=[
            View(self.default_view_name, self.default_map,
                 dev_view=self.use_dev_views, is_spatial=True),
            View(self.default_view_name.upper(), self.default_map,
                 dev_view=self.use_dev_views, is_spatial=True)])
        self.create_ddocs([ddoc])

    def make_ddocs(self, ddocs_num, views_per_ddoc, non_spatial_views_per_ddoc):
        """Build `ddocs_num` design documents, each with `views_per_ddoc`
        spatial views and `non_spatial_views_per_ddoc` regular views."""
        ddocs = []
        for i in xrange(ddocs_num):
            views = []
            for k in xrange(views_per_ddoc):
                views.append(View(self.default_view_name + str(k), self.default_map,
                                  dev_view=self.use_dev_views, is_spatial=True))
            non_spatial_views = []
            if non_spatial_views_per_ddoc:
                for k in xrange(non_spatial_views_per_ddoc):
                    # Fix: these regular views were appended to `views`
                    # (the spatial list), leaving `non_spatial_views`
                    # always empty and mislabeling the views.
                    non_spatial_views.append(
                        View(self.default_view_name + str(k),
                             'function (doc) { emit(null, doc);}',
                             dev_view=self.use_dev_views))
            ddocs.append(DesignDocument(self.default_ddoc_name + str(i), non_spatial_views, spatial_views=views))
        return ddocs

    def create_ddocs(self, ddocs):
        """Push every view of every design doc to the cluster; a ddoc with
        no views at all is still created (empty view list)."""
        for ddoc in ddocs:
            if not (ddoc.views or ddoc.spatial_views):
                self.cluster.create_view(self.master, ddoc.name, [], bucket=self.bucket_name)
            for view in ddoc.views:
                self.cluster.create_view(self.master, ddoc.name, view, bucket=self.bucket_name)
            for view in ddoc.spatial_views:
                self.cluster.create_view(self.master, ddoc.name, view, bucket=self.bucket_name)