Python SpatialHelper.setup_cluster method code examples

This article collects typical usage examples of the Python method membase.helper.spatial_helper.SpatialHelper.setup_cluster. If you are wondering how to use Python's SpatialHelper.setup_cluster or what it looks like in practice, the curated examples below should help. You can also explore further usage examples of membase.helper.spatial_helper.SpatialHelper, the class this method belongs to.


The following presents 8 code examples of the SpatialHelper.setup_cluster method, sorted by popularity by default.
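
For orientation, here is a minimal sketch of the pattern shared by all the examples below (the test-class name is hypothetical): a unittest test case creates a SpatialHelper bound to the "default" bucket, calls setup_cluster() in setUp, and calls cleanup_cluster() in tearDown. Example 6 additionally shows that setup_cluster accepts a flag to skip the initial rebalance.

import unittest

from membase.helper.spatial_helper import SpatialHelper


class MySpatialTestCase(unittest.TestCase):  # hypothetical test class
    def setUp(self):
        # Bind the helper to the "default" bucket and build up the cluster.
        self.helper = SpatialHelper(self, "default")
        self.helper.setup_cluster()
        # Example 6 below passes False to set up the nodes without
        # rebalancing the cluster: self.helper.setup_cluster(False)

    def tearDown(self):
        # Tear the cluster back down after each test.
        self.helper.cleanup_cluster()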

Example 1: SpatialInfoTests

# Required import: from membase.helper.spatial_helper import SpatialHelper [as alias]
# Or: from membase.helper.spatial_helper.SpatialHelper import setup_cluster [as alias]
class SpatialInfoTests(unittest.TestCase):
    def setUp(self):
        self.log = logger.Logger.get_logger()
        self.helper = SpatialHelper(self, "default")
        self.helper.setup_cluster()


    def tearDown(self):
        self.helper.cleanup_cluster()


    def test_spatial_info(self):
        self.log.info(
            "description : test info for spatial indexes")
        rest = self.helper.rest
        prefix = str(uuid.uuid4())[:7]
        design_name = "dev_test_spatial_info"

        self.helper.create_index_fun(design_name, prefix)

        # Fill the database and add an index
        self.helper.insert_docs(2000, prefix)
        self.helper.get_results(design_name)
        status, info = self.helper.info(design_name)
        disk_size = info["spatial_index"]["disk_size"]

        self.assertTrue(disk_size > 0)
        self.assertEqual(info["name"], design_name)

        num_vbuckets = len(rest.get_vbuckets(self.helper.bucket))
        self.assertEqual(len(info["spatial_index"]["update_seq"]),
                         num_vbuckets)
        self.assertEqual(len(info["spatial_index"]["purge_seq"]),
                         num_vbuckets)
        self.assertFalse(info["spatial_index"]["updater_running"])
        self.assertFalse(info["spatial_index"]["waiting_clients"] > 0)
        self.assertFalse(info["spatial_index"]["compact_running"])

        # Insert a lot of new documents, and return after starting to
        # build up the index (not waiting until it's done) to test
        # whether the updater fields are set correctly
        self.helper.insert_docs(50000, prefix)
        self.helper.get_results(design_name,
                                extra_params={"stale": "update_after"})
        # Somehow stale=update_after doesn't really return immediately,
        # thus commenting this assertion out. There's no real reason
        # to investigate, as the indexing changes heavily in the moment
        # anyway
        #self.assertTrue(info["spatial_index"]["updater_running"])
        #self.assertTrue(info["spatial_index"]["waiting_commit"])
        #self.assertTrue(info["spatial_index"]["waiting_clients"] > 0)
        self.assertFalse(info["spatial_index"]["compact_running"])

        # Request the index again, to make sure it is fully updated
        self.helper.get_results(design_name)
        status, info = self.helper.info(design_name)
        self.assertFalse(info["spatial_index"]["updater_running"])
        self.assertFalse(info["spatial_index"]["waiting_clients"] > 0)
        self.assertFalse(info["spatial_index"]["compact_running"])
        self.assertTrue(info["spatial_index"]["disk_size"] > disk_size)
Author: Boggypop, Project: testrunner, Lines: 62, Source file: spatialinfotests.py

Example 2: IBRSpatialTests

# Required import: from membase.helper.spatial_helper import SpatialHelper [as alias]
# Or: from membase.helper.spatial_helper.SpatialHelper import setup_cluster [as alias]
class IBRSpatialTests(SpatialQueryTests):
    def setUp(self):
        self.input = TestInputSingleton.input
        self.servers = self.input.servers
        self.master = self.servers[0]
        self.log = logger.Logger.get_logger()
        self.helper = SpatialHelper(self, "default")
        self.helper.setup_cluster()
        self.cluster = Cluster()
        self.default_bucket = self.input.param("default_bucket", True)
        self.sasl_buckets = self.input.param("sasl_buckets", 0)
        self.standard_buckets = self.input.param("standard_buckets", 0)
        self.memcached_buckets = self.input.param("memcached_buckets", 0)
        self.servers = self.helper.servers
        self.shell = RemoteMachineShellConnection(self.master)
        info = self.shell.extract_remote_info()
        self.os = info.type.lower()
        self.couchbase_login_info = "%s:%s" % (self.input.membase_settings.rest_username,
                                               self.input.membase_settings.rest_password)
        self.backup_location = self.input.param("backup_location", "/tmp/backup")
        self.command_options = self.input.param("command_options", '')



    def tearDown(self):
        self.helper.cleanup_cluster()

    def test_backup_with_spatial_data(self):
        num_docs = self.helper.input.param("num-docs", 5000)
        self.log.info("description : Make limit queries on a simple "
                      "dataset with {0} docs".format(num_docs))
        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_limit_queries()
        self._query_test_init(data_set)

        if not self.command_options:
            self.command_options = []
        options = self.command_options + [' -m full']

        self.total_backups = 1
        self.shell.execute_cluster_backup(self.couchbase_login_info, self.backup_location, options)
        time.sleep(2)

        self.buckets = RestConnection(self.master).get_buckets()
        bucket_names = [bucket.name for bucket in self.buckets]
        BucketOperationHelper.delete_all_buckets_or_assert(self.servers, self)
        gc.collect()

        self.helper._create_default_bucket()
        self.shell.restore_backupFile(self.couchbase_login_info, self.backup_location, bucket_names)

        SimpleDataSet(self.helper, num_docs)._create_views()
        self._query_test_init(data_set)
Author: EricACooper, Project: testrunner, Lines: 55, Source file: ibr.py

Example 3: SpatialCompactionTests

# Required import: from membase.helper.spatial_helper import SpatialHelper [as alias]
# Or: from membase.helper.spatial_helper.SpatialHelper import setup_cluster [as alias]
class SpatialCompactionTests(unittest.TestCase):
    def setUp(self):
        self.log = logger.Logger.get_logger()
        self.helper = SpatialHelper(self, "default")
        self.helper.setup_cluster()


    def tearDown(self):
        self.helper.cleanup_cluster()


    def test_spatial_compaction(self):
        self.log.info(
            "description : test manual compaction for spatial indexes")
        rest = self.helper.rest
        prefix = str(uuid.uuid4())[:7]
        design_name = "dev_test_spatial_compaction"

        self.helper.create_index_fun(design_name, prefix)

        # Insert (resp. update, as they have the same prefix) and query
        # the spatial index several times so that the compaction makes sense
        for i in range(0, 8):
            doc_names = self.helper.insert_docs(2000, prefix)
            self.helper.get_results(design_name)

        # Get the index size prior to compaction
        status, info = self.helper.info(design_name)
        disk_size = info["spatial_index"]["disk_size"]

        # Do the compaction
        self.helper.compact(design_name)

        # Check if the index size got smaller
        status, info = self.helper.info(design_name)
        self.assertTrue(info["spatial_index"]["disk_size"] < disk_size,
                        "The file size ({0}) isn't smaller than the "
                        "pre compaction size ({1})."
                        .format(info["spatial_index"]["disk_size"],
                                disk_size))
Author: steveyen, Project: testrunner, Lines: 42, Source file: spatialcompaction.py

Example 4: SpatialQueryTests

# Required import: from membase.helper.spatial_helper import SpatialHelper [as alias]
# Or: from membase.helper.spatial_helper.SpatialHelper import setup_cluster [as alias]
class SpatialQueryTests(unittest.TestCase):
    def setUp(self):
        self.log = logger.Logger.get_logger()
        self.helper = SpatialHelper(self, "default")
        self.helper.setup_cluster()

    def tearDown(self):
        self.helper.cleanup_cluster()

    def test_simple_dataset_limit_queries(self):
        num_docs = self.helper.input.param("num-docs", 1000)
        self.log.info("description : Make limit queries on a simple " "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_limit_queries()
        self._query_test_init(data_set)

    def test_simple_dataset_skip_queries(self):
        num_docs = self.helper.input.param("num-docs", 1000)
        self.log.info(
            "description : Make skip (and limit) queries on a " "simple dataset with {0} docs".format(num_docs)
        )

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_skip_queries()
        self._query_test_init(data_set)

    def test_simple_dataset_bbox_queries(self):
        num_docs = self.helper.input.param("num-docs", 1000)
        self.log.info("description : Make bounding box queries on a simple " "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_bbox_queries()
        self._query_test_init(data_set)

    ###
    # load the data defined for this dataset.
    # create views and query the data as it loads.
    # verification is optional, and best practice is to
    # set to False if you plan on running _query_all_views()
    # later in the test case
    ###
    def _query_test_init(self, data_set, verify_results=True):
        views = data_set.views

        # start loading data
        t = Thread(target=data_set.load, name="load_data_set", args=())
        t.start()

        # run queries while loading data
        while t.is_alive():
            self._query_all_views(views, False)
            time.sleep(5)
        t.join()

        # results will be verified if verify_results set
        if verify_results:
            self._query_all_views(views, verify_results)
        else:
            self._check_view_intergrity(views)

    ##
    # run all queries for all views in parallel
    ##
    def _query_all_views(self, views, verify_results=True):

        query_threads = []
        for view in views:
            t = RunQueriesThread(view, verify_results)
            query_threads.append(t)
            t.start()

        [t.join() for t in query_threads]

        self._check_view_intergrity(query_threads)

    ##
    # If an error occurred loading or querying data for a view
    # it is queued and checked here. Fail on the first one that
    # occurs.
    ##
    def _check_view_intergrity(self, thread_results):
        for result in thread_results:
            if result.test_results.errors:
                self.fail(result.test_results.errors[0][1])
            if result.test_results.failures:
                self.fail(result.test_results.failures[0][1])
Author: paul-guo-, Project: appstack, Lines: 89, Source file: spatialquerytests.py

Example 5: SpatialViewTests

# Required import: from membase.helper.spatial_helper import SpatialHelper [as alias]
# Or: from membase.helper.spatial_helper.SpatialHelper import setup_cluster [as alias]
class SpatialViewTests(unittest.TestCase):
    def setUp(self):
        self.log = logger.Logger.get_logger()
        self.helper = SpatialHelper(self, "default")
        self.helper.setup_cluster()


    def tearDown(self):
        self.helper.cleanup_cluster()


    def test_create_x_design_docs(self):
        num_design_docs = self.helper.input.param("num-design-docs", 5)
        self.log.info("description : create {0} spatial views without "
                      "running any spatial view query".format(num_design_docs))
        prefix = str(uuid.uuid4())

        fun = "function (doc) {emit(doc.geometry, doc);}"
        self._insert_x_design_docs(num_design_docs, prefix, fun)


    def test_update_x_design_docs(self):
        num_design_docs = self.helper.input.param("num-design-docs", 5)
        self.log.info("description : update {0} spatial views without "
                      "running any spatial view query".format(num_design_docs))
        prefix = str(uuid.uuid4())

        fun = "function (doc) {emit(doc.geometry, doc);}"
        self._insert_x_design_docs(num_design_docs, prefix, fun)

        # Update the design docs with a different function
        fun = "function (doc) {emit(doc.geometry, null);}"
        self._insert_x_design_docs(num_design_docs, prefix, fun)


    def _insert_x_design_docs(self, num_design_docs, prefix, fun):
        rest = self.helper.rest
        bucket = self.helper.bucket
        name = "dev_test_multiple_design_docs"

        for i in range(0, num_design_docs):
            design_name = "{0}-{1}-{2}".format(name, i, prefix)
            self.helper.create_index_fun(design_name, prefix, fun)

            # Verify that the function was really stored
            response, meta = rest.get_spatial(bucket, design_name)
            self.assertTrue(response)
            self.assertEquals(meta["id"],
                              "_design/{0}".format(design_name))
            self.assertEquals(
                response["spatial"][design_name].encode("ascii",
                                                                "ignore"),
                fun)


    def test_insert_x_docs(self):
        num_docs = self.helper.input.param("num-docs", 100)
        self.log.info("description : create a spatial view on {0} documents"\
                          .format(num_docs))
        design_name = "dev_test_insert_{0}_docs".format(num_docs)
        self._insert_x_docs_and_query(num_docs, design_name)


    # Does verify the full docs and not only the keys
    def test_insert_x_docs_full_verification(self):
        num_docs = self.helper.input.param("num-docs", 100)
        self.log.info("description : create a spatial view with {0} docs"
                      " and verify the full documents".format(num_docs))
        design_name = "dev_test_insert_{0}_docs_full_verification"\
            .format(num_docs)
        prefix = str(uuid.uuid4())[:7]

        self.helper.create_index_fun(design_name, prefix)
        inserted_docs = self.helper.insert_docs(num_docs, prefix,
                                                return_docs=True)
        self.helper.query_index_for_verification(design_name, inserted_docs,
                                                 full_docs=True)


    def test_insert_x_delete_y_docs(self):
        num_docs = self.helper.input.param("num-docs", 15000)
        num_deleted_docs = self.helper.input.param("num-deleted-docs", 10000)
        self.log.info("description : create spatial view with {0} docs "
                      " and delete {1} docs".format(num_docs,
                                                    num_deleted_docs))
        design_name = "dev_test_insert_{0}_delete_{1}_docs"\
            .format(num_docs, num_deleted_docs)
        prefix = str(uuid.uuid4())[:7]

        inserted_keys = self._setup_index(design_name, num_docs, prefix)

        # Delete documents and verify that the documents got deleted
        deleted_keys = self.helper.delete_docs(num_deleted_docs, prefix)
        num_expected = num_docs - len(deleted_keys)
        results = self.helper.get_results(design_name, 2 * num_docs,
                                          num_expected=num_expected)
        result_keys = self.helper.get_keys(results)
        self.assertEqual(len(result_keys), num_expected)
        self.helper.verify_result(inserted_keys, deleted_keys + result_keys)

#......... part of the code is omitted here .........
Author: mschoch, Project: testrunner, Lines: 103, Source file: spatialviewtests.py

Example 6: SpatialRebalanceTests

# Required import: from membase.helper.spatial_helper import SpatialHelper [as alias]
# Or: from membase.helper.spatial_helper.SpatialHelper import setup_cluster [as alias]
class SpatialRebalanceTests(unittest.TestCase):
    def setUp(self):
        self.log = logger.Logger.get_logger()
        self.helper = SpatialHelper(self, "default")
        # Setup, but don't rebalance cluster
        self.helper.setup_cluster(False)


    def tearDown(self):
        self.log.info("tear down test")
        self.helper.cleanup_cluster()


    def test_insert_x_delete_y_docs_create_cluster(self):
        num_docs = self.helper.input.param("num-docs", 100000)
        num_deleted_docs = self.helper.input.param("num-deleted-docs", 10000)
        msg = "description : have a single node, insert {0} docs, "\
            "delete {1} docs while creating a cluster and query it"
        self.log.info(msg.format(num_docs, num_deleted_docs))
        design_name = "dev_test_delete_10k_docs_create_cluster"
        prefix = str(uuid.uuid4())[:7]

        # Make sure we are fully de-clustered
        ClusterOperationHelper.cleanup_cluster(self.helper.servers)

        self.helper.create_index_fun(design_name, prefix)
        inserted_keys = self.helper.insert_docs(num_docs, prefix)

        # Start creating the cluster and rebalancing it without waiting until
        # it's finished
        ClusterOperationHelper.add_and_rebalance(self.helper.servers, False)

        deleted_keys = self.helper.delete_docs(num_deleted_docs, prefix)
        self._wait_for_rebalance()

        # Verify that the docs got deleted and are no longer part of the
        # spatial view
        results = self.helper.get_results(design_name, num_docs)
        result_keys = self.helper.get_keys(results)
        self.assertEqual(len(result_keys), num_docs - len(deleted_keys))
        self.helper.verify_result(inserted_keys, deleted_keys + result_keys)


    def test_insert_x_delete_y_docs_destroy_cluster(self):
        num_docs = self.helper.input.param("num-docs", 100000)
        num_deleted_docs = self.helper.input.param("num-deleted-docs", 10000)
        msg = "description : have a cluster, insert {0} docs, delete "\
            "{1} docs while destroying the cluster into a single node "\
            "and query it"
        self.log.info(msg.format(num_docs, num_deleted_docs))
        design_name = "dev_test_delete_{0}_docs_destroy_cluster".format(
            num_deleted_docs)
        prefix = str(uuid.uuid4())[:7]

        # Make sure we are fully clustered
        ClusterOperationHelper.add_and_rebalance(self.helper.servers)

        self.helper.create_index_fun(design_name, prefix)
        inserted_keys = self.helper.insert_docs(num_docs, prefix)

        # Start destroying the cluster and rebalancing it without waiting
        # until it's finished
        ClusterOperationHelper.cleanup_cluster(self.helper.servers,
                                                    False)

        deleted_keys = self.helper.delete_docs(num_deleted_docs, prefix)
        self._wait_for_rebalance()

        # Verify that the docs got deleted and are no longer part of the
        # spatial view
        results = self.helper.get_results(design_name, num_docs)
        result_keys = self.helper.get_keys(results)
        self.assertEqual(len(result_keys), num_docs - len(deleted_keys))
        self.helper.verify_result(inserted_keys, deleted_keys + result_keys)


    def test_insert_x_docs_during_rebalance(self):
        num_docs = self.helper.input.param("num-docs", 100000)
        msg = "description : have a single node, insert {0} docs, "\
            "query it, add another node, start rebalancing, insert {0} "\
            "docs, finish rebalancing, keep on adding nodes..."
        self.log.info(msg.format(num_docs))
        design_name = "dev_test_insert_{0}_docs_during_rebalance".format(
            num_docs)
        prefix = str(uuid.uuid4())[:7]

        # Make sure we are fully de-clustered
        ClusterOperationHelper.cleanup_cluster(self.helper.servers)

        self.helper.create_index_fun(design_name)
        inserted_keys = self.helper.insert_docs(num_docs, prefix)

        # Add all servers to the master server one by one and start
        # rebalancing
        for server in self.helper.servers[1:]:
            ClusterOperationHelper.add_and_rebalance(
                [self.helper.master, server], False)
            # Docs with the same prefix are overwritten and not newly created
            prefix = str(uuid.uuid4())[:7]
            inserted_keys.extend(self.helper.insert_docs(
#......... part of the code is omitted here .........
Author: abhinavdangeti, Project: testrunner, Lines: 103, Source file: spatialrebalancetests.py

Example 7: SpatialQueryTests

# Required import: from membase.helper.spatial_helper import SpatialHelper [as alias]
# Or: from membase.helper.spatial_helper.SpatialHelper import setup_cluster [as alias]
class SpatialQueryTests(unittest.TestCase):
    def setUp(self):
        self.log = logger.Logger.get_logger()
        self.helper = SpatialHelper(self, "default")
        self.helper.setup_cluster()
        self.cluster = Cluster()
        self.servers = self.helper.servers

    def tearDown(self):
        self.helper.cleanup_cluster()

    def test_simple_dataset_limit_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Make limit queries on a simple "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_limit_queries()
        self._query_test_init(data_set)

    def test_simple_dataset_skip_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Make skip (and limit) queries on a "
                      "simple dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_skip_queries()
        self._query_test_init(data_set)

    def test_simple_dataset_bbox_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Make bounding box queries on a simple "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_bbox_queries()
        self._query_test_init(data_set)

    def test_simple_dataset_range_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Make range queries on a simple "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_range_queries()
        self._query_test_init(data_set)

    def test_multidim_dataset_limit_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Make limit queries on a multidimensional "
                      "dataset with {0} docs".format(num_docs))

        data_set = MultidimDataSet(self.helper, num_docs)
        data_set.add_limit_queries()
        self._query_test_init(data_set)

    def test_multidim_dataset_skip_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Make skip (and limit) queries on a "
                      "multidimensional dataset with {0} docs".format(num_docs))

        data_set = MultidimDataSet(self.helper, num_docs)
        data_set.add_skip_queries()
        self._query_test_init(data_set)

    def test_multidim_dataset_range_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Make range queries on a "
                      "multidimensional with {0} docs".format(num_docs))

        data_set = MultidimDataSet(self.helper, num_docs)
        data_set.add_range_queries()
        self._query_test_init(data_set)

    def test_multidim_dataset_range_and_limit_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Make range queries with limits on a "
                      "multidimensional with {0} docs".format(num_docs))

        data_set = MultidimDataSet(self.helper, num_docs)
        data_set.add_range_and_limit_queries()
        self._query_test_init(data_set)

## Rebalance In
    def test_rebalance_in_simple_dataset_limit_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Rebalance In and limit queries on a simple "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_limit_queries()
        self._rebalance_cluster(data_set)

    def test_rebalance_in_simple_dataset_skip_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Rebalance In and skip (and limit) queries on a "
                      "simple dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_skip_queries()
#......... part of the code is omitted here .........
Author: EricACooper, Project: testrunner, Lines: 103, Source file: spatialquerytests.py

Example 8: SpatialViewTests

# Required import: from membase.helper.spatial_helper import SpatialHelper [as alias]
# Or: from membase.helper.spatial_helper.SpatialHelper import setup_cluster [as alias]
class SpatialViewTests(unittest.TestCase):
    def setUp(self):
        self.log = logger.Logger.get_logger()
        self.helper = SpatialHelper(self, "default")
        self.helper.setup_cluster()


    def tearDown(self):
        self.helper.cleanup_cluster()


    def test_create_multiple_development_spatial(self):
        self.log.info("description : create multiple spatial views without "
                      "running any spatial view query")
        rest = self.helper.rest
        bucket = self.helper.bucket
        prefix = str(uuid.uuid4())
        name = "dev_test_spatial_multiple"

        design_names = ["{0}-{1}-{2}".format(name, i, prefix) \
                             for i in range(0, 5)]
        for design_name in design_names:
            self.helper.create_index_fun(design_name)
            response = rest.get_spatial(bucket, design_name)
            self.assertTrue(response)
            self.assertEquals(response["_id"],
                              "_design/{0}".format(design_name))
            self.log.info(response)


    def test_insert_x_docs(self):
        num_docs = self.helper.input.param("num-docs", 100)
        self.log.info("description : create a spatial view on {0} documents"\
                          .format(num_docs))
        design_name = "dev_test_insert_{0}_docs".format(num_docs)
        prefix = str(uuid.uuid4())[:7]

        inserted_keys = self._setup_index(design_name, num_docs, prefix)
        self.assertEqual(len(inserted_keys), num_docs)


    # Does verify the full docs and not only the keys
    def test_insert_x_docs_full_verification(self):
        num_docs = self.helper.input.param("num-docs", 100)
        self.log.info("description : create a spatial view with {0} docs"
                      " and verify the full documents".format(num_docs))
        design_name = "dev_test_insert_{0}_docs_full_verification"\
            .format(num_docs)
        prefix = str(uuid.uuid4())[:7]

        self.helper.create_index_fun(design_name)
        inserted_docs = self.helper.insert_docs(num_docs, prefix,
                                                return_docs=True)
        self.helper.query_index_for_verification(design_name, inserted_docs,
                                                 full_docs=True)


    def test_insert_x_delete_y_docs(self):
        num_docs = self.helper.input.param("num-docs", 15000)
        num_deleted_docs = self.helper.input.param("num-deleted-docs", 10000)
        self.log.info("description : create spatial view with {0} docs "
                      " and delete {1} docs".format(num_docs,
                                                    num_deleted_docs))
        design_name = "dev_test_insert_{0}_delete_{1}_docs"\
            .format(num_docs, num_deleted_docs)
        prefix = str(uuid.uuid4())[:7]

        inserted_keys = self._setup_index(design_name, num_docs, prefix)

        # Delete documents and verify that the documents got deleted
        deleted_keys = self.helper.delete_docs(num_deleted_docs, prefix)
        results = self.helper.get_results(design_name, 2*num_docs)
        result_keys = self.helper.get_keys(results)
        self.assertEqual(len(result_keys), num_docs-len(deleted_keys))
        self.helper.verify_result(inserted_keys, deleted_keys + result_keys)


    def test_insert_x_update_y_docs(self):
        num_docs = self.helper.input.param("num-docs", 15000)
        num_updated_docs = self.helper.input.param("num-updated-docs", 100)
        self.log.info("description : create spatial view with {0} docs "
                      " and update {1} docs".format(num_docs,
                                                    num_updated_docs))
        design_name = "dev_test_insert_{0}_delete_{1}_docs"\
            .format(num_docs, num_updated_docs)
        prefix = str(uuid.uuid4())[:7]

        self._setup_index(design_name, num_docs, prefix)

        # Update documents and verify that the documents got updated
        updated_keys = self.helper.insert_docs(num_updated_docs, prefix,
                                               dict(updated=True))
        results = self.helper.get_results(design_name, 2*num_docs)
        result_updated_keys = self._get_updated_docs_keys(results)
        self.assertEqual(len(updated_keys), len(result_updated_keys))
        self.helper.verify_result(updated_keys, result_updated_keys)


    def test_get_spatial_during_x_min_load_y_working_set(self):
        num_docs = self.helper.input.param("num-docs", 10000)
#......... part of the code is omitted here .........
Author: vmx, Project: testrunner, Lines: 103, Source file: spatialviewtests.py


Note: The membase.helper.spatial_helper.SpatialHelper.setup_cluster method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are selected from open-source projects contributed by various developers; the copyright of the source code belongs to the original authors. For redistribution and use, please refer to the corresponding project's license. Do not reproduce without permission.