

Python RestConnection.get_bucket_json Method Code Examples

This article collects typical usage examples of the Python method membase.api.rest_client.RestConnection.get_bucket_json. If you are wondering what RestConnection.get_bucket_json does, how to call it, or what real-world usage looks like, the examples curated below may help. You can also explore further usage examples of membase.api.rest_client.RestConnection, the class this method belongs to.


Five code examples of the RestConnection.get_bucket_json method are shown below, sorted by popularity by default.
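
Before the examples, here is a minimal sketch of the common call pattern: build a RestConnection from a testrunner-style server object (IP and REST credentials) and read fields from the returned bucket JSON. The helper name is hypothetical; the keys read here (uuid, basicStats.itemCount) are the ones the examples below rely on.

from membase.api.rest_client import RestConnection

def print_bucket_summary(server, bucket_name):
    # Hypothetical helper: fetch the bucket JSON over REST and print the
    # fields the examples below use most often.
    rest = RestConnection(server)
    bucket_json = rest.get_bucket_json(bucket_name)
    print("uuid: {0}".format(bucket_json["uuid"]))
    print("items: {0}".format(bucket_json["basicStats"]["itemCount"]))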

Example 1: create_and_restore_csv

# Required import: from membase.api.rest_client import RestConnection [as alias]
# Or: from membase.api.rest_client.RestConnection import get_bucket_json [as alias]
 def create_and_restore_csv(self):
     try:
         self.__load_data()
         shell_obj = RemoteMachineShellConnection(self.master)
         self.log.info("Removing backup folder if already present")
         info = shell_obj.extract_remote_info()
         path = "/tmp/backup/"
         if info.type.lower() == "windows":
             path = "/cygdrive/c" + path
         #TODO : Check for mac also
         shell_obj.delete_files(path)
         create_dir = "mkdir " + path
         data_type = "csv:"
         destination = path + "data.csv"
         shell_obj.execute_command(create_dir)
         source = "http://localhost:8091"
         options = "-b default" + self.username_arg + self.password_arg
         shell_obj.execute_cbtransfer(source, data_type + destination, options)
         self.log.info("Created csv file @ %s" % destination)
         source, destination = destination, source
         options = "-B standard_bucket0" + self.username_arg + self.password_arg
         self.log.info("Restoring data....!")
         shell_obj.execute_cbtransfer(source, destination, options)
         self.sleep(10)
         self.log.info("Checking whether number of items loaded match with the number of items restored.")
         rest = RestConnection(self.master)
         itemCount = rest.get_bucket_json('standard_bucket0')['basicStats']['itemCount']
         self.assertEqual(itemCount, self.num_items,
                          msg="Number of items loaded does not match the number of "
                              "items restored. Number of items loaded is {0} but "
                              "number of items restored is {1}".format(self.num_items, itemCount))
         self.log.info("Number of items loaded = Number of items restored. Pass!!")
     finally:
         shell_obj.disconnect()
Author: arod1987, Project: testrunner, Lines of code: 35, Source: csvdatatest.py
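
Stripped of the cbtransfer setup, the verification step in example 1 reduces to reading basicStats.itemCount from the bucket JSON and comparing it with the expected count. A condensed sketch with a hypothetical helper name, assuming the same RestConnection API used above:

from membase.api.rest_client import RestConnection

def assert_item_count(server, bucket_name, expected_items):
    # Compare the item count the server reports for the bucket
    # (basicStats.itemCount) against the number of items that were loaded.
    rest = RestConnection(server)
    item_count = rest.get_bucket_json(bucket_name)['basicStats']['itemCount']
    assert item_count == expected_items, \
        "expected {0} items, bucket reports {1}".format(expected_items, item_count)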

Example 2: _get_current_auto_compaction_percentage

# Required import: from membase.api.rest_client import RestConnection [as alias]
# Or: from membase.api.rest_client.RestConnection import get_bucket_json [as alias]
    def _get_current_auto_compaction_percentage(self):
        """ check at bucket level and cluster level for compaction percentage """

        auto_compact_percentage = None
        rest = RestConnection(self.server)

        content = rest.get_bucket_json(self.bucket)
        if content["autoCompactionSettings"] != False:
            auto_compact_percentage =\
                content["autoCompactionSettings"]["viewFragmentationThreshold"]["percentage"]
        else:
            # try to read cluster level compaction settings
            content = rest.cluster_status()
            auto_compact_percentage =\
                content["autoCompactionSettings"]["viewFragmentationThreshold"]["percentage"]

        return auto_compact_percentage
Author: jchris, Project: testrunner, Lines of code: 19, Source: task.py
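
The bucket-level/cluster-level fallback in example 2 can also be written as a plain dict lookup. A sketch under the same assumptions about the JSON layout (autoCompactionSettings is the literal False when the bucket has no per-bucket settings):

def view_fragmentation_threshold(rest, bucket):
    # Prefer the bucket's own autoCompactionSettings; fall back to the
    # cluster-wide settings from cluster_status() when the bucket has none.
    settings = rest.get_bucket_json(bucket).get("autoCompactionSettings")
    if not settings:
        settings = rest.cluster_status()["autoCompactionSettings"]
    return settings["viewFragmentationThreshold"]["percentage"]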

Example 3: store_range_json

# Required import: from membase.api.rest_client import RestConnection [as alias]
# Or: from membase.api.rest_client.RestConnection import get_bucket_json [as alias]
 def store_range_json(self, buckets, backup_num, backup_validation_path, merge=False):
     rest_conn = RestConnection(self.backupset.cluster_host)
     shell = RemoteMachineShellConnection(self.backupset.backup_host)
     for bucket in buckets:
         bucket_stats = rest_conn.get_bucket_json(bucket)
         bucket_uuid = bucket_stats["uuid"]
         from_file_name = self.backupset.directory + "/" + self.backupset.name + "/" + \
                          self.backups[backup_num - 1] + "/" + bucket.name + "-" + bucket_uuid + "/" + "range.json"
         if merge:
             to_file_name = "{0}-{1}-{2}.json".format(bucket, "range", "merge")
         else:
             to_file_name = "{0}-{1}-{2}.json".format(bucket, "range", backup_num)
         to_file_path = os.path.join(backup_validation_path, to_file_name)
         output, error = shell.execute_command("cat " + from_file_name)
         output = [x.strip(' ') for x in output]
         if output:
             output = " ".join(output)
         with open(to_file_path, 'w') as f:
             json.dump(output, f)
Author: membase, Project: testrunner, Lines of code: 21, Source: validation_base.py
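
In example 3, get_bucket_json is needed only for the bucket's uuid, which is part of the on-disk backup directory name. A hypothetical path helper, assuming backupset objects with directory/name attributes and bucket objects with a name attribute, as in the example:

import os

def range_json_path(rest, backupset, backup_name, bucket):
    # The per-bucket backup directory is named <bucket>-<uuid>, so the uuid
    # from the bucket JSON is required to locate its range.json.
    bucket_uuid = rest.get_bucket_json(bucket)["uuid"]
    return os.path.join(backupset.directory, backupset.name, backup_name,
                        "{0}-{1}".format(bucket.name, bucket_uuid), "range.json")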

Example 4: check

# Required import: from membase.api.rest_client import RestConnection [as alias]
# Or: from membase.api.rest_client.RestConnection import get_bucket_json [as alias]
    def check(self, task_manager):

        rest = RestConnection(self.server)
        try:
            # verify server accepted settings
            content = rest.get_bucket_json(self.bucket)
            if content["autoCompactionSettings"] == False:
                self.set_exception(Exception("Failed to set auto compaction settings"))
            else:
                # retrieved compaction settings
                self.set_result(True)
            self.state = FINISHED

        except GetBucketInfoFailed as e:
            # subsequent query failed! exit
            self.state = FINISHED
            self.set_exception(e)
        #catch and set all unexpected exceptions
        except Exception as e:
            self.state = FINISHED
            self.log.info("Unexpected Exception Caught")
            self.set_exception(e)
Author: jchris, Project: testrunner, Lines of code: 24, Source: task.py
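
Outside the task framework, the check in example 4 boils down to a single comparison plus a failure path for the bucket-info call. A sketch, assuming GetBucketInfoFailed is importable from membase.api.exception as in testrunner:

from membase.api.exception import GetBucketInfoFailed
from membase.api.rest_client import RestConnection

def compaction_settings_applied(server, bucket):
    # True when the server reports per-bucket auto-compaction settings,
    # False when they are absent or the bucket-info request itself fails.
    rest = RestConnection(server)
    try:
        return rest.get_bucket_json(bucket)["autoCompactionSettings"] != False
    except GetBucketInfoFailed:
        return False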

Example 5: Upgrade_EpTests

# Required import: from membase.api.rest_client import RestConnection [as alias]
# Or: from membase.api.rest_client.RestConnection import get_bucket_json [as alias]
class Upgrade_EpTests(UpgradeTests):

    def setUp(self):
        super(Upgrade_EpTests, self).setUp()
        print self.master
        self.rest = RestConnection(self.master)
        self.bucket = 'default' # temp fix
        self.client = VBucketAwareMemcached(self.rest, self.bucket)
        self.time_synchronization ='disabled'
        print 'checking for self.servers {0}'.format(self.servers[1])
        self.prefix = "test_"
        self.expire_time = 5
        self.item_flag = 0
        self.value_size = 256


    def tearDown(self):
        #super(Upgrade_EpTests, self).tearDown()
        self.testcase = '2'
        if not "skip_cleanup" in TestInputSingleton.input.test_params:
            BucketOperationHelper.delete_all_buckets_or_assert(
                self.servers, self.testcase)
            ClusterOperationHelper.cleanup_cluster(self.servers)
            ClusterOperationHelper.wait_for_ns_servers_or_assert(
                self.servers, self.testcase)

    def test_upgrade(self):
        self.log.info('Starting upgrade tests...')

        #o = OpsChangeCasTests()

        self.log.info('Inserting few items pre upgrade')
        self._load_ops(ops='set', mutations=20, master=self.master, bucket=self.bucket)
        self.log.info('Upgrading ..')
        try:
            UpgradeTests.test_upgrade(self)
        finally:
            self.log.info(' Done with Upgrade ')

        self.log.info('Testing the meta details on items post upgrade')
        #self._check_config()
        self._check_cas(check_conflict_resolution=True, master=self.servers[1], bucket=self.bucket)

    def _check_config(self):
        result = self.rest.get_bucket_json(self.bucket)["timeSynchronization"]
        print result
        self.assertEqual(result,self.time_synchronization, msg='ERROR, Mismatch on expected time synchronization values')
        self.log.info("Verified results")

    def _load_ops(self, ops=None, mutations=1, master=None, bucket=None):

        if master:
            self.rest = RestConnection(master)
        if bucket:
            self.client = VBucketAwareMemcached(self.rest, bucket)

        k=0
        payload = MemcachedClientHelper.create_value('*', self.value_size)

        while k<10:
            key = "{0}{1}".format(self.prefix, k)
            k += 1
            for i in range(mutations):
                if ops=='set':
                    #print 'set'
                    self.client.memcached(key).set(key, 0, 0,payload)
                elif ops=='add':
                    #print 'add'
                    self.client.memcached(key).add(key, 0, 0,payload)
                elif ops=='replace':
                    self.client.memcached(key).replace(key, 0, 0,payload)
                    #print 'Replace'
                elif ops=='delete':
                    #print 'delete'
                    self.client.memcached(key).delete(key)
                elif ops=='expiry':
                    #print 'expiry'
                    self.client.memcached(key).set(key, self.expire_time ,0, payload)
                elif ops=='touch':
                    #print 'touch'
                    self.client.memcached(key).touch(key, 10)

        self.log.info("Done with specified {0} ops".format(ops))

    ''' Common function to verify the expected values on cas
    '''
    def _check_cas(self, check_conflict_resolution=False, master=None, bucket=None, time_sync=None):
        self.log.info(' Verifying cas and max cas for the keys')
        if master:
            self.rest = RestConnection(master)
            self.client = VBucketAwareMemcached(self.rest, bucket)

        k=0

        while k<10:
            key = "{0}{1}".format(self.prefix, k)
            k += 1
            mc_active = self.client.memcached(key)

            cas = mc_active.getMeta(key)[4]
#......... the rest of the code is omitted here .........
Author: EricACooper, Project: testrunner, Lines of code: 103, Source: upgrade_ep.py
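
Example 5 reads a single field, timeSynchronization, from the bucket JSON to verify the bucket's time-synchronization mode after the upgrade. A minimal check with a hypothetical helper name, under the same assumptions:

def check_time_synchronization(rest, bucket, expected='disabled'):
    # Compare the bucket's reported timeSynchronization mode with the
    # expected value (the example expects 'disabled').
    return rest.get_bucket_json(bucket)["timeSynchronization"] == expected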


Note: The membase.api.rest_client.RestConnection.get_bucket_json method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are selected from open-source projects contributed by their authors, and the source code remains under the original authors' copyright; consult the corresponding project's license before distributing or reusing it. Do not reproduce without permission.