

Python HTTPClient.post Method Code Examples

This article collects typical usage examples of the Python method treq.client.HTTPClient.post. If you are wondering what HTTPClient.post does, how to call it, or what it looks like in real code, the curated examples below may help. You can also explore further usage examples of the class it belongs to, treq.client.HTTPClient.


Seven code examples of the HTTPClient.post method are shown below, ordered by popularity.
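For orientation, the examples below all follow the same basic pattern: wrap a Twisted Agent in an HTTPClient, POST a JSON-encoded body with an explicit Content-Type header, and decode the response with treq.json_content. The following is a minimal, self-contained sketch of that pattern; the URL and payload are placeholders, not taken from any of the projects below.

import json

import treq
from treq.client import HTTPClient
from twisted.internet import reactor
from twisted.python import log
from twisted.web.client import Agent


def main():
    agent = Agent(reactor)                   # no connection pool, as in the examples below
    client = HTTPClient(agent)
    d = client.post(
        'http://127.0.0.1:8080/adapter',     # placeholder URL
        json.dumps({"Number": 7}).encode('utf-8'),   # JSON-encoded request body
        headers={'Content-Type': ['application/json']})
    d.addCallback(treq.json_content)         # parse the JSON response body

    def show(body):
        print(body)                          # inspect the decoded response
    d.addCallback(show)
    d.addErrback(log.err)                    # log connection or decoding failures
    d.addBoth(lambda _: reactor.stop())
    return d


if __name__ == '__main__':
    reactor.callWhenRunning(main)
    reactor.run()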

Example 1: TestFakeDockerServer

# Required import: from treq.client import HTTPClient [as alias]
# Alternatively: from treq.client.HTTPClient import post [as alias]
class TestFakeDockerServer(TestCase):
    def setUp(self):
        self.dockerAPI = FakeDockerServer()
        self.dockerServer = reactor.listenTCP(0, self.dockerAPI)
        self.dockerPort = self.dockerServer.getHost().port
        self.agent = Agent(reactor) # no connectionpool
        self.client = HTTPClient(self.agent)

    def tearDown(self):
        return self.dockerServer.stopListening()

    def test_douglas_adams_would_be_proud(self):
        d = self.client.post('http://127.0.0.1:%d/towel' % (self.dockerPort,),
                      json.dumps({"hiding": "things"}),
                      headers={'Content-Type': ['application/json']})
        d.addCallback(treq.json_content)
        def verify(response):
            self.assertEqual(response,
                    {"hiding": "things", "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d
Developer ID: mbit-cloud, Project: powerstrip, Lines: 23, Source file: test_testtools.py

Example 2: TestAdderPlugin

# Required import: from treq.client import HTTPClient [as alias]
# Alternatively: from treq.client.HTTPClient import post [as alias]
class TestAdderPlugin(TestCase):
    def _getAdder(self, *args, **kw):
        self.adderAPI = AdderPlugin(*args, **kw)
        self.adderServer = reactor.listenTCP(0, self.adderAPI)
        self.adderPort = self.adderServer.getHost().port

    def setUp(self):
        self.agent = Agent(reactor) # no connectionpool
        self.client = HTTPClient(self.agent)

    def tearDown(self):
        return self.adderServer.stopListening()

    def test_adder_explode(self):
        """
        The adder adapter blows up (sends an HTTP 500) when asked to.
        """
        self._getAdder(explode=True)
        d = self.client.post('http://127.0.0.1:%d/adapter' % (self.adderPort,),
                      json.dumps({}),
                      headers={'Content-Type': ['application/json']})
        def verifyResponseCode(response):
            self.assertEqual(response.code, 500)
            return response
        d.addCallback(verifyResponseCode)
        d.addCallback(treq.content)
        def verify(body):
            self.assertEqual(body, "sadness for you, today.")
        d.addCallback(verify)
        return d

    def test_adder_pre(self):
        """
        The adder pre-hook increments an integer according to the protocol
        defined in the README.
        """
        self._getAdder(pre=True)
        d = self.client.post('http://127.0.0.1:%d/adapter' % (self.adderPort,),
                      json.dumps({
                          "PowerstripProtocolVersion": 1,
                          "Type": "pre-hook",
                          "ClientRequest": {
                              "Method": "POST",
                              "Request": "/fictional",
                              "Body": json.dumps({"Number": 7})}}),
                      headers={'Content-Type': ['application/json']})
        def verifyResponseCode(response):
            self.assertEqual(response.code, 200)
            return response
        d.addCallback(verifyResponseCode)
        d.addCallback(treq.json_content)
        def verify(body):
            self.assertEqual(json.loads(body["ModifiedClientRequest"]["Body"])["Number"], 8)
        d.addCallback(verify)
        return d

    def test_adder_post(self):
        """
        The adder post-hook increments an integer according to the protocol
        defined in the README.
        """
        self._getAdder(post=True)
        d = self.client.post('http://127.0.0.1:%d/adapter' % (self.adderPort,),
                      json.dumps({
                          "Type": "post-hook",
                          "ClientRequest": {
                              "Method": "POST",
                              "Request": "/fictional",
                              "Body": json.dumps({}),},
                          "ServerResponse": {
                              "ContentType": "application/json",
                              "Body": json.dumps({"Number": 7}),
                              "Code": 200,},
                          }),
                      headers={'Content-Type': ['application/json']})
        def verifyResponseCode(response):
            self.assertEqual(response.code, 200)
            return response
        d.addCallback(verifyResponseCode)
        d.addCallback(treq.json_content)
        def verify(body):
            self.assertEqual(json.loads(body["ModifiedServerResponse"]["Body"])["Number"], 8)
        d.addCallback(verify)
        return d
Developer ID: mbit-cloud, Project: powerstrip, Lines: 86, Source file: test_testtools.py

Example 3: DockerProxy

# Required import: from treq.client import HTTPClient [as alias]
# Alternatively: from treq.client.HTTPClient import post [as alias]
class DockerProxy(proxy.ReverseProxyResource):
    proxyClientFactoryClass = DockerProxyClientFactory


    def __init__(self, dockerAddr=None, dockerPort=None, dockerSocket=None,
            path='', reactor=reactor, config=None):
        """
        A docker proxy resource which knows how to connect to real Docker
        daemon either via socket (dockerSocket specified) or address + port for
        TCP connection (dockerAddr + dockerPort specified).
        """
        if config is None:
            # Try to get the configuration from the default place on the
            # filesystem.
            self.config = PluginConfiguration()
        else:
            self.config = config
        self.config.read_and_parse()
        self.parser = EndpointParser(self.config)
        Resource.__init__(self)
        self.host = dockerAddr
        self.port = dockerPort
        self.socket = dockerSocket
        self.path = path
        self.reactor = reactor
        proxy.ReverseProxyResource.__init__(self, dockerAddr, dockerPort, path, reactor) # NB dockerAddr is not actually used
        self.agent = Agent(reactor) # no connectionpool
        self.client = HTTPClient(self.agent)


    def render(self, request, reactor=reactor):
        # We are processing a leaf request.
        # Get the original request body from the client.
        skipPreHooks = False
        if request.requestHeaders.getRawHeaders('content-type') == ["application/json"]:
            originalRequestBody = request.content.read()
            request.content.seek(0) # hee hee
        elif request.requestHeaders.getRawHeaders('content-type') == ["application/tar"]:
            # We can't JSON encode binary data, so don't even try.
            skipPreHooks = True
            originalRequestBody = None
        else:
            originalRequestBody = None
        preHooks = []
        postHooks = []
        d = defer.succeed(None)
        for endpoint in self.parser.match_endpoint(request.method, request.uri.split("?")[0]):
            # It's possible for a request to match multiple endpoint
            # definitions.  Order of matched endpoint is not defined in
            # that case.
            adapters = self.config.endpoint(endpoint)
            preHooks.extend(adapters.pre)
            postHooks.extend(adapters.post)
        def callPreHook(result, hookURL):
            if result is None:
                newRequestBody = originalRequestBody
            else:
                newRequestBody = result["ModifiedClientRequest"]["Body"]
            return self.client.post(hookURL, json.dumps({
                        "PowerstripProtocolVersion": 1,
                        "Type": "pre-hook",
                        "ClientRequest": {
                            "Method": request.method,
                            "Request": request.uri,
                            "Body": newRequestBody,
                        }
                    }), headers={'Content-Type': ['application/json']})
        if not skipPreHooks:
            for preHook in preHooks:
                hookURL = self.config.adapter_uri(preHook)
                d.addCallback(callPreHook, hookURL=hookURL)
                d.addCallback(treq.json_content)
        def doneAllPrehooks(result):
            # Finally pass through the request to actual Docker.  For now we
            # mutate request in-place in such a way that ReverseProxyResource
            # understands it.
            if result is not None:
                requestBody = b""
                bodyFromAdapter = result["ModifiedClientRequest"]["Body"]
                if bodyFromAdapter is not None:
                    requestBody = bodyFromAdapter.encode("utf-8")
                request.content = StringIO.StringIO(requestBody)
                request.requestHeaders.setRawHeaders(b"content-length",
                        [str(len(requestBody))])
            ###########################
            # The following code is copied from t.w.proxy.ReverseProxy so that
            # clientFactory reference can be kept.
            if not self.socket:
                if self.port == 80:
                    host = self.host
                else:
                    host = "%s:%d" % (self.host, self.port)
                request.requestHeaders.setRawHeaders(b"host", [host])
            request.content.seek(0, 0)
            qs = urlparse.urlparse(request.uri)[4]
            if qs:
                rest = self.path + '?' + qs
            else:
                rest = self.path
            allRequestHeaders = request.getAllHeaders()
#......... part of the code omitted here .........
Developer ID: mercykevin, Project: powerstrip, Lines: 103, Source file: powerstrip.py

Example 4: MountResource

# Required import: from treq.client import HTTPClient [as alias]
# Alternatively: from treq.client.HTTPClient import post [as alias]

#......... part of the code omitted here .........
                    therefore that when it settles down to only show it on one
                    host that this means the move is complete.
                    """
                    print "Got", self.ip, self.host_uuid, "datasets:", datasets
                    matching_datasets = []
                    for dataset in datasets:
                        if dataset["dataset_id"] == dataset_id:
                            matching_datasets.append(dataset)
                    if len(matching_datasets) == 1:
                        if matching_datasets[0]["primary"] == self.host_uuid:
                            return matching_datasets[0]
                    return False
                d.addCallback(check_dataset_exists)
                return d
            d = loop_until(dataset_exists)
            d.addCallback(lambda dataset: (fs, dataset))
            return d

        d = self.client.get(self.base_url + "/state/nodes")
        d.addCallback(treq.json_content)
        def find_my_uuid(nodes):
            for node in nodes:
                if node["host"] == self.ip:
                    self.host_uuid = node["uuid"]
                    break
            return self.client.get(self.base_url + "/configuration/datasets")
        d.addCallback(find_my_uuid)

        d.addCallback(treq.json_content)
        def got_dataset_configuration(configured_datasets):
            # form a mapping from names onto dataset objects
            configured_dataset_mapping = {}
            for dataset in configured_datasets:
                if dataset["metadata"].get("name"):
                    configured_dataset_mapping[dataset["metadata"].get("name")] = dataset

            # iterate over the datasets we were asked to create by the docker client
            fs_create_deferreds = []
            old_binds = []
            print "got json_parsed...", json_parsed
            if json_parsed['Name'] is not None and json_parsed['Name'] != "":
                binds = [json_parsed['Name']]
                for bind in binds:
                    fs, remainder = bind, ""
                    # TODO validation
                    # if "/" in fs:
                    #    raise Exception("Not allowed flocker filesystems more than one level deep")
                    old_binds.append((fs, remainder))
                    # if a dataset exists, and is in the right place, we're cool.
                    if fs in configured_dataset_mapping:
                        dataset = configured_dataset_mapping[fs]
                        if dataset["primary"] == self.host_uuid:
                            # check / wait for the state to match the desired
                            # configuration
                            fs_create_deferreds.append(wait_until_volume_in_place(dataset, fs=fs))
                        else:
                            # if a dataset exists, but is on the wrong server [TODO
                            # and is not being used], then move it in place.
                            d = self.client.post(
                                self.base_url + "/configuration/datasets/%s" % (
                                    dataset["dataset_id"].encode('ascii'),),
                                json.dumps({"primary": self.host_uuid}),
                                headers={'Content-Type': ['application/json']})
                            d.addCallback(treq.json_content)
                            d.addCallback(wait_until_volume_in_place, fs=fs)
                            fs_create_deferreds.append(d)
                    else:
                        # if a dataset doesn't exist at all, create it on this server.
                        d = self.client.post(self.base_url + "/configuration/datasets",
                            json.dumps({"primary": self.host_uuid, "metadata": {"name": fs}}),
                            headers={'Content-Type': ['application/json']})
                        d.addCallback(treq.json_content)
                        d.addCallback(wait_until_volume_in_place, fs=fs)
                        fs_create_deferreds.append(d)

            d = defer.gatherResults(fs_create_deferreds)
            def got_created_and_moved_datasets(list_new_datasets):
                dataset_mapping = dict(list_new_datasets)
                print "constructed dataset_mapping", dataset_mapping
                new_binds = []
                for fs, remainder in old_binds:
                    # forget about remainder...
                    new_binds.append(dataset_mapping[fs]["path"])
                new_json = {}
                if new_binds:
                    new_json["Mountpoint"] = new_binds[0]
                    new_json["Err"] = None
                else:
                    # This is how you indicate not handling this request
                    new_json["Mountpoint"] = ""
                    new_json["Err"] = "unable to handle"

                print "<<< responding with", new_json
                request.write(json.dumps(new_json))
                request.finish()
            d.addCallback(got_created_and_moved_datasets)
            return d
        d.addCallback(got_dataset_configuration)
        d.addErrback(log.err, 'while processing configured datasets')
        return server.NOT_DONE_YET
Developer ID: binocarlos, Project: flocker-docker-plugin, Lines: 104, Source file: adapter.py

Example 5: HTTPClientTests

# Required import: from treq.client import HTTPClient [as alias]
# Alternatively: from treq.client.HTTPClient import post [as alias]
class HTTPClientTests(TestCase):
    def setUp(self):
        self.agent = mock.Mock(Agent)
        self.client = HTTPClient(self.agent)

        self.fbp_patcher = mock.patch('treq.client.FileBodyProducer')
        self.FileBodyProducer = self.fbp_patcher.start()
        self.addCleanup(self.fbp_patcher.stop)

        self.mbp_patcher = mock.patch('treq.multipart.MultiPartProducer')
        self.MultiPartProducer = self.mbp_patcher.start()
        self.addCleanup(self.mbp_patcher.stop)

    def assertBody(self, expected):
        body = self.FileBodyProducer.mock_calls[0][1][0]
        self.assertEqual(body.read(), expected)

    def test_post(self):
        self.client.post('http://example.com/')
        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_uri_idn(self):
        self.client.request('GET', u'http://č.net')
        self.agent.request.assert_called_once_with(
            b'GET', b'http://xn--bea.net',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_case_insensitive_methods(self):
        self.client.request('gEt', 'http://example.com/')
        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_query_params(self):
        self.client.request('GET', 'http://example.com/',
                            params={'foo': ['bar']})

        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?foo=bar',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_tuple_query_values(self):
        self.client.request('GET', 'http://example.com/',
                            params={'foo': ('bar',)})

        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?foo=bar',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_merge_query_params(self):
        self.client.request('GET', 'http://example.com/?baz=bax',
                            params={'foo': ['bar', 'baz']})

        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?baz=bax&foo=bar&foo=baz',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_merge_tuple_query_params(self):
        self.client.request('GET', 'http://example.com/?baz=bax',
                            params=[('foo', 'bar')])

        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?baz=bax&foo=bar',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_dict_single_value_query_params(self):
        self.client.request('GET', 'http://example.com/',
                            params={'foo': 'bar'})

        self.agent.request.assert_called_once_with(
            b'GET', b'http://example.com/?foo=bar',
            Headers({b'accept-encoding': [b'gzip']}), None)

    def test_request_data_dict(self):
        self.client.request('POST', 'http://example.com/',
                            data={'foo': ['bar', 'baz']})

        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/x-www-form-urlencoded'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)

        self.assertBody(b'foo=bar&foo=baz')

    def test_request_data_single_dict(self):
        self.client.request('POST', 'http://example.com/',
                            data={'foo': 'bar'})

        self.agent.request.assert_called_once_with(
            b'POST', b'http://example.com/',
            Headers({b'Content-Type': [b'application/x-www-form-urlencoded'],
                     b'accept-encoding': [b'gzip']}),
            self.FileBodyProducer.return_value)

        self.assertBody(b'foo=bar')

    def test_request_data_tuple(self):
#......... part of the code omitted here .........
Developer ID: twisted, Project: treq, Lines: 103, Source file: test_client.py

Example 6: ProxyTests

# Required import: from treq.client import HTTPClient [as alias]
# Alternatively: from treq.client.HTTPClient import post [as alias]
class ProxyTests(TestCase, GenerallyUsefulPowerstripTestMixin):

    def setUp(self):
        """
        Construct a fake "Docker daemon" (one which does much less than the
        actual Docker daemon) and a Proxy instance.

        Pre- and post-hook API servers are provided by the individual tests.
        """
        self.agent = Agent(reactor) # no connectionpool
        self.client = HTTPClient(self.agent)

    def tearDown(self):
        shutdowns = [
            self.dockerServer.stopListening(),
            self.proxyServer.stopListening()]
        if hasattr(self, 'adderServer'):
            shutdowns.append(self.adderServer.stopListening())
        if hasattr(self, 'adderTwoServer'):
            shutdowns.append(self.adderTwoServer.stopListening())
        return defer.gatherResults(shutdowns)

    def test_empty_endpoints(self):
        """
        The proxy passes through requests when no endpoints are specified.

        In particular, when POST to the /towel endpoint on the *proxy*, we get
        to see that we were seen by the (admittedly fake) Docker daemon.
        """
        self._configure("endpoints: {}\nadapters: {}")
        d = self.client.post('http://127.0.0.1:%d/towel' % (self.proxyPort,),
                      json.dumps({"hiding": "things"}),
                      headers={'Content-Type': ['application/json']})
        d.addCallback(treq.json_content)
        def verify(response):
            self.assertEqual(response,
                    {"hiding": "things", "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_empty_endpoints_socket(self):
        """
        The proxy is able to connect to Docker on a UNIX socket.
        """
        self._configure("endpoints: {}\nadapters: {}", dockerOnSocket=True)
        d = self.client.post('http://127.0.0.1:%d/towel' % (self.proxyPort,),
                      json.dumps({"hiding": "things"}),
                      headers={'Content-Type': ['application/json']})
        d.addCallback(treq.json_content)
        def verify(response):
            self.assertEqual(response,
                    {"hiding": "things", "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def test_endpoint_and_empty_hooks(self):
        """
        An endpoint is specified, but no pre-or post hooks are added to it.
        Requests to the endpoint are proxied.
        """
        endpoint = "/towel"
        self._configure("""endpoints:
  "POST %s":
    pre: []
    post: []
adapters: {}""" % (endpoint,))
        d = self.client.post('http://127.0.0.1:%d%s' % (self.proxyPort, endpoint),
                             json.dumps({"hiding": "things"}),
                             headers={'Content-Type': ['application/json']})
        d.addCallback(treq.json_content)
        def verify(response):
            self.assertEqual(response,
                    {"hiding": "things", "SeenByFakeDocker": 42})
        d.addCallback(verify)
        return d

    def _getAdder(self, *args, **kw):
        self.adderAPI = TrafficLoggingFactory(AdderPlugin(*args, **kw), "adder-")
        self.adderServer = reactor.listenTCP(0, self.adderAPI)
        self.adderPort = self.adderServer.getHost().port

    def _getAdderTwo(self, *args, **kw):
        kw["incrementBy"] = 2
        self.adderTwoAPI = TrafficLoggingFactory(AdderPlugin(*args, **kw), "adder2-")
        self.adderTwoServer = reactor.listenTCP(0, self.adderTwoAPI)
        self.adderTwoPort = self.adderTwoServer.getHost().port

    def _hookTest(self, config_yml, adderArgs=dict(pre=True), adderTwoArgs=dict(pre=True)):
        """
        Generalised version of a pre-hook test.
        """
        self._getAdder(**adderArgs)
        self._getAdderTwo(**adderTwoArgs)
        self.dockerEndpoint = "/towel"
        self.adapterEndpoint = "/adapter"
        self.args = dict(dockerEndpoint=self.dockerEndpoint,
                         adapterEndpoint=self.adapterEndpoint,
                         adderPort=self.adderPort,
                         adderTwoPort=self.adderTwoPort)
        self._configure(config_yml % self.args)
#......... part of the code omitted here .........
Developer ID: mbit-cloud, Project: powerstrip, Lines: 103, Source file: test_core.py

Example 7: AdapterResource

# Required import: from treq.client import HTTPClient [as alias]
# Alternatively: from treq.client.HTTPClient import post [as alias]
class AdapterResource(resource.Resource):
    """
    A powerstrip pre-hook for container create.
    """
    isLeaf = True

    def __init__(self, *args, **kw):
        self._agent = Agent(reactor) # no connectionpool
        self.client = HTTPClient(self._agent)
        return resource.Resource.__init__(self, *args, **kw)

    def render_POST(self, request):
        """
        Handle a pre-hook: either create a filesystem, or move it in place.
        """
        requestJson = json.loads(request.content.read())
        if requestJson["Type"] != "pre-hook":
            raise Exception("unsupported hook type %s" %
                (requestJson["Type"],))

        pprint.pprint(os.environ)
        # BASE_URL like http://control-service/v1/ ^
        json_payload = requestJson["ClientRequest"]["Body"]
        json_parsed = json.loads(json_payload)

        self.base_url = os.environ.get("FLOCKER_CONTROL_SERVICE_BASE_URL")
        self.ip = os.environ.get("MY_NETWORK_IDENTITY")
        self.host_uuid = os.environ.get("MY_HOST_UUID")

        def wait_until_volume_in_place(result, fs):
            """
            Called after a dataset has been created or moved in the cluster's
            desired configuration. Wait until the volume shows up in the
            cluster actual state on the right host (either having been created
            or moved).

            :return: Deferred which fires with the tuple (fs, dataset_id) --
                that is, the filesystem and the corresponding flocker dataset
                uuid that the docker client asked for -- firing only once the
                filesystem has been created/moved and mounted (iow, exists on
                the right host in the cluster state).
            """
            dataset_id = result["dataset_id"]
            def dataset_exists():
                d = self.client.get(self.base_url + "/state/datasets")
                d.addCallback(treq.json_content)
                def check_dataset_exists(datasets):
                    """
                    The /v1/state/datasets API seems to show the volume as
                    being on two hosts at once during a move. We assume
                    therefore that when it settles down to only show it on one
                    host that this means the move is complete.
                    """
                    print "Got", self.ip, "datasets:", datasets
                    matching_datasets = []
                    for dataset in datasets:
                        if dataset["dataset_id"] == dataset_id:
                            matching_datasets.append(dataset)
                    if len(matching_datasets) == 1:
                        if matching_datasets[0]["primary"] == self.ip:
                            return True
                    return False
                d.addCallback(check_dataset_exists)
                return d
            d = loop_until(dataset_exists)
            d.addCallback(lambda ignored: (fs, dataset_id))
            return d

        d = self.client.get(self.base_url + "/configuration/datasets")
        d.addCallback(treq.json_content)
        def got_dataset_configuration(configured_datasets):
            # form a mapping from names onto dataset objects
            configured_dataset_mapping = {}
            for dataset in configured_datasets:
                if dataset["metadata"].get("name"):
                    configured_dataset_mapping[dataset["metadata"].get("name")] = dataset

            # iterate over the datasets we were asked to create by the docker client
            fs_create_deferreds = []
            old_binds = []
            if json_parsed['HostConfig']['Binds'] is not None:
                for bind in json_parsed['HostConfig']['Binds']:
                    host_path, remainder = bind.split(":", 1)
                    # TODO validation
                    # if "/" in fs:
                    #    raise Exception("Not allowed flocker filesystems more than one level deep")
                    if host_path.startswith("/flocker/"):
                        fs = host_path[len("/flocker/"):]
                        old_binds.append((fs, remainder))
                        # if a dataset exists, and is in the right place, we're cool.
                        if fs in configured_dataset_mapping:
                            dataset = configured_dataset_mapping[fs]
                            if dataset["primary"] == self.ip:
                                # simulate "immediate success"
                                fs_create_deferreds.append(defer.succeed((fs, dataset["dataset_id"])))
                            else:
                                # if a dataset exists, but is on the wrong server [TODO
                                # and is not being used], then move it in place.
                                d = self.client.post(
                                    self.base_url + "/configuration/datasets/%s" % (
#......... part of the code omitted here .........
Developer ID: carriercomm, Project: powerstrip-flocker, Lines: 103, Source file: adapter.py


Note: The treq.client.HTTPClient.post examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are taken from open-source projects contributed by their respective developers; copyright remains with the original authors, and distribution and use are governed by each project's license. Do not reproduce without permission.