本文整理汇总了Python中pyelasticsearch.downtime.DowntimePronePool.get方法的典型用法代码示例。如果您正苦于以下问题:Python DowntimePronePool.get方法的具体用法?Python DowntimePronePool.get怎么用?Python DowntimePronePool.get使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类pyelasticsearch.downtime.DowntimePronePool
的用法示例。
在下文中一共展示了DowntimePronePool.get方法的2个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: ElasticSearch
# 需要导入模块: from pyelasticsearch.downtime import DowntimePronePool [as 别名]
# 或者: from pyelasticsearch.downtime.DowntimePronePool import get [as 别名]
class ElasticSearch(object):
"""
An object which manages connections to elasticsearch and acts as a
go-between for API calls to it
This object is thread-safe. You can create one instance and share it
among all threads.
"""
def __init__(self, urls, timeout=60, max_retries=0, revival_delay=300):
    """
    Set up the server pool, HTTP session, and retry policy.

    :arg urls: A URL or iterable of URLs of ES nodes. These are full URLs
        with port numbers, like ``http://elasticsearch.example.com:9200``.
    :arg timeout: Number of seconds to wait for each request before raising
        Timeout
    :arg max_retries: How many other servers to try, in series, after a
        request times out or a connection fails
    :arg revival_delay: Number of seconds for which to avoid a server after
        it times out or is uncontactable
    """
    # Promote a lone URL string to a one-element list, then strip any
    # trailing slash so path joining stays predictable.
    url_list = [urls] if isinstance(urls, string_types) else urls
    cleaned = [url.rstrip("/") for url in url_list]
    self.servers = DowntimePronePool(cleaned, revival_delay)
    self.revival_delay = revival_delay
    self.timeout = timeout
    self.max_retries = max_retries
    self.logger = getLogger("pyelasticsearch")
    # One shared session gives connection pooling across all calls.
    self.session = requests.session()
    self.json_encoder = JsonEncoder
def _concat(self, items):
"""
Return a comma-delimited concatenation of the elements of ``items``,
with any occurrences of "_all" omitted.
If ``items`` is a string, promote it to a 1-item list.
"""
# TODO: Why strip out _all?
if items is None:
return ""
if isinstance(items, string_types):
items = [items]
return ",".join(i for i in items if i != "_all")
def _to_query(self, obj):
    """
    Render a native-Python value as text suitable for an ES query string.

    Strings pass through unchanged; bools, ints, floats, datetimes, and
    (possibly nested) lists/tuples are converted; anything else raises
    TypeError.
    """
    # Quick and dirty thus far
    if isinstance(obj, string_types):
        return obj
    if isinstance(obj, bool):
        # bool subclasses int, so it must be tested before the int branch.
        return "true" if obj else "false"
    if isinstance(obj, integer_types):
        return str(obj)
    if isinstance(obj, float):
        # repr() round-trips floats; str() can lose precision.
        return repr(obj)
    if isinstance(obj, (list, tuple)):
        return ",".join(self._to_query(element) for element in obj)
    as_iso = _iso_datetime(obj)
    if as_iso:
        return as_iso
    raise TypeError("_to_query() doesn't know how to represent %r in an ES"
                    " query string." % obj)
def _utf8(self, thing):
    """Return ``thing`` as a utf-8-encoded bytestring."""
    # Bytes pass through untouched; everything else is coerced to text
    # first (via text_type) and then encoded.
    if isinstance(thing, binary_type):
        return thing
    text = thing if isinstance(thing, text_type) else text_type(thing)
    return text.encode("utf-8")
def _join_path(self, path_components):
    """
    Build a URL path from ``path_components``, skipping '' and None ones.

    Each kept component is utf-8 encoded and percent-quoted (including
    any "/" inside it), and the result always starts with "/".
    """
    kept = (c for c in path_components if c is not None and c != "")
    path = "/".join(quote_plus(self._utf8(c), "") for c in kept)
    return path if path.startswith("/") else "/" + path
def send_request(self, method, path_components, body="", query_params=None, encode_body=True):
"""
Send an HTTP request to ES, and return the JSON-decoded response.
This is mostly an internal method, but it also comes in handy if you
need to use a brand new ES API that isn't yet explicitly supported by
pyelasticsearch, while still taking advantage of our connection pooling
and retrying.
Retry the request on different servers if the first one is down and
``self.max_retries`` > 0.
:arg method: An HTTP method, like "GET"
#.........这里部分代码省略.........
示例2: ElasticSearch
# 需要导入模块: from pyelasticsearch.downtime import DowntimePronePool [as 别名]
# 或者: from pyelasticsearch.downtime.DowntimePronePool import get [as 别名]
class ElasticSearch(object):
"""
An object which manages connections to elasticsearch and acts as a
go-between for API calls to it
"""
def __init__(self, urls, timeout=60, max_retries=0, revival_delay=300):
    """
    Configure the server pool, retry policy, and JSON encoder.

    :arg timeout: Number of seconds to wait for each request before raising
        Timeout
    :arg max_retries: How many other servers to try, in series, after a
        request times out or a connection fails
    :arg revival_delay: Number of seconds for which to avoid a server after
        it times out or is uncontactable
    """
    # Accept a single URL string as well as an iterable of them.
    url_list = [urls] if isinstance(urls, basestring) else urls
    self.servers = DowntimePronePool(
        [url.rstrip('/') for url in url_list], revival_delay)
    self.revival_delay = revival_delay
    self.timeout = timeout
    self.max_retries = max_retries
    self.logger = getLogger('pyelasticsearch')
    self.session = requests.session()

    # Capture from_python in a closure so the encoder class carries no
    # back-reference to this instance.
    convert = self.from_python

    class DateSavvyJsonEncoder(json.JSONEncoder):
        def default(self, value):
            """Convert more Python data types to ES-understandable JSON."""
            return convert(value)

    self.json_encoder = DateSavvyJsonEncoder
def _concat(self, items):
"""
Return a comma-delimited concatenation of the elements of ``items``,
with any occurrences of "_all" omitted.
If ``items`` is a string, promote it to a 1-item list.
"""
# TODO: Why strip out _all?
if items is None:
return ''
if isinstance(items, basestring):
items = [items]
return ','.join(i for i in items if i != '_all')
@classmethod
def _to_query(cls, obj):
    """
    Render a native-Python value as a query-string representation.

    Handles strings, bools, numbers, datetimes, and (nested) list/tuple
    values; raises TypeError for anything else.
    """
    # Quick and dirty thus far
    if isinstance(obj, basestring):
        return obj
    if isinstance(obj, bool):
        # bool subclasses int, so it must be checked before the numeric
        # branch below.
        return 'true' if obj else 'false'
    if isinstance(obj, (long, int, float)):
        return str(obj)
    if isinstance(obj, (list, tuple)):
        return ','.join(cls._to_query(item) for item in obj)
    as_iso = _iso_datetime(obj)
    if as_iso:
        return as_iso
    raise TypeError("_to_query() doesn't know how to represent %r in an ES"
                    " query string." % obj)
def _send_request(self,
method,
path_components,
body='',
query_params=None,
encode_body=True):
"""
Send an HTTP request to ES, and return the JSON-decoded response.
Retry the request on different servers if the first one is down and
``self.max_retries`` > 0.
:arg method: An HTTP method, like "GET"
:arg path_components: An iterable of path components, to be joined by
"/"
:arg body: The request body
:arg query_params: A map of querystring param names to values or None
:arg encode_body: Whether to encode the body of the request as JSON
"""
def join_path(path_components):
"""Smush together the path components, ignoring empty ones."""
path = '/'.join(str(p) for p in path_components if p)
if not path.startswith('/'):
path = '/' + path
return path
path = join_path(path_components)
if query_params:
path = '?'.join(
[path, urlencode(dict((k, self._to_query(v)) for k, v in
query_params.iteritems()))])
kwargs = ({'data': self._encode_json(body) if encode_body else body}
if body else {})
req_method = getattr(self.session, method.lower())
#.........这里部分代码省略.........