This article collects typical usage examples of the Python class pyelasticsearch.downtime.DowntimePronePool. If you are unsure what DowntimePronePool does or how to use it, the hand-picked class code examples below should help.
Five code examples of the DowntimePronePool class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help surface better Python code examples.
Example 1: __init__
def __init__(self, urls, timeout=60, max_retries=0, revival_delay=300):
    """
    :arg timeout: Number of seconds to wait for each request before raising
        Timeout
    :arg max_retries: How many other servers to try, in series, after a
        request times out or a connection fails
    :arg revival_delay: Number of seconds for which to avoid a server after
        it times out or is uncontactable
    """
    if isinstance(urls, basestring):
        urls = [urls]
    urls = [u.rstrip('/') for u in urls]
    self.servers = DowntimePronePool(urls, revival_delay)
    self.revival_delay = revival_delay
    self.timeout = timeout
    self.max_retries = max_retries
    self.logger = getLogger('pyelasticsearch')
    self.session = requests.session()

    json_converter = self.from_python

    class DateSavvyJsonEncoder(json.JSONEncoder):
        def default(self, value):
            """Convert more Python data types to ES-understandable JSON."""
            return json_converter(value)

    self.json_encoder = DateSavvyJsonEncoder
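
For orientation, here is a minimal usage sketch of this constructor, assuming it belongs to pyelasticsearch's ElasticSearch client; the localhost URL and argument values are illustrative, not taken from the snippet above.

from pyelasticsearch import ElasticSearch

# Hypothetical usage: a single URL string is accepted and normalized.
es = ElasticSearch('http://localhost:9200/', timeout=30, max_retries=2)
# The trailing slash is stripped and the string is promoted to a one-item
# list, so es.servers ends up as a DowntimePronePool over
# ['http://localhost:9200'] with the default 300-second revival_delay.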
Example 2: __init__
def __init__(self, urls, timeout=60, max_retries=0, revival_delay=300, auth=None):
    """
    :arg urls: A URL or iterable of URLs of ES nodes. These are full URLs
        with port numbers, like ``http://elasticsearch.example.com:9200``.
    :arg timeout: Number of seconds to wait for each request before raising
        Timeout
    :arg max_retries: How many other servers to try, in series, after a
        request times out or a connection fails
    :arg revival_delay: Number of seconds for which to avoid a server after
        it times out or is uncontactable
    :arg auth: Optional HTTP Basic Auth tuple like ``('user', 'pass')``.
    """
    if isinstance(urls, basestring):
        urls = [urls]
    urls = [u.rstrip('/') for u in urls]
    self.servers = DowntimePronePool(urls, revival_delay)
    self.revival_delay = revival_delay
    self.timeout = timeout
    self.max_retries = max_retries
    self.logger = getLogger('pyelasticsearch')
    self.session = requests.session()
    if auth:
        self.session.auth = auth

    json_converter = self.from_python

    class JsonEncoder(json.JSONEncoder):
        def default(self, value):
            """Convert more Python data types to ES-understandable JSON."""
            return json_converter(value)

    self.json_encoder = JsonEncoder
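
A sketch of how the ``auth`` argument of this variant might be used; the node URLs and credentials are placeholders, and the call assumes this constructor is exposed as the ElasticSearch client.

# Hypothetical usage: HTTP Basic Auth is attached to the shared requests
# session, so every request to either node carries the credentials.
es = ElasticSearch(['http://node1:9200', 'http://node2:9200'],
                   auth=('user', 'pass'),
                   revival_delay=120)
# Both URLs feed one DowntimePronePool, so a node that times out is
# avoided for 120 seconds before being tried again.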
Example 3: __init__
def __init__(self, urls, timeout=60, max_retries=0, revival_delay=300):
    """
    :arg urls: A URL or iterable of URLs of ES nodes. These are full URLs
        with port numbers, like ``http://elasticsearch.example.com:9200``.
    :arg timeout: Number of seconds to wait for each request before raising
        Timeout
    :arg max_retries: How many other servers to try, in series, after a
        request times out or a connection fails
    :arg revival_delay: Number of seconds for which to avoid a server after
        it times out or is uncontactable
    """
    if isinstance(urls, string_types):
        urls = [urls]
    urls = [u.rstrip("/") for u in urls]
    self.servers = DowntimePronePool(urls, revival_delay)
    self.revival_delay = revival_delay
    self.timeout = timeout
    self.max_retries = max_retries
    self.logger = getLogger("pyelasticsearch")
    self.session = requests.session()
    self.json_encoder = JsonEncoder
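
The URL handling is the same in all of these constructors; the following sketch traces the normalization step by hand. The expected value in the comment is derived from the code above, not from running the library.

from pyelasticsearch.downtime import DowntimePronePool

urls = 'http://elasticsearch.example.com:9200/'
if isinstance(urls, str):               # string_types covers str/unicode
    urls = [urls]
urls = [u.rstrip('/') for u in urls]
# urls == ['http://elasticsearch.example.com:9200']
servers = DowntimePronePool(urls, 300)  # second argument is revival_delay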
Example 4: ElasticSearch
class ElasticSearch(object):
    """
    An object which manages connections to elasticsearch and acts as a
    go-between for API calls to it

    This object is thread-safe. You can create one instance and share it
    among all threads.
    """

    def __init__(self, urls, timeout=60, max_retries=0, revival_delay=300):
        """
        :arg urls: A URL or iterable of URLs of ES nodes. These are full URLs
            with port numbers, like ``http://elasticsearch.example.com:9200``.
        :arg timeout: Number of seconds to wait for each request before raising
            Timeout
        :arg max_retries: How many other servers to try, in series, after a
            request times out or a connection fails
        :arg revival_delay: Number of seconds for which to avoid a server after
            it times out or is uncontactable
        """
        if isinstance(urls, string_types):
            urls = [urls]
        urls = [u.rstrip("/") for u in urls]
        self.servers = DowntimePronePool(urls, revival_delay)
        self.revival_delay = revival_delay
        self.timeout = timeout
        self.max_retries = max_retries
        self.logger = getLogger("pyelasticsearch")
        self.session = requests.session()
        self.json_encoder = JsonEncoder

    def _concat(self, items):
        """
        Return a comma-delimited concatenation of the elements of ``items``,
        with any occurrences of "_all" omitted.

        If ``items`` is a string, promote it to a 1-item list.
        """
        # TODO: Why strip out _all?
        if items is None:
            return ""
        if isinstance(items, string_types):
            items = [items]
        return ",".join(i for i in items if i != "_all")

    def _to_query(self, obj):
        """
        Convert a native-Python object to a unicode or bytestring
        representation suitable for a query string.
        """
        # Quick and dirty thus far
        if isinstance(obj, string_types):
            return obj
        if isinstance(obj, bool):
            return "true" if obj else "false"
        if isinstance(obj, integer_types):
            return str(obj)
        if isinstance(obj, float):
            return repr(obj)  # str loses precision.
        if isinstance(obj, (list, tuple)):
            return ",".join(self._to_query(o) for o in obj)
        iso = _iso_datetime(obj)
        if iso:
            return iso
        raise TypeError("_to_query() doesn't know how to represent %r in an ES"
                        " query string." % obj)

    def _utf8(self, thing):
        """Convert any arbitrary ``thing`` to a utf-8 bytestring."""
        if isinstance(thing, binary_type):
            return thing
        if not isinstance(thing, text_type):
            thing = text_type(thing)
        return thing.encode("utf-8")

    def _join_path(self, path_components):
        """
        Smush together the path components, omitting '' and None ones.

        Unicodes get encoded to strings via utf-8. Incoming strings are assumed
        to be utf-8-encoded already.
        """
        path = "/".join(quote_plus(self._utf8(p), "") for p in path_components
                        if p is not None and p != "")
        if not path.startswith("/"):
            path = "/" + path
        return path

    def send_request(self, method, path_components, body="",
                     query_params=None, encode_body=True):
        """
        Send an HTTP request to ES, and return the JSON-decoded response.

        This is mostly an internal method, but it also comes in handy if you
        need to use a brand new ES API that isn't yet explicitly supported by
        pyelasticsearch, while still taking advantage of our connection pooling
        and retrying.

        Retry the request on different servers if the first one is down and
        ``self.max_retries`` > 0.

        :arg method: An HTTP method, like "GET"
#......... some code omitted here .........
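
The helpers shown above are easy to trace by hand. Below is a short sketch of the values they produce, derived from the code as written; the ElasticSearch instance exists only to have something to call them on.

es = ElasticSearch('http://localhost:9200')

es._concat(['tweets', 'users'])   # -> 'tweets,users'
es._concat('_all')                # -> ''   ("_all" entries are dropped)
es._to_query(True)                # -> 'true'
es._to_query([1, 2, 3])           # -> '1,2,3'
es._to_query(1.5)                 # -> '1.5' (repr keeps float precision)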
Example 5: ElasticSearch
class ElasticSearch(object):
    """
    An object which manages connections to elasticsearch and acts as a
    go-between for API calls to it
    """

    def __init__(self, urls, timeout=60, max_retries=0, revival_delay=300):
        """
        :arg timeout: Number of seconds to wait for each request before raising
            Timeout
        :arg max_retries: How many other servers to try, in series, after a
            request times out or a connection fails
        :arg revival_delay: Number of seconds for which to avoid a server after
            it times out or is uncontactable
        """
        if isinstance(urls, basestring):
            urls = [urls]
        urls = [u.rstrip('/') for u in urls]
        self.servers = DowntimePronePool(urls, revival_delay)
        self.revival_delay = revival_delay
        self.timeout = timeout
        self.max_retries = max_retries
        self.logger = getLogger('pyelasticsearch')
        self.session = requests.session()

        json_converter = self.from_python

        class DateSavvyJsonEncoder(json.JSONEncoder):
            def default(self, value):
                """Convert more Python data types to ES-understandable JSON."""
                return json_converter(value)

        self.json_encoder = DateSavvyJsonEncoder

    def _concat(self, items):
        """
        Return a comma-delimited concatenation of the elements of ``items``,
        with any occurrences of "_all" omitted.

        If ``items`` is a string, promote it to a 1-item list.
        """
        # TODO: Why strip out _all?
        if items is None:
            return ''
        if isinstance(items, basestring):
            items = [items]
        return ','.join(i for i in items if i != '_all')

    @classmethod
    def _to_query(cls, obj):
        """Convert a native-Python object to a query string representation."""
        # Quick and dirty thus far
        if isinstance(obj, basestring):
            return obj
        if isinstance(obj, bool):
            return 'true' if obj else 'false'
        if isinstance(obj, (long, int, float)):
            return str(obj)
        if isinstance(obj, (list, tuple)):
            return ','.join(cls._to_query(o) for o in obj)
        iso = _iso_datetime(obj)
        if iso:
            return iso
        raise TypeError("_to_query() doesn't know how to represent %r in an ES"
                        " query string." % obj)

    def _send_request(self,
                      method,
                      path_components,
                      body='',
                      query_params=None,
                      encode_body=True):
        """
        Send an HTTP request to ES, and return the JSON-decoded response.

        Retry the request on different servers if the first one is down and
        ``self.max_retries`` > 0.

        :arg method: An HTTP method, like "GET"
        :arg path_components: An iterable of path components, to be joined by
            "/"
        :arg body: The request body
        :arg query_params: A map of querystring param names to values or None
        :arg encode_body: Whether to encode the body of the request as JSON
        """
        def join_path(path_components):
            """Smush together the path components, ignoring empty ones."""
            path = '/'.join(str(p) for p in path_components if p)
            if not path.startswith('/'):
                path = '/' + path
            return path

        path = join_path(path_components)
        if query_params:
            path = '?'.join(
                [path, urlencode(dict((k, self._to_query(v)) for k, v in
                                      query_params.iteritems()))])

        kwargs = ({'data': self._encode_json(body) if encode_body else body}
                  if body else {})
        req_method = getattr(self.session, method.lower())
#......... some code omitted here .........
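
The omitted tail of ``_send_request`` is where DowntimePronePool does its work. Below is a hedged, standalone sketch of that failover pattern; the ``get``/``mark_dead``/``mark_live`` method names and the return shape of ``get()`` are assumptions about the pool's API, not something shown in the snippet.

import requests
from requests.exceptions import ConnectionError, Timeout
from pyelasticsearch.downtime import DowntimePronePool

def fetch_with_failover(pool, session, path, timeout=60, max_retries=1):
    """Sketch: retry a GET against whichever pooled node answers."""
    for attempt in range(max_retries + 1):
        server_url, was_dead = pool.get()    # assumed: (url, came_from_dead_list)
        try:
            resp = session.get(server_url + path, timeout=timeout)
        except (ConnectionError, Timeout):
            pool.mark_dead(server_url)       # shelve this node for revival_delay seconds
            if attempt == max_retries:
                raise
        else:
            if was_dead:
                pool.mark_live(server_url)   # the node answered; put it back in rotation
            return resp.json()

pool = DowntimePronePool(['http://node1:9200', 'http://node2:9200'], 300)
# fetch_with_failover(pool, requests.session(), '/_cluster/health')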