This article collects typical usage examples of the elasticsearch_dsl.Search method in Python. If you are wondering how exactly elasticsearch_dsl.Search is used, how to call it, or what real-world examples look like, the curated code samples below may help. You can also explore further usage examples from the elasticsearch_dsl module it belongs to.
The following presents 15 code examples of the elasticsearch_dsl.Search method, sorted by popularity by default.
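Before the individual examples, here is a minimal sketch of the basic Search workflow, assuming a local cluster; the host, index name and field names ('my-index', 'title', 'created') are placeholders rather than values taken from the examples below.

from elasticsearch_dsl import Search, connections

# Register a default connection (host is an assumption).
connections.create_connection(hosts=['http://localhost:9200'])

# Build the query lazily, then execute it.
s = Search(index='my-index') \
        .query('match', title='python') \
        .filter('range', created={'gte': 'now-30d'})
s = s[0:10]                      # pagination via slicing (from=0, size=10)
response = s.execute()
for hit in response:
    print(hit.meta.score, hit.title)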
Example 1: get_highlights
# Required module import: import elasticsearch_dsl [as alias]
# Or: from elasticsearch_dsl import Search [as alias]
def get_highlights(self, text):
    # query top 10 guesses
    s = Search(index='qb_ir_instance_of')[0:10].query('multi_match', query=text,
                                                      fields=['wiki_content', 'qb_content', 'source_content'])
    s = s.highlight('qb_content').highlight('wiki_content')
    results = list(s.execute())
    guess = results[0]  # take the best answer
    _highlights = guess.meta.highlight

    try:
        wiki_content = list(_highlights.wiki_content)
    except AttributeError:
        wiki_content = None

    try:
        qb_content = list(_highlights.qb_content)
    except AttributeError:
        qb_content = None

    highlights = {'wiki': wiki_content,
                  'qb': qb_content,
                  'guess': guess.page}
    return highlights
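Example 1 builds Search without a using= argument, so it relies on a default connection having been registered elsewhere in that project; a minimal setup sketch (the host value is an assumption):

from elasticsearch_dsl import connections

connections.create_connection(hosts=['http://localhost:9200'], timeout=20)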
Example 2: build_query
# Required module import: import elasticsearch_dsl [as alias]
# Or: from elasticsearch_dsl import Search [as alias]
def build_query(self, level, page, size, limit, keywords=None):
    s = Search(using=self.client, index=self.index_name)
    if level:
        ls = []
        for k in levels:
            if levels[k] <= levels[level]:
                ls.append(k.lower())
        s = s.query("terms", levelname=ls)
    if page and size:
        s = s[(page - 1) * size:page * size]
    if keywords:
        s = s.query("match", message=keywords)
    if limit:
        now = datetime.now()
        start_time = now - timedelta(days=int(limit))
        s = s.query("range", timestamp={"gte": format_date(start_time), "lte": format_date(now)})
    s = s.sort({"timestamp": {"order": "desc", "unmapped_type": "date"}})
    return s
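A hypothetical call site for build_query (the log_store object and the printed field names are assumptions based on the query above); the method returns an unexecuted Search, so the caller decides when the request is actually sent:

s = log_store.build_query(level='ERROR', page=1, size=20, limit=7, keywords='timeout')
response = s.execute()
print(response.hits.total)
for hit in response:
    print(hit.timestamp, hit.levelname, hit.message)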
Example 3: bollinger_band
# Required module import: import elasticsearch_dsl [as alias]
# Or: from elasticsearch_dsl import Search [as alias]
def bollinger_band(index='cf_etf_hist_price', start_date='2018-12-26', end_date='2019-03-25', symbol='rfem'):
    ESLowLevelClientByConnection.get_instance()
    search = Search(index=index, using='high_level_client')[0:0]
    search.query = Q(Bool(must=[Range(date={'gte': start_date, 'lte': end_date}), Term(symbol=symbol)]))
    aggs = A(DateHistogram(field='date', interval='1d', format='yyyy-MM-dd', min_doc_count=1))
    aggs_tp = A(ScriptedMetric(init_script='state.totals=[]',
                               map_script='state.totals.add((doc.high.value+doc.low.value+doc.close.value)/3)',
                               combine_script='double total=0; for (t in state.totals) {total += t} return total',
                               reduce_script='double total=0; for (t in states) {total += t} return total'))
    aggs_moving_avg = A(MovingAvg(model='simple', window=20, buckets_path='tp.value'))
    aggs_bbu = A(BucketScript(buckets_path={'SMA': '20_trading_days_moving_avg'}, script='params.SMA + 0.5'))
    aggs_bbl = A(BucketScript(buckets_path={'SMA': '20_trading_days_moving_avg'}, script='params.SMA - 0.5'))
    search.aggs.bucket('Bollinger_band', aggs).metric('tp', aggs_tp).pipeline(
        '20_trading_days_moving_avg', aggs_moving_avg).pipeline('BBU', aggs_bbu).pipeline('BBL', aggs_bbl)
    response = search.execute()
    print(response.to_dict())
Example 4: search
# Required module import: import elasticsearch_dsl [as alias]
# Or: from elasticsearch_dsl import Search [as alias]
def search(self):
    """
    Construct the Search object.
    """
    s = Search(doc_type=self.doc_types, using=es.client,
               index=es.index_name)
    # don't return any fields, just the metadata
    s = s.fields([])
    # Sort from parameters
    s = s.sort(*self.sorts)
    # Paginate from parameters
    s = s[self.page_start:self.page_end]
    # Same construction as parent class
    # Allows to give the same signature as simple search
    # ie. Response(data) instead of Response(search, data)
    return s.response_class(partial(SearchResult, self))
Example 5: get_scan_generic_format
# Required module import: import elasticsearch_dsl [as alias]
# Or: from elasticsearch_dsl import Search [as alias]
def get_scan_generic_format(client, now, last_fetch_timestamp=None):
    """Gets a scan object in generic format"""
    # if method is simple date - convert the date string to datetime
    es = client.es
    time_field = client.time_field
    fetch_index = client.fetch_index
    if not fetch_index:
        fetch_index = '_all'
    if time_field:
        query = QueryString(query=time_field + ':*')
        range_field = (
            {time_field: {'gt': last_fetch_timestamp, 'lte': now}}
            if last_fetch_timestamp
            else {time_field: {'lte': now}}
        )
        search = Search(using=es, index=fetch_index).filter({'range': range_field}).query(query)
    else:
        search = Search(using=es, index=fetch_index).query(QueryString(query=client.query))
    return search
Example 6: get_scan_insight_format
# Required module import: import elasticsearch_dsl [as alias]
# Or: from elasticsearch_dsl import Search [as alias]
def get_scan_insight_format(client, now, last_fetch_timestamp=None, feed_type=None):
    """Gets a scan object in insight format"""
    time_field = client.time_field
    range_field = (
        {time_field: {'gt': last_fetch_timestamp, 'lte': now}}
        if last_fetch_timestamp
        else {time_field: {'lte': now}}
    )
    es = client.es
    query = QueryString(query=time_field + ":*")
    indices = client.fetch_index
    if feed_type == FEED_TYPE_CORTEX_MT:
        indices = '*-shared*'
        tenant_hash = demisto.getIndexHash()
        if tenant_hash:
            # all shared indexes minus this tenant shared
            indices += f',-*{tenant_hash}*-shared*'
    elif not indices:
        indices = '_all'
    search = Search(using=es, index=indices).filter({'range': range_field}).query(query)
    return search
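A consumption sketch for the two scan helpers above (the client object and timestamp values are assumptions): both return an unexecuted Search, which is typically drained with scan() when the result set may be large:

search = get_scan_generic_format(client, now='2024-01-01T00:00:00Z',
                                 last_fetch_timestamp='2023-12-31T00:00:00Z')
for hit in search.scan():   # server-side scrolling over every match
    print(hit.meta.index, hit.to_dict())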
Example 7: es_get_accounts
# Required module import: import elasticsearch_dsl [as alias]
# Or: from elasticsearch_dsl import Search [as alias]
def es_get_accounts(main_chain='eos', user_id=None, start_vol=None, end_vol=None, from_idx=0, size=100,
                    order='totalEos', fields=None):
    index = '{}_account'.format(main_chain)
    if not fields:
        fields = ['id', 'timestamp', 'updateTimestamp', 'userId', 'totalEos', 'liquidEos', 'stackedEos',
                  'unstackingEos']
    if user_id:
        s = Search(using=es_client, index=index, doc_type='doc') \
            .filter('term', userId=user_id)
    elif start_vol and end_vol:
        range = {order: {'gte': start_vol, 'lt': end_vol}}
        s = Search(using=es_client, index=index, doc_type='doc') \
            .source(include=fields) \
            .filter('range', **range)
        s = s.sort({order: {"order": "desc"}})
    else:
        s = Search(using=es_client, index=index, doc_type='doc').source(include=fields)
        s = s.sort({order: {"order": "desc"}})
    resp = s[from_idx:from_idx + size].execute()
    return es_resp_to_payload(resp)
Example 8: es_get_statistic
# Required module import: import elasticsearch_dsl [as alias]
# Or: from elasticsearch_dsl import Search [as alias]
def es_get_statistic(security_item, the_date=None, start_date=None, end_date=None, level='day',
                     from_idx=0, size=500):
    security_item = to_security_item(security_item)
    index = get_es_statistic_index(security_type=security_item['type'], exchange=security_item['exchange'],
                                   level=level)
    # The daily bar for a single day is fetched directly by document id
    if level == 'day' and the_date:
        doc_id = '{}_{}'.format(security_item['id'], to_time_str(the_date))
        return es_client.get_source(index=index, doc_type='doc', id=doc_id)
    elif start_date and end_date:
        s = Search(using=es_client, index=index, doc_type='doc') \
            .filter('term', code=security_item['code']) \
            .filter('range', timestamp={'gte': start_date, 'lte': end_date}) \
            .sort({"timestamp": {"order": "asc"}})
        resp = s[from_idx:from_idx + size].execute()
        return es_resp_to_payload(resp)
Example 9: get_trade_history
# Required module import: import elasticsearch_dsl [as alias]
# Or: from elasticsearch_dsl import Search [as alias]
def get_trade_history(size=10, from_date='2015-10-10', to_date='now', sort_by='-operation_id_num',
                      search_after=None, base="1.3.0", quote="1.3.121"):
    s = Search(using=es, index="bitshares-*")
    s = s.extra(size=size)
    if search_after and search_after != '':
        s = s.extra(search_after=search_after.split(','))

    q = Q()
    q = q & Q("match", operation_type=4)
    q = q & Q("match", operation_history__op_object__is_maker=True)
    q = q & Q("match", operation_history__op_object__fill_price__base__asset_id=base)
    q = q & Q("match", operation_history__op_object__fill_price__quote__asset_id=quote)

    range_query = Q("range", block_data__block_time={'gte': from_date, 'lte': to_date})
    s.query = q & range_query

    s = s.sort(*sort_by.split(','))
    response = s.execute()
    verify_es_response(response)
    return [hit.to_dict() for hit in response]
Example 10: get_daily_volume
# Required module import: import elasticsearch_dsl [as alias]
# Or: from elasticsearch_dsl import Search [as alias]
def get_daily_volume(self, from_date, to_date):
    s = Search(using='operations', index="bitshares-*")
    s = s.extra(size=0)
    s = s.query('bool', filter=[
        Q('term', operation_type=4),
        Q('range', block_data__block_time={'gte': from_date, 'lte': to_date}),
        Q('term', operation_history__op_object__fill_price__quote__asset_id__keyword=config.CORE_ASSET_ID)
    ])

    a = A('date_histogram', field='block_data.block_time', interval='1d', format='yyyy-MM-dd') \
        .metric('volume', 'sum', field='operation_history.op_object.fill_price.quote.amount')
    s.aggs.bucket('volume_over_time', a)

    response = s.execute()

    daily_volumes = []
    for daily_volume in response.aggregations.volume_over_time.buckets:
        daily_volumes.append({'date': daily_volume.key_as_string, 'volume': daily_volume.volume.value})
    return daily_volumes
Example 11: _get_notifications_by_email
# Required module import: import elasticsearch_dsl [as alias]
# Or: from elasticsearch_dsl import Search [as alias]
def _get_notifications_by_email(self, email):
    search = elasticsearch_dsl.Search(using=self.client, index=self.config['ELASTICSEARCH_INDEX'],
                                      doc_type=self.config['ELASTICSEARCH_DOCTYPE'])
    query = self.create_query_for_email(search, email)
    # logger.debug("query:\n{!s}", pprint.pformat(query.to_dict()))
    try:
        response = query.execute()
    except elasticsearch.exceptions.ElasticsearchException:
        logger.exception("Exception caught in Elasticsearch query:\n index: {!r}\n doc_type: {!r}\n"
                         " query: {!s}".format(self.config['ELASTICSEARCH_INDEX'],
                                               self.config['ELASTICSEARCH_DOCTYPE'],
                                               pprint.pformat(query.to_dict())))
    # logger.debug("response:\n{!s}", pprint.pformat(response.to_dict()))
    return response.hits.hits
Example 12: run
# Required module import: import elasticsearch_dsl [as alias]
# Or: from elasticsearch_dsl import Search [as alias]
def run(self):
    s = Search(using=self.esclient, index=self.esIndex)
    count = 0
    while True:
        try:
            # response = s.execute(True)
            response = []
            for hit in s.scan():
                response.append(hit)
            if count % 10 == 0:
                count = 0
                logging.info("Fetched registered faces from Elastic Search. Number of records found: %d",
                             len(response))
            facematch.update_known_faces(response)
            count = count + 1
        except Exception as e:
            logging.exception("Failed to get registered faces from Elastic Search.")
        # Sleep for 60 secs
        time.sleep(60)
Example 13: index_search_document
# Required module import: import elasticsearch_dsl [as alias]
# Or: from elasticsearch_dsl import Search [as alias]
def index_search_document(self, *, index: str) -> None:
    """
    Create or replace search document in named index.

    Checks the local cache to see if the document has changed,
    and if not aborts the update, else pushes to ES, and then
    resets the local cache. Cache timeout is set as "cache_expiry"
    in the settings, and defaults to 60s.
    """
    cache_key = self.search_document_cache_key
    new_doc = self.as_search_document(index=index)
    cached_doc = cache.get(cache_key)
    if new_doc == cached_doc:
        logger.debug("Search document for %r is unchanged, ignoring update.", self)
        return
    cache.set(cache_key, new_doc, timeout=get_setting("cache_expiry", 60))
    get_client().index(
        index=index,
        doc_type=self.search_doc_type,
        body=new_doc,
        id=self.pk,  # type: ignore
    )
Example 14: execute
# Required module import: import elasticsearch_dsl [as alias]
# Or: from elasticsearch_dsl import Search [as alias]
def execute(
    cls,
    search: Search,
    search_terms: str = "",
    user: Optional[AbstractBaseUser] = None,
    reference: Optional[str] = "",
    save: bool = True,
) -> SearchQuery:
    """Create a new SearchQuery instance and execute a search against ES."""
    warnings.warn(
        "Deprecated - please use `execute_search` function instead.",
        DeprecationWarning,
    )
    return execute_search(
        search, search_terms=search_terms, user=user, reference=reference, save=save
    )
Example 15: __list_uniques
# Required module import: import elasticsearch_dsl [as alias]
# Or: from elasticsearch_dsl import Search [as alias]
def __list_uniques(self, date_range, field_name):
    """Retrieve a list of unique values in a given field within a date range.

    :param date_range: dict mapping a date field to its range bounds, applied as a 'range' filter.
    :param field_name: field from which unique values are listed.
    :return: list of unique values.
    """
    # Get project list
    s = Search(using=self._es_conn, index=self._es_index)
    s = s.filter('range', **date_range)
    # from:to parameters (=> from: 0, size: 0)
    s = s[0:0]
    s.aggs.bucket('uniques', 'terms', field=field_name, size=1000)

    response = s.execute()

    uniques_list = []
    for item in response.aggregations.uniques.buckets:
        uniques_list.append(item.key)
    return uniques_list
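For comparison, a hypothetical standalone version of the same terms-aggregation pattern outside the class; the connection host, index and field names ('git', 'grimoire_creation_date', 'author_uuid') are assumptions, not values from the example above:

from elasticsearch_dsl import Search, connections

connections.create_connection(hosts=['http://localhost:9200'])

s = Search(index='git')
s = s.filter('range', grimoire_creation_date={'gte': '2019-01-01', 'lt': '2020-01-01'})
s = s[0:0]  # aggregation only, return no hits
s.aggs.bucket('uniques', 'terms', field='author_uuid', size=1000)
uniques = [bucket.key for bucket in s.execute().aggregations.uniques.buckets]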