本文整理汇总了Python中elasticsearch.client.IndicesClient类的典型用法代码示例。如果您正苦于以下问题:Python IndicesClient类的具体用法?Python IndicesClient怎么用?Python IndicesClient使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了IndicesClient类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: setUp
def setUp(self):
    """ Starts a new connector for every test
    """
    # Start each test from a fresh (empty) oplog checkpoint file.
    try:
        os.unlink("config.txt")
    except OSError:
        pass
    open("config.txt", "w").close()
    self.connector = Connector(
        address='%s:%s' % (mongo_host, self.primary_p),
        oplog_checkpoint='config.txt',
        target_url=elastic_pair,
        ns_set=['test.test'],
        u_key='_id',
        auth_key=None,
        doc_manager='mongo_connector/doc_managers/elastic_doc_manager.py',
        auto_commit_interval=0
    )
    # Clean out test databases
    try:
        self.elastic_doc._remove()
    except OperationFailed:
        try:
            # Create test.test index if necessary
            client = Elasticsearch(hosts=[elastic_pair])
            idx_client = IndicesClient(client)
            idx_client.create(index='test.test')
        except es_exceptions.TransportError:
            pass
    # NOTE(review): indentation was lost in extraction; placing drop() at
    # method level (after the except) is reconstructed -- confirm upstream.
    self.conn.test.test.drop()
    self.connector.start()
    # Wait until the connector has picked up the shard set and the ES side
    # of the target namespace is empty before any test runs.
    assert_soon(lambda: len(self.connector.shard_set) > 0)
    assert_soon(lambda: sum(1 for _ in self.elastic_doc._search()) == 0)
示例2: TestSingleDocSigTerms
class TestSingleDocSigTerms(TestCase):
    """Tests SingleDocSigTerms term-frequency extraction against a live
    Elasticsearch node started by es_runner."""

    def setUp(self):
        """Create a fresh single-document index for every test."""
        super(TestSingleDocSigTerms, self).setUp()
        self.es = Elasticsearch(hosts=['localhost:%d' % es_runner.es_state.port])
        self.ic = IndicesClient(self.es)
        self.index = 'single_doc_sigterms_test'
        self.doc_type = 'test-doc'
        self.field = 'text'
        # Drop any leftover index from a previous run so each test is isolated.
        if self.ic.exists(self.index):
            self.ic.delete(self.index)
        self.ic.create(self.index)
        # 'foo' x3, 'knark' x2, 'ba' x1, 'knirk' x1 -- the counts asserted below.
        self.es.create(self.index, self.doc_type,
                       {self.field: 'foo ba knark foo knirk knark foo'},
                       id='doc_1')

    def test_tf_for_doc_id(self):
        """Term frequencies for the single indexed document match the fixture."""
        sigterms = SingleDocSigTerms(self.es, self.index, self.doc_type, self.field, None)
        resp = dict(sigterms.tf_for_doc_id('doc_1'))
        # assertEqual, not the deprecated assertEquals alias (removed in Py3.12).
        self.assertEqual(4, len(resp))
        self.assertEqual(3, resp['foo'])
        self.assertEqual(2, resp['knark'])
        self.assertEqual(1, resp['ba'])
        self.assertEqual(1, resp['knirk'])
示例3: handle
def handle(self, *args, **options):
es = Elasticsearch(hosts=[{'host': 'localhost', 'port': 9200}])
fop=open('spider/management/commands/'+str(argv[2]), 'r')
inds = IndicesClient(es)
mapping={ "mappings": { "product_type": { "properties": { "code": { "type" : "string" },"name": {"type" : "string"},"img": {"type" : "string"},"url": {"type" : "string"},"price_reg": {"type" : "float"},"price_discount": {"type" : "float"}}}}}
if not inds.exists(index='gearbest_index'):
inds.create(index='gearbest_index',body=mapping)
print 'gearbest_index created'
for jsonline in fop:
jobj=loads(jsonline)
del jobj["_type"]
es.index(index="gearbest_index",doc_type='product_type', body=jobj, id=jobj['code'])
disc=0
reg=0
if len(jobj['price_discount'])>0:
disc = float(jobj['price_discount'][0])
if len(jobj['price_reg'])>0:
reg = float(jobj['price_reg'][0])
#insert="INSERT into 'price_gb' ('price','price_disc','code','date') values ("+str(reg)+", "+str(disc)+", '"+str(jobj['code'])+"', '"+str(datetime.today())+"')"
#cursor = connection.cursor()
#cursor.execute(insert)
add_price=Price_gb(price=reg,price_disc=disc,code=str(jobj['code']),date=datetime.date.today())
add_price.save()
print 'code='+str(jobj['code'])
示例4: main
def main():
# Define the globals
global index_names
global STARTED_TIMESTAMP
global es
global es_indices
try:
# Initiate the elasticsearch session using ES low-level client.
# By default nodes are randomized before passed into the pool and round-robin strategy is used for load balancing.
es = Elasticsearch(ES_HOSTS, timeout=30)
es_indices = IndicesClient(es)
except:
print("Could not connect to elasticsearch!")
sys.exit(1)
print("Creating indices.. \n"),
indices = generate_indices()
print("Done!\n")
print("GET Settings \n"),
print json.dumps(es_indices.get_settings(index="_all"), sort_keys=True,indent=4, separators=(',', ': '))
print("Done!\n")
# We will Clean up the indices by default
# Default: True
if CLEANUP:
print("Cleaning up created indices.. "),
cleanup_indices()
print("Done!\n")
示例5: _reset_mapping
def _reset_mapping(self, mapping_path):
    """Drop the configured ES index so its mapping can be rebuilt.

    Raises CommandError when the index does not exist.
    """
    target = settings.ES_INDEX
    indices = IndicesClient(es.get_es_handle())
    if indices.exists(target):
        self.stdout.write(str(indices.delete(index=target)))
    else:
        raise CommandError("Non existing index : %s" % target)
示例6: create_index
def create_index(name):
    """Create the ES index *name* using the mapping config from settings."""
    client = IndicesClient(get_es())
    payload = {}
    payload.update(settings.INDEX_MAPPINGS)
    response = client.create(name, json.dumps(payload))
    logger.debug('index create: ' + str(response))
示例7: create_index
def create_index():
    """Drop the 'physicians' index (best effort) before recreating it."""
    es = Elasticsearch()
    client = IndicesClient(es)
    try:
        client.delete('physicians')
    # Python 2 except syntax; a failed delete (e.g. missing index) is only printed.
    except Exception, e:
        print e
    # NOTE(review): snippet appears truncated here by the page extractor --
    # the index creation step presumably follows.
示例8: __mapFile
def __mapFile(self, json_map_file):
    """Read one JSON mapping file and apply it to the 'wow' index.

    The file's single top-level key names the doc type being mapped.
    """
    node = {'host': self.elasticsearch_host, 'port': self.elasticsearch_port}
    indices = IndicesClient(Elasticsearch([node]))
    with open(json_map_file) as handle:
        mapping = json.load(handle)
    type_name = list(mapping.keys())[0]
    indices.put_mapping(index='wow', doc_type=type_name, body=mapping)
示例9: create_index_if_not_exists
def create_index_if_not_exists(self):
    """ Check if index exists & if not exists create index & types & store their mappings. """
    indices = IndicesClient(self.es)
    if not indices.exists(index=[self.index_name]):
        es_mappings = ElasticSearchController.get_index_mapper_dict()
        indices.create(index=self.index_name, body={"mappings": es_mappings})
示例10: __createIndex
def __createIndex(self):
    """Recreate the 'wow' index from scratch, then apply every mapping file
    found in the map directory."""
    es = Elasticsearch([{'host': self.elasticsearch_host,
                         'port': self.elasticsearch_port}])
    indices = IndicesClient(es)
    if indices.exists(index='wow'):
        print("deleting old index")
        self.deleteIndex()
    indices.create(index='wow')
    for map_path in glob.glob(os.path.join(self.map_directory, '*')):
        print("MAP FILE: " + map_path)
        self.__mapFile(map_path)
示例11: status
def status(self):
    """Log document counts for the raw and enhanced article indices."""
    idx_client = IndicesClient(self.es)
    for idx in ['raw-article', 'enhanced-article']:
        # indexinfo() maps the logical name to the real ES index name.
        es_index = self.indexinfo(idx)[0]
        if idx_client.exists(es_index):
            self.logger.info("%s contains %s documents." % (idx, self.es.count(index=es_index)['count']))
            # NOTE(review): 'article' never appears in the list above, so this
            # branch is unreachable -- probably meant 'raw-article'. Confirm
            # intent before changing.
            if idx == 'article':
                query = {"query": {"term": {"status": 1}}}
                self.logger.info(
                    "%s articles have been processed." % self.es.count(index=es_index, body=query)['count'])
        else:
            self.logger.info("%s does not exist" % es_index)
示例12: main
def main():
    """Delete date-stamped ES indices that are more than args.old days old.

    Index names are matched on an embedded YYYY.MM.DD stamp.
    """
    client = Elasticsearch([{'host': args.host, 'port': args.port}])
    indices = IndicesClient(client)
    date_pattern = re.compile(u'(\d{4})\.(\d{2})\.(\d{2})', re.IGNORECASE | re.UNICODE)
    today = datetime.date.today()
    for name in indices.status()['indices']:
        found = date_pattern.search(name)
        if not found:
            continue
        stamped = datetime.date(year=int(found.group(1)),
                                month=int(found.group(2)),
                                day=int(found.group(3)))
        if (today - stamped).days > args.old:
            indices.delete(name)
示例13: delete_index
def delete_index(self, es):
    """
    Delete the dataset index, ignoring "bad request"/"not found" errors.

    :param es: Elasticsearch client instance
    :type es: elasticsearch.client.Elasticsearch
    :rtype : NewsgroupsDataset
    """
    IndicesClient(es).delete(index=self.es_index, ignore=[400, 404])
    return self
示例14: setUp
def setUp(self):
    """Empty ElasticSearch at the start of every test
    """
    try:
        self.elastic_doc._remove()
    except OperationFailed:
        # The wipe fails when the index is missing; create it and move on.
        try:
            # Create test.test index if necessary
            idx_client = IndicesClient(Elasticsearch(hosts=['localhost:9200']))
            idx_client.create(index='test.test')
        except es_exceptions.TransportError:
            pass
示例15: initialize
def initialize(self, conf, context):
    """Connect to Elasticsearch and ensure the current ISO-week index exists.

    :param conf: topology configuration mapping (host/port read from it)
    :param context: bolt context (unused here)
    """
    host = conf.get('zeit.recommend.elasticsearch.host', 'localhost')
    port = conf.get('zeit.recommend.elasticsearch.port', 9200)
    self.es = Elasticsearch(hosts=[{'host': host, 'port': port}])
    # Matcher for paginated/"komplettansicht" article URL segments;
    # presumably used to filter URLs elsewhere in the bolt -- TODO confirm.
    self.match = re.compile('seite-[0-9]|komplettansicht').match
    # Index name is '<year>-<isoweek>', e.g. '2014-7'.
    self.index = '%s-%s' % date.today().isocalendar()[:2]
    ic = IndicesClient(self.es)
    try:
        if not ic.exists(self.index):
            ic.create(self.index)
    except ConnectionError, e:  # Python 2 except syntax
        log('[UserIndexBolt] ConnectionError, index unreachable: %s' % e)
        return