本文整理汇总了Python中SolrClient.IndexQ类的典型用法代码示例。如果您正苦于以下问题:Python IndexQ类的具体用法?Python IndexQ怎么用?Python IndexQ使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了IndexQ类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_add_string
def test_add_string(self):
    """A plain string passed to add() is written verbatim to the queue file."""
    queue = IndexQ(test_config['indexqbase'], 'testq')
    payload = 'asd'
    written_path = queue.add(payload)
    with open(written_path) as fh:
        self.assertEqual(payload, fh.read())
示例2: test_add_callback_no_size
def test_add_callback_no_size(self):
    """add() must invoke the supplied callback with the path it just wrote."""
    docs = self.rand_docs.get_docs(5)
    index = IndexQ(test_config['indexqbase'], 'testq')
    seen_paths = []

    def record(path):
        seen_paths.append(path)

    written = index.add(docs[0], callback=record)
    # assertIn reports both values on failure, unlike assertTrue(x in y).
    self.assertIn(written, seen_paths)
示例3: test_index_bad_send_method
def test_index_bad_send_method(self):
    """index() raises AttributeError when given an unknown send_method name."""
    solr = SolrClient(test_config['SOLR_SERVER'],
                      devel=True, auth=test_config['SOLR_CREDENTIALS'])
    queue = IndexQ(test_config['indexqbase'], 'testq')
    with self.assertRaises(AttributeError):
        queue.index(solr,
                    test_config['SOLR_COLLECTION'],
                    send_method='Doesnt exist')
示例4: test_complete_compress_basic
def test_complete_compress_basic(self):
    """Finalized files on a compressed queue survive complete() and still exist."""
    log = logging.getLogger()
    index = IndexQ(test_config['indexqbase'], 'testq', size=1, log=log,
                   compress=True)
    # NOTE(review): self.docs[1:10] feeds 9 docs, not 10 — behavior preserved.
    for item in self.docs[1:10]:
        index.add(item, finalize=True)
    completed = []
    for item in index.get_all_as_list():
        completed.append(index.complete(item))
    # Plain loop replaces the original side-effect list comprehension.
    for path in completed:
        self.assertTrue(os.path.exists(path))
示例5: test_get_multi_with_sentinel
def test_get_multi_with_sentinel(self):
    """Docs queued before and after the sentinel are all indexed; the sentinel is not."""
    index = IndexQ(test_config['indexqbase'], 'testq', size=1,
                   log=logging.getLogger())
    queue = index.get_multi_q(sentinel='BLAH')
    first_batch = self.rand_docs.get_docs(5000)
    second_batch = self.rand_docs.get_docs(5000)
    for doc in first_batch + ['BLAH'] + second_batch:
        queue.put(doc)
    index.join_indexer()
    self.assertEqual(first_batch + second_batch,
                     index.get_all_json_from_indexq())
示例6: test_get_multi_q2
def test_get_multi_q2(self):
    """All 50k docs pushed through the multi-process queue reach the IndexQ."""
    index = IndexQ(test_config['indexqbase'], 'testq', size=1,
                   log=logging.getLogger())
    queue = index.get_multi_q()
    expected = self.rand_docs.get_docs(50000)
    for doc in expected:
        queue.put(doc)
    queue.put('STOP')
    index.join_indexer()
    self.assertEqual(expected, index.get_all_json_from_indexq())
示例7: test_ignore_fields
def test_ignore_fields(self):
    """
    Reindexer's default _ignore_fields must contain _version_ and
    product_name_exact.
    """
    index = IndexQ(test_config["indexqbase"], "test_reindexer", size=0)
    # 'dirname' instead of 'dir' — avoid shadowing the builtin; plain loop
    # replaces the original side-effect list comprehension.
    for dirname in ["_todo_dir", "_done_dir"]:
        for path in index.get_all_as_list(dir=dirname):
            os.remove(path)
    reindexer = Reindexer(source=self.solr, source_coll="source_coll", dest=index)
    for field in ["_version_", "product_name_exact"]:
        # assertIn reports the missing field on failure.
        self.assertIn(field, reindexer._ignore_fields)
示例8: test_thread_pool_low
def test_thread_pool_low(self):
    '''
    Index data using multiple threads.
    Verify that all concurrently added docs end up in the finalized file.
    '''
    # Fixed docstring: "Verity" typo and the truncated second sentence.
    docs = self.rand_docs.get_docs(5)
    threads = 5
    index = IndexQ(test_config['indexqbase'], 'testq', size=1)
    with ThreadPool(threads) as p:
        p.map(index.add, docs)
    self.check_file_contents(index.add(finalize=True), docs)
示例9: test_remove_copy_fields_from_data
def test_remove_copy_fields_from_data(self):
    """After reindex(), no ignored (copy) field may remain in the exported docs."""
    index = IndexQ(test_config["indexqbase"], "test_reindexer", size=0)
    # 'dirname' avoids shadowing the builtin 'dir'; plain loop replaces the
    # original side-effect list comprehension.
    for dirname in ["_todo_dir", "_done_dir"]:
        for path in index.get_all_as_list(dir=dirname):
            os.remove(path)
    reindexer = Reindexer(source=self.solr, source_coll="source_coll", dest=index)
    reindexer.reindex()
    excluded_fields = reindexer._ignore_fields
    for doc in self.get_all_json_from_indexq(index):
        for field in excluded_fields:
            # The original only print()ed offending docs and could never fail;
            # assert so the test actually verifies the fields were removed.
            self.assertNotIn(field, doc)
示例10: test_complete_dir_rotate
def test_complete_dir_rotate(self):
    """complete() with rotate_complete routes finished files into dated dirs."""
    log = logging.getLogger()
    rotate_func = lambda: '{}/{}/{}'.format(dt.now().year, dt.now().month, dt.now().day)
    index = IndexQ(test_config['indexqbase'], 'testq', size=1, log=log,
                   rotate_complete=rotate_func)
    # Removed two unused locals from the original: dir_set (rotate_func() has
    # no side effects) and an unused self.rand_docs.get_docs(69) result.
    for item in self.docs[1:10]:
        index.add(item, finalize=True)
    completed = []
    for item in index.get_all_as_list():
        completed.append(index.complete(item))
    # Plain loop replaces the original side-effect list comprehension.
    for path in completed:
        self.assertTrue(os.path.exists(path))
示例11: test_buffer_list_75m
def test_buffer_list_75m(self):
    """Buffered adds roll over to a file within 90-100% of the size cap."""
    size = 75  # cap in megabytes (IndexQ size units per this test's math below)
    index = IndexQ(test_config['indexqbase'], 'testq', size=size)
    buff = []
    while True:
        doc = index.add(self.docs)
        # extend() replaces the original side-effect list comprehension.
        buff.extend(self.docs)
        # add() returns a str (the written file path) once the cap is reached.
        if type(doc) is str:
            break
    self.check_file_contents(doc, buff)
    self.assertLessEqual(os.path.getsize(doc), size * 1000000)
    self.assertGreaterEqual(os.path.getsize(doc), size * 1000000 * .90)
    os.remove(doc)
示例12: test_index_multiproc
def test_index_multiproc(self):
    """Index queued files with 10 threads, then verify each doc landed in Solr."""
    index = IndexQ(test_config['indexqbase'], 'testq')
    solr = SolrClient(test_config['SOLR_SERVER'], devel=True,
                      auth=test_config['SOLR_CREDENTIALS'])
    # Start from a clean collection so the per-doc count check is meaningful.
    solr.delete_doc_by_id(test_config['SOLR_COLLECTION'], '*')
    files = []
    for doc in self.docs:
        files.append(index.add(doc, finalize=True))
    index.index(solr, test_config['SOLR_COLLECTION'], threads=10)
    solr.commit(test_config['SOLR_COLLECTION'], openSearcher=True)
    for doc in self.docs:
        res = solr.query(test_config['SOLR_COLLECTION'],
                         {'q': 'id:{}'.format(doc['id'])})
        # assertEqual shows the actual count on failure; the original's unused
        # 'buff' list was removed.
        self.assertEqual(res.get_results_count(), 1)
示例13: test_thread_pool_mid
def test_thread_pool_mid(self):
    '''
    Index data using multiple threads.
    Verify that all 5000 concurrently added docs are present afterwards.
    '''
    # Fixed docstring: "Verity" typo and the truncated second sentence.
    docs = self.rand_docs.get_docs(5000)
    threads = 5
    index = IndexQ(test_config['indexqbase'], 'testq', size=1)
    with ThreadPool(threads) as p:
        p.map(index.add, docs)
    index.add(finalize=True)
    indexed = index.get_all_json_from_indexq()
    # Sort both sides by id: thread scheduling makes on-disk order arbitrary.
    by_id = lambda x: x['id']
    self.assertEqual(sorted(indexed, key=by_id), sorted(docs, key=by_id))
示例14: test_buffer_list_75m_dump_early
def test_buffer_list_75m_dump_early(self):
    """Force an early finalize once the in-memory buffer passes ~40MB."""
    size = 75
    index = IndexQ(test_config['indexqbase'], 'testq', size=size)
    buff = []
    while True:
        doc = index.add(self.docs)
        # extend() replaces the original side-effect list comprehension.
        buff.extend(self.docs)
        # add() returns an int (buffer size) until a file is written, so this
        # comparison is only reached while doc is numeric.
        if doc > 40000000:
            doc = index.add(finalize=True)
        # After finalize, add() returns the written file's path (str).
        if type(doc) is str:
            break
    self.check_file_contents(doc, buff)
    os.remove(doc)
示例15: test_buffer_dict_75m
def test_buffer_dict_75m(self):
    """Single-dict adds also roll over within 90-100% of the size cap."""
    cap = 75
    index = IndexQ(test_config['indexqbase'], 'testq', size=cap)
    added = []
    while True:
        picked = random.choice(self.docs)
        result = index.add(picked)
        added.append(picked)
        if type(result) is str:
            break
    self.check_file_contents(result, added)
    limit = cap * 1000000
    self.assertLessEqual(os.path.getsize(result), limit)
    self.assertGreaterEqual(os.path.getsize(result), limit * .90)
    os.remove(result)