This article collects typical usage examples of the Python class corehq.apps.userreports.sql.IndicatorSqlAdapter. If you have been wondering what exactly IndicatorSqlAdapter does and how to use it, the curated class examples here may help.
The following presents 15 code examples of the IndicatorSqlAdapter class, ordered by popularity by default.
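Taken together, the 15 examples exercise only a handful of IndicatorSqlAdapter methods. As a rough orientation, here is a minimal sketch of the typical lifecycle, assembled purely from the calls that appear in the examples below; the surrounding setup is illustrative, not an authoritative rendering of the class's full interface:

    from corehq.apps.userreports.models import DataSourceConfiguration
    from corehq.apps.userreports.sql import IndicatorSqlAdapter

    config = DataSourceConfiguration.get(indicator_config_id)
    adapter = IndicatorSqlAdapter(config)

    adapter.rebuild_table()         # drop and recreate the backing SQL table
    adapter.save(doc)               # write one document's indicator row; a no-op
                                    # if the doc doesn't match the data source filter
    q = adapter.get_query_object()  # SQLAlchemy-style query over the table
    table = adapter.get_table()     # the underlying SQLAlchemy Table object
    adapter.drop_table()            # tear the table down again

Note that Examples 8 and 9 use an older two-argument form, IndicatorSqlAdapter(get_engine(), config); the other examples construct the adapter from a config alone.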
Example 1: rebuild_indicators
def rebuild_indicators(indicator_config_id):
    is_static = indicator_config_id.startswith(CustomDataSourceConfiguration._datasource_id_prefix)
    if is_static:
        config = CustomDataSourceConfiguration.by_id(indicator_config_id)
    else:
        config = DataSourceConfiguration.get(indicator_config_id)
        # Save the start time now in case anything goes wrong. This way we'll be
        # able to see if the rebuild started a long time ago without finishing.
        config.meta.build.initiated = datetime.datetime.utcnow()
        config.save()

    adapter = IndicatorSqlAdapter(config)
    adapter.rebuild_table()

    couchdb = _get_db(config.referenced_doc_type)
    relevant_ids = get_doc_ids(config.domain, config.referenced_doc_type,
                               database=couchdb)
    for doc in iter_docs(couchdb, relevant_ids, chunksize=500):
        try:
            # save is a noop if the filter doesn't match
            adapter.save(doc)
        except DataError as e:
            logging.exception('problem saving document {} to table. {}'.format(doc['_id'], e))

    if not is_static:
        config.meta.build.finished = True
        config.save()
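In CommCare HQ, rebuild functions like this one are typically registered as asynchronous Celery tasks; the decorator is not shown above, so treat the asynchronous form below as an assumption. Either way, the caller only needs the data source's document id:

    # synchronous call with a data source config id
    rebuild_indicators(config._id)

    # asynchronous form, assuming the function is decorated as a Celery task
    rebuild_indicators.delay(config._id)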
Example 2: rebuild_indicators
def rebuild_indicators(indicator_config_id):
    config = _get_config_by_id(indicator_config_id)
    adapter = IndicatorSqlAdapter(config)
    couchdb = _get_db(config.referenced_doc_type)
    redis_client = get_redis_client().client.get_client()
    redis_key = _get_redis_key_for_config(config)

    if not is_static(indicator_config_id):
        # Save the start time now in case anything goes wrong. This way we'll be
        # able to see if the rebuild started a long time ago without finishing.
        config.meta.build.initiated = datetime.datetime.utcnow()
        config.meta.build.finished = False
        config.save()
        redis_key = _get_redis_key_for_config(config)

    adapter.rebuild_table()
    relevant_ids_chunk = []
    for relevant_id in iterate_doc_ids_in_domain_by_type(
            config.domain,
            config.referenced_doc_type,
            chunk_size=CHUNK_SIZE,
            database=couchdb):
        relevant_ids_chunk.append(relevant_id)
        if len(relevant_ids_chunk) >= CHUNK_SIZE:
            redis_client.sadd(redis_key, *relevant_ids_chunk)
            _build_indicators(indicator_config_id, relevant_ids_chunk)
            relevant_ids_chunk = []

    if relevant_ids_chunk:
        redis_client.sadd(redis_key, *relevant_ids_chunk)
        _build_indicators(indicator_config_id, relevant_ids_chunk)
Example 3: export_data_source
def export_data_source(request, domain, config_id):
    config = get_document_or_404(DataSourceConfiguration, domain, config_id)
    adapter = IndicatorSqlAdapter(config)
    q = adapter.get_query_object()
    table = adapter.get_table()

    try:
        params = process_url_params(request.GET, table.columns)
    except UserQueryError as e:
        return HttpResponse(e.message, status=400)

    q = q.filter_by(**params.keyword_filters)
    for sql_filter in params.sql_filters:
        q = q.filter(sql_filter)

    # build export
    def get_table(q):
        yield table.columns.keys()
        for row in q:
            yield row

    fd, path = tempfile.mkstemp()
    with os.fdopen(fd, 'wb') as tmpfile:
        try:
            tables = [[config.table_id, get_table(q)]]
            export_from_tables(tables, tmpfile, params.format)
        except exc.DataError:
            msg = _("There was a problem executing your query, please make "
                    "sure your parameters are valid.")
            return HttpResponse(msg, status=400)
        return export_response(Temp(path), params.format, config.display_name)
Example 4: _get_distinct_values
def _get_distinct_values(data_source_configuration, column_config, expansion_limit=DEFAULT_MAXIMUM_EXPANSION):
    """
    Return a tuple. The first item is a list of distinct values in the given
    ExpandedColumn no longer than expansion_limit. The second is a boolean which
    is True if the number of distinct values in the column is greater than the
    limit.

    :param data_source_configuration:
    :param column_config:
    :param expansion_limit:
    :return:
    """
    from corehq.apps.userreports.sql import IndicatorSqlAdapter
    adapter = IndicatorSqlAdapter(data_source_configuration)
    too_many_values = False

    table = adapter.get_table()
    if not table.exists(bind=adapter.engine):
        return [], False

    if column_config.field not in table.c:
        raise ColumnNotFoundError(_(
            'The column "{}" does not exist in the report source! '
            'Please double check your report configuration.').format(column_config.field)
        )
    column = table.c[column_config.field]

    query = adapter.session_helper.Session.query(column).limit(expansion_limit + 1).distinct()
    result = query.all()
    distinct_values = [x[0] for x in result]
    if len(distinct_values) > expansion_limit:
        distinct_values = distinct_values[:expansion_limit]
        too_many_values = True

    return distinct_values, too_many_values
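A hypothetical call site for the function above; data_source and column_config are placeholders, and all that matters is that column_config.field names a column of the data source's table:

    values, too_many = _get_distinct_values(data_source, column_config, expansion_limit=10)
    if too_many:
        # the column has more than 10 distinct values; `values` holds
        # only the first 10, truncated by the function above
        pass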
Example 5: _build_indicators
def _build_indicators(indicator_config_id, relevant_ids):
    config = _get_config_by_id(indicator_config_id)
    adapter = IndicatorSqlAdapter(config)
    couchdb = _get_db(config.referenced_doc_type)
    redis_client = get_redis_client().client.get_client()
    redis_key = _get_redis_key_for_config(config)

    for doc in iter_docs(couchdb, relevant_ids, chunksize=500):
        try:
            # save is a noop if the filter doesn't match
            adapter.save(doc)
            redis_client.srem(redis_key, doc.get('_id'))
        except Exception as e:
            logging.exception('problem saving document {} to table. {}'.format(doc['_id'], e))

    if not is_static(indicator_config_id):
        redis_client.delete(redis_key)
        config.meta.build.finished = True
        try:
            config.save()
        except ResourceConflict:
            current_config = DataSourceConfiguration.get(config._id)
            # check that a new build has not yet started
            if config.meta.build.initiated == current_config.meta.build.initiated:
                current_config.meta.build.finished = True
                current_config.save()
Example 6: rebuild_indicators
def rebuild_indicators(indicator_config_id):
    config = _get_config_by_id(indicator_config_id)
    adapter = IndicatorSqlAdapter(config)
    couchdb = _get_db(config.referenced_doc_type)
    redis_client = get_redis_client().client.get_client()
    redis_key = _get_redis_key_for_config(config)

    if not is_static(indicator_config_id):
        # Save the start time now in case anything goes wrong. This way we'll be
        # able to see if the rebuild started a long time ago without finishing.
        config.meta.build.initiated = datetime.datetime.utcnow()
        config.meta.build.finished = False
        config.save()
        redis_key = _get_redis_key_for_config(config)

    adapter.rebuild_table()
    relevant_ids = get_doc_ids_in_domain_by_type(
        config.domain,
        config.referenced_doc_type,
        database=couchdb,
    )
    for docs in chunked(relevant_ids, 1000):
        redis_client.sadd(redis_key, *docs)

    _build_indicators(indicator_config_id, relevant_ids)
Example 7: IndicatorPillowTestBase
class IndicatorPillowTestBase(TestCase):

    def setUp(self):
        self.config = get_sample_data_source()
        self.config.save()
        self.adapter = IndicatorSqlAdapter(self.config)
        self.fake_time_now = datetime(2015, 4, 24, 12, 30, 8, 24886)

    def tearDown(self):
        self.config.delete()
        self.adapter.drop_table()

    @patch('corehq.apps.userreports.specs.datetime')
    def _check_sample_doc_state(self, expected_indicators, datetime_mock):
        datetime_mock.utcnow.return_value = self.fake_time_now
        self.assertEqual(1, self.adapter.get_query_object().count())
        row = self.adapter.get_query_object()[0]
        for k in row.keys():
            v = getattr(row, k)
            if isinstance(expected_indicators[k], decimal.Decimal):
                self.assertAlmostEqual(expected_indicators[k], v)
            else:
                self.assertEqual(
                    expected_indicators[k], v,
                    'mismatched property: {} (expected {}, was {})'.format(
                        k, expected_indicators[k], v
                    )
                )
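A minimal sketch of how a concrete test could build on this base class. The helper get_sample_doc_and_indicators is assumed here to return a document matching the sample data source plus its expected indicator values; substitute whatever fixture your test suite provides:

    class SampleIndicatorTest(IndicatorPillowTestBase):

        def test_save_and_check(self):
            # save a matching doc through the adapter, then compare the
            # resulting row against the expected indicator values
            sample_doc, expected_indicators = get_sample_doc_and_indicators(self.fake_time_now)
            self.adapter.save(sample_doc)
            self._check_sample_doc_state(expected_indicators)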
Example 8: _delete_data_source_shared
def _delete_data_source_shared(request, domain, config_id):
    config = get_document_or_404(DataSourceConfiguration, domain, config_id)
    adapter = IndicatorSqlAdapter(get_engine(), config)
    adapter.drop_table()
    config.delete()
    messages.success(request,
                     _(u'Data source "{}" has been deleted.'.format(config.display_name)))
Example 9: delete_data_source
def delete_data_source(request, domain, config_id):
    config = get_document_or_404(DataSourceConfiguration, domain, config_id)
    adapter = IndicatorSqlAdapter(get_engine(), config)
    adapter.drop_table()
    config.delete()
    messages.success(request,
                     _(u'Data source "{}" has been deleted.'.format(config.display_name)))
    return HttpResponseRedirect(reverse('configurable_reports_home', args=[domain]))
Example 10: rebuild_aggregate_ucr
def rebuild_aggregate_ucr(request, domain, table_id):
    table_definition = get_object_or_404(
        AggregateTableDefinition, domain=domain, table_id=table_id
    )
    aggregate_table_adapter = IndicatorSqlAdapter(table_definition)
    aggregate_table_adapter.rebuild_table()
    populate_aggregate_table_data_task.delay(table_definition.id)
    messages.success(request, 'Table rebuild successfully started.')
    return HttpResponseRedirect(reverse(AggregateUCRView.urlname, args=[domain, table_id]))
Example 11: export_data_source
def export_data_source(request, domain, config_id):
    config, _ = get_datasource_config_or_404(config_id, domain)
    adapter = IndicatorSqlAdapter(config)
    q = adapter.get_query_object()
    table = adapter.get_table()

    try:
        params = process_url_params(request.GET, table.columns)
        allowed_formats = [
            Format.CSV,
            Format.HTML,
            Format.XLS,
            Format.XLS_2007,
        ]
        if params.format not in allowed_formats:
            msg = ugettext_lazy('format must be one of the following: {}').format(', '.join(allowed_formats))
            return HttpResponse(msg, status=400)
    except UserQueryError as e:
        return HttpResponse(e.message, status=400)

    q = q.filter_by(**params.keyword_filters)
    for sql_filter in params.sql_filters:
        q = q.filter(sql_filter)

    # xls format has limit of 65536 rows
    # First row is taken up by headers
    if params.format == Format.XLS and q.count() >= 65535:
        keyword_params = dict(**request.GET)
        # use default format
        if 'format' in keyword_params:
            del keyword_params['format']
        return HttpResponseRedirect(
            '%s?%s' % (
                reverse('export_configurable_data_source', args=[domain, config._id]),
                urlencode(keyword_params)
            )
        )

    # build export
    def get_table(q):
        yield table.columns.keys()
        for row in q:
            yield row

    fd, path = tempfile.mkstemp()
    with os.fdopen(fd, 'wb') as tmpfile:
        try:
            tables = [[config.table_id, get_table(q)]]
            export_from_tables(tables, tmpfile, params.format)
        except exc.DataError:
            msg = ugettext_lazy(
                "There was a problem executing your query, "
                "please make sure your parameters are valid."
            )
            return HttpResponse(msg, status=400)
        return export_response(Temp(path), params.format, config.display_name)
Example 12: page_context
def page_context(self):
    config, is_static = get_datasource_config_or_404(self.config_id, self.domain)
    adapter = IndicatorSqlAdapter(config)
    q = adapter.get_query_object()
    return {
        'data_source': config,
        'columns': q.column_descriptions,
        'data': q[:20],
        'total_rows': q.count(),
    }
Example 13: page_context
def page_context(self):
    context = super(PreviewAggregateUCRView, self).page_context
    adapter = IndicatorSqlAdapter(self.table_definition)
    q = adapter.get_query_object()
    context.update({
        'columns': q.column_descriptions,
        'data': [list(row) for row in q[:20]],
        'total_rows': q.count(),
    })
    return context
Example 14: test_weekly_aggregation
def test_weekly_aggregation(self):
    # generate our table
    aggregate_table_adapter = IndicatorSqlAdapter(self.weekly_aggregate_table_definition)
    aggregate_table_adapter.rebuild_table()

    populate_aggregate_table_data(aggregate_table_adapter)
    self._check_weekly_results()

    # confirm it's also idempotent
    populate_aggregate_table_data(aggregate_table_adapter)
    self._check_weekly_results()
Example 15: preview_data_source
def preview_data_source(request, domain, config_id):
    config, is_static = get_datasource_config_or_404(config_id, domain)
    adapter = IndicatorSqlAdapter(config)
    q = adapter.get_query_object()
    context = _shared_context(domain)
    context.update({
        'data_source': config,
        'columns': q.column_descriptions,
        'data': q[:20],
        'total_rows': q.count(),
    })
    return render(request, "userreports/preview_data.html", context)