本文整理汇总了Python中soil.DownloadBase类的典型用法代码示例。如果您正苦于以下问题:Python DownloadBase类的具体用法?Python DownloadBase怎么用?Python DownloadBase使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了DownloadBase类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: import_products
def import_products(domain, download, task):
    """Import products from a CSV download into ``domain``.

    :param domain: target domain; rows whose product already belongs to a
        different domain are skipped with a warning message.
    :param download: object exposing ``get_content()`` returning raw CSV text.
    :param task: optional soil task used to report per-row progress.
    """
    messages = []
    products = []
    data = download.get_content().split('\n')
    processed = 0
    # first line of the CSV is the header, so it does not count as a data row
    total_rows = len(data) - 1
    reader = csv.DictReader(data)
    for row in reader:
        try:
            p = Product.from_csv(row)
            if p:
                if p.domain:
                    # don't allow a row to silently move a product between domains
                    if p.domain != domain:
                        messages.append(
                            _("Product {product_name} belongs to another domain and was not updated").format(
                                product_name=p.name
                            )
                        )
                        continue
                else:
                    p.domain = domain
                products.append(p)
            if task:
                processed += 1
                DownloadBase.set_progress(task, processed, total_rows)
        # `except Exception as e` replaces the Python-2-only `except Exception, e:`
        # form, which is a syntax error on Python 3 (the `as` form works on both)
        except Exception as e:
            messages.append(str(e))
示例2: prepare_form_multimedia
def prepare_form_multimedia(request, domain):
    """Gets the download_id for the multimedia zip and sends it to the
    exportDownloadService in download_export.ng.js to begin polling for the
    zip file download.
    """
    doc_type = request.POST.get('form_or_case')
    is_sms = json.loads(request.POST.get('sms_export'))

    # 404 early if the user may not download exports at all
    perms = ExportsPermissionsManager(doc_type, domain, request.couch_user)
    perms.access_download_export_or_404()

    helper = DownloadExportViewHelper.get(request, domain, doc_type, is_sms)
    raw_filter_data = json.loads(request.POST.get('form_data'))
    specs = json.loads(request.POST.get('exports'))
    try:
        validated_form = helper.get_filter_form(raw_filter_data)
    except ExportFormValidationException:
        return json_response({
            'error': _("Please check that you've submitted all required filters."),
        })

    download = DownloadBase()
    export_object = helper.get_export(specs[0]['export_id'])
    zip_kwargs = validated_form.get_multimedia_task_kwargs(
        export_object, download.download_id, raw_filter_data)
    from corehq.apps.reports.tasks import build_form_multimedia_zip
    download.set_task(build_form_multimedia_zip.delay(**zip_kwargs))
    return json_response({
        'success': True,
        'download_id': download.download_id,
    })
示例3: build_form_multimedia_zip
def build_form_multimedia_zip(domain, xmlns, startdate, enddate, app_id,
                              export_id, zip_name, download_id, export_is_legacy):
    """Collect multimedia attachment info for matching forms, write them to a
    zip on disk (shared drive when enabled, otherwise a temp file), and expose
    the result as a download, reporting progress along the way.
    """
    matching_form_ids = _get_form_ids(domain, app_id, xmlns, startdate, enddate, export_is_legacy)
    export_props = _get_export_properties(export_id, export_is_legacy)

    if not app_id:
        zip_name = 'Unrelated Form'

    attachments_info = []
    for form in FormAccessors(domain).iter_forms(matching_form_ids):
        # fall back to the first form's name when no zip name was supplied
        if not zip_name:
            zip_name = unidecode(form.name or 'unknown form')
        attachments_info.append(_extract_form_attachment_info(form, export_props))

    form_count = len(attachments_info)
    DownloadBase.set_progress(build_form_multimedia_zip, 0, form_count)

    transfer_enabled = settings.SHARED_DRIVE_CONF.transfer_enabled
    if transfer_enabled:
        target_path = _get_download_file_path(xmlns, startdate, enddate, export_id, app_id, form_count)
    else:
        _, target_path = tempfile.mkstemp()

    _write_attachments_to_file(target_path, transfer_enabled, form_count, attachments_info)
    _expose_download(target_path, transfer_enabled, zip_name, download_id, form_count)
示例4: prepare_form_multimedia
def prepare_form_multimedia(self, in_data):
    """Gets the download_id for the multimedia zip and sends it to the
    exportDownloadService in download_export.ng.js to begin polling for the
    zip file download.
    """
    try:
        form_data, specs = self._get_form_data_and_specs(in_data)
        export_filter_form = FilterFormExportDownloadForm(
            self.domain_object, self.timezone, form_data
        )
        if not export_filter_form.is_valid():
            raise ExportFormValidationException(
                _("Please check that you've submitted all required filters.")
            )

        download = DownloadBase()
        schema = self.get_export_schema(self.domain, specs[0]['export_id'])
        zip_kwargs = export_filter_form.get_multimedia_task_kwargs(
            schema, download.download_id
        )
        from corehq.apps.reports.tasks import build_form_multimedia_zip
        download.set_task(build_form_multimedia_zip.delay(**zip_kwargs))
    except Exception as e:
        # any failure (validation included) is reported back to angular
        return format_angular_error(e)
    return format_angular_success({
        'download_id': download.download_id,
    })
示例5: get_export_files
def get_export_files(self, format='', previous_export_id=None, filter=None,
                     use_cache=True, max_column_size=2000, separator='|', process=None, **kwargs):
    """Build (or fetch from cache) the export files for this export schema.

    :param format: output format name understood by ``get_writer``.
    :param previous_export_id: checkpoint id to export incrementally from.
    :param filter: optional document filter; caching is skipped when given.
    :param use_cache: read/write a one-hour result cache for filterless runs.
    :param max_column_size: passed through to the writer.
    :param separator: column-path separator used for headers and rows.
    :param process: optional soil task used to report per-document progress.
    :return: an ``ExportFiles`` wrapping the written temp-file path and the
        checkpoint, or ``None`` when no checkpoint was produced.
    """
    # the APIs of how these methods are broken down suck, but at least
    # it's DRY
    from couchexport.export import get_writer, get_export_components, get_headers, get_formatted_rows
    from django.core.cache import cache
    import hashlib

    export_tag = self.index
    CACHE_TIME = 1 * 60 * 60  # cache for 1 hour, in seconds

    def _build_cache_key(tag, prev_export_id, format, max_column_size):
        def _human_readable_key(tag, prev_export_id, format, max_column_size):
            return "couchexport_:%s:%s:%s:%s" % (tag, prev_export_id, format, max_column_size)
        # md5 keeps the cache-key length bounded regardless of tag size
        return hashlib.md5(_human_readable_key(tag, prev_export_id,
                                               format, max_column_size)).hexdigest()

    # check cache, only supported for filterless queries, currently
    cache_key = _build_cache_key(export_tag, previous_export_id, format, max_column_size)
    if use_cache and filter is None:
        cached_data = cache.get(cache_key)
        if cached_data:
            (tmp, checkpoint) = cached_data
            return ExportFiles(tmp, checkpoint)

    fd, path = tempfile.mkstemp()
    with os.fdopen(fd, 'wb') as tmp:
        schema_index = export_tag
        config, updated_schema, export_schema_checkpoint = get_export_components(schema_index,
                                                                                 previous_export_id, filter)
        if config:
            writer = get_writer(format)
            # get cleaned up headers
            formatted_headers = self.remap_tables(get_headers(updated_schema, separator=separator))
            writer.open(formatted_headers, tmp, max_column_size=max_column_size)

            total_docs = len(config.potentially_relevant_ids)
            if process:
                DownloadBase.set_progress(process, 0, total_docs)
            for i, doc in config.enum_docs():
                if self.transform:
                    doc = self.transform(doc)
                writer.write(self.remap_tables(get_formatted_rows(
                    doc, updated_schema, include_headers=False,
                    separator=separator)))
                if process:
                    DownloadBase.set_progress(process, i + 1, total_docs)
            writer.close()
        checkpoint = export_schema_checkpoint

    if checkpoint:
        if use_cache:
            # NOTE(review): caches the temp-file *path*, not its contents —
            # assumes the file outlives the cache entry; confirm cleanup policy
            cache.set(cache_key, (path, checkpoint), CACHE_TIME)
        return ExportFiles(path, checkpoint)

    return None
示例6: prime_restore
def prime_restore(user_ids, version=V1, cache_timeout=None, overwrite_cache=False):
    """Pre-generate (and force-cache) restore payloads for the given users,
    reporting per-user progress and collecting any failures as messages.
    """
    from corehq.apps.ota.views import get_restore_response

    total = len(user_ids)
    DownloadBase.set_progress(prime_restore, 0, total)

    results = {'messages': []}
    for index, user_id in enumerate(user_ids):
        try:
            user = CommCareUser.get(user_id)
        except ResourceNotFound:
            results['messages'].append('User not found: {}'.format(user_id))
            continue

        try:
            get_restore_response(
                user.domain,
                user,
                since=None,
                version=version,
                force_cache=True,
                cache_timeout=cache_timeout,
                overwrite_cache=overwrite_cache
            )
        except Exception as e:
            # best-effort priming: record the error and keep going
            results['messages'].append('Error processing user: {}'.format(str(e)))

        DownloadBase.set_progress(prime_restore, index + 1, total)

    return results
示例7: _increment_progress
def _increment_progress(self):
    """Bump the exported-location counter and periodically report progress."""
    if self._location_count is None:
        # lazily count active locations on the first call
        total = SQLLocation.active_objects.filter(domain=self.domain).count()
        self._location_count = total
        # report roughly every 1% of locations, but never more often than every 10
        self._progress_update_chunksize = max(10, total // 100)

    self._locations_exported += 1
    exported = self._locations_exported
    if exported % self._progress_update_chunksize == 0:
        DownloadBase.set_progress(self.async_task, exported, self._location_count)
示例8: export
def export(schema_index, file, format=Format.XLS_2007,
           previous_export_id=None, filter=None,
           max_column_size=2000, separator='|', export_object=None, process=None):
    """
    Exports data from couch documents matching a given tag to a file.
    Returns true if it finds data, otherwise nothing
    """
    config, updated_schema, export_schema_checkpoint = get_export_components(
        schema_index, previous_export_id, filter)

    if not config:
        # nothing matched the tag; no rows to write
        return export_schema_checkpoint

    # transform docs onto output and save
    writer = get_writer(format)
    headers = get_headers(updated_schema, separator=separator)
    writer.open(headers, file, max_column_size=max_column_size)

    doc_count = len(config.potentially_relevant_ids)
    if process:
        DownloadBase.set_progress(process, 0, doc_count)

    for index, doc in config.enum_docs():
        if export_object and export_object.transform:
            doc = export_object.transform(doc)
        tables = create_intermediate_tables(doc, updated_schema)
        writer.write(format_tables(tables, include_headers=False, separator=separator))
        if process:
            DownloadBase.set_progress(process, index + 1, doc_count)

    writer.close()
    return export_schema_checkpoint
示例9: fixture_upload_async
def fixture_upload_async(domain, download_id, replace):
    """Task entry point: upload a fixture file and track progress against it."""
    task = fixture_upload_async
    DownloadBase.set_progress(task, 0, 100)
    # the uploaded file was previously stored under this download id
    download_ref = DownloadBase.get(download_id)
    upload_messages = upload_fixture_file(domain, download_ref.get_filename(), replace, task)
    DownloadBase.set_progress(task, 100, 100)
    return {"messages": upload_messages}
示例10: export_all_form_metadata_async
def export_all_form_metadata_async(req, domain):
    """Kick off an async export of all form metadata for the domain and
    return the initial download response."""
    has_date_range = req.GET.get("startdate") and req.GET.get("enddate")
    datespan = req.datespan if has_date_range else None
    group_id = req.GET.get("group")
    ufilter = UserTypeFilter.get_user_filter(req)[0]
    users = util.get_all_users_by_domain(
        domain=domain,
        group=group_id,
        user_filter=ufilter,
        simplified=True,
        include_inactive=True
    )
    # drop users without an id
    user_ids = [u["user_id"] for u in users if u["user_id"]]
    export_format = req.GET.get("format", Format.XLS_2007)

    download = DownloadBase()
    download.set_task(create_metadata_export.delay(
        download.download_id,
        domain,
        format=export_format,
        filename="%s_forms" % domain,
        datespan=datespan,
        user_ids=user_ids,
    ))
    return download.get_start_response()
示例11: write_export_instance
def write_export_instance(writer, export_instance, documents, progress_tracker=None):
    """
    Write rows to the given open _Writer.

    Rows will be written to each table in the export instance for each of
    the given documents.
    :param writer: An open _Writer
    :param export_instance: An ExportInstance
    :param documents: An iterable yielding documents; must also expose a
        ``count`` attribute for progress reporting
    :param progress_tracker: A task for soil to track progress against
    :return: None
    """
    if progress_tracker:
        DownloadBase.set_progress(progress_tracker, 0, documents.count)

    # wall-clock and volume accounting for the datadog metrics emitted below
    start = _time_in_milliseconds()
    total_bytes = 0
    total_rows = 0
    compute_total = 0  # ms spent computing rows
    write_total = 0    # ms spent writing rows

    for row_number, doc in enumerate(documents):
        # NOTE: sys.getsizeof is shallow, so this under-counts nested docs
        total_bytes += sys.getsizeof(doc)
        for table in export_instance.selected_tables:
            compute_start = _time_in_milliseconds()
            try:
                rows = table.get_rows(
                    doc,
                    row_number,
                    split_columns=export_instance.split_multiselects,
                    transform_dates=export_instance.transform_dates,
                )
            except Exception as e:
                # report with context, suppress double-reporting via sentry,
                # then re-raise so the caller sees the failure
                notify_exception(None, "Error exporting doc", details={
                    'domain': export_instance.domain,
                    'export_instance_id': export_instance.get_id,
                    'export_table': table.label,
                    'doc_id': doc.get('_id'),
                })
                e.sentry_capture = False
                raise
            compute_total += _time_in_milliseconds() - compute_start

            write_start = _time_in_milliseconds()
            for row in rows:
                # It might be bad to write one row at a time when you can do more (from a performance perspective)
                # Regardless, we should handle the batching of rows in the _Writer class, not here.
                writer.write(table, row)
            write_total += _time_in_milliseconds() - write_start

            total_rows += len(rows)
        # progress is reported per document, not per table
        if progress_tracker:
            DownloadBase.set_progress(progress_tracker, row_number + 1, documents.count)

    end = _time_in_milliseconds()
    tags = ['format:{}'.format(writer.format)]
    _record_datadog_export_write_rows(write_total, total_bytes, total_rows, tags)
    _record_datadog_export_compute_rows(compute_total, total_bytes, total_rows, tags)
    _record_datadog_export_duration(end - start, total_bytes, total_rows, tags)
    _record_export_duration(end - start, export_instance)
示例12: add_progress
def add_progress(self, count=1):
    """Advance the progress counter by ``count``, push it to soil when a task
    is attached, and log at most once every five seconds."""
    self.progress += count
    if self.task:
        DownloadBase.set_progress(self.task, self.progress, self.total_rows)
    now = datetime.now()
    if now > self.last_update + timedelta(seconds=5):
        # throttled logging
        self.log("processed %s / %s", self.progress, self.total_rows)
        self.last_update = datetime.now()
示例13: location_importer_job_poll
def location_importer_job_poll(request, domain, download_id, template="locations/manage/partials/status.html"):
    """Poll the state of a location-import task and render the status partial."""
    download_data = DownloadBase.get(download_id)
    if download_data is None:
        # no record yet; use a stub so polling can continue
        download_data = DownloadBase(download_id=download_id)

    try:
        if download_data.task.failed():
            return HttpResponseServerError()
    except (TypeError, NotImplementedError):
        # no result backend / improperly configured
        pass

    alive = is_alive() if heartbeat_enabled() else True

    context = RequestContext(request)
    is_ready = download_data.task.state == 'SUCCESS'
    if is_ready:
        context['result'] = download_data.task.result.get('messages')
    context['is_ready'] = is_ready
    context['is_alive'] = alive
    context['progress'] = download_data.get_progress()
    context['download_id'] = download_id
    return render_to_response(template, context_instance=context)
示例14: toggle_demo_mode
def toggle_demo_mode(request, domain, user_id):
    """Turn demo mode on or off for a CommCare user and redirect accordingly."""
    user = CommCareUser.get_by_user_id(user_id, domain)
    demo_mode = request.POST.get('demo_mode', 'no') == 'yes'
    edit_user_url = reverse(EditCommCareUserView.urlname, args=[domain, user_id])

    # handle bad POST param: requested state already matches the current one
    if user.is_demo_user == demo_mode:
        if user.is_demo_user:
            warning = _("User is already in Demo mode!")
        else:
            warning = _("User is not in Demo mode!")
        messages.warning(request, warning)
        return HttpResponseRedirect(edit_user_url)

    if not demo_mode:
        from corehq.apps.app_manager.views.utils import unset_practice_mode_configured_apps, \
            get_practice_mode_configured_apps
        # if the user is being used as practice user on any apps, check/ask for confirmation
        apps = get_practice_mode_configured_apps(domain)
        confirm_turn_off = request.POST.get('confirm_turn_off', 'no') == 'yes'
        if apps and not confirm_turn_off:
            return HttpResponseRedirect(reverse(ConfirmTurnOffDemoModeView.urlname, args=[domain, user_id]))
        turn_off_demo_mode(user)
        unset_practice_mode_configured_apps(domain, user.get_id)
        messages.success(request, _("Successfully turned off demo mode!"))
        return HttpResponseRedirect(edit_user_url)

    # turning demo mode ON happens asynchronously; send the user to the status page
    download = DownloadBase()
    download.set_task(turn_on_demo_mode_task.delay(user.get_id, domain))
    return HttpResponseRedirect(
        reverse(
            DemoRestoreStatusView.urlname,
            args=[domain, download.download_id, user_id]
        )
    )
示例15: import_locations
def import_locations(domain, worksheet, update_existing=False, task=None):
    """Generator: import each worksheet row as a location, yielding status
    messages as the import proceeds."""
    fields = worksheet.headers
    data = list(worksheet)
    loc_types = defined_location_types(domain)

    # the leading run of columns that name known location types defines the
    # hierarchy; everything after the first unrecognized column is a property
    hierarchy_fields = []
    for field in fields:
        if field not in loc_types:
            break
        hierarchy_fields.append(field)
    property_fields = fields[len(hierarchy_fields):]

    if not hierarchy_fields:
        yield 'missing location hierarchy-related fields in left columns. aborting import'
        return

    loc_cache = LocationCache(domain)
    row_count = len(data)
    for index, loc in enumerate(data):
        if task:
            DownloadBase.set_progress(task, index, row_count)
        for m in import_location(domain, loc, hierarchy_fields, property_fields, update_existing, loc_cache):
            yield m