本文整理汇总了Python中ckanext.datastore.backend.DatastoreBackend类的典型用法代码示例。如果您正苦于以下问题:Python DatastoreBackend类的具体用法?Python DatastoreBackend怎么用?Python DatastoreBackend使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了DatastoreBackend类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_backend_functionality
def test_backend_functionality(self, get_engine):
    # End-to-end exercise of the sqlite example backend through the
    # standard datastore actions (create / search / delete / info),
    # asserting the exact SQL statements sent to the (mocked) engine.
    # NOTE(review): `get_engine` is presumably injected by a patch
    # decorator outside this snippet -- confirm against the full file.
    engine = get_engine()
    execute = engine.execute
    fetchall = execute().fetchall
    # the execute() call on the line above already registered one call;
    # reset so the call-count assertion below only counts real work
    execute.reset_mock()
    DatastoreExampleSqliteBackend.resource_fields = Mock(
        return_value={u'meta': {}, u'schema': {
            u'a': u'text'
        }}
    )
    records = [
        {u'a': u'x'}, {u'a': u'y'}, {u'a': u'z'},
    ]
    DatastoreBackend.set_active_backend(config)
    res = factories.Resource(url_type=u'datastore')
    helpers.call_action(
        u'datastore_create', resource_id=res['id'],
        fields=[
            {u'id': u'a'}
        ], records=records
    )
    # check, create and 3 inserts
    assert_equal(5, execute.call_count)
    insert_query = u'INSERT INTO "{0}"(a) VALUES(?)'.format(res['id'])
    execute.assert_has_calls(
        [
            call(u' CREATE TABLE IF NOT EXISTS "{0}"(a text);'.format(
                res['id']
            )),
            call(insert_query, ['x']),
            call(insert_query, ['y']),
            call(insert_query, ['z'])
        ])
    execute.reset_mock()
    # search: backend is expected to SELECT with the default LIMIT of 10
    fetchall.return_value = records
    helpers.call_action(
        u'datastore_search', resource_id=res['id'])
    execute.assert_called_with(
        u'SELECT * FROM "{0}" LIMIT 10'.format(res['id'])
    )
    execute.reset_mock()
    helpers.call_action(
        u'datastore_delete', resource_id=res['id'])
    # check delete
    execute.assert_called_with(
        u'DROP TABLE IF EXISTS "{0}"'.format(res['id'])
    )
    execute.reset_mock()
    helpers.call_action(
        u'datastore_info', id=res['id'])
    # check: datastore_info probes sqlite_master for the table.
    # The continuation lines are deliberately unindented -- the string
    # content must match the SQL the backend emits byte-for-byte.
    c = u'''
select name from sqlite_master
where type = "table" and name = "{0}"'''.format(res['id'])
    execute.assert_called_with(c)
示例2: update_config
def update_config(self, config):
    '''Register the available datastore backends, activate the one
    selected by ``config`` and expose the base template directory.
    '''
    DatastoreBackend.register_backends()
    DatastoreBackend.set_active_backend(config)
    p.toolkit.add_template_directory(
        config, config.get('ckan.base_templates_folder'))
    # keep a handle on the chosen backend for later plugin hooks
    self.backend = DatastoreBackend.get_active_backend()
示例3: datastore_search_sql
def datastore_search_sql(context, data_dict):
    '''Run a single SQL SELECT statement against the DataStore.

    Allows searching one resource or joining several; the underlying
    engine is PostgreSQL. Queries carry an enforced timeout to avoid an
    unintended DOS, and the result set is capped at 32000 rows unless
    ``ckan.datastore.search.rows_max`` overrides it. The caller must be
    authorized for every CKAN resource referenced by the query.

    .. note:: Unavailable when :ref:`ckan.datastore.sqlsearch.enabled`
        is set to false.
    .. note:: Source data columns (i.e. CSV) whose heading names are all
        UPPERCASE must be double quoted in the SELECT statement to avoid
        null results.

    :param sql: a single SQL select statement
    :type sql: string

    **Results:**

    :rtype: A dictionary with the following keys
    :param fields: fields/columns and their extra metadata
    :type fields: list of dictionaries
    :param records: list of matching results
    :type records: list of dictionaries
    :param records_truncated: True when the internal row limit (32000,
        or the ``ckan.datastore.search.rows_max`` setting) truncated the
        result; absent otherwise.
    :type records_truncated: bool
    '''
    backend = DatastoreBackend.get_active_backend()

    def check_access(table_names):
        '''Authorization callback handed to the backend: raises
        NotAuthorized if the current user may not access any of the
        tables referenced by the query.

        :type table_names: list strings
        '''
        p.toolkit.check_access(
            'datastore_search_sql',
            dict(context, table_names=table_names),
            data_dict)

    result = backend.search_sql(
        dict(context, check_access=check_access),
        data_dict)
    # strip backend-internal bookkeeping before handing back to caller
    for internal_key in ('id', 'connection_url'):
        result.pop(internal_key, None)
    return result
示例4: datastore_info
def datastore_info(context, data_dict):
    '''Describe the data imported for a resource: column names and types.

    :param id: Id of the resource we want info about
    :type id: A UUID
    :rtype: A dictionary describing the columns and their types.
    '''
    p.toolkit.check_access('datastore_info', context, data_dict)
    backend = DatastoreBackend.get_active_backend()
    resource_id = _get_or_bust(data_dict, 'id')
    # raises if the CKAN resource itself is missing or not visible
    p.toolkit.get_action('resource_show')(context, {'id': resource_id})
    if not backend.resource_exists(resource_id):
        raise p.toolkit.ObjectNotFound(p.toolkit._(
            u'Resource "{0}" was not found.'.format(resource_id)
        ))
    return backend.resource_fields(resource_id)
示例5: datastore_run_triggers
def datastore_run_triggers(context, data_dict):
    '''Re-apply existing triggers to every record of a DataStore resource.

    :param resource_id: resource id that the data is going to be stored under.
    :type resource_id: string

    **Results:**

    :returns: The rowcount in the table.
    :rtype: int
    '''
    res_id = data_dict['resource_id']
    p.toolkit.check_access('datastore_trigger_each_row', context, data_dict)
    backend = DatastoreBackend.get_active_backend()
    connection = backend._get_write_engine().connect()
    # no-op UPDATE: rewriting _id to itself forces each row through its
    # triggers without changing any data
    sql = sqlalchemy.text(u'''update {0} set _id=_id '''.format(
        identifier(res_id)))
    try:
        results = connection.execute(sql)
    except sqlalchemy.exc.DatabaseError as err:
        message = err.args[0]
        # DB drivers may hand back bytes on Python 2; on Python 3 this is
        # already str and calling .decode() would raise AttributeError
        if isinstance(message, bytes):
            message = message.decode('utf8')
        message = message.split('\n')[0]
        raise p.toolkit.ValidationError({
            u'records': [message.split(u') ', 1)[-1]]})
    finally:
        # return the connection to the pool even when a trigger fails
        connection.close()
    return results.rowcount
示例6: datastore_function_delete
def datastore_function_delete(context, data_dict):
    u'''Drop a trigger function from the DataStore.

    :param name: function name
    :type name: string
    '''
    p.toolkit.check_access('datastore_function_delete', context, data_dict)
    active_backend = DatastoreBackend.get_active_backend()
    active_backend.drop_function(
        data_dict['name'], data_dict['if_exists'])
示例7: _resource_exists
def _resource_exists(context, data_dict):
    '''Return True only when the resource exists both in CKAN's model
    and in the active datastore backend.
    '''
    model = _get_or_bust(context, 'model')
    res_id = _get_or_bust(data_dict, 'resource_id')
    if model.Resource.get(res_id):
        backend = DatastoreBackend.get_active_backend()
        return backend.resource_exists(res_id)
    return False
示例8: datastore_search_sql
def datastore_search_sql(context, data_dict):
    '''Execute SQL queries on the DataStore.
    The datastore_search_sql action allows a user to search data in a resource
    or connect multiple resources with join expressions. The underlying SQL
    engine is the
    `PostgreSQL engine <http://www.postgresql.org/docs/9.1/interactive/>`_.
    There is an enforced timeout on SQL queries to avoid an unintended DOS.
    Queries are only allowed if you have access to the all the CKAN resources
    in the query and send the appropriate authorization.
    .. note:: This action is not available when
        :ref:`ckan.datastore.sqlsearch.enabled` is set to false
    .. note:: When source data columns (i.e. CSV) heading names are provided
        in all UPPERCASE you need to double quote them in the SQL select
        statement to avoid returning null results.
    :param sql: a single SQL select statement
    :type sql: string
    **Results:**
    The result of this action is a dictionary with the following keys:
    :rtype: A dictionary with the following keys
    :param fields: fields/columns and their extra metadata
    :type fields: list of dictionaries
    :param records: list of matching results
    :type records: list of dictionaries
    '''
    # NOTE(review): this snippet appears truncated -- `backend` is fetched
    # but never used and no search/return follows; the remainder of the
    # function was presumably cut by the example extractor.
    backend = DatastoreBackend.get_active_backend()

    def check_access(table_names):
        '''
        Raise NotAuthorized if current user is not allowed to access
        any of the tables passed
        :type table_names: list strings
        '''
        p.toolkit.check_access(
            'datastore_search_sql',
            dict(context, table_names=table_names),
            data_dict)
示例9: datastore_search_sql
def datastore_search_sql(context, data_dict):
    '''Run a single SQL SELECT statement against the DataStore.

    Supports searching one resource or joining several; the underlying
    engine is PostgreSQL. Queries carry an enforced timeout to avoid an
    unintended DOS. DataStore resources belonging to a private CKAN
    resource cannot be searched with this action -- use
    :meth:`~ckanext.datastore.logic.action.datastore_search` instead.

    .. note:: Only available when using PostgreSQL 9.X with a read-only
        user on the database; not available in
        :ref:`legacy mode<legacy-mode>`.
    .. note:: Source data columns (i.e. CSV) whose heading names are all
        UPPERCASE must be double quoted in the SELECT statement to avoid
        null results.

    :param sql: a single SQL select statement
    :type sql: string

    **Results:**

    :rtype: A dictionary with the following keys
    :param fields: fields/columns and their extra metadata
    :type fields: list of dictionaries
    :param records: list of matching results
    :type records: list of dictionaries
    '''
    p.toolkit.check_access('datastore_search_sql', context, data_dict)
    backend = DatastoreBackend.get_active_backend()
    result = backend.search_sql(context, data_dict)
    # strip backend-internal bookkeeping before handing back to caller
    for internal_key in ('id', 'connection_url'):
        result.pop(internal_key, None)
    return result
示例10: datastore_function_create
def datastore_function_create(context, data_dict):
    u'''
    Create a trigger function for use with datastore_create

    :param name: function name
    :type name: string
    :param or_replace: True to replace if function already exists
        (default: False)
    :type or_replace: bool
    :param rettype: set to 'trigger'
        (only trigger functions may be created at this time)
    :type rettype: string
    :param definition: PL/pgSQL function body for trigger function
    :type definition: string
    '''
    p.toolkit.check_access('datastore_function_create', context, data_dict)
    backend = DatastoreBackend.get_active_backend()
    backend.create_function(
        name=data_dict['name'],
        arguments=data_dict.get('arguments', []),
        rettype=data_dict['rettype'],
        definition=data_dict['definition'],
        # the docstring promises or_replace defaults to False; using .get
        # avoids a KeyError when the caller omits the optional parameter
        or_replace=data_dict.get('or_replace', False))
示例11: datastore_make_public
def datastore_make_public(context, data_dict):
    '''Allow access to the DataStore table through
    :meth:`~ckanext.datastore.logic.action.datastore_search_sql`.

    Called automatically when a CKAN dataset becomes public.

    :param resource_id: if of resource that should become public
    :type resource_id: string
    '''
    if 'id' in data_dict:
        # 'id' is accepted as an alias and takes precedence
        data_dict['resource_id'] = data_dict['id']
    res_id = _get_or_bust(data_dict, 'resource_id')
    if not _resource_exists(context, data_dict):
        raise p.toolkit.ObjectNotFound(p.toolkit._(
            u'Resource "{0}" was not found.'.format(res_id)
        ))
    p.toolkit.check_access('datastore_change_permissions', context, data_dict)
    DatastoreBackend.get_active_backend().make_public(context, data_dict)
示例12: test_sqlite_engine
def test_sqlite_engine(self):
    '''Activating from config selects the sqlite example backend.'''
    DatastoreBackend.set_active_backend(config)
    active = DatastoreBackend.get_active_backend()
    assert_is_instance(active, DatastoreExampleSqliteBackend)
示例13: datastore_create
def datastore_create(context, data_dict):
    '''Adds a new table to the DataStore.
    The datastore_create action allows you to post JSON data to be
    stored against a resource. This endpoint also supports altering tables,
    aliases and indexes and bulk insertion. This endpoint can be called
    multiple times to initially insert more data, add fields, change the
    aliases or indexes as well as the primary keys.
    To create an empty datastore resource and a CKAN resource at the same time,
    provide ``resource`` with a valid ``package_id`` and omit the
    ``resource_id``.
    If you want to create a datastore resource from the content of a file,
    provide ``resource`` with a valid ``url``.
    See :ref:`fields` and :ref:`records` for details on how to lay out records.
    :param resource_id: resource id that the data is going to be stored
        against.
    :type resource_id: string
    :param force: set to True to edit a read-only resource
    :type force: bool (optional, default: False)
    :param resource: resource dictionary that is passed to
        :meth:`~ckan.logic.action.create.resource_create`.
        Use instead of ``resource_id`` (optional)
    :type resource: dictionary
    :param aliases: names for read only aliases of the resource. (optional)
    :type aliases: list or comma separated string
    :param fields: fields/columns and their extra metadata. (optional)
    :type fields: list of dictionaries
    :param records: the data, eg: [{"dob": "2005", "some_stuff": ["a", "b"]}]
        (optional)
    :type records: list of dictionaries
    :param primary_key: fields that represent a unique key (optional)
    :type primary_key: list or comma separated string
    :param indexes: indexes on table (optional)
    :type indexes: list or comma separated string
    :param triggers: trigger functions to apply to this table on update/insert.
        functions may be created with
        :meth:`~ckanext.datastore.logic.action.datastore_function_create`.
        eg: [
        {"function": "trigger_clean_reference"},
        {"function": "trigger_check_codes"}]
    :type triggers: list of dictionaries
    Please note that setting the ``aliases``, ``indexes`` or ``primary_key``
    replaces the exising aliases or constraints. Setting ``records`` appends
    the provided records to the resource.
    **Results:**
    :returns: The newly created data object, excluding ``records`` passed.
    :rtype: dictionary
    See :ref:`fields` and :ref:`records` for details on how to lay out records.
    '''
    # NOTE(review): this snippet is truncated by the example extractor
    # (see the "部分代码省略" marker below) -- the tail of the function,
    # presumably the backend.create call and return, is not visible here.
    backend = DatastoreBackend.get_active_backend()
    schema = context.get('schema', dsschema.datastore_create_schema())
    # pull bulk payloads out before validation, then re-attach them so
    # the schema validators do not have to cope with them
    records = data_dict.pop('records', None)
    resource = data_dict.pop('resource', None)
    data_dict, errors = _validate(data_dict, schema, context)
    resource_dict = None
    if records:
        data_dict['records'] = records
    if resource:
        data_dict['resource'] = resource
    if errors:
        raise p.toolkit.ValidationError(errors)
    p.toolkit.check_access('datastore_create', context, data_dict)
    # 'resource' and 'resource_id' are mutually exclusive, but exactly
    # one of them is required
    if 'resource' in data_dict and 'resource_id' in data_dict:
        raise p.toolkit.ValidationError({
            'resource': ['resource cannot be used with resource_id']
        })
    if 'resource' not in data_dict and 'resource_id' not in data_dict:
        raise p.toolkit.ValidationError({
            'resource_id': ['resource_id or resource required']
        })
    if 'resource' in data_dict:
        has_url = 'url' in data_dict['resource']
        # A datastore only resource does not have a url in the db
        data_dict['resource'].setdefault('url', '_datastore_only_resource')
        resource_dict = p.toolkit.get_action('resource_create')(
            context, data_dict['resource'])
        data_dict['resource_id'] = resource_dict['id']
        # create resource from file
        if has_url:
            if not p.plugin_loaded('datapusher'):
                raise p.toolkit.ValidationError({'resource': [
                    'The datapusher has to be enabled.']})
            p.toolkit.get_action('datapusher_submit')(context, {
                'resource_id': resource_dict['id'],
                'set_url_type': True
            })
#.........这里部分代码省略.........
示例14: datastore_upsert
def datastore_upsert(context, data_dict):
    '''Updates or inserts into a table in the DataStore.

    Add or edit records of an existing DataStore resource. For the
    *upsert* and *update* methods a unique key must have been defined
    via the datastore_create action. Available methods:

    *upsert*
        Update if record with same key already exists, otherwise insert.
        Requires unique key.
    *insert*
        Insert only. Faster than upsert, but fails if any inserted
        record matches an existing one. Does *not* require a unique key.
    *update*
        Update only. Raises if the key to update does not exist.
        Requires unique key.

    :param resource_id: resource id that the data is going to be stored under.
    :type resource_id: string
    :param force: set to True to edit a read-only resource
    :type force: bool (optional, default: False)
    :param records: the data, eg: [{"dob": "2005", "some_stuff": ["a","b"]}]
        (optional)
    :type records: list of dictionaries
    :param method: the method to use to put the data into the datastore.
        Possible options are: upsert, insert, update
        (optional, default: upsert)
    :type method: string

    **Results:**

    :returns: The modified data object.
    :rtype: dictionary
    '''
    backend = DatastoreBackend.get_active_backend()
    schema = context.get('schema', dsschema.datastore_upsert_schema())
    # keep the bulk payload away from the validators, then re-attach it
    records = data_dict.pop('records', None)
    data_dict, errors = _validate(data_dict, schema, context)
    if records:
        data_dict['records'] = records
    if errors:
        raise p.toolkit.ValidationError(errors)
    p.toolkit.check_access('datastore_upsert', context, data_dict)
    resource_id = data_dict['resource_id']
    if not data_dict.pop('force', False):
        _check_read_only(context, resource_id)
    if not backend.resource_exists(resource_id):
        raise p.toolkit.ObjectNotFound(p.toolkit._(
            u'Resource "{0}" was not found.'.format(resource_id)
        ))
    result = backend.upsert(context, data_dict)
    # strip backend-internal bookkeeping before handing back to caller
    for internal_key in ('id', 'connection_url'):
        result.pop(internal_key, None)
    return result
示例15: bool
:type force: bool (optional, default: False)
:param filters: filters to apply before deleting (eg {"name": "fred"}).
If missing delete whole table and all dependent views.
(optional)
:type filters: dictionary
**Results:**
:returns: Original filters sent.
:rtype: dictionary
'''
schema = context.get('schema', dsschema.datastore_upsert_schema())
<<<<<<< HEAD
=======
backend = DatastoreBackend.get_active_backend()
>>>>>>> ckan-2.7.2
# Remove any applied filters before running validation.
filters = data_dict.pop('filters', None)
data_dict, errors = _validate(data_dict, schema, context)
if filters is not None:
if not isinstance(filters, dict):
raise p.toolkit.ValidationError({
'filters': [
'filters must be either a dict or null.'
]
})
data_dict['filters'] = filters