This article collects typical usage examples of the Python class invenio_records_files.api.Record. If you have been wondering what the Record class does, how to use it, or what real-world Record code looks like, the curated class examples below may help.
The following presents 15 code examples of the Record class, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
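Before diving into the examples, here is a minimal sketch of the pattern most of them share: create a record, link it to a files bucket, and attach a file through the record's files API. It assumes a running Invenio application context with invenio-db, invenio-files-rest and invenio-records-files configured and a default storage Location already created; the metadata and file names are illustrative placeholders, not part of any example below.

import uuid
from io import BytesIO

from invenio_db import db
from invenio_files_rest.models import Bucket
from invenio_records_files.api import Record
from invenio_records_files.models import RecordsBuckets

# Create a record with a known UUID so identifiers can be minted against it.
rec_uuid = uuid.uuid4()
record = Record.create({'title': 'An example record'}, id_=rec_uuid)

# Give the record a bucket so it can hold files.
bucket = Bucket.create()
RecordsBuckets.create(record=record.model, bucket=bucket)

# Store a file in the record's bucket through the files API.
record.files['data.txt'] = BytesIO(b'example content')

record.commit()
db.session.commit()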
Example 1: glossary_terms
def glossary_terms():
    """Load demo terms records."""
    from invenio_db import db
    from invenio_records import Record
    from invenio_indexer.api import RecordIndexer
    from cernopendata.modules.records.minters.termid import \
        cernopendata_termid_minter
    indexer = RecordIndexer()
    schema = current_app.extensions['invenio-jsonschemas'].path_to_url(
        'records/glossary-term-v1.0.0.json'
    )
    data = pkg_resources.resource_filename('cernopendata',
                                           'modules/fixtures/data')
    glossary_terms_json = glob.glob(os.path.join(data, 'terms', '*.json'))
    for filename in glossary_terms_json:
        click.echo('Loading glossary-terms from {0} ...'.format(filename))
        with open(filename, 'rb') as source:
            for data in json.load(source):
                if "collections" not in data and \
                        not isinstance(
                            data.get("collections", None), basestring):
                    data["collections"] = []
                    data["collections"].append({"primary": "Terms"})
                id = uuid.uuid4()
                cernopendata_termid_minter(id, data)
                data['$schema'] = schema
                record = Record.create(data, id_=id)
                db.session.commit()
                indexer.index(record)
                db.session.expunge_all()
Example 2: check_records_migration
def check_records_migration(app):
    """Check that a set of records have been migrated."""
    expected_records = _load_json('expected_records.json')
    for exp_record in expected_records:
        db_record = Record.get_record(exp_record['id'], with_deleted=True)
        assert str(db_record.created) == exp_record['created']
        # If the record is deleted there is no metadata to check
        if db_record.model.json is None:
            continue
        # Check that the parent pid is minted properly
        parent_pid = b2share_parent_pid_fetcher(exp_record['id'],
                                                db_record)
        fetched_pid = b2share_record_uuid_fetcher(exp_record['id'], db_record)
        record_pid = PersistentIdentifier.get(fetched_pid.pid_type,
                                              fetched_pid.pid_value)
        assert PIDVersioning(record_pid).parent.pid_value == parent_pid.pid_value
        # Remove the parent pid as it has been added by the migration
        db_record['_pid'].remove({
            'type': RecordUUIDProvider.parent_pid_type,
            'value': parent_pid.pid_value,
        })
        # The OAI-PMH identifier has been modified by the migration
        if db_record.get('_oai'):
            oai_prefix = app.config.get('OAISERVER_ID_PREFIX', 'oai:')
            record_id = exp_record['metadata']['_deposit']['id']
            assert db_record['_oai']['id'] == str(oai_prefix) + record_id
            exp_record['metadata']['_oai']['id'] = db_record['_oai']['id']
        assert db_record == exp_record['metadata']
Example 3: attach_files
def attach_files(obj, eng):
    if 'files' in obj.extra_data:
        recid = obj.data['control_number']
        pid = PersistentIdentifier.get('recid', recid)
        existing_record = Record.get_record(pid.object_uuid)
        if '_files' not in existing_record or not existing_record['_files']:
            bucket = Bucket.create()
            RecordsBuckets.create(record=existing_record.model, bucket=bucket)
        for file_ in obj.extra_data['files']:
            if file_['url'].startswith('http'):
                data = requests_retry_session().get(file_['url'], headers=file_.get('headers', {}))
                f = StringIO(data.content)
            else:
                f = open(file_['url'])
            existing_record.files[file_['name']] = f
            existing_record.files[file_['name']]['filetype'] = file_['filetype']
        obj.save()
        existing_record.commit()
        db.session.commit()
    else:
        __halt_and_notify('No files found.', eng)
Example 4: add_oai_information
def add_oai_information(obj, eng):
    """Adds OAI information like identifier"""
    recid = obj.data['control_number']
    pid = PersistentIdentifier.get('recid', recid)
    existing_record = Record.get_record(pid.object_uuid)
    if '_oai' not in existing_record:
        try:
            oaiid_minter(pid.object_uuid, existing_record)
        except PIDAlreadyExists:
            existing_record['_oai'] = {
                'id': 'oai:beta.scoap3.org:%s' % recid,
                'sets': _get_oai_sets(existing_record)
            }
    if 'id' not in existing_record['_oai']:
        current_app.logger.info('adding new oai id')
        oaiid_minter(pid.object_uuid, existing_record)
    if 'sets' not in existing_record['_oai'] or not existing_record['_oai']['sets']:
        existing_record['_oai']['sets'] = _get_oai_sets(existing_record)
    existing_record['_oai']['updated'] = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
    existing_record.commit()
    obj.save()
    db.session.commit()
Example 5: update_expired_embargoes
def update_expired_embargoes():
    """Release expired embargoes every midnight."""
    logger = current_app.logger
    base_url = urlunsplit((
        current_app.config.get('PREFERRED_URL_SCHEME', 'http'),
        current_app.config['JSONSCHEMAS_HOST'],
        current_app.config.get('APPLICATION_ROOT') or '', '', ''
    ))
    # The task needs to run in a request context as JSON Schema validation
    # will use url_for.
    with current_app.test_request_context('/', base_url=base_url):
        s = B2ShareRecordsSearch(
            using=current_search_client,
            index='records'
        ).query(
            'query_string',
            query='open_access:false AND embargo_date:{{* TO {0}}}'.format(
                datetime.now(timezone.utc).isoformat()
            ),
            allow_leading_wildcard=False
        ).fields([])
        record_ids = [hit.meta.id for hit in s.scan()]
        if record_ids:
            logger.info('Changing access of {} embargoed publications'
                        ' to public.'.format(len(record_ids)))
        for record in Record.get_records(record_ids):
            logger.debug('Making embargoed publication {} public'.format(
                record.id))
            record['open_access'] = True
            record.commit()
        db.session.commit()
        indexer = RecordIndexer()
        indexer.bulk_index(record_ids)
        indexer.process_bulk_queue()
Example 6: create_doc
def create_doc(data, schema):
    """Creates a new doc record."""
    from invenio_records import Record
    id = uuid.uuid4()
    cernopendata_docid_minter(id, data)
    data['$schema'] = schema
    record = Record.create(data, id_=id)
    return record
Example 7: data_policies
def data_policies(skip_files):
    """Load demo Data Policy records."""
    from invenio_db import db
    from invenio_indexer.api import RecordIndexer
    from cernopendata.modules.records.minters.recid import \
        cernopendata_recid_minter
    from invenio_files_rest.models import \
        Bucket, FileInstance, ObjectVersion
    from invenio_records_files.models import RecordsBuckets
    from invenio_records_files.api import Record
    from invenio_records.models import RecordMetadata
    indexer = RecordIndexer()
    schema = current_app.extensions['invenio-jsonschemas'].path_to_url(
        'records/data-policies-v1.0.0.json'
    )
    data = pkg_resources.resource_filename('cernopendata',
                                           'modules/fixtures/data')
    data_policies_json = glob.glob(os.path.join(data, '*.json'))
    for filename in data_policies_json:
        click.echo('Loading data-policies from {0} ...'.format(filename))
        with open(filename, 'rb') as source:
            for data in json.load(source):
                files = data.pop('files', [])
                id = uuid.uuid4()
                cernopendata_recid_minter(id, data)
                data['$schema'] = schema
                record = Record.create(data, id_=id)
                bucket = Bucket.create()
                RecordsBuckets.create(
                    record=record.model, bucket=bucket)
                for file in files:
                    if skip_files:
                        break
                    assert 'uri' in file
                    assert 'size' in file
                    assert 'checksum' in file
                    f = FileInstance.create()
                    filename = file.get("uri").split('/')[-1:][0]
                    f.set_uri(file.get("uri"), file.get(
                        "size"), file.get("checksum"))
                    ObjectVersion.create(
                        bucket,
                        filename,
                        _file_id=f.id
                    )
                db.session.commit()
                indexer.index(record)
                db.session.expunge_all()
Example 8: test_file_download_ui
def test_file_download_ui(app, objects):
    """Test get buckets."""
    app.config.update(dict(
        FILES_REST_PERMISSION_FACTORY=lambda *a, **kw: type(
            'Allow', (object, ), {'can': lambda self: True}
        )(),
        RECORDS_UI_DEFAULT_PERMISSION_FACTORY=None,  # No permission checking
        RECORDS_UI_ENDPOINTS=dict(
            recid=dict(
                pid_type='recid',
                route='/records/<pid_value>',
            ),
            recid_files=dict(
                pid_type='recid',
                route='/records/<pid_value>/files/<filename>',
                view_imp='invenio_records_files.utils:file_download_ui',
                record_class='invenio_records_files.api:Record',
            ),
        )
    ))
    InvenioRecordsUI(app)
    obj1 = objects[0]
    with app.test_request_context():
        # Record 1 - Live record
        rec_uuid = uuid.uuid4()
        PersistentIdentifier.create(
            'recid', '1', object_type='rec', object_uuid=rec_uuid,
            status=PIDStatus.REGISTERED)
        record = Record.create({
            'title': 'Registered',
            'recid': 1,
            '_files': [
                {'key': obj1.key, 'bucket': str(obj1.bucket_id),
                 'checksum': 'invalid'},
            ]
        }, id_=rec_uuid)
        RecordsBuckets.create(record=record.model, bucket=obj1.bucket)
        db.session.commit()
        main_url = url_for('invenio_records_ui.recid', pid_value='1')
        file_url = url_for(
            'invenio_records_ui.recid_files', pid_value='1', filename=obj1.key)
        no_file_url = url_for(
            'invenio_records_ui.recid_files', pid_value='1', filename='')
        invalid_file_url = url_for(
            'invenio_records_ui.recid_files', pid_value='1', filename='no')
    with app.test_client() as client:
        res = client.get(main_url)
        assert res.status_code == 200
        res = client.get(file_url)
        assert res.status_code == 200
        res = client.get(no_file_url)
        assert res.status_code == 404
        res = client.get(invalid_file_url)
        assert res.status_code == 404
Example 9: record_with_bucket
def record_with_bucket(full_record, bucket, db):
    """Create a record linked to a bucket."""
    record = RecordFile.create(full_record)
    RecordsBuckets.create(bucket=bucket, record=record.model)
    pid = PersistentIdentifier.create(
        pid_type='recid', pid_value=12345, object_type='rec',
        object_uuid=record.id, status='R')
    db.session.commit()
    return pid, record
Example 10: record
def record(app, db):
    """Create a record."""
    record = {
        'title': 'fuu'
    }
    record = Record.create(record)
    record.commit()
    db.session.commit()
    return record
Example 11: test_RecordSIP_create
def test_RecordSIP_create(db, mocker):
    """Test create method from the API class RecordSIP."""
    # we setup a file storage
    tmppath = tempfile.mkdtemp()
    db.session.add(Location(name='default', uri=tmppath, default=True))
    # setup metadata
    mtype = SIPMetadataType(title='JSON Test', name='json-test',
                            format='json', schema='url://to/schema')
    db.session.add(mtype)
    db.session.commit()
    # first we create a record
    recid = uuid.uuid4()
    pid = PersistentIdentifier.create(
        'recid',
        '1337',
        object_type='rec',
        object_uuid=recid,
        status=PIDStatus.REGISTERED)
    mocker.patch('invenio_records.api.RecordBase.validate',
                 return_value=True, autospec=True)
    record = Record.create(
        {'title': 'record test', '$schema': 'url://to/schema'},
        recid)
    # we add a file to the record
    bucket = Bucket.create()
    content = b'Test file\n'
    RecordsBuckets.create(record=record.model, bucket=bucket)
    record.files['test.txt'] = BytesIO(content)
    db.session.commit()
    # Let's create a SIP
    user = create_test_user('[email protected]')
    agent = {'email': '[email protected]', 'ip_address': '1.1.1.1'}
    rsip = RecordSIP.create(pid, record, True, user_id=user.id, agent=agent)
    db.session.commit()
    # test!
    assert RecordSIP_.query.count() == 1
    assert SIP_.query.count() == 1
    assert SIPFile.query.count() == 1
    assert SIPMetadata.query.count() == 1
    assert len(rsip.sip.files) == 1
    assert len(rsip.sip.metadata) == 1
    metadata = rsip.sip.metadata[0]
    assert metadata.type.format == 'json'
    assert '"title": "record test"' in metadata.content
    assert rsip.sip.archivable is True
    # we try with no files
    rsip = RecordSIP.create(pid, record, True, create_sip_files=False,
                            user_id=user.id, agent=agent)
    assert SIPFile.query.count() == 1
    assert SIPMetadata.query.count() == 2
    assert len(rsip.sip.files) == 0
    assert len(rsip.sip.metadata) == 1
    # finalization
    rmtree(tmppath)
Example 12: publish
def publish(self, pid=None, id_=None):
    """Publish a deposit."""
    pid = pid or self.pid
    if not pid.is_registered():
        raise PIDInvalidAction()
    self['_deposit']['status'] = 'published'
    if self['_deposit'].get('pid') is None:  # First publishing
        minter = current_pidstore.minters[
            current_app.config['DEPOSIT_PID_MINTER']
        ]
        id_ = id_ or uuid.uuid4()
        record_pid = minter(id_, self)
        self['_deposit']['pid'] = {
            'type': record_pid.pid_type, 'value': record_pid.pid_value,
            'revision_id': 0,
        }
        data = dict(self.dumps())
        data['$schema'] = self.record_schema

        # During first publishing create snapshot the bucket.
        @contextmanager
        def process_files(data):
            """Process deposit files."""
            if self.files and self.files.bucket:
                assert not self.files.bucket.locked
                self.files.bucket.locked = True
                snapshot = self.files.bucket.snapshot(lock=True)
                data['_files'] = self.files.dumps(bucket=snapshot.id)
                yield data
                db.session.add(RecordsBuckets(
                    record_id=id_, bucket_id=snapshot.id
                ))
            else:
                yield data

        with process_files(data) as data:
            record = Record.create(data, id_=id_)
    else:  # Update after edit
        record_pid, record = self.fetch_published()
        # TODO add support for patching
        assert record.revision_id == self['_deposit']['pid']['revision_id']
        data = dict(self.dumps())
        data['$schema'] = self.record_schema
        record = record.__class__(data, model=record.model)
        record.commit()
    self.commit()
    return self
Example 13: record
def record(db):
    """Record fixture."""
    rec_uuid = uuid.uuid4()
    provider = RecordIdProvider.create(
        object_type='rec', object_uuid=rec_uuid)
    record = Record.create({
        'control_number': provider.pid.pid_value,
        'title': 'TestDefault',
    }, id_=rec_uuid)
    db.session.commit()
    return record
Example 14: check_dois
def check_dois(record, allrecords, update):
    """Check that DOIs of records in the current instance are registered."""
    if record:
        record = Record.get_record(record)
        check_record_doi(record, update)
    elif allrecords:
        click.secho('checking DOI for all records')
        for record in list_db_published_records():
            check_record_doi(record, update)
    else:
        raise click.ClickException('Either -r or -a option must be selected')
Example 15: check_embargo
def check_embargo(record_id, is_embargoed):
    with app.app_context():
        with app.test_client() as client:
            login_user(non_creator, client)
            # test open_access field in record's metadata
            record = Record.get_record(record_id)
            assert record['open_access'] != is_embargoed
            # test record's file access
            subtest_file_bucket_permissions(
                client, record.files.bucket,
                access_level=None if is_embargoed else 'read',
                is_authenticated=True
            )