This article collects typical usage examples of the inspect_table function from Python's mediagoblin.db.migration_tools module. If you are unsure what inspect_table does or how to call it, the curated code examples below should help.
15 code examples of inspect_table are shown below, ordered by popularity.
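For context, inspect_table is a small helper around SQLAlchemy table reflection: given a MetaData bound to a live database, it returns a Table object describing an existing table. A minimal sketch of what it does, assuming SQLAlchemy's classic autoload API (the real helper lives in mediagoblin/db/migration_tools.py):

from sqlalchemy import Table

def inspect_table(metadata, table_name):
    """Return a Table reflected from the existing database."""
    return Table(table_name, metadata, autoload=True,
                 autoload_with=metadata.bind)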
Example 1: add_location_model
def add_location_model(db):
    """ Add location model """
    metadata = MetaData(bind=db.bind)

    # Create location table
    Location_V0.__table__.create(db.bind)
    db.commit()

    # Inspect the tables we need
    user = inspect_table(metadata, "core__users")
    collections = inspect_table(metadata, "core__collections")
    media_entry = inspect_table(metadata, "core__media_entries")
    media_comments = inspect_table(metadata, "core__media_comments")

    # Now add location support to the various models
    col = Column("location", Integer, ForeignKey(Location_V0.id))
    col.create(user)

    col = Column("location", Integer, ForeignKey(Location_V0.id))
    col.create(collections)

    col = Column("location", Integer, ForeignKey(Location_V0.id))
    col.create(media_entry)

    col = Column("location", Integer, ForeignKey(Location_V0.id))
    col.create(media_comments)

    db.commit()
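Migration functions like this one are not called directly; MediaGoblin collects them in a registry keyed by a schema version number. A hedged sketch of the registration pattern, using the RegisterMigration decorator from the same module (the version number 1 is illustrative, not the real one):

from mediagoblin.db.migration_tools import RegisterMigration, inspect_table

MIGRATIONS = {}

@RegisterMigration(1, MIGRATIONS)
def add_location_model(db):
    """ Add location model """
    # ... body as shown in Example 1 above ...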
Example 2: upgrade
def upgrade():
    """
    Removes comments which have been deleted and exist as a tombstone but
    still have their Comment wrapper.
    """
    db = op.get_bind()
    metadata = MetaData(bind=db)
    comment_table = inspect_table(metadata, "core__comment_links")
    gmr_table = inspect_table(metadata, "core__generic_model_reference")

    # Get the Comment wrappers
    comment_wrappers = list(db.execute(comment_table.select()))

    for wrapper in comment_wrappers:
        # Query for the graveyard GMR of the comment
        gmr = db.execute(gmr_table.select().where(and_(
            gmr_table.c.id == wrapper.comment_id,
            gmr_table.c.model_type == "core__graveyard"
        ))).first()

        if gmr is not None:
            # This wrapper points at a deleted comment, so remove it
            db.execute(comment_table.delete().where(
                comment_table.c.id == wrapper.id
            ))
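Unlike most of the examples here, this upgrade() function is an Alembic migration: op.get_bind() hands back the connection the revision is running on. A sketch of the imports such a revision file would need (assumed; they are not shown in the snippet):

from alembic import op
from sqlalchemy import MetaData, and_

from mediagoblin.db.migration_tools import inspect_table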
Example 3: upgrade
def upgrade():
    """
    Deletions are occurring, and as expected the GenericModelReference
    objects are being updated to point to the tombstone object. The issue
    is that collections now contain deleted items, which causes problems
    when it comes to rendering them, for example.

    This migration removes any Graveyard objects (tombstones) from any
    Collection.
    """
    db = op.get_bind()
    metadata = MetaData(bind=db)
    gmr_table = inspect_table(metadata, "core__generic_model_reference")
    collection_items_table = inspect_table(metadata, "core__collection_items")
    graveyard_table = inspect_table(metadata, "core__graveyard")

    res = list(db.execute(graveyard_table.select()))
    for tombstone in res:
        # Get the GMR for the tombstone
        gmr = db.execute(gmr_table.select().where(and_(
            gmr_table.c.obj_pk == tombstone.id,
            gmr_table.c.model_type == "core__graveyard"
        ))).first()

        # A GMR is required for an object to be in a collection, so if
        # there is no GMR there is nothing to clean up.
        if gmr is None:
            continue

        # Delete all the CollectionItem objects for this GMR
        db.execute(collection_items_table.delete().where(
            collection_items_table.c.object_id == gmr.id
        ))
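The core__generic_model_reference table implements a generic foreign key: a row is addressed by an (obj_pk, model_type) pair rather than by a conventional foreign-key column. A hedged sketch of the table shape assumed by the queries above (the column names come from the code; the types are assumptions):

from sqlalchemy import Table, Column, Integer, Unicode, MetaData

metadata = MetaData()

generic_model_reference = Table(
    "core__generic_model_reference", metadata,
    Column("id", Integer, primary_key=True),
    Column("obj_pk", Integer),      # primary key of the referenced row
    Column("model_type", Unicode),  # table name of the referenced model
)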
Example 4: add_new_notification_tables
def add_new_notification_tables(db):
    metadata = MetaData(bind=db.bind)

    # Inspect the existing tables that the new notification models
    # reference
    user_table = inspect_table(metadata, 'core__users')
    mediaentry_table = inspect_table(metadata, 'core__media_entries')
    mediacomment_table = inspect_table(metadata, 'core__media_comments')

    CommentSubscription_v0.__table__.create(db.bind)
    Notification_v0.__table__.create(db.bind)
    CommentNotification_v0.__table__.create(db.bind)
    ProcessingNotification_v0.__table__.create(db.bind)
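The *_v0 classes used here follow a convention worth noting: migrations define frozen copies of models (suffixed _v0) so the migration keeps working even after the live models change. A hedged sketch of the pattern, with an invented column list purely for illustration:

from sqlalchemy import Column, Integer, Boolean, DateTime, ForeignKey
from sqlalchemy.ext.declarative import declarative_base

Base_v0 = declarative_base()

class Notification_v0(Base_v0):
    __tablename__ = 'core__notifications'
    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey('core__users.id'))
    seen = Column(Boolean, default=False)
    created = Column(DateTime)
    # ... columns frozen at the time the migration was written ...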
Example 5: remove_gps_from_image
def remove_gps_from_image(db):
    """
    This removes GPS coordinates from the image model and puts them
    on the new Location model.
    """
    metadata = MetaData(bind=db.bind)
    image_table = inspect_table(metadata, "image__mediadata")
    location_table = inspect_table(metadata, "core__locations")
    media_entries_table = inspect_table(metadata, "core__media_entries")

    # First do the data migration
    for row in db.execute(image_table.select()):
        fields = {
            "longitude": row.gps_longitude,
            "latitude": row.gps_latitude,
            "altitude": row.gps_altitude,
            "direction": row.gps_direction,
        }

        # Remove empty values. Build a new dict rather than deleting keys
        # while iterating, which raises RuntimeError on Python 3.
        fields = {k: v for k, v in fields.items() if v is not None}

        # No point in adding empty locations
        if not fields:
            continue

        # JSONEncoded is really just a json.dumps'd string field; without
        # the ORM we're responsible for the encoding ourselves.
        fields = json.dumps(fields)
        location = db.execute(location_table.insert().values(position=fields))

        # Now store the new location on the media entry
        db.execute(
            media_entries_table.update()
            .values(location=location.inserted_primary_key[0])
            .where(media_entries_table.c.id == row.media_entry)
        )

    db.commit()

    # All the data has been migrated across, so remove the old fields
    image_table.columns["gps_longitude"].drop()
    image_table.columns["gps_latitude"].drop()
    image_table.columns["gps_altitude"].drop()
    image_table.columns["gps_direction"].drop()
    db.commit()
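The JSONEncoded type mentioned in the comment is, on the ORM side, a SQLAlchemy TypeDecorator that encodes to JSON on the way into the database and decodes on the way out. A minimal sketch of that pattern (MediaGoblin's real definition lives in mediagoblin/db/extratypes.py; treat this as a reconstruction, not the exact code):

import json

from sqlalchemy.types import TypeDecorator, VARCHAR

class JSONEncoded(TypeDecorator):
    """Represents a structure as a json-encoded string."""
    impl = VARCHAR

    def process_bind_param(self, value, dialect):
        # Python object -> JSON string going into the database
        return json.dumps(value) if value is not None else None

    def process_result_value(self, value, dialect):
        # JSON string -> Python object coming out of the database
        return json.loads(value) if value is not None else None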
Example 6: fix_CollectionItem_v0_constraint
def fix_CollectionItem_v0_constraint(db_conn):
    """Add the forgotten Constraint on CollectionItem"""
    global collectionitem_unique_constraint_done
    if collectionitem_unique_constraint_done:
        # Reset the flag in case the whole migration suite is run again
        # for a different database.
        collectionitem_unique_constraint_done = False
        return

    metadata = MetaData(bind=db_conn.bind)
    CollectionItem_table = inspect_table(metadata, 'core__collection_items')

    constraint = UniqueConstraint(
        'collection', 'media_entry',
        name='core__collection_items_collection_media_entry_key',
        table=CollectionItem_table)

    try:
        constraint.create()
    except ProgrammingError:
        # The user probably has an install created after the collection
        # tables already included this constraint, so there is nothing
        # for this migration to do.
        pass

    db_conn.commit()
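One non-obvious detail: plain SQLAlchemy Column and UniqueConstraint objects have no create() method. The col.create(...) and constraint.create() calls throughout these examples come from the sqlalchemy-migrate package, whose changeset extension adds create()/drop() support to schema objects. The import lines below are an assumption about what the surrounding module pulls in, not something shown in the snippets:

from sqlalchemy import MetaData, Column, Integer, Boolean, Unicode
from sqlalchemy.exc import ProgrammingError

# sqlalchemy-migrate: importing its changeset machinery is what gives
# Column objects a .create() method; its UniqueConstraint supports
# .create()/.drop() against a live table.
from migrate.changeset.constraint import UniqueConstraint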
Example 7: wants_notifications
def wants_notifications(db):
    """Add a wants_notifications field to User model"""
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, "core__users")
    col = Column('wants_notifications', Boolean, default=True)
    col.create(user_table)
    db.commit()
Example 8: unique_collections_slug
def unique_collections_slug(db):
    """Add unique constraint to collection slug"""
    metadata = MetaData(bind=db.bind)
    collection_table = inspect_table(metadata, "core__collections")
    existing_slugs = {}
    slugs_to_change = []

    for row in db.execute(collection_table.select()):
        # If this slug duplicates one we've already seen for the same
        # creator, remember the row so we can give it a fresh unique slug.
        if (row.creator in existing_slugs and
                row.slug in existing_slugs[row.creator]):
            slugs_to_change.append(row.id)
        elif row.creator not in existing_slugs:
            existing_slugs[row.creator] = [row.slug]
        else:
            existing_slugs[row.creator].append(row.slug)

    for row_id in slugs_to_change:
        new_slug = unicode(uuid.uuid4())  # Python 2 API; str() on Python 3
        db.execute(collection_table.update().
                   where(collection_table.c.id == row_id).
                   values(slug=new_slug))

    # SQLite does not like schema changes while an (update) transaction
    # is still open, so commit first.
    db.commit()

    constraint = UniqueConstraint(
        'creator', 'slug',
        name='core__collection_creator_slug_key',
        table=collection_table)
    constraint.create()
    db.commit()
Example 9: upload_limits
def upload_limits(db):
    """Add user upload limit columns"""
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, 'core__users')
    media_entry_table = inspect_table(metadata, 'core__media_entries')

    col = Column('uploaded', Integer, default=0)
    col.create(user_table)

    col = Column('upload_limit', Integer)
    col.create(user_table)

    col = Column('file_size', Integer, default=0)
    col.create(media_entry_table)

    db.commit()
Example 10: add_transcoding_progress
def add_transcoding_progress(db_conn):
    metadata = MetaData(bind=db_conn.bind)
    media_entry = inspect_table(metadata, 'core__media_entries')

    col = Column('transcoding_progress', SmallInteger)
    col.create(media_entry)
    db_conn.commit()
Example 11: add_mediaentry_collected
def add_mediaentry_collected(db_conn):
    metadata = MetaData(bind=db_conn.bind)
    media_entry = inspect_table(metadata, 'core__media_entries')

    col = Column('collected', Integer, default=0)
    col.create(media_entry)
    db_conn.commit()
Example 12: add_license_preference
def add_license_preference(db):
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, 'core__users')

    col = Column('license_preference', Unicode)
    col.create(user_table)
    db.commit()
Example 13: add_file_metadata
def add_file_metadata(db):
    """Add file_metadata to MediaFile"""
    metadata = MetaData(bind=db.bind)
    media_file_table = inspect_table(metadata, "core__mediafiles")

    col = Column('file_metadata', MutationDict.as_mutable(JSONEncoded))
    col.create(media_file_table)
    db.commit()
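MutationDict here is MediaGoblin's mutable-dict wrapper (also in mediagoblin/db/extratypes.py), built on SQLAlchemy's mutation-tracking extension so that in-place changes to the stored dict mark the row dirty. A hedged sketch of the standard pattern it follows:

from sqlalchemy.ext.mutable import Mutable

class MutationDict(Mutable, dict):
    @classmethod
    def coerce(cls, key, value):
        """Convert plain dictionaries to MutationDict."""
        if not isinstance(value, MutationDict):
            if isinstance(value, dict):
                return MutationDict(value)
            return Mutable.coerce(key, value)
        return value

    def __setitem__(self, key, value):
        """Detect dictionary set events and emit change events."""
        dict.__setitem__(self, key, value)
        self.changed()

    def __delitem__(self, key):
        """Detect dictionary del events and emit change events."""
        dict.__delitem__(self, key)
        self.changed()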
Example 14: add_orig_metadata_column
def add_orig_metadata_column(db_conn):
    metadata = MetaData(bind=db_conn.bind)
    vid_data = inspect_table(metadata, "video__mediadata")

    col = Column('orig_metadata', Unicode,
                 default=None, nullable=True)
    col.create(vid_data)
    db_conn.commit()
Example 15: change_metadata_format
def change_metadata_format(db):
    """Change orig_metadata format for multi-stream a-v"""
    db_metadata = MetaData(bind=db.bind)
    vid_data = inspect_table(db_metadata, "video__mediadata")

    for row in db.execute(vid_data.select()):
        if not row.orig_metadata:
            continue

        metadata = json.loads(row.orig_metadata)

        # Before this migration there was info about only one video and
        # one audio stream, so we store the existing info as the first
        # item in each list.
        new_metadata = {'audio': [], 'video': [], 'common': {}}

        video_key_map = {  # old: new
            'videoheight': 'height',
            'videowidth': 'width',
            'videorate': 'rate',
        }
        audio_key_map = {  # old: new
            'audiochannels': 'channels',
        }
        common_key_map = {  # old: new
            'videolength': 'length',
        }

        new_metadata['video'] = [dict((v, metadata.get(k))
                                      for k, v in video_key_map.items()
                                      if metadata.get(k))]
        new_metadata['audio'] = [dict((v, metadata.get(k))
                                      for k, v in audio_key_map.items()
                                      if metadata.get(k))]
        new_metadata['common'] = dict((v, metadata.get(k))
                                      for k, v in common_key_map.items()
                                      if metadata.get(k))

        # 'mimetype' should be in tags
        new_metadata['common']['tags'] = {'mimetype': metadata.get('mimetype')}

        if 'tags' in metadata:
            new_metadata['video'][0]['tags'] = {}
            new_metadata['audio'][0]['tags'] = {}

            tags = metadata['tags']
            video_keys = ['encoder', 'encoder-version', 'video-codec']
            audio_keys = ['audio-codec']

            for t, v in tags.items():
                if t in video_keys:
                    new_metadata['video'][0]['tags'][t] = tags[t]
                elif t in audio_keys:
                    new_metadata['audio'][0]['tags'][t] = tags[t]
                else:
                    new_metadata['common']['tags'][t] = tags[t]

        db.execute(vid_data.update()
                   .where(vid_data.c.media_entry == row.media_entry)
                   .values(orig_metadata=json.dumps(new_metadata)))

    db.commit()
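To make the remapping concrete, here is a before/after sketch of what the loop above does to one row, with invented values:

# Hypothetical pre-migration orig_metadata (values invented):
old = {
    'videoheight': 480, 'videowidth': 640, 'videorate': 30,
    'audiochannels': 2, 'videolength': 120, 'mimetype': 'video/webm',
    'tags': {'encoder': 'VP8 Encoder', 'audio-codec': 'Vorbis',
             'artist': 'someone'},
}

# What the migration would produce from it:
new = {
    'video': [{'height': 480, 'width': 640, 'rate': 30,
               'tags': {'encoder': 'VP8 Encoder'}}],
    'audio': [{'channels': 2, 'tags': {'audio-codec': 'Vorbis'}}],
    'common': {'length': 120,
               'tags': {'mimetype': 'video/webm', 'artist': 'someone'}},
}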