This article collects typical usage examples of the commit_transaction function from Python's south.db.db module. If you have been wondering what exactly commit_transaction does and how to use it, the hand-picked code samples below should help.
The following shows 15 code examples of the commit_transaction function, sorted by popularity by default.
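All of the examples share one idiom: south.db.db exposes start_transaction(), commit_transaction() and rollback_transaction(), and schema or data operations are wrapped between them. Below is a minimal sketch of that pattern, not taken from any of the projects listed here; the helper name and the SQL statement are made up for illustration.

from south.db import db

def apply_change():
    # Hypothetical helper illustrating the common South transaction pattern:
    # start a transaction, do the work, commit on success, roll back on error.
    db.start_transaction()
    try:
        db.execute("UPDATE myapp_item SET flag = TRUE")  # made-up table and statement
        db.execute_deferred_sql()  # run any SQL South has deferred (e.g. FK constraints)
    except Exception:
        db.rollback_transaction()
        raise
    else:
        db.commit_transaction()

Several of the examples (e.g. Example 1 and Example 7) also call commit_transaction() and then start_transaction() again right away, splitting schema changes and data updates into separate transactions; as the comment in Example 14 notes, PostgreSQL needs a data migration and an alter operation to run in separate transactions.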
Example 1: backwards
def backwards(self, orm):
    # Adding field 'Dataset.typed_columns'
    db.add_column('panda_dataset', 'typed_columns', self.gf('panda.fields.JSONField')(default=None, null=True), keep_default=False)

    # Adding field 'Dataset.column_types'
    db.add_column('panda_dataset', 'column_types', self.gf('panda.fields.JSONField')(default=None, null=True), keep_default=False)

    # Adding field 'Dataset.typed_column_names'
    db.add_column('panda_dataset', 'typed_column_names', self.gf('panda.fields.JSONField')(default=None, null=True), keep_default=False)

    # Adding field 'Dataset.columns'
    db.add_column('panda_dataset', 'columns', self.gf('panda.fields.JSONField')(default=None, null=True), keep_default=False)

    db.commit_transaction()  # Commit the first transaction
    db.start_transaction()   # Start the second, committed on completion

    if not db.dry_run:
        for dataset in orm.Dataset.objects.all():
            columns = []
            typed_columns = []
            column_types = []
            typed_column_names = []

            for schema in dataset.column_schema:
                columns.append(schema['name'])
                typed_columns.append(schema['indexed'])
                column_types.append(schema['type'])
                typed_column_names.append(schema['indexed_name'])

            dataset.columns = columns
            dataset.typed_columns = typed_columns
            dataset.column_types = column_types
            dataset.typed_column_names = typed_column_names

            dataset.save()
Developer: NUKnightLab, Project: panda, Lines: 35, Source: 0007_auto__del_field_dataset_typed_columns__del_field_dataset_column_types_.py
Example 2: drop_db_model
def drop_db_model(self, django_class):
    """ Migrate the DB to remove a single model. """
    # Drop the table. Also force a commit, or we'll have trouble with
    # pending triggers in future operations.
    table_name = django_class._meta.db_table

    db.start_transaction()
    db.delete_table(table_name)
    db.commit_transaction()
Example 3: _class_prepared_handler
def _class_prepared_handler(sender, **kwargs):
    """ Signal handler for class_prepared.
    This will be run for every model, looking for the moment when all
    dependent models are prepared for the first time. It will then run
    the given function, only once.
    """
    sender_app = sender._meta.app_label.lower() + '.' + sender._meta.object_name
    already_prepared = set([sender_app])
    for app, models in app_cache.app_models.items():
        for model_name, model in models.items():
            already_prepared.add(app.lower() + '.' + model_name)

    if all([x in already_prepared for x in dependencies]):
        db.start_transaction()
        try:
            # We need to disconnect, otherwise each new dynamo model generation
            # will trigger it and cause a "maximum recursion error"
            class_prepared.disconnect(_class_prepared_handler, weak=False)
            fn()
        except DatabaseError, message:
            # If tables are missing altogether, not much we can do
            # until syncdb/migrate is run. "The code must go on" in this
            # case, without running our function completely. At least
            # database operations will be rolled back.
            db.rollback_transaction()
            # Better connect again
            if message != 'no such table: dynamo_metamodel':
                class_prepared.connect(_class_prepared_handler, weak=False)
            else:
                raise
        else:
            db.commit_transaction()
Example 4: forwards
def forwards(self, orm):
    db.start_transaction()
    db.clear_table('lizard_rainapp_rainvalue')
    db.clear_table('lizard_rainapp_completerainvalue')
    db.clear_table('lizard_rainapp_geoobject')
    db.commit_transaction()

    # Adding model 'RainappConfig'
    db.create_table('lizard_rainapp_rainappconfig', (
        ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
        ('name', self.gf('django.db.models.fields.CharField')(max_length=128)),
        ('jdbcsource', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['lizard_fewsjdbc.JdbcSource'])),
        ('filter_id', self.gf('django.db.models.fields.CharField')(max_length=128)),
    ))
    db.send_create_signal('lizard_rainapp', ['RainappConfig'])

    # Adding field 'RainValue.config'
    db.add_column('lizard_rainapp_rainvalue', 'config', self.gf('django.db.models.fields.related.ForeignKey')(default=1, to=orm['lizard_rainapp.RainappConfig']), keep_default=False)

    # Adding field 'CompleteRainValue.config'
    db.add_column('lizard_rainapp_completerainvalue', 'config', self.gf('django.db.models.fields.related.ForeignKey')(default=1, to=orm['lizard_rainapp.RainappConfig']), keep_default=False)

    # Deleting field 'GeoObject.filterkey'
    db.delete_column('lizard_rainapp_geoobject', 'filterkey')

    # Adding field 'GeoObject.config'
    db.add_column('lizard_rainapp_geoobject', 'config', self.gf('django.db.models.fields.related.ForeignKey')(default=1, to=orm['lizard_rainapp.RainappConfig']), keep_default=False)
Developer: lizardsystem, Project: lizard-rainapp, Lines: 27, Source: 0004_auto__add_rainappconfig__add_field_rainvalue_config__add_field_complet.py
Example 5: run_forwards
def run_forwards(app, migrations, fake=False, db_dry_run=False, silent=False):
    """
    Runs the specified migrations forwards, in order.
    """
    for migration in migrations:
        app_name = get_app_name(app)
        if not silent:
            print " > %s: %s" % (app_name, migration)
        klass = get_migration(app, migration)
        if fake:
            if not silent:
                print " (faked)"
        else:
            if db_dry_run:
                db.dry_run = True
            db.start_transaction()
            try:
                klass().forwards()
                db.execute_deferred_sql()
            except:
                db.rollback_transaction()
                raise
            else:
                db.commit_transaction()
        if not db_dry_run:
            # Record us as having done this
            record = MigrationHistory.for_migration(app_name, migration)
            record.applied = datetime.datetime.utcnow()
            record.save()
Example 6: forwards
def forwards(self, orm):
    "Write your forwards methods here."
    db.start_transaction()
    self.set_foreign_key_checks(False)

    # asset
    for model in orm.AssetModel.objects.all():
        new_model = orm.PricingObjectModel.objects.create(
            model_id=model.model_id,
            name=model.name,
            manufacturer=model.manufacturer,
            category=model.category,
            type_id=1,
        )
        model.assetinfo_set.update(model=new_model)

    # tenant
    for model in orm.TenantGroup.objects.all():
        new_model = orm.PricingObjectModel.objects.create(
            name=model.name,
            model_id=model.group_id,
            type_id=3,
        )
        model.tenants.update(model=new_model)

    # move base usages over 100
    self.bumped_auto_increment(101 + orm.BaseUsage.objects.count())
    self.update_usage_id()

    self.set_foreign_key_checks(True)
    db.commit_transaction()
Example 7: forwards
def forwards(self, orm):
    # Removing index on 'CaseActionData', fields ['action_type']
    db.execute("DROP INDEX IF EXISTS sofabed_caseactiondata_action_type_like")
    db.execute("DROP INDEX IF EXISTS sofabed_caseactiondata_case_id_like")
    db.execute("DROP INDEX IF EXISTS sofabed_caseactiondata_user_id_like")
    db.commit_transaction()

    db.start_transaction()
    db.execute("DROP INDEX IF EXISTS sofabed_casedata_case_id_like")
    db.execute("DROP INDEX IF EXISTS sofabed_casedata_closed_by_like")
    db.execute("DROP INDEX IF EXISTS sofabed_casedata_doc_type_like")
    db.execute("DROP INDEX IF EXISTS sofabed_casedata_domain_like")
    db.execute("DROP INDEX IF EXISTS sofabed_casedata_modified_by_like")
    db.execute("DROP INDEX IF EXISTS sofabed_casedata_opened_by_like")
    db.execute("DROP INDEX IF EXISTS sofabed_casedata_owner_id_like")
    db.execute("DROP INDEX IF EXISTS sofabed_casedata_type_like")
    db.execute("DROP INDEX IF EXISTS sofabed_casedata_user_id_like")
    db.execute("DROP INDEX IF EXISTS sofabed_casedata_version_like")
    db.commit_transaction()

    db.start_transaction()
    db.execute("DROP INDEX IF EXISTS sofabed_caseindexdata_case_id_like")
    db.execute("DROP INDEX IF EXISTS sofabed_caseindexdata_identifier_like")
    db.execute("DROP INDEX IF EXISTS sofabed_caseindexdata_referenced_type_like")
Example 8: run_backwards
def run_backwards(app, migrations, ignore=[], fake=False, db_dry_run=False, silent=False):
    """
    Runs the specified migrations backwards, in order, skipping those
    migrations in 'ignore'.
    """
    for migration in migrations:
        if migration not in ignore:
            app_name = get_app_name(app)
            if not silent:
                print " < %s: %s" % (app_name, migration)
            klass = get_migration(app, migration)
            if fake:
                if not silent:
                    print " (faked)"
            else:
                if db_dry_run:
                    db.dry_run = True
                db.start_transaction()
                try:
                    klass().backwards()
                    db.execute_deferred_sql()
                except:
                    db.rollback_transaction()
                    raise
                else:
                    db.commit_transaction()
            if not db_dry_run:
                # Record us as having not done this
                record = MigrationHistory.for_migration(app_name, migration)
                record.delete()
Example 9: test_dry_rename
def test_dry_rename(self):
    """
    Test column renaming while --dry-run is turned on (should do nothing)
    See ticket #65
    """
    cursor = connection.cursor()
    db.create_table("test_drn", [('spam', models.BooleanField(default=False))])

    # Make sure we can select the column
    cursor.execute("SELECT spam FROM test_drn")

    # Rename it
    db.dry_run = True
    db.rename_column("test_drn", "spam", "eggs")
    db.dry_run = False
    cursor.execute("SELECT spam FROM test_drn")

    db.commit_transaction()
    db.start_transaction()
    try:
        cursor.execute("SELECT eggs FROM test_drn")
    except:
        pass
    else:
        self.fail("Dry-renamed new column could be selected!")
    db.rollback_transaction()
    db.delete_table("test_drn")
    db.start_transaction()
Example 10: forwards
def forwards(self, orm):
    # Removing unique constraint on 'BadgeData', fields ['type', 'name']
    db.delete_unique('askbot_badgedata', ['type', 'name'])

    # Deleting field 'BadgeData.multiple'
    db.delete_column('askbot_badgedata', 'multiple')

    # Deleting field 'BadgeData.description'
    db.delete_column('askbot_badgedata', 'description')

    # Deleting field 'BadgeData.type'
    db.delete_column('askbot_badgedata', 'type')

    # Deleting field 'BadgeData.name'
    db.delete_column('askbot_badgedata', 'name')

    # Changing field 'BadgeData.slug'
    db.alter_column('askbot_badgedata', 'slug', self.gf('django.db.models.fields.SlugField')(unique=True, max_length=50))

    # Adding unique constraint on 'BadgeData', fields ['slug']
    return
    try:  # work around the South 0.7.3 bug
        db.start_transaction()
        db.create_unique('askbot_badgedata', ['slug'])
        db.commit_transaction()
    except:
        db.rollback_transaction()
Developer: AndurilLi, Project: askbot-devel, Lines: 28, Source: 0032_auto__del_field_badgedata_multiple__del_field_badgedata_description__d.py
Example 11: forwards
def forwards(self, orm):
    try:
        # Deleting model 'Log'
        db.delete_table('eff_log')

        db.add_column('eff_client', 'external_source', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['eff.ExternalSource'], null=True), keep_default=False)
        db.add_column('eff_client', 'external_id', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True), keep_default=False)

        db.start_transaction()
        for cli in Client.objects.all():
            cli.external_id = cli.name
            cli_proj = Project.objects.filter(client=cli)
            if cli_proj:
                ext_src_id = db.execute("select external_source_id from eff_project where id=%s" % cli_proj[0].id)[0][0]
                cli.external_source = ExternalSource.objects.get(id=ext_src_id)
            else:
                cli.external_source = ExternalSource.objects.get(name="DotprojectMachinalis")
            cli.save()
        db.commit_transaction()

        # Deleting field 'Project.external_source'
        db.delete_column('eff_project', 'external_source_id')

        # Deleting field 'Project.log'
        db.delete_column('eff_project', 'log_id')
    except:
        pass
Developer: emilioramirez, Project: eff, Lines: 29, Source: 0008_auto__del_log__del_field_project_external_source__del_field_project_lo.py
Example 12: create_auto_m2m_tables
def create_auto_m2m_tables(model_class):
    " Create tables for ManyToMany fields "
    for f in model_class._meta.many_to_many:
        if f.rel.through:
            try:
                # Django 1.2+
                through = f.rel.through
            except AttributeError:
                # Django 1.1 and below
                through = f.rel.through_model
        if (not f.rel.through) or getattr(through._meta, "auto_created", None):
            # Create the standard implied M2M table
            m2m_table_name = f.m2m_db_table()
            if (connection.introspection.table_name_converter(m2m_table_name)
                    not in connection.introspection.table_names()):
                db.start_transaction()
                m2m_column_name = f.m2m_column_name()[:-3]  # without "_id"
                m2m_reverse_name = f.m2m_reverse_name()[:-3]  # without "_id"
                db.create_table(f.m2m_db_table(), (
                    ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
                    (m2m_column_name, models.ForeignKey(model_class, null=False)),
                    (m2m_reverse_name, models.ForeignKey(f.rel.to, null=False))
                ))
                db.create_unique(f.m2m_db_table(), [f.m2m_column_name(), f.m2m_reverse_name()])
                #db.execute_deferred_sql()
                db.commit_transaction()
                logger.debug("Created table '%s'" % m2m_table_name)
Example 13: forwards
def forwards(self, orm):
    # move some models from maps to layers app
    # 0. add some missing fields (not for wfp)
    db.start_transaction()
    try:
        # Adding field 'Layer.bbox_top'
        db.add_column('maps_layer', 'bbox_top', self.gf('django.db.models.fields.FloatField')(null=True, blank=True), keep_default=False)
        # Adding field 'Layer.bbox_bottom'
        db.add_column('maps_layer', 'bbox_bottom', self.gf('django.db.models.fields.FloatField')(null=True, blank=True), keep_default=False)
        # Adding field 'Layer.bbox_left'
        db.add_column('maps_layer', 'bbox_left', self.gf('django.db.models.fields.FloatField')(null=True, blank=True), keep_default=False)
        # Adding field 'Layer.bbox_right'
        db.add_column('maps_layer', 'bbox_right', self.gf('django.db.models.fields.FloatField')(null=True, blank=True), keep_default=False)
        db.commit_transaction()
    except:
        print 'No need to create the fields, they are already there'
        db.rollback_transaction()

    # 1. layers_layer moved from maps_layer
    db.rename_table('maps_layer', 'layers_layer')
    if not db.dry_run:
        orm['contenttypes.contenttype'].objects.filter(app_label='maps', model='layer').update(app_label='layers')

    # 2. layers_contactrole moved from maps_contactrole
    db.rename_table('maps_contactrole', 'layers_contactrole')
    if not db.dry_run:
        orm['contenttypes.contenttype'].objects.filter(app_label='maps', model='contactrole').update(app_label='layers')
Example 14: forwards
def forwards(self, orm):
    # If there are duplicated documents, we'll have an error when we try to
    # create this index. So to protect against that, we should delete those
    # documents before we create the index.
    # We need to wrap the data migration and alter operation in separate
    # transactions for PostgreSQL.
    # See: http://south.readthedocs.org/en/latest/migrationstructure.html#transactions
    try:
        db.start_transaction()
        duplicated_records = Document.objects \
            .values('content_type_id', 'object_id') \
            .annotate(id_count=models.Count('id')) \
            .filter(id_count__gt=1)

        # Delete all but the first document.
        for record in duplicated_records:
            docs = Document.objects \
                .values_list('id', flat=True) \
                .filter(
                    content_type_id=record['content_type_id'],
                    object_id=record['object_id'],
                )[1:]
            docs = list(docs)
            logging.warn('Deleting documents %s' % docs)
            Document.objects.filter(id__in=docs).delete()
        db.commit_transaction()
    except Exception, e:
        db.rollback_transaction()
        raise e
Example 15: add_necessary_db_columns
def add_necessary_db_columns(model_class):
    '''
    Takes a Django model class and creates relevant columns as necessary based
    on the model_class. No columns or data are renamed or removed.
    This is available in case a database exception occurs.
    '''
    db.start_transaction()

    # Add field columns if missing
    table_name = model_class._meta.db_table
    fields = _get_fields(model_class)
    db_column_names = [row[0] for row in connection.introspection.get_table_description(connection.cursor(), table_name)]
    for field_name, field in fields:
        if field.column not in db_column_names:
            try:
                db.add_column(table_name, field_name, field)
            except ValueError:
                field.null = True
                db.add_column(table_name, field_name, field)

    # Some columns require deferred SQL to be run. This was collected
    # when running db.add_column().
    db.execute_deferred_sql()

    db.commit_transaction()