This article collects typical usage examples of the migrate.UniqueConstraint class in Python. If you have been wondering what the UniqueConstraint class is for and how to use it, the curated class examples below may help.
The following shows 11 code examples of the UniqueConstraint class, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code samples.
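Before the examples, here is a minimal sketch of the pattern nearly all of them share: reflect an existing table, wrap one or more of its columns in a sqlalchemy-migrate UniqueConstraint, and call create() or drop() on it. The database URL, the widgets table, its name column, and the constraint name uniq_widgets0name are placeholders invented for illustration; they do not come from the examples below.

from sqlalchemy import MetaData, Table, create_engine
from migrate import UniqueConstraint

# Placeholder engine and table: assumes a pre-existing "widgets" table
# with a "name" column on a reachable database.
engine = create_engine('mysql+pymysql://user:password@localhost/example')
meta = MetaData(bind=engine)
widgets = Table('widgets', meta, autoload=True)

# Build the constraint against the reflected table, then emit the
# ALTER TABLE immediately via create().
uc = UniqueConstraint('name', table=widgets, name='uniq_widgets0name')
uc.create()

# The same object (or an identically defined one) can later remove it.
uc.drop()

In the OpenStack migration scripts below, the same pattern appears inside upgrade()/downgrade() functions that receive an already-configured migrate_engine.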
Example 1: _pre_upgrade_273
def _pre_upgrade_273(self, engine):
    if engine.name != 'sqlite':
        return

    # Drop a variety of unique constraints to ensure that the script
    # properly re-adds them
    for table_name, constraint_name in [
            ('compute_nodes', 'uniq_compute_nodes0'
                              'host0hypervisor_hostname'),
            ('fixed_ips', 'uniq_fixed_ips0address0deleted'),
            ('instance_info_caches', 'uniq_instance_info_caches0'
                                     'instance_uuid'),
            ('instance_type_projects', 'uniq_instance_type_projects0'
                                       'instance_type_id0project_id0'
                                       'deleted'),
            ('pci_devices', 'uniq_pci_devices0compute_node_id0'
                            'address0deleted'),
            ('virtual_interfaces', 'uniq_virtual_interfaces0'
                                   'address0deleted')]:
        table = oslodbutils.get_table(engine, table_name)
        constraints = [c for c in table.constraints
                       if c.name == constraint_name]
        for cons in constraints:
            # Need to use sqlalchemy-migrate UniqueConstraint
            cons = UniqueConstraint(*[c.name for c in cons.columns],
                                    name=cons.name,
                                    table=table)
            cons.drop()
Developer ID: jcalonsoh, Project: openstack-nova, Lines: 28, Source: test_migrations.py
Example 2: downgrade
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    # Remove the new column
    compute_nodes = Table("compute_nodes", meta, autoload=True)
    shadow_compute_nodes = Table("shadow_compute_nodes", meta, autoload=True)

    ukey = UniqueConstraint(
        "host", "hypervisor_hostname", table=compute_nodes, name="uniq_compute_nodes0host0hypervisor_hostname"
    )
    ukey.drop()
    compute_nodes.drop_column("host")
    shadow_compute_nodes.drop_column("host")
Developer ID: rcosma-gd, Project: openstack-nova, Lines: 15, Source: 268_add_host_in_compute_node.py
Example 3: upgrade
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    for prefix in ('', 'shadow_'):
        table = Table(prefix + 'block_device_mapping', meta, autoload=True)
        if not hasattr(table.c, 'uuid'):
            new_column = Column('uuid', String(36), nullable=True)
            table.create_column(new_column)

            if prefix == '':
                # Only add the constraint on the non-shadow table...
                con = UniqueConstraint('uuid', table=table,
                                       name="uniq_block_device_mapping0uuid")
                con.create(migrate_engine)
Developer ID: arbrandes, Project: nova, Lines: 15, Source: 374_bdm_uuid.py
Example 4: upgrade
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    # Add a new column host
    compute_nodes = Table('compute_nodes', meta, autoload=True)
    shadow_compute_nodes = Table('shadow_compute_nodes', meta, autoload=True)

    # NOTE(sbauza) : Old compute nodes can report stats without this field, we
    # need to set it as nullable
    host = Column('host', String(255), nullable=True)

    if not hasattr(compute_nodes.c, 'host'):
        compute_nodes.create_column(host)
    if not hasattr(shadow_compute_nodes.c, 'host'):
        shadow_compute_nodes.create_column(host.copy())

    # NOTE(sbauza) : Populate the host field with the value from the services
    # table will be done at the ComputeNode object level when save()
    ukey = UniqueConstraint('host', 'hypervisor_hostname', table=compute_nodes,
                            name="uniq_compute_nodes0host0hypervisor_hostname")
    ukey.create()
Developer ID: 375670450, Project: nova, Lines: 22, Source: 268_add_host_in_compute_node.py
Example 5: upgrade
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    if migrate_engine.name == 'sqlite':
        # SQLite is also missing this one index
        if not utils.index_exists(migrate_engine, 'fixed_ips', 'address'):
            utils.add_index(migrate_engine, 'fixed_ips', 'address',
                            ['address'])

        for src_table, src_column, dst_table, dst_column, name in FKEYS:
            src_table = Table(src_table, meta, autoload=True)
            if name in set(fk.name for fk in src_table.foreign_keys):
                continue

            src_column = src_table.c[src_column]

            dst_table = Table(dst_table, meta, autoload=True)
            dst_column = dst_table.c[dst_column]

            fkey = ForeignKeyConstraint(columns=[src_column],
                                        refcolumns=[dst_column],
                                        name=name)
            fkey.create()

        # SQLAlchemy versions < 1.0.0 don't reflect unique constraints
        # for SQLite correctly causing sqlalchemy-migrate to recreate
        # some tables with missing unique constraints. Re-add some
        # potentially missing unique constraints as a workaround.
        for table_name, name, column_names in UNIQUES:
            table = Table(table_name, meta, autoload=True)
            if name in set(c.name for c in table.constraints
                           if isinstance(table, schema.UniqueConstraint)):
                continue

            uc = UniqueConstraint(*column_names, table=table, name=name)
            uc.create()
Developer ID: Dynavisor, Project: nova, Lines: 37, Source: 273_sqlite_foreign_keys.py
Example 6: upgrade
def upgrade(migrate_engine):
    meta = sa.MetaData(bind=migrate_engine)
    load_tables = dict((table_name, sa.Table(table_name, meta,
                                             autoload=True))
                       for table_name in TABLES)

    if migrate_engine.name != 'sqlite':
        for table_name, indexes in INDEXES.items():
            table = load_tables[table_name]
            for column, ref_table_name, ref_column_name in indexes:
                ref_table = load_tables[ref_table_name]
                params = {'columns': [table.c[column]],
                          'refcolumns': [ref_table.c[ref_column_name]]}
                if migrate_engine.name == "mysql" and \
                        table_name != 'alarm_history':
                    params['name'] = "_".join(('fk', table_name, column))
                elif migrate_engine.name == "postgresql" and \
                        table_name == "sample":
                    # The fk contains the old table name
                    params['name'] = "_".join(('meter', column, 'fkey'))
                fkey = ForeignKeyConstraint(**params)
                fkey.drop()

    sourceassoc = load_tables['sourceassoc']
    if migrate_engine.name != 'sqlite':
        idx = sa.Index('idx_su', sourceassoc.c.source_id,
                       sourceassoc.c.user_id)
        idx.drop(bind=migrate_engine)
        idx = sa.Index('idx_sp', sourceassoc.c.source_id,
                       sourceassoc.c.project_id)
        idx.drop(bind=migrate_engine)

        params = {}
        if migrate_engine.name == "mysql":
            params = {'name': 'uniq_sourceassoc0sample_id'}
        uc = UniqueConstraint('sample_id', table=sourceassoc, **params)
        uc.create()

        params = {}
        if migrate_engine.name == "mysql":
            params = {'name': 'uniq_sourceassoc0sample_id0user_id'}
        uc = UniqueConstraint('sample_id', 'user_id',
                              table=sourceassoc, **params)
        uc.drop()

    sourceassoc.c.user_id.drop()
    sourceassoc.c.project_id.drop()

    for table_name in TABLES_DROP:
        sa.Table(table_name, meta, autoload=True).drop()
Developer ID: sahilbhagat0710, Project: ceilometer, Lines: 51, Source: 035_drop_user_project_tables.py
Example 7: downgrade
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    compute_nodes = Table('compute_nodes', meta, autoload=True)
    ukey = UniqueConstraint(
        'host', 'hypervisor_hostname', 'deleted',
        table=compute_nodes,
        name="uniq_compute_nodes0host0hypervisor_hostname0deleted")
    ukey.drop()

    ukey = UniqueConstraint('host', 'hypervisor_hostname', table=compute_nodes,
                            name="uniq_compute_nodes0host0hypervisor_hostname")
    ukey.create()
Developer ID: Dynavisor, Project: nova, Lines: 14, Source: 279_fix_unique_constraint_for_compute_node.py
Example 8: upgrade
def upgrade(migrate_engine):
    meta = sqlalchemy.MetaData()
    meta.bind = migrate_engine

    hosts_table = Table('hosts', meta, autoload=True)
    failover_segments = Table('failover_segments', meta, autoload=True)

    # NOTE(Dinesh_Bhor) We need to drop foreign keys first because unique
    # constraints that we want to delete depend on them. So drop the fk and
    # recreate it again after unique constraint deletion.
    cons_fk = ForeignKeyConstraint([hosts_table.c.failover_segment_id],
                                   [failover_segments.c.uuid],
                                   name="fk_failover_segments_uuid")
    cons_fk.drop(engine=migrate_engine)

    cons_unique = UniqueConstraint('failover_segment_id', 'name', 'deleted',
                                   name='uniq_host0name0deleted',
                                   table=hosts_table)
    cons_unique.drop(engine=migrate_engine)

    # Create an updated unique constraint
    updated_cons_unique = UniqueConstraint('name', 'deleted',
                                           name='uniq_host0name0deleted',
                                           table=hosts_table)

    cons_fk.create()
    updated_cons_unique.create()
Developer ID: openstack, Project: masakari, Lines: 24, Source: 003_update_unique_constraint_hosts.py
Example 9: downgrade
def downgrade(migrate_engine):
    meta = sa.MetaData(bind=migrate_engine)
    sample = sa.Table('sample', meta, autoload=True)

    resource = sa.Table(
        'resource', meta,
        sa.Column('id', sa.String(255), primary_key=True),
        sa.Column('resource_metadata', sa.Text),
        sa.Column('user_id', sa.String(255)),
        sa.Column('project_id', sa.String(255)),
        sa.Index('ix_resource_project_id', 'project_id'),
        sa.Index('ix_resource_user_id', 'user_id'),
        sa.Index('resource_user_id_project_id_key', 'user_id', 'project_id'),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )
    resource.create()

    source = sa.Table(
        'source', meta,
        sa.Column('id', sa.String(255), primary_key=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )
    source.create()

    sourceassoc = sa.Table(
        'sourceassoc', meta,
        sa.Column('sample_id', sa.Integer),
        sa.Column('resource_id', sa.String(255)),
        sa.Column('source_id', sa.String(255)),
        sa.Index('idx_sr', 'source_id', 'resource_id'),
        sa.Index('idx_ss', 'source_id', 'sample_id'),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )
    sourceassoc.create()

    params = {}
    if migrate_engine.name == "mysql":
        params = {'name': 'uniq_sourceassoc0sample_id'}
    uc = UniqueConstraint('sample_id', table=sourceassoc, **params)
    uc.create()

    # reload source/resource tables.
    # NOTE(gordc): fine to skip non-id attributes in table since
    # they're constantly updated and not used by api
    for table, col in [(source, 'source_id'), (resource, 'resource_id')]:
        q = sa.select([sample.c[col]]).distinct()
        # NOTE(sileht): workaround for
        # https://bitbucket.org/zzzeek/sqlalchemy/
        # issue/3044/insert-from-select-union_all
        q.select = lambda: q
        sql_ins = table.insert().from_select([table.c.id], q)
        try:
            migrate_engine.execute(sql_ins)
        except TypeError:
            # from select is empty
            pass

    # reload sourceassoc tables
    for ref_col, col in [('id', 'sample_id'), ('resource_id', 'resource_id')]:
        q = sa.select([sample.c.source_id, sample.c[ref_col]]).distinct()
        q.select = lambda: q
        sql_ins = sourceassoc.insert().from_select([sourceassoc.c.source_id,
                                                    sourceassoc.c[col]], q)
        try:
            migrate_engine.execute(sql_ins)
        except TypeError:
            # from select is empty
            pass

    sample.c.source_id.drop()

    load_tables = dict((table_name, sa.Table(table_name, meta,
                                             autoload=True))
                       for table_name in TABLES)

    # add foreign keys
    if migrate_engine.name != 'sqlite':
        for table_name, indexes in INDEXES.items():
            table = load_tables[table_name]
            for column, ref_table_name, ref_column_name in indexes:
                ref_table = load_tables[ref_table_name]
                params = {'columns': [table.c[column]],
                          'refcolumns': [ref_table.c[ref_column_name]]}
                fk_table_name = table_name
                if migrate_engine.name == "mysql":
                    params['name'] = "_".join(('fk', fk_table_name, column))
                elif (migrate_engine.name == "postgresql" and
                      table_name == 'sample'):
                    # fk was not renamed in script 030
                    params['name'] = "_".join(('meter', column, 'fkey'))
                fkey = ForeignKeyConstraint(**params)
                fkey.create()
Developer ID: AVerma87, Project: ceilometer, Lines: 94, Source: 036_drop_sourceassoc_resource_tables.py
Example 10: downgrade
def downgrade(migrate_engine):
    meta = sa.MetaData(bind=migrate_engine)

    user = sa.Table(
        'user', meta,
        sa.Column('id', sa.String(255), primary_key=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )
    project = sa.Table(
        'project', meta,
        sa.Column('id', sa.String(255), primary_key=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )
    tables = [project, user]
    for i in sorted(tables):
        i.create()

    load_tables = dict((table_name, sa.Table(table_name, meta, autoload=True))
                       for table_name in TABLES)

    # Restore the sourceassoc columns and constraints
    sourceassoc = load_tables['sourceassoc']
    user_id = sa.Column('user_id', sa.String(255))
    project_id = sa.Column('project_id', sa.String(255))
    sourceassoc.create_column(user_id)
    sourceassoc.create_column(project_id)

    if migrate_engine.name != 'sqlite':
        params = {}
        if migrate_engine.name == "mysql":
            params = {'name': 'uniq_sourceassoc0sample_id0user_id'}
        uc = UniqueConstraint('sample_id', 'user_id',
                              table=sourceassoc, **params)
        uc.create()

        params = {}
        if migrate_engine.name == "mysql":
            params = {'name': 'uniq_sourceassoc0sample_id'}
        uc = UniqueConstraint('sample_id', table=sourceassoc, **params)
        uc.drop()

        idx = sa.Index('idx_su', sourceassoc.c.source_id,
                       sourceassoc.c.user_id)
        idx.create(bind=migrate_engine)
        idx = sa.Index('idx_sp', sourceassoc.c.source_id,
                       sourceassoc.c.project_id)
        idx.create(bind=migrate_engine)

    # Restore the user/project columns and constraints in all tables
    for table_name, indexes in INDEXES.items():
        table = load_tables[table_name]
        for column, ref_table_name, ref_column_name in indexes:
            ref_table = load_tables[ref_table_name]
            c = getattr(Alias(table).c, column)
            except_q = exists([getattr(ref_table.c, ref_column_name)])
            q = select([c]).where(and_(c != sa.null(), not_(except_q)))
            q = q.distinct()

            # NOTE(sileht): workaround for
            # https://bitbucket.org/zzzeek/sqlalchemy/
            # issue/3044/insert-from-select-union_all
            q.select = lambda: q

            sql_ins = ref_table.insert().from_select(
                [getattr(ref_table.c, ref_column_name)], q)
            try:
                migrate_engine.execute(sql_ins)
            except TypeError:
                # from select is empty
                pass

            if migrate_engine.name != 'sqlite':
                params = {'columns': [table.c[column]],
                          'refcolumns': [ref_table.c[ref_column_name]]}
                if migrate_engine.name == "mysql" and \
                        table_name != 'alarm_history':
                    params['name'] = "_".join(('fk', table_name, column))
                elif migrate_engine.name == "postgresql" and \
                        table_name == "sample":
                    # The fk contains the old table name
                    params['name'] = "_".join(('meter', column, 'fkey'))
                fkey = ForeignKeyConstraint(**params)
                fkey.create()
Developer ID: sahilbhagat0710, Project: ceilometer, Lines: 88, Source: 035_drop_user_project_tables.py
Example 11: upgrade
def upgrade(migrate_engine):
    meta = MetaData(bind=migrate_engine)

    resource_providers = Table('resource_providers', meta, autoload=True)
    name = Column('name', Unicode(200), nullable=True)
    generation = Column('generation', Integer, default=0)
    can_host = Column('can_host', Integer, default=0)

    if not hasattr(resource_providers.c, 'name'):
        # NOTE(cdent): The resource_providers table is defined as
        # latin1 to be more efficient. Now we need the name column
        # to be UTF8. First create the column, then modify it,
        # otherwise the declarative handling in sqlalchemy gets
        # confused.
        resource_providers.create_column(name)
        if migrate_engine.name == 'mysql':
            name_col_ddl = DDL(
                "ALTER TABLE resource_providers MODIFY name "
                "VARCHAR(200) CHARACTER SET utf8")
            conn = migrate_engine.connect()
            conn.execute(name_col_ddl)
        uc = UniqueConstraint('name', table=resource_providers,
                              name='uniq_resource_providers0name')
        uc.create()
        utils.add_index(migrate_engine, 'resource_providers',
                        'resource_providers_name_idx',
                        ['name'])

    if not hasattr(resource_providers.c, 'generation'):
        resource_providers.create_column(generation)

    if not hasattr(resource_providers.c, 'can_host'):
        resource_providers.create_column(can_host)

    resource_provider_aggregates = Table(
        'resource_provider_aggregates', meta,
        Column('resource_provider_id', Integer, primary_key=True,
               nullable=False),
        Column('aggregate_id', Integer, primary_key=True, nullable=False),
        mysql_engine='InnoDB',
        mysql_charset='latin1'
    )

    Index('resource_provider_aggregates_aggregate_id_idx',
          resource_provider_aggregates.c.aggregate_id)

    resource_provider_aggregates.create(checkfirst=True)

    utils.add_index(migrate_engine, 'allocations',
                    'allocations_resource_provider_class_used_idx',
                    ['resource_provider_id', 'resource_class_id',
                     'used'])
    utils.drop_index(migrate_engine, 'allocations',
                     'allocations_resource_provider_class_id_idx')

    # Add a unique constraint so that any resource provider can have
    # only one inventory for any given resource class.
    inventories = Table('inventories', meta, autoload=True)
    inventories_uc = UniqueConstraint(
        'resource_provider_id', 'resource_class_id', table=inventories,
        name='uniq_inventories0resource_provider_resource_class')
    inventories_uc.create()

    utils.add_index(migrate_engine, 'inventories',
                    'inventories_resource_provider_resource_class_idx',
                    ['resource_provider_id', 'resource_class_id'])
Developer ID: 4everming, Project: nova, Lines: 67, Source: 318_resource_provider_name_aggregates.py
Note: The migrate.UniqueConstraint class examples in this article were compiled by 纯净天空 from source code and documentation platforms such as GitHub and MSDocs. The code snippets are selected from open-source projects contributed by many developers; copyright of the source code belongs to the original authors, and any distribution or use should follow the license of the corresponding project. Do not repost without permission.