This article collects typical usage examples of sqlalchemy.pool.SingletonThreadPool in Python. If you are wondering what pool.SingletonThreadPool does, how it is used, or simply want to see it in real code, the curated examples below should help. You can also browse further usage examples from the module it belongs to, sqlalchemy.pool.
Shown below are 10 code examples of pool.SingletonThreadPool, sorted by popularity by default. Upvoting the examples you find useful helps the system surface better Python code examples.
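If you just need a quick starting point, here is a minimal, self-contained sketch (my own illustration, not one of the collected examples) that explicitly requests SingletonThreadPool for an in-memory SQLite engine:

from sqlalchemy import create_engine, text
from sqlalchemy.pool import SingletonThreadPool

# Explicitly request SingletonThreadPool; for in-memory "sqlite://" URLs the
# pysqlite dialect picks this pool class by default anyway (see Example 9).
engine = create_engine("sqlite://", poolclass=SingletonThreadPool)

with engine.connect() as conn:
    print(conn.execute(text("select 1")).scalar())  # -> 1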
Example 1: create_sqlite_db
# Required import: from sqlalchemy import pool [as alias]
# Or: from sqlalchemy.pool import SingletonThreadPool [as alias]
def create_sqlite_db(threads=1):
    """Create and initialize a sqlite db engine for use as the CAI temp store.

    Args:
        threads (int): The number of threads to support. Pool size is set to 5
            greater than the number of threads, so that each thread can get its
            own connection to the temp database, with a few spare.

    Returns:
        Tuple[sqlalchemy.engine.Engine, str]: A tuple containing an engine
            object initialized to a temporary sqlite db file, and the path to
            the temporary file.
    """
    dbfile, tmpfile = tempfile.mkstemp('.db', 'forseti-cai-store-')
    pool_size = threads + 5
    try:
        engine = create_engine('sqlite:///{}'.format(tmpfile),
                               sqlite_enforce_fks=False,
                               pool_size=pool_size,
                               connect_args={'check_same_thread': False},
                               poolclass=SingletonThreadPool)
        _initialize(engine)
        return engine, tmpfile
    finally:
        os.close(dbfile)
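Example 1 sizes the pool at threads + 5 so each worker thread can hold its own connection with a few spares. Below is a stripped-down, stand-alone variant of that pattern (my own sketch, assuming plain SQLAlchemy; the sqlite_enforce_fks keyword above appears to come from the project's own create_engine wrapper and is omitted here):

import os
import tempfile

from sqlalchemy import create_engine
from sqlalchemy.pool import SingletonThreadPool

def make_temp_sqlite_engine(threads=1):
    # One connection per worker thread, plus a few spares.
    fd, path = tempfile.mkstemp('.db', 'temp-store-')
    os.close(fd)
    engine = create_engine(
        'sqlite:///{}'.format(path),
        poolclass=SingletonThreadPool,
        pool_size=threads + 5,
        connect_args={'check_same_thread': False},
    )
    return engine, path

engine, path = make_temp_sqlite_engine(threads=4)
# ... use the engine ...
engine.dispose()
os.unlink(path)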
Example 2: test_no_connect_on_recreate
# Required import: from sqlalchemy import pool [as alias]
# Or: from sqlalchemy.pool import SingletonThreadPool [as alias]
def test_no_connect_on_recreate(self):
    def creator():
        raise Exception("no creates allowed")

    for cls in (
        pool.SingletonThreadPool,
        pool.StaticPool,
        pool.QueuePool,
        pool.NullPool,
        pool.AssertionPool,
    ):
        p = cls(creator=creator)
        p.dispose()
        p2 = p.recreate()

        assert p2.__class__ is cls

        mock_dbapi = MockDBAPI()
        p = cls(creator=mock_dbapi.connect)
        conn = p.connect()
        conn.close()
        mock_dbapi.connect.side_effect = Exception("error!")
        p.dispose()
        p.recreate()
Example 3: _init_dbs
# Required import: from sqlalchemy import pool [as alias]
# Or: from sqlalchemy.pool import SingletonThreadPool [as alias]
def _init_dbs(self):
    db1 = testing_engine(
        "sqlite:///shard1_%s.db" % provision.FOLLOWER_IDENT,
        options=dict(poolclass=SingletonThreadPool),
    )
    db2 = testing_engine(
        "sqlite:///shard2_%s.db" % provision.FOLLOWER_IDENT
    )
    db3 = testing_engine(
        "sqlite:///shard3_%s.db" % provision.FOLLOWER_IDENT
    )
    db4 = testing_engine(
        "sqlite:///shard4_%s.db" % provision.FOLLOWER_IDENT
    )

    self.dbs = [db1, db2, db3, db4]
    return self.dbs
Example 4: _init_connection_args
# Required import: from sqlalchemy import pool [as alias]
# Or: from sqlalchemy.pool import SingletonThreadPool [as alias]
def _init_connection_args(url, engine_args, **kw):
    pool_class = url.get_dialect().get_pool_class(url)
    # SingletonThreadPool is used for :memory: connections;
    # replace it with StaticPool.
    if issubclass(pool_class, pool.SingletonThreadPool):
        engine_args["poolclass"] = pool.StaticPool
        engine_args['connect_args']['check_same_thread'] = False
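Example 4 swaps SingletonThreadPool for StaticPool so that a single in-memory database can be shared across threads. Here is a minimal stand-alone sketch of the engine configuration that replacement produces (my own illustration, assuming plain SQLAlchemy):

from sqlalchemy import create_engine, text
from sqlalchemy.pool import StaticPool

# StaticPool keeps exactly one connection, so every thread sees the same
# ":memory:" database; check_same_thread must be disabled for that to work.
engine = create_engine(
    "sqlite:///:memory:",
    poolclass=StaticPool,
    connect_args={"check_same_thread": False},
)

with engine.begin() as conn:
    conn.execute(text("create table t (x integer)"))

with engine.connect() as conn:
    print(conn.execute(text("select count(*) from t")).scalar())  # -> 0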
Example 5: create_test_engine_with_file
# Required import: from sqlalchemy import pool [as alias]
# Or: from sqlalchemy.pool import SingletonThreadPool [as alias]
def create_test_engine_with_file(enforce_fks=True):
    """Create a test engine with a db file in /tmp/."""
    fd, tmpfile = tempfile.mkstemp('.db', 'forseti-test-')
    try:
        LOGGER.info('Creating database at %s', tmpfile)
        engine = create_engine('sqlite:///{}'.format(tmpfile),
                               sqlite_enforce_fks=enforce_fks,
                               pool_size=5,
                               connect_args={'check_same_thread': False},
                               poolclass=SingletonThreadPool)
        return engine, tmpfile
    finally:
        os.close(fd)
Example 6: test_singleton_pool
# Required import: from sqlalchemy import pool [as alias]
# Or: from sqlalchemy.pool import SingletonThreadPool [as alias]
def test_singleton_pool(self):
    self._do_test(pool.SingletonThreadPool, ["R", "CL", "R"])
Example 7: test_listen_targets_per_subclass
# Required import: from sqlalchemy import pool [as alias]
# Or: from sqlalchemy.pool import SingletonThreadPool [as alias]
def test_listen_targets_per_subclass(self):
    """test that listen() called on a subclass remains specific to
    that subclass."""

    canary = []

    def listen_one(*args):
        canary.append("listen_one")

    def listen_two(*args):
        canary.append("listen_two")

    def listen_three(*args):
        canary.append("listen_three")

    event.listen(pool.Pool, "connect", listen_one)
    event.listen(pool.QueuePool, "connect", listen_two)
    event.listen(pool.SingletonThreadPool, "connect", listen_three)

    p1 = pool.QueuePool(creator=MockDBAPI().connect)
    p2 = pool.SingletonThreadPool(creator=MockDBAPI().connect)

    assert listen_one in p1.dispatch.connect
    assert listen_two in p1.dispatch.connect
    assert listen_three not in p1.dispatch.connect
    assert listen_one in p2.dispatch.connect
    assert listen_two not in p2.dispatch.connect
    assert listen_three in p2.dispatch.connect

    p1.connect()
    eq_(canary, ["listen_one", "listen_two"])
    p2.connect()
    eq_(canary, ["listen_one", "listen_two", "listen_one", "listen_three"])
Example 8: test_cleanup
# Required import: from sqlalchemy import pool [as alias]
# Or: from sqlalchemy.pool import SingletonThreadPool [as alias]
def test_cleanup(self):
    self._test_cleanup(False)

# TODO: the SingletonThreadPool cleanup method
# has an unfixed race condition within the "cleanup" system that
# leads to this test being off by one connection under load; in any
# case, this connection will be closed once it is garbage collected.
# this pool is not a production-level pool and is only used for the
# SQLite "memory" connection, and is not very useful under actual
# multi-threaded conditions
# @testing.requires.threading_with_mock
# def test_cleanup_no_gc(self):
#     self._test_cleanup(True)
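The behaviour these cleanup tests exercise boils down to one rule: each thread gets (and keeps) its own connection, and the pool only starts closing the oldest connections once more threads than pool_size have checked one out. A quick sketch of the per-thread reuse (my own illustration; the .connection attribute is what these 1.x-era tests use, on SQLAlchemy 2.0 use .dbapi_connection instead):

import sqlite3

from sqlalchemy import pool

singleton = pool.SingletonThreadPool(
    creator=lambda: sqlite3.connect(":memory:"),
    pool_size=3,
)

c1 = singleton.connect()
raw = c1.connection          # underlying DBAPI connection
c1.close()                   # "returned", but this thread keeps its connection

c2 = singleton.connect()
assert c2.connection is raw  # same thread -> same underlying connection
c2.close()

singleton.dispose()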
Example 9: test_pool_class
# Required import: from sqlalchemy import pool [as alias]
# Or: from sqlalchemy.pool import SingletonThreadPool [as alias]
def test_pool_class(self):
    e = create_engine("sqlite+pysqlite://")
    assert e.pool.__class__ is pool.SingletonThreadPool

    e = create_engine("sqlite+pysqlite:///:memory:")
    assert e.pool.__class__ is pool.SingletonThreadPool

    e = create_engine("sqlite+pysqlite:///foo.db")
    assert e.pool.__class__ is pool.NullPool
Example 10: _test_cleanup
# Required import: from sqlalchemy import pool [as alias]
# Or: from sqlalchemy.pool import SingletonThreadPool [as alias]
def _test_cleanup(self, strong_refs):
    """test that the pool's connections are OK after cleanup() has
    been called."""

    dbapi = MockDBAPI()

    lock = threading.Lock()

    def creator():
        # the mock iterator isn't threadsafe...
        with lock:
            return dbapi.connect()

    p = pool.SingletonThreadPool(creator=creator, pool_size=3)

    if strong_refs:
        sr = set()

        def _conn():
            c = p.connect()
            sr.add(c.connection)
            return c

    else:

        def _conn():
            return p.connect()

    def checkout():
        for x in range(10):
            c = _conn()
            assert c
            c.cursor()
            c.close()
            time.sleep(0.1)

    threads = []
    for i in range(10):
        th = threading.Thread(target=checkout)
        th.start()
        threads.append(th)
    for th in threads:
        th.join(join_timeout)

    lp = len(p._all_conns)
    is_true(3 <= lp <= 4)

    if strong_refs:
        still_opened = len([c for c in sr if not c.close.call_count])
        eq_(still_opened, 3)