This article collects typical usage examples of pymysqlreplication.BinLogStreamReader in Python. If you have been wondering what BinLogStreamReader is for, or how to use it in practice, the curated code samples below should help. You can also explore the pymysqlreplication package it belongs to in more depth.
The following presents 15 code examples of pymysqlreplication.BinLogStreamReader, sorted by popularity by default. You can upvote the examples you find useful or interesting; your votes help the system recommend better Python code samples.
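Before the examples, here is a minimal sketch of the typical BinLogStreamReader workflow: connect as a replication client, iterate over the decoded binlog events, and close the stream when done. The connection settings below are placeholders, and the MySQL server is assumed to run with binlog_format=ROW so that row events are emitted.

from pymysqlreplication import BinLogStreamReader
from pymysqlreplication.row_event import WriteRowsEvent

# Placeholder connection settings -- substitute your own host and credentials.
MYSQL_SETTINGS = {"host": "127.0.0.1", "port": 3306, "user": "root", "passwd": ""}

stream = BinLogStreamReader(
    connection_settings=MYSQL_SETTINGS,
    server_id=100,                 # must not collide with any other replication client
    only_events=[WriteRowsEvent],  # filter to the event types you care about
    blocking=True,                 # keep waiting for new events instead of stopping
)
try:
    for event in stream:
        event.dump()               # print a human-readable summary of the event
finally:
    stream.close()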
Example 1: test_connection_stream_lost_event
# Required module: import pymysqlreplication [as alias]
# Or: from pymysqlreplication import BinLogStreamReader [as alias]
def test_connection_stream_lost_event(self):
    self.stream.close()
    self.stream = BinLogStreamReader(
        self.database, server_id=1024, blocking=True,
        ignored_events=self.ignoredEvents())

    query = "CREATE TABLE test (id INT NOT NULL AUTO_INCREMENT, data VARCHAR (50) NOT NULL, PRIMARY KEY (id))"
    self.execute(query)
    query2 = "INSERT INTO test (data) VALUES('a')"
    for i in range(0, 10000):
        self.execute(query2)
    self.execute("COMMIT")

    self.assertIsInstance(self.stream.fetchone(), RotateEvent)
    self.assertIsInstance(self.stream.fetchone(), FormatDescriptionEvent)

    event = self.stream.fetchone()
    self.assertIsInstance(event, QueryEvent)
    self.assertEqual(event.query, query)

    self.conn_control.kill(self.stream._stream_connection.thread_id())
    for i in range(0, 10000):
        event = self.stream.fetchone()
        self.assertIsNotNone(event)
Example 2: test_skip_to_timestamp
# Required module: import pymysqlreplication [as alias]
# Or: from pymysqlreplication import BinLogStreamReader [as alias]
def test_skip_to_timestamp(self):
    self.stream.close()
    query = "CREATE TABLE test_1 (id INT NOT NULL AUTO_INCREMENT, data VARCHAR (50) NOT NULL, PRIMARY KEY (id))"
    self.execute(query)
    time.sleep(1)
    query = "SELECT UNIX_TIMESTAMP();"
    timestamp = self.execute(query).fetchone()[0]

    query2 = "CREATE TABLE test_2 (id INT NOT NULL AUTO_INCREMENT, data VARCHAR (50) NOT NULL, PRIMARY KEY (id))"
    self.execute(query2)

    self.stream = BinLogStreamReader(
        self.database,
        server_id=1024,
        skip_to_timestamp=timestamp,
        ignored_events=self.ignoredEvents(),
    )
    event = self.stream.fetchone()
    self.assertIsInstance(event, QueryEvent)
    self.assertEqual(event.query, query2)
Example 3: test_drop_column
# Required module: import pymysqlreplication [as alias]
# Or: from pymysqlreplication import BinLogStreamReader [as alias]
def test_drop_column(self):
    self.stream.close()
    self.execute("CREATE TABLE test_drop_column (id INTEGER(11), data VARCHAR(50))")
    self.execute("INSERT INTO test_drop_column VALUES (1, 'A value')")
    self.execute("COMMIT")
    self.execute("ALTER TABLE test_drop_column DROP COLUMN data")
    self.execute("INSERT INTO test_drop_column VALUES (2)")
    self.execute("COMMIT")

    self.stream = BinLogStreamReader(
        self.database,
        server_id=1024,
        only_events=(WriteRowsEvent,),
    )
    try:
        self.stream.fetchone()  # insert with two values
        self.stream.fetchone()  # insert with one value
    except Exception as e:
        self.fail("raised unexpected exception: {exception}".format(exception=e))
    finally:
        self.resetBinLog()
Example 4: test_alter_column
# Required module: import pymysqlreplication [as alias]
# Or: from pymysqlreplication import BinLogStreamReader [as alias]
def test_alter_column(self):
    self.stream.close()
    self.execute("CREATE TABLE test_alter_column (id INTEGER(11), data VARCHAR(50))")
    self.execute("INSERT INTO test_alter_column VALUES (1, 'A value')")
    self.execute("COMMIT")
    # this is a problem only when the column is added somewhere other than at the end
    self.execute("ALTER TABLE test_alter_column ADD COLUMN another_data VARCHAR(50) AFTER id")
    self.execute("INSERT INTO test_alter_column VALUES (2, 'Another value', 'A value')")
    self.execute("COMMIT")

    self.stream = BinLogStreamReader(
        self.database,
        server_id=1024,
        only_events=(WriteRowsEvent,),
    )
    event = self.stream.fetchone()  # insert with two values
    # Both of these asserts fail because of the underlying problem described in issue #118:
    # since the reader fetches the table schema only after the ALTER TABLE, it wrongly
    # assumes the second column of the first insert is 'another_data'.
    # ER: {'id': 1, 'data': 'A value'}
    # AR: {'id': 1, 'another_data': 'A value'}
    self.assertIn("data", event.rows[0]["values"])
    self.assertNotIn("another_data", event.rows[0]["values"])
    self.assertEqual(event.rows[0]["values"]["data"], 'A value')
    self.stream.fetchone()  # insert with three values
Example 5: test_position_gtid
# Required module: import pymysqlreplication [as alias]
# Or: from pymysqlreplication import BinLogStreamReader [as alias]
def test_position_gtid(self):
    query = "CREATE TABLE test (id INT NOT NULL, data VARCHAR (50) NOT NULL, PRIMARY KEY (id))"
    self.execute(query)
    query = "BEGIN;"
    self.execute(query)
    query = "INSERT INTO test (id, data) VALUES(1, 'Hello');"
    self.execute(query)
    query = "COMMIT;"
    self.execute(query)

    query = "CREATE TABLE test2 (id INT NOT NULL, data VARCHAR (50) NOT NULL, PRIMARY KEY (id))"
    self.execute(query)
    query = "SELECT @@global.gtid_executed;"
    gtid = self.execute(query).fetchone()[0]

    self.stream.close()
    self.stream = BinLogStreamReader(
        self.database, server_id=1024, blocking=True, auto_position=gtid)

    self.assertIsInstance(self.stream.fetchone(), RotateEvent)
    self.assertIsInstance(self.stream.fetchone(), FormatDescriptionEvent)
    self.assertIsInstance(self.stream.fetchone(), GtidEvent)
    event = self.stream.fetchone()
    self.assertEqual(event.query, 'CREATE TABLE test2 (id INT NOT NULL, data VARCHAR (50) NOT NULL, PRIMARY KEY (id))')
Example 6: _open_stream
# Required module: import pymysqlreplication [as alias]
# Or: from pymysqlreplication import BinLogStreamReader [as alias]
def _open_stream(self, start_file="mysql-bin.000001", start_pos=4):
    """Returns a binary log stream starting at the given file and directly
    after the given position. server_id and blocking are both set here
    but they appear to have no effect on the actual stream.

    start_pos defaults to 4 because the first event in every binlog starts
    at log_pos 4.
    """
    return BinLogStreamReader(
        connection_settings=self.db_config._asdict(),
        server_id=1,
        blocking=False,
        resume_stream=True,
        log_file=start_file,
        log_pos=start_pos,
        only_schemas=[HEARTBEAT_DB]
    )
Example 7: _seek
# Required module: import pymysqlreplication [as alias]
# Or: from pymysqlreplication import BinLogStreamReader [as alias]
def _seek(
    self,
    source_database_config,
    tracker_database_config,
    allowed_event_types,
    position,
    only_tables
):
    self.stream = BinLogStreamReader(
        connection_settings=source_database_config,
        ctl_connection_settings=tracker_database_config,
        server_id=self.get_unique_server_id(),
        blocking=True,
        only_events=allowed_event_types,
        resume_stream=config.env_config.resume_stream,
        only_tables=only_tables,
        fail_on_table_metadata_unavailable=True,
        **position.to_replication_dict()
    )
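Example 7 takes its server_id from a get_unique_server_id() helper. Every client that connects to the same MySQL server as a replica must present a distinct server_id; if two clients share one, the server drops the older connection. Below is a minimal sketch of one way to pick a pseudo-random id; this is a hypothetical helper, not the project's actual implementation.

import random

def get_unique_server_id():
    # server_id is an unsigned 32-bit value; pick one well above typical static replica ids.
    # Collisions are still possible, so a production system would track the ids in use.
    return random.randint(100000, 2**32 - 1)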
Example 8: consume_events
# Required module: import pymysqlreplication [as alias]
# Or: from pymysqlreplication import BinLogStreamReader [as alias]
def consume_events():
    stream = BinLogStreamReader(connection_settings=database,
                                server_id=3,
                                resume_stream=False,
                                blocking=True,
                                only_events=[UpdateRowsEvent],
                                only_tables=['test'])
    start = time.perf_counter()  # time.clock() was removed in Python 3.8
    i = 0.0
    for binlogevent in stream:
        i += 1.0
        if i % 1000 == 0:
            print("%d events per second (%d total)" % (i / (time.perf_counter() - start), i))
    stream.close()
Example 9: test_filtering_only_events
# Required module: import pymysqlreplication [as alias]
# Or: from pymysqlreplication import BinLogStreamReader [as alias]
def test_filtering_only_events(self):
    self.stream.close()
    self.stream = BinLogStreamReader(
        self.database, server_id=1024, only_events=[QueryEvent])
    query = "CREATE TABLE test (id INT NOT NULL AUTO_INCREMENT, data VARCHAR (50) NOT NULL, PRIMARY KEY (id))"
    self.execute(query)
    event = self.stream.fetchone()
    self.assertIsInstance(event, QueryEvent)
    self.assertEqual(event.query, query)
Example 10: test_filtering_ignore_events
# Required module: import pymysqlreplication [as alias]
# Or: from pymysqlreplication import BinLogStreamReader [as alias]
def test_filtering_ignore_events(self):
    self.stream.close()
    self.stream = BinLogStreamReader(
        self.database, server_id=1024, ignored_events=[QueryEvent])
    query = "CREATE TABLE test (id INT NOT NULL AUTO_INCREMENT, data VARCHAR (50) NOT NULL, PRIMARY KEY (id))"
    self.execute(query)
    event = self.stream.fetchone()
    self.assertIsInstance(event, RotateEvent)
Example 11: test_log_pos
# Required module: import pymysqlreplication [as alias]
# Or: from pymysqlreplication import BinLogStreamReader [as alias]
def test_log_pos(self):
    query = "CREATE TABLE test (id INT NOT NULL AUTO_INCREMENT, data VARCHAR (50) NOT NULL, PRIMARY KEY (id))"
    self.execute(query)
    query = "INSERT INTO test (data) VALUES('Hello')"
    self.execute(query)
    self.execute("COMMIT")

    for i in range(6):
        self.stream.fetchone()
    # record position after insert
    log_file, log_pos = self.stream.log_file, self.stream.log_pos

    query = "UPDATE test SET data = 'World' WHERE id = 1"
    self.execute(query)
    self.execute("COMMIT")

    # resume stream from previous position
    if self.stream is not None:
        self.stream.close()
    self.stream = BinLogStreamReader(
        self.database,
        server_id=1024,
        resume_stream=True,
        log_file=log_file,
        log_pos=log_pos,
        ignored_events=self.ignoredEvents()
    )

    self.assertIsInstance(self.stream.fetchone(), RotateEvent)
    self.assertIsInstance(self.stream.fetchone(), FormatDescriptionEvent)
    self.assertIsInstance(self.stream.fetchone(), XidEvent)
    # QueryEvent for the BEGIN
    self.assertIsInstance(self.stream.fetchone(), QueryEvent)
    self.assertIsInstance(self.stream.fetchone(), TableMapEvent)
    self.assertIsInstance(self.stream.fetchone(), UpdateRowsEvent)
    self.assertIsInstance(self.stream.fetchone(), XidEvent)
Example 12: test_log_pos_handles_disconnects
# Required module: import pymysqlreplication [as alias]
# Or: from pymysqlreplication import BinLogStreamReader [as alias]
def test_log_pos_handles_disconnects(self):
    self.stream.close()
    self.stream = BinLogStreamReader(
        self.database,
        server_id=1024,
        resume_stream=False,
        only_events=[FormatDescriptionEvent, QueryEvent, TableMapEvent, WriteRowsEvent, XidEvent]
    )

    query = "CREATE TABLE test (id INT PRIMARY KEY AUTO_INCREMENT, data VARCHAR (50) NOT NULL)"
    self.execute(query)
    query = "INSERT INTO test (data) VALUES('Hello')"
    self.execute(query)
    self.execute("COMMIT")

    self.assertIsInstance(self.stream.fetchone(), FormatDescriptionEvent)
    self.assertGreater(self.stream.log_pos, 0)
    self.assertIsInstance(self.stream.fetchone(), QueryEvent)
    self.assertIsInstance(self.stream.fetchone(), QueryEvent)
    self.assertIsInstance(self.stream.fetchone(), TableMapEvent)
    self.assertIsInstance(self.stream.fetchone(), WriteRowsEvent)
    self.assertIsInstance(self.stream.fetchone(), XidEvent)
    self.assertGreater(self.stream.log_pos, 0)
Example 13: resetBinLog
# Required module: import pymysqlreplication [as alias]
# Or: from pymysqlreplication import BinLogStreamReader [as alias]
def resetBinLog(self):
    self.execute("RESET MASTER")
    if self.stream is not None:
        self.stream.close()
    self.stream = BinLogStreamReader(self.database, server_id=1024,
                                     ignored_events=self.ignoredEvents())
Example 14: sync_binlog_stream
# Required module: import pymysqlreplication [as alias]
# Or: from pymysqlreplication import BinLogStreamReader [as alias]
def sync_binlog_stream(mysql_conn, config, binlog_streams, state):
    binlog_streams_map = generate_streams_map(binlog_streams)

    for tap_stream_id in binlog_streams_map.keys():
        common.whitelist_bookmark_keys(BOOKMARK_KEYS, tap_stream_id, state)

    log_file, log_pos = calculate_bookmark(mysql_conn, binlog_streams_map, state)
    verify_log_file_exists(mysql_conn, log_file, log_pos)

    if config.get('server_id'):
        server_id = int(config.get('server_id'))
        LOGGER.info("Using provided server_id=%s", server_id)
    else:
        server_id = fetch_server_id(mysql_conn)
        LOGGER.info("No server_id provided, will use global server_id=%s", server_id)

    connection_wrapper = make_connection_wrapper(config)

    reader = BinLogStreamReader(
        connection_settings={},
        server_id=server_id,
        slave_uuid='stitch-slave-{}'.format(server_id),
        log_file=log_file,
        log_pos=log_pos,
        resume_stream=True,
        only_events=[RotateEvent, WriteRowsEvent, UpdateRowsEvent, DeleteRowsEvent],
        pymysql_wrapper=connection_wrapper
    )
    LOGGER.info("Starting binlog replication with log_file=%s, log_pos=%s", log_file, log_pos)
    try:
        _run_binlog_sync(mysql_conn, reader, binlog_streams_map, state)
    finally:
        # BinLogStreamReader doesn't implement the `with` protocol,
        # so try/finally is used to make sure the reader is closed.
        reader.close()

    singer.write_message(singer.StateMessage(value=copy.deepcopy(state)))
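As the comment in Example 14 notes, BinLogStreamReader does not implement the context-manager protocol, hence the try/finally. A slightly tidier sketch wraps the reader in contextlib.closing, which only requires a close() method; the connection settings here are placeholders:

from contextlib import closing
from pymysqlreplication import BinLogStreamReader

# Placeholder settings; a real caller would pass actual credentials.
settings = {"host": "127.0.0.1", "port": 3306, "user": "root", "passwd": ""}

with closing(BinLogStreamReader(connection_settings=settings, server_id=42)) as reader:
    for event in reader:
        event.dump()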
Example 15: _binlog_loader
# Required module: import pymysqlreplication [as alias]
# Or: from pymysqlreplication import BinLogStreamReader [as alias]
def _binlog_loader(self):
    """
    Read rows from the binlog and yield one action dict per changed row.
    """
    if self.is_binlog_sync:
        resume_stream = True
        logging.info("Resume from binlog_file: {file} binlog_pos: {pos}".format(file=self.log_file,
                                                                                pos=self.log_pos))
    else:
        resume_stream = False

    stream = BinLogStreamReader(connection_settings=self.binlog_conf,
                                server_id=self.config['mysql']['server_id'],
                                only_events=[DeleteRowsEvent, WriteRowsEvent, UpdateRowsEvent],
                                only_tables=[self.config['mysql']['table']],
                                resume_stream=resume_stream,
                                blocking=True,
                                log_file=self.log_file,
                                log_pos=self.log_pos)
    for binlogevent in stream:
        self.log_file = stream.log_file
        self.log_pos = stream.log_pos
        for row in binlogevent.rows:
            if isinstance(binlogevent, DeleteRowsEvent):
                rv = {
                    'action': 'delete',
                    'doc': row['values']
                }
            elif isinstance(binlogevent, UpdateRowsEvent):
                rv = {
                    'action': 'update',
                    'doc': row['after_values']
                }
            elif isinstance(binlogevent, WriteRowsEvent):
                rv = {
                    'action': 'index',
                    'doc': row['values']
                }
            else:
                logging.error('unknown action type in binlog')
                raise TypeError('unknown action type in binlog')
            yield rv
    stream.close()
    raise IOError('mysql connection closed')
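Because _binlog_loader is a generator, a caller only needs to iterate it and dispatch on the 'action' key of each yielded dict. A minimal, self-contained sketch of such a dispatcher is shown below; the print calls stand in for whatever indexing logic the caller actually has, and nothing here comes from the original project:

def apply_change(change):
    """Dispatch one change dict of the form {'action': ..., 'doc': ...}."""
    action, doc = change['action'], change['doc']
    if action == 'index':
        print('insert document:', doc)
    elif action == 'update':
        print('update document:', doc)
    elif action == 'delete':
        print('delete document:', doc)

# Typical use (hypothetical `loader` object exposing _binlog_loader()):
# for change in loader._binlog_loader():
#     apply_change(change)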