This page collects typical usage examples of the Python method logger.logger.Logger.stop_exe. If you have been wondering what Logger.stop_exe does, how to call it, or where to find examples of it, the curated code samples below may help. You can also explore further usage examples of the class that defines the method, logger.logger.Logger.
A total of 13 code examples of Logger.stop_exe are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
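The Logger class itself is not reproduced on this page. For orientation only, a stop_exe-style helper might look roughly like the following sketch; this is a hypothetical stand-in, not the project's actual implementation, and the method names are simply mirrored from how the examples below call them.

# Hypothetical sketch of a Logger with a stop_exe helper; not the real
# logger.logger.Logger, whose source is not shown on this page.
import logging
import sys


class Logger:

    def __init__(self):
        logging.basicConfig(level=logging.DEBUG)
        self._log = logging.getLogger(__name__)

    def debug(self, message):
        self._log.debug(message)

    def info(self, message):
        self._log.info(message)

    def highlight(self, level, message, color, effect=None):
        # The real class presumably colours console output; here we simply
        # delegate to the matching logging level and ignore color/effect.
        getattr(self._log, level)(message)

    def stop_exe(self, message):
        # Log a final message and stop the execution of the program.
        self._log.error(message)
        sys.exit(1)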
Example 1: forbid_root
# Required import: from logger.logger import Logger [as alias]
# Or: from logger.logger.Logger import stop_exe [as alias]
def forbid_root(logger=None):
    '''
    Target:
        - stop the execution of the program if this is being run by "root".
    '''
    if not logger:
        logger = Logger()
    try:
        if getuser() == 'root':  # Get system username
            raise Exception()
    except Exception as e:
        logger.debug('Error en la función "forbid_root": {}.'.format(
            str(e)))
        logger.stop_exe(Messenger.ROOT_NOT_ALLOWED)
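A call site for forbid_root is trivial; the sketch below is hypothetical and assumes Logger and Messenger.ROOT_NOT_ALLOWED are importable as in the snippet above.

# Hypothetical call site for forbid_root.
from getpass import getuser

if __name__ == '__main__':
    forbid_root()  # Calls Logger.stop_exe and exits if run as root
    print('Running as "{}", continuing...'.format(getuser()))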
Example 2: create_dir
# Required import: from logger.logger import Logger [as alias]
# Or: from logger.logger.Logger import stop_exe [as alias]
def create_dir(path, logger=None):
    '''
    Target:
        - create the specified directory if it does not exist, stopping the
          execution of the program if it cannot be created.
    Parameters:
        - path: directory to create.
        - logger: a logger to show and log some messages.
    '''
    if not logger:
        logger = Logger()
    try:
        if not os.path.exists(path):  # If path does not exist...
            os.makedirs(path)  # Create it
    except Exception as e:
        logger.debug('Error en la función "create_dir": {}.'.format(
            str(e)))
        logger.stop_exe(Messenger.USER_NOT_ALLOWED_TO_CHDIR)
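A hypothetical usage sketch follows; the path is made up, and the same pattern is used later in example 12 via Dir.create_dir.

# Hypothetical usage: make sure a backup directory exists before writing to it.
import os

backup_dir = os.path.join(os.path.expanduser('~'), 'pg_backups')
create_dir(backup_dir)  # Calls Logger.stop_exe if the directory cannot be created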
Example 3: __init__
# Required import: from logger.logger import Logger [as alias]
# Or: from logger.logger.Logger import stop_exe [as alias]
class Terminator:

    target_all = None  # Flag which determines whether to terminate every connection
    target_user = None  # Terminate any connection of a specific user
    target_dbs = []  # Terminate any connection to a list of databases
    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages

    def __init__(self, connecter, target_all=False, target_user='',
                 target_dbs=[], logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Messenger.NO_CONNECTION_PARAMS)

        if target_all is None:
            self.target_all = target_all
        elif isinstance(target_all, bool):
            self.target_all = target_all
        elif Checker.str_is_bool(target_all):
            self.target_all = Casting.str_to_bool(target_all)
        else:
            self.logger.stop_exe(Messenger.INVALID_TARGET_ALL)

        self.target_user = target_user

        if target_dbs is None:
            self.target_dbs = []
        elif isinstance(target_dbs, list):
            self.target_dbs = target_dbs
        else:
            self.target_dbs = Casting.str_to_list(target_dbs)

        message = Messenger.TERMINATOR_VARS.format(
            server=self.connecter.server, user=self.connecter.user,
            port=self.connecter.port, target_all=self.target_all,
            target_user=target_user, target_dbs=self.target_dbs)
        self.logger.debug(Messenger.TERMINATOR_VARS_INTRO)
        self.logger.debug(message)

    def terminate_backend_user(self):
        '''
        Target:
            - terminate every connection of a specific user to PostgreSQL (as
              long as the target user is the one who is running the program).
        '''
        message = Messenger.BEGINNING_TERMINATE_USER_CONN.format(
            target_user=self.target_user)
        self.logger.highlight('info', message, 'white')

        try:
            pg_pid = self.connecter.get_pid_str()  # Get PID variable's name

            sql = Queries.GET_CURRENT_PG_USER
            self.connecter.cursor.execute(sql)
            current_pg_user = self.connecter.cursor.fetchone()[0]

            if self.target_user == current_pg_user:
                message = Messenger.TARGET_USER_IS_CURRENT_USER.format(
                    target_user=self.target_user)
                self.logger.highlight('warning', message, 'yellow')
            else:
                formatted_sql = Queries.BACKEND_PG_USER_EXISTS.format(
                    pg_pid=pg_pid, target_user=self.target_user)
                self.connecter.cursor.execute(formatted_sql)
                result = self.connecter.cursor.fetchone()

                if result:
                    formatted_sql = Queries.TERMINATE_BACKEND_PG_USER.format(
                        pg_pid=pg_pid, target_user=self.target_user)
                    self.connecter.cursor.execute(formatted_sql)
                else:
                    message = Messenger.NO_USER_CONNS.format(
                        target_user=self.target_user)
                    self.logger.info(message)

            message = Messenger.TERMINATE_USER_CONN_DONE.format(
                target_user=self.target_user)
            self.logger.highlight('info', message, 'green')

        except Exception as e:
            self.logger.debug('Error en la función "terminate_backend_user": '
                              '{}.'.format(str(e)))
            message = Messenger.TERMINATE_USER_CONN_FAIL.format(
                target_user=self.target_user)
            self.logger.highlight('warning', message, 'yellow', effect='bold')

        self.logger.highlight('info', Messenger.TERMINATOR_DONE, 'green')

    def terminate_backend_db(self, target_db):
        '''
        Target:
#......... part of the code omitted here .........
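A hypothetical usage sketch follows; the connection parameters and the user name are made up, and Connecter is the class shown in example 4 below.

# Hypothetical usage of Terminator.
connecter = Connecter(server='localhost', user='postgres', port=5432)
terminator = Terminator(connecter, target_user='test_user')
terminator.terminate_backend_user()  # Ends test_user's open connections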
Example 4: version
# Required import: from logger.logger import Logger [as alias]
# Or: from logger.logger.Logger import stop_exe [as alias]
class Connecter:
    '''This class manages connections with database engines and operations
    involving them.
    So far, only PostgreSQL is supported.
    '''
    conn = None  # The PostgreSQL connection object
    cursor = None  # The cursor of the PostgreSQL connection
    server = None  # The target host of the connection
    user = None  # The PostgreSQL user who makes the connection
    port = None  # The target port of the connection
    database = None  # The target database of the connection
    logger = None  # A logger to show and log some messages

    # PostgreSQL version (from this one on some variables change their names)
    PG_PID_VERSION_THRESHOLD = 90200
    pg_pid_91 = 'procpid'  # Name for PostgreSQL PID variable till version 9.1
    pg_pid_92 = 'pid'  # Name for PostgreSQL PID variable since version 9.2

    def __init__(self, server, user, port, database=None, logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        self.server = server
        self.user = user

        if isinstance(port, int):
            self.port = port
        elif Checker.str_is_int(port):
            self.port = Casting.str_to_int(port)
        else:
            self.logger.stop_exe(Msg.INVALID_PORT)

        if database is None:
            self.database = Default.CONNECTION_DATABASE
        elif database:
            self.database = database
        else:
            self.logger.stop_exe(Msg.NO_CONNECTION_DATABASE)

        try:
            self.conn = psycopg2.connect(host=self.server, user=self.user,
                                         port=self.port,
                                         database=self.database)
            self.conn.autocommit = True
            # TODO: ask for a password here if possible
            self.cursor = self.conn.cursor(
                cursor_factory=psycopg2.extras.DictCursor)
        except Exception as e:
            self.logger.debug('Error en la función "pg_connect": {}.'.format(
                str(e)))
            self.logger.stop_exe(Msg.CONNECT_FAIL)

    def pg_disconnect(self):
        '''
        Target:
            - disconnect from PostgreSQL.
        '''
        try:
            self.cursor.close()
            self.conn.close()
        except Exception as e:
            self.logger.debug('Error en la función "pg_disconnect": '
                              '{}.'.format(str(e)))
            self.logger.stop_exe(Msg.DISCONNECT_FAIL)

    def get_pg_version(self):
        '''
        Target:
            - get the PostgreSQL version.
        Return:
            - an integer which gives the PostgreSQL version.
        '''
        return self.conn.server_version

    def get_pretty_pg_version(self):
        '''
        Target:
            - get the pretty PostgreSQL version.
        Return:
            - a string which gives the PostgreSQL version and more details.
        '''
        try:
            self.cursor.execute(Queries.GET_PG_PRETTY_VERSION)
            pretty_pg_version = self.cursor.fetchone()

            return pretty_pg_version[0]

        except Exception as e:
            # Rollback to avoid errors in the next queries caused by waiting
            # for this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pretty_pg_version": '
                              '{}.'.format(str(e)))
            self.logger.highlight('warning', Msg.GET_PG_VERSION_FAIL, 'yellow')
            return None
#......... part of the code omitted here .........
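A hypothetical usage sketch follows; the connection parameters are made up and a running PostgreSQL instance is assumed.

# Hypothetical usage of Connecter.
connecter = Connecter(server='localhost', user='postgres', port=5432)
print(connecter.get_pg_version())         # e.g. 90405 for PostgreSQL 9.4.5
print(connecter.get_pretty_pg_version())  # Human-readable version string
connecter.pg_disconnect()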
Example 5: __init__
# Required import: from logger.logger import Logger [as alias]
# Or: from logger.logger.Logger import stop_exe [as alias]
class TrimmerCluster:

    bkp_path = ''  # The path where the backups are stored
    prefix = ''  # The prefix of the backups' names
    min_n_bkps = None  # Minimum number of a database's backups to keep
    exp_days = None  # Number of days which make a backup obsolete
    max_size = None  # Maximum size of a group of a database's backups
    # Maximum size in Bytes of a group of a database's backups
    max_size_bytes = None
    # Related to max_size, the equivalence used to turn the unit of measure
    # specified in the max_size variable into Bytes
    equivalence = 10 ** 6
    logger = None  # Logger to show and log some messages

    def __init__(self, bkp_path='', prefix='', min_n_bkps=1, exp_days=365,
                 max_size=5000, logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if bkp_path and os.path.isdir(bkp_path):
            self.bkp_path = bkp_path
        else:
            self.logger.stop_exe(Messenger.DIR_DOES_NOT_EXIST)

        if prefix is None:
            self.prefix = Default.PREFIX
        else:
            self.prefix = prefix

        if min_n_bkps is None:
            self.min_n_bkps = Default.MIN_N_BKPS
        elif isinstance(min_n_bkps, int):
            self.min_n_bkps = min_n_bkps
        elif Checker.str_is_int(min_n_bkps):
            self.min_n_bkps = Casting.str_to_int(min_n_bkps)
        else:
            self.logger.stop_exe(Messenger.INVALID_MIN_BKPS)

        if exp_days is None:
            self.exp_days = Default.EXP_DAYS
        elif isinstance(exp_days, int) and exp_days >= -1:
            self.exp_days = exp_days
        elif Checker.str_is_valid_exp_days(exp_days):
            self.exp_days = Casting.str_to_int(exp_days)
        else:
            self.logger.stop_exe(Messenger.INVALID_OBS_DAYS)

        if max_size is None:
            self.max_size = Default.MAX_SIZE
        elif Checker.str_is_valid_max_size(max_size):
            self.max_size = max_size
        else:
            self.logger.stop_exe(Messenger.INVALID_MAX_TSIZE)

        # Split a string with size and unit of measure into a dictionary
        self.max_size = Casting.str_to_max_size(self.max_size)
        # Get the equivalence in Bytes of the specified unit of measure
        self.equivalence = Casting.get_equivalence(self.max_size['unit'])
        # Get the specified size in Bytes
        self.max_size_bytes = self.max_size['size'] * self.equivalence

        message = Messenger.CL_TRIMMER_VARS.format(
            bkp_path=self.bkp_path, prefix=self.prefix,
            min_n_bkps=self.min_n_bkps, exp_days=self.exp_days,
            max_size=self.max_size)
        self.logger.debug(Messenger.CL_TRIMMER_VARS_INTRO)
        self.logger.debug(message)

    def trim_cluster(self, ht_bkps_list):
        '''
        Target:
            - remove (if necessary) some of the cluster's backups, taking into
              account some parameters in the following order: minimum number
              of backups to keep > obsolete backups.
        Parameters:
            - ht_bkps_list: list of backups of a cluster to analyse and trim.
        '''
        if self.exp_days == -1:  # No expiration date
            x_days_ago = None
        else:
            x_days_ago = time.time() - (60 * 60 * 24 * self.exp_days)

        # Store the total number of backups of the cluster
        num_bkps = len(ht_bkps_list)
        # Clone the list to avoid conflict errors when removing
        ht_bkps_lt = ht_bkps_list[:]

        unlinked = False

        self.logger.highlight('info', Messenger.BEGINNING_CL_TRIMMER, 'white')

        start_time = DateTools.get_current_datetime()

        for f in ht_bkps_list:

            # Break if the number of backups does not exceed the minimum
            if num_bkps <= self.min_n_bkps:
#......... part of the code omitted here .........
Example 6: __init__
# Required import: from logger.logger import Logger [as alias]
# Or: from logger.logger.Logger import stop_exe [as alias]
class Replicator:

    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages
    new_dbname = ''  # Name of the copy
    original_dbname = ''  # Name of the original database

    def __init__(self, connecter=None, new_dbname='', original_dbname='',
                 logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Msg.NO_CONNECTION_PARAMS)

        # First check whether the name of the copy already exists in PostgreSQL
        self.connecter.cursor.execute(Queries.PG_DB_EXISTS, (new_dbname, ))
        # Do not replicate if the name already exists
        result = self.connecter.cursor.fetchone()
        if result:
            msg = Msg.DB_ALREADY_EXISTS.format(dbname=new_dbname)
            self.logger.stop_exe(msg)

        if new_dbname:
            self.new_dbname = new_dbname
        else:
            self.logger.stop_exe(Msg.NO_NEW_DBNAME)

        # First check whether the name of the source exists in PostgreSQL
        self.connecter.cursor.execute(Queries.PG_DB_EXISTS,
                                      (original_dbname, ))
        result = self.connecter.cursor.fetchone()
        if not result:
            msg = Msg.DB_DOES_NOT_EXIST.format(dbname=original_dbname)
            self.logger.stop_exe(msg)

        if original_dbname:
            self.original_dbname = original_dbname
        else:
            self.logger.stop_exe(Msg.NO_ORIGINAL_DBNAME)

        msg = Msg.REPLICATOR_VARS.format(server=self.connecter.server,
                                         user=self.connecter.user,
                                         port=self.connecter.port,
                                         original_dbname=self.original_dbname,
                                         new_dbname=self.new_dbname)
        self.logger.debug(Msg.REPLICATOR_VARS_INTRO)
        self.logger.debug(msg)

    def replicate_pg_db(self):
        '''
        Target:
            - clone a specified database in PostgreSQL.
        '''
        try:
            pg_pid = self.connecter.get_pid_str()

            formatted_sql = Queries.BACKEND_PG_DB_EXISTS.format(
                pg_pid=pg_pid, target_db=self.original_dbname)
            self.connecter.cursor.execute(formatted_sql)
            result = self.connecter.cursor.fetchone()

            if result:
                msg = Msg.ACTIVE_CONNS_ERROR.format(
                    dbname=self.original_dbname)
                self.logger.stop_exe(msg)

            formatted_query_clone_pg_db = Queries.CLONE_PG_DB.format(
                dbname=self.new_dbname, original_dbname=self.original_dbname,
                user=self.connecter.user)

            msg = Msg.BEGINNING_REPLICATOR.format(
                original_dbname=self.original_dbname)
            self.logger.highlight('info', msg, 'white')

            # Get the database's "datallowconn" value
            datallowconn = self.connecter.get_datallowconn(
                self.original_dbname)

            # If datallowconn is allowed, change it temporarily
            if datallowconn:
                # Disallow connections to the database during the
                # process
                result = self.connecter.disallow_db_conn(self.original_dbname)
                if not result:
                    msg = Msg.DISALLOW_CONN_TO_PG_DB_FAIL.format(
                        dbname=self.original_dbname)
                    self.logger.highlight('warning', msg, 'yellow')

            # self.connecter.cursor.execute('commit')

            start_time = DateTools.get_current_datetime()

            # Replicate the database
            self.connecter.cursor.execute(formatted_query_clone_pg_db)

            end_time = DateTools.get_current_datetime()
            # Get and show the process' duration
#......... part of the code omitted here .........
Example 7: file
# Required import: from logger.logger import Logger [as alias]
# Or: from logger.logger.Logger import stop_exe [as alias]
class Restorer:

    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages
    db_backup = ''  # Absolute path of the backup file (of a database)
    new_dbname = ''  # New name for the database restored in PostgreSQL

    def __init__(self, connecter=None, db_backup='', new_dbname='',
                 logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Messenger.NO_CONNECTION_PARAMS)

        if db_backup and os.path.isfile(db_backup):
            self.db_backup = db_backup
        else:
            self.logger.stop_exe(Messenger.NO_BKP_TO_RESTORE)

        if new_dbname:
            self.new_dbname = new_dbname
        else:
            self.logger.stop_exe(Messenger.NO_DBNAME_TO_RESTORE)

        message = Messenger.DB_RESTORER_VARS.format(
            server=self.connecter.server, user=self.connecter.user,
            port=self.connecter.port, db_backup=self.db_backup,
            new_dbname=self.new_dbname)
        self.logger.debug(Messenger.DB_RESTORER_VARS_INTRO)
        self.logger.debug(message)

    def restore_db_backup(self):
        '''
        Target:
            - restore a database's backup in PostgreSQL.
        '''
        # replicator = Replicator(self.connecter, self.new_dbname,
        #                         Default.RESTORING_TEMPLATE, self.logger)
        # result = self.connecter.allow_db_conn(Default.RESTORING_TEMPLATE)
        # if result:
        #     replicator.replicate_pg_db()
        #     self.connecter.disallow_db_conn(Default.RESTORING_TEMPLATE)
        # else:
        #     self.logger.stop_exe(Messenger.ALLOW_DB_CONN_FAIL.format(
        #         dbname=Default.RESTORING_TEMPLATE))

        # Regular expression which must match the backup's name
        regex = r'.*db_(.+)_(\d{8}_\d{6}_.+)\.(dump|bz2|gz|zip)$'
        regex = re.compile(regex)

        if re.match(regex, self.db_backup):
            # Store the parts of the backup's name (name, date, ext)
            parts = regex.search(self.db_backup).groups()
            # Store only the extension to know the type of file
            ext = parts[2]
        else:
            self.logger.stop_exe(Messenger.NO_BACKUP_FORMAT)

        message = Messenger.BEGINNING_DB_RESTORER.format(
            db_backup=self.db_backup, new_dbname=self.new_dbname)
        self.logger.highlight('info', message, 'white')
        self.logger.info(Messenger.WAIT_PLEASE)

        if ext == 'gz':
            command = 'gunzip -c {} -k | pg_restore -U {} -h {} -p {} ' \
                      '-d {}'.format(self.db_backup, self.connecter.user,
                                     self.connecter.server,
                                     self.connecter.port, self.new_dbname)
        elif ext == 'bz2':
            command = 'bunzip2 -c {} -k | pg_restore -U {} -h {} -p {} ' \
                      '-d {}'.format(self.db_backup, self.connecter.user,
                                     self.connecter.server,
                                     self.connecter.port, self.new_dbname)
        elif ext == 'zip':
            command = 'unzip -p {} | pg_restore -U {} -h {} -p {} ' \
                      '-d {}'.format(self.db_backup, self.connecter.user,
                                     self.connecter.server,
                                     self.connecter.port, self.new_dbname)
        else:
            command = 'pg_restore -U {} -h {} -p {} -d {} {}'.format(
                self.connecter.user, self.connecter.server,
                self.connecter.port, self.new_dbname, self.db_backup)

        try:
            start_time = DateTools.get_current_datetime()
            # Make the restoration of the database
            result = subprocess.call(command, shell=True)
            end_time = DateTools.get_current_datetime()
            # Get and show the process' duration
            diff = DateTools.get_diff_datetimes(start_time, end_time)

            if result != 0:
                raise Exception()
#......... part of the code omitted here .........
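A hypothetical usage sketch follows; the backup path and database names are made up, but the file name matches the regular expression checked above (db_<name>_<YYYYMMDD>_<HHMMSS>_... plus a dump/bz2/gz/zip extension).

# Hypothetical usage of Restorer.
connecter = Connecter(server='localhost', user='postgres', port=5432)
restorer = Restorer(connecter,
                    db_backup='/backups/db_sales_20160131_120000_full.dump',
                    new_dbname='sales_restored')
restorer.restore_db_backup()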
Example 8: __init__
# Required import: from logger.logger import Logger [as alias]
# Or: from logger.logger.Logger import stop_exe [as alias]
class Dropper:

    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages
    dbnames = []  # List of databases to be removed

    def __init__(self, connecter=None, dbnames=[], logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Msg.NO_CONNECTION_PARAMS)

        if isinstance(dbnames, list):
            self.dbnames = dbnames
        else:
            self.dbnames = Casting.str_to_list(dbnames)

        msg = Msg.DROPPER_VARS.format(server=self.connecter.server,
                                      user=self.connecter.user,
                                      port=self.connecter.port,
                                      dbnames=self.dbnames)
        self.logger.debug(Msg.DROPPER_VARS_INTRO)
        self.logger.debug(msg)

    def drop_pg_db(self, dbname, pg_superuser):
        '''
        Target:
            - remove a database in PostgreSQL.
        Parameters:
            - dbname: the PostgreSQL database's name which is going to be
              removed.
            - pg_superuser: a flag which indicates whether the current user is
              PostgreSQL superuser or not.
        '''
        delete = False

        try:
            self.connecter.cursor.execute(Queries.PG_DB_EXISTS, (dbname, ))
            result = self.connecter.cursor.fetchone()

            if result:

                pg_pid = self.connecter.get_pid_str()

                formatted_sql = Queries.BACKEND_PG_DB_EXISTS.format(
                    pg_pid=pg_pid, target_db=dbname)
                self.connecter.cursor.execute(formatted_sql)
                result = self.connecter.cursor.fetchone()

                # If there are not any connections to the target database...
                if not result:

                    # Users who are not superusers will only be able to drop
                    # the databases they own
                    if not pg_superuser:

                        self.connecter.cursor.execute(Queries.GET_PG_DB_OWNER,
                                                      (dbname, ))
                        db = self.connecter.cursor.fetchone()

                        if db['owner'] != self.connecter.user:
                            msg = Msg.DROP_DB_NOT_ALLOWED.format(
                                user=self.connecter.user, dbname=dbname)
                            self.logger.highlight('warning', msg, 'yellow')
                        else:
                            delete = True

                    else:
                        delete = True

                    if delete:

                        # Get the database's "datallowconn" value
                        datallowconn = self.connecter.get_datallowconn(dbname)

                        # If datallowconn is allowed, change it temporarily
                        if datallowconn:

                            # Disallow connections to the database during the
                            # process
                            result = self.connecter.disallow_db_conn(dbname)
                            if not result:
                                msg = Msg.DISALLOW_CONN_TO_PG_DB_FAIL.format(
                                    dbname=dbname)
                                self.logger.highlight('warning', msg, 'yellow')

                        fmt_query_drop_db = Queries.DROP_PG_DB.format(
                            dbname=dbname)

                        start_time = DateTools.get_current_datetime()
                        # Drop the database
                        self.connecter.cursor.execute(fmt_query_drop_db)
#......... part of the code omitted here .........
Example 9: process
# Required import: from logger.logger import Logger [as alias]
# Or: from logger.logger.Logger import stop_exe [as alias]
class Vacuumer:

    in_dbs = []  # List of databases to be included in the process
    in_regex = ''  # Regular expression which must match the included databases
    # Flag which determines whether inclusion conditions predominate over the
    # exclusion ones
    in_priority = False
    ex_dbs = []  # List of databases to be excluded in the process
    ex_regex = ''  # Regular expression which must match the excluded databases
    # Flag which determines whether the templates must be included
    ex_templates = True
    # Use other PostgreSQL user during the backup process (only for superusers)
    db_owner = ''
    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages

    def __init__(self, connecter=None, in_dbs=[], in_regex='',
                 in_priority=False, ex_dbs=['postgres'], ex_regex='',
                 ex_templates=True, db_owner='', logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Messenger.NO_CONNECTION_PARAMS)

        if isinstance(in_dbs, list):
            self.in_dbs = in_dbs
        else:
            self.in_dbs = Casting.str_to_list(in_dbs)

        if Checker.check_regex(in_regex):
            self.in_regex = in_regex
        else:
            self.logger.stop_exe(Messenger.INVALID_IN_REGEX)

        if isinstance(in_priority, bool):
            self.in_priority = in_priority
        elif Checker.str_is_bool(in_priority):
            self.in_priority = Casting.str_to_bool(in_priority)
        else:
            self.logger.stop_exe(Messenger.INVALID_IN_PRIORITY)

        if isinstance(ex_dbs, list):
            self.ex_dbs = ex_dbs
        else:
            self.ex_dbs = Casting.str_to_list(ex_dbs)

        if Checker.check_regex(ex_regex):
            self.ex_regex = ex_regex
        else:
            self.logger.stop_exe(Messenger.INVALID_EX_REGEX)

        if isinstance(ex_templates, bool):
            self.ex_templates = ex_templates
        elif Checker.str_is_bool(ex_templates):
            self.ex_templates = Casting.str_to_bool(ex_templates)
        else:
            self.logger.stop_exe(Messenger.INVALID_EX_TEMPLATES)

        if db_owner is None:
            self.db_owner = Default.DB_OWNER
        else:
            self.db_owner = db_owner

        message = Messenger.VACUUMER_VARS.format(
            server=self.connecter.server, user=self.connecter.user,
            port=self.connecter.port, in_dbs=self.in_dbs,
            in_regex=self.in_regex, in_priority=self.in_priority,
            ex_dbs=self.ex_dbs, ex_regex=self.ex_regex,
            ex_templates=self.ex_templates, db_owner=self.db_owner)
        self.logger.debug(Messenger.VACUUMER_VARS_INTRO)
        self.logger.debug(message)

    def vacuum_db(self, dbname):
        '''
        Target:
            - vacuum a PostgreSQL database.
        Parameters:
            - dbname: name of the database which is going to be vacuumed.
        Return:
            - a boolean which indicates the success of the process.
        '''
        success = True

        # Store the command to do
        command = 'vacuumdb {} -U {} -h {} -p {}'.format(
            dbname, self.connecter.user, self.connecter.server,
            self.connecter.port)

        try:
            # Execute the command in console
            result = subprocess.call(command, shell=True)
            if result != 0:
                raise Exception()
#......... part of the code omitted here .........
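A hypothetical usage sketch follows; the database names are made up and the vacuumdb client must be on the PATH for the built command to succeed.

# Hypothetical usage of Vacuumer.
connecter = Connecter(server='localhost', user='postgres', port=5432)
vacuumer = Vacuumer(connecter, in_dbs=['sales', 'stock'])
for dbname in vacuumer.in_dbs:
    vacuumer.vacuum_db(dbname)  # Runs: vacuumdb sales -U postgres -h localhost -p 5432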
Example 10: __init__
# Required import: from logger.logger import Logger [as alias]
# Or: from logger.logger.Logger import stop_exe [as alias]
class Scheduler:

    time = ''  # Time when the command is going to be executed in Cron
    command = ''  # Command which is going to be executed in Cron.
    logger = None  # Logger to show and log some messages

    def __init__(self, time='', command='', logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        self.time = time.strip()
        self.command = command.strip()

    def show_lines(self):
        '''
        Target:
            - show the lines of the program's CRON file.
        '''
        self.logger.highlight('info', Messenger.SHOWING_CRONTAB_FILE, 'white')
        print()

        cron = CronTab(user=True)

        if cron:
            for line in cron.lines:
                print(str(line))
        else:
            print('\033[1;40;93m' + Messenger.NO_CRONTAB_FILE + '\033[0m')

    def add_line(self):
        '''
        Target:
            - add a line to the program's CRON file.
        '''
        cron = CronTab(user=True)

        job = cron.new(command=self.command)

        if self.time in ['@yearly', '@annually']:
            job.setall('0 0 1 1 *')
        elif self.time == '@monthly':
            job.setall('0 0 1 * *')
        elif self.time == '@weekly':
            job.setall('0 0 * * 0')
        elif self.time in ['@daily', '@midnight']:
            job.setall('0 0 * * *')
        elif self.time == '@hourly':
            job.setall('0 * * * *')
        elif self.time == '@reboot':
            job.every_reboot()
        else:
            job.setall(self.time)

        self.logger.highlight('info', Messenger.SCHEDULER_ADDING, 'white')

        if not cron:
            self.logger.info(Messenger.CREATING_CRONTAB)

        try:
            cron.write()
            self.logger.highlight('info', Messenger.SCHEDULER_ADD_DONE,
                                  'green')
            #print(cron.render())

        except Exception as e:
            self.logger.debug('Error en la función "add_line": {}.'.format(
                str(e)))
            self.logger.stop_exe(Messenger.SCHEDULER_ADD_FAIL)

    def remove_line(self):
        '''
        Target:
            - remove a line from the program's CRON file.
        '''
        self.logger.highlight('info', Messenger.SCHEDULER_REMOVING, 'white')

        cron = CronTab(user=True)

        if not cron:
            self.logger.stop_exe(Messenger.NO_CRONTAB_FILE)

        deletion = False

        line = self.time + ' ' + self.command

        for job in cron:
            if str(job).strip() == line:
                try:
                    cron.remove(job)
                    message = Messenger.SCHEDULER_REMOVE_DONE.format(job=job)
                    self.logger.highlight('info', message, 'green')
                    deletion = True
                except Exception as e:
                    self.logger.debug('Error en la función "remove_line": '
#......... part of the code omitted here .........
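A hypothetical usage sketch follows; the scheduled command is made up, and the python-crontab package must be installed for CronTab to be available.

# Hypothetical usage of Scheduler.
scheduler = Scheduler(time='@daily',
                      command='/usr/local/bin/backup_script.sh',
                      logger=Logger())
scheduler.add_line()    # Writes "0 0 * * * /usr/local/bin/backup_script.sh" to the crontab
scheduler.show_lines()  # Prints the current contents of the crontab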
Example 11: __init__
# Required import: from logger.logger import Logger [as alias]
# Or: from logger.logger.Logger import stop_exe [as alias]
class Alterer:

    in_dbs = []  # List of databases to be included in the process
    old_role = ''  # Current owner of the database's tables
    new_role = ''  # New owner for the database and its tables
    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages

    def __init__(self, connecter=None, in_dbs=[], old_role='', new_role='',
                 logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Msg.NO_CONNECTION_PARAMS)

        if isinstance(in_dbs, list):
            self.in_dbs = in_dbs
        else:
            self.in_dbs = Casting.str_to_list(in_dbs)

        if old_role:
            self.old_role = old_role
        else:
            self.logger.stop_exe(Msg.NO_OLD_ROLE)

        if not new_role:
            self.logger.stop_exe(Msg.NO_NEW_ROLE)

        # First check whether the user exists in PostgreSQL or not
        self.connecter.cursor.execute(Queries.PG_USER_EXISTS, (new_role, ))
        # Do not alter the database if the user does not exist
        result = self.connecter.cursor.fetchone()

        if result:
            self.new_role = new_role
        else:
            msg = Msg.USER_DOES_NOT_EXIST.format(user=new_role)
            self.logger.stop_exe(msg)

        msg = Msg.ALTERER_VARS.format(
            server=self.connecter.server, user=self.connecter.user,
            port=self.connecter.port, in_dbs=self.in_dbs,
            old_role=self.old_role, new_role=self.new_role)
        self.logger.debug(Msg.ALTERER_VARS_INTRO)
        self.logger.debug(msg)

    def alter_db_owner(self, db):
        '''
        Target:
            - change the owner of a database and its tables.
        Parameters:
            - db: database which is going to be altered.
        Return:
            - a boolean which indicates the success of the process.
        '''
        msg = Msg.ALTERER_FEEDBACK.format(old_role=self.old_role,
                                          new_role=self.new_role)
        self.logger.info(msg)

        success = True

        dbname = db['datname']

        if db['owner'] != 'postgres':  # Do not allow switching the owner if it is postgres

            if db['datallowconn'] == 1:  # Check if the db allows connections

                try:
                    # Change the owner of the database
                    self.connecter.cursor.execute(
                        Queries.CHANGE_PG_DB_OWNER.format(
                            dbname=dbname, new_role=self.new_role))

                except Exception as e:
                    success = False
                    self.logger.debug('Error en la función "alter_db_owner": '
                                      '{}'.format(str(e)))
                    msg = Msg.CHANGE_PG_DB_OWNER_FAIL
                    self.logger.highlight('warning', msg, 'yellow')

                # Start another connection to the target database to be able
                # to apply the next query
                own_connecter = Connecter(server=self.connecter.server,
                                          user=self.connecter.user,
                                          port=self.connecter.port,
                                          database=dbname, logger=self.logger)

                # Disallow connections to the database during the process
                result = self.connecter.disallow_db_conn(dbname)
                if not result:
                    msg = Msg.DISALLOW_CONN_TO_PG_DB_FAIL.format(dbname=dbname)
                    self.logger.highlight('warning', msg, 'yellow')

                try:
                    # Change the owner of the database's tables
                    own_connecter.cursor.execute(
#......... part of the code omitted here .........
Example 12: __init__
# Required import: from logger.logger import Logger [as alias]
# Or: from logger.logger.Logger import stop_exe [as alias]
class BackerCluster:

    bkp_path = ''  # The path where the backups are stored
    group = ''  # The name of the subdirectory where the backups are stored
    bkp_type = ''  # The type of the backups' files
    prefix = ''  # The prefix of the backups' names
    # Flag which determines whether the databases must be vacuumed before the
    # backup process
    vacuum = True
    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages

    def __init__(self, connecter=None, bkp_path='', group='',
                 bkp_type='dump', prefix='', vacuum=True, logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Msg.NO_CONNECTION_PARAMS)

        # If the backup directory is not specified, create a default one to
        # store the backups
        if bkp_path:
            self.bkp_path = bkp_path
        else:
            self.bkp_path = Default.BKP_PATH
            Dir.create_dir(self.bkp_path, self.logger)

        if group:
            self.group = group
        else:
            self.group = Default.GROUP

        if bkp_type is None:
            self.bkp_type = Default.BKP_TYPE
        elif Checker.check_compress_type(bkp_type):
            self.bkp_type = bkp_type
        else:
            self.logger.stop_exe(Msg.INVALID_BKP_TYPE)

        self.prefix = prefix

        if isinstance(vacuum, bool):
            self.vacuum = vacuum
        elif Checker.str_is_bool(vacuum):
            self.vacuum = Casting.str_to_bool(vacuum)
        else:
            self.logger.stop_exe(Msg.INVALID_VACUUM)

        msg = Msg.CL_BACKER_VARS.format(
            server=self.connecter.server, user=self.connecter.user,
            port=self.connecter.port, bkp_path=self.bkp_path, group=self.group,
            bkp_type=self.bkp_type, prefix=self.prefix, vacuum=self.vacuum)
        self.logger.debug(Msg.CL_BACKER_VARS_INTRO)
        self.logger.debug(msg)

    def backup_all(self, bkps_dir):
        '''
        Target:
            - make a backup of a cluster.
        Parameters:
            - bkps_dir: directory where the backup is going to be stored.
        Return:
            - a boolean which indicates the success of the process.
        '''
        success = True

        # Get date and time of the zone
        init_ts = DateTools.get_date()
        # Get current year
        year = str(DateTools.get_year(init_ts))
        # Get current month
        month = str(DateTools.get_month(init_ts))
        # Create new directories with the year and the month of the backup
        bkp_dir = bkps_dir + year + '/' + month + '/'
        Dir.create_dir(bkp_dir, self.logger)

        # Set backup's name
        file_name = self.prefix + 'ht_' + self.connecter.server + \
            str(self.connecter.port) + '_cluster_' + init_ts + '.' + \
            self.bkp_type

        # Store the command to do depending on the backup type
        if self.bkp_type == 'gz':  # Zip with gzip
            command = 'pg_dumpall -U {} -h {} -p {} | gzip > {}'.format(
                self.connecter.user, self.connecter.server,
                self.connecter.port, bkp_dir + file_name)
        elif self.bkp_type == 'bz2':  # Zip with bzip2
            command = 'pg_dumpall -U {} -h {} -p {} | bzip2 > {}'.format(
                self.connecter.user, self.connecter.server,
                self.connecter.port, bkp_dir + file_name)
        elif self.bkp_type == 'zip':  # Zip with zip
            command = 'pg_dumpall -U {} -h {} -p {} | zip > {}'.format(
                self.connecter.user, self.connecter.server,
                self.connecter.port, bkp_dir + file_name)
#......... part of the code omitted here .........
Example 13: process
# Required import: from logger.logger import Logger [as alias]
# Or: from logger.logger.Logger import stop_exe [as alias]
class Backer:

    bkp_path = ''  # The path where the backups are stored
    group = ''  # The name of the subdirectory where the backups are stored
    bkp_type = ''  # The type of the backups' files
    prefix = ''  # The prefix of the backups' names
    in_dbs = []  # List of databases to be included in the process
    in_regex = ''  # Regular expression which must match the included databases
    # Flag which determines whether inclusion conditions predominate over the
    # exclusion ones
    in_priority = False
    ex_dbs = []  # List of databases to be excluded in the process
    ex_regex = ''  # Regular expression which must match the excluded databases
    # Flag which determines whether the templates must be included
    ex_templates = True
    # Flag which determines whether the included databases must be vacuumed
    # before the backup process
    vacuum = True
    # Use other PostgreSQL user during the backup process (only for superusers)
    db_owner = ''
    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages

    def __init__(self, connecter=None, bkp_path='', group='',
                 bkp_type='dump', prefix='', in_dbs=[], in_regex='',
                 in_priority=False, ex_dbs=['postgres'], ex_regex='',
                 ex_templates=True, vacuum=True, db_owner='', logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Msg.NO_CONNECTION_PARAMS)

        # If the backup directory is not specified, create a default one to
        # store the backups
        if bkp_path:
            self.bkp_path = bkp_path
        else:
            self.bkp_path = Default.BKP_PATH
            Dir.create_dir(self.bkp_path, self.logger)

        if group:
            self.group = group
        else:
            self.group = Default.GROUP

        if bkp_type is None:
            self.bkp_type = Default.BKP_TYPE
        elif Checker.check_compress_type(bkp_type):
            self.bkp_type = bkp_type
        else:
            self.logger.stop_exe(Msg.INVALID_BKP_TYPE)

        self.prefix = prefix

        if isinstance(in_dbs, list):
            self.in_dbs = in_dbs
        else:
            self.in_dbs = Casting.str_to_list(in_dbs)

        if Checker.check_regex(in_regex):
            self.in_regex = in_regex
        else:
            self.logger.stop_exe(Msg.INVALID_IN_REGEX)

        if isinstance(in_priority, bool):
            self.in_priority = in_priority
        elif Checker.str_is_bool(in_priority):
            self.in_priority = Casting.str_to_bool(in_priority)
        else:
            self.logger.stop_exe(Msg.INVALID_IN_PRIORITY)

        if isinstance(ex_dbs, list):
            self.ex_dbs = ex_dbs
        else:
            self.ex_dbs = Casting.str_to_list(ex_dbs)

        if Checker.check_regex(ex_regex):
            self.ex_regex = ex_regex
        else:
            self.logger.stop_exe(Msg.INVALID_EX_REGEX)

        if isinstance(ex_templates, bool):
            self.ex_templates = ex_templates
        elif Checker.str_is_bool(ex_templates):
            self.ex_templates = Casting.str_to_bool(ex_templates)
        else:
            self.logger.stop_exe(Msg.INVALID_EX_TEMPLATES)

        if isinstance(vacuum, bool):
            self.vacuum = vacuum
        elif Checker.str_is_bool(vacuum):
            self.vacuum = Casting.str_to_bool(vacuum)
        else:
#......... part of the code omitted here .........