本文整理汇总了Python中mpp.lib.config.GPDBConfig.get_host_and_datadir_of_segment方法的典型用法代码示例。如果您正苦于以下问题:Python GPDBConfig.get_host_and_datadir_of_segment方法的具体用法?Python GPDBConfig.get_host_and_datadir_of_segment怎么用?Python GPDBConfig.get_host_and_datadir_of_segment使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类mpp.lib.config.GPDBConfig
的用法示例。
在下文中一共展示了GPDBConfig.get_host_and_datadir_of_segment方法的4个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: get_host_and_db_path
# 需要导入模块: from mpp.lib.config import GPDBConfig [as 别名]
# 或者: from mpp.lib.config.GPDBConfig import get_host_and_datadir_of_segment [as 别名]
def get_host_and_db_path(self, dbname, contentid=0):
    """Return (host, db_path) for *dbname* on the primary segment of *contentid*.

    The database path is <segment datadir>/base/<database oid>.
    """
    config = GPDBConfig()
    # Look up the database OID and the dbid of the acting primary for this content.
    db_oid = PSQL.run_sql_command(
        "select oid from pg_database where datname='%s'" % dbname,
        flags='-q -t', dbname='postgres')
    dbid = PSQL.run_sql_command(
        "select dbid from gp_segment_configuration where content=%s and role='p'" % contentid,
        flags='-q -t', dbname='postgres')
    host, datadir = config.get_host_and_datadir_of_segment(dbid=dbid.strip())
    return (host.strip(), os.path.join(datadir, 'base', db_oid.strip()))
示例2: check_logs
# 需要导入模块: from mpp.lib.config import GPDBConfig [as 别名]
# 或者: from mpp.lib.config.GPDBConfig import get_host_and_datadir_of_segment [as 别名]
def check_logs(search_string_list):
    """
    Scan the logs of every segment (master/primary/mirror) for the keywords
    in search_string_list; report the first match found, if any.
    """
    config = GPDBConfig()
    raw_dbids = PSQL.run_sql_command("select dbid from gp_segment_configuration;", flags="-q -t", dbname="postgres")
    for dbid in raw_dbids.split():
        host, data_dir = config.get_host_and_datadir_of_segment(dbid.strip())
        found, msg = search_string(host, search_string_list, data_dir)
        if found:
            # A keyword was found in this segment's log — report it immediately.
            return (False, msg)
    return (True, "No Issues found")
示例3: GPAddmirrorsTestCase
# 需要导入模块: from mpp.lib.config import GPDBConfig [as 别名]
# 或者: from mpp.lib.config.GPDBConfig import get_host_and_datadir_of_segment [as 别名]
class GPAddmirrorsTestCase(MPPTestCase):
    """Scaffolding for gpaddmirrors tests: discovers cluster topology from
    GPDBConfig and prepares the config/input files the utility consumes."""

    def __init__(self, methodName):
        self.config = GPDBConfig()
        self.mdd = os.environ.get('MASTER_DATA_DIRECTORY')
        # Segment directory prefix, e.g. 'gpseg' from a 'gpseg-1' master dir.
        self.seg_prefix = os.path.basename(self.mdd).split('-')[0]
        self.master_host = self.config.get_masterhost()
        self.gpinitconfig_template = local_path('configs/gpinitconfig_template')
        self.datadir_config_file = local_path('configs/datadir_config_file')
        self.mirror_config_file = local_path('configs/mirror_config_file')
        self.gpinitconfig_file = local_path('configs/gpinitconfig')
        self.host_file = local_path('configs/hosts')
        self.hosts = self.config.get_hosts(segments=True)
        self.port_base = '40000'
        self.master_port = os.environ.get('PGPORT', '5432')
        # dbid 2 is the first primary; its datadir anchors the mirror layout.
        self.primary_data_dir = self.config.get_host_and_datadir_of_segment(dbid=2)[1]
        # initially set the mirror data dir same to primary's
        self.mirror_data_dir = os.path.join(os.path.dirname(os.path.dirname(self.primary_data_dir)), 'mirror')
        self.gpinitsystem = True
        self.number_of_segments = self.config.get_countprimarysegments()
        # Explicit floor division: identical result on Python 2 for ints, and
        # keeps the behavior stable under Python 3's true division.
        self.number_of_segments_per_host = self.number_of_segments // len(self.hosts)
        self.standby_enabled = False
        self.number_of_parallelism = 4
        self.fs_location = []
        super(GPAddmirrorsTestCase, self).__init__(methodName)

    def setUp(self):
        super(GPAddmirrorsTestCase, self).setUp()

    def _setup_gpaddmirrors(self, port_offset=1000):
        """
        Takes care of creating all the directories required for gpaddmirrors
        and generating input files for gpaddmirrors
        """
        # Generate gpaddmirrors config files
        try:
            self._generate_gpaddmirrors_input_files(port_offset)
        except Exception as e:  # 'as' form: valid on Python 2.6+ and 3.x
            tinctest.logger.exception("Encountered exception during generation of input files: %s" % e)
            raise
示例4: FilerepTestCase
# 需要导入模块: from mpp.lib.config import GPDBConfig [as 别名]
# 或者: from mpp.lib.config.GPDBConfig import get_host_and_datadir_of_segment [as 别名]
class FilerepTestCase(MPPTestCase):
def __init__(self, methodName):
    """Wire up the helper objects shared by every filerep test method."""
    self.pgport = os.environ.get('PGPORT')
    # Cluster configuration first: several helpers below depend on it.
    self.config = GPDBConfig()
    self.util = Filerepe2e_Util()
    self.gpconfig = GpConfig()
    self.gpr = GpRecover(self.config)
    self.dbstate = DbStateClass('run_validation', self.config)
    self.gpstart = GpStart()
    self.gpstop = GpStop()
    super(FilerepTestCase, self).__init__(methodName)
def sleep(self, seconds=60):
    """Pause the calling thread for *seconds* (default: one minute)."""
    time.sleep(seconds)
def create_file_in_datadir(self, content, role, filename):
    """Touch *filename* inside the datadir of the segment (content, role)."""
    seg_dbid = self.config.get_dbid(content=content, seg_role=role)
    seg_host, seg_datadir = self.config.get_host_and_datadir_of_segment(dbid=seg_dbid)
    target_path = os.path.join(seg_datadir, filename)
    # Run the touch remotely on the segment host.
    cmd = Command('create a file', 'touch %s' % target_path, ctxt=REMOTE, remoteHost=seg_host)
    cmd.run(validateAfter=True)
def remove_file_in_datadir(self, content, role, filename):
    """Delete *filename* from the datadir of the segment (content, role)."""
    seg_dbid = self.config.get_dbid(content=content, seg_role=role)
    seg_host, seg_datadir = self.config.get_host_and_datadir_of_segment(dbid=seg_dbid)
    target_path = os.path.join(seg_datadir, filename)
    # Run the rm remotely on the segment host.
    cmd = Command('remove a file', 'rm %s' % target_path, ctxt=REMOTE, remoteHost=seg_host)
    cmd.run(validateAfter=True)
def get_timestamp_of_file_in_datadir(self, content, role, filename):
    """Return the mtime (as a string) of *filename* in the segment's datadir."""
    seg_dbid = self.config.get_dbid(content=content, seg_role=role)
    seg_host, seg_datadir = self.config.get_host_and_datadir_of_segment(dbid=seg_dbid)
    target_path = os.path.join(seg_datadir, filename)
    # stat the file remotely via an inline python one-liner on the segment host
    cmd = Command('check timestamp', """ python -c "import os; print os.stat('%s').st_mtime" """ %
                  target_path, ctxt=REMOTE, remoteHost=seg_host)
    cmd.run(validateAfter=True)
    return cmd.get_results().stdout.strip()
def verify_file_exists(self, content, role, filename):
    """Fail (via validateAfter) unless *filename* exists in the segment's datadir."""
    seg_dbid = self.config.get_dbid(content=content, seg_role=role)
    seg_host, seg_datadir = self.config.get_host_and_datadir_of_segment(dbid=seg_dbid)
    target_path = os.path.join(seg_datadir, filename)
    # 'test -f' exits non-zero when the file is missing; validateAfter raises then.
    cmd = Command('check if file exists', 'test -f %s' % target_path, ctxt=REMOTE, remoteHost=seg_host)
    cmd.run(validateAfter=True)
def handle_ext_cases(self, file):
    """
    @file: wet sql file to replace with specific machine env.
    """
    host = str(socket.gethostbyname(socket.gethostname()))  # Must be an IP
    querystring = "gpfdist://" + host + ":8088"
    if not os.path.isfile(file):
        return
    # FileInput(inplace=1) redirects stdout into the file, so each print
    # below rewrites the corresponding line in place.
    for line in fileinput.FileInput(file, inplace=1):
        print(re.sub('gpfdist.+8088', querystring, line).replace('\n', ''))
def handle_hybrid_part_cases(self, file):
    """
    @file: hybrid sql file to replace with specific machine env
    """
    querystring = "FROM '" + local_path('hybrid_part.data') + "'"
    if not os.path.isfile(file):
        return
    # FileInput(inplace=1) redirects stdout into the file, so each print
    # below rewrites the corresponding line in place.
    for line in fileinput.FileInput(file, inplace=1):
        print(re.sub('FROM\s\'.+hybrid_part.data\'', querystring, line).replace('\n', ''))
def preprocess(self):
    """
    Replace the hard-coded information from sql files with correct hostname and ip address,etc
    """
    workload_dirs = ['set_sync1', 'sync1', 'set_ck_sync1', 'ck_sync1',
                     'set_ct', 'ct', 'set_resync', 'resync', 'set_sync2', 'sync2']
    for workload in workload_dirs:
        # Patch both the sql inputs and their expected answer files.
        for subdir in ('sql', 'expected'):
            dir_path = os.path.join(local_path(workload), subdir)
            for fname in os.listdir(dir_path):
                full_path = os.path.join(dir_path, fname)
                if fname.find('wet_ret') >= 0:
                    self.handle_ext_cases(full_path)
                if fname.find('hybrid_part') >= 0:
                    self.handle_hybrid_part_cases(full_path)
def clean_data(self):
"""
Clean the data by removing the external table, otherwise, more data will be appended to the
same external table from running multiple sql files.
"""
#.........这里部分代码省略.........