本文整理汇总了Python中resource_management.libraries.script.script.Script.is_hdp_stack_less_than方法的典型用法代码示例。如果您正苦于以下问题:Python Script.is_hdp_stack_less_than方法的具体用法?Python Script.is_hdp_stack_less_than怎么用?Python Script.is_hdp_stack_less_than使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类resource_management.libraries.script.script.Script
的用法示例。
在下文中一共展示了Script.is_hdp_stack_less_than方法的5个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: pre_rolling_restart
# 需要导入模块: from resource_management.libraries.script.script import Script [as 别名]
# 或者: from resource_management.libraries.script.script.Script import is_hdp_stack_less_than [as 别名]
def pre_rolling_restart(self, env):
  """Point the Accumulo client at the target stack version ahead of a
  rolling-upgrade restart.

  Selects the matching configuration (conf-select) and binaries
  (hdp-select) for ``params.version``. Does nothing when the stack
  version cannot be determined or is older than HDP 2.2, since the
  select tooling only exists from 2.2 onward.
  """
  import params
  env.set_params(params)

  # conf-select / hdp-select are only meaningful on HDP 2.2+.
  if not Script.is_hdp_stack_less_than("2.2"):
    Logger.info("Executing Accumulo Client Rolling Upgrade pre-restart")
    conf_select.select(params.stack_name, "accumulo", params.version)
    hdp_select.select("accumulo-client", params.version)
示例2: pre_rolling_restart
# 需要导入模块: from resource_management.libraries.script.script import Script [as 别名]
# 或者: from resource_management.libraries.script.script.Script import is_hdp_stack_less_than [as 别名]
def pre_rolling_restart(self, env):
  """Switch this Accumulo component to the target stack version ahead of a
  rolling-upgrade restart.

  Maps ``self.component`` to its hdp-select package name, then selects the
  matching configuration and binaries for ``params.version``. The client
  package is always selected as well, since several Accumulo components
  depend on it.

  Raises:
    Fail: when ``self.component`` has no entry in
      ``COMPONENT_TO_HDP_SELECT_MAPPING``.
  """
  import params
  env.set_params(params)

  # Nothing to select on stacks that predate HDP 2.2 (or whose version
  # could not be determined).
  if Script.is_hdp_stack_less_than("2.2"):
    return

  if self.component not in self.COMPONENT_TO_HDP_SELECT_MAPPING:
    message = "Unable to execute an upgrade for unknown component {0}".format(self.component)
    Logger.info(message)
    raise Fail(message)

  hdp_component = self.COMPONENT_TO_HDP_SELECT_MAPPING[self.component]
  Logger.info("Executing Accumulo Rolling Upgrade pre-restart for {0}".format(hdp_component))
  conf_select.select(params.stack_name, "accumulo", params.version)
  hdp_select.select(hdp_component, params.version)

  # some accumulo components depend on the client, so update that too
  hdp_select.select("accumulo-client", params.version)
示例3: format
# 需要导入模块: from resource_management.libraries.script.script import Script [as 别名]
# 或者: from resource_management.libraries.script.script.Script import is_hdp_stack_less_than [as 别名]
# Ext-js bundle used by the Oozie web console.
# for HDP1 it's "/usr/share/HDP-oozie/ext.zip"
ext_js_file = "ext-2.2.zip"
ext_js_path = format("/usr/share/HDP-oozie/{ext_js_file}")

# Security / Kerberos settings read from the cluster configuration.
security_enabled = config['configurations']['cluster-env']['security_enabled']
oozie_heapsize = config['configurations']['oozie-env']['oozie_heapsize']
oozie_permsize = config['configurations']['oozie-env']['oozie_permsize']
kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
oozie_service_keytab = config['configurations']['oozie-site']['oozie.service.HadoopAccessorService.keytab.file']
oozie_principal = config['configurations']['oozie-site']['oozie.service.HadoopAccessorService.kerberos.principal']
http_principal = config['configurations']['oozie-site']['oozie.authentication.kerberos.principal']
oozie_site = config['configurations']['oozie-site']

# Need this for yarn.nodemanager.recovery.dir in yarn-site
yarn_log_dir_prefix = config['configurations']['yarn-env']['yarn_log_dir_prefix']

if security_enabled and Script.is_hdp_stack_less_than("2.2"):
  # older versions of oozie have problems when using _HOST in principal,
  # so substitute the concrete hostname into a *copy* of oozie-site
  # (dict() avoids mutating the shared config structure).
  oozie_site = dict(config['configurations']['oozie-site'])
  oozie_site['oozie.service.HadoopAccessorService.kerberos.principal'] = \
    oozie_principal.replace('_HOST', hostname)
  oozie_site['oozie.authentication.kerberos.principal'] = \
    http_principal.replace('_HOST', hostname)

smokeuser_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
# Falls back to the HadoopAccessorService keytab when oozie-env does not define one.
oozie_keytab = default("/configurations/oozie-env/oozie_keytab", oozie_service_keytab)
oozie_env_sh_template = config['configurations']['oozie-env']['content']

oracle_driver_jar_name = "ojdbc6.jar"
oozie_metastore_user_name = config['configurations']['oozie-site']['oozie.service.JPAService.jdbc.username']
oozie_metastore_user_passwd = default("/configurations/oozie-site/oozie.service.JPAService.jdbc.password","")
示例4: default
# 需要导入模块: from resource_management.libraries.script.script import Script [as 别名]
# 或者: from resource_management.libraries.script.script.Script import is_hdp_stack_less_than [as 别名]
from resource_management.libraries import functions

# server configurations
config = Script.get_config()
tmp_dir = Script.get_tmp_dir()
sudo = AMBARI_SUDO_BINARY

stack_name = default("/hostLevelParams/stack_name", None)

# node hostname
hostname = config["hostname"]

# This is expected to be of the form #.#.#.#
stack_version_unformatted = str(config["hostLevelParams"]["stack_version"])
hdp_stack_version_major = format_hdp_stack_version(stack_version_unformatted)
# True only for the HDP 2.0/2.1 line: at least 2.0 but still below 2.2.
stack_is_hdp21 = Script.is_hdp_stack_greater_or_equal("2.0") and Script.is_hdp_stack_less_than("2.2")

# this is not available on INSTALL action because hdp-select is not available
hdp_stack_version = functions.get_hdp_version("hive-server2")

# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade.
# It cannot be used during the initial Cluster Install because the version is not yet known.
version = default("/commandParams/version", None)

# current host stack version
current_version = default("/hostLevelParams/current_version", None)

# When downgrading the 'version' and 'current_version' are both pointing to the downgrade-target version
# downgrade_from_version provides the source-version the downgrade is happening from
downgrade_from_version = default("/commandParams/downgrade_from_version", None)
示例5: default
# 需要导入模块: from resource_management.libraries.script.script import Script [as 别名]
# 或者: from resource_management.libraries.script.script.Script import is_hdp_stack_less_than [as 别名]
else: # params.dfs_http_policy == "HTTP_ONLY" or not defined:
secure_dn_ports_are_in_use = utils.is_secure_port(dfs_dn_port) or utils.is_secure_port(dfs_dn_http_port)
if secure_dn_ports_are_in_use:
hadoop_secure_dn_user = hdfs_user
else:
hadoop_secure_dn_user = '""'
ambari_libs_dir = "/var/lib/ambari-agent/lib"
limits_conf_dir = "/etc/security/limits.d"

# ulimit values applied to the hdfs user via limits.d (kept as strings,
# the way they are written into the limits file).
hdfs_user_nofile_limit = default("/configurations/hadoop-env/hdfs_user_nofile_limit", "128000")
hdfs_user_nproc_limit = default("/configurations/hadoop-env/hdfs_user_nproc_limit", "65536")

# Snappy symlinks are only created on stacks older than HDP 2.2;
# presumably 2.2+ ships/manages the native libs itself — TODO confirm.
create_lib_snappy_symlinks = not Script.is_hdp_stack_greater_or_equal("2.2")

# jsvc location differs on the HDP 2.0.x line for non-SUSE distros.
if Script.is_hdp_stack_greater_or_equal("2.0") and Script.is_hdp_stack_less_than("2.1") and not OSCheck.is_suse_family():
  # deprecated rhel jsvc_path
  jsvc_path = "/usr/libexec/bigtop-utils"
else:
  jsvc_path = "/usr/lib/bigtop-utils"

# PATH used when executing hadoop commands, with the stack's bin dir appended.
execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir
# Prefix that enables unlimited core dumps for spawned daemons.
ulimit_cmd = "ulimit -c unlimited ; "

# Source and target locations of the snappy shared library for
# 32-bit and 64-bit Linux.
snappy_so = "libsnappy.so"
so_target_dir_x86 = format("{hadoop_lib_home}/native/Linux-i386-32")
so_target_dir_x64 = format("{hadoop_lib_home}/native/Linux-amd64-64")
so_target_x86 = format("{so_target_dir_x86}/{snappy_so}")
so_target_x64 = format("{so_target_dir_x64}/{snappy_so}")
so_src_dir_x86 = format("{hadoop_home}/lib")
so_src_dir_x64 = format("{hadoop_home}/lib64")