

Python Script.is_hdp_stack_greater_or_equal Method Code Examples

This page collects typical usage examples of the Python method resource_management.libraries.script.Script.is_hdp_stack_greater_or_equal. If you have been wondering what Script.is_hdp_stack_greater_or_equal does, how to call it, or where it is used, the selected code examples below should help. You can also explore further usage examples of its containing class, resource_management.libraries.script.Script.


The following shows 8 code examples of Script.is_hdp_stack_greater_or_equal, sorted by popularity by default.
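Before the individual examples, here is a minimal, illustrative sketch of the call pattern they all share. It assumes it runs inside an Ambari agent command where the resource_management libraries are importable; the import paths for conf_select and hdp_select and the helper name relink_if_hdp23_or_newer are assumptions made for illustration, and the component name is simply borrowed from Example 1 below.

# Illustrative sketch only -- not taken verbatim from any project below.
from resource_management.libraries.functions import conf_select, hdp_select
from resource_management.libraries.script import Script

def relink_if_hdp23_or_newer(stack_name, version):
  # is_hdp_stack_greater_or_equal() compares the stack version of the current
  # command against the given version string and returns a boolean.
  if Script.is_hdp_stack_greater_or_equal("2.3"):
    # On HDP 2.3+, repoint the versioned config and binary symlinks --
    # the same pair of calls the examples below use.
    conf_select.select(stack_name, "hadoop", version)
    hdp_select.select("hadoop-hdfs-nfs3", version)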

Example 1: pre_upgrade_restart

# Required import: from resource_management.libraries.script import Script [as alias]
# Or: from resource_management.libraries.script.Script import is_hdp_stack_greater_or_equal [as alias]
  def pre_upgrade_restart(self, env, upgrade_type=None):
    import params
    env.set_params(params)

    if Script.is_hdp_stack_greater_or_equal('2.3.0.0'):
      conf_select.select(params.stack_name, "hadoop", params.version)
      hdp_select.select("hadoop-hdfs-nfs3", params.version)
Developer: OpenPOWER-BigData, Project: HDP-ambari, Lines: 9, Source: nfsgateway.py

Example 2: pre_rolling_restart

# Required import: from resource_management.libraries.script import Script [as alias]
# Or: from resource_management.libraries.script.Script import is_hdp_stack_greater_or_equal [as alias]
  def pre_rolling_restart(self, env):
    import params
    env.set_params(params)

    if Script.is_hdp_stack_greater_or_equal("2.3"):
      # phoenix uses hbase configs
      conf_select.select(params.stack_name, "hbase", params.version)
      hdp_select.select("phoenix-server", params.version)
Developer: zouzhberk, Project: ambaridemo, Lines: 10, Source: phoenix_queryserver.py

Example 3: pre_rolling_restart

# Required import: from resource_management.libraries.script import Script [as alias]
# Or: from resource_management.libraries.script.Script import is_hdp_stack_greater_or_equal [as alias]
    def pre_rolling_restart(self, env):
        Logger.info("Executing Metastore Rolling Upgrade pre-restart")
        import params

        env.set_params(params)

        if Script.is_hdp_stack_greater_or_equal("2.3"):
            self.upgrade_schema(env)

        if params.version and compare_versions(format_hdp_stack_version(params.version), "2.2.0.0") >= 0:
            conf_select.select(params.stack_name, "hive", params.version)
            hdp_select.select("hive-metastore", params.version)
Developer: zouzhberk, Project: ambaridemo, Lines: 14, Source: hive_metastore.py

Example 4: pre_upgrade_restart

# Required import: from resource_management.libraries.script import Script [as alias]
# Or: from resource_management.libraries.script.Script import is_hdp_stack_greater_or_equal [as alias]
  def pre_upgrade_restart(self, env, upgrade_type=None):
    Logger.info("Executing Metastore Stack Upgrade pre-restart")
    import params

    env.set_params(params)

    is_stack_hdp_23 = Script.is_hdp_stack_greater_or_equal("2.3")
    is_upgrade = params.upgrade_direction == Direction.UPGRADE

    if is_stack_hdp_23 and is_upgrade:
      self.upgrade_schema(env)

    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
      conf_select.select(params.stack_name, "hive", params.version)
      hdp_select.select("hive-metastore", params.version)
Developer: OpenPOWER-BigData, Project: HDP-ambari, Lines: 17, Source: hive_metastore.py

Example 5: link_configs

# Required import: from resource_management.libraries.script import Script [as alias]
# Or: from resource_management.libraries.script.Script import is_hdp_stack_greater_or_equal [as alias]
def link_configs(struct_out_file):
  """
  Links configs, only on a fresh install of HDP-2.3 and higher
  """

  if not Script.is_hdp_stack_greater_or_equal("2.3"):
    Logger.info("Can only link configs for HDP-2.3 and higher.")
    return

  json_version = load_version(struct_out_file)

  if not json_version:
    Logger.info("Could not load 'version' from {0}".format(struct_out_file))
    return

  for k, v in conf_select.PACKAGE_DIRS.iteritems():
    conf_select.convert_conf_directories_to_symlinks(k, json_version, v)
Developer: OpenPOWER-BigData, Project: HDP-ambari, Lines: 19, Source: shared_initialization.py

Example 6

# Required import: from resource_management.libraries.script import Script [as alias]
# Or: from resource_management.libraries.script.Script import is_hdp_stack_greater_or_equal [as alias]
# hadoop default params
mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"

# upgrades would cause these directories to have a version instead of "current"
# which would cause a lot of problems when writing out hadoop-env.sh; instead
# force the use of "current" in the hook
hadoop_home = hdp_select.get_hadoop_dir("home", force_latest_on_upgrade=True)
hadoop_libexec_dir = hdp_select.get_hadoop_dir("libexec", force_latest_on_upgrade=True)

hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
hadoop_secure_dn_user = hdfs_user
hadoop_dir = "/etc/hadoop"
versioned_hdp_root = '/usr/hdp/current'

# HDP 2.2+ params
if Script.is_hdp_stack_greater_or_equal("2.2"):
  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"

  # not supported in HDP 2.2+
  hadoop_conf_empty_dir = None

  if not security_enabled:
    hadoop_secure_dn_user = '""'
  else:
    dfs_dn_port = get_port(dfs_dn_addr)
    dfs_dn_http_port = get_port(dfs_dn_http_addr)
    dfs_dn_https_port = get_port(dfs_dn_https_addr)
    # We try to avoid inability to start datanode as a plain user due to usage of root-owned ports
    if dfs_http_policy == "HTTPS_ONLY":
      secure_dn_ports_are_in_use = is_secure_port(dfs_dn_port) or is_secure_port(dfs_dn_https_port)
    elif dfs_http_policy == "HTTP_AND_HTTPS":
Developer: andreysabitov, Project: ambari-mantl, Lines: 33, Source: params.py

Example 7: default

# Required import: from resource_management.libraries.script import Script [as alias]
# Or: from resource_management.libraries.script.Script import is_hdp_stack_greater_or_equal [as alias]
# server configurations
config = Script.get_config()
tmp_dir = Script.get_tmp_dir()
sudo = AMBARI_SUDO_BINARY

stack_name = default("/hostLevelParams/stack_name", None)
upgrade_direction = default("/commandParams/upgrade_direction", Direction.UPGRADE)
version = default("/commandParams/version", None)

storm_component_home_dir = status_params.storm_component_home_dir
conf_dir = status_params.conf_dir

stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
stack_is_hdp22_or_further = Script.is_hdp_stack_greater_or_equal("2.2")

# default hadoop params
rest_lib_dir = "/usr/lib/storm/contrib/storm-rest"
storm_bin_dir = "/usr/bin"
storm_lib_dir = "/usr/lib/storm/lib/"

# hadoop parameters for 2.2+
if stack_is_hdp22_or_further:
  rest_lib_dir = format("{storm_component_home_dir}/contrib/storm-rest")
  storm_bin_dir = format("{storm_component_home_dir}/bin")
  storm_lib_dir = format("{storm_component_home_dir}/lib")
  log4j_dir = format("{storm_component_home_dir}/log4j2")

storm_user = config['configurations']['storm-env']['storm_user']
log_dir = config['configurations']['storm-env']['storm_log_dir']
Developer: biggeng, Project: ambari, Lines: 32, Source: params_linux.py

Example 8: str

# Required import: from resource_management.libraries.script import Script [as alias]
# Or: from resource_management.libraries.script.Script import is_hdp_stack_greater_or_equal [as alias]
from resource_management.core.system import System
from ambari_commons.os_check import OSCheck

config = Script.get_config()
sudo = AMBARI_SUDO_BINARY

stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)

# default hadoop params
mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
hadoop_libexec_dir = hdp_select.get_hadoop_dir("libexec")
hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"

# HDP 2.2+ params
if Script.is_hdp_stack_greater_or_equal("2.2"):
  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"

  # not supported in HDP 2.2+
  hadoop_conf_empty_dir = None

versioned_hdp_root = '/usr/hdp/current'

#security params
security_enabled = config['configurations']['cluster-env']['security_enabled']

#java params
java_home = config['hostLevelParams']['java_home']

#hadoop params
hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
Developer: andreysabitov, Project: ambari-mantl, Lines: 33, Source: params.py


Note: The examples of resource_management.libraries.script.Script.is_hdp_stack_greater_or_equal in this article were compiled by 纯净天空 from open source code and documentation platforms such as GitHub/MSDocs. The snippets were selected from open source projects contributed by their developers; copyright of the source code belongs to the original authors, and redistribution or use should follow the corresponding project's license. Do not reproduce without permission.