

Python xmlutils.load_hadoop_xml_defaults Function Code Examples

This page collects typical usage examples of the Python function sahara.utils.xmlutils.load_hadoop_xml_defaults. If you have been wondering what exactly load_hadoop_xml_defaults does and how to use it, the hand-picked examples below should help.


The following presents 15 code examples of load_hadoop_xml_defaults, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python examples.
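
Before the examples, here is a minimal usage sketch. The resource path is the one used in Example 10 below, and the shape of the returned list is inferred from the unit test in Example 5:

from sahara.utils import xmlutils

# The path is resolved relative to the sahara package's bundled resources.
defaults = xmlutils.load_hadoop_xml_defaults(
    'plugins/vanilla/v1_2_1/resources/core-default.xml')

# Each entry is a plain dict describing one Hadoop property.
for cfg in defaults:
    print(cfg['name'], '=', cfg['value'], '--', cfg['description'])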

Example 1: get_possible_job_config

def get_possible_job_config(job_type):
    if not edp.compare_job_type(job_type, *edp.JOB_TYPES_ALL):
        return None

    if edp.compare_job_type(job_type, edp.JOB_TYPE_JAVA):
        return {'job_config': {'configs': [], 'args': []}}

    if edp.compare_job_type(job_type,
                            edp.JOB_TYPE_MAPREDUCE, edp.JOB_TYPE_PIG):
        # TODO(nmakhotkin): Here we need to return a config based on the specific plugin
        cfg = xmlutils.load_hadoop_xml_defaults(
            'plugins/vanilla/v1_2_1/resources/mapred-default.xml')
        if edp.compare_job_type(job_type, edp.JOB_TYPE_MAPREDUCE):
            cfg += xmlutils.load_hadoop_xml_defaults(
                'service/edp/resources/mapred-job-config.xml')
    elif edp.compare_job_type(job_type, edp.JOB_TYPE_HIVE):
        # TODO(nmakhotkin): Here we need to return a config based on the specific plugin
        cfg = xmlutils.load_hadoop_xml_defaults(
            'plugins/vanilla/v1_2_1/resources/hive-default.xml')

    # TODO(tmckay): args should be a list when bug #269968
    # is fixed on the UI side
    config = {'configs': cfg, "args": {}}
    # params apply only to Pig and Hive (Java jobs returned early above)
    if not edp.compare_job_type(job_type, edp.JOB_TYPE_MAPREDUCE,
                                edp.JOB_TYPE_JAVA):
        config.update({'params': {}})
    return {'job_config': config}
Author: stannie42 | Project: sahara | Lines: 26 | Source file: workflow_factory.py
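
As a hedged illustration, here is how the helper above might be exercised. The import paths are assumptions based on the footer's file name and sahara's utils layout; the Java result is taken directly from the code, while the others depend on the XML resources on disk:

# Import paths below are assumptions, not confirmed by this page.
from sahara.service.edp.workflow_factory import get_possible_job_config
from sahara.utils import edp

# Unknown job types yield None.
assert get_possible_job_config('NotAJobType') is None

# Java jobs short-circuit with empty configs/args.
assert get_possible_job_config(edp.JOB_TYPE_JAVA) == {
    'job_config': {'configs': [], 'args': []}}

# Pig/MapReduce/Hive results carry the parsed XML defaults plus extras.
cfg = get_possible_job_config(edp.JOB_TYPE_PIG)
print(len(cfg['job_config']['configs']))  # number of default properties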

Example 2: get_possible_job_config

def get_possible_job_config(job_type):
    if not edp.compare_job_type(job_type, *get_possible_job_types()):
        return None

    if edp.compare_job_type(job_type, 'Java'):
        return {'job_config': {'configs': [], 'args': []}}

    if edp.compare_job_type(job_type, 'MapReduce', 'Pig'):
        # TODO(nmakhotkin): Here we should return a config based on the specific plugin
        cfg = xmlutils.load_hadoop_xml_defaults(
            'plugins/vanilla/v1_2_1/resources/mapred-default.xml')
        if edp.compare_job_type(job_type, 'MapReduce'):
            cfg += xmlutils.load_hadoop_xml_defaults(
                'service/edp/resources/mapred-job-config.xml')
    elif edp.compare_job_type(job_type, 'Hive'):
        # TODO(nmakhotkin): Here we should return a config based on the specific plugin
        cfg = xmlutils.load_hadoop_xml_defaults(
            'plugins/vanilla/v1_2_1/resources/hive-default.xml')

    # TODO(tmckay): args should be a list when bug #269968
    # is fixed on the UI side
    config = {'configs': cfg, "args": {}}
    # params apply only to Pig and Hive (Java jobs returned early above)
    if not edp.compare_job_type(job_type, 'MapReduce', 'Java'):
        config.update({'params': {}})
    return {'job_config': config}
Author: qinweiwei | Project: sahara | Lines: 25 | Source file: workflow_factory.py

Example 3: get_possible_job_config

def get_possible_job_config(job_type):
    if not edp.compare_job_type(job_type, *edp.JOB_TYPES_ALL):
        return None

    if edp.compare_job_type(job_type, edp.JOB_TYPE_JAVA):
        return {'job_config': {'configs': [], 'args': []}}

    if edp.compare_job_type(job_type, edp.JOB_TYPE_SHELL):
        return {'job_config': {'configs': [], 'params': {}, 'args': []}}

    if edp.compare_job_type(job_type,
                            edp.JOB_TYPE_MAPREDUCE, edp.JOB_TYPE_PIG):
        cfg = xmlutils.load_hadoop_xml_defaults(
            'service/edp/resources/mapred-default.xml')
        if edp.compare_job_type(job_type, edp.JOB_TYPE_MAPREDUCE):
            cfg += get_possible_mapreduce_configs()
    elif edp.compare_job_type(job_type, edp.JOB_TYPE_HIVE):
        cfg = xmlutils.load_hadoop_xml_defaults(
            'service/edp/resources/hive-default.xml')

    config = {'configs': cfg}
    if edp.compare_job_type(job_type, edp.JOB_TYPE_PIG, edp.JOB_TYPE_HIVE):
        config.update({'params': {}})
    if edp.compare_job_type(job_type, edp.JOB_TYPE_PIG):
        config.update({'args': []})
    return {'job_config': config}
Author: openstack | Project: sahara | Lines: 26 | Source file: workflow_factory.py

Example 4: get_possible_hive_config_from

def get_possible_hive_config_from(file_name):
    '''Return the possible configs and params for a Hive job.'''
    config = {
        'configs': xmlutils.load_hadoop_xml_defaults(file_name),
        'params': {}
    }
    return config
Author: AlexanderYAPPO | Project: sahara | Lines: 7 | Source file: confighints_helper.py

Example 5: test_load_xml_defaults

    def test_load_xml_defaults(self):
        self.assertEqual(
            [{'name': u'name1', 'value': u'value1', 'description': 'descr1'},
             {'name': u'name2', 'value': u'value2', 'description': 'descr2'},
             {'name': u'name3', 'value': '', 'description': 'descr3'},
             {'name': u'name4', 'value': '', 'description': 'descr4'},
             {'name': u'name5', 'value': u'value5', 'description': ''}],
            x.load_hadoop_xml_defaults(
                'tests/unit/resources/test-default.xml'))
Author: B-Rich | Project: sahara | Lines: 9 | Source file: test_xml_utils.py
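
From this expected output one can reconstruct, as an assumption, roughly what tests/unit/resources/test-default.xml must contain: standard Hadoop configuration XML, where a missing or empty <value> or <description> maps to ''. A self-contained sketch that parses such a document into the same dict shape:

# Standalone sketch reproducing the dict shape asserted in the test above;
# the sample XML content is a guess based on that expected output.
import xml.dom.minidom as minidom

SAMPLE = """<?xml version="1.0"?>
<configuration>
  <property>
    <name>name1</name><value>value1</value><description>descr1</description>
  </property>
  <property>
    <name>name5</name><value>value5</value>
  </property>
</configuration>
"""

def _text(prop, tag):
    # Return the text of the first <tag> child, or '' when absent or empty.
    nodes = prop.getElementsByTagName(tag)
    if not nodes or not nodes[0].firstChild:
        return ''
    return nodes[0].firstChild.data.strip()

doc = minidom.parseString(SAMPLE)
configs = [{'name': _text(p, 'name'),
            'value': _text(p, 'value'),
            'description': _text(p, 'description')}
           for p in doc.getElementsByTagName('property')]
print(configs)
# -> [{'name': 'name1', 'value': 'value1', 'description': 'descr1'},
#     {'name': 'name5', 'value': 'value5', 'description': ''}]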

Example 6: get_swift_configs

def get_swift_configs():
    configs = x.load_hadoop_xml_defaults("swift/resources/conf-template.xml")
    for conf in configs:
        if conf["name"] == HADOOP_SWIFT_AUTH_URL:
            conf["value"] = su.retrieve_auth_url() + "tokens/"
        if conf["name"] == HADOOP_SWIFT_TENANT:
            conf["value"] = retrieve_tenant()
        if CONF.os_region_name and conf["name"] == HADOOP_SWIFT_REGION:
            conf["value"] = CONF.os_region_name

    result = [cfg for cfg in configs if cfg["value"]]
    LOG.info(_LI("Swift would be integrated with the following " "params: {result}").format(result=result))
    return result
Author: snowind | Project: sahara | Lines: 13 | Source file: swift_helper.py

Example 7: vm_awareness_core_config

def vm_awareness_core_config():
    c = x.load_hadoop_xml_defaults('topology/resources/core-template.xml')
    result = [cfg for cfg in c if cfg['value']]

    if not CONF.enable_hypervisor_awareness:
        # not leveraging 4-layer approach so override template value
        param = next((prop for prop in result
                      if prop['name'] == 'net.topology.impl'), None)
        if param:
            param['value'] = 'org.apache.hadoop.net.NetworkTopology'

    LOG.info("Vm awareness will add following configs in core-site "
             "params: %s", result)
    return result
Author: B-Rich | Project: sahara | Lines: 14 | Source file: topology_helper.py

Example 8: test_create_hadoop_xml

    def test_create_hadoop_xml(self):
        conf = x.load_hadoop_xml_defaults(
            'tests/unit/resources/test-default.xml')
        self.assertEqual(x.create_hadoop_xml({'name1': 'some_val1',
                                              'name2': 2}, conf),
                         """<?xml version="1.0" ?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
  <property>
    <name>name2</name>
    <value>2</value>
  </property>
  <property>
    <name>name1</name>
    <value>some_val1</value>
  </property>
</configuration>
""")
Author: B-Rich | Project: sahara | Lines: 18 | Source file: test_xml_utils.py
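
Taken together with Example 5, this test suggests a natural round-trip: load the defaults, choose concrete values, and render a site XML. A hedged sketch, with create_hadoop_xml's signature inferred from the assertion above rather than from sahara's documentation:

from sahara.utils import xmlutils as x

defaults = x.load_hadoop_xml_defaults(
    'tests/unit/resources/test-default.xml')

# Judging by the assertion above, only properties that appear both in the
# overrides dict and in the loaded defaults end up in the rendered document.
overrides = {'name1': 'some_val1', 'name2': 2}
site_xml = x.create_hadoop_xml(overrides, defaults)
print(site_xml)  # an <?xml ...?> <configuration>...</configuration> string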

Example 9: get_swift_configs

def get_swift_configs():
    configs = x.load_hadoop_xml_defaults('swift/resources/conf-template.xml')
    for conf in configs:
        if conf['name'] == HADOOP_SWIFT_AUTH_URL:
            conf['value'] = su.retrieve_auth_url() + "auth/tokens/"
        if conf['name'] == HADOOP_SWIFT_TENANT:
            conf['value'] = retrieve_tenant()
        if CONF.os_region_name and conf['name'] == HADOOP_SWIFT_REGION:
            conf['value'] = CONF.os_region_name
        if conf['name'] == HADOOP_SWIFT_DOMAIN_NAME:
            # NOTE(jfreud): Don't be deceived here... Even though there is an
            # attribute provided by context called domain_name, it is used for
            # domain scope, and hadoop-swiftfs always authenticates using
            # project scope. The purpose of the setting below is to override
            # the default value for project domain and user domain, domain id
            # as 'default', which may not always be correct.
            # TODO(jfreud): When hadoop-swiftfs allows it, stop hoping that
            # project_domain_name is always equal to user_domain_name.
            conf['value'] = context.current().project_domain_name

    result = [cfg for cfg in configs if cfg['value']]
    LOG.info("Swift would be integrated with the following "
             "params: {result}".format(result=result))
    return result
Author: openstack | Project: sahara | Lines: 24 | Source file: swift_helper.py

Example 10

from sahara.plugins import utils
from sahara.plugins.vanilla import utils as vu
from sahara.plugins.vanilla.v1_2_1 import mysql_helper as m_h
from sahara.plugins.vanilla.v1_2_1 import oozie_helper as o_h
from sahara.swift import swift_helper as swift
from sahara.topology import topology_helper as topology
from sahara.utils import crypto
from sahara.utils import types as types
from sahara.utils import xmlutils as x


conductor = c.API
LOG = logging.getLogger(__name__)
CONF = cfg.CONF

CORE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v1_2_1/resources/core-default.xml')

HDFS_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v1_2_1/resources/hdfs-default.xml')

MAPRED_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v1_2_1/resources/mapred-default.xml')

HIVE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v1_2_1/resources/hive-default.xml')

# Append Oozie configs for core-site.xml
CORE_DEFAULT += o_h.OOZIE_CORE_DEFAULT

XML_CONFS = {
    "HDFS": [CORE_DEFAULT, HDFS_DEFAULT],
Author: a9261 | Project: sahara | Lines: 32 | Source file: config_helper.py

Example 11

# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from sahara.plugins import provisioning as p
from sahara.utils import xmlutils as x


CORE_DEFAULT = x.load_hadoop_xml_defaults("plugins/intel/v3_0_2/resources/hadoop-default.xml")

HDFS_DEFAULT = x.load_hadoop_xml_defaults("plugins/intel/v3_0_2/resources/hdfs-default.xml")

YARN_DEFAULT = x.load_hadoop_xml_defaults("plugins/intel/v3_0_2/resources/yarn-default.xml")

OOZIE_DEFAULT = x.load_hadoop_xml_defaults("plugins/intel/v3_0_2/resources/oozie-default.xml")


XML_CONFS = {"Hadoop": [CORE_DEFAULT], "HDFS": [HDFS_DEFAULT], "YARN": [YARN_DEFAULT], "JobFlow": [OOZIE_DEFAULT]}

IDH_TARBALL_URL = p.Config(
    "IDH tarball URL",
    "general",
    "cluster",
    priority=1,
Author: qinweiwei | Project: sahara | Lines: 31 | Source file: config_helper.py

Example 12: vm_awareness_mapred_config

def vm_awareness_mapred_config():
    c = x.load_hadoop_xml_defaults('topology/resources/mapred-template.xml')
    result = [cfg for cfg in c if cfg['value']]
    LOG.info("Vm awareness will add following configs in map-red "
             "params: %s", result)
    return result
Author: B-Rich | Project: sahara | Lines: 6 | Source file: topology_helper.py

Example 13

# limitations under the License.

from oslo.config import cfg

from sahara import exceptions as ex
from sahara.openstack.common import log as logging
from sahara.plugins import provisioning as p
from sahara.utils import types as types
from sahara.utils import xmlutils as x

CONF = cfg.CONF
CONF.import_opt("enable_data_locality", "sahara.topology.topology_helper")

LOG = logging.getLogger(__name__)

CORE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v2_3_0/resources/core-default.xml')

HDFS_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v2_3_0/resources/hdfs-default.xml')

MAPRED_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v2_3_0/resources/mapred-default.xml')

YARN_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v2_3_0/resources/yarn-default.xml')

OOZIE_DEFAULT = x.load_hadoop_xml_defaults(
    'plugins/vanilla/v2_3_0/resources/oozie-default.xml')

XML_CONFS = {
    "Hadoop": [CORE_DEFAULT],
Author: qinweiwei | Project: sahara | Lines: 32 | Source file: config_helper.py

Example 14: read_default_swift_configs

def read_default_swift_configs():
    return x.load_hadoop_xml_defaults('swift/resources/conf-template.xml')
Author: Imperat | Project: sahara | Lines: 2 | Source file: swift_helper.py

Example 15

from oslo.config import cfg

from sahara import conductor as c
from sahara.openstack.common import log as logging
from sahara.plugins.general import utils
from sahara.plugins import provisioning as p
from sahara.topology import topology_helper as topology
from sahara.utils import types as types
from sahara.utils import xmlutils as x


conductor = c.API
LOG = logging.getLogger(__name__)
CONF = cfg.CONF

CORE_DEFAULT = x.load_hadoop_xml_defaults("plugins/spark/resources/core-default.xml")

HDFS_DEFAULT = x.load_hadoop_xml_defaults("plugins/spark/resources/hdfs-default.xml")

XML_CONFS = {"HDFS": [CORE_DEFAULT, HDFS_DEFAULT]}

SPARK_CONFS = {
    "Spark": {
        "OPTIONS": [
            {
                "name": "Master port",
                "description": "Start the master on a different port" " (default: 7077)",
                "default": "7077",
                "priority": 2,
            },
            {
Author: JohannaMW | Project: sahara | Lines: 31 | Source file: config_helper.py
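
Unlike the XML-backed defaults, SPARK_CONFS is a plain nested dict. As a small illustration of how such a registry might be queried, here is a sketch; the lookup helper itself is hypothetical, not part of sahara:

def get_spark_option_default(confs, service, name):
    # Walk the OPTIONS list for the given service and return the declared
    # default for the named option, or None when it is not registered.
    for opt in confs.get(service, {}).get('OPTIONS', []):
        if opt['name'] == name:
            return opt.get('default')
    return None

# e.g. get_spark_option_default(SPARK_CONFS, 'Spark', 'Master port') -> '7077'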


Note: the sahara.utils.xmlutils.load_hadoop_xml_defaults examples on this page were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from community-contributed open-source projects; copyright remains with the original authors, and distribution and use are subject to each project's license. Please do not reproduce without permission.