本文整理汇总了Python中sahara.tests.unit.testutils.make_inst_dict函数的典型用法代码示例。如果您正苦于以下问题:Python make_inst_dict函数的具体用法?Python make_inst_dict怎么用?Python make_inst_dict使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了make_inst_dict函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: setUp
def setUp(self):
    """Create a Vanilla provider and one node group per Hadoop process."""
    super(TestUtils, self).setUp()
    self.plugin = p.VanillaProvider()
    # (attribute suffix, ng name, process, [(instance id, instance name)])
    layout = [
        ('manager', 'mng', 'manager', [('mng1', 'manager')]),
        ('namenode', 'nn', 'namenode', [('nn1', 'namenode')]),
        ('resourcemanager', 'jt', 'resourcemanager',
         [('jt1', 'resourcemanager')]),
        ('datanode', 'dn', 'datanode',
         [('dn1', 'datanode-1'), ('dn2', 'datanode-2')]),
        ('nodemanager', 'tt', 'nodemanager',
         [('tt1', 'nodemanager-1'), ('tt2', 'nodemanager-2')]),
        ('oozie', 'ooz1', 'oozie', [('ooz1', 'oozie')]),
        ('hiveserver', 'hs', 'hiveserver', [('hs1', 'hiveserver')]),
        ('secondarynamenode', 'snn', 'secondarynamenode',
         [('snn1', 'secondarynamenode')]),
    ]
    for attr, ng_name, process, inst_specs in layout:
        instances = [tu.make_inst_dict(inst_id, inst_name)
                     for inst_id, inst_name in inst_specs]
        # Node-group count always equals the number of instances built.
        setattr(self, 'ng_' + attr,
                tu.make_ng_dict(ng_name, 'f1', [process],
                                len(instances), instances))
示例2: get_fake_cluster
def get_fake_cluster(**kwargs):
    """Return a fake CDH 5 cluster: one manager, one master, 3 workers."""
    manager = tu.make_inst_dict('id1', 'manager_inst',
                                management_ip='1.2.3.4')
    manager_ng = tu.make_ng_dict('manager_ng', 1, ['MANAGER'], 1, [manager])
    master = tu.make_inst_dict('id2', 'master_inst',
                               management_ip='1.2.3.5')
    # The single master node runs every control-plane process.
    master_processes = ['NAMENODE', 'SECONDARYNAMENODE', 'RESOURCEMANAGER',
                        'JOBHISTORY', 'OOZIE_SERVER']
    master_ng = tu.make_ng_dict('master_ng', 1, master_processes, 1, [master])
    workers = _get_workers()
    worker_ng = tu.make_ng_dict('worker_ng', 1, ['DATANODE', 'NODEMANAGER'],
                                len(workers), workers)
    return tu.create_cluster('test_cluster', 1, 'cdh', '5',
                             [manager_ng, master_ng, worker_ng], **kwargs)
示例3: _get_context
def _get_context(self):
    """Build a one-node MapR 4.0.1.mrv1 cluster and wrap it in a Context."""
    instance = tu.make_inst_dict('id_1', 'instance_1', '1.1.1.1')
    # A single "master" node group running every relevant MapR service.
    master_processes = [
        yarn.RESOURCE_MANAGER.ui_name,
        yarn.NODE_MANAGER.ui_name,
        yarn.HISTORY_SERVER.ui_name,
        maprfs.CLDB.ui_name,
        maprfs.FILE_SERVER.ui_name,
        oozie.OOZIE.ui_name,
        management.ZOOKEEPER.ui_name,
    ]
    master_ng = tu.make_ng_dict('master', 'large', master_processes,
                                1, [instance])
    configs = {'Service': {'key': 'value', 'Service Version': '1.1'}}
    cluster = tu.create_cluster(name='test_cluster',
                                tenant='large',
                                plugin='mapr',
                                version='4.0.1.mrv1',
                                node_groups=[master_ng],
                                cluster_configs=configs)
    # Expose the group/instance for assertions elsewhere in the test class.
    self.ng = cluster.node_groups[0]
    self.instance = self.ng.instances[0]
    return cc.Context(cluster, handler.VersionHandler())
示例4: test_edp_main_class_spark
def test_edp_main_class_spark(self, job_get, cluster_get):
    """Spark jobs must define edp.java.main_class in their configs.

    job_get/cluster_get are mock patches injected by the test decorator
    (presumably @mock.patch -- not visible in this chunk).
    """
    job_get.return_value = mock.Mock(type=edp.JOB_TYPE_SPARK,
                                     interface=[])
    ng = tu.make_ng_dict('master', 42, ['namenode'], 1,
                         instances=[tu.make_inst_dict('id', 'name')])
    cluster_get.return_value = tu.create_cluster("cluster", "tenant1",
                                                 "spark", "1.0.0", [ng])
    # Missing edp.java.main_class -> validation must fail ...
    self._assert_create_object_validation(
        data={
            "cluster_id": six.text_type(uuid.uuid4()),
            "job_configs": {"configs": {},
                            "params": {},
                            "args": []}
        },
        bad_req_i=(1, "INVALID_DATA",
                   "%s job must "
                   "specify edp.java.main_class" % edp.JOB_TYPE_SPARK))
    # ... and with the main class supplied it must pass.
    self._assert_create_object_validation(
        data={
            "cluster_id": six.text_type(uuid.uuid4()),
            "job_configs": {
                "configs": {
                    "edp.java.main_class": "org.me.myclass"},
                "params": {},
                "args": []}
        })
示例5: _get_workers
def _get_workers():
    """Return three fake worker instances with sequential ids/names/IPs."""
    return [tu.make_inst_dict('id0%d' % idx, 'worker-0%d' % idx,
                              management_ip='1.2.3.1%d' % idx)
            for idx in range(3)]
示例6: setUp
def setUp(self):
    """Create a 5-node "general" 2.6.0 cluster fixture (master/workers/sn)."""
    super(GeneralUtilsTest, self).setUp()
    master = tu.make_inst_dict("i1", "master")
    # i2..i4 map to worker1..worker3.
    workers = [tu.make_inst_dict("i%d" % n, "worker%d" % (n - 1))
               for n in range(2, 5)]
    standby = tu.make_inst_dict("i5", "sn")
    groups = [
        tu.make_ng_dict("master", "f1", ["jt", "nn"], 1, [master]),
        tu.make_ng_dict("workers", "f1", ["tt", "dn"], 3, workers),
        tu.make_ng_dict("sn", "f1", ["dn"], 1, [standby]),
    ]
    self.c1 = tu.create_cluster("cluster1", "tenant1", "general", "2.6.0",
                                groups)
    self.ng1, self.ng2, self.ng3 = self.c1.node_groups[:3]
示例7: setUp
def setUp(self):
    """Create a 5-node "general" 1.2.1 cluster fixture (master/workers/sn)."""
    # NOTE(review): unlike the sibling setUp above, this one does not call
    # super().setUp() -- presumably intentional or handled by the base
    # class; verify against the enclosing test case.
    master = tu.make_inst_dict('i1', 'master')
    # i2..i4 map to worker1..worker3.
    workers = [tu.make_inst_dict('i%d' % n, 'worker%d' % (n - 1))
               for n in range(2, 5)]
    standby = tu.make_inst_dict('i5', 'sn')
    groups = [
        tu.make_ng_dict("master", "f1", ["jt", "nn"], 1, [master]),
        tu.make_ng_dict("workers", "f1", ["tt", "dn"], 3, workers),
        tu.make_ng_dict("sn", "f1", ["dn"], 1, [standby]),
    ]
    self.c1 = tu.create_cluster("cluster1", "tenant1", "general", "1.2.1",
                                groups)
    self.ng1, self.ng2, self.ng3 = self.c1.node_groups[:3]
示例8: test_data_sources_differ
def test_data_sources_differ(self, get_job, get_data_source, get_cluster):
    """Input and output data sources must not reference the same URL.

    get_job/get_data_source/get_cluster are mock patches injected by the
    test decorator (presumably @mock.patch -- not visible in this chunk).
    """
    get_job.return_value = mock.Mock(
        type=edp.JOB_TYPE_MAPREDUCE_STREAMING, libs=[], interface=[])
    ds1_id = six.text_type(uuid.uuid4())
    ds2_id = six.text_type(uuid.uuid4())
    # Two swift data sources with distinct URLs -- a valid pairing.
    data_sources = {
        ds1_id: mock.Mock(type="swift", url="http://swift/test"),
        ds2_id: mock.Mock(type="swift", url="http://swift/test2"),
    }
    get_data_source.side_effect = lambda ctx, x: data_sources[x]
    ng = tu.make_ng_dict('master', 42, ['oozie'], 1,
                         instances=[tu.make_inst_dict('id', 'name')])
    get_cluster.return_value = tu.create_cluster("cluster", "tenant1",
                                                 "vanilla", "2.7.1", [ng])
    # Distinct locations: validation succeeds.
    self._assert_create_object_validation(
        data={
            "cluster_id": six.text_type(uuid.uuid4()),
            "input_id": ds1_id,
            "output_id": ds2_id,
            "job_configs": {
                "configs": {
                    "edp.streaming.mapper": "/bin/cat",
                    "edp.streaming.reducer": "/usr/bin/wc"},
                "params": {},
                "job_execution_info": {},
                "args": []}
        })
    # Point both sources at the same location: validation must fail.
    data_sources[ds2_id].url = "http://swift/test"
    err_msg = ("Provided input and output DataSources reference the "
               "same location: %s" % data_sources[ds2_id].url)
    self._assert_create_object_validation(
        data={
            "cluster_id": six.text_type(uuid.uuid4()),
            "input_id": ds1_id,
            "output_id": ds2_id,
            "job_configs": {
                "configs": {
                    "edp.streaming.mapper": "/bin/cat",
                    "edp.streaming.reducer": "/usr/bin/wc"},
                "params": {},
                "job_execution_info": {},
                "args": []}
        },
        bad_req_i=(1, "INVALID_DATA", err_msg))
示例9: make_ng_dict_with_inst
def make_ng_dict_with_inst(counter, name, flavor,
                           processes, count, instances=None,
                           **kwargs):
    """Like tu.make_ng_dict, but auto-generates fake instances.

    When *instances* is empty or None, *count* fake instances are created
    with ids, names and management IPs derived from the shared *counter*
    iterator; otherwise the supplied list is used unchanged.
    """
    if not instances:
        instances = []
        for _ in range(count):
            idx = six.next(counter)
            instances.append(
                tu.make_inst_dict("id{0}".format(idx),
                                  "fake_inst{0}".format(idx),
                                  management_ip='1.2.3.{0}'.format(idx)))
    return tu.make_ng_dict(name, flavor, processes, count, instances,
                           **kwargs)
示例10: test_edp_main_class_java
def test_edp_main_class_java(self, job_get, cluster_get):
    """Java jobs must define edp.java.main_class in their configs.

    job_get/cluster_get are mock patches injected by the test decorator
    (presumably @mock.patch -- not visible in this chunk).
    """
    job_get.return_value = mock.Mock(type=edp.JOB_TYPE_JAVA, interface=[])
    ng = tu.make_ng_dict("master", 42, ["namenode", "oozie"], 1, instances=[tu.make_inst_dict("id", "name")])
    cluster_get.return_value = tu.create_cluster("cluster", "tenant1", "vanilla", "2.6.0", [ng])
    # Missing main class -> INVALID_DATA.
    self._assert_create_object_validation(
        data={"cluster_id": six.text_type(uuid.uuid4()), "job_configs": {"configs": {}, "params": {}, "args": []}},
        bad_req_i=(1, "INVALID_DATA", "%s job must " "specify edp.java.main_class" % edp.JOB_TYPE_JAVA),
    )
    # Main class supplied -> validation passes.
    self._assert_create_object_validation(
        data={
            "cluster_id": six.text_type(uuid.uuid4()),
            "job_configs": {"configs": {"edp.java.main_class": "org.me.myclass"}, "params": {}, "args": []},
        }
    )
示例11: test_streaming
def test_streaming(self, get_job, get_data_source, get_cluster):
    """MapReduce.Streaming jobs must configure both mapper and reducer.

    get_job/get_data_source/get_cluster are mock patches injected by the
    test decorator (presumably @mock.patch -- not visible in this chunk).
    """
    get_job.return_value = mock.Mock(
        type=edp.JOB_TYPE_MAPREDUCE_STREAMING, libs=[], interface=[])
    ds1_id = uuidutils.generate_uuid()
    ds2_id = uuidutils.generate_uuid()
    data_sources = {
        ds1_id: mock.Mock(type="swift", url="http://swift/test"),
        ds2_id: mock.Mock(type="swift", url="http://swift/test2"),
    }
    get_data_source.side_effect = lambda ctx, x: data_sources[x]
    ng = tu.make_ng_dict('master', 42, ['oozie'], 1,
                         instances=[tu.make_inst_dict('id', 'name')])
    get_cluster.return_value = tu.create_cluster("cluster", "tenant1",
                                                 "fake", "0.1", [ng])
    # No streaming mapper/reducer configured -> INVALID_DATA.
    self._assert_create_object_validation(
        data={
            "cluster_id": uuidutils.generate_uuid(),
            "input_id": ds1_id,
            "output_id": ds2_id,
            "job_configs": {"configs": {},
                            "params": {},
                            "args": [],
                            "job_execution_info": {}}
        },
        bad_req_i=(1, "INVALID_DATA",
                   "MapReduce.Streaming job "
                   "must specify streaming mapper "
                   "and reducer"))
    # Both streaming endpoints configured -> validation passes.
    self._assert_create_object_validation(
        data={
            "cluster_id": uuidutils.generate_uuid(),
            "input_id": ds1_id,
            "output_id": ds2_id,
            "job_configs": {
                "configs": {
                    "edp.streaming.mapper": "/bin/cat",
                    "edp.streaming.reducer": "/usr/bin/wc"},
                "params": {},
                "job_execution_info": {},
                "args": []}
        })
示例12: test_check_edp_job_support
def test_check_edp_job_support(self, get_job, get_cluster):
    """EDP jobs require exactly one oozie component in the cluster.

    get_job/get_cluster are mock patches injected by the test decorator
    (presumably @mock.patch -- not visible in this chunk).
    """
    get_job.return_value = FakeJob()
    # No oozie in the cluster (presumably the default patched cluster
    # lacks one -- verify against start_patch) -> component count error.
    self._assert_create_object_validation(
        data={
            "cluster_id": six.text_type(uuid.uuid4()),
            "input_id": six.text_type(uuid.uuid4()),
            "output_id": six.text_type(uuid.uuid4())
        },
        bad_req_i=(1, "INVALID_COMPONENT_COUNT",
                   "Hadoop cluster should contain 1 oozie component(s). "
                   "Actual oozie count is 0"))
    # With an oozie node group present the check passes without raising.
    ng = tu.make_ng_dict('master', 42, ['oozie'], 1,
                         instances=[tu.make_inst_dict('id', 'name')])
    get_cluster.return_value = tu.create_cluster("cluster", "tenant1",
                                                 "vanilla", "1.2.1", [ng])
    validation_base.check_edp_job_support('some_id')
示例13: test_check_edp_no_oozie
def test_check_edp_no_oozie(self, get_job, get_cluster):
    """Pig jobs are rejected when the cluster has no oozie process.

    get_job/get_cluster are mock patches injected by the test decorator
    (presumably @mock.patch -- not visible in this chunk).
    """
    get_job.return_value = mock.Mock(type=edp.JOB_TYPE_PIG, libs=[])
    # Cluster runs only a namenode -- no oozie anywhere.
    ng = tu.make_ng_dict('master', 42, ['namenode'], 1,
                         instances=[tu.make_inst_dict('id', 'name')])
    get_cluster.return_value = tu.create_cluster("cluster", "tenant1",
                                                 "vanilla", "1.2.1", [ng])
    self._assert_create_object_validation(
        data={
            "cluster_id": six.text_type(uuid.uuid4()),
            "input_id": six.text_type(uuid.uuid4()),
            "output_id": six.text_type(uuid.uuid4())
        },
        bad_req_i=(1, "INVALID_COMPONENT_COUNT",
                   "Hadoop cluster should contain 1 oozie component(s). "
                   "Actual oozie count is 0"))
示例14: test_edp_main_class_java
def test_edp_main_class_java(self, job_get, cluster_get):
    """Java jobs must set a non-empty edp.java.main_class.

    job_get/cluster_get are mock patches injected by the test decorator
    (presumably @mock.patch -- not visible in this chunk).
    """
    job_get.return_value = mock.Mock(type=edp.JOB_TYPE_JAVA,
                                     interface=[])
    ng = tu.make_ng_dict('master', 42, ['namenode', 'oozie'], 1,
                         instances=[tu.make_inst_dict('id', 'name')])
    cluster_get.return_value = tu.create_cluster("cluster", "tenant1",
                                                 "fake", "0.1", [ng])
    # Missing main class -> INVALID_DATA.
    self._assert_create_object_validation(
        data={
            "cluster_id": uuidutils.generate_uuid(),
            "job_configs": {"configs": {},
                            "params": {},
                            "args": [],
                            "job_execution_info": {}}
        },
        bad_req_i=(1, "INVALID_DATA",
                   "%s job must "
                   "specify edp.java.main_class" % edp.JOB_TYPE_JAVA))
    # An empty main class is rejected just like a missing one.
    self._assert_create_object_validation(
        data={
            "cluster_id": uuidutils.generate_uuid(),
            "job_configs": {
                "configs": {
                    "edp.java.main_class": ""},
                "params": {},
                "args": [],
                "job_execution_info": {}}
        },
        bad_req_i=(1, "INVALID_DATA",
                   "%s job must "
                   "specify edp.java.main_class" % edp.JOB_TYPE_JAVA))
    # Non-empty main class -> validation passes.
    self._assert_create_object_validation(
        data={
            "cluster_id": uuidutils.generate_uuid(),
            "job_configs": {
                "configs": {
                    "edp.java.main_class": "org.me.myclass"},
                "params": {},
                "job_execution_info": {},
                "args": []}
        })
示例15: test_check_edp_job_support_spark
def test_check_edp_job_support_spark(self, get_job, get_cluster):
    """Spark clusters pass the EDP support check without an oozie process.

    get_job/get_cluster are mock patches injected by the test decorator
    (presumably @mock.patch -- not visible in this chunk).
    """
    # utils.start_patch will construct a vanilla cluster as a
    # default for get_cluster, but we want a Spark cluster.
    # So, we'll make our own.
    # Note that this means we cannot use assert_create_object_validation()
    # because it calls start_patch() and will override our setting
    job = mock.Mock(type=edp.JOB_TYPE_SPARK, mains=["main"], interface=[])
    get_job.return_value = job
    ng = tu.make_ng_dict('master', 42, [], 1,
                         instances=[tu.make_inst_dict('id', 'name')])
    get_cluster.return_value = tu.create_cluster("cluster", "tenant1",
                                                 "spark", "1.3.1", [ng])
    # Everything is okay, spark cluster supports EDP by default
    # because cluster requires a master and slaves >= 1
    wrap_it(data={"cluster_id": six.text_type(uuid.uuid4()),
                  "job_configs": {
                      "configs": {
                          "edp.java.main_class": "org.me.class"}}})