This article collects typical usage examples of the Python method model.Model.run. If you are wondering exactly what Model.run does or how to call it, the curated code examples below may help; you can also explore the enclosing class model.Model for more context.
Seven code examples of Model.run are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help surface better Python code examples.
Example 1: run
# Required import: from model import Model [as alias]
# Or: from model.Model import run [as alias]
def run(self):
    '''
    Runs through each model specified by models_to_run once with each possible
    setting in params.
    '''
    N = 0
    self.prepare_report()
    for index, clf in enumerate([self.clfs[x] for x in self.models_to_run]):
        iteration = 0
        print 'Running {}.'.format(self.models_to_run[index])
        parameter_values = self.params[self.models_to_run[index]]
        grid = ParameterGrid(parameter_values)
        while iteration < self.iterations_max and iteration < len(grid):
            print ' Running Iteration {} of {}...'.format(iteration + 1,
                                                          self.iterations_max)
            if len(grid) > self.iterations_max:
                p = random.choice(list(grid))
            else:
                p = list(grid)[iteration]
            try:
                m = Model(clf, self.X_train, self.y_train, self.X_test,
                          self.y_test, p, N, self.models_to_run[index],
                          iteration, self.run_name, self.label,
                          self.thresholds, self.outfile)
                m.run()
                self.check_model_performance(m, self.comparison_threshold)
                m.performance_to_file()
                self.pickle_model(m)
            except IndexError as e:
                print p
                print N
                print 'IndexError: {}'.format(e)
                print traceback.format_exc()
                continue
            except RuntimeError as e:
                print p
                print N
                print 'RuntimeError: {}'.format(e)
                print traceback.format_exc()
                continue
            except AttributeError as e:
                print p
                print N
                print 'AttributeError: {}'.format(e)
                print traceback.format_exc()
                continue
            except ValueError as e:
                print p
                print N
                print 'Unexpected ValueError: {}'.format(e)
                print traceback.format_exc()
                continue
            iteration += 1
            N += 1
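The sampling logic in Example 1 relies on scikit-learn's ParameterGrid. Here is a minimal standalone sketch of that selection pattern; the parameter names and iteration budget are illustrative, not taken from the original project:

import random
from sklearn.model_selection import ParameterGrid

params = {'max_depth': [3, 5, None], 'n_estimators': [10, 100]}
grid = ParameterGrid(params)   # every combination of the listed values
iterations_max = 4             # illustrative budget, standing in for self.iterations_max

for iteration in range(min(iterations_max, len(grid))):
    if len(grid) > iterations_max:
        p = random.choice(list(grid))   # sample randomly when the grid exceeds the budget
    else:
        p = list(grid)[iteration]       # otherwise walk the grid in order
    print(p)                            # e.g. {'max_depth': 3, 'n_estimators': 10}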
Example 2: test_scalars
# Required import: from model import Model [as alias]
# Or: from model.Model import run [as alias]
def test_scalars(self):
    # Create 2 inputs
    X = helper.make_tensor_value_info('A', TensorProto.INT32, [])
    Y = helper.make_tensor_value_info('B', TensorProto.INT32, [])
    # Create one output
    Z = helper.make_tensor_value_info('C', TensorProto.INT32, [])
    # Create a node
    node_def = helper.make_node('Add', ['A', 'B'], ['C'])
    # Create the model
    graph_def = helper.make_graph([node_def], "scalar-model", [X, Y], [Z])
    onnx_model = helper.make_model(graph_def,
                                   producer_name='onnx-example')
    model = Model()
    model.BuildFromOnnxModel(onnx_model)
    schedule = model.OptimizeSchedule()
    schedule = schedule.replace('\n', ' ')
    expected_schedule = r'// Target: .+// MachineParams: .+// Delete this line if not using Generator Pipeline pipeline = get_pipeline\(\);.+Func C = pipeline.get_func\(2\);.+{.+}.+'
    self.assertRegex(schedule, expected_schedule)
    input1 = np.random.randint(-10, 10, size=())
    input2 = np.random.randint(-10, 10, size=())
    outputs = model.run([input1, input2])
    self.assertEqual(1, len(outputs))
    output = outputs[0]
    expected = input1 + input2
    np.testing.assert_allclose(expected, output)
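The ONNX graph built in Example 2 can be validated on its own before handing it to Model.BuildFromOnnxModel. A small sketch using onnx.checker, which is standard onnx API and independent of the Model class under test:

from onnx import helper, checker, TensorProto

A = helper.make_tensor_value_info('A', TensorProto.INT32, [])
B = helper.make_tensor_value_info('B', TensorProto.INT32, [])
C = helper.make_tensor_value_info('C', TensorProto.INT32, [])
add_node = helper.make_node('Add', ['A', 'B'], ['C'])
graph = helper.make_graph([add_node], 'scalar-model', [A, B], [C])
model_proto = helper.make_model(graph, producer_name='onnx-example')
checker.check_model(model_proto)  # raises onnx.checker.ValidationError if the graph is malformed
print(model_proto.graph.node)     # one Add node taking A and B, producing C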
Example 3: test_small_model
# Required import: from model import Model [as alias]
# Or: from model.Model import run [as alias]
def test_small_model(self):
    # Create one input
    X = helper.make_tensor_value_info('IN', TensorProto.FLOAT, [2, 3])
    # Create one output
    Y = helper.make_tensor_value_info('OUT', TensorProto.FLOAT, [2, 3])
    # Create a node
    node_def = helper.make_node('Abs', ['IN'], ['OUT'])
    # Create the model
    graph_def = helper.make_graph([node_def], "test-model", [X], [Y])
    onnx_model = helper.make_model(graph_def,
                                   producer_name='onnx-example')
    model = Model()
    model.BuildFromOnnxModel(onnx_model)
    schedule = model.OptimizeSchedule()
    schedule = schedule.replace('\n', ' ')
    expected_schedule = r'// Target: .+// MachineParams: .+// Delete this line if not using Generator Pipeline pipeline = get_pipeline\(\);.+Func OUT = pipeline.get_func\(1\);.+{.+}.+'
    self.assertRegex(schedule, expected_schedule)
    input = np.random.rand(2, 3) - 0.5
    outputs = model.run([input])
    self.assertEqual(1, len(outputs))
    output = outputs[0]
    expected = np.abs(input)
    np.testing.assert_allclose(expected, output)
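For the element-wise Abs graph in Example 3, the reference result can also come from a separate runtime instead of numpy. A sketch using onnxruntime, assuming onnxruntime is installed and supports the default opset that helper.make_model emits with your onnx version:

import numpy as np
import onnxruntime as ort
from onnx import helper, TensorProto

X = helper.make_tensor_value_info('IN', TensorProto.FLOAT, [2, 3])
Y = helper.make_tensor_value_info('OUT', TensorProto.FLOAT, [2, 3])
node = helper.make_node('Abs', ['IN'], ['OUT'])
graph = helper.make_graph([node], 'test-model', [X], [Y])
onnx_model = helper.make_model(graph, producer_name='onnx-example')

sess = ort.InferenceSession(onnx_model.SerializeToString(),
                            providers=['CPUExecutionProvider'])
data = (np.random.rand(2, 3) - 0.5).astype(np.float32)
(reference,) = sess.run(None, {'IN': data})   # single output, so unpack the list
np.testing.assert_allclose(np.abs(data), reference)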
Example 4: main
# Required import: from model import Model [as alias]
# Or: from model.Model import run [as alias]
def main():
    # objs/emulator -verbose @Nexus4
    parser = argparse.ArgumentParser()
    parser.add_argument('--emu-path', default=os.path.abspath('../qemu/obj/emulator'),
                        help='emulator path')
    parser.add_argument('emu_args', default=['-verbose', '@Nexus4'], nargs='*',
                        help='args for the emulator. e.g: @Nexus4')
    args = parser.parse_args()
    device = Emulator(args.emu_path, *args.emu_args)
    model = Model(device)
    modem = Modem()
    model.add_component('modem_driver_read', modem)
    try:
        model.run()
    except (KeyboardInterrupt, SystemExit):
        print("got Ctrl+C (SIGINT) or exit() is called")
        model.stop()
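Example 4 wraps the blocking model.run() call so that Ctrl+C tears the emulator down cleanly. A minimal sketch of that shutdown pattern with a hypothetical stand-in class (Worker below is not the Model from the example):

import time

class Worker:
    """Hypothetical stand-in with the same run()/stop() shape."""
    def __init__(self):
        self._running = True

    def run(self):
        while self._running:   # blocking event loop
            time.sleep(0.1)    # poll the device, dispatch events, ...

    def stop(self):
        self._running = False  # release resources, join threads, ...

w = Worker()
try:
    w.run()
except (KeyboardInterrupt, SystemExit):
    print("got Ctrl+C (SIGINT) or exit() was called")
    w.stop()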
Example 5: test_tensors_rank_zero
# Required import: from model import Model [as alias]
# Or: from model.Model import run [as alias]
def test_tensors_rank_zero(self):
    X = helper.make_tensor_value_info('X', TensorProto.FLOAT, [3, 2])
    S1 = helper.make_tensor_value_info('S1', TensorProto.INT64, [])
    S2 = helper.make_tensor_value_info('S2', TensorProto.FLOAT, [])
    size_node = helper.make_node('Size', ['X'], ['S1'])
    graph_def = helper.make_graph([size_node],
                                  "rank_zero_test",
                                  [X],
                                  [S1, S2],
                                  initializer=[
                                      helper.make_tensor('S2', TensorProto.FLOAT, (), (3.14,))])
    onnx_model = helper.make_model(graph_def,
                                   producer_name='onnx-example')
    model = Model()
    model.BuildFromOnnxModel(onnx_model)
    input_data = np.random.rand(3, 2)
    outputs = model.run([input_data])
    self.assertEqual(6, outputs[0])
    self.assertAlmostEqual(3.14, outputs[1])
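The 'S2' output in Example 5 comes straight from a rank-zero initializer rather than from any node. The tensor that helper.make_tensor builds can be inspected directly with onnx.numpy_helper (standard onnx API):

from onnx import helper, numpy_helper, TensorProto

scalar = helper.make_tensor('S2', TensorProto.FLOAT, (), (3.14,))  # dims=() gives a rank-zero tensor
arr = numpy_helper.to_array(scalar)
print(arr.shape, arr.dtype, float(arr))  # () float32 3.14...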
Example 6: test_model_with_initializer
# Required import: from model import Model [as alias]
# Or: from model.Model import run [as alias]
def test_model_with_initializer(self):
    X = helper.make_tensor_value_info('X', TensorProto.FLOAT, [3, 1])
    Z2 = helper.make_tensor_value_info('Z2', TensorProto.FLOAT, [2, 3, 6])
    expand_node_def = helper.make_node('Expand', ['X', 'Y'], ['Z1'])
    cast_node_def = helper.make_node('Scale', ['Z1'], ['Z2'])
    graph_def = helper.make_graph([expand_node_def, cast_node_def],
                                  "test-node",
                                  [X],
                                  [Z2],
                                  initializer=[
                                      helper.make_tensor('Y', TensorProto.INT64, (3,), (2, 1, 6))])
    onnx_model = helper.make_model(graph_def,
                                   producer_name='onnx-example')
    model = Model()
    model.BuildFromOnnxModel(onnx_model)
    input_data = np.random.rand(3, 1)
    outputs = model.run([input_data])
    expected = input_data * np.ones([2, 1, 6], dtype=np.float32)
    np.testing.assert_allclose(expected, outputs[0])
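The expected value in Example 6 relies on ONNX Expand following numpy-style broadcasting, so multiplying the input by an array of ones with the target shape reproduces it. A short check of the shapes involved:

import numpy as np

x = np.random.rand(3, 1)                             # input X
expanded = x * np.ones([2, 1, 6], dtype=np.float32)  # same trick as the test's expected value
print(expanded.shape)                                 # (2, 3, 6): [3, 1] broadcast against [2, 1, 6]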
Example 7: print
# Required import: from model import Model [as alias]
# Or: from model.Model import run [as alias]
#!/usr/bin/python3.5
print("initializing Ising Model")
from model import Model
from interface import Interface
#i = Interface()
#dimension = i.askDimension()
#size = i.askSize()
size = 100
dimension = 2
populate = 'circle'
dynamic = 'none'
output = 'video'
iterate = 10
m = Model()
m.run(dimension, size, populate=populate, iterate=iterate, output=output)
print("finished")