This article collects typical usage examples of the `model.model` method in Python. If you are unsure how to call `model.model`, or want to see how it is used in real code, the hand-picked examples below may help. You can also explore the containing `model` module for further usage examples.
The following shows 15 code examples of the `model.model` method, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python code examples.
Example 1: __init__
# Required import: import model [as alias]
# Or: from model import model [as alias]
def __init__(self, env, dueling, noisy, fname):
    self.g = tf.Graph()
    self.noisy = noisy
    self.dueling = dueling
    self.env = env
    with self.g.as_default():
        self.act = deepq.build_act_enjoy(
            make_obs_ph=lambda name: U.Uint8Input(
                env.observation_space.shape, name=name),
            q_func=dueling_model if dueling else model,
            num_actions=env.action_space.n,
            noisy=noisy
        )
        self.saver = tf.train.Saver()
    self.sess = tf.Session(graph=self.g)
    if fname is not None:
        print('Loading Model...')
        self.saver.restore(self.sess, fname)
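A minimal usage sketch follows, not part of the original example: the class name `DQNModel`, the Gym environment id, the checkpoint path, and the exact call signature of the act function returned by `deepq.build_act_enjoy` are all assumptions.

import gym

# Hypothetical driver; DQNModel stands for the class whose __init__ is shown above.
env = gym.make("PongNoFrameskip-v4")
agent = DQNModel(env, dueling=True, noisy=False, fname="./saved_model/saved")

obs, done = env.reset(), False
while not done:
    with agent.sess.as_default():
        # build_act_enjoy is assumed to return a batched act function
        action = agent.act([obs], stochastic=True)[0]
    obs, reward, done, _ = env.step(action)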
Example 2: maybe_save_model
# Required import: import model [as alias]
# Or: from model import model [as alias]
def maybe_save_model(savedir, container, state):
    if savedir is None:
        return
    start_time = time.time()
    model_dir = "model-{}".format(state["num_iters"])
    U.save_state(os.path.join(savedir, model_dir, "saved"))
    if container is not None:
        container.put(os.path.join(savedir, model_dir), model_dir)
    relatively_safe_pickle_dump(state,
                                os.path.join(savedir,
                                             'training_state.pkl.zip'),
                                compression=True)
    if container is not None:
        container.put(os.path.join(savedir, 'training_state.pkl.zip'),
                      'training_state.pkl.zip')
    relatively_safe_pickle_dump(state["monitor_state"],
                                os.path.join(savedir, 'monitor_state.pkl'))
    if container is not None:
        container.put(os.path.join(savedir, 'monitor_state.pkl'),
                      'monitor_state.pkl')
    logger.log("Saved model in {} seconds\n".format(time.time() - start_time))
Example 3: maybe_load_model
# Required import: import model [as alias]
# Or: from model import model [as alias]
def maybe_load_model(savedir, container):
    """Load model if present at the specified path."""
    if savedir is None:
        return
    state_path = os.path.join(os.path.join(savedir, 'training_state.pkl.zip'))
    if container is not None:
        logger.log("Attempting to download model from Azure")
        found_model = container.get(savedir, 'training_state.pkl.zip')
    else:
        found_model = os.path.exists(state_path)
    if found_model:
        state = pickle_load(state_path, compression=True)
        model_dir = "model-{}".format(state["num_iters"])
        if container is not None:
            container.get(savedir, model_dir)
        U.load_state(os.path.join(savedir, model_dir, "saved"))
        logger.log("Loaded models checkpoint at {} iterations".format(
            state["num_iters"]))
        return state
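The two helpers from Examples 2 and 3 are typically paired inside a training loop. The skeleton below is only a hedged sketch of that pattern; `monitored_env`, `max_iters`, `save_freq`, and the monitor `get_state`/`set_state` calls are assumed to exist in the surrounding script.

# Hypothetical training-loop skeleton around maybe_load_model / maybe_save_model.
state = maybe_load_model(savedir, container)
if state is not None:
    num_iters = state["num_iters"]
    monitored_env.set_state(state["monitor_state"])  # assumed monitor API
else:
    num_iters = 0

while num_iters < max_iters:
    num_iters += 1
    # ... run one environment step and one training update here ...
    if num_iters % save_freq == 0:
        maybe_save_model(savedir, container, {
            "num_iters": num_iters,
            "monitor_state": monitored_env.get_state(),  # assumed monitor API
        })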
Example 4: create_train_model
# Required import: import model [as alias]
# Or: from model import model [as alias]
def create_train_model(model_creator, hparams, data_dir):
    """Create train graph, model, and iterator."""
    train_data_path = []
    for root, _, name in os.walk(os.path.join(data_dir, 'train_data')):
        for x in name:
            if x.split('.')[-1] == 'mat':
                train_data_path.append(os.path.join(root, x))
    assert len(train_data_path) == 1
    train_data = scio.loadmat(*train_data_path)['data']
    assert hparams.src_len == hparams.tgt_len == train_data.shape[1]
    graph = tf.Graph()
    with graph.as_default(), tf.container("train"):
        # channels: [features, SBP, DBP, MBP]
        train_src_data = train_data[:, :, 0:hparams.src_feature_size]
        train_tgt_data = train_data[:, :, hparams.src_feature_size:hparams.src_feature_size + hparams.tgt_feature_size]
        src_dataset = tf.data.Dataset.from_tensor_slices(train_src_data)
        tgt_dataset = tf.data.Dataset.from_tensor_slices(train_tgt_data)
        iterator = get_iterator(src_dataset, tgt_dataset, batch_size=hparams.batch_size,
                                random_seed=hparams.random_seed, is_train=True)
        model = model_creator(hparams, iterator=iterator, mode=tf.contrib.learn.ModeKeys.TRAIN)
    return TrainModel(graph=graph, model=model, iterator=iterator)
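One possible way to drive the returned `TrainModel` tuple is sketched below; the model attributes `update` and `train_loss` and the hparam `num_train_steps` are assumptions about the `model_creator` used in this project, not part of the example.

# Hypothetical training driver for the TrainModel namedtuple built above.
train_model = create_train_model(model_creator, hparams, data_dir)
with tf.Session(graph=train_model.graph) as sess:
    sess.run(tf.global_variables_initializer())
    sess.run(train_model.iterator.initializer)
    for step in range(hparams.num_train_steps):
        try:
            _, loss = sess.run([train_model.model.update, train_model.model.train_loss])
        except tf.errors.OutOfRangeError:
            # the iterator is exhausted at the end of an epoch; restart it
            sess.run(train_model.iterator.initializer)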
Example 5: create_eval_model
# Required import: import model [as alias]
# Or: from model import model [as alias]
def create_eval_model(model_creator, hparams, data_dir):
    """Create eval graph, model and iterator."""
    eval_data_path = []
    for root, _, name in os.walk(os.path.join(data_dir, 'eval_data')):
        for x in name:
            if x.split('.')[-1] == 'mat':
                eval_data_path.append(os.path.join(root, x))
    assert len(eval_data_path) == 1
    eval_data = scio.loadmat(*eval_data_path)['data']
    data_mean, data_std = load_data_mean_std(hparams, data_dir)
    batch_size = eval_data.shape[0]
    graph = tf.Graph()
    with graph.as_default(), tf.container("eval"):
        eval_src_data = eval_data[:, :, 0:hparams.src_feature_size]
        # channels: [features, SBP, DBP, MBP]
        eval_tgt_data = eval_data[:, :, hparams.src_feature_size:hparams.src_feature_size + hparams.tgt_feature_size]
        src_dataset = tf.data.Dataset.from_tensor_slices(eval_src_data)
        tgt_dataset = tf.data.Dataset.from_tensor_slices(eval_tgt_data)
        iterator = get_iterator(src_dataset, tgt_dataset, batch_size=batch_size,
                                random_seed=hparams.random_seed, is_train=False)
        model = model_creator(hparams, iterator=iterator, mode=tf.contrib.learn.ModeKeys.EVAL)
    return EvalModel(graph=graph, model=model, iterator=iterator, data_mean=data_mean, data_std=data_std)
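A hedged evaluation sketch built on the same pattern follows; the `model.predictions` attribute, the checkpoint directory `hparams.out_dir`, and the assumption that `data_mean`/`data_std` undo a per-channel normalization are guesses about this code base.

# Hypothetical evaluation pass using the EvalModel namedtuple built above;
# variables are restored from a trained checkpoint rather than initialized.
eval_model = create_eval_model(model_creator, hparams, data_dir)
with eval_model.graph.as_default():
    saver = tf.train.Saver()
with tf.Session(graph=eval_model.graph) as sess:
    saver.restore(sess, tf.train.latest_checkpoint(hparams.out_dir))  # assumed ckpt dir
    sess.run(eval_model.iterator.initializer)
    preds = sess.run(eval_model.model.predictions)
    # Undo the normalization applied when the .mat files were prepared (assumed).
    preds = preds * eval_model.data_std + eval_model.data_mean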
Example 6: maybe_save_model
# Required import: import model [as alias]
# Or: from model import model [as alias]
def maybe_save_model(savedir, container, state):
    """This function checkpoints the model and state of the training algorithm."""
    if savedir is None:
        return
    start_time = time.time()
    model_dir = "model-{}".format(state["num_iters"])
    U.save_state(os.path.join(savedir, model_dir, "saved"))
    if container is not None:
        container.put(os.path.join(savedir, model_dir), model_dir)
    relatively_safe_pickle_dump(state, os.path.join(savedir, 'training_state.pkl.zip'), compression=True)
    if container is not None:
        container.put(os.path.join(savedir, 'training_state.pkl.zip'), 'training_state.pkl.zip')
    relatively_safe_pickle_dump(state["monitor_state"], os.path.join(savedir, 'monitor_state.pkl'))
    if container is not None:
        container.put(os.path.join(savedir, 'monitor_state.pkl'), 'monitor_state.pkl')
    logger.log("Saved model in {} seconds\n".format(time.time() - start_time))
Example 7: maybe_load_model
# Required import: import model [as alias]
# Or: from model import model [as alias]
def maybe_load_model(savedir, container):
    """Load model if present at the specified path."""
    if savedir is None:
        return
    state_path = os.path.join(os.path.join(savedir, 'training_state.pkl.zip'))
    if container is not None:
        logger.log("Attempting to download model from Azure")
        found_model = container.get(savedir, 'training_state.pkl.zip')
    else:
        found_model = os.path.exists(state_path)
    if found_model:
        state = pickle_load(state_path, compression=True)
        model_dir = "model-{}".format(state["num_iters"])
        if container is not None:
            container.get(savedir, model_dir)
        U.load_state(os.path.join(savedir, model_dir, "saved"))
        logger.log("Loaded models checkpoint at {} iterations".format(state["num_iters"]))
        return state
Example 8: tower_loss
# Required import: import model [as alias]
# Or: from model import model [as alias]
def tower_loss(images, score_maps, geo_maps, training_masks, reuse_variables=None):
    # Build inference graph
    with tf.variable_scope(tf.get_variable_scope(), reuse=reuse_variables):
        f_score, f_geometry = model.model(images, is_training=True)

    model_loss = model.loss(score_maps, f_score,
                            geo_maps, f_geometry,
                            training_masks)
    total_loss = tf.add_n([model_loss] + tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES))

    # add summary
    if reuse_variables is None:
        tf.summary.image('input', images)
        tf.summary.image('score_map', score_maps)
        tf.summary.image('score_map_pred', f_score * 255)
        tf.summary.image('geo_map_0', geo_maps[:, :, :, 0:1])
        tf.summary.image('geo_map_0_pred', f_geometry[:, :, :, 0:1])
        tf.summary.image('training_masks', training_masks)
        tf.summary.scalar('model_loss', model_loss)
        tf.summary.scalar('total_loss', total_loss)

    return total_loss, model_loss
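`tower_loss` is usually called once per GPU with shared variables. The sketch below follows that pattern, but the optimizer choice, `learning_rate`, `gpus`, and the pre-split input tensors are assumptions and not part of the example.

# Hypothetical multi-GPU wiring around tower_loss; input_*_split are the
# per-tower slices of the input placeholders, assumed to be created elsewhere.
opt = tf.train.AdamOptimizer(learning_rate)
tower_grads = []
reuse_variables = None
for i, gpu_id in enumerate(gpus):
    with tf.device('/gpu:%d' % gpu_id), tf.name_scope('model_%d' % gpu_id):
        total_loss, model_loss = tower_loss(input_images_split[i],
                                            input_score_maps_split[i],
                                            input_geo_maps_split[i],
                                            input_training_masks_split[i],
                                            reuse_variables=reuse_variables)
        reuse_variables = True  # share weights across towers after the first one
        tower_grads.append(opt.compute_gradients(total_loss))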
Example 9: predict_from_model
# Required import: import model [as alias]
# Or: from model import model [as alias]
def predict_from_model(logit_groups_geometry, logit_groups_semantics,
                       temperature):
    """Reconstruct predicted geometry and semantics from model output."""
    predictions_geometry_list = []
    for logit_group in logit_groups_geometry:
        if FLAGS.p_norm > 0:
            predictions_geometry_list.append(logit_group[:, :, :, :, 0])
        else:
            logit_group_shape = logit_group.shape_as_list()
            logit_group = tf.reshape(logit_group, [-1, logit_group_shape[-1]])
            samples = tf.multinomial(temperature * logit_group, 1)
            predictions_geometry_list.append(
                tf.reshape(samples, logit_group_shape[:-1]))
    predictions_semantics_list = []
    if FLAGS.predict_semantics:
        for logit_group in logit_groups_semantics:
            predictions_semantics_list.append(tf.argmax(logit_group, 4))
    else:
        predictions_semantics_list = [
            tf.zeros(shape=predictions_geometry_list[0].shape, dtype=tf.uint8)
        ] * len(predictions_geometry_list)
    return predictions_geometry_list, predictions_semantics_list
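The else-branch above samples class indices from temperature-scaled logits. A stripped-down illustration of that single call, detached from the FLAGS and logit-group machinery and using made-up logits, is shown below.

# Toy illustration of the sampling used in predict_from_model (assumed shapes).
logits = tf.constant([[2.0, 0.5, 1.0]])           # one row of class logits
temperature = 0.7
sample = tf.multinomial(temperature * logits, 1)  # one class index per row
with tf.Session() as sess:
    print(sess.run(sample))                       # e.g. [[0]]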
Example 10: __init__
# Required import: import model [as alias]
# Or: from model import model [as alias]
def __init__(self, env, dueling, noisy, fname):
    self.g = tf.Graph()
    self.noisy = noisy
    self.dueling = dueling
    self.env = env
    with self.g.as_default():
        self.act = deepq.build_act_enjoy(
            make_obs_ph=lambda name: U.Uint8Input(env.observation_space.shape, name=name),
            q_func=dueling_model if dueling else model,
            num_actions=env.action_space.n,
            noisy=noisy
        )
        self.saver = tf.train.Saver()
    self.sess = tf.Session(graph=self.g)
    if fname is not None:
        print('Loading Model...')
        self.saver.restore(self.sess, fname)
Example 11: parse_args
# Required import: import model [as alias]
# Or: from model import model [as alias]
def parse_args():
    parser = argparse.ArgumentParser("Run an already learned DQN model.")
    # Environment
    parser.add_argument("--env", type=str, required=True, help="name of the game")
    parser.add_argument("--model-dir", type=str, default=None, help="load model from this directory")
    parser.add_argument("--video", type=str, default=None, help="path to mp4 file where the video of the first episode will be recorded")
    boolean_flag(parser, "stochastic", default=True, help="whether or not to use stochastic actions according to the model's eps value")
    boolean_flag(parser, "dueling", default=False, help="whether or not to use the dueling model")
    # V: Attack Arguments #
    parser.add_argument("--model-dir2", type=str, default=None, help="load adversarial model from this directory (blackbox attacks)")
    parser.add_argument("--attack", type=str, default=None, help="method used to attack the model")
    boolean_flag(parser, "noisy", default=False, help="whether or not to use NoisyNetwork")
    boolean_flag(parser, "noisy2", default=False, help="whether or not to use NoisyNetwork for the adversarial model")
    boolean_flag(parser, "blackbox", default=False, help="whether or not to run a blackbox attack")
    return parser.parse_args()
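A hedged sketch of the entry point this parser might feed is shown below; `make_env`, `play`, and the `DQNModel` name reused from Example 1 are assumed helpers, not part of the example.

# Hypothetical entry point wiring parse_args into the wrapper from Example 1.
if __name__ == '__main__':
    args = parse_args()
    env = make_env(args.env)
    agent = DQNModel(env, dueling=args.dueling, noisy=args.noisy,
                     fname=args.model_dir)
    play(env, agent, stochastic=args.stochastic, video_path=args.video)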
Example 12: load_model
# Required import: import model [as alias]
# Or: from model import model [as alias]
def load_model(self):
    sess = tf.Session()
    with tf.get_default_graph().as_default():
        input_images = tf.placeholder(tf.float32, shape=[None, None, None, 3], name='input_images')
        global_step = tf.get_variable('global_step', [], initializer=tf.constant_initializer(0), trainable=False)
        f_score, f_geometry = model.model(input_images, is_training=False)
        variable_averages = tf.train.ExponentialMovingAverage(0.997, global_step)
        saver = tf.train.Saver(variable_averages.variables_to_restore())
        with sess.as_default():
            model_path = tf.train.latest_checkpoint(self.model_dir)
            saver.restore(sess, model_path)
    self._f_score = f_score
    self._f_geometry = f_geometry
    self._sess = sess
    self._input_images = input_images
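A hypothetical companion method showing how the tensors cached by load_model could be used for inference; the requirement that the input height and width be multiples of 32 is an assumption about this EAST-style detector rather than something stated in the example.

import numpy as np

# Hypothetical method for the same class that defines load_model above.
def detect(self, image):
    """Run the restored graph on one RGB image (H and W assumed multiples of 32)."""
    score, geometry = self._sess.run(
        [self._f_score, self._f_geometry],
        feed_dict={self._input_images: image[np.newaxis, :, :, :].astype(np.float32)})
    return score, geometry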
Example 13: __init__
# Required import: import model [as alias]
# Or: from model import model [as alias]
def __init__(self, env, dueling, noisy, fname):
    self.g = tf.Graph()
    self.noisy = noisy
    self.dueling = dueling
    self.env = env
    with self.g.as_default():
        self.act = deepq.build_act_enjoy(
            make_obs_ph=lambda name: U.Uint8Input(
                env.observation_space.shape, name=name),
            q_func=dueling_model if dueling else model,
            num_actions=env.action_space.n,
            noisy=noisy
        )
        self.saver = tf.train.Saver()
    self.sess = tf.Session(graph=self.g)
    if fname is not None:
        print('Loading Model...')
        self.saver.restore(self.sess, fname)
Example 14: maybe_save_model
# Required import: import model [as alias]
# Or: from model import model [as alias]
def maybe_save_model(savedir, container, state):
    if savedir is None:
        return
    start_time = time.time()
    model_dir = "model-{}".format(state["num_iters"])
    U.save_state(os.path.join(savedir, model_dir, "saved"))
    if container is not None:
        container.put(os.path.join(savedir, model_dir), model_dir)
    relatively_safe_pickle_dump(state,
                                os.path.join(savedir,
                                             'training_state.pkl.zip'),
                                compression=True)
    if container is not None:
        container.put(os.path.join(savedir, 'training_state.pkl.zip'),
                      'training_state.pkl.zip')
    relatively_safe_pickle_dump(state["monitor_state"],
                                os.path.join(savedir, 'monitor_state.pkl'))
    if container is not None:
        container.put(os.path.join(savedir, 'monitor_state.pkl'),
                      'monitor_state.pkl')
    logger.log("Saved model in {} seconds\n".format(time.time() - start_time))
Example 15: maybe_load_model
# Required import: import model [as alias]
# Or: from model import model [as alias]
def maybe_load_model(savedir, container):
    """Load model if present at the specified path."""
    if savedir is None:
        return
    state_path = os.path.join(os.path.join(savedir, 'training_state.pkl.zip'))
    if container is not None:
        logger.log("Attempting to download model from Azure")
        found_model = container.get(savedir, 'training_state.pkl.zip')
    else:
        found_model = os.path.exists(state_path)
    if found_model:
        state = pickle_load(state_path, compression=True)
        model_dir = "model-{}".format(state["num_iters"])
        if container is not None:
            container.get(savedir, model_dir)
        U.load_state(os.path.join(savedir, model_dir, "saved"))
        logger.log("Loaded models checkpoint at {} iterations".format(
            state["num_iters"]))
        return state