

Python NeuralNet.get_all_params_values Method Code Examples

This article collects typical usage examples of the Python method nolearn.lasagne.NeuralNet.get_all_params_values. If you are wondering what NeuralNet.get_all_params_values does, how to call it, or what real-world code using it looks like, the curated examples below should help. You can also browse further usage examples of the class it belongs to, nolearn.lasagne.NeuralNet.


The sections below present 4 code examples of the NeuralNet.get_all_params_values method, sorted by popularity by default.
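
Before the examples, a minimal sketch of the basic round trip may help: get_all_params_values() returns the network's current parameter values as plain NumPy arrays (grouped by layer name), and load_params_from() copies such a snapshot back into a net. The small network below is an assumed setup for illustration only, not taken from the examples that follow.

# Minimal sketch; the layer configuration here is an assumption for illustration.
from nolearn.lasagne import NeuralNet
from lasagne import layers

net = NeuralNet(
    layers=[('input', layers.InputLayer),
            ('hidden', layers.DenseLayer),
            ('output', layers.DenseLayer)],
    input_shape=(None, 10),
    hidden_num_units=20,
    output_num_units=2,
    output_nonlinearity=None,
    regression=True,
    update_learning_rate=0.01,
    update_momentum=0.9,
    max_epochs=1,
)
net.initialize()

snapshot = net.get_all_params_values()  # dict: layer name -> list of NumPy arrays
net.load_params_from(snapshot)          # copy the snapshot back into the net

Examples 1 and 2 below use exactly this pair of calls to snapshot the best weights during training and restore them afterwards.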

Example 1: __call__

# Required import: from nolearn.lasagne import NeuralNet [as alias]
# Or: from nolearn.lasagne.NeuralNet import get_all_params_values [as alias]
	def __call__(self, nn, train_history):
	    current_valid = train_history[-1]['valid_loss']
	    current_epoch = train_history[-1]['epoch']
	    if current_valid < self.best_valid:
	        self.best_valid = current_valid
	        self.best_valid_epoch = current_epoch
	        self.best_weights = nn.get_all_params_values()
	    elif self.best_valid_epoch + self.patience < current_epoch:
	        print("Early stopping.")
	        print("Best valid loss was {:.6f} at epoch {}.".format(
	            self.best_valid, self.best_valid_epoch))
	        nn.load_params_from(self.best_weights)
	        raise StopIteration()
Developer ID: thewayofknowing, Project: Kaggle, Lines of code: 15, Source file: script.py
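
The snippet above shows only __call__; the attributes it reads (patience, best_valid, best_valid_epoch, best_weights) are set in an __init__ that the page omits. A minimal sketch of how the class is typically completed (the constructor below is an assumption, not part of the original script.py):

import numpy as np

class EarlyStopping(object):
    """Stop training once validation loss has not improved for `patience` epochs."""

    def __init__(self, patience=100):
        self.patience = patience
        self.best_valid = np.inf      # best validation loss seen so far
        self.best_valid_epoch = 0     # epoch at which it was seen
        self.best_weights = None      # snapshot from get_all_params_values()

An instance is passed to the net via on_epoch_finished=[EarlyStopping(patience=100)], as Example 2 shows; raising StopIteration from the handler is how nolearn callbacks end training early.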

Example 2: build_dbn

# Required import: from nolearn.lasagne import NeuralNet [as alias]
# Or: from nolearn.lasagne.NeuralNet import get_all_params_values [as alias]

#......... part of the code is omitted here .........
		dropout2_p=0.5, hidden4_num_units=hidden_layer_size,
		dropout3_p=0.3, hidden5_num_units=hidden_layer_size,
		dropout4_p=0.2, output_num_units=N_EVENTS,
		output_nonlinearity=sigmoid,
		
		batch_iterator_train = BatchIterator(batch_size=1000),
		batch_iterator_test = BatchIterator(batch_size=1000),
		
		y_tensor_type=theano.tensor.matrix,
		update=nesterov_momentum,
		update_learning_rate=theano.shared(float(0.03)),
		update_momentum=theano.shared(float(0.9)),
		
		objective_loss_function=loss,
		regression=True,

		on_epoch_finished=[
			AdjustVariable('update_learning_rate', start=0.03,stop=0.0001),
			AdjustVariable('update_momentum', start=0.9, stop=0.999),
			EarlyStopping(patience=100),	
		],

		max_epochs=max_epochs,
		verbose=1,
		)
	
	# load trial dataset
	dic = pickle.load(open('datapickled/traildata.pickle', 'rb'))
	
	X = dic['X']
	y = dic['y']
	
	# process training data
	total_time_points = len(X) // NO_TIME_POINTS
	no_rows = total_time_points * NO_TIME_POINTS

	X = X[0:no_rows, :]
	
	X = X.transpose()
	X_Samples = np.split(X, total_time_points, axis=1)
	X = np.asarray(X_Samples)
	
	y = y[0:no_rows, :]
	y = y[::NO_TIME_POINTS, :]
	y = y.astype('float32')
	
	net.fit(X,y)
	
	tip = datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
	
	# Save the net
	with open('net/net'+tip+'.pickle', 'wb') as f:
		pickle.dump(net, f, -1)
	
	plot(net)

	# Load test data
	dic = pickle.load(open('datapickled/testdata2.pickle', 'rb'))
	X_test = dic['X_test']
	ids_tot = dic['ids_tot']
	test_dict = dic['test_dict']
	test_total = dic['test_total']

	####process test data####
	print("Creating prediction file ... ")
	
	X_test = X_test
	total_test_len = len(X_test)
	
	total_test_time_points = len(X_test) // NO_TIME_POINTS
	remainder_test_points = len(X_test) % NO_TIME_POINTS
	
	no_rows = total_test_time_points * NO_TIME_POINTS
	X_test = X_test[0:no_rows, :]

	X_test = X_test.transpose()
	X_test_Samples = np.split(X_test, total_test_time_points, axis=1)
	X_test = np.asarray(X_test_Samples)
	
	# Evaluate test data
	print("Testing subject 0....")
	params = net.get_all_params_values()
	learned_weights = net.load_params_from(params)
	probabilities = net.predict_proba(X_test)
	
	total_test_points = total_test_len // NO_TIME_POINTS
	remainder_data = total_test_len % NO_TIME_POINTS
	for i, p in enumerate(probabilities):
		if i != total_test_points:
			for j in range(NO_TIME_POINTS):
				pred_tot.append(p)
	
	# create prediction file
	print('Creating submission(prediction) file...')
	tip = datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
	submission_file = 'res/test_conv_net_push'+tip+'.csv'
	# create pandas object
	submission =  pd.DataFrame(index=ids_tot[:len(pred_tot)],columns=cols,data=pred_tot)
	# write file
	submission.to_csv(submission_file, index_label='id', float_format='%.6f')
Developer ID: LadyEos, Project: EegCovNet, Lines of code: 104, Source file: convnet.py
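
Example 2 pickles the entire NeuralNet object. A lighter alternative built on the same method is to persist only the parameter values returned by get_all_params_values() and copy them back later with load_params_from(); the sketch below assumes a hypothetical file path and an already constructed, identically configured net.

import pickle

# Save just the learned parameter values (hypothetical path).
with open('net/params.pickle', 'wb') as f:
    pickle.dump(net.get_all_params_values(), f, -1)

# Later: rebuild the same architecture, then restore the weights into it.
with open('net/params.pickle', 'rb') as f:
    net.load_params_from(pickle.load(f))

load_params_from() also accepts another NeuralNet instance or a file name directly, and newer nolearn versions provide save_params_to() as the matching save helper.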

Example 3: data_preprocess_test

# Required import: from nolearn.lasagne import NeuralNet [as alias]
# Or: from nolearn.lasagne.NeuralNet import get_all_params_values [as alias]
    X_test = data_preprocess_test(X_test)
    total_test_time_points = len(X_test) // NO_TIME_POINTS
    remainder_test_points = len(X_test) % NO_TIME_POINTS

    no_rows = total_test_time_points * NO_TIME_POINTS
    X_test = X_test[0:no_rows, :]

    X_test = X_test.transpose()
    X_test_Samples = np.split(X_test, total_test_time_points, axis=1)
    X_test = np.asarray(X_test_Samples)


###########################################################################
#######get predictions and write to files for series 9 and series 10#######
    print("Testing subject%d...." %(subject))
    params = net.get_all_params_values()
    learned_weights = net.load_params_from(params)
    probabilities = net.predict_proba(X_test)

    sub9 = 'subj{0}_series{1}'.format(subject, 9)
    data_len9 = test_dict[sub9]
    total_time_points9 = data_len9 // NO_TIME_POINTS
    remainder_data9 = data_len9 % NO_TIME_POINTS

    sub10 = 'subj{0}_series{1}'.format(subject, 10)
    data_len10 = test_dict[sub10]
    total_time_points10 = data_len10 // NO_TIME_POINTS
    remainder_data10 = data_len10 % NO_TIME_POINTS

    total_test_points = total_time_points9+total_time_points10
Developer ID: LadyEos, Project: EegCovNet, Lines of code: 32, Source file: kaggleexample.py
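
Example 3 is cut off before the probabilities for series 9 and 10 are written out; the step it is building towards follows the same pattern as Example 2, where each window-level prediction is repeated for the NO_TIME_POINTS raw samples it covers. The loop below is an illustrative sketch of that pattern, not the omitted original code.

# Illustrative sketch only (assumed continuation, not the omitted original code).
pred_tot = []
for p in probabilities:
    for _ in range(NO_TIME_POINTS):
        pred_tot.append(p)            # each prediction covers NO_TIME_POINTS samples

# Trailing samples that did not fill a whole window can be padded
# with the last available prediction.
for _ in range(remainder_data9 + remainder_data10):
    pred_tot.append(pred_tot[-1])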

Example 4: build_dbn

# Required import: from nolearn.lasagne import NeuralNet [as alias]
# Or: from nolearn.lasagne.NeuralNet import get_all_params_values [as alias]

#......... part of the code is omitted here .........
	)

	
	###process training data####
	X = data_preprocess(X)
	total_time_points = len(X) // NO_TIME_POINTS

	no_rows = total_time_points * NO_TIME_POINTS
	print(X.shape)
	print(total_time_points)
	print(no_rows)

	X = X[0:no_rows, :]

	print(X.shape)

	X = X.transpose()
	X_Samples = np.split(X, total_time_points, axis=1)
	X = np.asarray(X_Samples)
	print(X.shape)


	y = y[0:no_rows, :]
	y = y[::NO_TIME_POINTS, :]

	print("Training trial %d...." %(t))
	net.fit(X,y)

	####process test data####
	print("Creating prediction file ... ")

	X_test = X_test
	X_test = data_preprocess(X_test)
	total_test_time_points = len(X_test) // NO_TIME_POINTS
	remainder_test_points = len(X_test) % NO_TIME_POINTS

	no_rows = total_test_time_points * NO_TIME_POINTS
	X_test = X_test[0:no_rows, :]

	X_test = X_test.transpose()
	X_test_Samples = np.split(X_test, total_test_time_points, axis=1)
	X_test = np.asarray(X_test_Samples)
	
###########################################################################
#######get predictions and write to files for series 9 and series 10#######
	print("Testing subject 0....")
	params = net.get_all_params_values()
	learned_weights = net.load_params_from(params)
	probabilities = net.predict_proba(X_test)

	total_time_points = []
	all_remainder_data = []
	subs = []
	total_test_points = 0

	trials = np.array(['01','02','03','04','05','06','07','08','09','10'])

	for trial in trials:    	
		sub = 'subj{0}_series{1}'.format('0', trial)
		data_len = test_dict[sub]
		total_time_point = data_len // NO_TIME_POINTS
		remainder_data = data_len % NO_TIME_POINTS

		subs.append(sub)
		total_time_points.append(total_time_point)
		all_remainder_data.append(remainder_data)

	total_test_points = np.sum(total_time_points)


	print(len(ids_tot))
	print(cols)

	print(len(probabilities))

	for i, p in enumerate(probabilities):
		for j in range(NO_TIME_POINTS):
			pred_tot.append(p)


	print(len(pred_tot))
	
	# for k in range(np.sum(all_remainder_data)):
	# 	pred_tot.append(pred_tot[-1])

	#submission file
	print('Creating submission(prediction) file...')

	submission_file = './test_conv_net_push.csv'
	# create pandas object for submission

	submission = pd.DataFrame(index=ids_tot[:len(pred_tot)],
	                           columns=cols,
	                           data=pred_tot)
	# write file
	submission.to_csv(submission_file, index_label='id', float_format='%.6f')
Developer ID: LadyEos, Project: EegCovNet, Lines of code: 104, Source file: ltest3.py


Note: the nolearn.lasagne.NeuralNet.get_all_params_values examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub/MSDocs. The snippets are selected from open-source projects contributed by their respective authors, and the source code copyright remains with those authors. Please consult the corresponding project's license before distributing or using the code; do not reproduce without permission.