本文整理汇总了Python中network.Network.train方法的典型用法代码示例。如果您正苦于以下问题:Python Network.train方法的具体用法?Python Network.train怎么用?Python Network.train使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类network.Network
的用法示例。
在下文中一共展示了Network.train方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: run_mnist
# 需要导入模块: from network import Network [as 别名]
# 或者: from network.Network import train [as 别名]
def run_mnist(epochs, layers, neuron_count):
    """Run the MNIST dataset and write a guess list for the Kaggle test set.

    Reads ``train.csv`` and ``test.csv`` from the working directory, trains
    a ``Network`` on the training vectors and writes one guess per line to
    ``digits.txt``.

    Parameters
    ----------
    epochs : int
        Number of iterations of the training loop over the whole dataset.
    layers : int
        Number of layers (not counting the input layer, but counting the
        output layer).
    neuron_count : list
        Number of neurons in each of the layers (in order), not counting
        the bias term.
    """
    with open('train.csv', 'r') as f:
        rows = list(csv.reader(f))
    train = [[int(x) for x in row] for row in rows[1:]]
    with open('test.csv', 'r') as f:
        raw_nums = list(csv.reader(f))
    test_set = [[int(x) for x in row] for row in raw_nums[1:]]
    ans_train = [row[0] for row in train]
    train_set = [row[1:] for row in train]
    # Drop the first record from both lists, keeping labels and features aligned.
    ans_train.pop(0)
    train_set.pop(0)
    # Shuffle features and labels with the same seed so pairs stay aligned.
    train_set = utils.resample(train_set, random_state=2)
    ans_train = utils.resample(ans_train, random_state=2)
    network = Network(layers, neuron_count, train_set[0])
    network.train(train_set, ans_train, epochs)
    # For validation purposes:
    # guess_list = network.run_unseen(train_set[4000:4500])
    # network.report_results(guess_list, ans_train[4000:4500])
    # guess_list = network.run_unseen(train_set[4500:5000])
    # network.report_results(guess_list, ans_train[4500:5000])
    guess_list = network.run_unseen(test_set)
    with open('digits.txt', 'w') as d:
        for elem in guess_list:
            d.write(str(elem) + '\n')
示例2: test_xor
# 需要导入模块: from network import Network [as 别名]
# 或者: from network.Network import train [as 别名]
def test_xor():
    """Train a 2-4-1 net on XOR and check the rounded outputs."""
    net = Network(2, 4, 1)
    samples = (
        ([0, 0], [0]),
        ([1, 1], [0]),
        ([0, 1], [1]),
        ([1, 0], [1]),
    )
    # Present randomly chosen XOR patterns many times.
    for _ in xrange(20000):
        pattern, target = samples[randint(0, 3)]
        net.train(pattern, target)
    # The rounded output must reproduce the XOR truth table exactly.
    for pattern, target in samples:
        assert round(net.test(pattern)[0]) == target[0]
示例3: test_train
# 需要导入模块: from network import Network [as 别名]
# 或者: from network.Network import train [as 别名]
def test_train(self):
    """Smoke-test Network.train: one epoch on a tiny slice must reach cost < 1.0."""
    border = 2
    mini_batch = 500
    n_in = (2 * border + 1) ** 2  # 25 inputs for a 5x5 pixel patch
    # Keyword arguments common to both batch processors.
    shared_args = dict(
        X_dirpath=config.data_dir_path + 'train/*',
        y_dirpath=config.data_dir_path + 'train_cleaned/',
        batchsize=5000,
        border=border,
        limit=1,
        dtype=theano.config.floatX,
        rnd=rnd,
    )
    training = BatchProcessor(random=True, random_mode='fully', **shared_args)
    validation = BatchProcessor(random=False, **shared_args)
    net = Network([
        FullyConnectedLayer(n_in=n_in, n_out=19, rnd=rnd),
        FullyConnectedLayer(n_in=19, n_out=1, rnd=rnd),
    ], mini_batch)
    cost = net.train(tdata=training, epochs=1, mbs=mini_batch, eta=0.1,
                     eta_min=0.01, vdata=validation, lmbda=0.0,
                     momentum=0.95, patience_increase=2,
                     improvement_threshold=0.995,
                     validation_frequency=1, algorithm='rmsprop',
                     early_stoping=False)
    self.assertTrue(float(cost) < 1.0)
示例4: train
# 需要导入模块: from network import Network [as 别名]
# 或者: from network.Network import train [as 别名]
def train(job_id, border, n_hidden_layer, eta):
print "Job ID: %d" % job_id
metric_recorder = MetricRecorder(config_dir_path='.', job_id=job_id)
C = {
'X_dirpath' : '../../../data/train/*',
'y_dirpath' : '../../../data/train_cleaned/',
'mini_batch_size' : 500,
'batchsize' : 500000,
'limit' : 30,
'epochs' : 100,
'patience' : 20000,
'patience_increase' : 2,
'improvement_threshold' : 0.995,
'validation_frequency' : 5000,
'lmbda' : 0.0,
'training_size' : None,
'validation_size' : None,
'algorithm' : 'RMSProp'
}
training_data = BatchProcessor(
X_dirpath='../../../data/train/*',
y_dirpath='../../../data/train_cleaned/',
batchsize=C['batchsize'],
border=border,
limit=C['limit'],
dtype=theano.config.floatX)
validation_data = BatchProcessor(
X_dirpath='../../../data/valid/*',
y_dirpath='../../../data/train_cleaned/',
batchsize=C['batchsize'],
border=border,
limit=C['limit'],
dtype=theano.config.floatX)
C['training_size'] = len(training_data)
C['validation_size'] = len(validation_data)
print "Training size: %d" % C['training_size']
print "Validation size: %d" % C['validation_size']
metric_recorder.add_experiment_metainfo(constants=C)
metric_recorder.start()
n_in = (2*border+1)**2
net = Network([FullyConnectedLayer(n_in=n_in, n_out=n_hidden_layer),
FullyConnectedLayer(n_in=n_hidden_layer, n_out=1)],
C['mini_batch_size'])
result = net.train(tdata=training_data, epochs=C['epochs'],
mbs=C['mini_batch_size'], eta=eta,
vdata=validation_data, lmbda=C['lmbda'],
momentum=None, patience_increase=C['patience_increase'],
improvement_threshold=C['improvement_threshold'],
validation_frequency=C['validation_frequency'],
metric_recorder=metric_recorder)
print 'Time = %f' % metric_recorder.stop()
print 'Result = %f' % result
return float(result)
示例5: demo
# 需要导入模块: from network import Network [as 别名]
# 或者: from network.Network import train [as 别名]
def demo():
    """Train a small momentum network on the addition set and test it."""
    shape = iho_len(add_set[0])
    first = Network(shape[0], shape[1], shape[2], 5000, 0.08, momentum=0.1)
    first.train(add_set)
    first.test(add_set)
    # Used when dealing with entirely binary inputs:
    # x = [0, 1, 2, 3, 4, 5, 6]
    # y = [9, 8, 7, 6, 4, 3, 3]
    # print ''
    # for i, j in zip(x, y):
    #     first.test([[dec_bin(i) + dec_bin(j), dec_bin(i + j)]])
"""
示例6: main
# 需要导入模块: from network import Network [as 别名]
# 或者: from network.Network import train [as 别名]
def main():
args = parseArgs()
network = Network(args.window_size, (args.window_size-1)/2, args.n_hidden_neurons)
if args.dict:
print 'Training using individual words from top 1000 dictionary.'
training_set = datasetDictionary(network)
else:
print 'Training using generated strings from dictionary.'
training_set = datasetGeneratedText(network)
print 'Your network is being trained..',
def print_dot():
print '%d..' % (network.n_trainings+1),
sys.stdout.flush()
network.train(training_set, args.n_epochs, callback=print_dot)
pickle.dump(network, args.outfile)
示例7: run_scikit_digits
# 需要导入模块: from network import Network [as 别名]
# 或者: from network.Network import train [as 别名]
def run_scikit_digits(epochs, layers, neuron_count):
    """Run the handwritten-digits dataset from Scikit-Learn.

    The learning set is split into roughly 70% for training, 15% for
    testing, and 15% for validation; results are printed via
    ``Network.report_results``.

    Parameters
    ----------
    epochs : int
        Number of iterations of the training loop over the whole dataset.
    layers : int
        Number of layers (not counting the input layer, but counting the
        output layer).
    neuron_count : list
        Number of neurons in each of the layers (in order), not counting
        the bias term.
    """
    temp_digits = datasets.load_digits()
    # Shuffle data and targets with the same seed so pairs stay aligned.
    digits = utils.resample(temp_digits.data, random_state=3)
    temp_answers = utils.resample(temp_digits.target, random_state=3)
    # images = utils.resample(temp_digits.images, random_state=0)
    n_train = 1250
    n_test_end = n_train + 260  # training | testing | validation boundaries
    answers = temp_answers[:n_train]
    answers_to_test = temp_answers[n_train:n_test_end]
    validation_answers = temp_answers[n_test_end:]
    training_set = digits[:n_train]
    testing_set = digits[n_train:n_test_end]
    validation_set = digits[n_test_end:]
    # network.visualization(training_set[10], answers[10])
    # network.visualization(training_set[11], answers[11])
    # network.visualization(training_set[12], answers[12])
    network = Network(layers, neuron_count, training_set[0])
    network.train(training_set, answers, epochs)
    guess_list = network.run_unseen(testing_set)
    network.report_results(guess_list, answers_to_test)
    valid_list = network.run_unseen(validation_set)
    network.report_results(valid_list, validation_answers)
示例8: test_bindec
# 需要导入模块: from network import Network [as 别名]
# 或者: from network.Network import train [as 别名]
def test_bindec():
    """A 4-bit binary input must light up the matching one-hot output."""
    net = Network(4, 6, 16)
    samples = {}
    for value in xrange(16):
        # 4-bit binary representation of the value, most significant bit first.
        bits = [int(c) for c in bin(value)[2:].zfill(4)]
        target = [0] * 16
        target[value] = 1
        samples[value] = (bits, target)
    # Train on randomly chosen (bits, one-hot) pairs.
    for _ in xrange(100000):
        net.train(*samples[randint(0, 15)])
    # The strongest output neuron must correspond to the encoded value.
    for value, (bits, target) in samples.items():
        outputs = net.test(bits)
        assert outputs.index(max(outputs)) == value
示例9: run
# 需要导入模块: from network import Network [as 别名]
# 或者: from network.Network import train [as 别名]
def run():
lines = DataReader.read('car.data.txt')
training_inputs = DataReader.parse_data(lines)
print "Initializing Network..."
my_network = Network(number_of_centers=NUMBER_OF_CENTERS,
training=TRAINING_ITERATIONS)
print "Done."
print "Starting training. {} centers / {} iterations".\
format(NUMBER_OF_CENTERS, TRAINING_ITERATIONS)
my_network.train(training_inputs)
print "Done."
# TODO(Accuracy): Test accuracy with non training data.
right = 0
total_tests = 100
for i in range(total_tests):
chosen = random.choice(training_inputs)
response = my_network.classify(chosen['inputs'])
if response == chosen['expected']:
right += 1
print "Accuracy => {}/{}".format(right, total_tests)
示例10: Network
# 需要导入模块: from network import Network [as 别名]
# 或者: from network.Network import train [as 别名]
# NOTE(review): fragment of a larger training script — X_train/X_test,
# X_val1/X_val2, the y_* vectors, nb_classes, weight_file and the time()
# import are defined in code omitted by the scraper, and indentation was
# flattened during scraping. Code is kept byte-identical; only comments added.
X_val1 = X_val1.astype('float32')
X_val2 = X_val2.astype('float32')
# Convert integer class vectors to one-hot (binary class) matrices.
Y_train = np_utils.to_categorical(y_train, nb_classes)
Y_test = np_utils.to_categorical(y_test, nb_classes)
Y_val1 = np_utils.to_categorical(y_val1, nb_classes)
Y_val2 = np_utils.to_categorical(y_val2, nb_classes)
# Load the model and time the training run.
nnet = Network()
model = nnet.model
print("32x16 random noise")
t0 = time()
nnet.train([X_train, Y_train], [X_test, Y_test], weight_file=weight_file)
# Earlier manual training loop, kept for reference:
# for k in range(1, nb_epoch + 1):
#     print("epoch %s/%s:" %(k,nb_epoch))
#     X_train_temp = np.copy(X_train) # Copy to not effect the originals
#     # Add noise on later epochs
#     if k > 1:
#         for j in range(0, X_train_temp.shape[0]):
#             X_train_temp[j,0, :, :] = rand_jitter(X_train_temp[j,0,:,:])
#     model.fit(X_train_temp, Y_train, nb_epoch=1, batch_size=batch_size,
#               validation_data=(X_test, Y_test),
#               callbacks=[checkpointer])
t1 = time()
示例11: LabelBinarizer
# 需要导入模块: from network import Network [as 别名]
# 或者: from network.Network import train [as 别名]
# NOTE(review): fragment — train_data/test_data and the raw target vectors
# come from omitted earlier code; the final `if` statement is cut off
# mid-body by the scraper and indentation was flattened. Code kept
# byte-identical; only comments added/improved.
# One-hot encode the class labels.
train_target = LabelBinarizer().fit_transform(train_target)
valid_target = LabelBinarizer().fit_transform(valid_target)
test_target = LabelBinarizer().fit_transform(test_target)
# Dataset sizes (MNIST-style 50k/10k/10k split).
train_size = 50000
valid_size = 10000
test_size = 10000
# Train a 784-1500-700-10 network one sample at a time per epoch.
epoch = 100
nn = Network([784, 1500, 700, 10])
for e in xrange(epoch):
print "epoch:%d" % e
for i in xrange(train_size):
nn.train(train_data[i], train_target[i])
#"""
# Per-epoch accuracy check on the test set.
correct = 0
for i in xrange(test_size):
output = nn.forward_propagation(test_data[i])
if np.argmax(output) == np.argmax(test_target[i]):
correct += 1
print u"correct: %d / %d" % (correct, test_size)
#"""
# Final test pass (truncated below by the scraper).
correct = 0
for i in xrange(test_size):
output = nn.forward_propagation(test_data[i])
if np.argmax(output) == np.argmax(test_target[i]):
示例12: main
# 需要导入模块: from network import Network [as 别名]
# 或者: from network.Network import train [as 别名]
def main(argv):
"""Parse CLI options, load a dataset, then train or restore a network.

NOTE(review): indentation was lost when this example was scraped and the
function is truncated below (the original continues past the visible
end). Code is kept byte-identical; only comments/docstring added.
"""
try:
# Options: -t dataset type, -i input file, -n hidden neurons,
# -w weights file, -r learning rate, -c classify file.
opts, args = getopt.getopt(argv,"t:i:n:w:r:c:",["datasetType=","infile=","numHidden=","weightsFile=","rate=","classifyFile="])
except getopt.GetoptError:
print "\nIncorrect call. Please try again."
printError()
# Defaults; learnRate falls back to 0.05 when -r is absent.
datasetType = None
numHidden = None
weightsFile = None
learnRate = 0.05
classifyFile = None
for opt, arg in opts:
if opt in ("-t", "--datasetType"):
datasetType = int(arg)
elif opt in ("-i", "--infile"):
fileName = str(arg)
elif opt in ("-n", "--numHidden"):
numHidden = int(arg)
elif opt in ("-w", "--weightsFile"):
weightsFile = str(arg)
elif opt in ("-r", "--rate"):
learnRate = float(arg)
elif opt in ("-c", "--classifyFile"):
classifyFile = str(arg)
# Hidden-layer size and dataset type are both mandatory.
if numHidden is None:
print "\nPlease enter the number of neurons in the hidden layer."
printError()
if datasetType is None:
print "\nPlease enter the type of the data set."
printError()
# Map the numeric dataset type onto its data subfolder.
datasetFolder = ""
if (datasetType == 1):
datasetFolder = "circle/"
elif (datasetType == 2 or datasetType == 3):
datasetFolder = "iris/"
elif (datasetType == 4):
datasetFolder = "logicalOperators/"
filePath = dataFolder+datasetFolder+fileName
fileExists = os.path.isfile(filePath)
if fileExists:
f = open(filePath,'r')
else:
print "\nThe given input file '%s' doesn't exist.\n" %(filePath)
sys.exit()
fileData = readFile(filePath)
data,numInput,numOuter = processData(fileData,datasetType,True)
iteration = 0
# Sentinel start value so the training loop below always runs once.
totalError = 2000
network = Network(numInput,numHidden,numOuter,learnRate)
if weightsFile is not None:
# Pre-trained mode: restore weights from a JSON file instead of training.
filePath = weightsFolder+datasetFolder+weightsFile
if os.path.isfile(filePath):
with open(filePath,'r') as f:
weights = json.load(f)
network.setWeights(weights)
print "\nWeights set."
network.printWeights()
else:
print "\nWeights file %s doesn't exist.\n" %(filePath)
sys.exit()
else:
# Training mode: loop until error is small enough or MAXITER reached.
totalErrors = []
totalMean = 0
while (totalError != 0 and totalError > MINERR and iteration < MAXITER):
totalError = 0
totalError = network.train(data)
totalErrors.append(totalError)
totalMean += totalError
if (iteration % 100 == 0):
print "Iteration: %s, totalError: %s" %(iteration,totalError)
iteration += 1
# Mean training error over all iterations.
print totalMean / iteration
network.printWeights()
# Timestamped filename for the error log (writing happens in omitted code).
timestr = time.strftime("%Y%m%d%H%M%S")
errorName = "error_neurons"+str(numHidden)+"_rate"+str(learnRate)+"_"+timestr+"_"
filePath = errorFolder+datasetFolder+errorName+fileName
#.........这里部分代码省略.........
示例13: xrange
# 需要导入模块: from network import Network [as 别名]
# 或者: from network.Network import train [as 别名]
# NOTE(review): fragment of an XOR training script, flattened by the
# scraper. Creating a fresh Network inside the loop and re-appending the
# same four inputs every iteration looks unintended — verify upstream.
# Code kept byte-identical; only comments added.
count = 0
theta = 0.0
inputs = []
for i in xrange(5):
nn = Network(2,4)
inputs.append([1,0])
inputs.append([0,1])
inputs.append([1,1])
inputs.append([0,0])
pick = inputs[random.randint(0,len(inputs)-1)]
# XOR target: 0 when both inputs are equal, 1 otherwise.
known = 1
if (pick[0] == 1 and pick[1] == 1) or (pick[0] == 0 and pick[1] == 0): known = 0
result = nn.train(pick,known)
count += 1
theta += 0.0025
mse = 0.0
# NOTE(review): this loop enumerates `val` but evaluates `pick` in both
# the target and the feed-forward call — looks like a bug in the original
# example; confirm intent before reusing.
for ind,val in enumerate(inputs):
known = 1
if (pick[0] == 1 and pick[1] == 1) or (pick[0] == 0 and pick[1] == 0): known = 0
result = nn.feedForward(pick)
mse += (result - known)*(result-known)  # accumulate squared error
rmse = math.sqrt(mse/4)
print "Root mean squared error: ", rmse
示例14: Network
# 需要导入模块: from network import Network [as 别名]
# 或者: from network.Network import train [as 别名]
# NOTE(review): fragment — `correctValFunc` is cut off by the scraper (no
# return statement visible) and indentation was flattened. Code kept
# byte-identical; only comments added/improved.
import glob
import io
from network import Network
from language import Language
from pybrain.tools.validation import CrossValidator
from pybrain.tools.validation import ModuleValidator
# Build one Language per data file; filenames look like "<language>_<num>.txt".
languages = []
for g in glob.glob("./data/*.txt"):
language, num = g.split("/")[-1].split("_")
languages.append(Language(io.open(g, "r+"), language))
# Train the language classifier until the PyBrain trainer converges.
n = Network(languages)
n.train()
n.trainer.verbose = True
n.trainer.trainUntilConvergence()
def correctValFunc(output, target):
# Count predictions whose argmax matches the target's argmax (truncated below).
assert len(output) == len(target)
n_correct = 0
for idx, instance in enumerate(output):
# This will find the maximum likelihood language
classification = instance.argmax(axis=0)
objective = target[idx].argmax(axis=0)
if objective == classification:
n_correct += 1
示例15: Network
# 需要导入模块: from network import Network [as 别名]
# 或者: from network.Network import train [as 别名]
'''
# NOTE(review): fragment flattened by the scraper; the stray ''' above
# presumably pairs with a string opened in omitted code — confirm against
# the original file. Trains a (2,4,1) net to classify points inside the
# unit circle, then samples its response along three lines. Code kept
# byte-identical; only comments added.
nw = Network((2,4,1))
trainingset = []
# 1000 random points in [-3,3]^2, labelled 1 when inside the unit circle.
for i in xrange(1000):
x = random.uniform(-3,3)
y = random.uniform(-3,3)
point = ([x,y],[0])
if np.sqrt(x**2+y**2) < 1:
point[1][0] = 1
trainingset.append(point)
nw.train(trainingset,50,2,1)
#print nw
# Sample the trained net along the x-axis, y-axis and the diagonal.
x = np.linspace(-5,5,100)
y = np.zeros(x.shape)
y2 = np.zeros(x.shape)
y3 = np.zeros(x.shape)
for i in xrange(x.size):
outp = nw.compute_outputs((x[i],0))
y[i] = outp[-1][0]
outp = nw.compute_outputs((0,x[i]))
y2[i] = outp[-1][0]
outp = nw.compute_outputs((x[i]/np.sqrt(2),x[i]/np.sqrt(2)))
y3[i] = outp[-1][0]