This article compiles typical usage examples of the Python method LogisticRegression.LogisticRegression.cost. If you are wondering how LogisticRegression.cost is used in practice, the curated examples here may help. You can also explore further usage of the containing class, LogisticRegression.LogisticRegression.
Two code examples of LogisticRegression.cost are shown below, ordered by popularity.
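Before the examples, here is a minimal sketch of the interface the LogisticRegression class is expected to expose: a softmax output layer whose cost is the mean negative log-likelihood. This is an illustration written in NumPy, not the library's actual source, and the exact cost() signature differs slightly between the two examples below.

import numpy as np

class LogisticRegression:
    def __init__(self, n_in, n_out):
        self.W = np.zeros((n_in, n_out))
        self.b = np.zeros(n_out)
        self.params = [self.W, self.b]

    def output(self, x):
        # softmax over the affine transform x @ W + b
        z = x @ self.W + self.b
        e = np.exp(z - z.max(axis=1, keepdims=True))
        return e / e.sum(axis=1, keepdims=True)

    def cost(self, estimate, y):
        # mean negative log-likelihood of the integer labels y
        return -np.mean(np.log(estimate[np.arange(y.shape[0]), y]))

    def calcAccuracy(self, estimate, y):
        # fraction of samples whose predicted class matches the label
        return np.mean(np.argmax(estimate, axis=1) == y)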
Example 1: __init__
# Required import: from LogisticRegression import LogisticRegression [as alias]
# Or: from LogisticRegression.LogisticRegression import cost [as alias]
# (the HiddenLayer class used below is assumed to come from the same project)
class NeuralNetwork:
    def __init__(self, rng, n_in, n_out, hl):
        # self.layers holds one HiddenLayer object per entry in hl
        self.layers = []
        inp_size = n_in
        for i in range(len(hl)):
            HL = HiddenLayer(rng, inp_size, hl[i])
            self.layers.append(HL)
            inp_size = hl[i]
        # output layer: logistic regression (softmax) over the last hidden layer
        self.op = LogisticRegression(inp_size, n_out)
        # collect the trainable parameters of every hidden layer and the output layer
        self.params = []
        for l in self.layers:
            self.params = self.params + l.params
        self.params = self.params + self.op.params

    def forward(self, x):
        # forward pass: feed x through each hidden layer, collecting activations
        act = [x]
        for i, l in enumerate(self.layers):
            act.append(l.output(act[i]))
        return act

    def cost(self, x, y):
        # cost of the final-layer estimate against the targets y
        act = self.forward(x)
        estimate = act[-1]
        return self.op.cost(estimate, y)

    def calcAccuracy(self, x, y):
        # classification accuracy of the final-layer output against y
        act = self.forward(x)
        ll = act[-1]
        return self.op.calcAccuracy(ll, y)
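A hypothetical way to drive the NeuralNetwork class above. The HiddenLayer implementation, the data, and the use of plain NumPy arrays are all assumptions; the example itself does not show how the class is called.

import numpy as np

rng = np.random.RandomState(1234)
net = NeuralNetwork(rng, n_in=784, n_out=10, hl=[256, 64])  # two hidden layers

x_batch = np.random.rand(32, 784)          # 32 flattened 28x28 images
y_batch = np.random.randint(0, 10, 32)     # integer class labels

print(net.cost(x_batch, y_batch))          # scalar cost from LogisticRegression.cost
print(net.calcAccuracy(x_batch, y_batch))  # accuracy on the batch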
Example 2: loadData
# Required import: from LogisticRegression import LogisticRegression [as alias]
# Or: from LogisticRegression.LogisticRegression import cost [as alias]
# loadData, Hidden and LogReg are project-specific helpers assumed to be
# importable from the same repository; they are not standard-library modules.
import theano.tensor as T
from numpy import zeros

Tr, Ts, _ = loadData('mnist.pkl.gz', True)
m_sample = Tr[0].shape[0]
m_test_sample = Ts[1].shape[0]

x, y = T.dmatrices('x', 'y')
H = Hidden(x, 784, 50, param_init=('glorot', 'zero'), activation_func=T.tanh)
h = H.output()
L = LogReg(h, 50, 10, param_init=('zero', 'zero'))

lam = 0.02    # regularisation strength
alpha = 0.8   # learning rate / weight decay
zeta = 0.995  # Nesterov momentum

global cost
if lam is None:
    cost = L.cost(y)
else:
    # regularised cost: penalty on the output-layer weights plus
    # L2 and L1 penalties on the hidden layer
    cost = L.cost(y) + L.regularizer({'weights': lam, 'bias': 0.0}) \
        + lam * H.regularizer_L2() + 0.02 * H.regularizer_L1()

pred = L.predict()

# gradients of the cost w.r.t. the output-layer and hidden-layer parameters
gw = T.grad(cost, wrt=L.W)
gb = T.grad(cost, wrt=L.B)
gwh1 = T.grad(cost, wrt=H.W)
gbh1 = T.grad(cost, wrt=H.B)

# velocity buffers (one per parameter) for the momentum updates
W_shape, B_shape = L.weightShapes()
WH1_shape, BH1_shape = H.weightShapes()
VW = zeros(W_shape)
VB = zeros(B_shape)
VWH1 = zeros(WH1_shape)
VBH1 = zeros(BH1_shape)
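The snippet stops after initialising the velocity buffers. Below is a sketch of how they are typically used to finish the setup: a plain momentum update compiled into a Theano function. The original repository's exact update rule is not shown here (zeta is labelled Nesterov momentum above, and the true Nesterov form differs slightly from this classical momentum step), and wrapping the velocities in theano.shared is an assumption made so the updates can be compiled.

import theano

# wrap the velocity buffers so Theano can update them in place
VW, VB = theano.shared(VW), theano.shared(VB)
VWH1, VBH1 = theano.shared(VWH1), theano.shared(VBH1)

updates = []
for param, grad, vel in [(L.W, gw, VW), (L.B, gb, VB),
                         (H.W, gwh1, VWH1), (H.B, gbh1, VBH1)]:
    v_new = zeta * vel - alpha * grad   # momentum step with learning rate alpha
    updates.append((vel, v_new))
    updates.append((param, param + v_new))

train = theano.function([x, y], cost, updates=updates)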