This article collects typical usage examples of the Python method keras.backend.count_params. If you have been wondering what exactly backend.count_params does, how to call it, or what real-world usage looks like, the curated code examples below may help. You can also explore further usage examples for the module that provides the method, keras.backend.
A total of 8 code examples of backend.count_params are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
Example 1: get_model_memory_usage
# Required import: from keras import backend [as alias]
# Or: from keras.backend import count_params [as alias]
def get_model_memory_usage(batch_size, model):
    import numpy as np
    from keras import backend as K

    # Number of output activations stored per sample.
    shapes_mem_count = 0
    for l in model.layers:
        single_layer_mem = 1
        for s in l.output_shape:
            if s is None:
                continue
            single_layer_mem *= s
        shapes_mem_count += single_layer_mem

    # Trainable and non-trainable parameter counts.
    trainable_count = np.sum([K.count_params(p) for p in set(model.trainable_weights)])
    non_trainable_count = np.sum([K.count_params(p) for p in set(model.non_trainable_weights)])

    # 4 bytes per float32 value, scaled by the batch size, converted to GB.
    total_memory = 4.0 * batch_size * (shapes_mem_count + trainable_count + non_trainable_count)
    gbytes = np.round(total_memory / (1024.0 ** 3), 3)
    return gbytes
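For context, a minimal usage sketch is shown below; the small Sequential model is only an illustration and not part of the original example.

# Hypothetical usage of get_model_memory_usage on a toy model.
from keras.models import Sequential
from keras.layers import Dense

model = Sequential([
    Dense(128, activation='relu', input_shape=(64,)),
    Dense(10, activation='softmax'),
])
# Approximate float32 memory footprint (activations + weights) for batch size 32, in GB.
print(get_model_memory_usage(32, model))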
Example 2: plot_parameter_statistic
# Required import: from keras import backend [as alias]
# Or: from keras.backend import count_params [as alias]
import numpy as np
import matplotlib.pyplot as plt
from keras import backend as K

def plot_parameter_statistic(model, layer_types=['Dense', 'Conv2D'],
                             trainable=True, non_trainable=False, outputs=False):
    parameter_count = []
    names = []
    for l in model.layers:
        if l.__class__.__name__ not in layer_types:
            continue
        count = 0
        if outputs:
            # Output activations of every inbound node, batch dimension excluded.
            count += np.sum([np.sum([np.prod(s[1:]) for s in n.output_shapes])
                             for n in l._inbound_nodes])
        if trainable:
            count += np.sum([K.count_params(p) for p in set(l.trainable_weights)])
        if non_trainable:
            count += np.sum([K.count_params(p) for p in set(l.non_trainable_weights)])
        parameter_count.append(count)
        names.append(l.name)

    # Horizontal bar chart with one bar per matching layer.
    y = range(len(names))
    plt.figure(figsize=[12, max(len(y) // 4, 1)])
    plt.barh(y, parameter_count, align='center')
    plt.yticks(y, names)
    plt.ylim(y[0] - 1, y[-1] + 1)
    ax = plt.gca()
    ax.invert_yaxis()
    ax.xaxis.tick_top()
    plt.show()
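If you want to see what the chart looks like, a call might look like the following; the small model is assumed purely for illustration.

# Hypothetical usage: horizontal bar chart of trainable and non-trainable
# parameter counts for the Dense/Conv2D layers of a small model.
from keras.models import Sequential
from keras.layers import Conv2D, Flatten, Dense

model = Sequential([
    Conv2D(8, 3, activation='relu', input_shape=(28, 28, 1)),
    Flatten(),
    Dense(10, activation='softmax'),
])
plot_parameter_statistic(model, layer_types=['Dense', 'Conv2D'],
                         trainable=True, non_trainable=True)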
Example 3: test_build_model
# Required import: from keras import backend [as alias]
# Or: from keras.backend import count_params [as alias]
def test_build_model(self):
    dim = 256
    FL = 3
    learn_rate = 0.0001
    n_filters = 112
    init = 'he_normal'
    lmbda = 1e-06
    drop = 0.15
    model = mt.build_model(dim, learn_rate, lmbda, drop, FL, init,
                           n_filters)
    # Following https://stackoverflow.com/questions/45046525/keras-number-of-trainable-parameters-in-model
    trainable_count = int(np.sum([K.count_params(p) for p in
                                  set(model.trainable_weights)]))
    non_trainable_count = int(np.sum([K.count_params(p) for p in
                                      set(model.non_trainable_weights)]))
    assert trainable_count + non_trainable_count == 10278017
    assert trainable_count == 10278017
    assert non_trainable_count == 0
Example 4: calc_memory_usage
# Required import: from keras import backend [as alias]
# Or: from keras.backend import count_params [as alias]
import numpy as np
from keras import backend as K

def calc_memory_usage(model, batch_size=1):
    """Compute the memory usage of a Keras model.

    # Arguments
        model: Keras model.
        batch_size: Batch size used for training.

    source: https://stackoverflow.com/a/46216013/445710
    """
    # Number of output activations per sample, summed over all inbound nodes.
    shapes_mem_count = 0
    for l in model.layers:
        shapes_mem_count += np.sum([np.sum([np.prod(s[1:]) for s in n.output_shapes])
                                    for n in l._inbound_nodes])

    trainable_count = np.sum([K.count_params(p) for p in set(model.trainable_weights)])
    non_trainable_count = np.sum([K.count_params(p) for p in set(model.non_trainable_weights)])

    # each value occupies 4 bytes (float32) in memory
    total_memory = 4.0 * batch_size * (shapes_mem_count + trainable_count + non_trainable_count)

    # Scale the unit until the number is readable.
    for s in ['Byte', 'KB', 'MB', 'GB', 'TB']:
        if total_memory > 1024:
            total_memory /= 1024
        else:
            break
    print('model memory usage %8.2f %s' % (total_memory, s))
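A quick sanity check could look like the call below; it assumes a Keras version that exposes keras.applications.MobileNet and the Node.output_shapes attribute used above. The printed unit scales automatically from bytes up to TB.

# Hypothetical usage: report the approximate training-time memory of a stock model.
from keras.applications import MobileNet

calc_memory_usage(MobileNet(weights=None), batch_size=16)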
Example 5: count_parameters
# Required import: from keras import backend [as alias]
# Or: from keras.backend import count_params [as alias]
import numpy as np
from keras import backend as K

def count_parameters(model):
    # Right-aligned, comma-grouped counts of trainable and non-trainable parameters.
    trainable_count = int(np.sum([K.count_params(p) for p in set(model.trainable_weights)]))
    non_trainable_count = int(np.sum([K.count_params(p) for p in set(model.non_trainable_weights)]))
    print('trainable {:>16,d}'.format(trainable_count))
    print('non-trainable {:>16,d}'.format(non_trainable_count))
Example 6: count_params
# Required import: from keras import backend [as alias]
# Or: from keras.backend import count_params [as alias]
import numpy as np
import keras
from keras import backend as K

def count_params(keModel: keras.Model):
    # Mirror the summary footer printed by keras.Model.summary().
    trainable_count = int(
        np.sum([K.count_params(p) for p in set(keModel.trainable_weights)]))
    non_trainable_count = int(
        np.sum([K.count_params(p) for p in set(keModel.non_trainable_weights)]))
    print('Total params: {:,}'.format(trainable_count + non_trainable_count))
    print('Trainable params: {:,}'.format(trainable_count))
    print('Non-trainable params: {:,}'.format(non_trainable_count))
    return
Example 7: compute_trainable_params
# Required import: from keras import backend [as alias]
# Or: from keras.backend import count_params [as alias]
import numpy as np

def compute_trainable_params(model):
    """Extract the number of parameters from the given Keras model.

    Parameters
    ----------
    model : Keras model

    Returns
    -------
    dict
        Dictionary with trainable_params, non_trainable_params and total_params.
    """
    # Pick the backend that matches the model's origin (standalone keras vs. tf.keras).
    if str(type(model)).startswith("<class 'keras."):
        from keras import backend as K
    else:
        import tensorflow.keras.backend as K

    trainable_count = int(
        np.sum([K.count_params(w) for w in model.trainable_weights])
    )
    non_trainable_count = int(
        np.sum([K.count_params(w) for w in model.non_trainable_weights])
    )
    return {'trainable_params': trainable_count,
            'non_trainable_params': non_trainable_count,
            'total_params': (trainable_count + non_trainable_count)}
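Because the backend is chosen from the model's class, the same helper also works with tf.keras; a hedged example follows, where the toy model and the printed numbers are illustrative only.

# Hypothetical usage with a tf.keras model.
import tensorflow as tf

model = tf.keras.Sequential([
    tf.keras.layers.Dense(32, activation='relu', input_shape=(8,)),
    tf.keras.layers.Dense(1),
])
print(compute_trainable_params(model))
# e.g. {'trainable_params': 321, 'non_trainable_params': 0, 'total_params': 321}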
Example 8: customLoss
# Required import: from keras import backend [as alias]
# Or: from keras.backend import count_params [as alias]
from keras import backend as K

def customLoss(yTrue, yPred):
    # Log-space MSE with a size-normalized correction when the target's last dimension is statically known; otherwise a plain absolute-error sum.
    if yTrue.shape[1] != None:
        return (K.mean(K.square(K.log(yTrue) - K.log(yPred)), axis=-1)
                - (0.5 / pow(K.count_params(yTrue), 2)) * K.sum(K.log(yTrue) - K.log(yPred), axis=-1))
    else:
        return K.sum(K.abs(yPred - yTrue), axis=-1)
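Because K.count_params(yTrue) needs a fully defined static shape, a minimal symbolic smoke test might look like this; the placeholder shapes are chosen arbitrarily for illustration.

# Hypothetical smoke test: build the loss tensor on placeholders with a fully
# defined shape, since K.count_params(yTrue) requires a static element count.
y_true = K.placeholder(shape=(8, 4))
y_pred = K.placeholder(shape=(8, 4))
loss = customLoss(y_true, y_pred)  # symbolic tensor; evaluate it with K.function if needed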