Counting network parameters in TensorFlow: the total number of parameters in a network can be obtained by iterating over tf.trainable_variables().
This post collects seven methods. They differ only in minor details and all produce the same result.
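All seven snippets below assume that TensorFlow 1.x is imported as tf, NumPy as np, and that some trainable variables already exist in the default graph. A minimal setup sketch (the variable names and shapes here are made up purely for illustration):

import tensorflow as tf
import numpy as np

# Hypothetical toy model: one hidden dense layer, 784 -> 256 -> 10.
w1 = tf.get_variable("w1", shape=[784, 256])
b1 = tf.get_variable("b1", shape=[256])
w2 = tf.get_variable("w2", shape=[256, 10])
b2 = tf.get_variable("b2", shape=[10])
# Expected total: 784*256 + 256 + 256*10 + 10 = 203,530 trainable parameters.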
def count1():
    total_parameters = 0
    for variable in tf.trainable_variables():
        # shape is an array of tf.Dimension
        shape = variable.get_shape()
        # print(shape)
        # print(len(shape))
        variable_parameters = 1
        for dim in shape:
            # print(dim)
            variable_parameters *= dim.value
            # print(variable_parameters)
        total_parameters += variable_parameters
    print(total_parameters)
def count2():
    print(np.sum([np.prod(v.get_shape().as_list()) for v in tf.trainable_variables()]))
def get_nb_params_shape(shape):
    '''
    Computes the total number of params for a given shape.
    Works for any number of dimensions, e.g. [D,F] or [W,H,C]: computes D*F or W*H*C.
    '''
    nb_params = 1
    for dim in shape:
        nb_params = nb_params * int(dim)
    return nb_params
def count3():
    tot_nb_params = 0
    for trainable_variable in tf.trainable_variables():
        shape = trainable_variable.get_shape()  # e.g. [D,F] or [W,H,C]
        current_nb_params = get_nb_params_shape(shape)
        tot_nb_params = tot_nb_params + current_nb_params
    print(tot_nb_params)
def count4():
    from functools import reduce  # built in on Python 2, needs this import on Python 3
    size = lambda v: reduce(lambda x, y: x * y, v.get_shape().as_list())
    n = sum(size(v) for v in tf.trainable_variables())
    # print("Model size: %dK" % (n / 1000,))
    print(n)
def count5():
    total_parameters = 0
    # iterating over all trainable variables
    for variable in tf.trainable_variables():
        local_parameters = 1
        shape = variable.get_shape()  # getting the shape of the variable
        for i in shape:
            local_parameters *= i.value  # multiplying dimension values
        total_parameters += local_parameters
    print(total_parameters)
def count6():
    total_parameters = 0
    for variable in tf.trainable_variables():
        variable_parameters = 1
        for dim in variable.get_shape():
            variable_parameters *= dim.value
        total_parameters += variable_parameters
    print("Total number of trainable parameters: %d" % total_parameters)
def count7():
    from functools import reduce
    from operator import mul
    num_params = 0
    for variable in tf.trainable_variables():
        shape = variable.get_shape()
        num_params += reduce(mul, [dim.value for dim in shape], 1)
    print(num_params)
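A quick usage check, assuming the hypothetical toy variables from the setup sketch above and TensorFlow 1.x (where tf.Dimension has a .value attribute): every counter reports the same total, as claimed.

count1()  # 203530
count2()  # 203530
count3()  # 203530
count4()  # 203530
count5()  # 203530
count6()  # Total number of trainable parameters: 203530
count7()  # 203530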
