This article walks through a TensorFlow implementation of a network with 2 convolutional layers and 2 fully connected layers on the MNIST data set. Hopefully it serves as a useful reference for developers facing this kind of problem; follow along with the code below.
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
"""
h=w 图片尺寸
f=卷积核
p=padding 边界填补 ‘SAME’补充
s=strides 每一次走的步长
(h-f+2*p)/s + 1
"""
# 10 classes; each input image is 28*28 = 784 pixels, fed in flattened
n_input = 784
n_output = 10
# Load the MNIST data set
mnist = input_data.read_data_sets('data/', one_hot=True)

weights = {
    # [3,3,1,64]: 3*3 = h*w kernel size, 1 input channel, 64 feature maps
    'wc1': tf.Variable(tf.random_normal([3, 3, 1, 64], stddev=0.1)),
    'wc2': tf.Variable(tf.random_normal([3, 3, 64, 128], stddev=0.1)),
    'wd1': tf.Variable(tf.random_normal([7*7*128, 1024], stddev=0.1)),
    'wd2': tf.Variable(tf.random_normal([1024, n_output], stddev=0.1))
}
biases = {
    'bc1': tf.Variable(tf.random_normal([64], stddev=0.1)),
    'bc2': tf.Variable(tf.random_normal([128], stddev=0.1)),
    'bd1': tf.Variable(tf.random_normal([1024], stddev=0.1)),
    'bd2': tf.Variable(tf.random_normal([n_output], stddev=0.1))
}

def conv_basic(input, w, b, keepratio):
    # Reshape the flat 784-dim input back into a 28x28 single-channel image
    input_r = tf.reshape(input, shape=[-1, 28, 28, 1])
    # First convolution + ReLU + 2x2 max pooling + dropout
    conv1 = tf.nn.conv2d(input_r, w['wc1'], strides=[1, 1, 1, 1], padding='SAME')
    conv1 = tf.nn.relu(tf.nn.bias_add(conv1, b['bc1']))
    pool1 = tf.nn.max_pool(conv1, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
    pool_dr1 = tf.nn.dropout(pool1, keepratio)
    # Second convolution + ReLU + 2x2 max pooling + dropout
    conv2 = tf.nn.conv2d(pool_dr1, w['wc2'], strides=[1, 1, 1, 1], padding='SAME')
    conv2 = tf.nn.relu(tf.nn.bias_add(conv2, b['bc2']))
    pool2 = tf.nn.max_pool(conv2, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
    pool_dr2 = tf.nn.dropout(pool2, keepratio)
    # Fully connected layers
    dense1 = tf.reshape(pool_dr2, [-1, w['wd1'].get_shape().as_list()[0]])
    fc1 = tf.nn.relu(tf.add(tf.matmul(dense1, w['wd1']), b['bd1']))
    fc_dr1 = tf.nn.dropout(fc1, keepratio)
    _out = tf.add(tf.matmul(fc_dr1, w['wd2']), b['bd2'])
    out = {'input_r': input_r, 'conv1': conv1, 'pool1': pool1, 'pool_dr1': pool_dr1,
           'conv2': conv2, 'pool2': pool2, 'pool_dr2': pool_dr2, 'dense1': dense1,
           'fc1': fc1, 'fc_dr1': fc_dr1, 'out': _out}
    return out

x = tf.placeholder(tf.float32, [None, n_input])
y = tf.placeholder(tf.float32, [None, n_output])
keepratio = tf.placeholder(tf.float32)

# Use the 'out' tensor returned by the network as the prediction logits
_pred = conv_basic(x, weights, biases, keepratio)['out']
loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=_pred, labels=y))
optm = tf.train.AdamOptimizer(learning_rate=0.001).minimize(loss)
_corr = tf.equal(tf.argmax(_pred, 1), tf.argmax(y, 1))
accr = tf.reduce_mean(tf.cast(_corr, tf.float32))
init = tf.global_variables_initializer()
sess = tf.Session()
sess.run(init)

training_epochs = 15
batch_size = 16
display_step=1
for epoch in range(training_epochs):
    avg_cost = 0.
    total_batch = 10
    for i in range(total_batch):
        # Fetch the next batch_size training examples
        batch_xs, batch_ys = mnist.train.next_batch(batch_size)
        sess.run(optm, feed_dict={x: batch_xs, y: batch_ys, keepratio: 0.7})
        avg_cost += sess.run(loss, feed_dict={x: batch_xs, y: batch_ys, keepratio: 1.}) / total_batch
    if epoch % display_step == 0:
        print('Epoch: %03d/%03d loss: %.9f' % (epoch, training_epochs, avg_cost))
        # Disable dropout (keepratio = 1.0) when measuring accuracy
        train_acc = sess.run(accr, feed_dict={x: batch_xs, y: batch_ys, keepratio: 1.})
        print('Training accuracy: %.3f' % train_acc)
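Once training finishes, you can check how the model generalizes on the held-out MNIST test images. The snippet below is a minimal sketch (not part of the original code) that reuses the accr node defined above and evaluates it with dropout disabled; the test_acc name is just illustrative.

# Evaluate on the MNIST test set with dropout turned off (keepratio = 1.0)
test_acc = sess.run(accr, feed_dict={x: mnist.test.images,
                                     y: mnist.test.labels,
                                     keepratio: 1.})
print('Test accuracy: %.3f' % test_acc)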
That wraps up this article on a TensorFlow implementation of a 2-convolution, 2-fully-connected network for the MNIST data set. We hope it proves helpful to fellow programmers!