
Trying to use a CNN for image recognition

I'm a beginner and I don't know why the results look essentially random. What confuses me most is that the printed loss value is exactly the same every time, while the accuracy value keeps changing. I've been debugging for several days and still have no clue. Please help.

(two screenshots attached in the original post are omitted here)
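A note on the training loop below: the training step and the loss/accuracy prints happen in separate sess.run calls, and because the input comes from a queue-based pipeline, every sess.run dequeues a fresh batch, so the printed loss and accuracy are not computed on the data the step just trained on. A minimal sketch of fetching everything in one call, assuming the same opt, loss, accuracy and train_summary tensors defined in train() below:

while not coord.should_stop():
    if i % 100 == 0:
        # One run: train on a batch and report loss/accuracy for that same batch.
        _, train_summ, loss_num, ac = sess.run([opt, train_summary, loss, accuracy])
        print("step %d, loss %g, training accuracy %g" % (i, loss_num, ac))
    else:
        _, train_summ = sess.run([opt, train_summary])
    train_writer.add_summary(train_summ, i)
    i = i + 1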

import tensorflow as tf
# tr is the poster's own data-reading module (not shown in the question);
# it is expected to provide generate_filenamequeue() and get_batch().
import tr

def conv2d(x,w):
    return tf.nn.conv2d(x,w,strides=[1,1,1,1],padding='SAME')

def max_pool_2x2(x):
    return tf.nn.max_pool(x,ksize=[1,2,2,1],strides=[1,2,2,1],padding='SAME')

class network(object):
    def __init__(self):
        with tf.variable_scope("weights") as scope_weight:
            self.weights={
                "conv1":tf.get_variable("conv1",[3,2,1,32],
                                        initializer=tf.contrib.layers.xavier_initializer_conv2d()),
                "conv2":tf.get_variable("conv2",[3,2,32,64],
                                        initializer=tf.contrib.layers.xavier_initializer_conv2d()),
                "fc1":tf.get_variable("fc1",[6*4*64,1024],
                                        initializer=tf.contrib.layers.xavier_initializer_conv2d()),
                "fc2":tf.get_variable("fc2",[1024,34],
                                        initializer=tf.contrib.layers.xavier_initializer_conv2d()),
                }
            tf.summary.histogram("conv1_weight",self.weights["conv1"])
            tf.summary.histogram("conv2_weight",self.weights["conv2"])
            tf.summary.histogram("fc1_weight",self.weights["fc1"])
            tf.summary.histogram("fc2_weight",self.weights["fc2"])
            scope_weight.reuse_variables()
        with tf.variable_scope("bias")as scope_bias:
            self.bias={
                "conv1":tf.get_variable("conv1",[32],
                                        initializer=tf.constant_initializer(0.1)),
                "conv2":tf.get_variable("conv2",[64],
                                        initializer=tf.constant_initializer(0.1)),
                "fc1":tf.get_variable("fc1",[1024],
                                        initializer=tf.constant_initializer(0.1)),
                "fc2":tf.get_variable("fc2",[34],
                                        initializer=tf.constant_initializer(0.1))
                }
            tf.summary.histogram("conv1_bias",self.bias["conv1"])
            tf.summary.histogram("conv2_bias",self.bias["conv2"])
            tf.summary.histogram("fc1_bias",self.bias["fc1"])
            tf.summary.histogram("fc2_bias",self.bias["fc2"])
            scope_bias.reuse_variables()
    def inference(self,image):
        h_conv1=tf.nn.relu(conv2d(image,self.weights["conv1"])+self.bias["conv1"])
        h_pool1=max_pool_2x2(h_conv1)
        h_conv2=tf.nn.relu(conv2d(h_pool1,self.weights["conv2"])+self.bias["conv2"])
        h_pool2=max_pool_2x2(h_conv2)
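        # 24x16 input -> two 2x2 max-pools -> a 6x4 feature map with 64 channels,
        # hence the flattened size of 6*4*64 below.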
        h_pool2_flat=tf.reshape(h_pool2,[-1,6*4*64])
        h_fc1=tf.nn.relu(tf.matmul(h_pool2_flat,self.weights["fc1"])+self.bias["fc1"])
        y_conv=tf.nn.relu(tf.matmul(h_fc1,self.weights["fc2"])+self.bias["fc2"])
        return y_conv

    def softmax_loss(self,predicts,labels):
        loss=tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(
            logits=predicts,labels=labels))
        self.cost=loss
        return self.cost

    def optimizer(self,loss,lr=0.01):
        train_optimizer=tf.train.AdamOptimizer(lr).minimize(loss)
        return train_optimizer
    
        
def train(batch_size):
    #testnamequeue=tr.generate_filenamequeue(['test24_16.tfrecord'])
    #[testimages,testlabels]=tr.get_batch(testnamequeue,2000)
    
    trainnamequeue=tr.generate_filenamequeue(['train24_16.tfrecord'])
    [trainimages,trainlabels]=tr.get_batch(trainnamequeue,batch_size)
    x=tf.reshape(trainimages,[-1,24,16,1])
    img=tf.summary.image('input',x,batch_size)
    y_=tf.cast(trainlabels,tf.float32)
    y_=tf.reshape(y_,[batch_size,34])
    with tf.variable_scope("train")as scope_train:
        net=network()
        y_train=net.inference(x)
        loss=net.softmax_loss(y_train,y_)
        opt=net.optimizer(loss)
        tf.summary.scalar("loss",loss)
        correct_prediction=tf.equal(tf.argmax(y_,1),tf.argmax(y_train,1))
        accuracy=tf.reduce_mean(tf.cast(correct_prediction,tf.float32))
        init_op = tf.group(tf.global_variables_initializer(),
                               tf.local_variables_initializer())
        sess=tf.InteractiveSession()
        sess.run(init_op)
        coord=tf.train.Coordinator()
        threads=tf.train.start_queue_runners(sess=sess,coord=coord)
        train_writer=tf.summary.FileWriter('train',sess.graph)
        i=0
        #train_summary=tf.summary.merge([img,loss,w_conv1_summ])
        train_summary=tf.summary.merge_all()
        try:
            while not coord.should_stop():
                [_,train_summ]=sess.run([opt,train_summary])
                train_writer.add_summary(train_summ,i)
                if i%100==0:
                    [ac,loss_num]=sess.run([accuracy,loss])
                    print(loss_num)
                    print("step %d, training accuracy %g"%(i, ac))
                i=i+1
        except tf.errors.OutOfRangeError:
            print('Done training -- epoch limit reached')
        finally:
            # When done, ask the threads to stop.
            coord.request_stop()
            train_writer.close()

        # Wait for threads to finish.
        coord.join(threads)


train(50)

1 Answer

OP, what is the tr module you are referencing here, and what do the variables it provides actually contain?
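Nothing in the question shows what tr actually does, so it is hard to tell whether get_batch returns images and labels in the shape and value range the network expects. For reference only, here is a hypothetical sketch of what a tr-style module often looks like with the TF 1.x queue pipeline; the feature keys, dtypes and image layout below are assumptions, not the poster's actual code:

import tensorflow as tf

def generate_filenamequeue(filenames, num_epochs=None):
    # A queue of input file names, cycled num_epochs times (None = forever).
    return tf.train.string_input_producer(filenames, num_epochs=num_epochs)

def get_batch(filename_queue, batch_size):
    # Read one serialized example at a time from the TFRecord file.
    reader = tf.TFRecordReader()
    _, serialized = reader.read(filename_queue)
    features = tf.parse_single_example(
        serialized,
        features={
            'image_raw': tf.FixedLenFeature([], tf.string),  # assumed key
            'label': tf.FixedLenFeature([], tf.int64),       # assumed key
        })
    # Assume a raw 24x16 single-channel uint8 image, scaled to [0, 1].
    image = tf.decode_raw(features['image_raw'], tf.uint8)
    image = tf.reshape(image, [24, 16, 1])
    image = tf.cast(image, tf.float32) / 255.0
    # Assume an integer class index turned into a 34-way one-hot label.
    label = tf.one_hot(features['label'], depth=34)
    # shuffle_batch starts background threads that keep an example queue filled.
    images, labels = tf.train.shuffle_batch(
        [image, label], batch_size=batch_size,
        capacity=1000 + 3 * batch_size, min_after_dequeue=1000)
    return [images, labels]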

