ktl014
7/28/2017 - 4:36 PM

classification

1. Copy the image data into the memory allocated for the net
#  net.blobs['data'].data[...] = transformed_image
#  deploy.blobs['data'].data[:batch_size] = batch

2. Run forward propagation
# output = net.forward()

3. Compute the output probability vector
# probs.append(np.copy(deploy.blobs['prob'].data[:batch_size, :]))
# output_prob = output['prob'][0]

4. Make a prediction
#     print('predicted class is:', output_prob.argmax())
#     print('output label:', labels[output_prob.argmax()])
- Sort the top five predictions from the softmax output
#    top_inds = output_prob.argsort()[::-1][:5] # reverse sort and take the five largest items
#    print('probabilities and labels')
#    prob_lbl = list(zip(output_prob[top_inds], labels[top_inds]))
#    print(prob_lbl)
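
The batched classification below assumes the usual setup has already run earlier in
the script (a minimal sketch; the set_mode_cpu call and placeholder paths are
assumptions, not part of the original code):
#  import os, sys
#  import numpy as np
#  import matplotlib.pyplot as plt
#  import caffe
#  caffe.set_mode_cpu()
#  deploy_proto = ...       # path to the deploy .prototxt
#  trained_weights = ...    # path to the trained .caffemodel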
    deploy = caffe.Net(deploy_proto, caffe.TEST, weights=trained_weights)
    probs = []
    nSmpl = len(images)
    for i in range(0, len(images), 25):

        # Load input and configure preprocessing
        batch = [prep_image(img) for img in images[i:i+25]]
        batch_size = len(batch)
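        # prep_image (defined elsewhere in this script) is assumed to return a
        # preprocessed array matching the net's input shape; the final batch may
        # hold fewer than 25 images, which batch_size accounts for.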

        # Load the batch into the data layer
        deploy.blobs['data'].data[:batch_size] = batch

        # Begin forward propagation
        deploy.forward()

        # Compute output probability vector from each image
        probs.append(np.copy(deploy.blobs['prob'].data[:batch_size, :]))    # Note np.copy. Otherwise, next forward() step will replace memory
        if i % 1000 == 0:
            print('Samples computed:', i, '/', nSmpl)
            sys.stdout.flush()

    print('probs list length:', len(probs))
    print('probs element type:', type(probs[0]))
    print(probs[0])

    probs = np.concatenate(probs, 0)
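    # probs is now an (nSmpl, num_classes) array with one softmax row per image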

    print('probs shape after concatenate:', probs.shape)
    print(probs[0, :], type(probs[0, 0]))

    # Compute accuracy
    predictions = probs.argmax(1)
    gtruth = np.array(labels)
    total_accu = (predictions == gtruth).mean()*100
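    # Worked example with hypothetical values: predictions = [281, 282, 151] against
    # gtruth = [281, 282, 207] gives (predictions == gtruth).mean() = 2/3,
    # i.e. a total accuracy of ~66.7%.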

    print('predictions shape:', predictions.shape)
    print(predictions[0:25])
    print('Total Accuracy', total_accu)
    #=============================================================================#
    #                                                                             #
    # CPU Classification                                                          #
    #                                                                             #
    #=============================================================================#

    net.blobs['data'].reshape(50,   # batch size
                              3,    # 3-channel (BGR) images
                              227, 227) # image size is 227x227
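
    # The 'transformer' used below is assumed to be the standard Caffe tutorial
    # preprocessor, set up earlier along these lines (a sketch; the mean-file
    # path is an assumption):
    # mu = np.load(caffe_root + '/python/caffe/imagenet/ilsvrc_2012_mean.npy').mean(1).mean(1)
    # transformer = caffe.io.Transformer({'data': net.blobs['data'].data.shape})
    # transformer.set_transpose('data', (2, 0, 1))      # HWC -> CHW
    # transformer.set_mean('data', mu)                  # subtract the per-channel dataset mean
    # transformer.set_raw_scale('data', 255)            # rescale [0, 1] -> [0, 255]
    # transformer.set_channel_swap('data', (2, 1, 0))   # RGB -> BGR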

    image = caffe.io.load_image(caffe_root + '/examples/images/cat.jpg')
    transformed_image = transformer.preprocess('data',image)
    plt.imshow(image)

    # Copy the image data into the memory allocated for the net
    net.blobs['data'].data[...] = transformed_image

    # Perform classification
    output = net.forward()
    output_prob = output['prob'][0] # the output probability vector for the first image in the batch

    print('least likely class is:', output_prob.argmin())
    print('predicted class is:', output_prob.argmax())
    # predicted class is: 281

    labels_file = caffe_root + '/data/ilsvrc12/synset_words.txt'
    if not os.path.exists(labels_file):
        print("File not found")
        return

    labels = np.loadtxt(labels_file,str,delimiter='\t')
    print('least likely label:', labels[output_prob.argmin()])
    print('output label:', labels[output_prob.argmax()])
    # output label: n02123045 tabby, tabby cat

    # Sort the top five predictions from the softmax output
    top_inds = output_prob.argsort()[::-1][:5] # reverse sort and take the five largest items
    print('probabilities and labels')
    prob_lbl = list(zip(output_prob[top_inds], labels[top_inds]))
    print(prob_lbl)
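    # prob_lbl is a list of (probability, 'synset_id label') pairs ordered from most
    # to least likely; its first entry should match the output label printed above.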