Caffe-DeepBinaryCode
Caffe-DeepBinaryCode copied to clipboard
Python code for generating binary codes
@kevinlin311tw Hi Kevin, I have written a small Python script to generate binary codes. It may be useful to anyone working in Python:
#coding=utf-8
import numpy as np
import sys,os
import matplotlib
matplotlib.use('Agg')
import caffe
import time
import cv2
# File names resolved relative to the model directory passed to binaryCodesGenerator.
# Trained Caffe weights snapshot (80k-iteration checkpoint).
MODEL = "snapshots__iter_80000.caffemodel"
# Network definition used at inference time.
PROTO = "deploy.prototxt"
# Image mean saved as a NumPy array; reduced to a per-channel mean before use.
MEAN = "mean.npy"
class binaryCodesGenerator(object):
    """Generate binary hash codes for images with a trained Caffe network.

    Loads a deploy network plus trained weights from ``modelDir`` and exposes
    :meth:`hashing`, which maps an encoded image buffer to a 0/1 code vector
    by thresholding the 'encode_neuron' layer activations at 0.5.
    """

    def __init__(self, gpuid, modelDir):
        """
        Args:
            gpuid: CUDA device index to run inference on.
            modelDir: directory containing MODEL, PROTO and MEAN files.
        """
        self.gpuid = gpuid
        self.model = os.path.join(modelDir, MODEL)
        self.proto = os.path.join(modelDir, PROTO)
        self.mean = os.path.join(modelDir, MEAN)
        self.initcaffe()

    def initcaffe(self):
        """Initialize the Caffe network and the input preprocessor."""
        caffe.set_device(self.gpuid)
        caffe.set_mode_gpu()
        self.net = caffe.Net(self.proto, self.model, caffe.TEST)
        # Warm-up forward pass so blob shapes are materialized.
        self.net.forward()
        self.transformer = caffe.io.Transformer(
            {'data': self.net.blobs['data'].data.shape})
        # HWC -> CHW, as Caffe expects channel-first input.
        self.transformer.set_transpose('data', (2, 0, 1))
        # Collapse the saved mean to one value per channel.
        self.transformer.set_mean('data', np.load(self.mean).mean(1).mean(1))
        # Transformer input is expected in [0,1]; rescale to [0,255].
        self.transformer.set_raw_scale('data', 255)
        # RGB -> BGR channel order for the Caffe model.
        self.transformer.set_channel_swap('data', (2, 1, 0))

    def hashing(self, image):
        """Compute the binary code for one image.

        Args:
            image: raw encoded image bytes (e.g. the content of a JPEG file).

        Returns:
            Integer 0/1 ndarray: the thresholded 'encode_neuron' activations.

        Raises:
            ValueError: if the buffer cannot be decoded as an image.
        """
        # np.fromstring is deprecated for binary data; frombuffer is the
        # supported zero-copy equivalent.
        array = np.frombuffer(image, dtype='uint8')
        im = cv2.imdecode(array, 1)
        if im is None:
            # Fail loudly instead of crashing later with an opaque
            # AttributeError on a NoneType.
            raise ValueError("could not decode image buffer")
        # Match caffe.io.load_image conventions: float in [0,1], RGB order
        # (cv2 decodes BGR); the transformer undoes both for the network.
        im = im / 255.
        im = im[:, :, (2, 1, 0)]
        self.net.blobs['data'].data[...] = self.transformer.preprocess('data', im)
        self.net.forward()
        # Activations of the latent (sigmoid) encoding layer.
        feat = self.net.blobs['encode_neuron'].data[0]
        # Binarize: activation > 0.5 -> bit 1, else 0.
        binary_codes = (feat > 0.5).astype(int)
        return binary_codes
def _hash_image_file(generator, img_path):
    """Read an image file from disk and return its binary hash code."""
    with open(img_path, 'rb') as infile:
        return generator.hashing(infile.read())


if __name__ == "__main__":
    gpuID = 5
    bCG = binaryCodesGenerator(gpuID, '/raid/yuanyong/imagenet/hashing_release')
    binary_codes_1 = _hash_image_file(bCG, '009_0001.jpg')
    binary_codes_2 = _hash_image_file(bCG, '056_0002.jpg')
    # Hamming distance = number of bit positions where the codes differ.
    hamming_dis = np.count_nonzero(binary_codes_1 != binary_codes_2)
    # Parenthesized print works under both Python 2 and Python 3
    # (the original print statement is a SyntaxError on Python 3).
    print("hamming distance: %d" % hamming_dis)
Thank you for this!