@fehlfarbe
Last active February 1, 2019 20:32
Some simple template matching with ORB
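To run it, pass two positional arguments: the full image and the cropped region to locate in it, e.g. python templatematching.py full.jpg crop.jpg (the script filename is assumed here; the gist does not show it). The script prints the match distances, projected corners, center point, zoom factor and rotation angle, appends them to results.txt, and displays the matches and the located region in an OpenCV window.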
import sys
import cv2
import numpy as np
from math import sqrt, acos
from optparse import OptionParser
#FULL = "full.jpg"
#CROP = "crop.jpg"
#FULL = "pleiades_hyades_50percent.jpg"
#CROP = "pleiades_hyades_50percent_crop.jpg"
#FULL = "baum0vbhs9m8xf.jpg"
#CROP = "baum0vbhs9m8xf_crop.jpg"
MATCH_COUNT = 10
if __name__ == '__main__':
    parser = OptionParser()
    (options, args) = parser.parse_args()

    if len(args) != 2:
        print("Usage: templatematching image1 crop")
        sys.exit(0)

    full = cv2.imread(args[0])  # full scene (train image)
    crop = cv2.imread(args[1])  # cropped region (query image)
    img2 = cv2.cvtColor(full, cv2.COLOR_BGR2GRAY)
    img1 = cv2.cvtColor(crop, cv2.COLOR_BGR2GRAY)

    # Initiate ORB detector
    orb = cv2.ORB_create()
    print(orb)

    # find the keypoints and descriptors with ORB
    kp1, des1 = orb.detectAndCompute(img1, None)
    kp2, des2 = orb.detectAndCompute(img2, None)
    # create BFMatcher object (Hamming norm for binary ORB descriptors)
    bf = cv2.BFMatcher(cv2.NORM_HAMMING, crossCheck=True)

    # Match descriptors.
    matches = bf.match(des1, des2)

    # Sort them in the order of their distance and keep the best MATCH_COUNT.
    matches = sorted(matches, key=lambda x: x.distance)[:MATCH_COUNT]
    for m in matches:
        print(m.distance)

    if len(matches) >= MATCH_COUNT:
        src_pts = np.float32([kp1[m.queryIdx].pt for m in matches]).reshape(-1, 1, 2)
        dst_pts = np.float32([kp2[m.trainIdx].pt for m in matches]).reshape(-1, 1, 2)

        # estimate the homography that maps the crop into the full image
        M, mask = cv2.findHomography(src_pts, dst_pts, cv2.RANSAC, 5.0)
        matchesMask = mask.ravel().tolist()

        # project the crop's corners and center point into the full image
        h, w = img1.shape
        pts = np.float32([[0, 0], [0, h - 1], [w - 1, h - 1], [w - 1, 0]]).reshape(-1, 1, 2)
        dst = cv2.perspectiveTransform(pts, M)
        mp = cv2.perspectiveTransform(np.float32([[w / 2.0, h / 2.0]]).reshape(-1, 1, 2), M)[0][0]
        cv2.circle(img2, (int(mp[0]), int(mp[1])), 5, 255, -1)
        #img2 = cv2.polylines(img2, [np.int32(dst)], True, 255, 3, cv2.LINE_AA)
    else:
        print("Not enough matches! (minimum is %d matches)" % MATCH_COUNT)
        sys.exit()
    # Draw matches.
    img2 = cv2.polylines(img2, [np.int32(dst)], True, 255, 5, cv2.LINE_AA)
    img3 = cv2.drawMatches(img1, kp1, img2, kp2, matches, None, flags=2)
    result = cv2.polylines(full, [np.int32(dst)], True, (255, 255, 255), 5, cv2.LINE_AA)
    cv2.circle(result, (int(mp[0]), int(mp[1])), 2, (255, 255, 255), -1)
    cv2.circle(result, (int(mp[0]), int(mp[1])), 10, (255, 255, 255), 2)

    ###########
    # calculate info
    ###########
    # vector along the upper edge of the projected crop
    vec = dst[3][0] - dst[0][0]
    #print(sqrt(np.dot(vec, vec)))
    # zoom factor: full image width / width of the projected crop
    zoom = img2.shape[1] / sqrt(np.dot(vec, vec))
    # unsigned angle between the upper edge and the x axis
    angle = acos(np.dot(vec, np.array([1, 0])) / sqrt(vec[0]**2 + vec[1]**2))

    print(args[0], args[1])
    print("middlepoint:", mp)
    print("zoom:", zoom)
    print("angle:", np.rad2deg(angle))
    print("corners:")
    print("\n".join([str(i[0]) for i in dst]))

    ### write data to file
    with open("results.txt", "a") as f:
        f.write("\n")
        f.write("%s %s\n" % (args[0], args[1]))
        f.write("middlepoint: %s\n" % str(mp))
        f.write("zoom: %.2f\n" % zoom)
        f.write("angle: %.2f\n" % np.rad2deg(angle))
        f.write("corners: %s\n" % ", ".join([str(i[0]) for i in dst]))

    # show the matches and the located region
    cv2.namedWindow("image", cv2.WINDOW_NORMAL)
    cv2.imshow('image', img3)
    cv2.waitKey(0)
    cv2.imshow('image', result)
    cv2.waitKey(0)
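For reference, the crossCheck-plus-top-N filtering used above is only one way to select ORB matches. A common alternative from the OpenCV feature-matching tutorial is knnMatch with Lowe's ratio test; the following is a minimal sketch (not part of the original gist), assuming the same des1 and des2 as computed above and a 0.75 ratio threshold:

# Sketch of ratio-test matching (assumes des1/des2 from the script above).
# crossCheck must be disabled when using knnMatch with k=2.
bf = cv2.BFMatcher(cv2.NORM_HAMMING)
knn_matches = bf.knnMatch(des1, des2, k=2)

good = []
for pair in knn_matches:
    # keep a match only if it is clearly better than the second-best candidate
    if len(pair) == 2 and pair[0].distance < 0.75 * pair[1].distance:
        good.append(pair[0])

The good list could then replace matches in the homography step, provided at least four correspondences survive the test.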