project 2 submission #1

Status: Open. Wants to merge 29 commits into master.

Commits (29)
9934295
stuff
maxthibeau Feb 1, 2019
96578cb
edge detection(sorta)
maxthibeau Feb 6, 2019
9a95bda
finished corner detection
maxthibeau Feb 6, 2019
f53c10c
commit chessboard.png
dgketchum Feb 8, 2019
3717c60
corner detection but points are filtered
maxthibeau Feb 8, 2019
500d7c8
made directory for feature matching
maxthibeau Feb 11, 2019
9611690
function needed
maxthibeau Feb 11, 2019
10f6535
stuff for things
maxthibeau Feb 11, 2019
2393cde
a nondescript bug
maxthibeau Feb 11, 2019
c205270
Merge remote-tracking branch 'origin/master'
dgketchum Feb 11, 2019
bd5b84a
got feature matching to work, codes kinda ugly
maxthibeau Feb 11, 2019
4e9bbc7
outside photos initial commit
dgketchum Feb 11, 2019
889c7a5
rename package to avoid namespace problem, add __init__
dgketchum Feb 11, 2019
2722e43
david's attempt to feature match using Max's harris and local maxima …
dgketchum Feb 11, 2019
99e370a
funny behavior grouping of points in one part of the image
dgketchum Feb 11, 2019
842a9fe
add optional evaluation fraction (rather than hardcoded 1/20)
dgketchum Feb 11, 2019
b0c5aa7
add optional evaluation fraction (rather than hardcoded 1/20)
dgketchum Feb 11, 2019
4bb15b7
getting there
dgketchum Feb 11, 2019
715c4c6
add evaluation_fraction kwarg
dgketchum Feb 13, 2019
fd16275
fix mistake
dgketchum Feb 13, 2019
c426085
stuff
maxthibeau Feb 13, 2019
02b1f32
huh?
maxthibeau Feb 13, 2019
3ac14c8
pls
maxthibeau Feb 13, 2019
98feab7
cleanup
maxthibeau Feb 13, 2019
8aa02a4
stuff
maxthibeau Feb 13, 2019
1acbf39
tried RANSAC
maxthibeau Feb 22, 2019
348ea0a
what might be the final attempt
maxthibeau Feb 25, 2019
09df128
finished RANSAC
maxthibeau Feb 25, 2019
b860d72
Update README.md
maxthibeau Jul 18, 2020
68 changes: 68 additions & 0 deletions RANSAC/RANSAC.py
@@ -0,0 +1,68 @@
import numpy as np
import matplotlib.pyplot as plt
import skimage.transform as skt
from homography import *

def apply_homography(X, H):
Xprime = (H.dot(X.T)).T
Xprime/=Xprime[:,2][:,np.newaxis]
return Xprime

def RANSAC(number_of_iterations,matches,n,r,d):
H_best = np.array([[1,0,0],[0,1,0],[0,0,1]])
list_of_inliers = np.zeros((2, d, 2))
# append a homogeneous coordinate (w = 1) to every match point
matches = np.insert(matches, 2, 1, axis=2)
# swap the coords so they're (x, y) instead of (y, x); do the swap once, before iterating
matches[:,:,[0, 1]] = matches[:,:,[1, 0]]
for i in range(number_of_iterations):
# 1. Select a random sample of length n from the matches
indices = np.random.choice(np.arange(0, matches.shape[1]), size=n, replace=False)
sample = matches[:,indices,:]
# 2. Compute a homography based on these points using the methods given above
H = generate_homography(sample[0], sample[1], n=4)
# 3. Apply this homography to the remaining points that were not randomly selected
pred_location = apply_homography(matches[0], H)
# 4. Compute the residual between observed and predicted feature locations
residuals = np.sqrt((matches[1,:,0] - pred_location[:,0])**2 + (matches[1,:,1] - pred_location[:,1])**2)
# 5. Flag predictions that lie within a predefined distance r from observations as inliers
inliers = matches[:,residuals<=r]
# 6. If number of inliers is greater than the previous best
# and greater than a minimum number of inliers d,
# 7. update H_best
# 8. update list_of_inliers
if (inliers.shape[1] > list_of_inliers.shape[1]):
H_best = H
list_of_inliers = inliers
return H_best, list_of_inliers[:,:,:2].astype(int)

I_1 = plt.imread('photo_1.jpg')
I_2 = plt.imread('photo_2.jpg')

I_1 = I_1.mean(axis=2)
I_2 = I_2.mean(axis=2)

corners_1 = harris_corner_detection(I_1)
corners_2 = harris_corner_detection(I_2)

descriptors_1 = extract_descriptors(I_1, corners_1, 21)
descriptors_2 = extract_descriptors(I_2, corners_2, 21)

matching_indices = get_matches(descriptors_1, descriptors_2 , .5).astype(int)

matching_corners_1 = corners_1[matching_indices[:,0]]
matching_corners_2 = corners_2[matching_indices[:,1]]

matches = np.array([matching_corners_1, matching_corners_2])

H_best, inliers = RANSAC(10000, matches, 10, 30, 4)

# Create a projective transform based on the homography matrix $H$
proj_trans = skt.ProjectiveTransform(H_best)

# Warp the image into image 1's coordinate system
I_2_transformed = skt.warp(I_2,proj_trans)

plt.imshow(I_1)
plt.imshow(I_2_transformed, alpha=.5)
plt.show()
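Reviewer note: the homography application and inlier test inside RANSAC() above follow the usual homogeneous formulation. Writing $\tilde{x}_i = (x_i, y_i, 1)^\top$ for a keypoint in image 1 and $x'_i$ for its matched observation in image 2 (the notation is mine, not from the code), the steps are

$$
\tilde{x}'_i = H\,\tilde{x}_i, \qquad
\hat{x}'_i = \frac{1}{\tilde{x}'_{i,3}}\begin{pmatrix}\tilde{x}'_{i,1}\\ \tilde{x}'_{i,2}\end{pmatrix}, \qquad
r_i = \lVert \hat{x}'_i - x'_i \rVert_2, \qquad
\text{inlier} \iff r_i \le r .
$$

A candidate H only replaces H_best when it yields more inliers than the current best, and list_of_inliers starts out with d placeholder entries, so d effectively acts as the minimum inlier count mentioned in the comments.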
68 changes: 68 additions & 0 deletions RANSAC/RANSAC.py~
@@ -0,0 +1,68 @@
import numpy as np
import matplotlib.pyplot as plt
import skimage.transform as skt
from homography import *

def apply_homography(X, H):
Xprime = (H.dot(X.T)).T
Xprime/=Xprime[:,2][:,np.newaxis]
return Xprime

def RANSAC(number_of_iterations,matches,n,r,d):
H_best = np.array([[1,0,0],[0,1,0],[0,0,1]])
list_of_inliers = np.zeros((2, d, 2))
matches = np.insert(matches, 2, 1, axis=2)
for i in range(number_of_iterations):
# 1. Select a random sample of length n from the matches
indices = np.random.choice(np.arange(0, matches.shape[1]), size=n, replace=False)
sample = matches[:,indices,:]
# for 3D translation
# 2. Compute a homography based on these points using the methods given above
H = generate_homography(sample[0], sample[1], n=4)
# 3. Apply this homography to the remaining points that were not randomly selected
pred_location = apply_homography(matches[0], H)
# 4. Compute the residual between observed and predicted feature locations
residuals = np.sqrt((matches[1,:,0] - pred_location[:,0])**2 + (matches[1,:,1] - pred_location[:,1])**2)
# 5. Flag predictions that lie within a predefined distance r from observations as inliers
inliers = matches[:,residuals<=r]
# 6. If number of inliers is greater than the previous best
# and greater than a minimum number of inliers d,
# 7. update H_best
# 8. update list_of_inliers
if (inliers.shape[1] > list_of_inliers.shape[1]):
H_best = H
list_of_inliers = inliers
return H_best, list_of_inliers

I_1 = plt.imread('photo_1.jpg')
I_2 = plt.imread('photo_2.jpg')

I_1 = I_1.mean(axis=2)
I_2 = I_2.mean(axis=2)

corners_1 = harris_corner_detection(I_1)
corners_2 = harris_corner_detection(I_2)

descriptors_1 = extract_descriptors(I_1, corners_1, 21)
descriptors_2 = extract_descriptors(I_2, corners_2, 21)

matching_indices = get_min_descriptor_errors(descriptors_1, descriptors_2 , .7).astype(int)

matching_corners_1 = corners_1[matching_indices[:,0]]
matching_corners_2 = corners_2[matching_indices[:,1]]

matches = np.array([matching_corners_1, matching_corners_2])

H_best, inliers = RANSAC(1000, matches, 10, 30, 1)

H_best = np.random.rand(3,3)
# Create a projective transform based on the homography matrix $H$
proj_trans = skt.ProjectiveTransform(H_best)

# Warp the image into image 1's coordinate system
I_2_transformed = skt.warp(I_2,proj_trans)
print(I_2_transformed)
print(I_2)
plt.imshow(I_2)
plt.imshow(I_2_transformed, alpha=1)
plt.show()
68 changes: 68 additions & 0 deletions RANSAC/feature_matching.py
@@ -0,0 +1,68 @@
import numpy as np
from keypoint_detection import *

def pad_with(vector, pad_width, iaxis, kwargs):
pad_value = kwargs.get('padder', 0)
vector[:pad_width[0]] = pad_value
vector[-pad_width[1]:] = pad_value
return vector

def get_matches(descriptors_1, descriptors_2, r):
matches = np.zeros((descriptors_1.shape[0], 3))
for i in range(0, descriptors_1.shape[0]):
error = float('inf')
matches[i][0] = i
for j in range(0, descriptors_2.shape[0]):
test_error = np.sum((descriptors_1[i] - descriptors_2[j])**2)
if (test_error < error):
matches[i][1] = j
# flag the match as robust if it improves on the previous best error by at least the ratio r
matches[i][2] = test_error < error * r
error = test_error
return matches[matches[:,2] == 1][:,:2].astype(int)

def extract_descriptors(I, corners, l):
descriptors = np.zeros((corners.shape[0], l // 2 * 2, l // 2 * 2))
I = np.pad(I, l // 2 + 1, pad_with)
for i in range(0, corners.shape[0]):
# corners are stored as (row, col); offset into the padded image
x = corners[i][0] + l // 2
y = corners[i][1] + l // 2
descriptor = I[int(x-l/2):int(x+l/2),int(y-l/2):int(y+l/2)]
descriptors[i] = descriptor
return descriptors

'''
I_1 = plt.imread('photo_1.jpg')
I_2 = plt.imread('photo_2.jpg')

I_1 = I_1.mean(axis=2)
I_2 = I_2.mean(axis=2)

corners_1 = harris_corner_detection(I_1)
corners_2 = harris_corner_detection(I_2)

descriptors_1 = extract_descriptors(I_1, corners_1, 21)
descriptors_2 = extract_descriptors(I_2, corners_2, 21)

img = np.zeros((I_1.shape[0], I_1.shape[1] * 2))
img[:,I_1.shape[1]:] = I_2
img[:,:I_1.shape[1]] = I_1

matches = get_matches(descriptors_1, descriptors_2 ,.09)

for match in matches:
x1 = corners_1[match[0]][1]
y1 = corners_1[match[0]][0]

x2 = I_2.shape[1] + corners_2[match[1]][1]
y2 = corners_2[match[1]][0]

plt.plot([x1, x2], [y1, y2])

plt.scatter(corners_1[:,1], corners_1[:,0], c='blue')
plt.scatter(I_1.shape[1] + corners_2[:,1], corners_2[:,0], c='white')
plt.imshow(img)
plt.show()
'''
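Reviewer note: a minimal sketch of exercising get_matches on synthetic descriptors, assuming feature_matching.py (and the keypoint_detection module it imports) is importable from the working directory. The descriptor count, the 21x21 shape, and the threshold 0.5 are illustrative choices, not values taken from the submission; get_matches only looks at the summed squared difference, so any descriptor shape works.

import numpy as np
from feature_matching import get_matches

rng = np.random.default_rng(0)
# five fake 21x21 descriptors standing in for image 1
d1 = rng.random((5, 21, 21))
# a shuffled, lightly perturbed copy standing in for image 2
perm = np.array([2, 0, 4, 1, 3])
d2 = d1[perm] + 0.01 * rng.random((5, 21, 21))
# each returned row is (index into d1, index into d2) for pairs that
# passed the ratio-style test with r = 0.5
pairs = get_matches(d1, d2, 0.5)
print(pairs)

With this setup the recovered pairs should invert the permutation above, i.e. d1[i] is matched to the d2[j] with perm[j] == i.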

Binary file added RANSAC/feature_matching.pyc
Binary file not shown.
60 changes: 60 additions & 0 deletions RANSAC/homography.py
@@ -0,0 +1,60 @@
import numpy as np
import matplotlib.pyplot as plt
from feature_matching import *

def make_A_row(corner_1, corner_2):
u1 = corner_1[0]
uprime = corner_2[0]
v1 = corner_1[1]
vprime = corner_2[1]
A_row = np.array([
[0,0,0,-u1,-v1,-1,vprime*u1,vprime*v1,vprime],
[u1,v1,1,0,0,0,-uprime*u1,-uprime*v1,-uprime]
])
return A_row

def generate_A(corners_1, corners_2, n):
A = np.zeros((2*n, 9))
for i in range(0, 2*n, 2):
A[i:i+2] = make_A_row(corners_1[i // 2], corners_2[i // 2])
return A

def generate_homography(corners_1, corners_2, n):
A = generate_A(corners_1, corners_2, n)
U,Sigma,Vt = np.linalg.svd(A)
return np.reshape(Vt[-1], (3, 3))

X = np.array([[0,0,1],
[1,0,1],
[1,1,1],
[0,1,1],
[0,0,1]])

H = np.random.rand(3,3)
#H/= H[2,2]

Xprime = (H.dot(X.T)).T
Xprime/=Xprime[:,2][:,np.newaxis]

H_gen = generate_homography(X, Xprime, 4)
'''
I_1 = plt.imread('photo_1.jpg')
I_2 = plt.imread('photo_2.jpg')

I_1 = I_1.mean(axis=2)
I_2 = I_2.mean(axis=2)

corners_1 = harris_corner_detection(I_1)
corners_2 = harris_corner_detection(I_2)

descriptors_1 = extract_descriptors(I_1, corners_1, 21)
descriptors_2 = extract_descriptors(I_2, corners_2, 21)

matches = get_min_descriptor_errors(descriptors_1, descriptors_2, .09).astype(int)

matching_corners_1 = corners_1[matches[:,0],:]
matching_corners_2 = corners_2[matches[:,1],:]


H = generate_H(matching_corners_1, matching_corners_2)
'''
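Reviewer note: make_A_row and generate_homography above look like the standard DLT (direct linear transform) construction. For a correspondence $(u, v) \leftrightarrow (u', v')$ the pair contributes the two rows

$$
\begin{bmatrix}
0 & 0 & 0 & -u & -v & -1 & v'u & v'v & v' \\
u & v & 1 & 0 & 0 & 0 & -u'u & -u'v & -u'
\end{bmatrix} h = 0,
$$

where $h$ is the 3x3 matrix $H$ flattened row-major. Stacking $2n$ such rows into $A$, the right singular vector with the smallest singular value (the last row of Vt returned by np.linalg.svd) minimizes $\lVert A h \rVert$ subject to $\lVert h \rVert = 1$, and the reshape turns it back into $H$, which is only defined up to scale.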
Binary file added RANSAC/homography.pyc
Binary file not shown.
48 changes: 48 additions & 0 deletions RANSAC/homography.py~
@@ -0,0 +1,48 @@
import numpy as np
import matplotlib.pyplot as plt
from feature_matching import *

def make_A_row(corner_1, corner_2):

u1 = corner_1[0]
u2 = corner_2[0]
v1 = corner_1[1]
v2 = corner_1[1]
A_row = np.array([
[0,0,0,-u1,-v1,-1,v2*u1,v2*v1,v1],
[u1,v1,1,0,0,0,-u2*u1,u1*v1,-u1]
])
return A_row

def generate_A(corners_1, corners_2, n):
A = np.zeros((2*n, 9))
for i in range(0, 2*n, 2):
A[i:i+2] = make_A_row(corners_1[i/2], corners_2[i/2])
return A

def generate_homography(corners_1, corners_2, n):
A = generate_A(corners_1, corners_2, n)
U,Sigma,Vt = np.linalg.svd(A)
return np.reshape(Vt[-1], (3, 3))

'''
I_1 = plt.imread('photo_1.jpg')
I_2 = plt.imread('photo_2.jpg')

I_1 = I_1.mean(axis=2)
I_2 = I_2.mean(axis=2)

corners_1 = harris_corner_detection(I_1)
corners_2 = harris_corner_detection(I_2)

descriptors_1 = extract_descriptors(I_1, corners_1, 21)
descriptors_2 = extract_descriptors(I_2, corners_2, 21)

matches = get_min_descriptor_errors(descriptors_1, descriptors_2, .09).astype(int)

matching_corners_1 = corners_1[matches[:,0],:]
matching_corners_2 = corners_2[matches[:,1],:]


H = generate_H(matching_corners_1, matching_corners_2)
'''
57 changes: 57 additions & 0 deletions RANSAC/keypoint_detection.py
@@ -0,0 +1,57 @@
import numpy as np
from scipy import signal
from matplotlib import pyplot as plt

def pull_local_maxima(I, evaluation_fraction=.05):
local_maxima = []
for i in range(1, I.shape[0] - 1):
for j in range(1, I.shape[1] - 1):
if (I[i + 1][j] <= I[i][j]
and I[i - 1][j] <= I[i][j]
and I[i][j + 1] <= I[i][j]
and I[i][j - 1] <= I[i][j]):
local_maxima.append((i, j, I[i, j]))

local_maxima.sort(key=lambda local_maxima: local_maxima[2], reverse=True)
local_maxima = np.array(local_maxima)
evaluation_slice = int(local_maxima.shape[0] * evaluation_fraction)
local_maxima = local_maxima[:evaluation_slice]
radii = np.zeros((local_maxima.shape[0], 1)) + float("inf")
local_maxima = np.hstack((local_maxima, radii))

for i in range(0, local_maxima.shape[0]):
for j in range(0, local_maxima.shape[0]):
distance = np.sqrt(
(local_maxima[i][0] - local_maxima[j][0]) ** 2 + (local_maxima[i][1] - local_maxima[j][1]) ** 2)
if (local_maxima[j][2] > local_maxima[i][2] and distance < local_maxima[i][3]):
local_maxima[i][3] = distance

indices = np.argsort(-local_maxima[:, 3])
local_maxima = local_maxima[indices]
return local_maxima[:100]


def harris_corner_detection(I, evaluation_fraction=.05):
Su = np.matrix(
[[-1, 0, 1],
[-2, 0, 2],
[-1, 0, 1]])
w = np.matrix([
[0.023528, 0.033969, 0.038393, 0.033969, 0.023528],
[0.033969, 0.049045, 0.055432, 0.049045, 0.033969],
[0.038393, 0.055432, 0.062651, 0.055432, 0.038393],
[0.033969, 0.049045, 0.055432, 0.049045, 0.033969],
[0.023528, 0.033969, 0.038393, 0.033969, 0.023528]
])

Iu = signal.convolve2d(I, Su)
Iv = signal.convolve2d(I, Su.T)

Iuu = signal.convolve2d(np.multiply(Iu, Iu), w)
Ivv = signal.convolve2d(np.multiply(Iv, Iv), w)
Iuv = signal.convolve2d(np.multiply(Iu, Iv), w)

H = np.divide(np.multiply(Iuu, Ivv) - np.multiply(Iuv, Iuv), Iuu + Ivv + 1e-10)

local_maxima = pull_local_maxima(H, evaluation_fraction)
return local_maxima[:,:2]
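Reviewer note: the corner response built above is a determinant-over-trace variant of the Harris measure, proportional to the harmonic mean of the two eigenvalues of the local structure tensor:

$$
R = \frac{I_{uu} I_{vv} - I_{uv}^2}{I_{uu} + I_{vv} + \varepsilon}
  = \frac{\det M}{\operatorname{tr} M + \varepsilon},
\qquad
M = w * \begin{bmatrix} I_u^2 & I_u I_v \\ I_u I_v & I_v^2 \end{bmatrix},
$$

where $I_u$ and $I_v$ are the Sobel derivatives produced by Su and its transpose, $w$ is the 5x5 weighting kernel above, and $\varepsilon = 10^{-10}$ guards against division by zero in flat regions. pull_local_maxima then keeps the strongest fraction of local maxima and applies an adaptive non-maximal suppression: each maximum is scored by its distance to the nearest stronger maximum, and the 100 best-separated points are returned.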
Binary file added RANSAC/keypoint_detection.pyc
Binary file not shown.
Binary file added RANSAC/photo_1.jpg
Binary file added RANSAC/photo_2.jpg
15 changes: 15 additions & 0 deletions RANSAC/test.py
@@ -0,0 +1,15 @@
import numpy as np
import matplotlib.pyplot as plt


# a unit square in homogeneous coordinates (closed by repeating the first corner)
X = np.array([[0,0,1],
[1,0,1],
[1,1,1],
[0,1,1],
[0,0,1]])

H = np.random.rand(3,3)
#H/= H[2,2]

Xprime = (H.dot(X.T)).T
Xprime/=Xprime[:,2][:,np.newaxis]

plt.plot(X[:,0],X[:,1],'g-')
plt.plot(Xprime[:,0],Xprime[:,1],'b-')
plt.show()
3 changes: 2 additions & 1 deletion README.md
@@ -1 +1,2 @@
# project_2
# project_2
This is a panoramic stitcher I made for Computer Vision