Image rotation and translation using an aruco marker

I am trying to warp an image based on the orientation of the camera relative to an ArUco marker in the middle of the image. I have managed to get the translation part working, but the rotation element is not: the image doesn't appear to rotate about the centre of the marker's axes. The reference image was taken straight on, and the warped image is overlaid on it.
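
For context, corners and the marker pose (pos_camera, roll_marker, pitch_marker) used in the code below come from standard ArUco detection and pose estimation, roughly along these lines. This is a simplified sketch using the legacy cv2.aruco API; the dictionary, marker length and calibration values are placeholders, not the real ones.

import cv2
import numpy as np
import math

camera_matrix = np.array([[1000, 0, 960],
                          [0, 1000, 540],
                          [0, 0, 1]], np.float32)   # placeholder intrinsics
dist_coeffs = np.zeros((5, 1), np.float32)          # placeholder distortion
marker_length = 0.05                                # placeholder marker size

frame = cv2.imread('frame.png')
dictionary = cv2.aruco.getPredefinedDictionary(cv2.aruco.DICT_4X4_50)
corners, ids, _ = cv2.aruco.detectMarkers(frame, dictionary)

rvecs, tvecs, _ = cv2.aruco.estimatePoseSingleMarkers(corners, marker_length,
                                                      camera_matrix, dist_coeffs)
R_ct, _ = cv2.Rodrigues(rvecs[0])                            # marker orientation in the camera frame
pos_camera = (-R_ct.T @ tvecs[0].reshape(3, 1)).flatten()    # camera position in the marker frame

# Roll/pitch of the marker relative to the camera (one common ZYX convention)
roll_marker = math.atan2(R_ct[2, 1], R_ct[2, 2])
pitch_marker = math.atan2(-R_ct[2, 0], math.sqrt(R_ct[2, 1] ** 2 + R_ct[2, 2] ** 2))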

# Find centre of the marker
top_left_x = (corners[0][0][0, 0])
top_left_y = (corners[0][0][0, 1])
top_right_x = (corners[0][0][1, 0])
top_right_y = (corners[0][0][1, 1])
bottom_right_x = (corners[0][0][2, 0])
bottom_right_y = (corners[0][0][2, 1])
bottom_left_x = (corners[0][0][3, 0])
bottom_left_y = (corners[0][0][3, 1])
# Compare this to the centre of the image to calculate the offset
mid_x = top_right_x - (top_right_x - top_left_x) / 2  
mid_y = bottom_left_y - (bottom_left_y - top_left_y) / 2
x_centre = 960
y_centre = 540
x_offset = x_centre - mid_x
y_offset = y_centre - mid_y
if x_centre > mid_x:  # marker is left of the image centre (camera has gone right)
    x_offset = 1 * (x_centre - mid_x)  # correction to the left
if x_centre < mid_x:  # marker is right of the image centre (camera has gone left)
    x_offset = -1 * (mid_x - x_centre)  # correction to the right

if y_centre > mid_y:  # marker is above the image centre (camera has gone down)
    y_offset = 1 * (y_centre - mid_y)  # correction upwards
if y_centre < mid_y:  # marker is below the image centre (camera has gone up)
    y_offset = -1 * (mid_y - y_centre)  # correction downwards
current_z_distance = math.sqrt(pos_camera[0]**2 + pos_camera[1]**2 + pos_camera[2]**2) * 15.4  # pixels

img = cv2.imread('Corrected.png')
corrected_z = 31  # Distance when image was taken
initial_z_distance = corrected_z * 15.4  # Pixels

delta_z = (initial_z_distance - current_z_distance)
scale_factor = current_z_distance / initial_z_distance  # how much larger the image now is; used for scaling

z_translation = delta_z * 1.54  # how much the image has moved; negative when moving backwards
z_translation = 0  # overridden to zero here
z_axis = 960 / scale_factor

proj2dto3d = np.array([[1, 0, -mid_x],
                        [0, 1, -mid_y],
                        [0, 0, 0],
                        [0, 0, 1]], np.float32)

proj3dto2d = np.array([[z_axis, 0, mid_x, 0],
                        [0, z_axis, mid_y, 0],  # defines the centre of rotation
                        [0, 0, 1, 0]], np.float32)

trans = np.array([[1, 0, 0, x_offset * -1], # Working
                  [0, 1, 0, y_offset * -1],
                  [0, 0, 1, 960],  # keep as 960
                  [0, 0, 0, 1]], np.float32)

x = math.degrees(roll_marker) * -1  # forwards and backwards
y = math.degrees(pitch_marker) * -1  # Left and right
z = 0
rx = np.array([[1, 0, 0, 0],
               [0, 1, 0, 0],
               [0, 0, 1, 0],
               [0, 0, 0, 1]], np.float32)  #

ry = np.array([[1, 0, 0, 0],
               [0, 1, 0, 0],
               [0, 0, 1, 0],
               [0, 0, 0, 1]], np.float32)

rz = np.array([[1, 0, 0, 0],
               [0, 1, 0, 0],
               [0, 0, 1, 0],
               [0, 0, 0, 1]], np.float32)
ax = float(x * (math.pi / 180.0))  # 0
ay = float(y * (math.pi / 180.0))
az = float(z * (math.pi / 180.0))  # 0

rx[1, 1] = math.cos(ax)  # 0
rx[1, 2] = -math.sin(ax)  # 0
rx[2, 1] = math.sin(ax)  # 0
rx[2, 2] = math.cos(ax)  # 0

ry[0, 0] = math.cos(ay)
ry[0, 2] = -math.sin(ay)
ry[2, 0] = math.sin(ay)
ry[2, 2] = math.cos(ay)

rz[0, 0] = math.cos(az)  # 0
rz[0, 1] = -math.sin(az)  # 0
rz[1, 0] = math.sin(az)  # 0
rz[1, 1] = math.cos(az)  # 0

#  Combine rotation and translation into the final homography

#  r = rx.dot(ry)  # if the rotation lines are removed, use r = ry
r = rx.dot(ry)  # order may need to be changed
final = proj3dto2d.dot(trans.dot(r.dot(proj2dto3d)))  # project to 3D, rotate, translate, project back to 2D
dst = cv2.warpPerspective(img, final, (img.shape[1], img.shape[0]), None, cv2.INTER_LINEAR, cv2.BORDER_CONSTANT, (255, 255, 255))
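
A quick way to check the pivot (not part of the pipeline above, just a debugging sketch): map the marker centre through final and see where it lands. If the rotation really pivots about the marker centre, this point should only move by the applied offset, independent of roll and pitch.

# Debug check: where does the marker centre end up under the composite homography?
centre = np.array([mid_x, mid_y, 1.0], np.float32)
mapped = final.dot(centre)
mapped /= mapped[2]          # normalise the homogeneous coordinate
print('marker centre maps to', mapped[:2])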

Tags: python, numpy, opencv, 3d, aruco
