Experiment simulator for Boxfish ROV
from boxsim.trajectory import *   # assumed to provide Trajectory, show_img and sliceImage

# other imports used by the cells below (FFMPEG_VideoWriter and mvp are assumed to come from moviepy)
import numpy as np
import PIL.Image
import cv2
import pandas
import matplotlib.pyplot as plt
from moviepy.video.io.ffmpeg_writer import FFMPEG_VideoWriter
import moviepy.editor as mvp

We load a large natural image, Yosemite_NP_M.jpg (1880 x 2880 x 3), and take slices of it to simulate camera frames

main_img = np.asarray(PIL.Image.open("Yosemite_NP_M.jpg"))

show_img(main_img)
main_img.shape
(1880, 2880, 3)
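
To make the slicing idea concrete, here is a minimal, hypothetical helper that cuts a camera-sized window out of the large image. The notebook itself uses boxsim's sliceImage and Trajectory for this; window_slice and the coordinates below are illustrative only.

def window_slice(img, top_left, size):
    """Crop a size[0] x size[1] window whose top-left corner is at (row, col) = top_left."""
    r, c = top_left
    h, w = size
    return img[r:r + h, c:c + w]

frame = window_slice(main_img, (1000, 1100), (400, 600))  # one simulated 400 x 600 camera frame
show_img(frame)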

Show the trajectory of the random walk around the image

X and Y dimensions are in pixels; the walk starts at pixel (0, 0)

traj = Trajectory(np.asarray(PIL.Image.open("Yosemite_NP_M.jpg")), 50)
plt.plot(traj.xyza[:,0], traj.xyza[:,1])
plt.legend(['Ground Truth XY trajectory'])
plt.title('2d XY')
plt.show()
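
For intuition, a random walk like the one Trajectory produces can be sketched as a cumulative sum of small random steps. The step magnitudes and the four columns below mirror xyza (x, y, scale, angle) but are illustrative assumptions, not boxsim's actual parameters.

rng = np.random.default_rng(0)

n_steps = 200
steps = rng.normal(size=(n_steps, 4)) * [2.0, 2.0, 0.01, 0.5]  # made-up per-step changes in x, y, scale, angle
xyza_sketch = np.cumsum(steps, axis=0)                         # integrate the steps into a path
xyza_sketch[:, :2] -= xyza_sketch[0, :2]                       # start the walk at pixel (0, 0)

plt.plot(xyza_sketch[:, 0], xyza_sketch[:, 1])
plt.title('Sketch: random-walk XY path')
plt.show()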

Plot all dimensions of the trajectory: x, y, scale and angle

plt.plot(traj.xyza)
plt.legend(['x', 'y', 'scale', 'angle'])
plt.title('x, y, scale, angle')
plt.show()

Show the trajectory in a cv2.imshow window

This will only display when running locally, not in a hosted notebook

traj = Trajectory(np.asarray(PIL.Image.open("Yosemite_NP_M.jpg")), 50)

move_calc = np.zeros((traj.xyza.shape[0], 3))            # motion calculated from the frames (x, y, angle) - placeholder for now
move_true = np.zeros((traj.xyza.shape[0], 3))            # ground-truth motion from the simulator (x, y, angle)
num_kpts = np.zeros(traj.xyza.shape[0], dtype=np.intp)   # keypoints found per frame (np.intp replaces the deprecated np.int0)

for i, f in enumerate(traj):
    cur_img, move_true[i, :] = f            # frame image and its ground-truth (x, y, angle)

    num_kpts[i] = 0                         # no keypoint detection yet - placeholders only
    move_calc[i, :] = 0, 0, 0

    # cv2.imshow("keypoints", img_kps)
    cv2.imshow("keypoints", cv2.cvtColor(cur_img, cv2.COLOR_RGB2BGR))  # PIL frames are RGB, OpenCV wants BGR
    cv2.waitKey(10)
    print(':', end='', flush=True)          # simple progress indicator

cv2.destroyAllWindows() 

move_true[:, :2] = move_true[:, :2] - traj.startpos  # express the ground-truth XY relative to the start position

df = pandas.DataFrame(np.column_stack((move_true, move_calc, num_kpts)))
df.columns = ['true_x','true_y', 'true_a','calc_y','calc_x', 'calc_a', 'num_kpts']
print("")
print(df.head())
::::::::::::::::::::::::::::::::::::::::::::::::::
   true_x  true_y    true_a  calc_y  calc_x  calc_a  num_kpts
0     0.0     0.0  2.500000     0.0     0.0     0.0       0.0
1     0.0     0.0  3.438168     0.0     0.0     0.0       0.0
2     1.0     1.0  4.239206     0.0     0.0     0.0       0.0
3     1.0     1.0  4.786032     0.0     0.0     0.0       0.0
4     2.0     2.0  4.998716     0.0     0.0     0.0       0.0
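
The calc columns above are still zero placeholders. One way they could be filled in (a sketch only, not boxsim's method) is to track goodFeaturesToTrack corners from one frame to the next with Lucas-Kanade optical flow and take the median displacement:

def estimate_shift(prev_rgb, cur_rgb):
    """Sketch: median XY translation between two frames from tracked corners."""
    prev_gray = cv2.cvtColor(prev_rgb, cv2.COLOR_RGB2GRAY)
    cur_gray = cv2.cvtColor(cur_rgb, cv2.COLOR_RGB2GRAY)

    pts = cv2.goodFeaturesToTrack(prev_gray, maxCorners=200,
                                  qualityLevel=0.01, minDistance=30)
    if pts is None:
        return 0.0, 0.0, 0
    nxt, status, _ = cv2.calcOpticalFlowPyrLK(prev_gray, cur_gray, pts, None)
    good = status.ravel() == 1
    if not good.any():
        return 0.0, 0.0, 0
    flow = (nxt[good] - pts[good]).reshape(-1, 2)
    dx, dy = np.median(flow, axis=0)
    return dx, dy, int(good.sum())

Inside the simulation loop, move_calc[i, :2] and num_kpts[i] could then be filled from estimate_shift(prev_img, cur_img); note the calc columns are stored (y, x) in the DataFrame above, and angle estimation is left out of this sketch.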

Save to MP4 video

traj = Trajectory(np.asarray(PIL.Image.open("Yosemite_NP_M.jpg")), 50)
with FFMPEG_VideoWriter('out.mp4', (600,400), 24.0) as video:
    for i,f in enumerate(traj):
        cur_img, move_true[i,:] = f
        video.write_frame(cur_img)
        print(':', end='', flush=True)  
::::::::::::::::::::::::::::::::::::::::::::::::::
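
If moviepy is not available, OpenCV's VideoWriter can produce a similar file. A sketch, assuming the simulator frames are 600 x 400 pixel RGB images (matching the size passed to FFMPEG_VideoWriter above); frames of any other size would be silently dropped:

traj = Trajectory(np.asarray(PIL.Image.open("Yosemite_NP_M.jpg")), 50)
writer = cv2.VideoWriter('out_cv.mp4', cv2.VideoWriter_fourcc(*'mp4v'), 24.0, (600, 400))  # (width, height)
for i, f in enumerate(traj):
    cur_img, _ = f
    writer.write(cv2.cvtColor(cur_img, cv2.COLOR_RGB2BGR))  # VideoWriter expects BGR frames
writer.release()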

Show the resulting video

mvp.ipython_display('out.mp4', loop=True)

Compare the calculated trajectory to the ground truth

cum_mov_true = np.cumsum(move_true, axis=0) 
cum_mov_calc = np.cumsum(move_calc, axis=0) 

fig, axes = plt.subplots(1, 2, figsize=(10, 3))
axes[0].plot(cum_mov_true[:,0], cum_mov_true[:,1])
axes[0].set_title('Ground Truth trajectory')
axes[1].plot(cum_mov_calc[:,1], cum_mov_calc[:,0])
axes[1].set_title('Calculated Trajectory')

plt.tight_layout()
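
Once move_calc is actually estimated, a simple quantitative comparison is the RMSE between the cumulative XY positions (a sketch; the calculated columns are stored (y, x) in this notebook, so they are swapped before comparing):

xy_true = cum_mov_true[:, :2]
xy_calc = cum_mov_calc[:, [1, 0]]
rmse = np.sqrt(np.mean(np.sum((xy_true - xy_calc) ** 2, axis=1)))
print('XY trajectory RMSE (pixels):', rmse)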

TODOs

Example cv2.goodFeaturesToTrack() on a 400 x 600 slice of the large image

It uses a corner detector (Shi-Tomasi by default, Harris optionally), which works well on structured scenes such as buildings and furniture, but does not seem robust to noise on natural images

img0 = np.copy(sliceImage(main_img, [400,600], [1100,1000])[0])

cnrs = cv2.goodFeaturesToTrack(cv2.cvtColor(img0, cv2.COLOR_RGB2GRAY),  # img0 is RGB (from PIL), not BGR
                               maxCorners=200,
                               qualityLevel=0.01,
                               minDistance=30,
                               blockSize=3)
print('Number of keypoints', len(cnrs))
for c in cnrs:
    x, y = c.ravel().astype(int)      # corner coordinates are float32; cv2.circle needs integers
    cv2.circle(img0, (x, y), 3, 255, -1)

show_img(img0)
Number of keypoints 191

TODO

ORB: Simulation of Position holding for ROV

ORB: An efficient alternative to SIFT or SURF
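
As a starting point for the ORB items above, a minimal sketch of ORB keypoint detection on the same image slice. cv2.ORB_create, detect and drawKeypoints are standard OpenCV calls; the position-holding logic itself is still TODO, and nfeatures=200 is an arbitrary choice for this sketch.

img1 = np.copy(sliceImage(main_img, [400,600], [1100,1000])[0])
gray = cv2.cvtColor(img1, cv2.COLOR_RGB2GRAY)

orb = cv2.ORB_create(nfeatures=200)
kps = orb.detect(gray, None)
print('Number of ORB keypoints', len(kps))

img_kps = cv2.drawKeypoints(img1, kps, None, color=(255, 0, 0))
show_img(img_kps)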