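"""Thermal image stitching demo for two pairs of 8x8 Grid-EYE-style sensors.

Each of two serial ports streams text lines of the form '<tag>:<payload>':
a 'Distance:<value>' reading (clamped to 200) and frame lines whose payload
is 64 hex words scaled by 0.25 (degrees C per LSB on a Grid-EYE).  The main
loop averages the first frames as a static background, subtracts it, stitches
the four sensor images using both a geometric overlap estimate (sensor
spacing, distance to object, 30-degree half field of view) and a brute-force
correlation search, and records the result to 'output.avi' along with a SIFT
match preview in 'sift.avi'.

COM port names, sensor spacing, and the assumed field of view are taken from
the constants in the __main__ block and will likely need adjusting for a
different setup.
"""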
import serial
import threading
import time


class Frame():
    # One sensor frame: capture timestamp plus an 8x8 grid of temperatures.
    def __init__(self, time, data):
        self.time = time
        self.data = data

class GridEye():
    # Reads one serial-attached board that streams two sensor frames plus a
    # distance value; parsed frames are published as self.frame1/self.frame2.
    def __init__(self, serialPort, baudrate):
        self.port = serial.Serial(serialPort, baudrate)
        self.frame1 = None
        self.frame2 = None
        self.reading = True
        self.distance = -1
        self.thread = threading.Thread(target=self.reader)
        self.thread.daemon = True  # setDaemon() is deprecated
        self.lock = threading.Lock()

    def start(self):
        self.port.reset_input_buffer()
        self.thread.start()

    def stop(self):
        self.reading = False
        self.thread.join()
    def reader(self):
        while self.reading:
            # Read one newline-terminated line byte by byte so the thread can
            # notice self.reading being cleared without blocking in readline().
            line = b''
            while self.reading:
                c = self.port.read()
                if c == b'\n':
                    break
                line += c
            # line = self.port.readline()#.decode('utf-8')
            # if line:
            #     print(line)
            # time.sleep(0.01)
            # if self.port.in_waiting > 0:
            #     print(self.port.in_waiting)
            if b':' in line:
                try:
                    tag = line.decode('utf-8').split(':')[0]
                    if 'Distance' in tag:
                        dist = float(line.decode('utf-8').split(':')[1])
                        if dist > 200.0:
                            dist = 200.0
                        self.lock.acquire()
                        self.distance = dist
                        self.lock.release()
                    else:
                        # Frame payload: 64 hex words, 0.25 degrees C per LSB.
                        values = [int(x, 16) * 0.25 for x in line.decode('utf-8').split(':')[1].split()]
                        if len(values) == 64:
                            data = []
                            for i in range(8):
                                data.append(values[i*8:i*8+8])
                            self.lock.acquire()
                            if '104' in tag:
                                self.frame1 = Frame(time.time(), data)
                            else:
                                self.frame2 = Frame(time.time(), data)
                            self.lock.release()
                        else:
                            print('something wrong', len(values))
                except Exception as e:
                    print(e)

class Distance():
    # Reads 'Distance:<value>' lines from a separate serial distance sensor.
    def __init__(self, serialPort, baudrate):
        self.port = serial.Serial(serialPort, baudrate)
        self.distance = 200
        self.reading = True
        self.thread = threading.Thread(target=self.reader)
        self.thread.daemon = True  # setDaemon() is deprecated
        self.lock = threading.Lock()

    def start(self):
        self.port.reset_input_buffer()
        self.thread.start()

    def stop(self):
        self.reading = False
        self.thread.join()

    def reader(self):
        while self.reading:
            line = b''
            while self.reading:
                c = self.port.read()
                if c == b'\r':
                    c = self.port.read()  # consume the '\n' that follows
                    break
                if c == b'\n':
                    break
                line += c
            if b'Distance' in line:
                try:
                    dist = float(line.decode('utf-8').split(':')[1])
                    print(dist)
                    if dist > 200.0:
                        dist = 200.0  # clamp readings at 200
                    self.lock.acquire()
                    self.distance = dist
                    self.lock.release()
                except ValueError as e:
                    print('error', e)

if __name__ == '__main__':
    import cv2
    import numpy as np
    import math

    def exponential(img, value):
        # Gamma-style contrast stretch: raise to 'value' and rescale to 0-255.
        tmp = cv2.pow(img.astype(np.double), value) * (255.0 / (255.0 ** value))
        return tmp.astype(np.uint8)

    SIZE = 128                      # upscaled size of each 8x8 frame, in pixels
    AVERAGE_FRAME = 10              # frames averaged to build the background
    distanceBetweenSensors_w = 2.6  # cm
    distanceBetweenSensors_h = 2.6  # cm
    distance2Object = 60.0          # cm
    ADJUST_BACK = 5
    EXPONENTAL_VALUE = 0.4

    grideye = GridEye('COM25', 115200)
    grideye.start()
    grideye2 = GridEye('COM24', 115200)
    grideye2.start()
    # distanceSensor = Distance('COM18', 9600)
    # distanceSensor.start()

    fourcc = cv2.VideoWriter_fourcc(*'XVID')
    videoWriter = cv2.VideoWriter('output.avi', fourcc, 10.0, (SIZE*4, SIZE*4))
    siftVideoWriter = cv2.VideoWriter('sift.avi', fourcc, 10.0, (SIZE*2, SIZE*1))
    cv2.imshow('sample', np.zeros((SIZE*3, SIZE*2), np.uint8))

    cnt = 0
    avers = []
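    # Main loop: whenever all four sensor frames are available, grab them under
    # the locks, accumulate the first AVERAGE_FRAME sets as a static background,
    # then background-subtract, stitch the views (geometric estimate in the
    # bottom-left of 'out', correlation estimate in the bottom-right) and record.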
    while True:
        if grideye.frame1 and grideye.frame2 and grideye2.frame1 and grideye2.frame2:
            grideye.lock.acquire()
            grideye2.lock.acquire()
            frames = [grideye.frame1, grideye.frame2, grideye2.frame1, grideye2.frame2]
            grideye.frame1 = None
            grideye.frame2 = None
            grideye2.frame1 = None
            grideye2.frame2 = None
            distance2Object = grideye.distance + grideye2.distance + 1
            print(distance2Object)
            if distance2Object <= 0:
                distance2Object = 200
            grideye2.lock.release()
            grideye.lock.release()

            imgs = []
            for frame in frames:
                # Map temperatures to 8-bit intensities and upscale 8x8 -> SIZE x SIZE.
                img = (np.array(frame.data) - 15) * 10
                img = cv2.resize(img.astype(np.uint8), (SIZE, SIZE), interpolation=cv2.INTER_LINEAR)  # INTER_LINEAR, INTER_CUBIC
                imgs.append(img)
                if len(avers) < len(frames):
                    avers.append(np.zeros((SIZE, SIZE), np.uint16))  # one accumulator per view

            if cnt < AVERAGE_FRAME:
                # Still collecting the background: accumulate and skip processing.
                cnt += 1
                for i in range(len(imgs)):
                    avers[i] += imgs[i]
                if cnt == AVERAGE_FRAME:
                    for i in range(len(avers)):
                        avers[i] = avers[i] / AVERAGE_FRAME
                        avers[i] = avers[i].astype(np.uint8)
                        avers[i] += ADJUST_BACK
                continue

            # Remove the background and tile the four raw views in the top-left
            # quarter of the output canvas.
            for i in range(len(imgs)):
                imgs[i] = cv2.subtract(imgs[i], avers[i])
            print('xdd')
            out = np.full((SIZE*4, SIZE*4), 255, dtype=np.uint16)
            out[:SIZE, :SIZE] = imgs[0]
            out[:SIZE, SIZE:SIZE*2] = imgs[1]
            out[SIZE:SIZE*2, :SIZE] = imgs[2]
            out[SIZE:SIZE*2, SIZE:SIZE*2] = imgs[3]
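            # Geometric overlap estimate: with a 30-degree half field of view
            # (a 60-degree FOV sensor is assumed here), each view spans
            # 2*d*tan(30deg) at distance d, so two views from sensors spaced
            # s apart overlap by roughly SIZE * (1 - s / (2*d*tan(30deg))) pixels.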
            try:
                overlap_w = int(SIZE - (distanceBetweenSensors_w / (2*distance2Object*math.tan(30.0/180.0*math.pi))) * SIZE)
            except ZeroDivisionError:
                overlap_w = 0
            if overlap_w < 0:
                overlap_w = 0
            try:
                overlap_h = int(SIZE - (distanceBetweenSensors_h / (2*distance2Object*math.tan(30.0/180.0*math.pi))) * SIZE)
            except ZeroDivisionError:
                overlap_h = 0
            if overlap_h < 0:
                overlap_h = 0

            # Blend left/right pairs, then top/bottom, averaging the overlapping strips.
            tmp = np.zeros((SIZE, SIZE*2-overlap_w), dtype=np.uint16)
            tmp[:, :SIZE] = imgs[0]
            tmp[:, -SIZE:] += imgs[1]
            tmp[:, (SIZE-overlap_w):SIZE] = tmp[:, (SIZE-overlap_w):SIZE] / 2
            tmp2 = np.zeros((SIZE, SIZE*2-overlap_w), dtype=np.uint16)
            tmp2[:, :SIZE] = imgs[2]
            tmp2[:, -SIZE:] += imgs[3]
            tmp2[:, (SIZE-overlap_w):SIZE] = tmp2[:, (SIZE-overlap_w):SIZE] / 2
            merge = np.zeros((SIZE*2-overlap_h, SIZE*2-overlap_w), dtype=np.uint16)
            merge[:SIZE, :] = tmp
            merge[-SIZE:, :] += tmp2
            merge[(SIZE-overlap_h):SIZE, :] = merge[(SIZE-overlap_h):SIZE, :] / 2
            # merge = exponential(merge, EXPONENTAL_VALUE)

            # Centre the geometric stitch in the bottom-left quarter of the canvas.
            offset_w = int(overlap_w/2)
            offset_h = int(overlap_h/2)
            print(SIZE*2+offset_h, SIZE*4-overlap_h+offset_h, offset_w, SIZE*2-overlap_w+offset_w)
            out[SIZE*2+offset_h:SIZE*4-overlap_h+offset_h, offset_w:SIZE*2-overlap_w+offset_w] = merge
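            # Correlation-based overlap estimate: brute-force search over candidate
            # overlaps (80-127 px), scoring each candidate by the mean elementwise
            # product of the overlapping strips and keeping the best score.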
            maxProduct = 0
            overlap_w = 0
            for i in range(80, 128):
                product = sum(imgs[0][:, SIZE-i:].astype(np.uint32) * imgs[1][:, :i].astype(np.uint32))
                product += sum(imgs[2][:, SIZE-i:].astype(np.uint32) * imgs[3][:, :i].astype(np.uint32))
                product = sum(product) / len(product)
                if product > maxProduct:
                    maxProduct = product
                    overlap_w = i
            maxProduct = 0
            overlap_h = 0
            for i in range(80, 128):
                product = sum(imgs[0][SIZE-i:, :].astype(np.uint32) * imgs[2][:i, :].astype(np.uint32))
                product += sum(imgs[1][SIZE-i:, :].astype(np.uint32) * imgs[3][:i, :].astype(np.uint32))
                product = sum(product) / len(product)
                if product > maxProduct:
                    maxProduct = product
                    overlap_h = i

            # Repeat the blend with the correlation-based overlaps and place the
            # result in the bottom-right quarter of the canvas.
            tmp = np.zeros((SIZE, SIZE*2-overlap_w), dtype=np.uint16)
            tmp[:, :SIZE] = imgs[0]
            tmp[:, -SIZE:] += imgs[1]
            tmp[:, (SIZE-overlap_w):SIZE] = tmp[:, (SIZE-overlap_w):SIZE] / 2
            tmp2 = np.zeros((SIZE, SIZE*2-overlap_w), dtype=np.uint16)
            tmp2[:, :SIZE] = imgs[2]
            tmp2[:, -SIZE:] += imgs[3]
            tmp2[:, (SIZE-overlap_w):SIZE] = tmp2[:, (SIZE-overlap_w):SIZE] / 2
            merge = np.zeros((SIZE*2-overlap_h, SIZE*2-overlap_w), dtype=np.uint16)
            merge[:SIZE, :] = tmp
            merge[-SIZE:, :] += tmp2
            merge[(SIZE-overlap_h):SIZE, :] = merge[(SIZE-overlap_h):SIZE, :] / 2
            offset_w = int(overlap_w/2)
            offset_h = int(overlap_h/2)
            out[SIZE*2+offset_h:SIZE*4-overlap_h+offset_h, SIZE*2+offset_w:SIZE*4-overlap_w+offset_w] = merge
            # offset = int(overlap2/2)
            # tmp = np.zeros((SIZE, SIZE*2-overlap2), dtype=np.uint16)
            # tmp[:, :SIZE] = img
            # tmp[:, -SIZE:] += img2
            # tmp[:, (SIZE-overlap2): SIZE] = tmp[:, (SIZE-overlap2): SIZE]/2
            # tmp = exponential(tmp, EXPONENTAL_VALUE)
            # out[SIZE*2:, offset: SIZE*2-overlap2+offset] = tmp
            out = out.astype(np.uint8)
            out = exponential(out, EXPONENTAL_VALUE)
            cv2.imshow('sample', out)
            videoWriter.write(cv2.cvtColor(out, cv2.COLOR_GRAY2BGR))

            # Optional SIFT keypoint matching between the first two views, to see
            # how well feature matching works on the thermal images.
            try:
                sift = cv2.xfeatures2d.SIFT_create()
                img1 = exponential(imgs[0], EXPONENTAL_VALUE)
                img2 = exponential(imgs[1], EXPONENTAL_VALUE)
                kp_1, desc_1 = sift.detectAndCompute(img1, None)
                kp_2, desc_2 = sift.detectAndCompute(img2, None)
                index_params = dict(algorithm=0, trees=5)
                search_params = dict()
                flann = cv2.FlannBasedMatcher(index_params, search_params)
                matches = flann.knnMatch(desc_1, desc_2, k=2)
                good_points = []
                ratio = 0.8  # Lowe's ratio test
                for m, n in matches:
                    if m.distance < ratio * n.distance:
                        good_points.append(m)
                result = cv2.drawMatches(img1, kp_1, img2, kp_2, good_points, None)
                cv2.imshow("result", result)
                print(result.shape)
                siftVideoWriter.write(result)
            except Exception:
                # SIFT may be unavailable (needs opencv-contrib) or matching may
                # fail when too few keypoints are found; skip the preview then.
                pass
        key = cv2.waitKey(1)
        if key == ord('q'):
            break
        elif key == ord('c'):
            cv2.imwrite('out.jpg', out)
        time.sleep(0.001)

    grideye.stop()
    grideye2.stop()
    videoWriter.release()
    siftVideoWriter.release()
    cv2.destroyAllWindows()