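# Dual Grid-EYE thermal viewer: reads 8x8 frames from two serial-attached sensor
# boards, builds a background from the first AVERAGE_FRAME frames, subtracts it,
# tiles the four images into one preview window, and records the preview to
# output.avi. A stitching/overlap search and a speed estimate exist in the main
# loop but are currently disabled.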
import serial
import threading
import time


class Frame:
    """One 8x8 sensor frame: capture time plus a list of 8 rows of 8 values."""
    def __init__(self, time, data):
        self.time = time
        self.data = data


class GridEye:
    """Background reader for one Grid-EYE sensor board on a serial port."""
    def __init__(self, serialPort, baudrate):
        self.port = serial.Serial(serialPort, baudrate)
        self.frame1 = None
        self.frame2 = None
        self.reading = True
        self.distance = -1
        self.thread = threading.Thread(target=self.reader)
        self.thread.daemon = True  # don't block interpreter exit
        self.lock = threading.Lock()

    def start(self):
        self.port.reset_input_buffer()
        self.thread.start()

    def stop(self):
        self.reading = False
        self.thread.join()
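
    # Line protocol (inferred from the parser below; the sensor firmware is not
    # shown here, so treat the exact tags as an assumption):
    #   Distance:<float>              -> distance reading, clamped to 200.0
    #   105:<64 hex values>           -> 8x8 frame stored in self.frame1
    #   <other tag>:<64 hex values>   -> 8x8 frame stored in self.frame2
    # Each hex value is scaled by 0.25 per LSB, the Grid-EYE temperature step.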
    def reader(self):
        while self.reading:
            # Assemble one newline-terminated ASCII line byte by byte.
            line = b''
            while self.reading:
                c = self.port.read()
                if c == b'\n':
                    break
                line += c
            #line = self.port.readline()#.decode('utf-8')
            # if line:
            #     print (line)
            # time.sleep(0.01)
            # if self.port.in_waiting > 0:
            #     print (self.port.in_waiting)
            if b':' in line:
                try:
                    tag = line.decode('utf-8').split(':')[0]
                    if 'Distance' in tag:
                        dist = float(line.decode('utf-8').split(':')[1])
                        if dist > 200.0:
                            dist = 200.0
                        self.lock.acquire()
                        self.distance = dist
                        self.lock.release()
                    else:
                        # 64 hex values, 0.25 units per LSB, reshaped to 8 rows of 8.
                        values = [int(x, 16) * 0.25 for x in line.decode('utf-8').split(':')[1].split()]
                        if len(values) == 64:
                            data = []
                            for i in range(8):
                                data.append(values[i * 8:i * 8 + 8])
                            self.lock.acquire()
                            if '105' in tag:
                                self.frame1 = Frame(time.time(), data)
                            else:
                                self.frame2 = Frame(time.time(), data)
                            self.lock.release()
                        else:
                            print('something wrong', len(values))
                except Exception as e:
                    print(e)
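

# Minimal consumer sketch (hypothetical port name 'COM5'; not part of the demo below):
#
#   eye = GridEye('COM5', 115200)
#   eye.start()
#   for _ in range(100):
#       with eye.lock:
#           frame, eye.frame1 = eye.frame1, None
#       if frame:
#           print(frame.time, frame.data[0])  # first row of the 8x8 grid
#       time.sleep(0.1)
#   eye.stop()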


if __name__ == '__main__':
    import cv2
    import numpy as np
    import math
    import json

    def exponential(img, value):
        tmp = cv2.pow(img.astype(np.double), value) * (255.0 / (255.0 ** value))
        return tmp.astype(np.uint8)
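
    # exponential() is a gamma-style remap: out = 255 * (in / 255) ** value.
    # With value < 1 it brightens dim pixels; e.g. value = 0.4 maps an input of
    # 64 to about 255 * (64 / 255) ** 0.4, roughly 147.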

    SIZE = 128
    AVERAGE_FRAME = 10
    distanceBetweenSensors_w = 2.6  # cm
    distanceBetweenSensors_h = 2.6  # cm
    distance2Object = 60.0  # cm
    ADJUST_BACK = 5
    EXPONENTAL_VALUE = 0.4
    PRODUCTION_THRESHOLD = 100
    MIN_EXIST_TIME = 0.5
    # Column- and row-index grids, used for intensity-weighted centroids below.
    W_ARRAY = np.array([list(range(SIZE * 2)) for x in range(SIZE * 2)])
    H_ARRAY = np.array([[x] * (SIZE * 2) for x in range(SIZE * 2)])

    grideye = GridEye('COM18', 115200)
    grideye.start()
    grideye2 = GridEye('COM24', 115200)
    grideye2.start()
    # distanceSensor = Distance('COM18', 9600)
    # distanceSensor.start()

    fourcc = cv2.VideoWriter_fourcc(*'XVID')
    videoWriter = cv2.VideoWriter('output.avi', fourcc, 10.0, (SIZE * 4, SIZE * 4))
    siftVideoWriter = cv2.VideoWriter('sift.avi', fourcc, 10.0, (SIZE * 2, SIZE * 1))

    cv2.imshow('sample', np.zeros((SIZE * 3, SIZE * 2), np.uint8))

    cnt = 0
    avers = []
    hasPos = False
    endTime = 0
    startTime = 0

    while True:
        if grideye.frame1 and grideye.frame2 and grideye2.frame1 and grideye2.frame2:
            grideye.lock.acquire()
            grideye2.lock.acquire()
            frames = [grideye.frame1, grideye.frame2, grideye2.frame1, grideye2.frame2]
            grideye.frame1 = None
            grideye.frame2 = None
            grideye2.frame1 = None
            grideye2.frame2 = None
            distance2Object = grideye.distance + grideye2.distance + 1
            print(distance2Object)
            if distance2Object <= 0:
                distance2Object = 200
            grideye2.lock.release()
            grideye.lock.release()

            with open('log.txt', 'a') as f:
                f.write(json.dumps(frames[0].time) + '\n')
                for frame in frames:
                    f.write(json.dumps(frame.data) + '\n')
            #print (json.dumps(frames))

            # Map temperatures to 8-bit intensities and upscale each 8x8 frame.
            imgs = []
            for frame in frames:
                img = (np.array(frame.data) - 15) * 10
                img = cv2.resize(img.astype(np.uint8), (SIZE, SIZE), interpolation=cv2.INTER_LINEAR)  # INTER_LINEAR, INTER_CUBIC
                imgs.append(img)
                if len(avers) < len(imgs):
                    # one background accumulator per sensor image
                    avers.append(np.zeros((SIZE, SIZE), np.uint16))

            # Accumulate the first AVERAGE_FRAME frames into a static background,
            # then subtract that background (plus ADJUST_BACK) from every later frame.
            if cnt < AVERAGE_FRAME:
                cnt += 1
                for i in range(len(imgs)):
                    avers[i] += imgs[i]
                if cnt == AVERAGE_FRAME:
                    for i in range(len(avers)):
                        avers[i] = avers[i] / AVERAGE_FRAME
                        avers[i] = avers[i].astype(np.uint8)
                        avers[i] += ADJUST_BACK
                continue

            for i in range(len(imgs)):
                imgs[i] = cv2.subtract(imgs[i], avers[i])
            print('xdd')
            # Tile the four background-subtracted images into the top-left 2x2 of the canvas.
            out = np.full((SIZE * 4, SIZE * 4), 255, dtype=np.uint16)
            out[:SIZE, :SIZE] = imgs[0]
            out[:SIZE, SIZE:SIZE * 2] = imgs[1]
            out[SIZE:SIZE * 2, :SIZE] = imgs[2]
            out[SIZE:SIZE * 2, SIZE:SIZE * 2] = imgs[3]
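
            # The triple-quoted block below is disabled stitching code: the first
            # half derives the horizontal/vertical overlap geometrically from the
            # sensor spacing and measured distance (the tan(30 deg) term implies a
            # 60-degree field of view), the second half brute-forces overlaps of
            # 80-127 px by maximizing image correlation, then blends the strips.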
            '''
            try:
                overlap_w = int(SIZE - (distanceBetweenSensors_w / (2*distance2Object*math.tan(30.0/180.0*math.pi))) * SIZE)
            except:
                overlap_w = 0
            if overlap_w < 0:
                overlap_w = 0
            try:
                overlap_h = int(SIZE - (distanceBetweenSensors_h / (2*distance2Object*math.tan(30.0/180.0*math.pi))) * SIZE)
            except:
                overlap_h = 0
            if overlap_h < 0:
                overlap_h = 0
            tmp = np.zeros((SIZE, SIZE*2-overlap_w), dtype=np.uint16)
            tmp[:, :SIZE] = imgs[0]
            tmp[:, -SIZE:] += imgs[1]
            tmp[:, (SIZE-overlap_w): SIZE] = tmp[:, (SIZE-overlap_w): SIZE]/2
            tmp2 = np.zeros((SIZE, SIZE*2-overlap_w), dtype=np.uint16)
            tmp2[:, :SIZE] = imgs[2]
            tmp2[:, -SIZE:] += imgs[3]
            tmp2[:, (SIZE-overlap_w): SIZE] = tmp2[:, (SIZE-overlap_w): SIZE]/2
            merge = np.zeros((SIZE*2-overlap_h, SIZE*2-overlap_w), dtype=np.uint16)
            merge[:SIZE, :] = tmp
            merge[-SIZE:, :] += tmp2
            merge[(SIZE-overlap_h):SIZE, :] = merge[(SIZE-overlap_h):SIZE, :]/2
            # merge = exponential(merge, EXPONENTAL_VALUE)
            offset_w = int(overlap_w/2)
            offset_h = int(overlap_h/2)
            print (SIZE*2+offset_h, SIZE*4-overlap_h+offset_h, offset_w, SIZE*2-overlap_w+offset_w)
            out[SIZE*2+offset_h:SIZE*4-overlap_h+offset_h, offset_w: SIZE*2-overlap_w+offset_w] = merge

            maxProduct = 0
            overlap_w = 0
            for i in range(80, 128):
                product = sum(imgs[0][:,SIZE-i:].astype(np.uint32)*imgs[1][:,:i].astype(np.uint32))
                product += sum(imgs[2][:,SIZE-i:].astype(np.uint32)*imgs[3][:,:i].astype(np.uint32))
                product = sum(product) / len(product)
                if product > maxProduct:
                    maxProduct = product
                    overlap_w = i
            tmp = maxProduct
            maxProduct = 0
            overlap_h = 0
            for i in range(80, 128):
                product = sum(imgs[0][SIZE-i:, :].astype(np.uint32)*imgs[2][:i,:].astype(np.uint32))
                product += sum(imgs[1][SIZE-i:, :].astype(np.uint32)*imgs[3][:i,:].astype(np.uint32))
                product = sum(product) / len(product)
                if product > maxProduct:
                    maxProduct = product
                    overlap_h = i
            maxProduct = (tmp + maxProduct)/2
            tmp = np.zeros((SIZE, SIZE*2-overlap_w), dtype=np.uint16)
            tmp[:, :SIZE] = imgs[0]
            tmp[:, -SIZE:] += imgs[1]
            tmp[:, (SIZE-overlap_w): SIZE] = tmp[:, (SIZE-overlap_w): SIZE]/2
            tmp2 = np.zeros((SIZE, SIZE*2-overlap_w), dtype=np.uint16)
            tmp2[:, :SIZE] = imgs[2]
            tmp2[:, -SIZE:] += imgs[3]
            tmp2[:, (SIZE-overlap_w): SIZE] = tmp2[:, (SIZE-overlap_w): SIZE]/2
            merge = np.zeros((SIZE*2-overlap_h, SIZE*2-overlap_w), dtype=np.uint16)
            merge[:SIZE, :] = tmp
            merge[-SIZE:, :] += tmp2
            merge[(SIZE-overlap_h):SIZE, :] = merge[(SIZE-overlap_h):SIZE, :]/2
            offset_w = int(overlap_w/2)
            offset_h = int(overlap_h/2)
            out[SIZE*2+offset_h:SIZE*4-overlap_h+offset_h, SIZE*2+offset_w: SIZE*4-overlap_w+offset_w] = merge
            '''
            # offset = int(overlap2/2)
            # tmp = np.zeros((SIZE, SIZE*2-overlap2), dtype=np.uint16)
            # tmp[:, :SIZE] = img
            # tmp[:, -SIZE:] += img2
            # tmp[:, (SIZE-overlap2): SIZE] = tmp[:, (SIZE-overlap2): SIZE]/2
            # tmp = exponential(tmp, EXPONENTAL_VALUE)
            # out[SIZE*2:, offset: SIZE*2-overlap2+offset] = tmp

            out = out.astype(np.uint8)
            out = exponential(out, EXPONENTAL_VALUE)
            out = cv2.cvtColor(out, cv2.COLOR_GRAY2BGR)

            # Disabled ('if False'): tracking over the stitched image from the block
            # above. It finds the intensity-weighted centroid, converts it to cm, and
            # times how long a hot spot stays in view to estimate its speed.
            if False and maxProduct > PRODUCTION_THRESHOLD:
                print('XDDDD', maxProduct)
                position = [0, 0]
                rows, cols = merge.shape
                position[0] = sum(sum(H_ARRAY[:rows, :cols] * merge)) / sum(sum(merge))
                position[1] = sum(sum(W_ARRAY[:rows, :cols] * merge)) / sum(sum(merge))
                pos_w = distanceBetweenSensors_w / (SIZE - overlap_w) * position[0]
                pos_h = distanceBetweenSensors_h / (SIZE - overlap_h) * position[1]
                cv2.circle(out, (SIZE * 2 + offset_w + int(position[1]), SIZE * 2 + offset_h + int(position[0])), 10, (255, 0, 0), 5)
                if not hasPos:
                    startPos = [pos_w, pos_h]
                    startTime = frames[0].time
                    hasPos = True
                endPos = [pos_w, pos_h]
                endTime = frames[0].time
            elif hasPos:
                if endTime - startTime > MIN_EXIST_TIME:
                    print(startPos, endPos)
                    print('speed:', ((endPos[0] - startPos[0]) ** 2 + (endPos[1] - startPos[1]) ** 2) ** 0.5 / (endTime - startTime))
                    print('time:', endTime - startTime)
                hasPos = False

            if endTime - startTime > MIN_EXIST_TIME:
                speed = ((endPos[0] - startPos[0]) ** 2 + (endPos[1] - startPos[1]) ** 2) ** 0.5 / (endTime - startTime)
                cv2.putText(out, f'{speed:.2f}',
                            (0, SIZE * 2), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0), 2, cv2.LINE_AA)

            cv2.imshow('sample', out)
            videoWriter.write(out)

            key = cv2.waitKey(1)
            if key == ord('q'):
                break
            elif key == ord('c'):
                # Snapshot the current canvas and the raw frames on demand.
                cv2.imwrite('out.jpg', out)
                with open('log_captured.txt', 'a') as f:
                    f.write(json.dumps(frames[0].time) + '\n')
                    for frame in frames:
                        f.write(json.dumps(frame.data) + '\n')
        time.sleep(0.001)

    grideye.stop()
    grideye2.stop()
    videoWriter.release()
    siftVideoWriter.release()
    cv2.destroyAllWindows()