facetracker.py

#! /usr/bin/python3
from picamera.array import PiRGBArray
from picamera import PiCamera
from datetime import datetime
import time
import cv2
import sys
import imutils
import numpy as np
import math # for sqrt distance formula
# i2c stuff
import smbus
bus = smbus.SMBus(1)
SLAVE_ADDRESS = 0x04
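# Single-byte commands sent to the i2c slave (see send_char and the movement
# helpers below): 'a' = turn left, 'd' = turn right, ' ' = stop, 'F' = fire.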
time_before_scan = 3*60*1000.0 # ms without a face before scanning starts
time_after_scan = (3*60+15)*1000.0 # ms without a face before scanning stops again
# Create the haar cascade
#frontalfaceCascade = cv2.CascadeClassifier("haarcascade_frontalface_default.xml")
frontalfaceCascade = cv2.CascadeClassifier("frontalface_fromweb.xml")
profilefaceCascade = cv2.CascadeClassifier("haarcascade_profileface.xml")
face = [0,0,0,0] # Holds the array OpenCV returns when it finds a face (a bounding rectangle)
center = [0,0] # Center of the face: a point calculated from the above variable
lastface = 0 # int 1-3 used to speed up detection. The script is looking for a right profile face,
# a left profile face, or a frontal face; rather than searching for all three every time,
# it uses this variable to remember which it last saw and looks for that again. If it
# doesn't find it, it's set back to zero and on the next loop it will search for all three.
# This roughly cuts the detection time to a third as long as the face hasn't moved much.
scanleft = True # Should we scan for left profiles?
scanright = True # Should we scan for right profiles?
# initialize the camera and grab a reference to the raw camera capture
camera = PiCamera()
#camera.resolution = (160, 120)
#camera.resolution = (640,480)
camera.resolution = (1024,768)
cameracenter = (camera.resolution[0]/2, camera.resolution[1]/2)
camera.framerate = 32
rawCapture = PiRGBArray(camera, camera.resolution)
# Points to the last place we saw a face
target = ( camera.resolution[0]/2, camera.resolution[1]/2 )
# Fisheye corrections. See https://medium.com/@kennethjiang/calibrate-fisheye-lens-using-opencv-333b05afa0b0
# 640x480:
#correct_fisheye = False
#DIM=(640, 480)
#K=np.array([[363.787052141742, 0.0, 332.09761373599576], [0.0, 362.23769923959975, 238.35982850966641], [0.0, 0.0, 1.0]])
#D=np.array([[-0.019982864934848042], [-0.10107557279423625], [0.20401597940960342], [-0.1406464201639892]])
# 1024x768:
correct_fisheye = True
DIM=(1024, 768)
K=np.array([[583.6639649321671, 0.0, 518.0139106134624], [0.0, 580.8039721094127, 384.32095600935503], [0.0, 0.0, 1.0]])
D=np.array([[0.0028045742945672475], [-0.14423839478882694], [0.23715105072799644], [-0.1400677375634837]])
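# The K and D values above are assumed to come from a one-time fisheye calibration
# pass over checkerboard photos, following the article linked above. The helper
# below is only an illustrative sketch of that step; it is never called by this
# script, and the function name, checkerboard size, and image list are placeholders.
def calibrate_fisheye(image_files, checkerboard=(6, 9)):
    # Object points for one checkerboard view: shape (1, N, 3), z = 0
    objp = np.zeros((1, checkerboard[0]*checkerboard[1], 3), np.float32)
    objp[0, :, :2] = np.mgrid[0:checkerboard[0], 0:checkerboard[1]].T.reshape(-1, 2)
    objpoints, imgpoints = [], []
    for fname in image_files:
        gray = cv2.cvtColor(cv2.imread(fname), cv2.COLOR_BGR2GRAY)
        found, corners = cv2.findChessboardCorners(
            gray, checkerboard,
            cv2.CALIB_CB_ADAPTIVE_THRESH + cv2.CALIB_CB_FAST_CHECK + cv2.CALIB_CB_NORMALIZE_IMAGE)
        if found:
            objpoints.append(objp)
            imgpoints.append(corners)
    K_est = np.zeros((3, 3))
    D_est = np.zeros((4, 1))
    rvecs = [np.zeros((1, 1, 3), dtype=np.float64) for _ in objpoints]
    tvecs = [np.zeros((1, 1, 3), dtype=np.float64) for _ in objpoints]
    # Assumes the calibration photos are at the same resolution as DIM
    cv2.fisheye.calibrate(
        objpoints, imgpoints, DIM, K_est, D_est, rvecs, tvecs,
        cv2.fisheye.CALIB_RECOMPUTE_EXTRINSIC + cv2.fisheye.CALIB_FIX_SKEW,
        (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 30, 1e-6))
    return K_est, D_est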
#def distance(p0, p1):
#    return math.sqrt((p0[0] - p1[0])**2 + (p0[1] - p1[1])**2)
def search_rightprofile(i):
    return () # Right-profile scanning is currently short-circuited; remove this line to re-enable it
    # return profilefaceCascade.detectMultiScale(i,1.3,4,(cv2.CV_HAAR_DO_CANNY_PRUNING + cv2.CV_HAAR_FIND_BIGGEST_OBJECT + cv2.CV_HAAR_DO_ROUGH_SEARCH),(30,30))
    if scanright:
        return profilefaceCascade.detectMultiScale(i)
        # return profilefaceCascade.detectMultiScale(i, maxSize=(30,30))
    else:
        return ()
def search_leftprofile(i):
    return () # Left-profile scanning is currently short-circuited; remove this line to re-enable it
    if scanleft:
        revimage = cv2.flip(i, 1) # Flip the image so the profile cascade sees a right profile
        # return profilefaceCascade.detectMultiScale(i,1.3,4,(cv2.CV_HAAR_DO_CANNY_PRUNING + cv2.CV_HAAR_FIND_BIGGEST_OBJECT + cv2.CV_HAAR_DO_ROUGH_SEARCH),(30,30))
        return profilefaceCascade.detectMultiScale(revimage)
        # return profilefaceCascade.detectMultiScale(i, maxSize=(30,30))
    else:
        return ()
def search_frontface(i):
    # return frontalfaceCascade.detectMultiScale(i,1.3,4,(cv2.CV_HAAR_DO_CANNY_PRUNING + cv2.CV_HAAR_FIND_BIGGEST_OBJECT + cv2.CV_HAAR_DO_ROUGH_SEARCH),(30,30))
    return frontalfaceCascade.detectMultiScale(i)
    # return frontalfaceCascade.detectMultiScale(i, maxSize=(30,30))
def undistort(i, balance=0.0, dim2=None, dim3=None):
    # Sanity check the source dimensions
    dim1 = i.shape[:2][::-1] # dim1 is the dimension of the input image to un-distort
    assert dim1[0]/dim1[1] == DIM[0]/DIM[1], "Image to undistort needs to have same aspect ratio as the ones used in calibration"
    if not dim2:
        dim2 = dim1
    if not dim3:
        dim3 = dim1
    scaled_K = K * dim1[0] / DIM[0] # The values of K scale with the image dimension.
    scaled_K[2][2] = 1.0 # Except that K[2][2] is always 1.0
    # This is how scaled_K, dim2 and balance are used to determine the final K used to un-distort the image. The OpenCV documentation doesn't make this clear!
    new_K = cv2.fisheye.estimateNewCameraMatrixForUndistortRectify(scaled_K, D, dim2, np.eye(3), balance=balance)
    map1, map2 = cv2.fisheye.initUndistortRectifyMap(scaled_K, D, np.eye(3), new_K, dim3, cv2.CV_16SC2)
    return cv2.remap(i, map1, map2, interpolation=cv2.INTER_LINEAR, borderMode=cv2.BORDER_CONSTANT)
def findface(image):
    global lastface
    global correct_fisheye
    faces = ()
    # TODO: There is a better way to do this. Find it.
    # First scan: look for whichever face type was seen last
    if lastface == 1:
        faces = search_rightprofile(image)
        if len(faces) > 0:
            lastface = 1
            return faces
    elif lastface == 2:
        faces = search_leftprofile(image)
        if len(faces) > 0:
            lastface = 2
            return faces
    else:
        faces = search_frontface(image)
        if len(faces) > 0:
            lastface = 3
            return faces
    # Second scan
    if lastface == 1:
        faces = search_frontface(image)
        if len(faces) > 0:
            lastface = 3
            return faces
    elif lastface == 2:
        faces = search_rightprofile(image)
        if len(faces) > 0:
            lastface = 1
            return faces
    else:
        faces = search_leftprofile(image)
        if len(faces) > 0:
            lastface = 2
            return faces
    # Third scan
    if lastface == 1:
        faces = search_leftprofile(image)
        if len(faces) > 0:
            lastface = 2
            return faces
    elif lastface == 2:
        faces = search_frontface(image)
        if len(faces) > 0:
            lastface = 3
            return faces
    else:
        faces = search_rightprofile(image)
        if len(faces) > 0:
            lastface = 1
            return faces
    return ()
def circlefaces(image, faces):
    global lastface
    for (x, y, w, h) in faces:
        cv2.circle(image, (int(x+w/2), int(y+h/2)), int((w+h)/3), (255, 255, 0), 1)
    # Temporary: save the annotated image
    cv2.imwrite("tmp/img.{}.facetype{}.png".format(datetime.now().strftime("%Y%m%d.%H%M%S.%f"), lastface), image)
def distance_to_closest(faces):
    # Horizontal distance (in pixels) from the camera center to the nearest face center; negative values are to the left
    closestdistance = None
    for f in faces:
        x,y,w,h = f
        print("Face found at {},{} with width {} and height {}.".format(x,y,w,h))
        centerpoint = (w/2)+x
        distance = centerpoint - cameracenter[0]
        if closestdistance is None or abs(distance) < abs(closestdistance):
            print("Face closer to center detected. New target location: {} (ctr: {}) - distance: {}".format(centerpoint, cameracenter[0], distance))
            closestdistance = distance
    return closestdistance
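# Worked example of the math above (illustrative numbers, not real output): a face
# at x=300 with w=100 has centerpoint 350; at 1024x768 the camera center x is 512,
# so distance = 350 - 512 = -162, i.e. the face is 162 px left of center.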
def send_char(tc):
    try:
        bus.write_byte(SLAVE_ADDRESS, ord(tc))
    except Exception as e:
        print("Bus Error while sending {}: {}".format(tc, str(e)))
def stop():
    print("STOPPING")
    send_char(' ')
    return
def left(distance):
    send_char('a')
    if abs(distance) > 300:
        print('GO LEFT')
        return
    elif abs(distance) > 50:
        print('GO LEFT FOR {}s (divisor 175)'.format(1.0*abs(distance)/175.0))
        time.sleep(1.0*abs(distance)/175.0)
        stop()
    else:
        print('GO LEFT FOR {}s (divisor 100)'.format(1.0*abs(distance)/100.0))
        time.sleep(1.0*abs(distance)/100.0)
        stop()
def right(distance):
    send_char('d')
    if abs(distance) > 300:
        print("GO RIGHT")
        return
    elif abs(distance) > 50:
        print('GO RIGHT FOR {}s (divisor 175)'.format(1.0*abs(distance)/175.0))
        time.sleep(1.0*abs(distance)/175.0)
        stop()
    else:
        print('GO RIGHT FOR {}s (divisor 100)'.format(1.0*abs(distance)/100.0))
        time.sleep(1.0*abs(distance)/100.0)
        stop()
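# The timing rule above, worked through (illustrative numbers): a face more than
# 300 px off center keeps the turret turning until the next frame; at 200 px it
# turns for 200/175 ~= 1.14 s then stops; at 40 px it turns for 40/100 = 0.4 s then stops.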
def fire():
    print("FIRING!")
    send_char('F')
    return
if __name__ == "__main__":
    # allow the camera to warmup
    time.sleep(0.1)
    lastTime = time.time()*1000.0
    lastDetected = lastTime
    lastAction = None
    # capture frames from the camera
    for frame in camera.capture_continuous(rawCapture, format="bgr", use_video_port=True):
        # grab the raw NumPy array representing the image, then initialize the timestamp
        # and occupied/unoccupied text
        timeSinceDetected = 1000.0*time.time() - lastDetected
        print('Time: {}; Time since detected: {} ({}/{})'.format(time.time()*1000.0 - lastTime, timeSinceDetected, time_before_scan, time_after_scan))
        lastTime = time.time()*1000.0
        image = frame.array
        # image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY) # convert to greyscale
        if correct_fisheye:
            image = undistort(image, 0.8)
        faces = findface(image)
        if len(faces) == 0:
            print("No face found.")
            rawCapture.truncate(0)
            if timeSinceDetected > time_before_scan and timeSinceDetected < time_after_scan:
                if lastAction != "Scan":
                    lastAction = "Scan"
                    print("Beginning scanning...")
                    right(1000.0) # 1000 is arbitrary
                # Otherwise just keep doing what you're doing
            elif timeSinceDetected > time_after_scan:
                lastAction = "Stop"
                print("Stopping scanning...")
                lastDetected = time.time()*1000.0
                stop()
            else:
                lastAction = "Stop"
                stop()
            continue
        lastDetected = time.time() * 1000.0
        circlefaces(image, faces)
        distance = distance_to_closest(faces)
        if lastAction == "Scan":
            # Scanning, but detected a face. Stop and continue
            print("Face detected. Aborted scanning.")
            stop()
            lastAction = "Stop"
        elif abs(distance) < 15:
            if lastAction == "Fire":
                # Do nothing
                time.sleep(1)
            elif lastAction == "Stop":
                lastAction = "Fire"
                fire()
                print("Sleeping for 15 seconds...")
                time.sleep(15)
                print("Resuming...")
            else:
                lastAction = "Stop"
                stop()
        elif distance < 0:
            lastAction = "Left"
            left(distance)
        elif distance > 0:
            lastAction = "Right"
            right(distance)
        else:
            print("Face found but no action taken. Distance = {}; Last Action = {}".format(distance, lastAction))
        # clear the stream in preparation for the next frame
        rawCapture.truncate(0)