import rospy
import cv2
import numpy as np
import math
from sensor_msgs.msg import Image, LaserScan
from cv_bridge import CvBridge, CvBridgeError
from nav_msgs.msg import OccupancyGrid, Odometry
from geometry_msgs.msg import Pose, Point, Quaternion, Twist, PoseWithCovarianceStamped
from move_base_msgs.msg import MoveBaseAction, MoveBaseGoal
from tf import transformations
from tf.transformations import euler_from_quaternion, quaternion_from_euler
import actionlib
from operator import itemgetter

PI = math.pi

class objectFinder:
    # Class-level variables, mainly used for boolean checks and access to data in class functions.
    # These are shared across instances, which is fine here as only one objectFinder is created.
    occupancyGrid = None
    x = 0
    y = 0
    z = 0
    resolution = 0
    posex = 0
    posey = 0
    angle = None
    distanceToObject = None
    goalCount = 0
    goals = []
    masked = {}
    quat = transformations.quaternion_from_euler(0, 0, math.radians(90))
    objectFound = False
    inAction = False
    centering = False
    finalAlignment = False
    search = False
    currMask = None
    currColor = None
    skipColor = []
    blueFound = False
    greenFound = False
    redFound = False
    yellowFound = False
    foundColors = []
    stopTurn = False
    objectWayPoints = []
    objectWaypointCount = 0
    currObjectWaypoint = []
    allFound = False

    def __init__(self):
        # Class initialisation

        self.bridge = CvBridge()  # Conversion between ROS camera messages and OpenCV image formats
        cv2.startWindowThread()
        rospy.init_node('test')

        ########################### PUBLISHERS ######################################################
        # Publisher for simple movement commands in the form of linear and angular velocities
        self.cmd_vel_pub = rospy.Publisher('/mobile_base/commands/velocity', Twist, queue_size=1)

        ########################### SUBSCRIBERS #####################################################
        # Subscribes to the turtlebot's camera, passing each message to imageCB
        self.image_sub = rospy.Subscriber("/camera/rgb/image_raw", Image, self.imageCB)
        # Subscribes to odometry so the robot's current position can be used
        self.odom_sub = rospy.Subscriber('/odom', Odometry, self.poseCB)
        # Subscribes to the pseudo laser; pseudo because the turtlebot has no laser scanner, the scan
        # is inferred from the point cloud, which combines the RGB image with depth (RGBD). The point
        # cloud could be used directly, but the apertures of the RGB and depth cameras differ, so not
        # all RGB values have a depth and an averaging filter would need to be passed over them.
        self.laser_sub = rospy.Subscriber("/scan", LaserScan, self.scanCB)
        # Subscribes to the map topic to get the occupancy grid
        self.mapSub = rospy.Subscriber("/map", OccupancyGrid, self.callback)

        # Twist object used as a template for linear/angular velocity messages
        self.twist = Twist()
        # Simple action client for move_base goals; this allows use of the ROS navigation stack
        # and its global and local path planners
        self.client = actionlib.SimpleActionClient('/move_base', MoveBaseAction)

        # Waits for the action server to become available, to avoid sending navigation goals
        # before it is initialised
        wait = self.client.wait_for_server()
        if not wait:
            rospy.logerr("action server not available")
        print(wait)
        # Issue the first navigation goal
        self.movebase_client()
        # Main method that manages the core logic loop
        self.classMain()
        rospy.spin()

    def classMain(self):
        # Runs the main loop until all of the objects have been found
        while self.allFound == False:
            # Constantly checks whether the current frame contains one of the objects, via colour thresholding
            check = self.checkimage()
            if check:
                # If an object has been seen, align the camera with the object and check the distance
                # (the laser scan has a limit of 5m)
                self.client.cancel_all_goals()
                inDistance = self.align()
                print("aligned with object")
                if inDistance:
                    # Gets the robot's current pose and its distance from the detected object
                    currAngle, currPosX, currPosY, currDis = self.angle, self.posex, self.posey, self.distanceToObject
                    # Converts polar coordinates (bearing + distance) to Cartesian using trigonometry
                    x, y = self.polar2cart(currPosX, currPosY, currAngle, currDis)
                    # Generates 4 coordinates around the object, offset by +-0.7m along the X and Y axes
                    # (replacing any points lingering from a previous search)
                    self.objectWayPoints = self.objectWayPointGen(x, y)

                    # Create a move_base goal for the first point around the object
                    goal = MoveBaseGoal()
                    goal.target_pose.header.frame_id = 'map'
                    goal.target_pose.header.stamp = rospy.Time.now()
                    print(len(self.goals))
                    goal.target_pose.pose = Pose(Point(self.objectWayPoints[self.objectWaypointCount][0], self.objectWayPoints[self.objectWaypointCount][1], 0), Quaternion(self.quat[0], self.quat[1], self.quat[2], self.quat[3]))
                    print(goal)
                    self.search = True
                    print(self.search)
                    # Sends the goal to the move_base client
                    self.client.send_goal(goal, self.done_cb_object, self.active_cb_object, self.feedback_cb_object)
                    print("in search loop")
                    # Waits until the object search has completed (the done callback clears self.search)
                    while self.search == True:
                        rospy.sleep(0.1)
                    # Once the search has completed, returns to the goal it was navigating to before searching
                    self.movebase_client()
                else:
                    # If the object is too far away to range, continue to the waypoint it was navigating to
                    self.movebase_client()

    ############################################# CALLBACK FUNCTIONS ###############################################################

    # Callback for odometry data; stores the robot's current pose in class variables
    def poseCB(self, data):
        self.posex = data.pose.pose.position.x
        self.posey = data.pose.pose.position.y
        # Converts quaternions to Euler angles for use in trigonometry
        # (only the yaw, rotation about the Z axis, is needed)
        currQuat = [data.pose.pose.orientation.x, data.pose.pose.orientation.y, data.pose.pose.orientation.z, data.pose.pose.orientation.w]
        roll, pitch, yaw = euler_from_quaternion(currQuat)
        self.angle = yaw

    # Callback for the laser scan (interpreted from the point cloud, as noted above)
    def scanCB(self, scan):
        # Note: this only modifies the local copy of the message; it does not reconfigure the sensor
        scan.angle_max = 1
        scan.angle_min = -1
        # Distance at the centre of the scan, used as it lies at the same bearing as the robot's yaw
        distance = scan.ranges[len(scan.ranges) // 2]
        self.distanceToObject = distance

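    # For illustration: if the scan contained 640 readings (a made-up count), ranges[320] would be
    # the beam straight ahead, matching the heading that polar2cart later uses as the bearing.
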
    # Callback for the image subscriber
    def imageCB(self, rgb):
        # Creates an OpenCV window to see the robot's viewpoint
        cv2.namedWindow("window", 1)

        rgbImage = self.bridge.imgmsg_to_cv2(rgb, "bgr8")
        # Converts the BGR image to HSV, decoupling hue, saturation and value
        # for more accurate colour slicing
        hsvImage = cv2.cvtColor(rgbImage, cv2.COLOR_BGR2HSV)

        # Stores the height and width of the image, used for masking half of the image so the robot
        # does not detect poles behind small objects (the laser scan distance would be inaccurate)
        self.h, self.w, self.d = hsvImage.shape
        self.searchHalf = self.h // 2

        # Upper and lower bounds for colour slicing
        lower_blue = np.array([110, 50, 50])
        upper_blue = np.array([130, 255, 255])

        lower_green = np.array([50, 150, 100])
        upper_green = np.array([90, 255, 255])

        lower_red = np.array([0, 150, 30])
        upper_red = np.array([8, 255, 150])

        lower_yellow = np.array([27, 195, 100])
        upper_yellow = np.array([50, 255, 255])

        # Threshold each colour to create masked images for colour detection
        # and store them in class variables for later use
        bMask = cv2.inRange(hsvImage, lower_blue, upper_blue)
        gMask = cv2.inRange(hsvImage, lower_green, upper_green)
        rMask = cv2.inRange(hsvImage, lower_red, upper_red)
        yMask = cv2.inRange(hsvImage, lower_yellow, upper_yellow)
        self.masked["blue"] = bMask
        self.masked["green"] = gMask
        self.masked["red"] = rMask
        self.masked["yellow"] = yMask

        # Output the image to the OpenCV window
        cv2.imshow("window", hsvImage)
        cv2.waitKey(1)

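    # Note: in OpenCV, hue runs 0-179 and red wraps around the 0/180 boundary, so the single 0-8
    # band above can miss reds near 180. If that proves a problem, a second band could be OR-ed in;
    # a minimal sketch (the exact thresholds here are assumptions, not from the original):
    #   rMask2 = cv2.inRange(hsvImage, np.array([172, 150, 30]), np.array([179, 255, 150]))
    #   rMask = cv2.bitwise_or(rMask, rMask2)
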
    # Callback for the map subscriber; processes the occupancy grid into a set of room waypoints
    def callback(self, data):
        # Origin position, used as an offset when calculating x, y coordinates,
        # as the map origin and the robot's origin differ
        self.x = data.info.origin.position.x
        self.y = data.info.origin.position.y
        self.z = data.info.origin.position.z

        onedMap = data.data
        width = data.info.width
        height = data.info.height
        resolution = data.info.resolution
        # Store the map's shape and resolution scale, used to turn map coordinates into world coordinates
        self.resolution = resolution
        numpyArr = np.asarray(onedMap)
        # Reshape the 1D array (row-major order) into a 2D array so coordinates can be generated
        b = np.reshape(numpyArr, (height, width))
        # Saves the occupancy grid in a class variable
        self.occupancyGrid = b
        # Occupancy grid values range from 0-100 for occupied, with -1 for unknown. The map given
        # only contained 100, -1 or 0, so 100 and -1 are changed to 255 to form a binary image
        for i in range(height):
            for j in range(width):
                if b[i][j] == 100:
                    b[i][j] = 255
                elif b[i][j] == -1:
                    b[i][j] = 255

        # Kernels for morphological operations on the map
        kernel = np.ones((8, 8), np.uint8)
        erosionKernel = np.ones((4, 4), np.uint8)

        # Dilates the map to accentuate its features and minimise any noise
        dilatedMap = cv2.dilate(b.astype(np.uint8), kernel, iterations=2)
        # Erodes the map back down, improving the clarity of the edges for Canny edge detection
        erosion = cv2.erode(dilatedMap, erosionKernel, iterations=2)
        # Canny edge detection on the eroded map to get the inner edge for Hough line detection
        edgesDetected = cv2.Canny(erosion, 100, 200, 3)
        # Empty image that will hold the results of Hough line detection
        lineIMG = np.zeros((height, width), np.uint8)
        # Probabilistic Hough line detection on the edge-detected copy of the map
        lines = cv2.HoughLinesP(edgesDetected, rho=1, theta=np.pi / 180, threshold=50, minLineLength=86, maxLineGap=60)
        # Iterates through the lines found by Hough line detection
        for line in lines:
            # Iterates through the endpoints of each line
            for x1, y1, x2, y2 in line:
                # Calculates the gradient (difference in y / difference in x); a vertical line
                # would divide by zero, so it is given the infinite gradient findPoint expects
                if x2 == x1:
                    gradient = float('-inf')
                else:
                    gradient = (float(y2) - y1) / (float(x2) - x1)
                # Centre point of the line, from which it will be extended outwards
                centerX = (x1 + x2) / 2
                centerY = (y1 + y2) / 2

                # Distance between the centre point and the first endpoint (the same for
                # both endpoints, as the centre is equidistant from them)
                distance1P = math.hypot(centerX - x1, centerY - y1)

                # Helper that finds the new endpoints of the line extended by the same amount on both ends
                newX, newY, newX2, newY2 = self.findPoint(distance1P, centerX, centerY, gradient, 10)
                # Draws the extended line onto the line image
                cv2.line(lineIMG, (int(newX), int(newY)), (int(newX2), int(newY2)), (255, 0, 0), 6)

        print(lines)
        # Finds the contours (joined points) in the line image, separating the image into
        # different rooms (three return values: OpenCV 3 API)
        im2, contours, hierarchy = cv2.findContours(lineIMG, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
        for contour in contours:
            # Iterates through the detected shapes and calculates the coordinates of each one's centre point
            M = cv2.moments(contour)
            if M['m00'] == 0:
                continue  # skip degenerate contours to avoid division by zero
            cx = int(M['m10'] / M['m00'])
            cy = int(M['m01'] / M['m00'])
            # Converts the map coordinate into a world coordinate usable by the
            # navigation stack's global planner
            worldX, worldY = self.coordConverter(cx, cy)
            coords = [worldX, worldY]
            self.goals.append(coords)
            # Draw a circle around it for visualisation purposes
            cv2.circle(lineIMG, (cx, cy), 10, 255, -1)
        self.pointCleaner()

        # Sort the goals by Y axis value (searches from the bottom of the room to the top). There
        # were issues with the local planner going left to right/vice versa and top to bottom;
        # changing the navigation stack's costmap parameters might help, but that is outside the
        # scope of this project
        sortedGoals = sorted(self.goals, key=lambda x: x[1])
        self.goals = sortedGoals
        cv2.drawContours(im2, contours, -1, (0, 255, 0), 3)
        cv2.imwrite("Gray_Image.jpg", lineIMG)

    # Rotates the robot so the object is centred in the camera, so the yaw can be used as the
    # bearing when converting polar to Cartesian coordinates. cv2.moments returns features of
    # the thresholded image.
    def align(self):
        isAligned = False
        while isAligned == False:
            mask = self.masked[self.currColor]
            # Masks out half of the image, stopping the robot aligning with objects hidden behind
            # small objects, where the laser scan distance would belong to the blocking object
            mask[self.searchHalf:self.h, 0:self.w] = 0
            M = cv2.moments(mask)
            # If the area of the moment is above 0, calculate the centroid and its offset
            # from the image centre
            if M['m00'] > 0:
                cx = int(M['m10'] / M['m00'])
                cy = int(M['m01'] / M['m00'])
                cv2.circle(mask, (cx, cy), 20, (0, 0, 255), -1)
                err = cx - self.w // 2
                # Centred within a pixel of the image centre (the original compared
                # -err/100 == 0.01 exactly, which only fired for err == -1)
                if abs(err) <= 1:
                    # If the scan is out of range (max 5m) it returns NaN; a distance is needed for
                    # the trigonometry, so stop looking for this colour until the next waypoint
                    if math.isnan(float(self.distanceToObject)):
                        print(self.distanceToObject)
                        print("object too far to get distance")
                        self.skipColor.append(self.currColor)
                        print(self.skipColor)
                        self.currColor = None
                        return False
                    else:
                        # Returns True if the distance is not NaN; used in polar2cart
                        # to calculate the object's coordinates
                        print(self.distanceToObject)
                        return True
                # Publish a twist message on the velocity publisher, rotating towards
                # the centre of the object
                if err != 0:
                    self.twist.angular.z = -float(err) / 100
                    self.cmd_vel_pub.publish(self.twist)

    # Final alignment at close range: centres on the object and confirms it is within 1m
    def alignFinal(self):
        isAligned = False
        while isAligned == False:
            mask = self.masked[self.currColor]
            mask[self.searchHalf:self.h, 0:self.w] = 0
            M = cv2.moments(mask)
            if M['m00'] > 0:
                cx = int(M['m10'] / M['m00'])
                cy = int(M['m01'] / M['m00'])
                cv2.circle(mask, (cx, cy), 20, (0, 0, 255), -1)
                err = cx - self.w // 2
                if abs(err) <= 1:
                    if self.distanceToObject < 1:
                        print(self.distanceToObject)
                        print("Object within 1m")
                        print(self.skipColor)
                        isAligned = True
                        self.noteFound()
                        self.currColor = None
                        return True

                if err != 0:
                    self.twist.angular.z = -float(err) / 100
                    self.cmd_vel_pub.publish(self.twist)
            else:
                return False

    # Checks whether the current colour's mask contains a large enough blob during the close search
    def checkimageClose(self):
        i = self.masked[self.currColor]
        i[self.searchHalf:self.h, 0:self.w] = 0
        M = cv2.moments(i)
        if M['m00'] > 150000:
            print("found object in close search")
            return True
        else:
            return False

    # Checks every colour mask (except skipped or already-found colours) for a large enough blob,
    # recording which colour was seen
    def checkimage(self):
        for index, i in self.masked.items():
            if index not in self.skipColor and index not in self.foundColors:
                i[0:self.searchHalf, 0:self.w] = 0
                M = cv2.moments(i)
                if M['m00'] > 150000:
                    print("at finding")
                    self.currColor = index
                    print(self.currColor)
                    return True
        return False

    # Records which colour has been found and clears the current search state
    def noteFound(self):
        print("in noteFound Function")
        if self.currColor == "blue":
            self.blueFound = True
            self.foundColors.append(self.currColor)
            print("Blue found at", self.currObjectWaypoint)
            self.currObjectWaypoint = []
            self.currColor = None
        elif self.currColor == "green":
            self.greenFound = True
            self.foundColors.append(self.currColor)
            print("Green found at", self.currObjectWaypoint)
            self.currObjectWaypoint = []
            self.currColor = None
        elif self.currColor == "red":
            self.redFound = True
            self.foundColors.append(self.currColor)
            print("Red found at", self.currObjectWaypoint)
            self.currObjectWaypoint = []
            self.currColor = None
        elif self.currColor == "yellow":
            self.yellowFound = True
            self.foundColors.append(self.currColor)
            print("Yellow found at", self.currObjectWaypoint)
            self.currObjectWaypoint = []
            self.currColor = None

    # Generates four waypoints around the object, offset by +-0.7m along the X and Y axes
    def objectWayPointGen(self, x, y):
        print(x, y)
        self.objectWayPoints = []
        toCheck = []
        toCheck.append([x + 0.7, y])
        toCheck.append([x - 0.7, y])
        toCheck.append([x, y + 0.7])
        toCheck.append([x, y - 0.7])
        return toCheck

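    # For illustration: an object estimated at (2.0, 3.0) (made-up values) yields the waypoints
    # [2.7, 3.0], [1.3, 3.0], [2.0, 3.7] and [2.0, 2.3], each 0.7m from the object.
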
    # Issues a move_base goal for the current object waypoint (not called in the main flow;
    # classMain builds this goal inline instead)
    def movebase_client_object(self):
        print("starting Navigation")
        goal = MoveBaseGoal()
        goal.target_pose.header.frame_id = 'map'
        goal.target_pose.header.stamp = rospy.Time.now()
        print(len(self.goals))
        goal.target_pose.pose = Pose(Point(self.objectWayPoints[self.objectWaypointCount][0], self.objectWayPoints[self.objectWaypointCount][1], 0), Quaternion(self.quat[0], self.quat[1], self.quat[2], self.quat[3]))
        print(goal)
        self.client.send_goal(goal, self.done_cb_object, self.active_cb_object, self.feedback_cb_object)
        rospy.spin()

    def active_cb_object(self):
        rospy.loginfo("Active object goal")

    def feedback_cb_object(self, feedback):
        pass

    # Done callback for object-waypoint goals (status values follow actionlib's GoalStatus)
    def done_cb_object(self, status, result):
        if status == 2:
            rospy.loginfo("Goal cancelled after starting")
        if status == 3:
            if self.objectWaypointCount < (len(self.objectWayPoints) - 1):
                # Spin on the spot watching for the object. The target yaw is the start yaw plus a
                # small offset; the original compared the angles with ==, which never triggers for
                # floats, so a tolerance is used instead. Yaw wrap-around at +-180deg is not handled.
                self.stopTurn = False  # reset so the turn also runs at later waypoints
                myYaw = math.degrees(self.angle) + 3
                while not self.stopTurn:
                    if abs(math.degrees(self.angle) - myYaw) < 1.0:
                        self.stopTurn = True
                    else:
                        self.twist.angular.z = 0.6
                        self.cmd_vel_pub.publish(self.twist)
                    check = self.checkimageClose()
                    if check:
                        print("check")
                        self.stopTurn = True
                isFound = self.alignFinal()
                print(isFound)
                if not isFound:
                    print("not found, moving to next waypoint")
                    self.objectWaypointCount += 1
                    self.skipColor = []
                    goal = MoveBaseGoal()
                    goal.target_pose.header.frame_id = 'map'
                    goal.target_pose.header.stamp = rospy.Time.now()
                    goal.target_pose.pose = Pose(Point(self.objectWayPoints[self.objectWaypointCount][0], self.objectWayPoints[self.objectWaypointCount][1], 0), Quaternion(self.quat[0], self.quat[1], self.quat[2], self.quat[3]))
                    self.client.send_goal(goal, self.done_cb_object, self.active_cb_object, self.feedback_cb_object)
                    rospy.loginfo("Goal reached")
                if isFound:
                    self.noteFound()
                    self.stopTurn = False
                    print("Object found, exiting object waypoint navigation")
                    self.search = False
            else:
                print("visited all object waypoints")
                self.search = False
        if status == 4:
            rospy.loginfo("Goal pose aborted by action server")
            self.search = False
        if status == 5:
            rospy.loginfo("Goal rejected")
        if status == 8:
            rospy.loginfo("Cancelled before execution")
        print(result)

    # Converts a bearing and distance from the robot's pose into world X, Y coordinates
    def polar2cart(self, x, y, angle, dist):
        print(x, "curr X", y, "curr Y", angle, "curr Angle", dist, "curr Dist")
        newx = (x + math.cos(angle) * dist)
        newy = (y + math.sin(angle) * dist)
        print(newx, newy)
        return newx, newy

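    # For illustration: a robot at (1.0, 2.0) with yaw 0 and an object 1.5m ahead (made-up
    # values) gives (1.0 + cos(0) * 1.5, 2.0 + sin(0) * 1.5) = (2.5, 2.0).
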
    # Finds the new endpoints of a line extended by `incr` beyond each end, given the distance
    # from the centre point to an endpoint (dis), the centre point (x1, y1) and the gradient m
    def findPoint(self, dis, x1, y1, m, incr):
        fullDistance = dis + incr
        if m == 0.0:
            # Horizontal line: extend along X only
            endX = x1 + fullDistance
            endY = y1
            endX2 = x1 - fullDistance
            endY2 = y1
        elif m == float('-inf'):
            # Vertical line: extend along Y only
            endX = x1
            endY = y1 + fullDistance
            endX2 = x1
            endY2 = y1 - fullDistance
        else:
            # General case: move fullDistance along the line using dx = d / sqrt(1 + m^2)
            dx = (fullDistance / math.sqrt(1 + (m * m)))
            dy = m * dx
            endX = x1 + dx
            endY = y1 + dy
            endX2 = x1 - dx
            endY2 = y1 - dy
        return endX, endY, endX2, endY2

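    # For illustration: a horizontal line (m = 0) centred at (10, 10) with dis = 5 and
    # incr = 10 (made-up values) is extended to the endpoints (25, 10) and (-5, 10).
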
    # Removes waypoints that are unsuitable for this specific map; the hard-coded indices
    # depend on the contour order of the map used in this project
    def pointCleaner(self):
        print(len(self.goals))
        del self.goals[7]
        del self.goals[4]
        del self.goals[3]

    def active_cb(self):
        rospy.loginfo("Active goal")

    def feedback_cb(self, feedback):
        pass

    # Performs a full 360-degree rotation on the spot, using elapsed time at a fixed
    # angular speed to estimate the angle turned
    def rotate(self):
        print("Let's rotate your robot")

        angular_speed = 1  # rad/s
        relative_angle = 2 * PI  # one full rotation, in radians

        # Negative Z velocity gives a clockwise rotation
        self.twist.angular.z = -abs(angular_speed)

        # Record the start time for the angle calculation
        t0 = rospy.Time.now().to_sec()
        current_angle = 0

        while current_angle < relative_angle:
            self.cmd_vel_pub.publish(self.twist)
            t1 = rospy.Time.now().to_sec()
            current_angle = angular_speed * (t1 - t0)

        # Force the robot to stop
        self.twist.angular.z = 0
        self.cmd_vel_pub.publish(self.twist)

    # Done callback for room-waypoint goals; on arrival the robot does a full rotation to scan
    # the room, then moves on to the next waypoint
    def done_cb(self, status, result):
        if status == 2:
            rospy.loginfo("Goal cancelled after starting")
        if status == 3:
            self.skipColor = []
            self.rotate()
            if self.goalCount < (len(self.goals) - 1):
                if self.currColor is None:
                    print("not found, moving to next waypoint")
                    self.skipColor = []
                    self.goalCount += 1
                    goal = MoveBaseGoal()
                    goal.target_pose.header.frame_id = 'map'
                    goal.target_pose.header.stamp = rospy.Time.now()
                    print(len(self.goals))
                    goal.target_pose.pose = Pose(Point(self.goals[self.goalCount][0], self.goals[self.goalCount][1], 0), Quaternion(self.quat[0], self.quat[1], self.quat[2], self.quat[3]))
                    self.client.send_goal(goal, self.done_cb, self.active_cb, self.feedback_cb)
                rospy.loginfo("Goal reached")
            else:
                print("all waypoints visited")
        if status == 4:
            # Goal aborted by the action server: retry from the previous waypoint
            goal = MoveBaseGoal()
            goal.target_pose.header.frame_id = 'map'
            goal.target_pose.header.stamp = rospy.Time.now()
            goal.target_pose.pose = Pose(Point(self.goals[self.goalCount - 1][0], self.goals[self.goalCount - 1][1], 0), Quaternion(self.quat[0], self.quat[1], self.quat[2], self.quat[3]))
            self.client.send_goal(goal, self.done_cb, self.active_cb, self.feedback_cb)
            rospy.loginfo("Goal pose aborted by action server")
        if status == 5:
            rospy.loginfo("Goal rejected")
        if status == 8:
            rospy.loginfo("Cancelled before execution")
        print(result)

    # Converts a map (grid cell) coordinate into a world coordinate, scaling by the map
    # resolution and offsetting by the map origin
    def coordConverter(self, pointX, pointY):
        worldX = (pointX * self.resolution) + self.x
        worldY = (pointY * self.resolution) + self.y
        return worldX, worldY

    # Inverse conversion: world coordinate back into a costmap/grid coordinate
    def world2cost(self, pointX, pointY):
        costX = (pointX - self.x) / self.resolution
        costY = (pointY - self.y) / self.resolution
        return costX, costY

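    # For illustration: with a resolution of 0.05 m/cell and a map origin of (-10.0, -10.0)
    # (made-up values), grid cell (200, 200) converts to world (200 * 0.05 - 10.0) = (0.0, 0.0),
    # and world2cost maps (0.0, 0.0) straight back to (200.0, 200.0).
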
    # Issues the move_base goal for the current room waypoint
    def movebase_client(self):
        print("starting Navigation")
        goal = MoveBaseGoal()
        goal.target_pose.header.frame_id = 'map'
        goal.target_pose.header.stamp = rospy.Time.now()
        print(len(self.goals))
        goal.target_pose.pose = Pose(Point(self.goals[self.goalCount][0], self.goals[self.goalCount][1], 0), Quaternion(self.quat[0], self.quat[1], self.quat[2], self.quat[3]))
        self.client.send_goal(goal, self.done_cb, self.active_cb, self.feedback_cb)


if __name__ == '__main__':
    objectFinder()
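
# To run: start a ROS master with a turtlebot (simulated or real), move_base and a map server
# publishing the topics used above, then execute this script with rosrun. The package and file
# names below are placeholders, not from the original source:
#   rosrun <your_package> object_finder.py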