Commit 1bed8728 authored by Mathieu Reymond

training and running process

parent f84a5677
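
For orientation, a minimal, ROS-free sketch of the train-then-sort flow this commit wires up: during training, each navigator button press labels the current object's features with the arm that sorted it, the labelled samples train a decision tree, and at run time the tree's prediction picks the sorting side. ToyTree and the hard-coded feature dictionaries below are illustrative stand-ins, not code from this repository (the real code uses DecisionTreeGR, baxter_interface navigators and the Baxter arms).

# Illustrative stand-in only: mirrors the control flow of the diff below
# without ROS, baxter_interface or DecisionTreeGR.
class ToyTree(object):
    def __init__(self):
        self.samples = []
        self.table = {}

    def add_training_sample(self, features):
        # one labelled example per button press during training
        self.samples.append(dict(features))

    def train(self):
        # trivial "training": remember which direction each feature combination got
        for s in self.samples:
            key = (s['shape'], s['color'], s['size'])
            self.table[key] = s['direction']

    def classify(self, features):
        key = (features['shape'], features['color'], features['size'])
        return self.table.get(key, 'LEFT')

if __name__ == '__main__':
    tree = ToyTree()
    # training phase: the pressed arm provides the label
    tree.add_training_sample({'shape': 'SPHERE', 'color': 'RED',
                              'size': 'SMALL', 'direction': 'LEFT'})
    tree.add_training_sample({'shape': 'CUBE', 'color': 'BLUE',
                              'size': 'BIG', 'direction': 'RIGHT'})
    tree.train()
    # run phase: classify a new object and sort to the predicted side
    direction = tree.classify({'shape': 'CUBE', 'color': 'BLUE', 'size': 'BIG'}).lower()
    print('sorting to the %s side' % direction)
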
@@ -24,61 +24,75 @@ class BaxterAgent(object):
         self.is_train = config['is_train']
         self.navs = {'left': baxter_interface.Navigator('left'),
-                     'right': baxter_interface.Navigator('right')}
+                     'right': baxter_interface.Navigator('right'),
+                     'torso_left': baxter_interface.Navigator('torso_left'),
+                     'torso_right': baxter_interface.Navigator('torso_right')}
         if self.is_train:
             self.tree = DecisionTreeGR()
-            self.train()
         else :
             self.tree = self.load_decision_tree()
-            self.run()
-    def _train_sort_left(self, msg):
-        if msg:
-            self.baxter.sort('left')
+    def extract_features(self, img):
+        return {'shape': 'SPHERE',
+                'color': 'RED',
+                'size': 'SMALL'}
-    def _train_sort_right(self, msg):
+    def _train_sort(self, arm_name, msg):
+        arm = getattr(self.baxter, arm_name)
         if msg:
-            self.baxter.sort('right')
-    def _train_cancel(self, msg):
+            arm.move(arm.top)
+            rospy.sleep(2)
+            img = arm.take_snapshot()
+            self.baxter.sort(arm_name)
+            f = self.extract_features(img)
+            f['direction'] = arm_name.upper()
+            self.tree.add_training_sample(f)
+    def _train_end(self, msg):
         if msg:
             self.is_train = False
+            self.tree.train()
+            self.save_decision_tree()
     def train(self):
         self.drawTrainingUI()
-        self.navs['left'].button0_changed.connect(self._train_sort_left)
-        self.navs['right'].button0_changed.connect(self._train_sort_right)
-        self.navs['left'].button1_changed.connect(self._train_cancel)
-        self.navs['right'].button1_changed.connect(self._train_cancel)
+        train_left = lambda m: self._train_sort('left', m)
+        train_right = lambda m: self._train_sort('right', m)
+        self.navs['left'].button0_changed.connect(train_left)
+        self.navs['right'].button0_changed.connect(train_right)
+        self.navs['left'].button1_changed.connect(self._train_end)
+        self.navs['right'].button1_changed.connect(self._train_end)
+        # blocking call until end button is clicked
         while self.is_train:
-            ######
-            #TODO# take snapshot and extract features if object was added to the scene. Update scene accordingly, send image to baxte face
-            ######
-            features = {}
+            pass
         print('done training')
-        #self.tree.train()
-        #self.save_decision_tree() #Store trained decision tree
+    def _sort(self, msg):
+        if msg:
+            self.baxter.left.move(self.baxter.left.top)
+            img = self.baxter.left.take_snapshot()
+            features = self.extract_features(img)
+            direction = self.tree.classify(features).lower()
+            if direction != 'left':
+                self.baxter.left.move(self.baxter.left.neutral)
+            self.baxter.sort(direction)
+    def _run_end(self, m):
+        if m:
+            self.is_run = False
     def run(self):
-        self.drawProductionUI()
-        run = True
-        while run:
-            #TODO# Check if object on table and retrieve features
-            features = {}
-            #Listen to navigators for both sides
-            left_nav = rospy.wait_for_message('/robot/navigators/left_itb/state', ITBState, 3)
-            right_nav = rospy.wait_for_message('/robot/navigators/right_itb/state', ITBState, 3)
-            if left_nav.buttons[0] or right_nav.buttons[0]:
-                side = self.tree.classify(features) #Choose sorting side
-                if side == 'RIGHT':
-                    self.sort_right()
-                elif side == 'LEFT':
-                    self.sort_left()
-            elif left_nav.buttons[1] or right_nav.buttons[1]:
-                run = False
+        self.init_move()
+        self.is_run = True
+        self.navs['torso_left'].button0_changed.connect(self._sort)
+        self.navs['torso_right'].button0_changed.connect(self._sort)
+        self.navs['torso_left'].button1_changed.connect(self._run_end)
+        self.navs['torso_right'].button1_changed.connect(self._run_end)
+        # blocking call until end button is clicked
+        while self.is_run:
+            pass
+        print('done')
     def load_decision_tree(self):
@@ -28,7 +28,7 @@ BIG = 2
 SHAPE = 'shape'
 CUBE_STR = 'CUBE'
-SPHERE_STR = 'SPEHRE'
+SPHERE_STR = 'SPHERE'
 TRIANGLE_STR = 'TRIANGLE'
 CUBE = 0
 SPHERE = 1
@@ -14,8 +14,9 @@ config = {
 def main():
     moveit_commander.roscpp_initialize(sys.argv)
     rospy.init_node('garbage_bot', anonymous=True)
-    BaxterAgent(config)
+    b = BaxterAgent(config)
+    b.train()
+    b.run()
 if __name__ == '__main__':
@@ -47,10 +47,22 @@ class Arm(moveit_commander.MoveGroupCommander):
     def ready_camera(self):
         arm = self.get_name()[:-4] #remove _arm from name
-        callback = lambda m: self._regular_snapshot(m)
-        subscriber = rospy.Subscriber('cameras/'+arm+'_hand_camera/image', Image, callback)
-        baxter_interface.CameraController(arm+'_hand_camera').open()
+        #callback = lambda m: self._regular_snapshot(m)
+        #subscriber = rospy.Subscriber('cameras/'+arm+'_hand_camera/image', Image, callback)
+        msg = rospy.wait_for_message('cameras/'+arm+'_hand_camera/image', Image)
+        self._msg_to_jpg(msg, 'snap')
+        # not necessary in simulator
+        #baxter_interface.CameraController(arm+'_hand_camera').open()
+    def take_snapshot(self):
+        arm = self.get_name()[:-4] #remove _arm from name
+        msg = rospy.wait_for_message('cameras/'+arm+'_hand_camera/image', Image)
+        try:
+            # Convert your ROS Image message to OpenCV2
+            cv2_img = self.bridge.imgmsg_to_cv2(msg, "bgr8")
+        except cvb.CvBridgeError, e:
+            print(e)
+        return cv2_img
 class Arms(object):
     def __init__(self, name, *args):
@@ -86,7 +98,10 @@ class Arms(object):
 class Baxter(object):
     def __init__(self):
-        self.left = Arm('left_arm', neutral=[0.64623441, 0.84175585, 0.0570087, -0.3827256, 0.92221457, 0.02060252, 0.05115608])
+        self.left = Arm('left_arm', neutral=[0.64623441, 0.84175585, 0.0570087, -0.3827256, 0.92221457, 0.02060252, 0.05115608],
+                        top=[0.74667248, 0.02808032, 0.50554368, -0.00295978, 0.97005472, -0.02409497, 0.24167026],
+                        pick=[7.54538492e-01, 3.44879303e-04, -6.50381151e-02, 5.63301315e-03, 9.99927904e-01, -9.66346662e-03, -4.36723168e-03],
+                        grasp=[0.75390833, -0.00113117, -0.14, 0.00568284, 0.99992606, -0.00980662, -0.0044056])
         self.right = Arm('right_arm',
                          neutral=[0.649446, -0.83735957, 0.06168316, 0.38299498, 0.92211736, -0.02071508, 0.050846],
                          top=[0.74667248, 0.02808032, 0.50554368, -0.00295978, 0.97005472, -0.02409497, 0.24167026],
@@ -120,10 +135,7 @@ class Baxter(object):
         return self._commander.get_planning_frame()
     def sort(self, arm):
-        if arm is 'left':
-            arm = self.left
-        else:
-            arm = self.right
+        arm = getattr(self, arm)
         arm.move(arm.pick)
         arm.gripper.open()
         rospy.sleep(3)