#!/usr/bin/env python
# coding: utf-8

# # A method to use an IMU to balance Poppy with movements of his arms

# In[1]:

from poppy.creatures import PoppyHumanoid

poppy = PoppyHumanoid(simulator='vrep')

get_ipython().run_line_magic('pylab', 'inline')

# The `time` class below replaces the default Python clock with the simulated
# V-REP clock, so waits and timestamps follow simulation time rather than
# wall-clock time.
import time as real_time


class time:
    """Clock facade backed by the simulated time of `robot` in V-REP.

    Mirrors the stdlib `time.time()` / `time.sleep()` API so the rest of the
    notebook can use the simulated clock transparently.
    """

    def __init__(self, robot):
        self.robot = robot

    def time(self):
        # Current simulated time (seconds) reported by V-REP.
        return self.robot.current_simulation_time

    def sleep(self, t):
        # Poll the simulated clock until `t` seconds have elapsed; the 0.01
        # margin compensates for the simulation step granularity, and the
        # short real sleep avoids a hot busy-wait.
        t0 = self.robot.current_simulation_time
        while (self.robot.current_simulation_time - t0) < t - 0.01:
            real_time.sleep(0.001)


# Shadow the class with a singleton bound to the robot: from here on,
# `time.time()` and `time.sleep()` operate on simulated time.
time = time(poppy)

print(time.time())  # parentheses keep this valid on both Python 2 and 3
time.sleep(0.025)  # 0.025 is the minimum step according to the V-REP defined dt
print(time.time())


# We need a primitive to apply the pushing force to Poppy's chest.

# In[49]:

from pypot.primitive import Primitive


class force_primitive(Primitive):
    """Repeatedly push `shape` with force (fx, fy, fz) for one simulation
    step, then release it for 2 simulated seconds, until stopped."""

    def __init__(self, robot, fx, fy, fz, shape):
        self.robot = robot
        self.fx = fx
        self.fy = fy
        self.fz = fz
        self.shape = shape
        Primitive.__init__(self, robot)

    def run(self):
        while not self.should_stop():
            # FIX: use self.robot instead of the global `poppy`, so the
            # primitive acts on whichever robot it was constructed with.
            self.robot.set_vrep_force([self.fx, self.fy, self.fz], self.shape)
            time.sleep(0.025)
            self.robot.set_vrep_force([0, 0, 0], self.shape)
            time.sleep(2)


# To start and stop the force primitive:

# In[76]:

force = force_primitive(poppy, -15, 0, 0, 'chest_respondable')

# In[77]:

force.start()

# In[75]:

force.stop()

# Prepare Poppy for the experiments (head straight, arms slightly open).

# In[78]:

poppy.head_y.goto_position(0, 4, wait=False)
poppy.r_shoulder_y.goto_position(0, 4, wait=False)
poppy.l_shoulder_y.goto_position(0, 4, wait=True)
poppy.l_shoulder_x.goto_position(20, 4, wait=False)
poppy.r_shoulder_x.goto_position(-20, 4, wait=True)

# Track the head like an IMU could do. On the next graph, you can see the
# effect of the pushing force applied periodically on the chest.
# In[24]:

# Sample the head position for 20 simulated seconds and record the
# per-sample displacement along each axis (in the absolute V-REP frame),
# then plot the three delta series.
list_pos_x = []
list_pos_y = []
list_pos_z = []
t = []

pos = poppy.get_object_position('head_visual')
pos_x = pos[0]
pos_y = pos[1]
pos_z = pos[2]

t0 = time.time()
while time.time() - t0 < 20:
    pos = poppy.get_object_position('head_visual')
    # Record only when the position actually changed; the x coordinate is
    # used as a cheap change detector for the whole sample.
    if pos[0] != pos_x:
        list_pos_x.append(pos[0] - pos_x)
        list_pos_y.append(pos[1] - pos_y)
        list_pos_z.append(pos[2] - pos_z)
        pos_x, pos_y, pos_z = pos[0], pos[1], pos[2]
        t.append(poppy.current_simulation_time)
    time.sleep(0.01)

plot(t, list_pos_x)
plot(t, list_pos_y)
plot(t, list_pos_z)
legend(('dx', 'dy', 'dz'))

# Changing the reference frame from absolute to a frame relative to Poppy
# (to learn more about rotation matrices, see this tutorial:
# http://nbviewer.ipython.org/github/jjehl/poppy_balance/blob/master/tutorial_rotation_matrix.ipynb).
# We choose the frame oriented by chest_visual. The next graph shows the same
# movement, but expressed in the relative frame, while the arms try to damp
# the oscillation by moving in the opposite direction.
# In[25]: def rotate_matrix(a,b,g): Rx = np.mat([[1,0,0], [0,cos(a),-sin(a)], [0,sin(a),cos(a)]]) Ry = np.mat([[cos(b),0,sin(b)], [0,1,0], [-sin(b),0,cos(b)]]) Rz = np.mat([[cos(g),-sin(g),0], [sin(g),cos(g),0], [0,0,1]]) Rot = Rz*Ry*Rx return Rot list_pos_x = [] list_pos_y = [] list_pos_z = [] t= [] pos0 = np.mat(poppy.get_object_position('head_visual')).transpose() t0 = time.time() time.sleep(0.01) while time.time() - t0 < 20: pos1 = np.mat(poppy.get_object_position('head_visual')).transpose() if any(pos1 != pos0): d_pos = pos1-pos0 #make a rotation to d_pos to transpose the movement in a relative frame (frame of chest_visual) orient_chest = poppy.get_object_orientation('chest_visual') Rot=rotate_matrix(-orient_chest[0],-orient_chest[1],-orient_chest[2]) d_pos=Rot*d_pos #balance the movement with an opposite movement of the arm #le terme instantanée doit être ajouté à un terme intégré sur les 10 derniers mouvements poppy.r_shoulder_y.goal_position = d_pos[1]*2000 poppy.l_shoulder_y.goal_position = d_pos[1]*2000 poppy.r_shoulder_x.goal_position = -20-d_pos[2]*2000 poppy.l_shoulder_x.goal_position = 20-d_pos[2]*2000 list_pos_x.append(float(d_pos[0])) list_pos_y.append(float(d_pos[1])) list_pos_z.append(float(d_pos[2])) t.append(poppy.current_simulation_time) pos0 = pos1 time.sleep(0.01) plot(t, list_pos_x) plot(t, list_pos_y) plot(t, list_pos_z) legend(('dx', 'dy','dz')) # The balance use only the speed of the movement (i.e the movement of the arm is only proportional to the speed of the move of the head. It could be better to use also the acceleration and the position (integration of speed). So to be continued... # One test more with the possibility to react in function of the position and the acceleration. # In[43]: # choose the coefficient you want to apply to speed, position and acceleration. (P S A) P = 200 S = 2000 A = 2000 class IMU: def __init__(self,nb_record=10): self.record_pos=[] i=0 while i