/****************************************************************************
 *
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See http://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * Interface for Kinova Jaco robot.
 *
 * Authors:
 * Fabien Spindler
 *
 *****************************************************************************/

#include <visp3/core/vpConfig.h>

#ifdef VISP_HAVE_JACOSDK

#include <visp3/core/vpIoTools.h>
#include <visp3/robot/vpRobotException.h>

/*!
 * \file vpRobotKinova.cpp
 * \brief Interface for Kinova Jaco2 robot.
 */

#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/robot/vpRobotKinova.h>

/*!
 * Default constructor that considers a 6-DoF Jaco arm. Use setDoF() to change the degrees of freedom.
 */
vpRobotKinova::vpRobotKinova()
  : m_eMc(), m_plugin_location("./"), m_verbose(false), m_plugin_loaded(false), m_devices_count(0),
  m_devices_list(NULL), m_active_device(-1), m_command_layer(CMD_LAYER_UNSET), m_command_layer_handle()
{
  init();
}
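/*
 * The snippet below is a minimal usage sketch of this class. It assumes that the
 * Kinova SDK command layer plugin is located in the current directory and that
 * the USB command layer is used; adapt setPluginLocation() and setCommandLayer()
 * to your installation.
 *
 *   vpRobotKinova robot;
 *   robot.setPluginLocation("./");                        // Folder that contains the Kinova SDK plugin
 *   robot.setCommandLayer(vpRobotKinova::CMD_LAYER_USB);  // Or CMD_LAYER_ETHERNET
 *   robot.setDoF(6);                                      // 4, 6 or 7 depending on your arm
 *   if (robot.connect() < 1) {
 *     std::cout << "No Kinova device found" << std::endl;
 *   }
 *   else {
 *     robot.homing(); // Move the arm to its home position
 *   }
 */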

/*!
 * Destructor.
 */
vpRobotKinova::~vpRobotKinova()
{
  closePlugin();

  if (m_devices_list) {
    delete[] m_devices_list;
  }
}

/*!
 * Specify Jaco robot degrees of freedom.
 * \param dof : Possible values are 4, 6 or 7 corresponding to the degrees of freedom of your Kinova Jaco robot.
 */
void vpRobotKinova::setDoF(unsigned int dof)
{
  if (dof == 4 || dof == 6 || dof == 7) {
    nDof = dof;
  }
  else {
    throw(vpException(vpException::fatalError, "Unsupported Kinova Jaco degrees of freedom: %d. Possible values are 4, 6 or 7.", dof));
  }
}

/*!
 * Basic initialization.
 */
void vpRobotKinova::init()
{
  // If you want to control the robot in Cartesian space in a tool frame, set the corresponding transformation
  // in m_eMc that is set to identity by default in the constructor.

  maxRotationVelocity = maxRotationVelocityDefault;
  maxTranslationVelocity = maxTranslationVelocityDefault;

  // Set here the robot degrees of freedom number
  nDof = 6; // Jaco2 default dof = 6

  m_devices_list = new KinovaDevice[MAX_KINOVA_DEVICE];
}

/*

  At least one of these functions has to be implemented to control the robot with a
  Cartesian velocity:
  - get_eJe()
  - get_fJe()

*/

/*!
 * Get the robot Jacobian expressed in the end-effector frame. This function
 * is not implemented. In fact, we don't need it since we can control the robot
 * in Cartesian space in the end-effector frame.
 *
 * \param[out] eJe : End-effector frame Jacobian.
 */
void vpRobotKinova::get_eJe(vpMatrix &eJe)
{
  if (!m_plugin_loaded) {
    throw(vpException(vpException::fatalError, "Jaco SDK plugin not loaded"));
  }
  if (!m_devices_count) {
    throw(vpException(vpException::fatalError, "No Kinova robot found"));
  }

  (void)eJe;
  std::cout << "Not implemented ! " << std::endl;
}

/*!
 * Get the robot Jacobian expressed in the robot reference frame. This function
 * is not implemented. In fact, we don't need it since we can control the robot
 * in Cartesian space in the end-effector frame.
 *
 * \param[out] fJe : Base (or reference) frame Jacobian.
 */
void vpRobotKinova::get_fJe(vpMatrix &fJe)
{
  if (!m_plugin_loaded) {
    throw(vpException(vpException::fatalError, "Jaco SDK plugin not loaded"));
  }
  if (!m_devices_count) {
    throw(vpException(vpException::fatalError, "No Kinova robot found"));
  }

  (void)fJe;
  std::cout << "Not implemented ! " << std::endl;
}
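/*
 * Note: should a base-frame Jacobian become necessary one day, it could be derived
 * from an end-effector frame Jacobian by a change of frame. The sketch below only
 * illustrates the relation; it assumes that get_eJe() is implemented and that bRe
 * holds the base to end-effector rotation (as retrieved in setCartVelocity() from
 * getPosition()):
 *
 *   vpMatrix eJe, fJe;
 *   get_eJe(eJe);             // Jacobian in the end-effector frame
 *   vpMatrix bVe(6, 6, 0);
 *   bVe.eye();
 *   bVe.insert(bRe, 0, 0);    // Rotate the translation part
 *   bVe.insert(bRe, 3, 3);    // Rotate the rotation part
 *   fJe = bVe * eJe;          // Jacobian expressed in the base frame
 */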

/*

  At least one of these functions has to be implemented to control the robot:
  - setCartVelocity()
  - setJointVelocity()

*/

/*!
 * Send to the controller a 6-dim velocity twist vector expressed in a Cartesian frame.
 *
 * \param[in] frame : Cartesian control frame. Units are m/s for translation and rad/s for rotation velocities.
 * - In CAMERA_FRAME or TOOL_FRAME, we consider that the \e v 6-dim velocity twist vector contains translation and rotation velocities expressed
 *   in the camera or tool frame respectively.
 * - In END_EFFECTOR_FRAME, we consider that the \e v 6-dim velocity twist vector contains translation and rotation velocities expressed
 *   in the end-effector frame.
 * - In MIXT_FRAME, we consider that the \e v 6-dim velocity twist vector contains translation velocities expressed
 *   in the base frame and rotation velocities expressed in the end-effector frame.
 *
 * \param[in] v : 6-dim velocity twist vector that contains 3 translation velocities followed by 3 rotation velocities.
 * Units are m/s for translation and rad/s for rotation velocities.
 */
void vpRobotKinova::setCartVelocity(const vpRobot::vpControlFrameType frame, const vpColVector &v)
{
  if (v.size() != 6) {
    throw(vpException(vpException::fatalError, "Cannot send a velocity twist vector that is not 6-dim (%d)", v.size()));
  }

  vpColVector v_e;   // This is the velocity that the robot is able to apply in the end-effector frame
  vpColVector v_c;   // This is the velocity that the robot is able to apply in the camera frame
  vpColVector v_mix; // This is the velocity that the robot is able to apply in the mixt frame
  switch (frame) {
  case vpRobot::CAMERA_FRAME: {
    // We have to transform the requested velocity in the end-effector frame.
    // Knowing that the constant transformation between the tool frame and the end-effector frame obtained
    // by extrinsic calibration is set in m_eMc we can compute the velocity twist matrix eVc that transforms
    // a velocity twist from tool (or camera) frame into end-effector frame
    vpVelocityTwistMatrix eVc(m_eMc); // GET IT FROM CAMERA EXTRINSIC CALIBRATION FILE

    // Input velocity is expressed in camera or tool frame
    v_c = v;

    // Transform velocity to the end-effector frame
    v_e = eVc * v_c;

    // Convert end-effector translation velocity in base frame, rotation velocity is unchanged
    vpColVector p_e;
    getPosition(vpRobot::END_EFFECTOR_FRAME, p_e);
    vpRxyzVector bre(p_e[3], p_e[4], p_e[5]);
    vpRotationMatrix bRe(bre);
    vpMatrix bVe(6, 6, 0);
    bVe.eye();
    bVe.insert(bRe, 0, 0);
    v_mix = bVe * v_e;

    TrajectoryPoint pointToSend;
    pointToSend.InitStruct();
    // We specify that this point will be used as a Cartesian velocity vector
    pointToSend.Position.Type = CARTESIAN_VELOCITY;
    pointToSend.Position.HandMode = HAND_NOMOVEMENT;

    pointToSend.Position.CartesianPosition.X = static_cast<float>(v_mix[0]);
    pointToSend.Position.CartesianPosition.Y = static_cast<float>(v_mix[1]);
    pointToSend.Position.CartesianPosition.Z = static_cast<float>(v_mix[2]);
    pointToSend.Position.CartesianPosition.ThetaX = static_cast<float>(v_mix[3]);
    pointToSend.Position.CartesianPosition.ThetaY = static_cast<float>(v_mix[4]);
    pointToSend.Position.CartesianPosition.ThetaZ = static_cast<float>(v_mix[5]);

    KinovaSetCartesianControl(); // Not sure that this function is useful here

    KinovaSendBasicTrajectory(pointToSend);
    break;
  }

  case vpRobot::END_EFFECTOR_FRAME: {
    // Input velocity is expressed in the end-effector frame
    v_e = v;

    // Convert end-effector translation velocity in base frame, rotation velocity is unchanged
    vpColVector p_e;
    getPosition(vpRobot::END_EFFECTOR_FRAME, p_e);
    vpRxyzVector bre(p_e[3], p_e[4], p_e[5]);
    vpRotationMatrix bRe(bre);
    vpMatrix bVe(6, 6, 0);
    bVe.eye();
    bVe.insert(bRe, 0, 0);
    v_mix = bVe * v_e;

    TrajectoryPoint pointToSend;
    pointToSend.InitStruct();
    // We specify that this point will be used as a Cartesian velocity vector
    pointToSend.Position.Type = CARTESIAN_VELOCITY;
    pointToSend.Position.HandMode = HAND_NOMOVEMENT;

    pointToSend.Position.CartesianPosition.X = static_cast<float>(v_mix[0]);
    pointToSend.Position.CartesianPosition.Y = static_cast<float>(v_mix[1]);
    pointToSend.Position.CartesianPosition.Z = static_cast<float>(v_mix[2]);
    pointToSend.Position.CartesianPosition.ThetaX = static_cast<float>(v_mix[3]);
    pointToSend.Position.CartesianPosition.ThetaY = static_cast<float>(v_mix[4]);
    pointToSend.Position.CartesianPosition.ThetaZ = static_cast<float>(v_mix[5]);

    KinovaSetCartesianControl(); // Not sure that this function is useful here

    KinovaSendBasicTrajectory(pointToSend);
    break;
  }

  case vpRobot::MIXT_FRAME: {
    // Convert end-effector translation velocity in base frame, rotation velocity is unchanged
    vpColVector p_e;
    getPosition(vpRobot::END_EFFECTOR_FRAME, p_e);
    vpRxyzVector bre(p_e[3], p_e[4], p_e[5]);
    vpRotationMatrix bRe(bre);
    std::cout << "Rotation matrix from base to end-effector frame bRe:\n" << bRe << std::endl;
    vpMatrix bVe(6, 6, 0);
    bVe.eye();
    bVe.insert(bRe, 0, 0);
    v_e = v;
    v_mix = bVe * v_e;

    TrajectoryPoint pointToSend;
    pointToSend.InitStruct();
    // We specify that this point will be used as a Cartesian velocity vector
    pointToSend.Position.Type = CARTESIAN_VELOCITY;
    pointToSend.Position.HandMode = HAND_NOMOVEMENT;

    pointToSend.Position.CartesianPosition.X = static_cast<float>(v_mix[0]);
    pointToSend.Position.CartesianPosition.Y = static_cast<float>(v_mix[1]);
    pointToSend.Position.CartesianPosition.Z = static_cast<float>(v_mix[2]);
    pointToSend.Position.CartesianPosition.ThetaX = static_cast<float>(v_e[3]);
    pointToSend.Position.CartesianPosition.ThetaY = static_cast<float>(v_e[4]);
    pointToSend.Position.CartesianPosition.ThetaZ = static_cast<float>(v_e[5]);

    KinovaSetCartesianControl(); // Not sure that this function is useful here
    KinovaSendBasicTrajectory(pointToSend);
    break;
  }
  case vpRobot::REFERENCE_FRAME:
  case vpRobot::JOINT_STATE:
    // Out of the scope of this method
    break;
  }
}
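/*
 * For reference, the change of frame applied above follows the usual velocity twist
 * transformation. With m_eMc the tool (camera) frame pose expressed in the end-effector
 * frame, made of the rotation eRc and the translation etc, the twist matrix used in the
 * CAMERA_FRAME case is:
 *
 *          [ eRc   [etc]x eRc ]
 *   eVc =  [                  ]    so that   v_e = eVc * v_c
 *          [  0       eRc     ]
 *
 * where [etc]x is the skew-symmetric matrix associated to etc. The translation part of
 * v_e is then rotated into the base frame with bRe, while the rotation part is kept in
 * the end-effector frame, as assumed by this implementation of the Kinova Cartesian
 * velocity interface.
 */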

/*!
 * Send a joint velocity to the controller.
 * \param[in] qdot : Joint velocity vector. Units are rad/s.
 */
void vpRobotKinova::setJointVelocity(const vpColVector &qdot)
{
  if (qdot.size() != static_cast<unsigned int>(nDof)) {
    throw(vpException(vpException::dimensionError, "Cannot apply a %d-dim joint velocity vector to the Jaco robot configured with %d DoF", qdot.size(), nDof));
  }
  TrajectoryPoint pointToSend;
  pointToSend.InitStruct();
  // We specify that this point will be used as an angular (joint by joint) velocity vector
  pointToSend.Position.Type = ANGULAR_VELOCITY;
  pointToSend.Position.HandMode = HAND_NOMOVEMENT;
  switch (nDof) {
  case 7: {
    pointToSend.Position.Actuators.Actuator7 = static_cast<float>(vpMath::deg(qdot[6]));
    pointToSend.Position.Actuators.Actuator6 = static_cast<float>(vpMath::deg(qdot[5]));
    pointToSend.Position.Actuators.Actuator5 = static_cast<float>(vpMath::deg(qdot[4]));
    pointToSend.Position.Actuators.Actuator4 = static_cast<float>(vpMath::deg(qdot[3]));
    pointToSend.Position.Actuators.Actuator3 = static_cast<float>(vpMath::deg(qdot[2]));
    pointToSend.Position.Actuators.Actuator2 = static_cast<float>(vpMath::deg(qdot[1]));
    pointToSend.Position.Actuators.Actuator1 = static_cast<float>(vpMath::deg(qdot[0]));
    break;
  }
  case 6: {
    pointToSend.Position.Actuators.Actuator6 = static_cast<float>(vpMath::deg(qdot[5]));
    pointToSend.Position.Actuators.Actuator5 = static_cast<float>(vpMath::deg(qdot[4]));
    pointToSend.Position.Actuators.Actuator4 = static_cast<float>(vpMath::deg(qdot[3]));
    pointToSend.Position.Actuators.Actuator3 = static_cast<float>(vpMath::deg(qdot[2]));
    pointToSend.Position.Actuators.Actuator2 = static_cast<float>(vpMath::deg(qdot[1]));
    pointToSend.Position.Actuators.Actuator1 = static_cast<float>(vpMath::deg(qdot[0]));
    break;
  }
  case 4: {
    pointToSend.Position.Actuators.Actuator4 = static_cast<float>(vpMath::deg(qdot[3]));
    pointToSend.Position.Actuators.Actuator3 = static_cast<float>(vpMath::deg(qdot[2]));
    pointToSend.Position.Actuators.Actuator2 = static_cast<float>(vpMath::deg(qdot[1]));
    pointToSend.Position.Actuators.Actuator1 = static_cast<float>(vpMath::deg(qdot[0]));
    break;
  }
  default:
    throw(vpException(vpException::fatalError, "Unsupported Jaco robot with %d DoF", nDof));
  }

  KinovaSetAngularControl(); // Not sure that this function is useful here

  KinovaSendBasicTrajectory(pointToSend);
}
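/*
 * A minimal sketch of joint velocity control through the public interface, the joint
 * values being purely illustrative. The conversion to the degrees expected by the
 * Kinova SDK is handled internally by setJointVelocity():
 *
 *   vpRobotKinova robot;
 *   // ... connect() as shown above ...
 *   robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);
 *   vpColVector qdot(6, 0);
 *   qdot[0] = vpMath::rad(5); // First joint turns at 5 deg/s
 *   robot.setVelocity(vpRobot::JOINT_STATE, qdot);
 */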

/*!
 * Send to the controller a velocity in a given frame.
 *
 * \param[in] frame : Control frame in which the velocity \e vel is expressed.
 * In Cartesian control frames, units are m/s for translation and rad/s for rotation velocities.
 * - In CAMERA_FRAME or TOOL_FRAME, we consider that the \e vel 6-dim velocity twist vector contains translation and rotation velocities expressed
 *   in the camera or tool frame respectively.
 * - In END_EFFECTOR_FRAME, we consider that the \e vel 6-dim velocity twist vector contains translation and rotation velocities expressed
 *   in the end-effector frame.
 * - In MIXT_FRAME, we consider that the \e vel 6-dim velocity twist vector contains translation velocities expressed
 *   in the base frame and rotation velocities expressed in the end-effector frame.
 * To send a joint velocity, use JOINT_STATE instead. Units are then rad/s for the robot arm joint velocities.
 *
 * \param[in] vel : Vector that contains the velocity to apply to the robot. In Cartesian control frames, this is a 6-dim velocity
 * twist vector that contains 3 translation velocities followed by 3 rotation velocities.
 * When a joint velocity vector is given, its dimension must match the robot degrees of freedom set with setDoF().
 */
void vpRobotKinova::setVelocity(const vpRobot::vpControlFrameType frame, const vpColVector &vel)
{
  if (!m_plugin_loaded) {
    throw(vpException(vpException::fatalError, "Jaco SDK plugin not loaded"));
  }
  if (!m_devices_count) {
    throw(vpException(vpException::fatalError, "No Kinova robot found"));
  }
  if (vpRobot::STATE_VELOCITY_CONTROL != getRobotState()) {
    throw vpRobotException(vpRobotException::wrongStateError,
      "Cannot send a velocity to the robot. "
      "Call setRobotState(vpRobot::STATE_VELOCITY_CONTROL) once before "
      "entering your control loop.");
  }

  vpColVector vel_sat(6);

  // Velocity saturation
  switch (frame) {
  // Saturation in Cartesian space
  case vpRobot::TOOL_FRAME:
  case vpRobot::REFERENCE_FRAME:
  case vpRobot::END_EFFECTOR_FRAME:
  case vpRobot::MIXT_FRAME: {
    if (vel.size() != 6) {
      throw(vpException(vpException::dimensionError, "Cannot apply a Cartesian velocity that is not a 6-dim vector (%d)", vel.size()));
    }
    vpColVector vel_max(6);

    for (unsigned int i = 0; i < 3; i++)
      vel_max[i] = getMaxTranslationVelocity();
    for (unsigned int i = 3; i < 6; i++)
      vel_max[i] = getMaxRotationVelocity();

    vel_sat = vpRobot::saturateVelocities(vel, vel_max, true);

    setCartVelocity(frame, vel_sat);
    break;
  }

  // Saturation in joint space
  case vpRobot::JOINT_STATE: {
    if (vel.size() != static_cast<size_t>(nDof)) {
      throw(vpException(vpException::dimensionError, "Cannot apply a joint velocity that is not a %d-dim vector (%d)", nDof, vel.size()));
    }
    vpColVector vel_max(vel.size());

    // Since the robot has only revolute joints, all joint max velocities are set to getMaxRotationVelocity()
    vel_max = getMaxRotationVelocity();

    vel_sat = vpRobot::saturateVelocities(vel, vel_max, true);

    setJointVelocity(vel_sat);
    break;
  }
  }
}
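/*
 * A minimal velocity control sketch using this method, for example with a velocity
 * twist computed by a visual servoing law. The setter for the m_eMc member, the
 * `task` object and the `end_of_task` flag are placeholders given for illustration:
 *
 *   vpRobotKinova robot;
 *   // ... connect() as shown above ...
 *   robot.set_eMc(eMc); // Assumed setter for the end-effector to camera transformation (see init())
 *   robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);
 *   while (!end_of_task) {
 *     vpColVector v = task.computeControlLaw(); // 6-dim twist in the camera frame
 *     robot.setVelocity(vpRobot::CAMERA_FRAME, v);
 *   }
 *   robot.setRobotState(vpRobot::STATE_STOP);
 */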

/*

  THESE FUNCTIONS ARE NOT MANDATORY BUT ARE USUALLY USEFUL

*/

/*!
 * Get robot joint positions.
 *
 * \param[out] q : Joint position vector in rad. Its dimension matches the robot degrees of freedom (4, 6 or 7) set with setDoF().
 */
void vpRobotKinova::getJointPosition(vpColVector &q)
{
  AngularPosition currentCommand;

  // We get the actual angular command of the robot. Values are in deg
  KinovaGetAngularCommand(currentCommand);

  q.resize(nDof);
  switch (nDof) {
  case 7: {
    q[6] = vpMath::rad(currentCommand.Actuators.Actuator7);
    q[5] = vpMath::rad(currentCommand.Actuators.Actuator6);
    q[4] = vpMath::rad(currentCommand.Actuators.Actuator5);
    q[3] = vpMath::rad(currentCommand.Actuators.Actuator4);
    q[2] = vpMath::rad(currentCommand.Actuators.Actuator3);
    q[1] = vpMath::rad(currentCommand.Actuators.Actuator2);
    q[0] = vpMath::rad(currentCommand.Actuators.Actuator1);
    break;
  }
  case 6: {
    q[5] = vpMath::rad(currentCommand.Actuators.Actuator6);
    q[4] = vpMath::rad(currentCommand.Actuators.Actuator5);
    q[3] = vpMath::rad(currentCommand.Actuators.Actuator4);
    q[2] = vpMath::rad(currentCommand.Actuators.Actuator3);
    q[1] = vpMath::rad(currentCommand.Actuators.Actuator2);
    q[0] = vpMath::rad(currentCommand.Actuators.Actuator1);
    break;
  }
  case 4: {
    q[3] = vpMath::rad(currentCommand.Actuators.Actuator4);
    q[2] = vpMath::rad(currentCommand.Actuators.Actuator3);
    q[1] = vpMath::rad(currentCommand.Actuators.Actuator2);
    q[0] = vpMath::rad(currentCommand.Actuators.Actuator1);
    break;
  }
  default:
    throw(vpException(vpException::fatalError, "Unsupported Jaco robot with %d DoF", nDof));
  }
}
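/*
 * A short usage sketch of the joint position getter through the public getPosition()
 * interface, printing the values in degrees:
 *
 *   vpColVector q;
 *   robot.getPosition(vpRobot::JOINT_STATE, q);
 *   for (unsigned int i = 0; i < q.size(); i++) {
 *     std::cout << "Joint " << i + 1 << " position [deg]: " << vpMath::deg(q[i]) << std::endl;
 *   }
 */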

/*!
 * Get robot position.
 *
 * \param[in] frame : Considered Cartesian frame or joint state.
 * \param[out] position : Either joint or Cartesian position. When `frame` is set to vpRobot::JOINT_STATE, `position` contains joint angles expressed in rad,
 * while when `frame` is set to vpRobot::END_EFFECTOR_FRAME `position` contains the Cartesian position of the end-effector in the robot base frame as a 6-dim vector,
 * with first the 3 translations expressed in meter, and then the 3 Euler rotations Rxyz expressed in radians.
 *
 * The following code shows how to use this function and convert the resulting position into an homogeneous matrix
 * that gives the transformation between the robot base frame and the end-effector:
 * \code
 * vpColVector position;
 * ...
 * robot.getPosition(vpRobot::END_EFFECTOR_FRAME, position);
 * vpTranslationVector wte; // reference frame to end-effector frame translations
 * vpRxyzVector wre; // reference frame to end-effector frame rotations
 * // Update the transformation between reference frame and end-effector frame
 * for (unsigned int i=0; i < 3; i++) {
 *   wte[i] = position[i];   // tx, ty, tz
 *   wre[i] = position[i+3]; // rx, ry, rz
 * }
 * // Create a rotation matrix from the Rxyz rotation angles
 * vpRotationMatrix wRe(wre); // reference frame to end-effector frame rotation matrix
 * // Create reference frame to end-effector frame transformation in terms of an homogeneous matrix
 * vpHomogeneousMatrix wMe(wte, wRe);
 * \endcode
 */
void vpRobotKinova::getPosition(const vpRobot::vpControlFrameType frame, vpColVector &position)
{
  if (!m_plugin_loaded) {
    throw(vpException(vpException::fatalError, "Jaco SDK plugin not loaded"));
  }
  if (!m_devices_count) {
    throw(vpException(vpException::fatalError, "No Kinova robot found"));
  }

  if (frame == JOINT_STATE) {
    getJointPosition(position);
  }
  else if (frame == END_EFFECTOR_FRAME) {
    CartesianPosition currentCommand;
    // We get the actual Cartesian position of the robot
    KinovaGetCartesianCommand(currentCommand);
    position.resize(6);
    position[0] = currentCommand.Coordinates.X;
    position[1] = currentCommand.Coordinates.Y;
    position[2] = currentCommand.Coordinates.Z;
    position[3] = currentCommand.Coordinates.ThetaX;
    position[4] = currentCommand.Coordinates.ThetaY;
    position[5] = currentCommand.Coordinates.ThetaZ;
  }
  else {
    std::cout << "Not implemented ! " << std::endl;
  }
}

/*!
 * Get robot position.
 *
 * \param[in] frame : Type of Cartesian position to retrieve. Admissible value is:
 * - vpRobot::END_EFFECTOR_FRAME to retrieve the Cartesian position of the end-effector frame wrt the robot base frame.
 * \param[out] pose : Cartesian position of the end-effector in the robot base frame as a 6-dim pose vector,
 * with first the 3 translations expressed in meter, and then the 3 rotations in radian as a \f$\theta {\bf u}\f$ vector (see vpThetaUVector).
 *
 * The following code shows how to use this function and convert the resulting position into an homogeneous matrix
 * that gives the transformation between the robot base frame and the end-effector:
 * \code
 * vpPoseVector pose;
 * ...
 * robot.getPosition(vpRobot::END_EFFECTOR_FRAME, pose);
 * // Create reference frame to end-effector frame transformation in terms of an homogeneous matrix
 * vpHomogeneousMatrix wMe(pose);
 * \endcode
 */
void vpRobotKinova::getPosition(const vpRobot::vpControlFrameType frame, vpPoseVector &pose)
{
  if (frame == JOINT_STATE) {
    throw(vpException(vpException::fatalError, "Cannot get Jaco joint position as a pose vector"));
  }

  vpColVector position;
  getPosition(frame, position);

  vpRxyzVector rxyz; // reference frame to end-effector frame rotations
  // Update the transformation between reference frame and end-effector frame
  for (unsigned int i = 0; i < 3; i++) {
    pose[i] = position[i];   // tx, ty, tz
    rxyz[i] = position[i+3]; // rx, ry, rz
  }
  vpThetaUVector tu(rxyz);
  for (unsigned int i = 0; i < 3; i++) {
    pose[i+3] = tu[i]; // tux, tuy, tuz
  }
}

/*!
 * Set a position to reach.
 *
 * \param[in] frame : Considered Cartesian frame or joint state.
 * \param[in] q : Position to reach. When `frame` is set to vpRobot::JOINT_STATE, `q` is a joint position vector in rad.
 * When `frame` is set to vpRobot::END_EFFECTOR_FRAME, `q` is a 6-dim vector with the 3 translations in meter followed by
 * the 3 Euler rotations Rxyz in radian.
 */
void vpRobotKinova::setPosition(const vpRobot::vpControlFrameType frame, const vpColVector &q)
{
  if (!m_plugin_loaded) {
    throw(vpException(vpException::fatalError, "Jaco SDK plugin not loaded"));
  }
  if (!m_devices_count) {
    throw(vpException(vpException::fatalError, "No Kinova robot found"));
  }
  if (frame == vpRobot::JOINT_STATE) {
    if (static_cast<int>(q.size()) != nDof) {
      throw(vpException(vpException::fatalError, "Cannot move robot to a joint position of dim %u that is not a %d-dim vector", q.size(), nDof));
    }
    TrajectoryPoint pointToSend;
    pointToSend.InitStruct();
    // We specify that this point will be an angular (joint by joint) position.
    pointToSend.Position.Type = ANGULAR_POSITION;
    pointToSend.Position.HandMode = HAND_NOMOVEMENT;
    switch (nDof) {
    case 7: {
      pointToSend.Position.Actuators.Actuator7 = static_cast<float>(vpMath::deg(q[6]));
      pointToSend.Position.Actuators.Actuator6 = static_cast<float>(vpMath::deg(q[5]));
      pointToSend.Position.Actuators.Actuator5 = static_cast<float>(vpMath::deg(q[4]));
      pointToSend.Position.Actuators.Actuator4 = static_cast<float>(vpMath::deg(q[3]));
      pointToSend.Position.Actuators.Actuator3 = static_cast<float>(vpMath::deg(q[2]));
      pointToSend.Position.Actuators.Actuator2 = static_cast<float>(vpMath::deg(q[1]));
      pointToSend.Position.Actuators.Actuator1 = static_cast<float>(vpMath::deg(q[0]));
      break;
    }
    case 6: {
      pointToSend.Position.Actuators.Actuator6 = static_cast<float>(vpMath::deg(q[5]));
      pointToSend.Position.Actuators.Actuator5 = static_cast<float>(vpMath::deg(q[4]));
      pointToSend.Position.Actuators.Actuator4 = static_cast<float>(vpMath::deg(q[3]));
      pointToSend.Position.Actuators.Actuator3 = static_cast<float>(vpMath::deg(q[2]));
      pointToSend.Position.Actuators.Actuator2 = static_cast<float>(vpMath::deg(q[1]));
      pointToSend.Position.Actuators.Actuator1 = static_cast<float>(vpMath::deg(q[0]));
      break;
    }
    case 4: {
      pointToSend.Position.Actuators.Actuator4 = static_cast<float>(vpMath::deg(q[3]));
      pointToSend.Position.Actuators.Actuator3 = static_cast<float>(vpMath::deg(q[2]));
      pointToSend.Position.Actuators.Actuator2 = static_cast<float>(vpMath::deg(q[1]));
      pointToSend.Position.Actuators.Actuator1 = static_cast<float>(vpMath::deg(q[0]));
      break;
    }
    default:
      throw(vpException(vpException::fatalError, "Unsupported Jaco robot with %d DoF", nDof));
    }

    KinovaSetAngularControl(); // Not sure that this function is useful here

    if (m_verbose) {
      std::cout << "Move robot to joint position [rad]: " << q.t() << std::endl;
    }
    KinovaSendBasicTrajectory(pointToSend);
  }
  else if (frame == vpRobot::END_EFFECTOR_FRAME) {
    if (q.size() != 6) {
      throw(vpException(vpException::fatalError, "Cannot move robot to Cartesian position of dim %d that is not a 6-dim vector", q.size()));
    }
    TrajectoryPoint pointToSend;
    pointToSend.InitStruct();
    // We specify that this point will be a Cartesian position.
    pointToSend.Position.Type = CARTESIAN_POSITION;
    pointToSend.Position.HandMode = HAND_NOMOVEMENT;
    pointToSend.Position.CartesianPosition.X = static_cast<float>(q[0]);
    pointToSend.Position.CartesianPosition.Y = static_cast<float>(q[1]);
    pointToSend.Position.CartesianPosition.Z = static_cast<float>(q[2]);
    pointToSend.Position.CartesianPosition.ThetaX = static_cast<float>(q[3]);
    pointToSend.Position.CartesianPosition.ThetaY = static_cast<float>(q[4]);
    pointToSend.Position.CartesianPosition.ThetaZ = static_cast<float>(q[5]);

    KinovaSetCartesianControl(); // Not sure that this function is useful here

    if (m_verbose) {
      std::cout << "Move robot to Cartesian position [m m m rad rad rad]: " << q.t() << std::endl;
    }
    KinovaSendBasicTrajectory(pointToSend);
  }
  else {
    throw(vpException(vpException::fatalError, "Cannot move robot to the requested position: only joint and end-effector frame positioning are implemented"));
  }
}
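/*
 * A minimal positioning sketch following the usual vpRobot state convention, with
 * purely illustrative joint values. The robot is assumed to be connected as shown
 * above:
 *
 *   vpColVector q(6, 0);
 *   q[0] = vpMath::rad(10); // Target position of the first joint
 *   robot.setRobotState(vpRobot::STATE_POSITION_CONTROL);
 *   robot.setPosition(vpRobot::JOINT_STATE, q);
 */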

/*!
 * Get a displacement.
 *
 * \param[in] frame : Considered Cartesian frame or joint state.
 * \param[out] q : Displacement in meter and rad.
 */
void vpRobotKinova::getDisplacement(const vpRobot::vpControlFrameType frame, vpColVector &q)
{
  if (!m_plugin_loaded) {
    throw(vpException(vpException::fatalError, "Jaco SDK plugin not loaded"));
  }
  if (!m_devices_count) {
    throw(vpException(vpException::fatalError, "No Kinova robot found"));
  }

  (void)frame;
  (void)q;
  std::cout << "Not implemented ! " << std::endl;
}

/*!
 * Load functions from the Jaco SDK plugin.
 * - When the command layer is set to CMD_LAYER_USB we load `Kinova.API.USBCommandLayerUbuntu.so` or `CommandLayerWindows.dll`
 * respectively on unix-like or Windows platform.
 * - When the command layer is set to CMD_LAYER_ETHERNET we load `Kinova.API.EthCommandLayerUbuntu.so` or `CommandLayerEthernet.dll`
 * respectively on unix-like or Windows platform.
 *
 * Use setPluginLocation() to modify the default location of the plugin, which is set to "./".
 *
 * \sa setPluginLocation(), setCommandLayer()
 */
void vpRobotKinova::loadPlugin()
{
  if (m_command_layer == CMD_LAYER_UNSET) {
    throw(vpException(vpException::fatalError, "Kinova robot command layer unset"));
  }
  if (m_plugin_loaded) {
    closePlugin();
  }
#ifdef __linux__
  // We load the API
  std::string plugin_name = (m_command_layer == CMD_LAYER_USB) ? std::string("Kinova.API.USBCommandLayerUbuntu.so") : std::string("Kinova.API.EthCommandLayerUbuntu.so");
  std::string plugin = vpIoTools::createFilePath(m_plugin_location, plugin_name);
  if (m_verbose) {
    std::cout << "Load plugin: \"" << plugin << "\"" << std::endl;
  }
  m_command_layer_handle = dlopen(plugin.c_str(), RTLD_NOW | RTLD_GLOBAL);

  std::string prefix = (m_command_layer == CMD_LAYER_USB) ? std::string("") : std::string("Ethernet_");
  // We load the functions from the library
  KinovaCloseAPI = (int(*)()) dlsym(m_command_layer_handle, (prefix + std::string("CloseAPI")).c_str());
  KinovaGetAngularCommand = (int(*)(AngularPosition &)) dlsym(m_command_layer_handle, (prefix + std::string("GetAngularCommand")).c_str());
  KinovaGetCartesianCommand = (int(*)(CartesianPosition &)) dlsym(m_command_layer_handle, (prefix + std::string("GetCartesianCommand")).c_str());
  KinovaGetDevices = (int(*)(KinovaDevice devices[MAX_KINOVA_DEVICE], int &result)) dlsym(m_command_layer_handle, (prefix + std::string("GetDevices")).c_str());
  KinovaInitAPI = (int(*)()) dlsym(m_command_layer_handle, (prefix + std::string("InitAPI")).c_str());
  KinovaInitFingers = (int(*)()) dlsym(m_command_layer_handle, (prefix + std::string("InitFingers")).c_str());
  KinovaMoveHome = (int(*)()) dlsym(m_command_layer_handle, (prefix + std::string("MoveHome")).c_str());
  KinovaSendBasicTrajectory = (int(*)(TrajectoryPoint)) dlsym(m_command_layer_handle, (prefix + std::string("SendBasicTrajectory")).c_str());
  KinovaSetActiveDevice = (int(*)(KinovaDevice devices)) dlsym(m_command_layer_handle, (prefix + std::string("SetActiveDevice")).c_str());
  KinovaSetAngularControl = (int(*)()) dlsym(m_command_layer_handle, (prefix + std::string("SetAngularControl")).c_str());
  KinovaSetCartesianControl = (int(*)()) dlsym(m_command_layer_handle, (prefix + std::string("SetCartesianControl")).c_str());
#elif _WIN32
  // We load the API.
  std::string plugin_name = (m_command_layer == CMD_LAYER_USB) ? std::string("CommandLayerWindows.dll") : std::string("CommandLayerEthernet.dll");
  std::string plugin = vpIoTools::createFilePath(m_plugin_location, plugin_name);
  if (m_verbose) {
    std::cout << "Load plugin: \"" << plugin << "\"" << std::endl;
  }
  m_command_layer_handle = LoadLibrary(TEXT(plugin.c_str()));

  // We load the functions from the library
  KinovaCloseAPI = (int(*)()) GetProcAddress(m_command_layer_handle, "CloseAPI");
  KinovaGetAngularCommand = (int(*)(AngularPosition &)) GetProcAddress(m_command_layer_handle, "GetAngularCommand");
  KinovaGetCartesianCommand = (int(*)(CartesianPosition &)) GetProcAddress(m_command_layer_handle, "GetCartesianCommand");
  KinovaGetDevices = (int(*)(KinovaDevice[MAX_KINOVA_DEVICE], int&)) GetProcAddress(m_command_layer_handle, "GetDevices");
  KinovaInitAPI = (int(*)()) GetProcAddress(m_command_layer_handle, "InitAPI");
  KinovaInitFingers = (int(*)()) GetProcAddress(m_command_layer_handle, "InitFingers");
  KinovaMoveHome = (int(*)()) GetProcAddress(m_command_layer_handle, "MoveHome");
  KinovaSendBasicTrajectory = (int(*)(TrajectoryPoint)) GetProcAddress(m_command_layer_handle, "SendBasicTrajectory");
  KinovaSetActiveDevice = (int(*)(KinovaDevice)) GetProcAddress(m_command_layer_handle, "SetActiveDevice");
  KinovaSetAngularControl = (int(*)()) GetProcAddress(m_command_layer_handle, "SetAngularControl");
  KinovaSetCartesianControl = (int(*)()) GetProcAddress(m_command_layer_handle, "SetCartesianControl");
#endif

  // Verify that all functions have been loaded correctly
  if ((KinovaCloseAPI == NULL) || (KinovaGetAngularCommand == NULL) || (KinovaGetCartesianCommand == NULL) ||
    (KinovaGetDevices == NULL) || (KinovaInitAPI == NULL) || (KinovaInitFingers == NULL) ||
    (KinovaMoveHome == NULL) || (KinovaSendBasicTrajectory == NULL) ||
    (KinovaSetActiveDevice == NULL) || (KinovaSetAngularControl == NULL) || (KinovaSetCartesianControl == NULL)) {
    throw(vpException(vpException::fatalError, "Cannot load plugin from \"%s\" folder", m_plugin_location.c_str()));
  }
  if (m_verbose) {
    std::cout << "Plugin successfully loaded" << std::endl;
  }

  m_plugin_loaded = true;
}
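/*
 * Example of plugin selection, assuming the Kinova SDK has been installed in
 * /opt/kinova (path given for illustration only). loadPlugin() is called internally
 * by connect(), so it does not need to be called explicitly:
 *
 *   vpRobotKinova robot;
 *   robot.setPluginLocation("/opt/kinova");
 *   robot.setCommandLayer(vpRobotKinova::CMD_LAYER_ETHERNET); // Loads Kinova.API.EthCommandLayerUbuntu.so on Linux
 *   robot.connect();
 */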

/*!
 * Close plugin.
 */
void vpRobotKinova::closePlugin()
{
  if (m_plugin_loaded) {
    if (m_verbose) {
      std::cout << "Close plugin" << std::endl;
    }
#ifdef __linux__
    dlclose(m_command_layer_handle);
#elif _WIN32
    FreeLibrary(m_command_layer_handle);
#endif
    m_plugin_loaded = false;
  }
}

/*!
 * Move the robot to home position.
 */
void vpRobotKinova::homing()
{
  if (!m_plugin_loaded) {
    throw(vpException(vpException::fatalError, "Jaco SDK plugin not loaded"));
  }
  if (!m_devices_count) {
    throw(vpException(vpException::fatalError, "No Kinova robot found"));
  }

  if (m_verbose) {
    std::cout << "Move the robot to home position" << std::endl;
  }
  KinovaMoveHome();
}

/*!
 * Connect to Kinova devices.
 * \return Number of devices that are connected.
 */
int vpRobotKinova::connect()
{
  loadPlugin();
  if (!m_plugin_loaded) {
    throw(vpException(vpException::fatalError, "Jaco SDK plugin not loaded"));
  }

  int result = (*KinovaInitAPI)();

  if (m_verbose) {
    std::cout << "Initialization result: " << result << std::endl;
  }

  m_devices_count = KinovaGetDevices(m_devices_list, result);

  if (m_verbose) {
    std::cout << "Found " << m_devices_count << " devices" << std::endl;
  }

  // By default set the first device as active
  setActiveDevice(0);

  // Initialize fingers
  KinovaInitFingers();

  return m_devices_count;
}
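/*
 * When several arms are connected, the active one can be selected after connect().
 * A short sketch, the device index being illustrative:
 *
 *   vpRobotKinova robot;
 *   robot.setCommandLayer(vpRobotKinova::CMD_LAYER_USB);
 *   int n = robot.connect();
 *   std::cout << "Number of Kinova devices: " << n << std::endl;
 *   if (n > 1) {
 *     robot.setActiveDevice(1); // Drive the second arm; the first one (index 0) is active by default
 *   }
 */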

/*!
 * Set active device.
 * \param[in] device : Device id corresponding to the active device. The first device has id 0.
 * The last device id is given by getNumDevices() - 1.
 * By default, the active device is the first one.
 *
 * To know how many devices are connected, use getNumDevices().
 *
 * \sa getActiveDevice()
 */
void vpRobotKinova::setActiveDevice(int device)
{
  if (!m_plugin_loaded) {
    throw(vpException(vpException::fatalError, "Jaco SDK plugin not loaded"));
  }
  if (!m_devices_count) {
    throw(vpException(vpException::fatalError, "No Kinova robot found"));
  }
  if (device < 0 || device >= m_devices_count) {
    throw(vpException(vpException::badValue, "Cannot set Kinova active device %d. Value should be in range [0, %d]", device, m_devices_count - 1));
  }
  if (device != m_active_device) {
    m_active_device = device;
    KinovaSetActiveDevice(m_devices_list[m_active_device]);
  }
}

#elif !defined(VISP_BUILD_SHARED_LIBS)
// Work around to avoid warning: libvisp_robot.a(vpRobotKinova.cpp.o) has
// no symbols
void dummy_vpRobotKinova() {};
#endif