HandEyeCalibrationVrepPlugin.hpp
Go to the documentation of this file.
1 #ifndef _HAND_EYE_CALIBRATION_VREP_PLUGIN_HPP_
2 #define _HAND_EYE_CALIBRATION_VREP_PLUGIN_HPP_
3 
#include <array>
#include <cmath>
#include <cstddef>
#include <iostream>
#include <memory>
#include <stdexcept>
#include <string>
#include <tuple>
#include <vector>

#include <boost/log/trivial.hpp>
#include <boost/exception/all.hpp>

#include "grl/vrep/Eigen.hpp"
#include "grl/vrep/Vrep.hpp"
#include "camodocal/calib/HandEyeCalibration.h"
#include "camodocal/calib/CamOdoCalibration.h"

#include "v_repLib.h"
16 
17 /// @todo move elsewhere, because it will conflict with others' implementations of outputting vectors
18 template<typename T>
19 inline boost::log::formatting_ostream& operator<<(boost::log::formatting_ostream& out, std::vector<T>& v)
20 {
21  out << "[";
22  size_t last = v.size() - 1;
23  for(size_t i = 0; i < v.size(); ++i) {
24  out << v[i];
25  if (i != last)
26  out << ", ";
27  }
28  out << "]";
29  return out;
30 }
31 
32 namespace grl {
33 
34 
35 
36 /// Creates a complete vrep plugin object
37 /// usage:
38 /// @code
39 /// auto handEyeCalib = std::make_shared<grl::HandEyeCalibrationVrepPlugin>();
40 /// handEyeCalib->construct();
41 /// // move objects to new position
42 /// @endcode
43 ///
44 /// @todo this implementation is a bit hacky, redesign it
45 /// @todo separate out grl specific code from general kuka control code
46 /// @todo Template on robot driver and create a driver that just reads/writes to/from the simulation, then pass the two templates so the simulation and the real driver can be selected.
47 class HandEyeCalibrationVrepPlugin : public std::enable_shared_from_this<HandEyeCalibrationVrepPlugin> {
48 public:
49 
50  static const bool debug = false;
51 
52  enum ParamIndex {
53  RobotBaseName, ///< V-REP handle string for the base of the first known transform, often a robot base
54  RobotTipName, ///< V-REP handle string for the tip of the first known transform, often a robot tip
55  OpticalTrackerBaseName, ///< V-REP handle string for the tip of the first known transform, often a robot tip
57  };
58 
59  /// @todo allow default params
60  typedef std::tuple<
61  std::string,
62  std::string,
63  std::string,
64  std::string
65  > Params;
66 
67 
68  static const Params defaultParams(){
69  return std::make_tuple(
70  "Robotiiwa" , // RobotBaseHandle,
71  "RobotFlangeTip" , // RobotTipHandle,
72  "OpticalTrackerBase#0" , // OpticalTrackerBaseName,
73  "Fiducial#22" // FiducialArmTip
74  );
75  }
76 
77 /// @todo allow KukaFRIThreadSeparator parameters to be updated
79  :
80  isFirstFrame(true),
81  frameCount(0),
82  params_(params)
83 {
84 /// @todo figure out how to re-enable when .so isn't loaded
85  // initHandles();
86 
87 }
88 
89 /// construct() function completes initialization of the plugin
90 /// @todo move this into the actual constructor, but need to correctly handle or attach vrep shared libraries for unit tests.
91 void construct(){
92  initHandles();
93 
94  // set the current transformEstimate to the initial estimate already in vrep
95  if(allHandlesSet) transformEstimate = getObjectTransform(opticalTrackerDetectedObjectName,robotTip);
96 }
97 
98 
99 void addFrame() {
100  BOOST_LOG_TRIVIAL(trace) << "Adding hand eye calibration frame #" << ++frameCount << std::endl;
101 
102  auto robotTipInRobotBase = getObjectTransform(robotTip,robotBase);
103  auto fiducialInOpticalTrackerBase = getObjectTransform(opticalTrackerDetectedObjectName,opticalTrackerBase);
104 
105  if(isFirstFrame){
106  firstRobotTipInRobotBaseInverse = robotTipInRobotBase.inverse();
107  firstFiducialInOpticalTrackerBaseInverse = fiducialInOpticalTrackerBase.inverse();
108  isFirstFrame = false;
109  }
110 
111  auto robotTipInFirstTipBase = firstRobotTipInRobotBaseInverse * robotTipInRobotBase; // A_0_Inv * A_i
112  auto fiducialInFirstFiducialBase = firstFiducialInOpticalTrackerBaseInverse * fiducialInOpticalTrackerBase; // B_0_Inv * B_i
113 
114 
115  rvecsArm.push_back( eigenRotToEigenVector3dAngleAxis(robotTipInFirstTipBase.rotation() ));
116  tvecsArm.push_back( robotTipInFirstTipBase.translation() );
117 
118  rvecsFiducial.push_back(eigenRotToEigenVector3dAngleAxis(fiducialInFirstFiducialBase.rotation() ));
119  tvecsFiducial.push_back( fiducialInFirstFiducialBase.translation());
120 
121 
122  if(debug){
123 
124  std::cout << "\nrobotTipInRobotBase:\n" << poseString(robotTipInRobotBase) << "\n";
125  std::cout << "\nfiducialInOpticalTrackerBase:\n" << poseString(fiducialInOpticalTrackerBase) << "\n";
126 
127  std::cout << "\nrobotTipInFirstTipBase:\n" << poseString(robotTipInFirstTipBase) << "\n";
128  std::cout << "\nfiducialInFirstFiducialBase:\n" << poseString(fiducialInFirstFiducialBase) << "\n";
129 
130  // print simulation transfrom from tip to fiducial
131  Eigen::Affine3d RobotTipToFiducial = getObjectTransform(opticalTrackerDetectedObjectName,robotTip);
132  BOOST_LOG_TRIVIAL(info) << "\n" << poseString(RobotTipToFiducial,"expected RobotTipToFiducial (simulation only): ") << std::endl;
133 
134  BOOST_VERIFY(robotTipInFirstTipBase.translation().norm() - fiducialInFirstFiducialBase.translation().norm() < 0.1);
135  }
136 }
137 
138 /// @brief run solver to estimate the unknown transform and set the simulation to the estimated value
139 ///
140 /// after calling addFrame a number of times in different positions and orientations
141 ///
142 /// @todo probably want to run at least part of this in a separate thread.
143 /// @todo evaluate if applyEstimate should not be called by this
145 
146  BOOST_LOG_TRIVIAL(trace) << "Running Hand Eye Screw Estimate with the following numbers of entries in each category: rvecsFiducial" << rvecsFiducial.size()
147  << " tvecsFiducial: " << tvecsFiducial.size() << " rvecsArm: " << rvecsArm.size() << " tvecsArm: " << tvecsArm.size() << std::endl;
148 
149  BOOST_VERIFY(allHandlesSet);
150 
151 
152  handEyeCalib.estimateHandEyeScrew(
153  rvecsArm,
154  tvecsArm,
155  rvecsFiducial,
156  tvecsFiducial,
157  transformEstimate.matrix()
158  );
159 
160 
161  // get fiducial in optical tracker base frame
162  int ret = simGetObjectPosition(opticalTrackerDetectedObjectName, opticalTrackerBase, detectedObjectPosition.begin());
163  if(ret==-1) BOOST_THROW_EXCEPTION(std::runtime_error("HandEyeCalibrationVrepPlugin: Could not get position"));
164  ret = simGetObjectQuaternion(opticalTrackerDetectedObjectName, opticalTrackerBase, detectedObjectQuaternion.begin());
165  if(ret==-1) BOOST_THROW_EXCEPTION(std::runtime_error("HandEyeCalibrationVrepPlugin: Could not get quaternion"));
166 
167  if(debug){
168  // print simulation transfrom from tip to fiducial
169  Eigen::Affine3d RobotTipToFiducial = getObjectTransform(opticalTrackerDetectedObjectName,robotTip);
170  BOOST_LOG_TRIVIAL(info) << "\n" << poseString(RobotTipToFiducial,"expected RobotTipToFiducial (simulation only): ") << std::endl;
171  }
172 
173  BOOST_LOG_TRIVIAL(info) << "\n" << poseString(transformEstimate,"estimated RobotTipToFiducial:") << std::endl;
174 
175  applyEstimate();
176 
177  // print results
178  Eigen::Quaterniond eigenQuat(transformEstimate.rotation());
179  BOOST_LOG_TRIVIAL(info) << "Hand Eye Screw Estimate quat wxyz\n: " << eigenQuat.w() << " " << eigenQuat.x() << " " << eigenQuat.y() << " " << eigenQuat.z() << " " << " translation xyz: " << transformEstimate.translation().x() << " " << transformEstimate.translation().y() << " " << transformEstimate.translation().z() << " " << std::endl;
180 
181  BOOST_LOG_TRIVIAL(info) << "Optical Tracker Base Measured quat wxyz\n: " << detectedObjectQuaternion[0] << " " << detectedObjectQuaternion[1] << " " << detectedObjectQuaternion[2] << " " << detectedObjectQuaternion[3] << " " << " translation xyz: " << detectedObjectPosition[0] << " " << detectedObjectPosition[1] << " " << detectedObjectPosition[2] << " " << std::endl;
182 
183 }
184 
185 /// @brief Will apply the stored estimate to the v-rep simulation value
186 ///
187 /// A default transform is saved when construct() was called
188 /// so if no estimate has been found that will be used.
190 
191  // set transform between end effector and fiducial
192  setObjectTransform(opticalTrackerDetectedObjectName, robotTip, transformEstimate);
193 }
194 
195 Eigen::Affine3d getEstimate(){
196  return transformEstimate;
197 }
198 
199 
200 /// @brief Correct modified transform for measured optical tracker data
201 ///
202 /// If real time transform measurements are happening applyEstimate will move the measured object
203 /// relative to the sensor and this puts the transform measurement back so the object
204 /// appears in its real position. However, if running in simulation only
205 /// the sensor will be in the correct position, so you don't want to call this.
207  // set real optical tracker position and orientation relative to the robot fiducial
208  simSetObjectPosition (opticalTrackerBase, opticalTrackerDetectedObjectName, detectedObjectPosition.begin());
209  simSetObjectQuaternion(opticalTrackerBase, opticalTrackerDetectedObjectName, detectedObjectQuaternion.begin());
210 }
211 
212 private:
213 
214 
215 
216 /// @todo create a function that calls simGetObjectHandle and throws an exception when it fails
217 /// @todo throw an exception if any of the handles is -1
218 void initHandles() {
219 
220  robotTip = grl::vrep::getHandleFromParam<RobotTipName> (params_); //Obtain RobotTip handle
221  robotBase = grl::vrep::getHandleFromParam<RobotBaseName> (params_);
222  opticalTrackerBase = grl::vrep::getHandleFromParam<OpticalTrackerBaseName> (params_);
223  opticalTrackerDetectedObjectName = grl::vrep::getHandleFromParam<OpticalTrackerDetectedObjectName>(params_);
224 
225  allHandlesSet = true;
226 }
227 
228 
229 camodocal::HandEyeCalibration handEyeCalib;
230 std::vector<Eigen::Vector3d, Eigen::aligned_allocator<Eigen::Vector3d> > rvecsArm;
231 std::vector<Eigen::Vector3d, Eigen::aligned_allocator<Eigen::Vector3d> > tvecsArm;
232 std::vector<Eigen::Vector3d, Eigen::aligned_allocator<Eigen::Vector3d> > rvecsFiducial;
233 std::vector<Eigen::Vector3d, Eigen::aligned_allocator<Eigen::Vector3d> > tvecsFiducial;
234 
235 bool isFirstFrame;
236 std::size_t frameCount;
237 Eigen::Affine3d firstRobotTipInRobotBaseInverse;
238 Eigen::Affine3d firstFiducialInOpticalTrackerBaseInverse;
239 
240 ///
241 std::array<float,3> detectedObjectPosition;
242 std::array<float,4> detectedObjectQuaternion;
243 /// estimate transform between end effector and fiducial
244 Eigen::Affine3d transformEstimate;
245 
246 int robotTip = -1; //Obtain RobotTip handle
247 int robotBase = -1;
248 int opticalTrackerDetectedObjectName = -1;
249 int opticalTrackerBase = -1;
250 //int ImplantCutPath = -1;
251 //int BallJointPath = -1;
252 //int bone = -1;
253 
254 bool allHandlesSet = false;
255 
256 
257 private:
258 Params params_;
259 
260 public:
261  EIGEN_MAKE_ALIGNED_OPERATOR_NEW
262 };
263 
264 }
265 
266 #endif
void restoreSensorPosition()
Correct modified transform for measured optical tracker data.
Eigen::Affine3d getObjectTransform(int objectHandle, int relativeToObjectHandle=-1)
Definition: Eigen.hpp:135
V-REP handle string for the tip of the first known transform, often a robot tip.
void applyEstimate()
Will apply the stored estimate to the v-rep simulation value.
ptrSimGetObjectPosition simGetObjectPosition
Definition: v_repLib.cpp:75
V-REP handle string for the base of the first known transform, often a robot base.
ptrSimSetObjectPosition simSetObjectPosition
Definition: v_repLib.cpp:76
Eigen::Vector3d eigenRotToEigenVector3dAngleAxis(Input eigenQuat)
Definition: Eigen.hpp:63
HandEyeCalibrationVrepPlugin(Params params=defaultParams())
void setObjectTransform(int objectHandle, int relativeToObjectHandle, T &transform)
Definition: Eigen.hpp:117
std::tuple< std::string, std::string, std::string, std::string > Params
ptrSimSetObjectQuaternion simSetObjectQuaternion
Definition: v_repLib.cpp:365
ptrSimGetObjectQuaternion simGetObjectQuaternion
Definition: v_repLib.cpp:364
void estimateHandEyeScrew()
run solver to estimate the unknown transform and set the simulation to the estimated value ...
V-REP handle string for the tip of the first known transform, often a robot tip.