1 /*******************************************************************************\
2 * *
3 * This file contains routines for transforming between the different coordinate *
4 * systems used in Tsai's perspective projection camera model. The routines *
5 * are: *
6 * *
7 * world_coord_to_image_coord () *
8 * image_coord_to_world_coord () *
9 * world_coord_to_camera_coord () *
10 * camera_coord_to_world_coord () *
11 * distorted_to_undistorted_sensor_coord () *
12 * undistorted_to_distorted_sensor_coord () *
13 * distorted_to_undistorted_image_coord () *
14 * undistorted_to_distorted_image_coord () *
15 * *
16 * The routines make use of the calibrated camera parameters and calibration *
17 * constants contained in the two external data structures cp and cc. *
18 * *
19 * Notation *
20 * -------- *
21 * *
22 * The camera's X axis runs along increasing column coordinates in the *
23 * image/frame. The Y axis runs along increasing row coordinates. *
24 * All 3D coordinates are right-handed. *
25 * *
26 * pix == image/frame grabber picture element *
27 * sel == camera sensor element *
28 * *
29 * *
30 * History *
31 * ------- *
32 * *
33 * 18-Oct-95 Reg Willson (rgwillson@mmm.com) at 3M St. Paul, MN *
34 * Added check in undistorted_to_distorted_sensor_coord () for situation *
35 * where an undistorted sensor point maps to the maximum barrel distortion *
36 * radius. *
37 * *
38 * 18-May-95 Reg Willson (rgwillson@mmm.com) at 3M St. Paul, MN *
39 * Split out from the cal_main.c file. *
40 * Fix the CBRT routine so it handles negative arguments properly. *
41 * *
42 \*******************************************************************************/
43 #include <stdio.h>
44 #include <math.h>
45 #include "cal_main.h"
46
47
48 extern struct camera_parameters cp;
49 extern struct calibration_constants cc;
50
51
52 #define SQRT(x) sqrt(fabs(x))
53
54
55 /************************************************************************/
56 /* This cube root routine handles negative arguments (unlike cbrt). */
57
CBRT(x)58 double CBRT (x)
59 double x;
60 {
61 double pow ();
62
63 if (x == 0)
64 return (0);
65 else if (x > 0)
66 return (pow (x, (double) 1.0 / 3.0));
67 else
68 return (-pow (-x, (double) 1.0 / 3.0));
69 }
70
71
72 /************************************************************************/
73 /*
74 This routine converts from undistorted to distorted sensor coordinates.
75 The technique involves algebraically solving the cubic polynomial
76
77 Ru = Rd * (1 + kappa1 * Rd**2)
78
79 using the Cardan method.
80
81 Note: for kappa1 < 0 the distorted sensor plane extends out to a maximum
82 barrel distortion radius of Rd = sqrt (-1/(3 * kappa1)).
83
84 To see the logic used in this routine try graphing the above polynomial
85 for positive and negative kappa1's
86 */
87
/*
 * Convert a point (Xu, Yu) on the undistorted sensor plane [mm] to the
 * distorted sensor plane point (*Xd, *Yd) [mm], i.e. apply the single-term
 * radial lens distortion model by solving the cubic
 *
 *     Ru = Rd * (1 + kappa1 * Rd**2)
 *
 * for Rd with the Cardan method, then scaling (Xu, Yu) radially.
 */
void undistorted_to_distorted_sensor_coord (Xu, Yu, Xd, Yd)
    double    Xu,
              Yu,
              *Xd,
              *Yd;
{
#define SQRT3   1.732050807568877293527446341505872366943

    double    Ru,
              Rd,
              lambda,
              c,
              d,
              Q,
              R,
              D,
              S,
              T,
              sinT,
              cosT;

    /* at the optical axis, or with no distortion modelled, the mapping
       is the identity (also avoids a division by zero in Ru below) */
    if (((Xu == 0) && (Yu == 0)) || (cc.kappa1 == 0)) {
	*Xd = Xu;
	*Yd = Yu;
	return;
    }

    Ru = hypot (Xu, Yu);	/* SQRT(Xu*Xu+Yu*Yu) */

    /* write the cubic as Rd**3 + c*Rd + d = 0 (depressed form) */
    c = 1 / cc.kappa1;
    d = -c * Ru;

    /* Cardan quantities: D is the discriminant deciding 1 vs. 3 real roots */
    Q = c / 3;
    R = -d / 2;
    D = CUB (Q) + SQR (R);

    if (D >= 0) {		/* one real root */
	D = SQRT (D);
	S = CBRT (R + D);
	T = CBRT (R - D);
	Rd = S + T;

	/* a negative root means the undistorted point lies beyond the
	   maximum barrel distortion radius Rd = sqrt(-1/(3*kappa1));
	   clamp to that radius and warn (see 18-Oct-95 history note) */
	if (Rd < 0) {
	    Rd = SQRT (-1 / (3 * cc.kappa1));
	    fprintf (stderr, "\nWarning: undistorted image point to distorted image point mapping limited by\n");
	    fprintf (stderr, "         maximum barrel distortion radius of %lf\n", Rd);
	    fprintf (stderr, "         (Xu = %lf, Yu = %lf) -> (Xd = %lf, Yd = %lf)\n\n",
		     Xu, Yu, Xu * Rd / Ru, Yu * Rd / Ru);
	}
    } else {			/* three real roots */
	/* trigonometric form of the Cardan solution */
	D = SQRT (-D);
	S = CBRT (hypot (R, D));
	T = atan2 (D, R) / 3;
	SINCOS (T, sinT, cosT);

	/* the larger positive root is  2*S*cos(T)                   */
	/* the smaller positive root is -S*cos(T) + SQRT(3)*S*sin(T) */
	/* the negative root is         -S*cos(T) - SQRT(3)*S*sin(T) */

	Rd = -S * cosT + SQRT3 * S * sinT;	/* use the smaller positive root */
    }

    /* scale the point radially by Rd/Ru to get the distorted coordinates */
    lambda = Rd / Ru;

    *Xd = Xu * lambda;
    *Yd = Yu * lambda;
}
155
156
157 /************************************************************************/
distorted_to_undistorted_sensor_coord(Xd,Yd,Xu,Yu)158 void distorted_to_undistorted_sensor_coord (Xd, Yd, Xu, Yu)
159 double Xd,
160 Yd,
161 *Xu,
162 *Yu;
163 {
164 double distortion_factor;
165
166 /* convert from distorted to undistorted sensor plane coordinates */
167 distortion_factor = 1 + cc.kappa1 * (SQR (Xd) + SQR (Yd));
168 *Xu = Xd * distortion_factor;
169 *Yu = Yd * distortion_factor;
170 }
171
172
173 /************************************************************************/
undistorted_to_distorted_image_coord(Xfu,Yfu,Xfd,Yfd)174 void undistorted_to_distorted_image_coord (Xfu, Yfu, Xfd, Yfd)
175 double Xfu,
176 Yfu,
177 *Xfd,
178 *Yfd;
179 {
180 double Xu,
181 Yu,
182 Xd,
183 Yd;
184
185 /* convert from image to sensor coordinates */
186 Xu = cp.dpx * (Xfu - cp.Cx) / cp.sx;
187 Yu = cp.dpy * (Yfu - cp.Cy);
188
189 /* convert from undistorted sensor to distorted sensor plane coordinates */
190 undistorted_to_distorted_sensor_coord (Xu, Yu, &Xd, &Yd);
191
192 /* convert from sensor to image coordinates */
193 *Xfd = Xd * cp.sx / cp.dpx + cp.Cx;
194 *Yfd = Yd / cp.dpy + cp.Cy;
195 }
196
197
198 /************************************************************************/
distorted_to_undistorted_image_coord(Xfd,Yfd,Xfu,Yfu)199 void distorted_to_undistorted_image_coord (Xfd, Yfd, Xfu, Yfu)
200 double Xfd,
201 Yfd,
202 *Xfu,
203 *Yfu;
204 {
205 double Xd,
206 Yd,
207 Xu,
208 Yu;
209
210 /* convert from image to sensor coordinates */
211 Xd = cp.dpx * (Xfd - cp.Cx) / cp.sx;
212 Yd = cp.dpy * (Yfd - cp.Cy);
213
214 /* convert from distorted sensor to undistorted sensor plane coordinates */
215 distorted_to_undistorted_sensor_coord (Xd, Yd, &Xu, &Yu);
216
217 /* convert from sensor to image coordinates */
218 *Xfu = Xu * cp.sx / cp.dpx + cp.Cx;
219 *Yfu = Yu / cp.dpy + cp.Cy;
220 }
221
222
223 /***********************************************************************\
224 * This routine takes the position of a point in world coordinates [mm] *
225 * and determines the position of its image in image coordinates [pix]. *
226 \***********************************************************************/
world_coord_to_image_coord(xw,yw,zw,Xf,Yf)227 void world_coord_to_image_coord (xw, yw, zw, Xf, Yf)
228 double xw,
229 yw,
230 zw,
231 *Xf,
232 *Yf;
233 {
234 double xc,
235 yc,
236 zc,
237 Xu,
238 Yu,
239 Xd,
240 Yd;
241
242 /* convert from world coordinates to camera coordinates */
243 xc = cc.r1 * xw + cc.r2 * yw + cc.r3 * zw + cc.Tx;
244 yc = cc.r4 * xw + cc.r5 * yw + cc.r6 * zw + cc.Ty;
245 zc = cc.r7 * xw + cc.r8 * yw + cc.r9 * zw + cc.Tz;
246
247 /* convert from camera coordinates to undistorted sensor plane coordinates */
248 Xu = cc.f * xc / zc;
249 Yu = cc.f * yc / zc;
250
251 /* convert from undistorted to distorted sensor plane coordinates */
252 undistorted_to_distorted_sensor_coord (Xu, Yu, &Xd, &Yd);
253
254 /* convert from distorted sensor plane coordinates to image coordinates */
255 *Xf = Xd * cp.sx / cp.dpx + cp.Cx;
256 *Yf = Yd / cp.dpy + cp.Cy;
257 }
258
259
260 /***********************************************************************\
261 * This routine performs an inverse perspective projection to determine *
262 * the position of a point in world coordinates that corresponds to a *
263 * given position in image coordinates. To constrain the inverse *
264 * projection to a single point the routine requires a Z world *
265 * coordinate for the point in addition to the X and Y image coordinates.*
266 \***********************************************************************/
/*
 * Inverse perspective projection: given an image coordinate (Xfd, Yfd)
 * [pix] and a world Z coordinate zw [mm] (needed to pin the back-projected
 * ray to a single point), compute the world X and Y coordinates
 * (*xw, *yw) [mm].
 */
void image_coord_to_world_coord (Xfd, Yfd, zw, xw, yw)
    double    Xfd,
              Yfd,
              zw,
              *xw,
              *yw;
{
    double    Xd,
              Yd,
              Xu,
              Yu,
              common_denominator;

    /* convert from image to distorted sensor coordinates */
    Xd = cp.dpx * (Xfd - cp.Cx) / cp.sx;
    Yd = cp.dpy * (Yfd - cp.Cy);

    /* convert from distorted sensor to undistorted sensor plane coordinates */
    distorted_to_undistorted_sensor_coord (Xd, Yd, &Xu, &Yu);

    /* calculate the corresponding xw and yw world coordinates    */
    /* (these equations were derived by simply inverting          */
    /* the perspective projection equations using Macsyma)        */
    /* NOTE(review): common_denominator is not checked against 0; */
    /* presumably degenerate geometry never occurs for calibrated */
    /* parameters — confirm before relying on arbitrary inputs.   */
    common_denominator = ((cc.r1 * cc.r8 - cc.r2 * cc.r7) * Yu +
			  (cc.r5 * cc.r7 - cc.r4 * cc.r8) * Xu -
			  cc.f * cc.r1 * cc.r5 + cc.f * cc.r2 * cc.r4);

    *xw = (((cc.r2 * cc.r9 - cc.r3 * cc.r8) * Yu +
	    (cc.r6 * cc.r8 - cc.r5 * cc.r9) * Xu -
	    cc.f * cc.r2 * cc.r6 + cc.f * cc.r3 * cc.r5) * zw +
	   (cc.r2 * cc.Tz - cc.r8 * cc.Tx) * Yu +
	   (cc.r8 * cc.Ty - cc.r5 * cc.Tz) * Xu -
	   cc.f * cc.r2 * cc.Ty + cc.f * cc.r5 * cc.Tx) / common_denominator;

    *yw = -(((cc.r1 * cc.r9 - cc.r3 * cc.r7) * Yu +
	     (cc.r6 * cc.r7 - cc.r4 * cc.r9) * Xu -
	     cc.f * cc.r1 * cc.r6 + cc.f * cc.r3 * cc.r4) * zw +
	    (cc.r1 * cc.Tz - cc.r7 * cc.Tx) * Yu +
	    (cc.r7 * cc.Ty - cc.r4 * cc.Tz) * Xu -
	    cc.f * cc.r1 * cc.Ty + cc.f * cc.r4 * cc.Tx) / common_denominator;
}
308
309
310 /***********************************************************************\
311 * This routine takes the position of a point in world coordinates [mm] *
312 * and determines its position in camera coordinates [mm]. *
313 \***********************************************************************/
world_coord_to_camera_coord(xw,yw,zw,xc,yc,zc)314 void world_coord_to_camera_coord (xw, yw, zw, xc, yc, zc)
315 double xw,
316 yw,
317 zw,
318 *xc,
319 *yc,
320 *zc;
321 {
322 *xc = cc.r1 * xw + cc.r2 * yw + cc.r3 * zw + cc.Tx;
323 *yc = cc.r4 * xw + cc.r5 * yw + cc.r6 * zw + cc.Ty;
324 *zc = cc.r7 * xw + cc.r8 * yw + cc.r9 * zw + cc.Tz;
325 }
326
327
328 /***********************************************************************\
329 * This routine takes the position of a point in camera coordinates [mm] *
330 * and determines its position in world coordinates [mm]. *
331 \***********************************************************************/
/*
 * Inverse rigid-body transform: given a point (xc, yc, zc) in camera
 * coordinates [mm], compute its world coordinates (*xw, *yw, *zw) [mm],
 * i.e. solve Pc = R * Pw + T for Pw without assuming R is orthonormal.
 */
void camera_coord_to_world_coord (xc, yc, zc, xw, yw, zw)
    double    xc,
              yc,
              zc,
              *xw,
              *yw,
              *zw;
{
    double    common_denominator;

    /* these equations were found by simply inverting the previous routine using Macsyma */

    /* common_denominator is det(R), by cofactor expansion over r7..r9;  */
    /* each coordinate below is a Cramer's-rule quotient of cofactors    */
    common_denominator = ((cc.r1 * cc.r5 - cc.r2 * cc.r4) * cc.r9 +
			  (cc.r3 * cc.r4 - cc.r1 * cc.r6) * cc.r8 +
			  (cc.r2 * cc.r6 - cc.r3 * cc.r5) * cc.r7);

    *xw = ((cc.r2 * cc.r6 - cc.r3 * cc.r5) * zc +
	   (cc.r3 * cc.r8 - cc.r2 * cc.r9) * yc +
	   (cc.r5 * cc.r9 - cc.r6 * cc.r8) * xc +
	   (cc.r3 * cc.r5 - cc.r2 * cc.r6) * cc.Tz +
	   (cc.r2 * cc.r9 - cc.r3 * cc.r8) * cc.Ty +
	   (cc.r6 * cc.r8 - cc.r5 * cc.r9) * cc.Tx) / common_denominator;

    *yw = -((cc.r1 * cc.r6 - cc.r3 * cc.r4) * zc +
	    (cc.r3 * cc.r7 - cc.r1 * cc.r9) * yc +
	    (cc.r4 * cc.r9 - cc.r6 * cc.r7) * xc +
	    (cc.r3 * cc.r4 - cc.r1 * cc.r6) * cc.Tz +
	    (cc.r1 * cc.r9 - cc.r3 * cc.r7) * cc.Ty +
	    (cc.r6 * cc.r7 - cc.r4 * cc.r9) * cc.Tx) / common_denominator;

    *zw = ((cc.r1 * cc.r5 - cc.r2 * cc.r4) * zc +
	   (cc.r2 * cc.r7 - cc.r1 * cc.r8) * yc +
	   (cc.r4 * cc.r8 - cc.r5 * cc.r7) * xc +
	   (cc.r2 * cc.r4 - cc.r1 * cc.r5) * cc.Tz +
	   (cc.r1 * cc.r8 - cc.r2 * cc.r7) * cc.Ty +
	   (cc.r5 * cc.r7 - cc.r4 * cc.r8) * cc.Tx) / common_denominator;
}
369