1 /*******************************************************************************
2  * trace.cpp
3  *
4  * ---------------------------------------------------------------------------
5  * Persistence of Vision Ray Tracer ('POV-Ray') version 3.7.
6  * Copyright 1991-2013 Persistence of Vision Raytracer Pty. Ltd.
7  *
8  * POV-Ray is free software: you can redistribute it and/or modify
9  * it under the terms of the GNU Affero General Public License as
10  * published by the Free Software Foundation, either version 3 of the
11  * License, or (at your option) any later version.
12  *
13  * POV-Ray is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16  * GNU Affero General Public License for more details.
17  *
18  * You should have received a copy of the GNU Affero General Public License
19  * along with this program.  If not, see <http://www.gnu.org/licenses/>.
20  * ---------------------------------------------------------------------------
21  * POV-Ray is based on the popular DKB raytracer version 2.12.
22  * DKBTrace was originally written by David K. Buck.
23  * DKBTrace Ver 2.0-2.12 were written by David K. Buck & Aaron A. Collins.
24  * ---------------------------------------------------------------------------
25  * $File: //depot/public/povray/3.x/source/backend/render/trace.cpp $
26  * $Revision: #1 $
27  * $Change: 6069 $
28  * $DateTime: 2013/11/06 11:59:40 $
29  * $Author: chrisc $
30  *******************************************************************************/
31 
32 #include <boost/thread.hpp>
33 #include <boost/bind.hpp>
34 
35 #include <float.h>
36 
37 // frame.h must always be the first POV file included (pulls in platform config)
38 #include "backend/frame.h"
39 #include "backend/colour/colour.h"
40 #include "backend/math/vector.h"
41 #include "backend/math/matrices.h"
42 #include "backend/scene/objects.h"
43 #include "backend/pattern/pattern.h"
44 #include "backend/pattern/warps.h"
45 #include "backend/support/imageutil.h"
46 #include "backend/texture/normal.h"
47 #include "backend/texture/pigment.h"
48 #include "backend/texture/texture.h"
49 #include "backend/render/trace.h"
50 #include "backend/render/tracetask.h"
51 #include "backend/scene/scene.h"
52 #include "backend/scene/view.h"
53 #include "backend/lighting/point.h"
54 #include "backend/lighting/radiosity.h"
55 #include "backend/lighting/subsurface.h"
56 #include "backend/shape/csg.h"
57 #include "backend/shape/boxes.h"
58 #include "backend/support/bsptree.h"
59 
60 // this must be the last file included
61 #include "base/povdebug.h"
62 
63 namespace pov
64 {
65 
66 #define SHADOW_TOLERANCE 1.0e-3
67 
68 #define MEDIA_AFTER_TEXTURE_INTERPOLATION 1
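// When MEDIA_AFTER_TEXTURE_INTERPOLATION is enabled, participating media along the ray
// is computed once in Trace::ComputeTextureColour() / Trace::ComputeShadowColour(), after
// the weighted texture colours have been interpolated, rather than per texture layer in
// Trace::ComputeLightedTexture() / Trace::ComputeShadowTexture(); see the #if blocks below.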
69 
70 Trace::Trace(shared_ptr<SceneData> sd, TraceThreadData *td, unsigned int qf,
71              CooperateFunctor& cf, MediaFunctor& mf, RadiosityFunctor& rf) :
72 	threadData(td),
73 	sceneData(sd),
74 	maxFoundTraceLevel(0),
75 	qualityFlags(qf),
76 	mailbox(0),
77 	crandRandomNumberGenerator(0),
78 	randomNumbers(0.0, 1.0, 32768),
79 	randomNumberGenerator(&randomNumbers),
80 	ssltUniformDirectionGenerator(),
81 	ssltUniformNumberGenerator(),
82 	ssltCosWeightedDirectionGenerator(),
83 	cooperate(cf),
84 	media(mf),
85 	radiosity(rf),
86 	lightColorCacheIndex(-1)
87 {
88 	lightSourceLevel1ShadowCache.resize(max(1, (int) threadData->lightSources.size()));
89 	for(vector<ObjectPtr>::iterator i(lightSourceLevel1ShadowCache.begin()); i != lightSourceLevel1ShadowCache.end(); i++)
90 		*i = NULL;
91 
92 	lightSourceOtherShadowCache.resize(max(1, (int) threadData->lightSources.size()));
93 	for(vector<ObjectPtr>::iterator i(lightSourceOtherShadowCache.begin()); i != lightSourceOtherShadowCache.end(); i++)
94 		*i = NULL;
95 
96 	lightColorCache.resize(max(20U, sd->parsedMaxTraceLevel + 1));
97 	for(LightColorCacheListList::iterator it = lightColorCache.begin(); it != lightColorCache.end(); it++)
98 		it->resize(max(1, (int) threadData->lightSources.size()));
99 
100 	if(sceneData->boundingMethod == 2)
101 		mailbox = BSPTree::Mailbox(sceneData->numberOfFiniteObjects);
102 }
103 
104 Trace::~Trace()
105 {
106 }
107 
108 double Trace::TraceRay(const Ray& ray, Colour& colour, COLC weight, TraceTicket& ticket, bool continuedRay, DBL maxDepth)
109 {
110 	Intersection bestisect;
111 	bool found;
112 	NoSomethingFlagRayObjectCondition precond;
113 	TrueRayObjectCondition postcond;
114 
115 	POV_ULONG nrays = threadData->Stats()[Number_Of_Rays]++;
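	// Give the cooperation functor a chance to run (e.g. to service pause/abort requests):
	// on every primary ray, and on every 16th ray otherwise.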
116 	if(ray.IsPrimaryRay() || (((unsigned char) nrays & 0x0f) == 0x00))
117 		cooperate();
118 
119 	// Check for max. trace level or ADC bailout.
120 	if((ticket.traceLevel >= ticket.maxAllowedTraceLevel) || (weight < ticket.adcBailout))
121 	{
122 		if(weight < ticket.adcBailout)
123 			threadData->Stats()[ADC_Saves]++;
124 
125 		colour.clear();
126 		return HUGE_VAL;
127 	}
128 
129 	if (maxDepth >= EPSILON)
130 		bestisect.Depth = maxDepth;
131 
132 	found = FindIntersection(bestisect, ray, precond, postcond);
133 
134 	// Check if we're busy shooting too many radiosity sample rays at an unimportant object
135 	if (ticket.radiosityImportanceQueried >= 0.0)
136 	{
137 		if (found)
138 		{
139 			ticket.radiosityImportanceFound = bestisect.Object->RadiosityImportance(sceneData->radiositySettings.defaultImportance);
140 		}
141 		else
142 			ticket.radiosityImportanceFound = sceneData->radiositySettings.defaultImportance;
143 
144 		if (ticket.radiosityImportanceFound < ticket.radiosityImportanceQueried)
145 		{
146 			if(found == false)
147 				return HUGE_VAL;
148 			else
149 				return bestisect.Depth;
150 		}
151 	}
152 	float oldRadiosityImportanceQueried = ticket.radiosityImportanceQueried;
153 	ticket.radiosityImportanceQueried = -1.0; // indicates that recursive calls to TraceRay() should not check for radiosity importance
154 
155 	const bool traceLevelIncremented = !continuedRay;
156 
157 	if(traceLevelIncremented)
158 	{
159 		// Set highest level traced.
160 		ticket.traceLevel++;
161 		ticket.maxFoundTraceLevel = (unsigned int) max(ticket.maxFoundTraceLevel, ticket.traceLevel);
162 	}
163 
164 	if((qualityFlags & Q_VOLUME) && (ray.IsPhotonRay() == true) && (ray.IsHollowRay() == true))
165 	{
166 		// Note: this version of ComputeMedia does not deposit photons. This is
167 		// intentional.  Even though we're processing a photon ray, we don't want
168 		// to deposit photons in the infinite atmosphere, only in contained
169 		// media, which is processed later (in ComputeLightedTexture).  [nk]
170 		media.ComputeMedia(sceneData->atmosphere, ray, bestisect, colour, ticket);
171 
172 		if(sceneData->fog != NULL)
173 			ComputeFog(ray, bestisect, colour);
174 	}
175 
176 	if(found)
177 		ComputeTextureColour(bestisect, colour, ray, weight, false, ticket);
178 	else
179 		ComputeSky(ray, colour, ticket);
180 
181 	if((qualityFlags & Q_VOLUME) && (ray.IsPhotonRay() == false) && (ray.IsHollowRay() == true))
182 	{
183 		if((sceneData->rainbow != NULL) && (ray.IsShadowTestRay() == false))
184 			ComputeRainbow(ray, bestisect, colour);
185 
186 		media.ComputeMedia(sceneData->atmosphere, ray, bestisect, colour, ticket);
187 
188 		if(sceneData->fog != NULL)
189 			ComputeFog(ray, bestisect, colour);
190 	}
191 
192 	if(traceLevelIncremented)
193 		ticket.traceLevel--;
194 	maxFoundTraceLevel = (unsigned int) max(maxFoundTraceLevel, ticket.maxFoundTraceLevel);
195 
196 	ticket.radiosityImportanceQueried = oldRadiosityImportanceQueried;
197 
198 	if(found == false)
199 		return HUGE_VAL;
200 	else
201 		return bestisect.Depth;
202 }
203 
204 bool Trace::FindIntersection(Intersection& bestisect, const Ray& ray)
205 {
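	// boundingMethod selects the acceleration scheme: 2 = BSP tree over the finite objects
	// (infinite objects are then tested by brute force), 1 = bounding slab hierarchy,
	// 0 = brute-force test of every object.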
206 	switch(sceneData->boundingMethod)
207 	{
208 		case 2:
209 		{
210 			BSPIntersectFunctor ifn(bestisect, ray, sceneData->objects, threadData);
211 			bool found = false;
212 
213 			mailbox.clear();
214 
215 			found = (*(sceneData->tree))(ray, ifn, mailbox, bestisect.Depth);
216 
217 			// test infinite objects
218 			for(vector<ObjectPtr>::iterator it = sceneData->objects.begin() + sceneData->numberOfFiniteObjects; it != sceneData->objects.end(); it++)
219 			{
220 				Intersection isect;
221 
222 				if(FindIntersection(*it, isect, ray) && (isect.Depth < bestisect.Depth))
223 				{
224 					bestisect = isect;
225 					found = true;
226 				}
227 			}
228 
229 			return found;
230 		}
231 		case 1:
232 		{
233 			if(sceneData->boundingSlabs != NULL)
234 				return (Intersect_BBox_Tree(priorityQueue, sceneData->boundingSlabs, ray, &bestisect, threadData));
235 		}
236 		// FALLTHROUGH
237 		case 0:
238 		{
239 			bool found = false;
240 
241 			for(vector<ObjectPtr>::iterator it = sceneData->objects.begin(); it != sceneData->objects.end(); it++)
242 			{
243 				Intersection isect;
244 
245 				if(FindIntersection(*it, isect, ray) && (isect.Depth < bestisect.Depth))
246 				{
247 					bestisect = isect;
248 					found = true;
249 				}
250 			}
251 
252 			return found;
253 		}
254 	}
255 
256 	return false;
257 }
258 
259 bool Trace::FindIntersection(Intersection& bestisect, const Ray& ray, const RayObjectCondition& precondition, const RayObjectCondition& postcondition)
260 {
261 	switch(sceneData->boundingMethod)
262 	{
263 		case 2:
264 		{
265 			BSPIntersectCondFunctor ifn(bestisect, ray, sceneData->objects, threadData, precondition, postcondition);
266 			bool found = false;
267 
268 			mailbox.clear();
269 
270 			found = (*(sceneData->tree))(ray, ifn, mailbox, bestisect.Depth);
271 
272 			// test infinite objects
273 			for(vector<ObjectPtr>::iterator it = sceneData->objects.begin() + sceneData->numberOfFiniteObjects; it != sceneData->objects.end(); it++)
274 			{
275 				if(precondition(ray, *it, 0.0) == true)
276 				{
277 					Intersection isect;
278 
279 					if(FindIntersection(*it, isect, ray, postcondition) && (isect.Depth < bestisect.Depth))
280 					{
281 						bestisect = isect;
282 						found = true;
283 					}
284 				}
285 			}
286 
287 			return found;
288 		}
289 		case 1:
290 		{
291 			if(sceneData->boundingSlabs != NULL)
292 				return (Intersect_BBox_Tree(priorityQueue, sceneData->boundingSlabs, ray, &bestisect, precondition, postcondition, threadData));
293 		}
294 		// FALLTHROUGH
295 		case 0:
296 		{
297 			bool found = false;
298 
299 			for(vector<ObjectPtr>::iterator it = sceneData->objects.begin(); it != sceneData->objects.end(); it++)
300 			{
301 				if(precondition(ray, *it, 0.0) == true)
302 				{
303 					Intersection isect;
304 
305 					if(FindIntersection(*it, isect, ray, postcondition) && (isect.Depth < bestisect.Depth))
306 					{
307 						bestisect = isect;
308 						found = true;
309 					}
310 				}
311 			}
312 
313 			return found;
314 		}
315 	}
316 
317 	return false;
318 }
319 
320 bool Trace::FindIntersection(ObjectPtr object, Intersection& isect, const Ray& ray, double closest)
321 {
322 	if(object != NULL)
323 	{
324 		BBOX_VECT origin;
325 		BBOX_VECT invdir;
326 		ObjectBase::BBoxDirection variant;
327 
328 		Vector3d tmp(1.0 / ray.GetDirection()[X], 1.0 / ray.GetDirection()[Y], 1.0 /ray.GetDirection()[Z]);
329 		Assign_Vector(origin, ray.Origin);
330 		Assign_Vector(invdir, *tmp);
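		// Pack the sign of each reciprocal direction component into a 3-bit index
		// (X -> bit 2, Y -> bit 1, Z -> bit 0) so Intersect_BBox() can pick the
		// sign-specialised bounding-box test for the ray's octant.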
331 		variant = (ObjectBase::BBoxDirection)((int(invdir[X] < 0.0) << 2) | (int(invdir[Y] < 0.0) << 1) | int(invdir[Z] < 0.0));
332 
333 		if(object->Intersect_BBox(variant, origin, invdir, closest) == false)
334 			return false;
335 
336 		if(object->Bound.empty() == false)
337 		{
338 			if(Ray_In_Bound(ray, object->Bound, threadData) == false)
339 				return false;
340 		}
341 
342 		IStack depthstack(stackPool);
343 		assert(depthstack->empty()); // verify that the IStack pulled from the pool is in a cleaned-up condition
344 
345 		if(object->All_Intersections(ray, depthstack, threadData))
346 		{
347 			bool found = false;
348 			double tmpDepth = 0;
349 
350 			while(depthstack->size() > 0)
351 			{
352 				tmpDepth = depthstack->top().Depth;
353 				// TODO FIXME - This was SMALL_TOLERANCE, but that's too rough for some scenes [cjc] need to check what it was in the old code [trf]
354 				if(tmpDepth < closest && (ray.IsSubsurfaceRay() || tmpDepth >= MIN_ISECT_DEPTH))
355 				{
356 					isect = depthstack->top();
357 					closest = tmpDepth;
358 					found = true;
359 				}
360 
361 				depthstack->pop();
362 			}
363 
364 			return (found == true);
365 		}
366 
367 		assert(depthstack->empty()); // verify that the IStack is in a cleaned-up condition (again)
368 	}
369 
370 	return false;
371 }
372 
373 bool Trace::FindIntersection(ObjectPtr object, Intersection& isect, const Ray& ray, const RayObjectCondition& postcondition, double closest)
374 {
375 	if(object != NULL)
376 	{
377 		BBOX_VECT origin;
378 		BBOX_VECT invdir;
379 		ObjectBase::BBoxDirection variant;
380 
381 		Vector3d tmp(1.0 / ray.GetDirection()[X], 1.0 / ray.GetDirection()[Y], 1.0 /ray.GetDirection()[Z]);
382 		Assign_Vector(origin, ray.Origin);
383 		Assign_Vector(invdir, *tmp);
384 		variant = (ObjectBase::BBoxDirection)((int(invdir[X] < 0.0) << 2) | (int(invdir[Y] < 0.0) << 1) | int(invdir[Z] < 0.0));
385 
386 		if(object->Intersect_BBox(variant, origin, invdir, closest) == false)
387 			return false;
388 
389 		if(object->Bound.empty() == false)
390 		{
391 			if(Ray_In_Bound(ray, object->Bound, threadData) == false)
392 				return false;
393 		}
394 
395 		IStack depthstack(stackPool);
396 		assert(depthstack->empty()); // verify that the IStack pulled from the pool is in a cleaned-up condition
397 
398 		if(object->All_Intersections(ray, depthstack, threadData))
399 		{
400 			bool found = false;
401 			double tmpDepth = 0;
402 
403 			while(depthstack->size() > 0)
404 			{
405 				tmpDepth = depthstack->top().Depth;
406 				// TODO FIXME - This was SMALL_TOLERANCE, but that's too rough for some scenes [cjc] need to check what it was in the old code [trf]
407 				if(tmpDepth < closest && (ray.IsSubsurfaceRay() || tmpDepth >= MIN_ISECT_DEPTH) && postcondition(ray, object, tmpDepth))
408 				{
409 					isect = depthstack->top();
410 					closest = tmpDepth;
411 					found = true;
412 				}
413 
414 				depthstack->pop();
415 			}
416 
417 			return (found == true);
418 		}
419 
420 		assert(depthstack->empty()); // verify that the IStack is in a cleaned-up condition (again)
421 	}
422 
423 	return false;
424 }
425 
426 unsigned int Trace::GetHighestTraceLevel()
427 {
428 	return maxFoundTraceLevel;
429 }
430 
431 void Trace::ComputeTextureColour(Intersection& isect, Colour& colour, const Ray& ray, COLC weight, bool photonPass, TraceTicket& ticket)
432 {
433 	// NOTE: when called during the photon pass this method is used to deposit photons
434 	// on the surface and not, per se, to compute texture color.
435 	WeightedTextureVector wtextures;
436 	double normaldirection;
437 	Colour tmpCol;
438 	Colour c1;
439 	Vector2d uvcoords;
440 	Vector3d rawnormal;
441 	Vector3d ipoint(isect.IPoint);
442 
443 	if (++lightColorCacheIndex >= lightColorCache.size())
444 	{
445 		lightColorCache.resize(lightColorCacheIndex + 10);
446 		for (LightColorCacheListList::iterator it = lightColorCache.begin() + lightColorCacheIndex; it != lightColorCache.end(); it++)
447 			it->resize(lightColorCache[0].size());
448 	}
449 	for (LightColorCacheList::iterator it = lightColorCache[lightColorCacheIndex].begin(); it != lightColorCache[lightColorCacheIndex].end(); it++)
450 		it->tested = false;
451 
452 	// compute the surface normal
453 	isect.Object->Normal(*rawnormal, &isect, threadData);
454 
455 	// I added this to flip the normal if the object is inverted (for CSG).
456 	// However, I subsequently commented it out for speed reasons - it doesn't
457 	// make a difference (no pun intended). The preexisting flip code below
458 	// produces a similar (though more extensive) result. [NK]
459 	// Actually, we should keep this code to guarantee that Normal_Direction
460 	// is set properly. [NK]
461 	if(Test_Flag(isect.Object, INVERTED_FLAG))
462 		rawnormal = -rawnormal;
463 
464 	// if the surface normal points away, flip its direction
465 	normaldirection = dot(rawnormal, Vector3d(ray.Direction));
466 	if(normaldirection > 0.0)
467 		rawnormal = -rawnormal;
468 
469 	Assign_Vector(isect.INormal, *rawnormal);
470 	Assign_Vector(isect.PNormal, *rawnormal);
471 
472 	if(Test_Flag(isect.Object, UV_FLAG))
473 	{
474 		// TODO FIXME
475 		//  I think we have a serious problem here regarding bump mapping:
476 		//  The UV vector doesn't contain any information about the (local) *orientation* of U and V in our XYZ co-ordinate system!
477 		//  This causes slopes to be applied in the wrong directions.
478 
479 		// get the UV vect of the intersection
480 		isect.Object->UVCoord(*uvcoords, &isect, threadData);
481 		// save the normal and UV coords into Intersection
482 		Assign_UV_Vect(isect.Iuv, *uvcoords);
483 	}
484 
485 	// now switch to UV mapping if we need to
486 	if(Test_Flag(isect.Object, UV_FLAG))
487 		ipoint = Vector3d(uvcoords.u(), uvcoords.v(), 0.0);
488 
489 	bool isMultiTextured = Test_Flag(isect.Object, MULTITEXTURE_FLAG) ||
490 	                       ((isect.Object->Texture == NULL) && Test_Flag(isect.Object, CUTAWAY_TEXTURES_FLAG));
491 
492 	// get textures and weights
493 	if(isMultiTextured == true)
494 	{
495 		isect.Object->Determine_Textures(&isect, normaldirection > 0.0, wtextures, threadData);
496 	}
497 	else if(isect.Object->Texture != NULL)
498 	{
499 		if((normaldirection > 0.0) && (isect.Object->Interior_Texture != NULL))
500 			wtextures.push_back(WeightedTexture(1.0, isect.Object->Interior_Texture)); /* Chris Huff: Interior Texture patch */
501 		else
502 			wtextures.push_back(WeightedTexture(1.0, isect.Object->Texture));
503 	}
504 	else
505 	{
506 		// don't need to do anything as the texture list will be empty.
507 		// TODO: could we perform these tests earlier ? [cjc]
508 		lightColorCacheIndex--;
509 		return;
510 	}
511 
512 	// Now, we perform the lighting calculations by stepping through
513 	// the list of textures and summing the weighted color.
514 
515 	for(WeightedTextureVector::iterator i(wtextures.begin()); i != wtextures.end(); i++)
516 	{
517 		TextureVector warps(texturePool);
518 		assert(warps->empty()); // verify that the TextureVector pulled from the pool is in a cleaned-up condition
519 
520 		// if the contribution of this texture is negligible, skip ahead
521 		if((i->weight < ticket.adcBailout) || (i->texture == NULL))
522 			continue;
523 
524 		if(photonPass == true)
525 		{
526 			// For the photon pass, colour (and thus c1) represents the
527 			// light energy being transmitted by the photon.  Because of this, we
528 			// compute the weighted energy value, then pass it to the texture for
529 			// processing.
530 			c1.red()   = colour.red()   * i->weight;
531 			c1.green() = colour.green() * i->weight;
532 			c1.blue()  = colour.blue()  * i->weight;
533 
534 			// NOTE that ComputeOneTextureColour is being used here for a secondary purpose,
535 			// namely to place photons on the surface and trigger recursive photon shooting
536 			ComputeOneTextureColour(c1, i->texture, *warps, ipoint, rawnormal, ray, weight, isect, false, true, ticket);
537 		}
538 		else
539 		{
540 			ComputeOneTextureColour(c1, i->texture, *warps, ipoint, rawnormal, ray, weight, isect, false, false, ticket);
541 
542 			tmpCol.red()    += i->weight * c1.red();
543 			tmpCol.green()  += i->weight * c1.green();
544 			tmpCol.blue()   += i->weight * c1.blue();
545 			tmpCol.transm() += i->weight * c1.transm();
546 		}
547 	}
548 
549 #if MEDIA_AFTER_TEXTURE_INTERPOLATION
550 	// [CLi] moved this here from Trace::ComputeShadowTexture() and Trace::ComputeLightedTexture(), respectively,
551 	// to avoid media being computed twice when dealing with averaged textures.
552 	// TODO - For photon rays we're still potentially doing double work on media.
553 	// TODO - For shadow rays we're still potentially doing double work on distance-based attenuation.
554 	// Calculate participating media effects.
555 	if(!photonPass && (qualityFlags & Q_VOLUME) && (!ray.GetInteriors().empty()) && (ray.IsHollowRay() == true))
556 		media.ComputeMedia(ray.GetInteriors(), ray, isect, tmpCol, ticket);
557 #endif
558 
559 	colour += tmpCol;
560 
561 	lightColorCacheIndex--;
562 }
563 
564 void Trace::ComputeOneTextureColour(Colour& resultcolour, const TEXTURE *texture, vector<const TEXTURE *>& warps, const Vector3d& ipoint,
565                                     const Vector3d& rawnormal, const Ray& ray, COLC weight, Intersection& isect, bool shadowflag, bool photonPass, TraceTicket& ticket)
566 {
567 	// NOTE: this method is used by the photon pass to deposit photons on the surface
568 	// (and not, per se, to compute texture color)
569 	const BLEND_MAP *blendmap = texture->Blend_Map;
570 	const BLEND_MAP_ENTRY *prev, *cur;
571 	double value1, value2; // TODO FIXME - choose better names!
572 	Vector3d tpoint;
573 	Vector2d uvcoords;
574 	Colour c2;
575 
576 	switch(texture->Type)
577 	{
578 		case NO_PATTERN:
579 		case PLAIN_PATTERN:
580 			break;
581 		case AVERAGE_PATTERN:
582 		case UV_MAP_PATTERN:
583 		case BITMAP_PATTERN:
584 		default:
585 			warps.push_back(texture);
586 			break;
587 	}
588 
589 	// ipoint - intersection point (and evaluation point)
590 	// epoint - evaluation point
591 	// tpoint - turbulated/transformed point
592 
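	// Special pattern types (plain, average, uv_map, bitmap) are handled explicitly below;
	// any other pattern type is evaluated via its blend map, interpolating linearly between
	// the two closest entries.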
593 	if(texture->Type <= LAST_SPECIAL_PATTERN)
594 	{
595 		switch(texture->Type)
596 		{
597 			case NO_PATTERN:
598 				resultcolour = Colour(1.0, 1.0, 1.0, 1.0, 1.0);
599 				break;
600 			case AVERAGE_PATTERN:
601 				Warp_EPoint(*tpoint, *ipoint, reinterpret_cast<const TPATTERN *>(warps.back()));
602 				ComputeAverageTextureColours(resultcolour, texture, warps, tpoint, rawnormal, ray, weight, isect, shadowflag, photonPass, ticket);
603 				break;
604 			case UV_MAP_PATTERN:
605 				// Don't bother warping, simply get the UV vect of the intersection
606 				isect.Object->UVCoord(*uvcoords, &isect, threadData);
607 				tpoint = Vector3d(uvcoords[U], uvcoords[V], 0.0);
608 				cur = &(texture->Blend_Map->Blend_Map_Entries[0]);
609 				ComputeOneTextureColour(resultcolour, cur->Vals.Texture, warps, tpoint, rawnormal, ray, weight, isect, shadowflag, photonPass, ticket);
610 				break;
611 			case BITMAP_PATTERN:
612 				Warp_EPoint(*tpoint, *ipoint, reinterpret_cast<const TPATTERN *>(texture));
613 				ComputeOneTextureColour(resultcolour, material_map(*tpoint, texture), warps, tpoint, rawnormal, ray, weight, isect, shadowflag, photonPass, ticket);
614 				break;
615 			case PLAIN_PATTERN:
616 				if(shadowflag == true)
617 					ComputeShadowTexture(resultcolour, texture, warps, ipoint, rawnormal, ray, isect, ticket);
618 				else
619 					ComputeLightedTexture(resultcolour, texture, warps, ipoint, rawnormal, ray, weight, isect, ticket);
620 				break;
621 			default:
622 				throw POV_EXCEPTION_STRING("Bad texture type in ComputeOneTextureColour");
623 		}
624 	}
625 	else
626 	{
627 		// NK 19 Nov 1999 added Warp_EPoint
628 		Warp_EPoint(*tpoint, *ipoint, reinterpret_cast<const TPATTERN *>(texture));
629 		value1 = Evaluate_TPat(reinterpret_cast<const TPATTERN *>(texture), *tpoint, &isect, &ray, threadData);
630 
631 		Search_Blend_Map(value1, blendmap, &prev, &cur);
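		// Search_Blend_Map() returns the two blend map entries bracketing value1
		// (prev == cur if it falls on or outside a single entry); their textures are
		// evaluated and blended linearly below.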
632 
633 		// NK phmap
634 		if(photonPass)
635 		{
636 			if(prev == cur)
637 				ComputeOneTextureColour(resultcolour, cur->Vals.Texture, warps, tpoint, rawnormal, ray, weight, isect, shadowflag, photonPass, ticket);
638 			else
639 			{
640 				value1 = (value1 - prev->value) / (cur->value - prev->value);
641 				value2 = 1.0 - value1;
642 				VScale(*c2, *resultcolour, value1); // modifies RGB, but leaves Filter and Transmit unchanged
643 				ComputeOneTextureColour(c2, cur->Vals.Texture, warps, tpoint, rawnormal, ray, weight, isect, shadowflag, photonPass, ticket);
644 				VScale(*c2, *resultcolour, value2); // modifies RGB, but leaves Filter and Transmit unchanged
645 				ComputeOneTextureColour(c2, prev->Vals.Texture, warps, tpoint, rawnormal, ray, weight, isect, shadowflag, photonPass, ticket);
646 			}
647 		}
648 		else
649 		{
650 			ComputeOneTextureColour(resultcolour, cur->Vals.Texture, warps, tpoint, rawnormal, ray, weight, isect, shadowflag, photonPass, ticket);
651 
652 			if(prev != cur)
653 			{
654 				ComputeOneTextureColour(c2, prev->Vals.Texture, warps, tpoint, rawnormal, ray, weight, isect, shadowflag, photonPass, ticket);
655 				value1 = (value1 - prev->value) / (cur->value - prev->value);
656 				value2 = 1.0 - value1;
657 				resultcolour = value1 * resultcolour + value2 * c2;
658 			}
659 		}
660 	}
661 }
662 
663 void Trace::ComputeAverageTextureColours(Colour& resultcolour, const TEXTURE *texture, vector<const TEXTURE *>& warps, const Vector3d& ipoint,
664                                          const Vector3d& rawnormal, const Ray& ray, COLC weight, Intersection& isect, bool shadowflag, bool photonPass, TraceTicket& ticket)
665 {
666 	const BLEND_MAP *bmap = texture->Blend_Map;
667 	SNGL total = 0.0;
668 	Colour lc;
669 
670 	if(photonPass == false)
671 	{
672 		resultcolour.clear();
673 
674 		for(int i = 0; i < bmap->Number_Of_Entries; i++)
675 		{
676 			SNGL val = bmap->Blend_Map_Entries[i].value;
677 
678 			ComputeOneTextureColour(lc, bmap->Blend_Map_Entries[i].Vals.Texture, warps, ipoint, rawnormal, ray, weight, isect, shadowflag, photonPass, ticket);
679 
680 			resultcolour += lc * val;
681 
682 			total += val;
683 		}
684 
685 		resultcolour /= total;
686 	}
687 	else
688 	{
689 		for(int i = 0; i < bmap->Number_Of_Entries; i++)
690 			total += bmap->Blend_Map_Entries[i].value;
691 
692 		for(int i = 0; i < bmap->Number_Of_Entries; i++)
693 		{
694 			VScale(*lc, *resultcolour, bmap->Blend_Map_Entries[i].value / total); // modifies RGB, but leaves Filter and Transmit unchanged
695 
696 			ComputeOneTextureColour(lc, bmap->Blend_Map_Entries[i].Vals.Texture, warps, ipoint, rawnormal, ray, weight, isect, shadowflag, photonPass, ticket);
697 		}
698 	}
699 }
700 
701 void Trace::ComputeLightedTexture(Colour& resultcolour, const TEXTURE *texture, vector<const TEXTURE *>& warps, const Vector3d& ipoint,
702                                   const Vector3d& rawnormal, const Ray& ray, COLC weight, Intersection& isect, TraceTicket& ticket)
703 {
704 	Interior *interior;
705 	const TEXTURE *layer;
706 	int i;
707 	bool radiosity_done, radiosity_back_done, radiosity_needed;
708 	int layer_number;
709 	double w1;
710 	double new_Weight;
711 	double att, trans, max_Radiosity_Contribution;
712 	double cos_Angle_Incidence;
713 	Vector3d layNormal, topNormal;
714 	RGBColour attCol;
715 	Colour layCol, rflCol, rfrCol;
716 	RGBColour filCol;
717 	RGBColour tmpCol, tmp;
718 	RGBColour ambCol; // Note that there is no gathering of filter or transparency
719 	RGBColour ambBackCol;
720 	bool one_colour_found, colour_found;
721 	bool tir_occured;
722 	std::auto_ptr<PhotonGatherer> surfacePhotonGatherer(NULL); // TODO FIXME - auto_ptr why?  [CLi] why, to auto-destruct it of course! (e.g. in case of exception)
723 
724 	WNRXVector listWNRX(wnrxPool); // "Weight, Normal, Reflectivity, eXponent"
725 	assert(listWNRX->empty()); // verify that the WNRXVector pulled from the pool is in a cleaned-up condition
726 
727 	// resultcolour builds up the apparent visible color of the point.
728 	// Note that besides the RGB components, this also includes Transmission
729 	// for alpha channel computation.
730 	resultcolour.clear();
731 
732 	// filCol serves two purposes. It accumulates the filter properties
733 	// of a multi-layer texture so that if a ray makes it all the way through
734 	// all layers, the color of object behind is filtered by this object.
735 	// It also is used to attenuate how much of an underlayer you
736 	// can see in a layered texture.  Note that when computing the reflective
737 	// properties of a layered texture, the upper layers don't filter the
738 	// light from the lower layers -- the layer colors add together (even
739 	// before we added additive transparency via the "transmit" 5th
740 	// color channel).  However when computing the transmitted rays, all layers
741 	// filter the light from any objects behind this object. [CY 1/95]
742 
743 	// NK layers - switched transmit component to zero
744 	// [CLi] changed filCol to RGB, as filter and transmit were always pinned to 1.0 and 0.0 respectively anyway
745 	filCol = RGBColour(1.0, 1.0, 1.0);
746 
747 	trans = 1.0;
748 
749 	// Add in radiosity (stochastic interreflection-based ambient light) if desired
750 	radiosity_done = false;
751 	radiosity_back_done = false;
752 
753 	// This block just sets up radiosity for the code inside the loop, which is first-time-through.
754 	radiosity_needed = (sceneData->radiositySettings.radiosityEnabled == true) &&
755 	                   (radiosity.CheckRadiosityTraceLevel(ticket) == true) &&
756 	                   (Test_Flag(isect.Object, IGNORE_RADIOSITY_FLAG) == false);
757 
758 	// Loop through the layers and compute the ambient, diffuse,
759 	// phong and specular for these textures.
760 	one_colour_found = false;
761 
762 	if(sceneData->photonSettings.photonsEnabled && sceneData->surfacePhotonMap.numPhotons > 0)
763 		surfacePhotonGatherer.reset(new PhotonGatherer(&sceneData->surfacePhotonMap, sceneData->photonSettings));
764 
765 	for(layer_number = 0, layer = texture; (layer != NULL) && (trans > ticket.adcBailout); layer_number++, layer = reinterpret_cast<const TEXTURE *>(layer->Next))
766 	{
767 		// Get perturbed surface normal.
768 		layNormal = rawnormal;
769 
770 		if((qualityFlags & Q_NORMAL) && (layer->Tnormal != NULL))
771 		{
772 			for(vector<const TEXTURE *>::iterator i(warps.begin()); i != warps.end(); i++)
773 				Warp_Normal(*layNormal, *layNormal, reinterpret_cast<const TPATTERN *>(*i), Test_Flag((*i), DONT_SCALE_BUMPS_FLAG));
774 
775 			Perturb_Normal(*layNormal, layer->Tnormal, *ipoint, &isect, &ray, threadData);
776 
777 			if((Test_Flag(layer->Tnormal, DONT_SCALE_BUMPS_FLAG)))
778 				layNormal.normalize();
779 
780 			for(vector<const TEXTURE *>::reverse_iterator i(warps.rbegin()); i != warps.rend(); i++)
781 				UnWarp_Normal(*layNormal, *layNormal, reinterpret_cast<const TPATTERN *>(*i), Test_Flag((*i), DONT_SCALE_BUMPS_FLAG));
782 		}
783 
784 		// Store top layer normal.
785 		if(layer_number == 0)
786 			topNormal = layNormal;
787 
788 		// Get surface colour.
789 		new_Weight = weight * trans;
790 		colour_found = Compute_Pigment(layCol, layer->Pigment, *ipoint, &isect, &ray, threadData);
791 
792 		// If a valid color was returned set one_colour_found to true.
793 		// An invalid color is returned if a surface point is outside
794 		// an image map used just once.
795 		one_colour_found = (one_colour_found || colour_found);
796 
797 		// This section of code used to be the routine Compute_Reflected_Colour.
798 		// I copied it in here to rearrange some of it more easily and to
799 		// see if we could eliminate passing a zillion parameters for no
800 		// good reason. [CY 1/95]
801 
802 		if(qualityFlags & Q_FULL_AMBIENT)
803 		{
804 			// Only use top layer and kill transparency if low quality.
805 			resultcolour = layCol;
806 			resultcolour.filter() = 0.0;
807 			resultcolour.transm() = 0.0;
808 		}
809 		else
810 		{
811 			// Store vital information for later reflection.
812 			listWNRX->push_back(WNRX(new_Weight, layNormal, RGBColour(), layer->Finish->Reflect_Exp));
813 
814 			// angle-dependent reflectivity
815 			cos_Angle_Incidence = -dot(Vector3d(ray.Direction), layNormal);
816 
817 			if((isect.Object->interior != NULL) || (layer->Finish->Reflection_Type != 1))
818 			{
819 				ComputeReflectivity(listWNRX->back().weight, listWNRX->back().reflec,
820 				                    layer->Finish->Reflection_Max, layer->Finish->Reflection_Min,
821 				                    layer->Finish->Reflection_Type, layer->Finish->Reflection_Falloff,
822 				                    cos_Angle_Incidence, ray, isect.Object->interior);
823 			}
824 			else
825 				throw POV_EXCEPTION_STRING("Reflection_Type 1 used with no interior."); // TODO FIXME - wrong place to report this [trf]
826 
827 			// for metallic reflection, apply the surface color using the Fresnel equation
828 			// (use the same equation as "metallic" in phong and specular highlights)
829 			if(layer->Finish->Reflect_Metallic != 0.0)
830 			{
831 				double R_M = layer->Finish->Reflect_Metallic;
832 
833 				double x = fabs(acos(cos_Angle_Incidence)) / M_PI_2;
834 				double F = 0.014567225 / Sqr(x - 1.12) - 0.011612903;
835 				F = min(1.0, max(0.0, F));
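				// Tint the reflectivity towards the surface colour by the metallic amount R_M,
				// weighted by the empirically fitted Fresnel-like factor F: near grazing angles
				// (F -> 1) the reflection stays uncoloured, while at normal incidence it takes
				// on the pigment colour.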
836 
837 				listWNRX->back().reflec.red()   *= (1.0 + R_M * (1.0 - F) * (layCol.red()   - 1.0));
838 				listWNRX->back().reflec.green() *= (1.0 + R_M * (1.0 - F) * (layCol.green() - 1.0));
839 				listWNRX->back().reflec.blue()  *= (1.0 + R_M * (1.0 - F) * (layCol.blue()  - 1.0));
840 			}
841 
842 			// NK - I think we SHOULD do something like this: (to apply the layer's color)
843 			// listWNRX->back().reflec.red()   *= filCol.red();
844 			// listWNRX->back().reflec.green() *= filCol.green();
845 			// listWNRX->back().reflec.blue()  *= filCol.blue();
846 
847 			// We need to reduce the layer's own brightness if it is transparent.
848 			if (sceneData->EffectiveLanguageVersion() < 370)
849 				// this formula is bogus, but it has been around for a while so we're keeping it for compatibility with legacy scenes
850 				att = (1.0 - (layCol.filter() * max3(layCol.red(), layCol.green(), layCol.blue()) + layCol.transm()));
851 			else
852 				att = layCol.opacity();
853 
854 			// now compute the BRDF or BSSRDF contribution
855 			tmpCol.clear();
856 
857 			if(sceneData->useSubsurface && layer->Finish->UseSubsurface && (qualityFlags & Q_SUBSURFACE))
858 			{
859 				// Add diffuse & single scattering contribution.
860 				ComputeSubsurfaceScattering(layer->Finish, RGBColour(layCol), isect, ray, layNormal, tmpCol, att, ticket);
861 				// [CLi] moved multiplication with filCol to further below
862 
863 				// Radiosity-style ambient may be subject to subsurface light transport.
864 				// In that case, the respective computations are handled by the BSSRDF code already.
865 				if (sceneData->subsurfaceUseRadiosity)
866 					radiosity_needed = false;
867 			}
868 			// Add radiosity ambient contribution.
869 			if(radiosity_needed)
870 			{
871 				// if radiosity calculation needed, but not yet done, do it now
872 				// TODO FIXME - [CLi] with "normal on", shouldn't we compute radiosity for each layer separately (if it has perturbed normals)?
873 				if(radiosity_done == false)
874 				{
875 					// calculate max possible contribution of radiosity, to see if calculating it is worthwhile
876 					// TODO FIXME - other layers may see a higher weight!
877 					// Maybe we should go along and compute *first* the total contribution radiosity will make,
878 					// and at the *end* apply it.
879 					max_Radiosity_Contribution = (filCol * RGBColour(layCol)).greyscale() * att * layer->Finish->RawDiffuse;
880 
881 					if(max_Radiosity_Contribution > ticket.adcBailout)
882 					{
883 						radiosity.ComputeAmbient(Vector3d(isect.IPoint), rawnormal, layNormal, ambCol, weight * max_Radiosity_Contribution, ticket);
884 						radiosity_done = true;
885 					}
886 				}
887 
888 				// [CLi] moved multiplication with filCol to further below
889 				tmpCol += (RGBColour(layCol) * ambCol) * (att * layer->Finish->RawDiffuse);
890 
891 				// if backside radiosity calculation needed, but not yet done, do it now
892 				// TODO FIXME - [CLi] with "normal on", shouldn't we compute radiosity for each layer separately (if it has perturbed normals)?
893 				if(layer->Finish->DiffuseBack != 0.0)
894 				{
895 					if(radiosity_back_done == false)
896 					{
897 						// calculate max possible contribution of radiosity, to see if calculating it is worthwhile
898 						// TODO FIXME - other layers may see a higher weight!
899 						// Maybe we should go along and compute *first* the total contribution radiosity will make,
900 						// and at the *end* apply it.
901 						max_Radiosity_Contribution = (filCol * RGBColour(layCol)).greyscale() * att * layer->Finish->RawDiffuseBack;
902 
903 						if(max_Radiosity_Contribution > ticket.adcBailout)
904 						{
905 							radiosity.ComputeAmbient(Vector3d(isect.IPoint), -rawnormal, -layNormal, ambBackCol, weight * max_Radiosity_Contribution, ticket);
906 							radiosity_back_done = true;
907 						}
908 					}
909 
910 					// [CLi] moved multiplication with filCol to further below
911 					tmpCol += (RGBColour(layCol) * ambBackCol) * (att * layer->Finish->RawDiffuseBack);
912 				}
913 			}
914 
915 			// Add emissive ("classic" ambient) contribution.
916 			// [CLi] moved multiplication with filCol to further below
917 			if (!sceneData->radiositySettings.radiosityEnabled || (sceneData->EffectiveLanguageVersion() < 370))
918 				// only use "ambient" setting when radiosity is disabled (or in legacy scenes)
919 				tmpCol += (RGBColour(layCol) * layer->Finish->Ambient * sceneData->ambientLight * att);
920 			tmpCol += (RGBColour(layCol) * layer->Finish->Emission * att);
921 
922 			// set up the "litObjectIgnoresPhotons" flag (thread variable) so that
923 			// ComputeShadowColour will know whether or not this lit object is
924 			// ignoring photons, which affects partial-shadowing (i.e. filter and transmit)
925 			threadData->litObjectIgnoresPhotons = Test_Flag(isect.Object,PH_IGNORE_PHOTONS_FLAG);
926 
927 			// Add diffuse, phong, specular, and iridescence contribution.
928 			// (We don't need to do this for (non-radiosity) rays during pretrace, as it does not affect radiosity sampling)
929 			if(!ray.IsPretraceRay())
930 			{
931 				Vector3d tmpIPoint(isect.IPoint);
932 
933 				if((layer->Finish->Diffuse != 0.0) || (layer->Finish->DiffuseBack != 0.0) || (layer->Finish->Specular != 0.0) || (layer->Finish->Phong != 0.0))
934 					ComputeDiffuseLight(layer->Finish, tmpIPoint, ray, layNormal, RGBColour(layCol), tmpCol, att, isect.Object, ticket);
935 			}
936 
937 			if(sceneData->photonSettings.photonsEnabled && sceneData->surfacePhotonMap.numPhotons > 0)
938 			{
939 				// NK phmap - now do the same for the photons in the area
940 				if(!Test_Flag(isect.Object, PH_IGNORE_PHOTONS_FLAG))
941 				{
942 					Vector3d tmpIPoint(isect.IPoint);
943 					ComputePhotonDiffuseLight(layer->Finish, tmpIPoint, ray, layNormal, rawnormal, RGBColour(layCol), tmpCol, att, isect.Object, *surfacePhotonGatherer);
944 				}
945 			}
946 
947 			tmpCol *= filCol;
948 			VAddEq(*resultcolour, *tmpCol); // modifies RGB, but leaves Filter and Transmit unchanged
949 		}
950 
951 		// Get new filter color.
952 		if(colour_found)
953 		{
954 			filCol *= layCol.rgbTransm();
955 
956 			if(layer->Finish->Conserve_Energy != 0 && listWNRX->empty() == false)
957 			{
958 				// adjust filCol based on reflection
959 				// this would work so much better with r,g,b,rt,gt,bt
960 				filCol *= RGBColour(min(1.0, 1.0 - listWNRX->back().reflec.red()),
961 				                    min(1.0, 1.0 - listWNRX->back().reflec.green()),
962 				                    min(1.0, 1.0 - listWNRX->back().reflec.blue()));
963 			}
964 		}
965 
966 		// Get new remaining translucency.
967 		// [CLi] changed filCol to RGB, as filter and transmit were always pinned to 1.0 and 0.0, respectively anyway
968 		// TODO CLARIFY - is this working properly if filCol.greyscale() is negative? (what would be the right thing then?)
969 		trans = min(1.0, (double)fabs(filCol.greyscale()));
970 	}
971 
972 	// Calculate transmitted component.
973 	//
974 	// If the surface is translucent a transmitted ray is traced
975 	// and its contribution is added to the total ResCol after
976 	// filtering it by filCol.
977 	tir_occured = false;
978 
979 	if(((interior = isect.Object->interior) != NULL) && (trans > ticket.adcBailout) && (qualityFlags & Q_REFRACT))
980 	{
981 		// [CLi] changed filCol to RGB, as filter and transmit were always pinned to 1.0 and 0.0, respectively anyway
982 		// TODO CLARIFY - is this working properly if some filCol component is negative? (what would be the right thing then?)
983 		w1 = max3(fabs(filCol.red()), fabs(filCol.green()), fabs(filCol.blue()));
984 		new_Weight = weight * w1;
985 
986 		// Trace refracted ray.
987 		Vector3d tmpIPoint(isect.IPoint);
988 		Colour tempcolor;
989 
990 		tir_occured = ComputeRefraction(texture->Finish, interior, tmpIPoint, ray, topNormal, rawnormal, tempcolor, new_Weight, ticket);
991 
992 		if(tir_occured == true)
993 			rfrCol += tempcolor;
994 		else
995 			rfrCol = tempcolor;
996 
997 		// Get distance based attenuation.
998 		// TODO - virtually the same code is used in ComputeShadowTexture().
999 		attCol.set(interior->Old_Refract);
1000 
1001 		if((interior != NULL) && ray.IsInterior(interior) == true)
1002 		{
1003 			if(fabs(interior->Fade_Distance) > EPSILON)
1004 			{
1005 				// NK attenuate
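				// Two attenuation models: for Fade_Power >= 1000 the light falls off
				// exponentially, exp(-(1 - Fade_Colour) * depth / Fade_Distance); otherwise it
				// blends towards Fade_Colour by the factor 1 / (1 + (depth / Fade_Distance)^Fade_Power).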
1006 				if(interior->Fade_Power >= 1000)
1007 				{
1008 					double depth = isect.Depth / interior->Fade_Distance;
1009 					attCol *= exp(-(RGBColour(1.0) - interior->Fade_Colour) * depth);
1010 				}
1011 				else
1012 				{
1013 					att = 1.0 + pow(isect.Depth / interior->Fade_Distance, (double)interior->Fade_Power);
1014 					attCol *= (interior->Fade_Colour + (RGBColour(1.0) - interior->Fade_Colour) / att);
1015 				}
1016 			}
1017 		}
1018 
1019 		// If total internal reflection occurred, the transmitted light is not filtered.
1020 		if(tir_occured)
1021 		{
1022 			resultcolour.red()   += attCol.red()   * rfrCol.red();
1023 			resultcolour.green() += attCol.green() * rfrCol.green();
1024 			resultcolour.blue()  += attCol.blue()  * rfrCol.blue();
1025 			// NOTE: pTRANSM (alpha channel) stays zero
1026 		}
1027 		else
1028 		{
1029 			if(one_colour_found)
1030 			{
1031 				// [CLi] changed filCol to RGB, as filter and transmit were always pinned to 1.0 and 0.0, respectively anyway
1032 				resultcolour.red()   += attCol.red()   * rfrCol.red()   * filCol.red();
1033 				resultcolour.green() += attCol.green() * rfrCol.green() * filCol.green();
1034 				resultcolour.blue()  += attCol.blue()  * rfrCol.blue()  * filCol.blue();
1035 				// We need to know the transmittance value for the alpha channel. [DB]
1036 				resultcolour.transm() = attCol.greyscale() * rfrCol.transm() * trans;
1037 			}
1038 			else
1039 			{
1040 				resultcolour.red()   += attCol.red()   * rfrCol.red();
1041 				resultcolour.green() += attCol.green() * rfrCol.green();
1042 				resultcolour.blue()  += attCol.blue()  * rfrCol.blue();
1043 				// We need to know the transmittance value for the alpha channel. [DB]
1044 				resultcolour.transm() = attCol.greyscale() * rfrCol.transm();
1045 			}
1046 		}
1047 	}
1048 
1049 	// Calculate reflected component.
1050 	//
1051 	// If total internal reflection occurred, all reflections using
1052 	// topNormal are skipped.
1053 	if(qualityFlags & Q_REFLECT)
1054 	{
1055 		layer = texture;
1056 		for(i = 0; i < layer_number; i++)
1057 		{
1058 			if((!tir_occured) ||
1059 			   (fabs(topNormal[X]-(*listWNRX)[i].normal[X]) > EPSILON) ||
1060 			   (fabs(topNormal[Y]-(*listWNRX)[i].normal[Y]) > EPSILON) ||
1061 			   (fabs(topNormal[Z]-(*listWNRX)[i].normal[Z]) > EPSILON))
1062 			{
1063 				if(!(*listWNRX)[i].reflec.isZero())
1064 				{
1065 					Vector3d tmpIPoint(isect.IPoint);
1066 
1067 					rflCol.clear();
1068 					ComputeReflection(layer->Finish, tmpIPoint, ray, (*listWNRX)[i].normal, rawnormal, rflCol, (*listWNRX)[i].weight, ticket);
1069 
1070 					if((*listWNRX)[i].reflex != 1.0)
1071 					{
1072 						resultcolour.red()    += (*listWNRX)[i].reflec.red()   * pow(rflCol.red(),   (*listWNRX)[i].reflex);
1073 						resultcolour.green()  += (*listWNRX)[i].reflec.green() * pow(rflCol.green(), (*listWNRX)[i].reflex);
1074 						resultcolour.blue()   += (*listWNRX)[i].reflec.blue()  * pow(rflCol.blue(),  (*listWNRX)[i].reflex);
1075 					}
1076 					else
1077 					{
1078 						resultcolour.red()   += (*listWNRX)[i].reflec.red()   * rflCol.red();
1079 						resultcolour.green() += (*listWNRX)[i].reflec.green() * rflCol.green();
1080 						resultcolour.blue()  += (*listWNRX)[i].reflec.blue()  * rflCol.blue();
1081 					}
1082 				}
1083 			}
1084 			layer = reinterpret_cast<const TEXTURE *>(layer->Next);
1085 		}
1086 	}
1087 
1088 #if MEDIA_AFTER_TEXTURE_INTERPOLATION
1089 	// [CLi] moved this to Trace::ComputeTextureColour() and Trace::ComputeShadowColour(), respectively
1090 	// to avoid media being computed twice when dealing with averaged textures.
1091 #else
1092 	// Calculate participating media effects.
1093 	if((qualityFlags & Q_VOLUME) && (!ray.GetInteriors().empty()) && (ray.IsHollowRay() == true))
1094 		media.ComputeMedia(ray.GetInteriors(), ray, isect, resultcolour, ticket);
1095 #endif
1096 }
1097 
1098 void Trace::ComputeShadowTexture(Colour& filtercolour, const TEXTURE *texture, vector<const TEXTURE *>& warps, const Vector3d& ipoint,
1099                                  const Vector3d& rawnormal, const Ray& ray, Intersection& isect, TraceTicket& ticket)
1100 {
1101 	Interior *interior = isect.Object->interior;
1102 	const TEXTURE *layer;
1103 	double caustics, dotval, k;
1104 	Vector3d layer_Normal;
1105 	RGBColour refraction;
1106 	Colour layer_Pigment_Colour;
1107 	bool one_colour_found, colour_found;
1108 
1109 	RGBColour tmpCol = RGBColour(1.0, 1.0, 1.0);
1110 
1111 	one_colour_found = false;
1112 
1113 	// [CLI] removed obsolete test for filtercolour.filter() and filtercolour.transm(), as they remain unchanged during loop
1114 	for(layer = texture; layer != NULL; layer = reinterpret_cast<TEXTURE *>(layer->Next))
1115 	{
1116 		colour_found = Compute_Pigment(layer_Pigment_Colour, layer->Pigment, *ipoint, &isect, &ray, threadData);
1117 
1118 		if(colour_found)
1119 		{
1120 			one_colour_found = true;
1121 
1122 			tmpCol *= layer_Pigment_Colour.rgbTransm();
1123 		}
1124 
1125 		// Get normal for faked caustics (will rewrite later to cache).
1126 		if((interior != NULL) && ((caustics = interior->Caustics) != 0.0))
1127 		{
1128 			layer_Normal = rawnormal;
1129 
1130 			if((qualityFlags & Q_NORMAL) && (layer->Tnormal != NULL))
1131 			{
1132 				for(vector<const TEXTURE *>::iterator i(warps.begin()); i != warps.end(); i++)
1133 					Warp_Normal(*layer_Normal, *layer_Normal, reinterpret_cast<const TPATTERN *>(*i), Test_Flag((*i), DONT_SCALE_BUMPS_FLAG));
1134 
1135 				Perturb_Normal(*layer_Normal, layer->Tnormal, *ipoint, &isect, &ray, threadData);
1136 
1137 				if((Test_Flag(layer->Tnormal,DONT_SCALE_BUMPS_FLAG)))
1138 					layer_Normal.normalize();
1139 
1140 				for(vector<const TEXTURE *>::reverse_iterator i(warps.rbegin()); i != warps.rend(); i++)
1141 					UnWarp_Normal(*layer_Normal, *layer_Normal, reinterpret_cast<const TPATTERN *>(*i), Test_Flag((*i), DONT_SCALE_BUMPS_FLAG));
1142 			}
1143 
1144 			// Get new filter/transmit values.
1145 			dotval = dot(layer_Normal, Vector3d(ray.Direction));
1146 
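			// Faked caustics: brighten the shadow by up to a factor of 2 where the shadow ray
			// runs parallel to the surface normal, with the 'caustics' exponent controlling how
			// tightly the brightening is focused.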
1147 			k = (1.0 + pow(fabs(dotval), caustics));
1148 
1149 			tmpCol *= k;
1150 		}
1151 	}
1152 
1153 	// TODO - [CLi] aren't spatial effects (distance attenuation, media) better handled in Trace::ComputeTextureColour()? We may be doing double work here!
1154 
1155 	// Get distance based attenuation.
1156 	// TODO - virtually the same code is used in ComputeLightedTexture().
1157 	refraction = RGBColour(1.0, 1.0, 1.0);
1158 
1159 	if((interior != NULL) && (ray.IsInterior(interior) == true))
1160 	{
1161 		if((interior->Fade_Power > 0.0) && (fabs(interior->Fade_Distance) > EPSILON))
1162 		{
1163 			// NK - attenuation
1164 			if(interior->Fade_Power>=1000)
1165 			{
1166 				refraction *= exp( -(RGBColour(1.0) - interior->Fade_Colour) * (isect.Depth / interior->Fade_Distance) );
1167 			}
1168 			else
1169 			{
1170 				k = 1.0 + pow(isect.Depth / interior->Fade_Distance, (double)interior->Fade_Power);
1171 				refraction *= (interior->Fade_Colour + (RGBColour(1.0) - interior->Fade_Colour) / k);
1172 			}
1173 		}
1174 	}
1175 
1176 	// Combine the accumulated filter colour with the distance-based attenuation.
1177 	filtercolour = Colour(tmpCol * refraction, 1.0, 0.0);
1178 
1179 #if MEDIA_AFTER_TEXTURE_INTERPOLATION
1180 	// [CLi] moved this to Trace::ComputeTextureColour() and Trace::ComputeShadowColour(), respectively
1181 	// to avoid media being computed twice when dealing with averaged textures.
1182 #else
1183 	// Calculate participating media effects.
1184 	if((qualityFlags & Q_VOLUME) && (!ray.GetInteriors().empty()) && (ray.IsHollowRay() == true))
1185 		media.ComputeMedia(ray.GetInteriors(), ray, isect, filtercolour, ticket);
1186 #endif
1187 }
1188 
1189 void Trace::ComputeReflection(const FINISH* finish, const Vector3d& ipoint, const Ray& ray, const Vector3d& normal, const Vector3d& rawnormal, Colour& colour, COLC weight, TraceTicket& ticket)
1190 {
1191 	Ray nray(ray);
1192 	double n, n2;
1193 
1194 	nray.SetFlags(Ray::ReflectionRay, ray);
1195 
1196 	// The rest of this is essentially what was originally here, with small changes.
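	// Mirror the incoming direction about the (possibly perturbed) surface normal:
	// R = D - 2*(D.N)*N, computed below as D + n*N with n = -2*(D.N).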
1197 	n = -2.0 * dot(Vector3d(ray.Direction), normal);
1198 	VAddScaled(nray.Direction, ray.Direction, n, *normal);
1199 
1200 	// Nathan Kopp & CEY 1998 - Reflection bugfix
1201 	// if the new ray is going in the opposite direction from the raw normal, we
1202 	// need to fix it.
1203 	n = dot(Vector3d(nray.Direction), rawnormal);
1204 
1205 	if(n < 0.0)
1206 	{
1207 		// It needs fixing. Which kind?
1208 		n2 = dot(Vector3d(nray.Direction), normal);
1209 
1210 		if(n2 < 0.0)
1211 		{
1212 			// reflected inside rear virtual surface. Reflect Ray using Raw_Normal
1213 			n = -2.0 * dot(Vector3d(ray.Direction), rawnormal);
1214 			VAddScaled(nray.Direction, ray.Direction, n, *rawnormal);
1215 		}
1216 		else
1217 		{
1218 			// Double reflect NRay using Raw_Normal
1219 			// n = dot(Vector3d(New_Ray.Direction),Vector3d(Jitter_Raw_Normal)); - kept the old n around
1220 			n *= -2.0;
1221 			VAddScaledEq(nray.Direction, n, *rawnormal);
1222 		}
1223 	}
1224 
1225 	VNormalizeEq(nray.Direction);
1226 	Assign_Vector(nray.Origin, *ipoint);
1227 	threadData->Stats()[Reflected_Rays_Traced]++;
1228 
1229 	// Trace reflected ray.
1230 	bool alphaBackground = ticket.alphaBackground;
1231 	ticket.alphaBackground = false;
1232 	if (!ray.IsPhotonRay() && (finish->Irid > 0.0))
1233 	{
1234 		Colour tmpCol;
1235 		TraceRay(nray, tmpCol, weight, ticket, false);
1236 		RGBColour tmpCol2(tmpCol);
1237 		ComputeIridColour(finish, Vector3d(nray.Direction), Vector3d(ray.Direction), normal, ipoint, tmpCol2);
1238 		colour += Colour(tmpCol2);
1239 	}
1240 	else
1241 	{
1242 		TraceRay(nray, colour, weight, ticket, false);
1243 	}
1244 	ticket.alphaBackground = alphaBackground;
1245 }
1246 
1247 bool Trace::ComputeRefraction(const FINISH* finish, Interior *interior, const Vector3d& ipoint, const Ray& ray, const Vector3d& normal, const Vector3d& rawnormal, Colour& colour, COLC weight, TraceTicket& ticket)
1248 {
1249 	Ray nray(ray);
1250 	Vector3d localnormal;
1251 	double n, ior, dispersion;
1252 	unsigned int dispersionelements = interior->Disp_NElems;
1253 	bool havedispersion = (dispersionelements > 0);
1254 
1255 	nray.SetFlags(Ray::RefractionRay, ray);
1256 
1257 	// Set up new ray.
1258 	Assign_Vector(nray.Origin, *ipoint);
1259 
1260 	// Get ratio of iors depending on the interiors the ray is traversing.
1261 
1262 	// Note:
1263 	// For the purpose of refraction, the space occupied by "nested" objects is considered to be "outside" the containing objects,
1264 	// i.e. when encountering (A (B B) A) we pretend that it's (A A|B B|A A).
1265 	// (Here "(X" and "X)" denote the entering and leaving of object X, and "X|Y" denotes an interface between objects X and Y.)
1266 	// In case of overlapping objects, the intersecting region is considered to be part of whatever object is encountered last,
1267 	// i.e. when encountering (A (B A) B) we pretend that it's (A A|B B|B B).
1268 
1269 	if(nray.GetInteriors().empty())
1270 	{
1271 		// The ray is entering from the atmosphere.
1272 		nray.AppendInterior(interior);
1273 
1274 		ior = sceneData->atmosphereIOR / interior->IOR;
1275 		if(havedispersion == true)
1276 			dispersion = sceneData->atmosphereDispersion / interior->Dispersion;
1277 	}
1278 	else
1279 	{
1280 		// The ray is currently inside an object.
1281 		if(interior == nray.GetInteriors().back()) // The ray is leaving the "innermost" object
1282 		{
1283 			nray.RemoveInterior(interior);
1284 			if(nray.GetInteriors().empty())
1285 			{
1286 				// The ray is leaving into the atmosphere
1287 				ior = interior->IOR / sceneData->atmosphereIOR;
1288 				if(havedispersion == true)
1289 					dispersion = interior->Dispersion / sceneData->atmosphereDispersion;
1290 			}
1291 			else
1292 			{
1293 				// The ray is leaving into another object, i.e. (A (B B) ...
1294 				// For the purpose of refraction, pretend that we weren't inside that other object,
1295 				// i.e. pretend that we didn't encounter (A (B B) ... but (A A|B B|A ...
1296 				ior = interior->IOR / nray.GetInteriors().back()->IOR;
1297 				if(havedispersion == true)
1298 				{
1299 					dispersion = interior->Dispersion / nray.GetInteriors().back()->Dispersion;
1300 					dispersionelements = max(dispersionelements, (unsigned int)(nray.GetInteriors().back()->Disp_NElems));
1301 				}
1302 			}
1303 		}
1304 		else if(nray.RemoveInterior(interior) == true) // The ray is leaving the intersection of overlapping objects, i.e. (A (B A) ...
1305 		{
1306 			// For the purpose of refraction, pretend that we had already left the other member of the intersection when we entered the overlap,
1307 			// i.e. pretend that we didn't encounter (A (B A) ... but (A A|B B|B ...
1308 			ior = 1.0;
1309 			dispersion = 1.0;
1310 		}
1311 		else
1312 		{
1313 			// The ray is entering a new object.
1314 			// For the purpose of refraction, pretend that we're leaving any containing objects,
1315 			// i.e. pretend that we didn't encounter (A (B ... but (A A|B ...
1316 			ior = nray.GetInteriors().back()->IOR / interior->IOR;
1317 			if(havedispersion == true)
1318 				dispersion = nray.GetInteriors().back()->Dispersion / interior->Dispersion;
1319 
1320 			nray.AppendInterior(interior);
1321 		}
1322 	}
1323 
1324 	// Do the two mediums traversed have the same indices of refraction?
1325 	if((fabs(ior - 1.0) < EPSILON) && (fabs(dispersion - 1.0) < EPSILON))
1326 	{
1327 		// Only transmit the ray.
1328 		Assign_Vector(nray.Direction, ray.Direction);
1329 		// Trace a transmitted ray.
1330 		threadData->Stats()[Transmitted_Rays_Traced]++;
1331 
1332 		colour.clear();
1333 		TraceRay(nray, colour, weight, ticket, true);
1334 	}
1335 	else
1336 	{
1337 		// Refract the ray.
1338 		n = dot(Vector3d(ray.Direction), normal);
1339 
1340 		if(n <= 0.0)
1341 		{
1342 			localnormal = normal;
1343 			n = -n;
1344 		}
1345 		else
1346 			localnormal = -normal;
1347 
1348 
1349 		// TODO FIXME: also for first radiosity pass ? (see line 3272 of v3.6 lighting.cpp)
1350 		if(fabs (dispersion - 1.0) < EPSILON) // TODO FIXME - radiosity: || (!isFinalTrace)
1351 			return TraceRefractionRay(finish, ipoint, ray, nray, ior, n, normal, rawnormal, localnormal, colour, weight, ticket);
1352 		else if(ray.IsMonochromaticRay() == true)
1353 			return TraceRefractionRay(finish, ipoint, ray, nray, ray.GetSpectralBand().GetDispersionIOR(ior, dispersion), n, normal, rawnormal, localnormal, colour, weight, ticket);
1354 		else
1355 		{
1356 			RGBColour sumcol;
1357 
1358 			for(unsigned int i = 0; i < dispersionelements; i++)
1359 			{
1360 				Colour tempcolour;
1361 
1362 				// NB setting the dispersion factor also causes the MonochromaticRay flag to be set
1363 				SpectralBand spectralBand(i, dispersionelements);
1364 				nray.SetSpectralBand(spectralBand);
1365 
1366 				(void)TraceRefractionRay(finish, ipoint, ray, nray, spectralBand.GetDispersionIOR(ior, dispersion), n, normal, rawnormal, localnormal, tempcolour, weight, ticket);
1367 
1368 				sumcol += RGBColour(tempcolour) * spectralBand.GetHue();
1369 			}
1370 
1371 			colour = Colour(sumcol / double(dispersionelements));
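			// In other words (explanatory comment only): each of the dispersionelements spectral bands
			// is traced as its own monochromatic refraction ray with a band-specific IOR, and
			//
			//   colour = ( sum_i  hue_i * colour_i ) / dispersionelements
			//
			// where hue_i is the band's RGB hue weight and colour_i the result of TraceRefractionRay for band i.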
1372 		}
1373 	}
1374 
1375 	return false;
1376 }
1377 
1378 bool Trace::TraceRefractionRay(const FINISH* finish, const Vector3d& ipoint, const Ray& ray, Ray& nray, double ior, double n, const Vector3d& normal, const Vector3d& rawnormal, const Vector3d& localnormal, Colour& colour, COLC weight, TraceTicket& ticket)
1379 {
1380 	// Compute refracted ray direction using Heckbert's method.
1381 	double t = 1.0 + Sqr(ior) * (Sqr(n) - 1.0);
1382 
1383 	if(t < 0.0)
1384 	{
1385 		Colour tempcolour;
1386 
1387 		// Total internal reflection occurs.
1388 		threadData->Stats()[Internal_Reflected_Rays_Traced]++;
1389 		ComputeReflection(finish, ipoint, ray, normal, rawnormal, tempcolour, weight, ticket);
1390 		colour += tempcolour;
1391 
1392 		return true;
1393 	}
1394 
1395 	t = ior * n - sqrt(t);
1396 
1397 	VLinComb2(nray.Direction, ior, ray.Direction, t, *localnormal);
1398 
1399 	// Trace a refracted ray.
1400 	threadData->Stats()[Refracted_Rays_Traced]++;
1401 
1402 	colour.clear();
1403 	TraceRay(nray, colour, weight, ticket, false);
1404 
1405 	return false;
1406 }
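// Minimal standalone sketch of the direction computation above (explanatory comment only, using
// hypothetical names): I is the unit incident direction, N the unit normal flipped towards the
// incident side ('localnormal' above), eta the relative IOR and cosi = -dot(I, N) >= 0:
//
//   double k = 1.0 + eta * eta * (cosi * cosi - 1.0);  // == 1 - eta^2 * (1 - cosi^2)
//   if (k < 0.0)
//       reflect();                                      // total internal reflection
//   else
//       T = eta * I + (eta * cosi - sqrt(k)) * N;       // refracted direction, already unit length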
1407 
1408 // see Diffuse in the 3.6 code (lighting.cpp)
1409 void Trace::ComputeDiffuseLight(const FINISH *finish, const Vector3d& ipoint, const Ray& eye, const Vector3d& layer_normal, const RGBColour& layer_pigment_colour,
1410                                 RGBColour& colour, double attenuation, ObjectPtr object, TraceTicket& ticket)
1411 {
1412 	Vector3d reye;
1413 
1414 	// TODO FIXME - [CLi] why is this computed here? Not so exciting, is it?
1415 	if(finish->Specular != 0.0)
1416 		reye = -Vector3d(eye.Direction);
1417 
1418 	// global light sources, if not turned off for this object
1419 	if((object->Flags & NO_GLOBAL_LIGHTS_FLAG) != NO_GLOBAL_LIGHTS_FLAG)
1420 	{
1421 		for(int i = 0; i < threadData->lightSources.size(); i++)
1422 			ComputeOneDiffuseLight(*threadData->lightSources[i], reye, finish, ipoint, eye, layer_normal, layer_pigment_colour, colour, attenuation, object, ticket, i);
1423 	}
1424 
1425 	// local light sources from a light group, if any
1426 	if(!object->LLights.empty())
1427 	{
1428 		for(int i = 0; i < object->LLights.size(); i++)
1429 			ComputeOneDiffuseLight(*object->LLights[i], reye, finish, ipoint, eye, layer_normal, layer_pigment_colour, colour, attenuation, object, ticket);
1430 	}
1431 }
1432 
1433 void Trace::ComputePhotonDiffuseLight(const FINISH *Finish, const Vector3d& IPoint, const Ray& Eye, const Vector3d& Layer_Normal, const Vector3d& Raw_Normal,
1434                                       const RGBColour& Layer_Pigment_Colour, RGBColour& colour, double Attenuation, ConstObjectPtr Object, PhotonGatherer& gatherer)
1435 {
1436 	double Cos_Shadow_Angle;
1437 	Ray Light_Source_Ray;
1438 	Vector3d REye;
1439 	RGBColour Light_Colour;
1440 	RGBColour tmpCol, tmpCol2;
1441 	double r;
1442 	int n;
1443 	int j;
1444 	double thisDensity=0;
1445 	double prevDensity=0.0000000000000001; // avoid div-by-zero error
1446 	int expanded = false;
1447 	double att;  // attenuation for lambertian compensation & filters
1448 
1449 	if (!sceneData->photonSettings.photonsEnabled || sceneData->surfacePhotonMap.numPhotons<1)
1450 		return;
1451 
1452 	if ((Finish->Diffuse == 0.0) && (Finish->DiffuseBack == 0.0) && (Finish->Specular == 0.0) && (Finish->Phong == 0.0))
1453 		return;
1454 
1455 	// statistics
1456 	threadData->Stats()[Gather_Performed_Count]++;
1457 
1458 	if (Finish->Specular != 0.0)
1459 	{
1460 		REye[X] = -Eye.Direction[X];
1461 		REye[Y] = -Eye.Direction[Y];
1462 		REye[Z] = -Eye.Direction[Z];
1463 	}
1464 
1465 	if(gatherer.gathered)
1466 		r = gatherer.alreadyGatheredRadius;
1467 	else
1468 		r = gatherer.gatherPhotonsAdaptive(*IPoint, *Layer_Normal, true);
1469 
1470 	n = gatherer.gatheredPhotons.numFound;
1471 
1472 	tmpCol.clear();
1473 
1474 	// now go through these photons and add up their contribution
1475 	for(j=0; j<n; j++)
1476 	{
1477 		// double theta,phi;
1478 		int theta,phi;
1479 		bool backside = false;
1480 
1481 		// convert small color to normal color
1482 		photonRgbe2colour(Light_Colour, gatherer.gatheredPhotons.photonGatherList[j]->colour);
1483 
1484 		// convert theta/phi to vector direction
1485 		// Use a pre-computed array of sin/cos to avoid many calls to the
1486 		// sin() and cos() functions.  These arrays were initialized in
1487 		// InitBacktraceEverything.
1488 		theta = gatherer.gatheredPhotons.photonGatherList[j]->theta+127;
1489 		phi = gatherer.gatheredPhotons.photonGatherList[j]->phi+127;
1490 
1491 		Light_Source_Ray.Direction[Y] = sinCosData.sinTheta[theta];
1492 		Light_Source_Ray.Direction[X] = sinCosData.cosTheta[theta];
1493 
1494 		Light_Source_Ray.Direction[Z] = Light_Source_Ray.Direction[X]*sinCosData.sinTheta[phi];
1495 		Light_Source_Ray.Direction[X] = Light_Source_Ray.Direction[X]*sinCosData.cosTheta[phi];
1496 
1497 		VSub(Light_Source_Ray.Origin, gatherer.gatheredPhotons.photonGatherList[j]->Loc, Light_Source_Ray.Direction);
1498 
1499 		// this compensates for real lambertian (diffuse) lighting (see paper)
1500 		// use raw normal, not layer normal
1501 		// att = dot(Layer_Normal, Vector3d(Light_Source_Ray.Direction));
1502 		att = dot(Raw_Normal, Vector3d(Light_Source_Ray.Direction));
1503 		if (att>1) att=1.0;
1504 		if (att<.1) att = 0.1; // limit to 10x - otherwise we get bright dots
1505 		att = 1.0 / fabs(att);
1506 
1507 		// do gaussian filter
1508 		//att *= 0.918*(1.0-(1.0-exp((-1.953) * gatherer.photonDistances[j])) / (1.0-exp(-1.953)) );
1509 		// do cone filter
1510 		//att *= 1.0-(sqrt(gatherer.photonDistances[j])/(4.0 * r)) / (1.0-2.0/(3.0*4.0));
1511 
1512 		Light_Colour *= att;
1513 
1514 		// See if light on far side of surface from camera.
1515 		if (!(Test_Flag(Object, DOUBLE_ILLUMINATE_FLAG)))
1516 		{
1517 			Cos_Shadow_Angle = dot(Layer_Normal, Vector3d(Light_Source_Ray.Direction));
1518 			if (Cos_Shadow_Angle < EPSILON)
1519 			{
1520 				if (Finish->DiffuseBack != 0.0)
1521 					backside = true;
1522 				else
1523 					continue;
1524 			}
1525 		}
1526 
1527 		// now add diffuse, phong, specular, irid contribution
1528 
1529 		tmpCol2.clear();
1530 
1531 		if (!(sceneData->useSubsurface && Finish->UseSubsurface))
1532 			// (Diffuse contribution is not supported in combination with BSSRDF, to emphasize the fact that the BSSRDF
1533 			// model is intended to provide for all the diffuse term by default. If users want to add some additional
1534 			// surface-only diffuse term, they should use layered textures.)
1535 			ComputeDiffuseColour(Finish, Light_Source_Ray, Layer_Normal, tmpCol2, Light_Colour, Layer_Pigment_Colour, Attenuation, backside);
1536 
1537 		// NK rad - don't compute highlights for radiosity gather rays, since this causes
1538 		// problems with colors being far too bright
1539 		// don't compute highlights for diffuse backside illumination
1540 		if(!Eye.IsRadiosityRay() && !backside) // TODO FIXME radiosity - is this really the right way to do it (speaking of realism)?
1541 		{
1542 			if (Finish->Phong > 0.0)
1543 			{
1544 				Vector3d ed(Eye.Direction);
1545 				ComputePhongColour(Finish, Light_Source_Ray, ed, Layer_Normal, tmpCol2, Light_Colour, Layer_Pigment_Colour);
1546 			}
1547 			if (Finish->Specular > 0.0)
1548 			{
1549 				ComputeSpecularColour(Finish, Light_Source_Ray, REye, Layer_Normal, tmpCol2, Light_Colour, Layer_Pigment_Colour);
1550 			}
1551 		}
1552 
1553 		if (Finish->Irid > 0.0)
1554 		{
1555 			ComputeIridColour(Finish, Vector3d(Light_Source_Ray.Direction), Vector3d(Eye.Direction), Layer_Normal, IPoint, tmpCol2);
1556 		}
1557 
1558 		tmpCol += tmpCol2;
1559 	}
1560 
1561 	// finish the photons equation
1562 	tmpCol /= M_PI*r*r;
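	// (Explanatory comment only.) This is the usual photon-map density estimate: the flux carried by
	// the photons gathered within radius r is assumed to arrive over a disc of area pi*r^2, so
	//
	//   L_out ~ ( sum_j BRDF_j * flux_j ) / (pi * r^2)
	//
	// which is exactly what the loop above accumulated into tmpCol before this division.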
1563 
1564 	// add photon contribution to total lighting
1565 	colour += tmpCol;
1566 }
1567 
1568 // see Diffuse_One_Light in the 3.6 code (lighting.cpp)
1569 void Trace::ComputeOneDiffuseLight(const LightSource &lightsource, const Vector3d& reye, const FINISH *finish, const Vector3d& ipoint, const Ray& eye, const Vector3d& layer_normal,
1570                                    const RGBColour& layer_pigment_colour, RGBColour& colour, double attenuation, ConstObjectPtr object, TraceTicket& ticket, int light_index)
1571 {
1572 	double lightsourcedepth, cos_shadow_angle;
1573 	Ray lightsourceray(eye);
1574 	RGBColour lightcolour;
1575 	bool backside = false;
1576 	RGBColour tmpCol;
1577 
1578 	// Get a colour and a ray.
1579 	ComputeOneLightRay(lightsource, lightsourcedepth, lightsourceray, ipoint, lightcolour);
1580 
1581 	// Don't calculate spotlights when outside of the light's cone.
1582 	if((fabs(lightcolour.red())   < EPSILON) &&
1583 	   (fabs(lightcolour.green()) < EPSILON) &&
1584 	   (fabs(lightcolour.blue())  < EPSILON))
1585 		return;
1586 
1587 	// See if light on far side of surface from camera.
1588 	if(!(Test_Flag(object, DOUBLE_ILLUMINATE_FLAG)) // NK 1998 double_illuminate - changed to Test_Flag
1589 	   && !lightsource.Use_Full_Area_Lighting) // JN2007: Easiest way of getting rid of sharp shadow lines
1590 	{
1591 		cos_shadow_angle = dot(layer_normal, Vector3d(lightsourceray.Direction));
1592 		if(cos_shadow_angle < EPSILON)
1593 		{
1594 			if (finish->DiffuseBack != 0.0)
1595 				backside = true;
1596 			else
1597 				return;
1598 		}
1599 	}
1600 
1601 	// If light source was not blocked by any intervening object, then
1602 	// calculate its contribution to the object's overall illumination.
1603 	if ((qualityFlags & Q_SHADOW) && ((lightsource.Projected_Through_Object != NULL) || (lightsource.Light_Type != FILL_LIGHT_SOURCE)))
1604 	{
1605 		if (lightColorCacheIndex != -1 && light_index != -1)
1606 		{
1607 			if (lightColorCache[lightColorCacheIndex][light_index].tested == false)
1608 			{
1609 				// note that lightColorCache may be re-sized during trace, so we don't store a reference to it across the call
1610 				TraceShadowRay(lightsource, lightsourcedepth, lightsourceray, ipoint, lightcolour, ticket);
1611 				lightColorCache[lightColorCacheIndex][light_index].tested = true;
1612 				lightColorCache[lightColorCacheIndex][light_index].colour = lightcolour;
1613 			}
1614 			else
1615 				lightcolour = lightColorCache[lightColorCacheIndex][light_index].colour;
1616 		}
1617 		else
1618 			TraceShadowRay(lightsource, lightsourcedepth, lightsourceray, ipoint, lightcolour, ticket);
1619 	}
1620 
1621 	if((fabs(lightcolour.red())   > EPSILON) ||
1622 	   (fabs(lightcolour.green()) > EPSILON) ||
1623 	   (fabs(lightcolour.blue())  > EPSILON))
1624 	{
1625 		if(lightsource.Area_Light && lightsource.Use_Full_Area_Lighting &&
1626 			(qualityFlags & Q_AREA_LIGHT)) // JN2007: Full area lighting
1627 		{
1628 			ComputeFullAreaDiffuseLight(lightsource, reye, finish, ipoint, eye,
1629 				layer_normal, layer_pigment_colour, colour, attenuation,
1630 				lightsourcedepth, lightsourceray, lightcolour,
1631 				Test_Flag(object, DOUBLE_ILLUMINATE_FLAG));
1632 			return;
1633 		}
1634 
1635 		if(!(sceneData->useSubsurface && finish->UseSubsurface))
1636 			// (Diffuse contribution is not supported in combination with BSSRDF, to emphasize the fact that the BSSRDF
1637 			// model is intended to provide for all the diffuse term by default. If users want to add some additional
1638 			// surface-only diffuse term, they should use layered textures.)
1639 			ComputeDiffuseColour(finish, lightsourceray, layer_normal, tmpCol, lightcolour, layer_pigment_colour, attenuation, backside);
1640 
1641 		// NK rad - don't compute highlights for radiosity gather rays, since this causes
1642 		// problems with colors being far too bright
1643 		// don't compute highlights for diffuse backside illumination
1644 		if((lightsource.Light_Type != FILL_LIGHT_SOURCE) && !eye.IsRadiosityRay() && !backside) // TODO FIXME radiosity - is this really the right way to do it (speaking of realism)?
1645 		{
1646 			if(finish->Phong > 0.0)
1647 			{
1648 				Vector3d ed(eye.Direction);
1649 				ComputePhongColour(finish, lightsourceray, ed, layer_normal, tmpCol, lightcolour, layer_pigment_colour);
1650 			}
1651 
1652 			if(finish->Specular > 0.0)
1653 				ComputeSpecularColour(finish, lightsourceray, reye, layer_normal, tmpCol, lightcolour, layer_pigment_colour);
1654 		}
1655 
1656 		if(finish->Irid > 0.0)
1657 			ComputeIridColour(finish, Vector3d(lightsourceray.Direction), Vector3d(eye.Direction), layer_normal, ipoint, tmpCol);
1658 	}
1659 
1660 	colour += tmpCol;
1661 }
1662 
1663 // JN2007: Full area lighting:
1664 void Trace::ComputeFullAreaDiffuseLight(const LightSource &lightsource, const Vector3d& reye, const FINISH *finish, const Vector3d& ipoint, const Ray& eye,
1665                                         const Vector3d& layer_normal, const RGBColour& layer_pigment_colour, RGBColour& colour, double attenuation,
1666                                         double lightsourcedepth, Ray& lightsourceray, const RGBColour& lightcolour, bool isDoubleIlluminated)
1667 {
1668 	Vector3d temp;
1669 	Vector3d axis1Temp, axis2Temp;
1670 	double axis1_Length, cos_shadow_angle;
1671 
1672 	axis1Temp = Vector3d(lightsource.Axis1);
1673 	axis2Temp = Vector3d(lightsource.Axis2);
1674 
1675 	if(lightsource.Orient == true)
1676 	{
1677 		// Orient the area light to face the intersection point [ENB 9/97]
1678 
1679 		// Compute the light source ray first so that lightsourceray is correct
1680 		ComputeOneWhiteLightRay(lightsource, lightsourcedepth, lightsourceray, ipoint);
1681 
1682 		// Save the lengths of the axes
1683 		axis1_Length = axis1Temp.length();
1684 
1685 		// Make axis 1 perpendicular to the light ray
1686 		if(fabs(fabs(lightsourceray.Direction[Z]) - 1.0) < 0.01)
1687 			// too close to vertical for comfort, so use cross product with horizon
1688 			temp = Vector3d(0.0, 1.0, 0.0);
1689 		else
1690 			temp = Vector3d(0.0, 0.0, 0.1);
1691 
1692 		axis1Temp = cross(Vector3d(lightsourceray.Direction), temp).normalized();
1693 
1694 		// Make axis 2 perpendicular to the light ray and to Axis1.  A simple cross-product will do the trick.
1695 		axis2Temp = cross(Vector3d(lightsourceray.Direction), axis1Temp).normalized();
1696 
1697 		// make it square
1698 		axis1Temp *= axis1_Length;
1699 		axis2Temp *= axis1_Length;
1700 	}
1701 
1702 	RGBColour sampleLightcolour = lightcolour / (lightsource.Area_Size1 * lightsource.Area_Size2);
1703 	RGBColour attenuatedLightcolour;
1704 
1705 	for(int v = 0; v < lightsource.Area_Size2; ++v)
1706 	{
1707 		for(int u = 0; u < lightsource.Area_Size1; ++u)
1708 		{
1709 			Vector3d jitterAxis1, jitterAxis2;
1710 			Ray lsr(lightsourceray);
1711 			double jitter_u = (double)u;
1712 			double jitter_v = (double)v;
1713 			bool backside = false;
1714 			RGBColour tmpCol;
1715 
1716 			if(lightsource.Jitter)
1717 			{
1718 				jitter_u += randomNumberGenerator() - 0.5;
1719 				jitter_v += randomNumberGenerator() - 0.5;
1720 			}
1721 
1722 			// Create circular area lights [ENB 9/97]
1723 			// First, make jitter_u and jitter_v be numbers from -1 to 1
1724 			// Second, set scaleFactor to the abs max (jitter_u,jitter_v) (for shells)
1725 			// Third, divide scaleFactor by the length of <jitter_u,jitter_v>
1726 			// Fourth, scale jitter_u & jitter_v by scaleFactor
1727 			// Finally scale Axis1 by jitter_u & Axis2 by jitter_v
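			// Worked example (explanatory comment only, hypothetical 3x3 grid, no jitter): the corner
			// sample u = 2, v = 2 gives
			//   jitter_u = jitter_v = 2/(3-1) - 0.5 + 0.001 = 0.501
			//   scaleFactor = 0.501 / sqrt(0.501^2 + 0.501^2) ~= 0.707
			// so the corner (0.501, 0.501) is pulled in to roughly (0.354, 0.354), i.e. onto the circle
			// of radius ~0.5; square "shells" of samples map onto concentric circles before being
			// scaled by Axis1/Axis2.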
1728 			if(lightsource.Circular == true)
1729 			{
1730 				jitter_u = jitter_u / (lightsource.Area_Size1 - 1) - 0.5 + 0.001;
1731 				jitter_v = jitter_v / (lightsource.Area_Size2 - 1) - 0.5 + 0.001;
1732 				double scaleFactor = ((fabs(jitter_u) > fabs(jitter_v)) ? fabs(jitter_u) : fabs(jitter_v));
1733 				scaleFactor /= sqrt(jitter_u * jitter_u + jitter_v * jitter_v);
1734 				jitter_u *= scaleFactor;
1735 				jitter_v *= scaleFactor;
1736 				jitterAxis1 = axis1Temp * jitter_u;
1737 				jitterAxis2 = axis2Temp * jitter_v;
1738 			}
1739 			else
1740 			{
1741 				if(lightsource.Area_Size1 > 1)
1742 				{
1743 					double scaleFactor = jitter_u / (double)(lightsource.Area_Size1 - 1) - 0.5;
1744 					jitterAxis1 = axis1Temp * scaleFactor;
1745 				}
1746 				else
1747 					jitterAxis1 = Vector3d(0.0, 0.0, 0.0);
1748 
1749 				if(lightsource.Area_Size2 > 1)
1750 				{
1751 					double scaleFactor = jitter_v / (double)(lightsource.Area_Size2 - 1) - 0.5;
1752 					jitterAxis2 = axis2Temp * scaleFactor;
1753 				}
1754 				else
1755 					jitterAxis2 = Vector3d(0.0, 0.0, 0.0);
1756 			}
1757 
1758 			// Recalculate the light source ray but not the colour
1759 			ComputeOneWhiteLightRay(lightsource, lightsourcedepth, lsr, ipoint, jitterAxis1 + jitterAxis2);
1760 			// Calculate distance- and angle-based light attenuation
1761 			attenuatedLightcolour = sampleLightcolour * Attenuate_Light(&lightsource, lsr, lightsourcedepth);
1762 
1763 			// If not double-illuminated, check if the normal is pointing away:
1764 			if(!isDoubleIlluminated)
1765 			{
1766 				cos_shadow_angle = dot(layer_normal, Vector3d(lsr.Direction));
1767 				if(cos_shadow_angle < EPSILON)
1768 				{
1769 					if (finish->DiffuseBack != 0.0)
1770 						backside = true;
1771 					else
1772 						continue;
1773 				}
1774 			}
1775 
1776 			if(!(sceneData->useSubsurface && finish->UseSubsurface))
1777 				// (Diffuse contribution is not supported in combination with BSSRDF, to emphasize the fact that the BSSRDF
1778 				// model is intended to provide for all the diffuse term by default. If users want to add some additional
1779 				// surface-only diffuse term, they should use layered textures.)
1780 				ComputeDiffuseColour(finish, lsr, layer_normal, tmpCol, attenuatedLightcolour, layer_pigment_colour, attenuation, backside);
1781 
1782 			// NK rad - don't compute highlights for radiosity gather rays, since this causes
1783 			// problems with colors being far too bright
1784 			// don't compute highlights for diffuse backside illumination
1785 			if((lightsource.Light_Type != FILL_LIGHT_SOURCE) && !eye.IsRadiosityRay() && !backside) // TODO FIXME radiosity - is this really the right way to do it (speaking of realism)?
1786 			{
1787 				if(finish->Phong > 0.0)
1788 				{
1789 					Vector3d ed(eye.Direction);
1790 					ComputePhongColour(finish, lsr, ed, layer_normal, tmpCol, attenuatedLightcolour, layer_pigment_colour);
1791 				}
1792 
1793 				if(finish->Specular > 0.0)
1794 					ComputeSpecularColour(finish, lsr, reye, layer_normal, tmpCol, attenuatedLightcolour, layer_pigment_colour);
1795 			}
1796 
1797 			if(finish->Irid > 0.0)
1798 				ComputeIridColour(finish, Vector3d(lightsourceray.Direction), Vector3d(eye.Direction), layer_normal, ipoint, tmpCol);
1799 
1800 			colour += tmpCol;
1801 		}
1802 	}
1803 }
1804 
1805 // see do_light in version 3.6's lighting.cpp
1806 void Trace::ComputeOneLightRay(const LightSource &lightsource, double& lightsourcedepth, Ray& lightsourceray, const Vector3d& ipoint, RGBColour& lightcolour, bool forceAttenuate)
1807 {
1808 	double attenuation;
1809 	ComputeOneWhiteLightRay(lightsource, lightsourcedepth, lightsourceray, ipoint);
1810 
1811 	// Get the light source colour.
1812 	lightcolour = lightsource.colour;
1813 
1814 	// Attenuate light source color.
1815 	if (lightsource.Area_Light && lightsource.Use_Full_Area_Lighting && (qualityFlags & Q_AREA_LIGHT) && !forceAttenuate)
1816 		// for full area lighting we apply distance- and angle-based attenuation to each "lightlet" individually later
1817 		attenuation = 1.0;
1818 	else
1819 		attenuation = Attenuate_Light(&lightsource, lightsourceray, lightsourcedepth);
1820 
1821 	// Now scale the color by the attenuation
1822 	lightcolour *= attenuation;
1823 }
1824 
1825 // see block_light_source in the version 3.6 source
1826 void Trace::TraceShadowRay(const LightSource &lightsource, double depth, const Ray& lightsourceray, const Vector3d& point, RGBColour& colour, TraceTicket& ticket)
1827 {
1828 	// test and set highest level traced. We do it differently than TraceRay() does,
1829 	// for compatibility with the way max_trace_level is tested and reported in v3.6
1830 	// and earlier.
1831 	if(ticket.traceLevel > ticket.maxAllowedTraceLevel)
1832 	{
1833 		colour.clear();
1834 		return;
1835 	}
1836 
1837 	ticket.maxFoundTraceLevel = (unsigned int) max(ticket.maxFoundTraceLevel, ticket.traceLevel);
1838 	ticket.traceLevel++;
1839 
1840 	double newdepth;
1841 	Intersection isect;
1842 	Ray newray(lightsourceray);
1843 
1844 	// Store current depth and ray because they will be modified.
1845 	newdepth = depth;
1846 
1847 	// NOTE: shadow rays are never photon rays, so flag can be hard-coded to false
1848 	newray.SetFlags(Ray::OtherRay, true, false);
1849 
1850 	// Get shadows from current light source.
1851 	if((lightsource.Area_Light) && (qualityFlags & Q_AREA_LIGHT))
1852 		TraceAreaLightShadowRay(lightsource, newdepth, newray, point, colour, ticket);
1853 	else
1854 		TracePointLightShadowRay(lightsource, newdepth, newray, colour, ticket);
1855 
1856 	// If there's some distance left for the ray to reach the light source
1857 	// we have to apply atmospheric stuff to this part of the ray.
1858 
1859 	if((newdepth > SHADOW_TOLERANCE) && (lightsource.Media_Interaction) && (lightsource.Media_Attenuation))
1860 	{
1861 		isect.Depth = newdepth;
1862 		isect.Object = NULL;
1863 		ComputeShadowMedia(newray, isect, colour, (lightsource.Media_Interaction) && (lightsource.Media_Attenuation), ticket);
1864 	}
1865 
1866 	ticket.traceLevel--;
1867 	maxFoundTraceLevel = (unsigned int) max(maxFoundTraceLevel, ticket.maxFoundTraceLevel);
1868 }
1869 
1870 // moved this here (was originally inside TracePointLightShadowRay) because
1871 // for some reason the Intel compiler (version W_CC_PC_8.1.027) will fail
1872 // to link the exe, complaining of an unresolved external.
1873 //
1874 // TODO: try moving it back in at some point in the future.
1875 struct NoShadowFlagRayObjectCondition : public RayObjectCondition
1876 {
1877 	virtual bool operator()(const Ray&, const ObjectBase* object, double) const { return !Test_Flag(object, NO_SHADOW_FLAG); }
1878 };
1879 
1880 struct SmallToleranceRayObjectCondition : public RayObjectCondition
1881 {
1882 	virtual bool operator()(const Ray&, const ObjectBase*, double dist) const { return dist > SMALL_TOLERANCE; }
1883 };
1884 
1885 void Trace::TracePointLightShadowRay(const LightSource &lightsource, double& lightsourcedepth, Ray& lightsourceray, RGBColour& lightcolour, TraceTicket& ticket)
1886 {
1887 	Intersection boundedIntersection;
1888 	ObjectPtr cacheObject = NULL;
1889 	bool foundTransparentObjects = false;
1890 	bool foundIntersection;
1891 
1892 	// Projected through main tests
1893 	double projectedDepth = 0.0;
1894 
1895 	if(lightsource.Projected_Through_Object != NULL)
1896 	{
1897 		Intersection tempIntersection;
1898 
1899 		if(FindIntersection(lightsource.Projected_Through_Object, tempIntersection, lightsourceray))
1900 		{
1901 			if((tempIntersection.Depth - lightsourcedepth) < 0.0)
1902 				projectedDepth = lightsourcedepth - fabs(tempIntersection.Depth) + SMALL_TOLERANCE;
1903 			else
1904 			{
1905 				lightcolour.clear();
1906 				return;
1907 			}
1908 		}
1909 		else
1910 		{
1911 			lightcolour.clear();
1912 			return;
1913 		}
1914 
1915 		// Make sure we don't do shadows for fill light sources.
1916 		// (Note that if Projected_Through_Object is NULL, the test for FILL_LIGHT_SOURCE has happened earlier already)
1917 		if(lightsource.Light_Type == FILL_LIGHT_SOURCE)
1918 			return;
1919 	}
1920 
1921 	NoShadowFlagRayObjectCondition precond;
1922 	SmallToleranceRayObjectCondition postcond;
1923 
1924 	// check for object in the light source shadow cache (object that fully shadowed during last test) first
1925 
1926 	if(lightsource.lightGroupLight == false) // we don't cache for light groups
1927 	{
1928 		if((ticket.traceLevel == 2) && (lightSourceLevel1ShadowCache[lightsource.index] != NULL))
1929 			cacheObject = lightSourceLevel1ShadowCache[lightsource.index];
1930 		else if(lightSourceOtherShadowCache[lightsource.index] != NULL)
1931 			cacheObject = lightSourceOtherShadowCache[lightsource.index];
1932 
1933 		// if there was an object in the light source shadow cache, check that first
1934 		if(cacheObject != NULL)
1935 		{
1936 			if(FindIntersection(cacheObject, boundedIntersection, lightsourceray, lightsourcedepth - projectedDepth) == true)
1937 			{
1938 				if(!Test_Flag(boundedIntersection.Object, NO_SHADOW_FLAG))
1939 				{
1940 					ComputeShadowColour(lightsource, boundedIntersection, lightsourceray, lightcolour, ticket);
1941 
1942 					if((fabs(lightcolour.red())   < EPSILON) &&
1943 					   (fabs(lightcolour.green()) < EPSILON) &&
1944 					   (fabs(lightcolour.blue())  < EPSILON) &&
1945 					   (Test_Flag(boundedIntersection.Object, OPAQUE_FLAG)))
1946 					{
1947 						threadData->Stats()[Shadow_Ray_Tests]++;
1948 						threadData->Stats()[Shadow_Rays_Succeeded]++;
1949 						threadData->Stats()[Shadow_Cache_Hits]++;
1950 						return;
1951 					}
1952 				}
1953 				else
1954 					cacheObject = NULL;
1955 			}
1956 			else
1957 				cacheObject = NULL;
1958 		}
1959 	}
1960 
1961 	foundTransparentObjects = false;
1962 
1963 	while(true)
1964 	{
1965 		boundedIntersection.Object = boundedIntersection.Csg = NULL;
1966 		boundedIntersection.Depth = lightsourcedepth - projectedDepth;
1967 
1968 		threadData->Stats()[Shadow_Ray_Tests]++;
1969 
1970 		foundIntersection = FindIntersection(boundedIntersection, lightsourceray, precond, postcond);
1971 
1972 		if((foundIntersection == true) && (boundedIntersection.Object != cacheObject) &&
1973 		   (boundedIntersection.Depth < lightsourcedepth - SHADOW_TOLERANCE) &&
1974 		   (lightsourcedepth - boundedIntersection.Depth > projectedDepth) &&
1975 		   (boundedIntersection.Depth > SHADOW_TOLERANCE))
1976 		{
1977 			threadData->Stats()[Shadow_Rays_Succeeded]++;
1978 
1979 			ComputeShadowColour(lightsource, boundedIntersection, lightsourceray, lightcolour, ticket);
1980 
1981 			ObjectPtr testObject(boundedIntersection.Csg != NULL ? boundedIntersection.Csg : boundedIntersection.Object);
1982 
1983 			if((fabs(lightcolour.red())   < EPSILON) &&
1984 			   (fabs(lightcolour.green()) < EPSILON) &&
1985 			   (fabs(lightcolour.blue())  < EPSILON) &&
1986 			   (Test_Flag(testObject, OPAQUE_FLAG)))
1987 			{
1988 				// Hit a fully opaque object; cache that object, so that next time we can test for it first;
1989 				// don't cache for light groups though (why not??)
1990 
1991 				if((lightsource.lightGroupLight == false) && (foundTransparentObjects == false))
1992 				{
1993 					cacheObject = testObject;
1994 
1995 					if(ticket.traceLevel == 2)
1996 						lightSourceLevel1ShadowCache[lightsource.index] = cacheObject;
1997 					else
1998 						lightSourceOtherShadowCache[lightsource.index] = cacheObject;
1999 				}
2000 				break;
2001 			}
2002 
2003 			foundTransparentObjects = true;
2004 
2005 			// Move the ray to the point of intersection, plus some
2006 			lightsourcedepth -= boundedIntersection.Depth;
2007 
2008 			Assign_Vector(lightsourceray.Origin, boundedIntersection.IPoint);
2009 		}
2010 		else
2011 			// No further intersections in the direction of the ray.
2012 			break;
2013 	}
2014 }
2015 
2016 void Trace::TraceAreaLightShadowRay(const LightSource &lightsource, double& lightsourcedepth, Ray& lightsourceray,
2017                                     const Vector3d& ipoint, RGBColour& lightcolour, TraceTicket& ticket)
2018 {
2019 	Vector3d temp;
2020 	Vector3d axis1Temp, axis2Temp;
2021 	double axis1_Length;
2022 
2023 	lightGrid.resize(lightsource.Area_Size1 * lightsource.Area_Size2);
2024 
2025 	// Flag uncalculated points with a negative value for Red
2026 	/*
2027 	for(i = 0; i < lightsource.Area_Size1; i++)
2028 	{
2029 		for(j = 0; j < lightsource.Area_Size2; j++)
2030 			lightGrid[i * lightsource.Area_Size2 + j].red() = -1.0; // TODO FIXME - Old bug: This will not work with negative color values! [trf]
2031 	}
2032 	*/
2033 	for(size_t ind = 0; ind < lightGrid.size(); ++ind)
2034 		lightGrid[ind].red() = -1.0;
2035 
2036 	axis1Temp = Vector3d(lightsource.Axis1);
2037 	axis2Temp = Vector3d(lightsource.Axis2);
2038 
2039 	if(lightsource.Orient == true)
2040 	{
2041 		// Orient the area light to face the intersection point [ENB 9/97]
2042 
2043 		// Compute the light source ray first so that lightsourceray is correct
2044 		ComputeOneWhiteLightRay(lightsource, lightsourcedepth, lightsourceray, ipoint);
2045 
2046 		// Save the lengths of the axes
2047 		axis1_Length = axis1Temp.length();
2048 
2049 		// Make axis 1 perpendicular to the light ray
2050 		if(fabs(fabs(lightsourceray.Direction[Z]) - 1.0) < 0.01)
2051 			// too close to vertical for comfort, so use cross product with horizon
2052 			temp = Vector3d(0.0, 1.0, 0.0);
2053 		else
2054 			temp = Vector3d(0.0, 0.0, 1.0);
2055 
2056 		axis1Temp = cross(Vector3d(lightsourceray.Direction), temp).normalized();
2057 
2058 		// Make axis 2 perpendicular to the light ray and to Axis1.  A simple cross-product will do the trick.
2059 		axis2Temp = cross(Vector3d(lightsourceray.Direction), axis1Temp).normalized();
2060 
2061 		// make it square
2062 		axis1Temp *= axis1_Length;
2063 		axis2Temp *= axis1_Length;
2064 	}
2065 
2066 	TraceAreaLightSubsetShadowRay(lightsource, lightsourcedepth, lightsourceray, ipoint, lightcolour, 0, 0, lightsource.Area_Size1 - 1, lightsource.Area_Size2 - 1, 0, axis1Temp, axis2Temp, ticket);
2067 }
2068 
2069 void Trace::TraceAreaLightSubsetShadowRay(const LightSource &lightsource, double& lightsourcedepth, Ray& lightsourceray,
2070                                     const Vector3d& ipoint, RGBColour& lightcolour, int u1, int  v1, int  u2, int  v2, int level, const Vector3d& axis1, const Vector3d& axis2, TraceTicket& ticket)
2071 {
2072 	RGBColour sample_Colour[4];
2073 	int i, u, v, new_u1, new_v1, new_u2, new_v2;
2074 	double jitter_u, jitter_v, scaleFactor;
2075 
2076 	// Sample the four corners of the region
2077 	for(i = 0; i < 4; i++)
2078 	{
2079 		Vector3d center(lightsource.Center);
2080 		Ray lsr(lightsourceray);
2081 
2082 		switch(i)
2083 		{
2084 			case 0: u = u1; v = v1; break;
2085 			case 1: u = u2; v = v1; break;
2086 			case 2: u = u1; v = v2; break;
2087 			case 3: u = u2; v = v2; break;
2088 			default: u = v = 0;  // Should never happen!
2089 		}
2090 
2091 		if(lightGrid[u * lightsource.Area_Size2 + v].red() >= 0.0)
2092 			// We've already calculated this point, reuse it
2093 			sample_Colour[i] = lightGrid[u * lightsource.Area_Size2 + v];
2094 		else
2095 		{
2096 			Vector3d jitterAxis1, jitterAxis2;
2097 
2098 			jitter_u = (double)u;
2099 			jitter_v = (double)v;
2100 
2101 			if(lightsource.Jitter)
2102 			{
2103 				jitter_u += randomNumberGenerator() - 0.5;
2104 				jitter_v += randomNumberGenerator() - 0.5;
2105 			}
2106 
2107 			// Create circular area lights [ENB 9/97]
2108 			// First, make jitter_u and jitter_v be numbers from -1 to 1
2109 			// Second, set scaleFactor to the abs max (jitter_u,jitter_v) (for shells)
2110 			// Third, divide scaleFactor by the length of <jitter_u,jitter_v>
2111 			// Fourth, scale jitter_u & jitter_v by scaleFactor
2112 			// Finally scale Axis1 by jitter_u & Axis2 by jitter_v
2113 			if(lightsource.Circular == true)
2114 			{
2115 				jitter_u = jitter_u / (lightsource.Area_Size1 - 1) - 0.5 + 0.001;
2116 				jitter_v = jitter_v / (lightsource.Area_Size2 - 1) - 0.5 + 0.001;
2117 				scaleFactor = ((fabs(jitter_u) > fabs(jitter_v)) ? fabs(jitter_u) : fabs(jitter_v));
2118 				scaleFactor /= sqrt(jitter_u * jitter_u + jitter_v * jitter_v);
2119 				jitter_u *= scaleFactor;
2120 				jitter_v *= scaleFactor;
2121 				jitterAxis1 = axis1 * jitter_u;
2122 				jitterAxis2 = axis2 * jitter_v;
2123 			}
2124 			else
2125 			{
2126 				if(lightsource.Area_Size1 > 1)
2127 				{
2128 					scaleFactor = jitter_u / (double)(lightsource.Area_Size1 - 1) - 0.5;
2129 					jitterAxis1 = axis1 * scaleFactor;
2130 				}
2131 				else
2132 					jitterAxis1 = Vector3d(0.0, 0.0, 0.0);
2133 
2134 				if(lightsource.Area_Size2 > 1)
2135 				{
2136 					scaleFactor = jitter_v / (double)(lightsource.Area_Size2 - 1) - 0.5;
2137 					jitterAxis2 = axis2 * scaleFactor;
2138 				}
2139 				else
2140 					jitterAxis2 = Vector3d(0.0, 0.0, 0.0);
2141 			}
2142 
2143 			// Recalculate the light source ray but not the colour
2144 			ComputeOneWhiteLightRay(lightsource, lightsourcedepth, lsr, ipoint, jitterAxis1 + jitterAxis2);
2145 
2146 			sample_Colour[i] = lightcolour;
2147 
2148 			TracePointLightShadowRay(lightsource, lightsourcedepth, lsr, sample_Colour[i], ticket);
2149 
2150 			lightGrid[u * lightsource.Area_Size2 + v] = sample_Colour[i];
2151 		}
2152 	}
2153 
2154 	if((u2 - u1 > 1) || (v2 - v1 > 1))
2155 	{
2156 		if((level < lightsource.Adaptive_Level) ||
2157 		   (colourDistance(sample_Colour[0], sample_Colour[1]) > 0.1) ||
2158 		   (colourDistance(sample_Colour[1], sample_Colour[3]) > 0.1) ||
2159 		   (colourDistance(sample_Colour[3], sample_Colour[2]) > 0.1) ||
2160 		   (colourDistance(sample_Colour[2], sample_Colour[0]) > 0.1))
2161 		{
2162 			Vector3d center(lightsource.Center);
2163 
2164 			for (i = 0; i < 4; i++)
2165 			{
2166 				switch (i)
2167 				{
2168 					case 0:
2169 						new_u1 = u1;
2170 						new_v1 = v1;
2171 						new_u2 = (int)floor((u1 + u2)/2.0);
2172 						new_v2 = (int)floor((v1 + v2)/2.0);
2173 						break;
2174 					case 1:
2175 						new_u1 = (int)ceil((u1 + u2)/2.0);
2176 						new_v1 = v1;
2177 						new_u2 = u2;
2178 						new_v2 = (int)floor((v1 + v2)/2.0);
2179 						break;
2180 					case 2:
2181 						new_u1 = u1;
2182 						new_v1 = (int)ceil((v1 + v2)/2.0);
2183 						new_u2 = (int)floor((u1 + u2)/2.0);
2184 						new_v2 = v2;
2185 						break;
2186 					case 3:
2187 						new_u1 = (int)ceil((u1 + u2)/2.0);
2188 						new_v1 = (int)ceil((v1 + v2)/2.0);
2189 						new_u2 = u2;
2190 						new_v2 = v2;
2191 						break;
2192 					default:  // Should never happen!
2193 						new_u1 = new_u2 = new_v1 = new_v2 = 0;
2194 				}
2195 
2196 				// Recalculate the light source ray but not the colour
2197 				ComputeOneWhiteLightRay(lightsource, lightsourcedepth, lightsourceray, ipoint, center);
2198 
2199 				sample_Colour[i] = lightcolour;
2200 
2201 				TraceAreaLightSubsetShadowRay(lightsource, lightsourcedepth, lightsourceray,
2202 				                              ipoint, sample_Colour[i], new_u1, new_v1, new_u2, new_v2, level + 1, axis1, axis2, ticket);
2203 			}
2204 		}
2205 	}
2206 
2207 	// Average up the light contributions
2208 	lightcolour = (sample_Colour[0] + sample_Colour[1] + sample_Colour[2] + sample_Colour[3]) * 0.25;
2209 }
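// Summary of the adaptive scheme above (explanatory comment only): the four corners of the current
// (u1,v1)-(u2,v2) region are sampled first (with results cached in lightGrid); if the recursion depth
// is still below lightsource.Adaptive_Level, or any pair of adjacent corner colours differs by more
// than 0.1, the region is split into four quadrants which are sampled recursively; the shadow colour
// returned for the region is always the average of its four corner/quadrant results.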
2210 
2211 // see filter_shadow_ray in version 3.6's lighting.cpp
2212 void Trace::ComputeShadowColour(const LightSource &lightsource, Intersection& isect, Ray& lightsourceray, RGBColour& colour, TraceTicket& ticket)
2213 {
2214 	WeightedTextureVector wtextures;
2215 	Vector3d ipoint;
2216 	Vector3d raw_Normal;
2217 	Colour fc1, temp_Colour;
2218 	Vector2d uv_Coords;
2219 	double normaldirection;
2220 
2221 	// Here's the issue:
2222 	// Imagine "LightA" shoots photons at "GlassSphereB", which refracts light and
2223 	// hits "PlaneC".
2224 	// When computing Diffuse/Phong/etc lighting for PlaneC, if there were no
2225 	// photons, POV would compute a filtered shadow ray from PlaneC through
2226 	// GlassSphereB to LightA.  If photons are used for the combination of objects,
2227 	// this filtered shadow ray should be completely black.  The filtered shadow
2228 	// ray should be forced to black UNLESS any of the following conditions are
2229 	// true (which would indicate that photons were not shot from LightA through
2230 	// GlassSphereB to PlaneC):
2231 	// 1) PlaneC has photon collection set to "off"
2232 	// 2) GlassSphereB is not a photon target
2233 	// 3) GlassSphereB has photon refraction set to "off"
2234 	// 4) LightA has photon refraction set to "off"
2235 	// 5) Neither GlassSphereB nor LightA has photon refraction set to "on"
2236 	if((sceneData->photonSettings.photonsEnabled == true) &&
2237 		(sceneData->surfacePhotonMap.numPhotons > 0) &&
2238 		(!threadData->litObjectIgnoresPhotons) &&
2239 		(Test_Flag(isect.Object,PH_TARGET_FLAG)) &&
2240 		(!Test_Flag(isect.Object,PH_RFR_OFF_FLAG)) &&
2241 		(!Test_Flag(&lightsource,PH_RFR_OFF_FLAG)) &&
2242 		((Test_Flag(isect.Object,PH_RFR_ON_FLAG) || Test_Flag(&lightsource,PH_RFR_ON_FLAG)))
2243 		)
2244 	{
2245 		// full shadow (except for photon-based illumination)
2246 		colour.clear();
2247 		return;
2248 	}
2249 
2250 	ipoint = Vector3d(isect.IPoint);
2251 
2252 	if(!(qualityFlags & Q_SHADOW))
2253 		// no shadow
2254 		return;
2255 
2256 	// If the object is opaque there's no need to go any further. [DB 8/94]
2257 	if(Test_Flag(isect.Object, OPAQUE_FLAG))
2258 	{
2259 		// full shadow
2260 		colour.clear();
2261 		return;
2262 	}
2263 
2264 	// Get the normal to the surface
2265 	isect.Object->Normal(*raw_Normal, &isect, threadData);
2266 
2267 	// I added this to flip the normal if the object is inverted (for CSG).
2268 	// However, I subsequently commented it out for speed reasons - it doesn't
2269 	// make a difference (no pun intended). The preexisting flip code below
2270 	// produces a similar (though more extensive) result. [NK]
2271 	//
2272 	// Actually, we should keep this code to guarantee that normaldirection
2273 	// is set properly. [NK]
2274 	if(Test_Flag(isect.Object, INVERTED_FLAG))
2275 		raw_Normal = -raw_Normal;
2276 
2277 	// If the surface normal points away, flip its direction.
2278 	normaldirection = dot(raw_Normal, Vector3d(lightsourceray.Direction));
2279 	if(normaldirection > 0.0)
2280 		raw_Normal = -raw_Normal;
2281 
2282 	Assign_Vector(isect.INormal, *raw_Normal);
2283 	// and save to intersection -hdf-
2284 	Assign_Vector(isect.PNormal, *raw_Normal);
2285 
2286 	if(Test_Flag(isect.Object, UV_FLAG))
2287 	{
2288 		// get the UV vect of the intersection
2289 		isect.Object->UVCoord(*uv_Coords, &isect, threadData);
2290 		// save the normal and UV coords into Intersection
2291 		Assign_UV_Vect(isect.Iuv, *uv_Coords);
2292 	}
2293 
2294 	// now switch to UV mapping if we need to
2295 	if(Test_Flag(isect.Object, UV_FLAG))
2296 	{
2297 		ipoint[X] = uv_Coords[U];
2298 		ipoint[Y] = uv_Coords[V];
2299 		ipoint[Z] = 0;
2300 	}
2301 
2302 	// NB the 3.6 code doesn't set the light cache's Tested flags to false after incrementing the level.
2303 	if (++lightColorCacheIndex >= lightColorCache.size())
2304 	{
2305 		lightColorCache.resize(lightColorCacheIndex + 10);
2306 		for (LightColorCacheListList::iterator it = lightColorCache.begin() + lightColorCacheIndex; it != lightColorCache.end(); it++)
2307 			it->resize(lightColorCache[0].size());
2308 	}
2309 
2310 	bool isMultiTextured = Test_Flag(isect.Object, MULTITEXTURE_FLAG) ||
2311 	                       ((isect.Object->Texture == NULL) && Test_Flag(isect.Object, CUTAWAY_TEXTURES_FLAG));
2312 
2313 	// get textures and weights
2314 	if(isMultiTextured == true)
2315 	{
2316 		isect.Object->Determine_Textures(&isect, normaldirection > 0.0, wtextures, threadData);
2317 	}
2318 	else if(isect.Object->Texture != NULL)
2319 	{
2320 		if((normaldirection > 0.0) && (isect.Object->Interior_Texture != NULL))
2321 			wtextures.push_back(WeightedTexture(1.0, isect.Object->Interior_Texture)); /* Chris Huff: Interior Texture patch */
2322 		else
2323 			wtextures.push_back(WeightedTexture(1.0, isect.Object->Texture));
2324 	}
2325 	else
2326 	{
2327 		// don't need to do anything as the texture list will be empty.
2328 		// TODO: could we perform these tests earlier ? [cjc]
2329 		lightColorCacheIndex--;
2330 		return;
2331 	}
2332 
2333 	temp_Colour.clear();
2334 
2335 	for(WeightedTextureVector::iterator i(wtextures.begin()); i != wtextures.end(); i++)
2336 	{
2337 		TextureVector warps(texturePool);
2338 		assert(warps->empty()); // verify that the TextureVector pulled from the pool is in a cleaned-up condition
2339 
2340 		// If the contribution of this texture is negligible, skip ahead.
2341 		if((i->weight < ticket.adcBailout) || (i->texture == NULL))
2342 			continue;
2343 
2344 		ComputeOneTextureColour(fc1, i->texture, *warps, ipoint, raw_Normal, lightsourceray, 0.0, isect, true, false, ticket);
2345 
2346 		temp_Colour += i->weight * fc1;
2347 	}
2348 
2349 	lightColorCacheIndex--;
2350 
2351 	if(fabs(temp_Colour.filter()) + fabs(temp_Colour.transm()) < ticket.adcBailout)
2352 	{
2353 		// close enough to full shadow - bail out to avoid media computations
2354 		colour.clear();
2355 		return;
2356 	}
2357 
2358 #if MEDIA_AFTER_TEXTURE_INTERPOLATION
2359 	// [CLi] moved this here from Trace::ComputeShadowTexture() and Trace::ComputeLightedTexture(), respectively,
2360 	// to avoid media to be computed twice when dealing with averaged textures.
2361 	// TODO - For photon rays we're still potentially doing double work on media.
2362 	// TODO - For shadow rays we're still potentially doing double work on distance-based attenuation.
2363 	// Calculate participating media effects.
2364 	if((qualityFlags & Q_VOLUME) && (!lightsourceray.GetInteriors().empty()) && (lightsourceray.IsHollowRay() == true))
2365 		media.ComputeMedia(lightsourceray.GetInteriors(), lightsourceray, isect, temp_Colour, ticket);
2366 #endif
2367 
2368 	colour *= temp_Colour.rgbTransm();
2369 
2370 	// Get atmospheric attenuation.
2371 	ComputeShadowMedia(lightsourceray, isect, colour, (lightsource.Media_Interaction) && (lightsource.Media_Attenuation), ticket);
2372 }
2373 
2374 void Trace::ComputeDiffuseColour(const FINISH *finish, const Ray& lightsourceray, const Vector3d& layer_normal, RGBColour& colour, const RGBColour& light_colour,
2375                                  const RGBColour& layer_pigment_colour, double attenuation, bool backside)
2376 {
2377 	double cos_angle_of_incidence, intensity;
2378 	double diffuse = (backside? finish->DiffuseBack : finish->Diffuse);
2379 
2380 	if (diffuse <= 0.0)
2381 		return;
2382 
2383 	cos_angle_of_incidence = dot(layer_normal, Vector3d(lightsourceray.Direction));
2384 
2385 	// Brilliance is likely to be 1.0 (default value)
2386 	if(finish->Brilliance != 1.0)
2387 		intensity = pow(fabs(cos_angle_of_incidence), (double) finish->Brilliance);
2388 	else
2389 		intensity = fabs(cos_angle_of_incidence);
2390 
2391 	intensity *= diffuse * attenuation;
2392 
2393 	if(finish->Crand > 0.0)
2394 		intensity -= POV_rand(crandRandomNumberGenerator) * finish->Crand;
2395 
2396 	colour += intensity * layer_pigment_colour * light_colour;
2397 }
2398 
2399 void Trace::ComputeIridColour(const FINISH *finish, const Vector3d& lightsource, const Vector3d& eye, const Vector3d& layer_normal, const Vector3d& ipoint, RGBColour& colour)
2400 {
2401 	double rwl, gwl, bwl;
2402 	double cos_angle_of_incidence_light, cos_angle_of_incidence_eye, interference;
2403 	double film_thickness;
2404 	double noise;
2405 	TURB turb;
2406 
2407 	film_thickness = finish->Irid_Film_Thickness;
2408 
2409 	if(finish->Irid_Turb != 0)
2410 	{
2411 		// Uses hardcoded octaves, lambda, omega
2412 		turb.Omega=0.5;
2413 		turb.Lambda=2.0;
2414 		turb.Octaves=5;
2415 
2416 		// Turbulence() returns a value in [0..1]; after the remapping below, the film thickness is scaled by a factor in the range 1.0 +/- finish->Irid_Turb
2417 		noise = Turbulence(*ipoint, &turb, sceneData->noiseGenerator);
2418 		noise = 2.0 * noise - 1.0;
2419 		noise = 1.0 + noise * finish->Irid_Turb;
2420 		film_thickness *= noise;
2421 	}
2422 
2423 	// Approximate dominant wavelengths of primary hues.
2424 	// Source: 3D Computer Graphics by John Vince (Addison-Wesley)
2425 	// These are initialized in parse.c (Parse_Frame)
2426 	// and are user-adjustable with the irid_wavelength keyword.
2427 	// Red = 700 nm  Grn = 520 nm Blu = 480 nm
2428 	// Divided by 1000 gives: rwl = 0.70;  gwl = 0.52;  bwl = 0.48;
2429 	//
2430 	// However... I originally "guessed" at the values and came up with
2431 	// the following, which I'm using as the defaults, since it seems
2432 	// to work better:  rwl = 0.25;  gwl = 0.18;  bwl = 0.14;
2433 
2434 	// Could avoid these assignments if we want to
2435 	rwl = sceneData->iridWavelengths.red();
2436 	gwl = sceneData->iridWavelengths.green();
2437 	bwl = sceneData->iridWavelengths.blue();
2438 
2439 	// NOTE: Shouldn't we compute Cos_Angle_Of_Incidence just once?
2440 	cos_angle_of_incidence_light = std::abs(dot(layer_normal, lightsource));
2441 	cos_angle_of_incidence_eye   = std::abs(dot(layer_normal, eye));
2442 
2443 	// Calculate phase offset.
2444 	interference = 2.0 * M_PI * film_thickness * (cos_angle_of_incidence_light + cos_angle_of_incidence_eye);
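	// (Explanatory comment only.) This is the classic thin-film phase difference
	//
	//   delta = 2 * pi * d * (cos(theta_light) + cos(theta_eye))
	//
	// for a film of (optionally noise-modulated) thickness d; each colour channel below is then scaled
	// by 1 + Irid * cos(delta / lambda_channel), with lambda taken from sceneData->iridWavelengths.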
2445 
2446 //	intensity = cos_angle_of_incidence * finish->Irid; // TODO CLARIFY - [CLi] note that this effectively gets finish->Irid squared; is this intentional?
2447 
2448 	// Modify color by phase offset for each wavelength.
2449 	colour *= RGBColour(1.0 + finish->Irid * cos(interference / rwl),
2450 	                    1.0 + finish->Irid * cos(interference / gwl),
2451 	                    1.0 + finish->Irid * cos(interference / bwl));
2452 }
2453 
2454 void Trace::ComputePhongColour(const FINISH *finish, const Ray& lightsourceray, const Vector3d& eye, const Vector3d& layer_normal, RGBColour& colour, const RGBColour& light_colour,
2455                                const RGBColour& layer_pigment_colour)
2456 {
2457 	double cos_angle_of_incidence, intensity;
2458 	Vector3d reflect_direction;
2459 	double ndotl, x, f;
2460 	RGBColour cs;
2461 
2462 	cos_angle_of_incidence = -2.0 * dot(eye, layer_normal);
2463 
2464 	reflect_direction = eye + cos_angle_of_incidence * layer_normal;
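	// (Explanatory comment only.) With E the eye direction and N the layer normal, the two lines above
	// compute the mirror reflection R = E - 2 * dot(E, N) * N; the highlight term added below is then
	// Phong * max(dot(R, L), 0)^Phong_Size, optionally tinted towards the pigment colour for metallic
	// finishes.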
2465 
2466 	cos_angle_of_incidence = dot(reflect_direction, Vector3d(lightsourceray.Direction));
2467 
2468 	if(cos_angle_of_incidence > 0.0)
2469 	{
2470 		if((finish->Phong_Size < 60) || (cos_angle_of_incidence > 0.0008)) // rgs
2471 			intensity = finish->Phong * pow(cos_angle_of_incidence, (double)finish->Phong_Size);
2472 		else
2473 			intensity = 0.0; // ad
2474 
2475 		if(finish->Metallic > 0.0)
2476 		{
2477 			// Calculate the reflected color by interpolating between
2478 			// the light source color and the surface color according
2479 			// to the (empirical) Fresnel reflectivity function. [DB 9/94]
2480 
2481 			ndotl = dot(layer_normal, Vector3d(lightsourceray.Direction));
2482 
2483 			x = fabs(acos(ndotl)) / M_PI_2;
2484 
2485 			f = 0.014567225 / Sqr(x - 1.12) - 0.011612903;
2486 
2487 			f = min(1.0, max(0.0, f));
2488 			cs = light_colour * ( RGBColour(1.0) + (finish->Metallic * (1.0 - f)) * (layer_pigment_colour - RGBColour(1.0)) );
2489 
2490 			colour += intensity * cs;
2491 		}
2492 		else
2493 			colour += intensity * light_colour;
2494 	}
2495 }
2496 
2497 void Trace::ComputeSpecularColour(const FINISH *finish, const Ray& lightsourceray, const Vector3d& eye, const Vector3d& layer_normal, RGBColour& colour,
2498                                   const RGBColour& light_colour, const RGBColour& layer_pigment_colour)
2499 {
2500 	double cos_angle_of_incidence, intensity, halfway_length;
2501 	Vector3d halfway;
2502 	double ndotl, x, f;
2503 	RGBColour cs;
2504 
2505 	VHalf(*halfway, *eye, lightsourceray.Direction);
2506 
2507 	halfway_length = halfway.length();
2508 
2509 	if(halfway_length > 0.0)
2510 	{
2511 		cos_angle_of_incidence = dot(halfway, layer_normal) / halfway_length;
2512 
2513 		if(cos_angle_of_incidence > 0.0)
2514 		{
2515 			intensity = finish->Specular * pow(cos_angle_of_incidence, (double)finish->Roughness);
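			// (Explanatory comment only.) This is a Blinn-style highlight: the halfway vector between
			// the (reversed) eye direction and the light direction comes from VHalf() above, dividing
			// dot(halfway, layer_normal) by halfway_length normalises it, and the exponent stored in
			// finish->Roughness shapes the falloff of the highlight.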
2516 
2517 			if(finish->Metallic > 0.0)
2518 			{
2519 				// Calculate the reflected color by interpolating between
2520 				// the light source color and the surface color according
2521 				// to the (empirical) Fresnel reflectivity function. [DB 9/94]
2522 
2523 				ndotl = dot(layer_normal, Vector3d(lightsourceray.Direction));
2524 
2525 				x = fabs(acos(ndotl)) / M_PI_2;
2526 
2527 				f = 0.014567225 / Sqr(x - 1.12) - 0.011612903;
2528 
2529 				f = min(1.0, max(0.0, f));
2530 				cs = light_colour * ( RGBColour(1.0) + (finish->Metallic * (1.0 - f)) * (layer_pigment_colour - RGBColour(1.0)) );
2531 
2532 				colour += intensity * cs;
2533 			}
2534 			else
2535 				colour += intensity * light_colour;
2536 		}
2537 	}
2538 }
2539 
2540 void Trace::ComputeRelativeIOR(const Ray& ray, const Interior* interior, double& ior)
2541 {
2542 	// Get ratio of iors depending on the interiors the ray is traversing.
2543 	if (interior == NULL)
2544 	{
2545 		// TODO VERIFY - is this correct?
2546 		ior = 1.0;
2547 	}
2548 	else
2549 	{
2550 		if(ray.GetInteriors().empty())
2551 			// The ray is entering from the atmosphere.
2552 			ior = interior->IOR / sceneData->atmosphereIOR;
2553 		else
2554 		{
2555 			// The ray is currently inside an object.
2556 			if(ray.IsInterior(interior) == true)
2557 			{
2558 				if(ray.GetInteriors().size() == 1)
2559 					// The ray is leaving into the atmosphere.
2560 					ior = sceneData->atmosphereIOR / interior->IOR;
2561 				else
2562 					// The ray is leaving into another object.
2563 					ior = ray.GetInteriors().back()->IOR / interior->IOR;
2564 			}
2565 			else
2566 				// The ray is entering a new object.
2567 				ior = interior->IOR / ray.GetInteriors().back()->IOR;
2568 		}
2569 	}
2570 }
2571 
2572 void Trace::ComputeReflectivity(double& weight, RGBColour& reflectivity, const RGBColour& reflection_max, const RGBColour& reflection_min,
2573                                 int reflection_type, double reflection_falloff, double cos_angle, const Ray& ray, const Interior* interior)
2574 {
2575 	double temp_Weight_Min, temp_Weight_Max;
2576 	double reflection_Frac;
2577 	double g, f;
2578 	double ior;
2579 
2580 	if(reflection_type == 1)
2581 		ComputeRelativeIOR(ray, interior, ior);
2582 
2583 	switch(reflection_type)
2584 	{
2585 		case 0: // Standard reflection
2586 		{
2587 			temp_Weight_Max = max3(reflection_max.red(), reflection_max.green(), reflection_max.blue());
2588 			temp_Weight_Min = max3(reflection_min.red(), reflection_min.green(), reflection_min.blue());
2589 			weight = weight * max(temp_Weight_Max, temp_Weight_Min);
2590 
2591 			if(fabs(reflection_falloff - 1.0) > EPSILON)
2592 				reflection_Frac = pow(1.0 - cos_angle, reflection_falloff);
2593 			else
2594 				reflection_Frac = 1.0 - cos_angle;
2595 
2596 			if(fabs(reflection_Frac) < EPSILON)
2597 				reflectivity = reflection_min;
2598 			else if (fabs(reflection_Frac - 1.0)<EPSILON)
2599 				reflectivity = reflection_max;
2600 			else
2601 				reflectivity = reflection_Frac * reflection_max + (1.0 - reflection_Frac) * reflection_min;
2602 			break;
2603 		}
2604 		case 1:  // Fresnel
2605 		{
2606 			// NB: This is a special case of the Fresnel formula, presuming that incident light is unpolarized.
2607 			//
2608 			// The implemented formula is as follows:
2609 			//
2610 			//      1     ( g - cos Ti )^2           ( cos Ti (g + cos Ti) - 1 )^2
2611 			// R = --- * ------------------ * ( 1 + ------------------------------- )
2612 			//      2     ( g + cos Ti )^2           ( cos Ti (g - cos Ti) + 1 )^2
2613 			//
2614 			// where
2615 			//
2616 			//        /---------------------------
2617 			// g = -\/ (n1/n2)^2 + (cos Ti)^2 - 1
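			// Sanity check with hypothetical values (explanatory comment only): for a relative IOR of 1.5
			// and normal incidence, cos Ti = 1.0:
			//   g = sqrt(1.5^2 + 1.0^2 - 1) = 1.5
			//   R = 0.5 * ((1.5 - 1)/(1.5 + 1))^2 * (1 + ((1*(1.5 + 1) - 1)/(1*(1.5 - 1) + 1))^2)
			//     = 0.5 * 0.04 * 2 = 0.04
			// i.e. the familiar ~4% reflectance of glass against air at normal incidence (the same value
			// is obtained for the reciprocal ratio 1/1.5).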
2618 
2619 			// Christoph's tweak to work around possible negative argument in sqrt
2620 			double sqx = Sqr(ior) + Sqr(cos_angle) - 1.0;
2621 
2622 			if(sqx > 0.0)
2623 			{
2624 				g = sqrt(sqx);
2625 				f = 0.5 * (Sqr(g - cos_angle) / Sqr(g + cos_angle));
2626 				f = f * (1.0 + Sqr(cos_angle * (g + cos_angle) - 1.0) / Sqr(cos_angle * (g - cos_angle) + 1.0));
2627 
2628 				f = min(1.0, max(0.0, f));
2629 				reflectivity = f * reflection_max + (1.0 - f) * reflection_min;
2630 			}
2631 			else
2632 				reflectivity = reflection_max;
2633 
2634 			weight = weight * max3(reflectivity.red(), reflectivity.green(), reflectivity.blue());
2635 			break;
2636 		}
2637 		default:
2638 			throw POV_EXCEPTION_STRING("Illegal reflection_type."); // TODO FIXME - wrong place to report this [trf]
2639 	}
2640 }
2641 
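// [Illustrative sketch, not part of the original code] Stand-alone version of the unpolarized
// Fresnel reflectance used in case 1 above (hypothetical helper, assumes <cmath> and <algorithm>).
// For normal incidence on glass (relativeIOR = 1.5, cosTi = 1) it yields 0.04.
#if 0
static double FresnelReflectance(double relativeIOR, double cosTi)
{
	double sqx = relativeIOR * relativeIOR + cosTi * cosTi - 1.0;
	if (sqx <= 0.0)
		return 1.0; // total internal reflection
	double g = sqrt(sqx);
	double f = 0.5 * ((g - cosTi) * (g - cosTi)) / ((g + cosTi) * (g + cosTi));
	f *= 1.0 + ((cosTi * (g + cosTi) - 1.0) * (cosTi * (g + cosTi) - 1.0)) /
	           ((cosTi * (g - cosTi) + 1.0) * (cosTi * (g - cosTi) + 1.0));
	return std::min(1.0, std::max(0.0, f));
}
#endif
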
2642 void Trace::ComputeOneWhiteLightRay(const LightSource &lightsource, double& lightsourcedepth, Ray& lightsourceray, const Vector3d& ipoint, const Vector3d& jitter)
2643 {
2644 	Vector3d center = Vector3d(lightsource.Center) + jitter;
2645 	double a;
2646 	Vector3d v1;
2647 
2648 	// Get the light ray starting at the intersection point and pointing towards the light source.
2649 	Assign_Vector(lightsourceray.Origin, *ipoint);
2650 	// NK 1998 parallel beams for cylinder source - added 'if'
2651 	if(lightsource.Light_Type == CYLINDER_SOURCE)
2652 	{
2653 		double distToPointsAt;
2654 		Vector3d toLightCtr;
2655 
2656 		// use new code to get ray direction - use center - points_at for direction
2657 		VSub(lightsourceray.Direction, *center, lightsource.Points_At);
2658 
2659 		// get vector pointing to center of light
2660 		toLightCtr = center - ipoint;
2661 
2662 		// project light_ctr-intersect_point onto light_ctr-point_at
2663 		distToPointsAt = Vector3d(lightsourceray.Direction).length();
2664 		lightsourcedepth = dot(toLightCtr, Vector3d(lightsourceray.Direction));
2665 
2666 		// length of the shadow ray is the length of the projection
2667 		lightsourcedepth /= distToPointsAt;
2668 		VNormalizeEq(lightsourceray.Direction);
2669 	}
2670 	else
2671 	{
2672 		// NK 1998 parallel beams for cylinder source - the stuff in this 'else'
2673 		// block used to be all that there was... the first half of the if
2674 		// statement (before the 'else') is new
2675 		VSub(lightsourceray.Direction, *center, *ipoint);
2676 		lightsourcedepth = Vector3d(lightsourceray.Direction).length();
2677 		VInverseScaleEq(lightsourceray.Direction, lightsourcedepth);
2678 	}
2679 
2680 	// Attenuate light source color.
2681 	// Attenuation = Attenuate_Light(lightsource, lightsourceray, *Light_Source_Depth);
2682 	// Recalculate for Parallel light sources
2683 	if(lightsource.Parallel)
2684 	{
2685 		if(lightsource.Area_Light)
2686 		{
2687 			v1 = (center - Vector3d(lightsource.Points_At)).normalized();
2688 			a = dot(v1, Vector3d(lightsourceray.Direction));
2689 			lightsourcedepth *= a;
2690 			Assign_Vector(lightsourceray.Direction, *v1);
2691 		}
2692 		else
2693 		{
2694 			a = dot(Vector3d(lightsource.Direction), Vector3d(lightsourceray.Direction));
2695 			lightsourcedepth *= (-a);
2696 			Assign_Vector(lightsourceray.Direction, lightsource.Direction);
2697 			VScaleEq(lightsourceray.Direction, -1.0);
2698 		}
2699 	}
2700 }
2701 
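// [Illustrative sketch, not part of the original code] For the cylinder-light branch above, the
// depth used for attenuation is the scalar projection of (centre - ipoint) onto the light's axis
// (centre - points_at). A hypothetical stand-alone helper, reusing this file's Vector3d and dot():
#if 0
static double CylinderLightDepth(const Vector3d& centre, const Vector3d& pointsAt, const Vector3d& ipoint)
{
	Vector3d axis = centre - pointsAt;           // beam direction (unnormalized)
	Vector3d toCentre = centre - ipoint;         // from the surface point to the light centre
	return dot(toCentre, axis) / axis.length();  // length of the projection onto the axis
}
#endif
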
2702 void Trace::ComputeSky(const Ray& ray, Colour& colour, TraceTicket& ticket)
2703 {
2704 	if (sceneData->EffectiveLanguageVersion() < 370)
2705 	{
2706 		// this gives the same results regarding sky sphere filter as how v3.6 did it
2707 
2708 		int i;
2709 		double att, trans;
2710 		RGBColour col;
2711 		Colour col_Temp, filterc;
2712 		Vector3d p;
2713 
2714 		if (ticket.alphaBackground)
2715 		{
2716 			// If rendering with alpha channel, just return full transparency.
2717 			// (As we're working with associated alpha internally, the respective color must be black here.)
2718 			colour = Colour(0.0, 0.0, 0.0, 0.0, 1.0);
2719 			return;
2720 		}
2721 
2722 		colour = sceneData->backgroundColour;
2723 
2724 		if((sceneData->skysphere == NULL) || (sceneData->skysphere->Pigments == NULL))
2725 			return;
2726 
2727 		col.clear();
2728 		filterc = Colour(1.0, 1.0, 1.0, 1.0, 1.0);
2729 		trans = 1.0;
2730 
2731 		// Transform point on unit sphere.
2732 		if(sceneData->skysphere->Trans != NULL)
2733 			MInvTransPoint(*p, ray.Direction, sceneData->skysphere->Trans);
2734 		else
2735 			p = Vector3d(ray.Direction);
2736 
2737 		for(i = sceneData->skysphere->Count - 1; i >= 0; i--)
2738 		{
2739 			// Compute sky colour from colour map.
2740 
2741 			// NK 1998 - added NULL as final parameter
2742 			Compute_Pigment(col_Temp, sceneData->skysphere->Pigments[i], *p, NULL, NULL, threadData);
2743 
2744 			att = trans * (1.0 - col_Temp.filter() - col_Temp.transm());
2745 
2746 			col += RGBColour(col_Temp) * att;
2747 
2748 			filterc *= col_Temp;
2749 
2750 			trans = fabs(filterc.filter()) + fabs(filterc.transm());
2751 		}
2752 
2753 		col *= sceneData->skysphere->Emission;
2754 
2755 		colour.red()    = col.red()    + colour.red()   * (filterc.red()   * filterc.filter() + filterc.transm());
2756 		colour.green()  = col.green()  + colour.green() * (filterc.green() * filterc.filter() + filterc.transm());
2757 		colour.blue()   = col.blue()   + colour.blue()  * (filterc.blue()  * filterc.filter() + filterc.transm());
2758 		colour.filter() = colour.filter() * filterc.filter();
2759 		colour.transm() = colour.transm() * filterc.transm();
2760 	}
2761 	else // i.e. sceneData->languageVersion >= 370
2762 	{
2763 		// this gives the same results regarding sky sphere filter as a layered-texture genuine sphere
2764 
2765 		int i;
2766 		RGBColour filCol(1.0);
2767 		double att;
2768 		RGBColour col;
2769 		Colour col_Temp;
2770 		Vector3d p;
2771 
2772 		col.clear();
2773 
2774 		if((sceneData->skysphere != NULL) && (sceneData->skysphere->Pigments != NULL))
2775 		{
2776 			// Transform point on unit sphere.
2777 			if(sceneData->skysphere->Trans != NULL)
2778 				MInvTransPoint(*p, ray.Direction, sceneData->skysphere->Trans);
2779 			else
2780 				p = Vector3d(ray.Direction);
2781 
2782 			for(i = sceneData->skysphere->Count - 1; i >= 0; i--)
2783 			{
2784 				// Compute sky colour from colour map.
2785 				Compute_Pigment(col_Temp, sceneData->skysphere->Pigments[i], *p, NULL, NULL, threadData);
2786 
2787 				att = col_Temp.opacity();
2788 
2789 				col += RGBColour(col_Temp) * att * filCol * sceneData->skysphere->Emission;
2790 				filCol *= col_Temp.rgbTransm();
2791 			}
2792 		}
2793 
2794 		// apply background as if it was another sky sphere with uniform pigment
2795 		col_Temp = sceneData->backgroundColour;
2796 		if (!ticket.alphaBackground)
2797 		{
2798 			// if rendering without alpha channel, ignore filter and transmit of background color.
2799 			col_Temp.filter() = 0.0;
2800 			col_Temp.transm() = 0.0;
2801 		}
2802 
2803 		att = col_Temp.opacity();
2804 
2805 		col += RGBColour(col_Temp) * att * filCol;
2806 		filCol *= col_Temp.rgbTransm();
2807 
2808 		colour.red()    = col.red();
2809 		colour.green()  = col.green();
2810 		colour.blue()   = col.blue();
2811 		colour.filter() = 0.0;
2812 		colour.transm() = min(1.0f, std::fabs(filCol.greyscale()));
2813 	}
2814 }
2815 
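// [Illustrative sketch, not part of the original code] The v3.7 branch above composites the sky
// sphere's pigment layers: each layer adds its colour weighted by its own opacity and by the
// combined transmittance of the layers already processed, then the background is treated as one
// more, fully distant layer. A scalar stand-in (hypothetical helper) that glosses over the
// separate filter/transmit channels by treating transmittance as 1 - opacity:
#if 0
static double CompositeSkyLayers(const double* layerColour, const double* layerOpacity, int count, double background)
{
	double result = 0.0;
	double transmittance = 1.0; // product of the transmittances of the layers processed so far
	for (int i = count - 1; i >= 0; i--)
	{
		result += layerColour[i] * layerOpacity[i] * transmittance;
		transmittance *= 1.0 - layerOpacity[i];
	}
	result += background * transmittance;
	return result;
}
#endif
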
2816 void Trace::ComputeFog(const Ray& ray, const Intersection& isect, Colour& colour)
2817 {
2818 	double att, att_inv, width;
2819 	Colour col_fog;
2820 	RGBColour sum_att; // total attenuation.
2821 	RGBColour sum_col; // total color.
2822 
2823 	// Nothing to do if no fog is defined.
2824 	if(sceneData->fog == NULL)
2825 		return;
2826 
2827 	// Init total attenuation and total color.
2828 	sum_att = RGBColour(1.0, 1.0, 1.0);
2829 	sum_col = RGBColour(0.0, 0.0, 0.0);
2830 
2831 	// Loop over all fogs.
2832 	for(FOG *fog = sceneData->fog; fog != NULL; fog = fog->Next)
2833 	{
2834 		// Don't care about fogs with zero distance.
2835 		if(fabs(fog->Distance) > EPSILON)
2836 		{
2837 			width = isect.Depth;
2838 
2839 			switch(fog->Type)
2840 			{
2841 				case GROUND_MIST:
2842 					att = ComputeGroundFogColour(ray, 0.0, width, fog, col_fog);
2843 					break;
2844 				default:
2845 					att = ComputeConstantFogColour(ray, 0.0, width, fog, col_fog);
2846 					break;
2847 			}
2848 
2849 			// Check for minimum transmittance.
2850 			if(att < col_fog.transm())
2851 				att = col_fog.transm();
2852 
2853 			// Get attenuation sum due to filtered/unfiltered translucency.
2854 			// [CLi] removed computation of sum_att.filter() and sum_att.transm(), as they were discarded anyway
2855 			sum_att.red()    *= att * ((1.0 - col_fog.filter()) + col_fog.filter() * col_fog.red());
2856 			sum_att.green()  *= att * ((1.0 - col_fog.filter()) + col_fog.filter() * col_fog.green());
2857 			sum_att.blue()   *= att * ((1.0 - col_fog.filter()) + col_fog.filter() * col_fog.blue());
2858 
2859 			if(!ray.IsShadowTestRay())
2860 			{
2861 				att_inv = 1.0 - att;
2862 				sum_col += att_inv * RGBColour(col_fog);
2863 			}
2864 		}
2865 	}
2866 
2867 	// Add light coming from background.
2868 	colour.red()   = sum_col.red()   + sum_att.red()   * colour.red();
2869 	colour.green() = sum_col.green() + sum_att.green() * colour.green();
2870 	colour.blue()  = sum_col.blue()  + sum_att.blue()  * colour.blue();
2871 	colour.transm() *= sum_att.greyscale();
2872 }
2873 
2874 double Trace::ComputeConstantFogColour(const Ray &ray, double depth, double width, const FOG *fog, Colour& colour)
2875 {
2876 	Vector3d p;
2877 	double k;
2878 
2879 	if(fog->Turb != NULL)
2880 	{
2881 		depth += width / 2.0;
2882 
2883 		VEvaluateRay(*p, ray.Origin, depth, ray.Direction);
2884 		VEvaluateEq(*p, fog->Turb->Turbulence);
2885 
2886 		// The further away the less influence turbulence has.
2887 		k = exp(-width / fog->Distance);
2888 
2889 		width *= (1.0 - k * min(1.0, Turbulence(*p, fog->Turb, sceneData->noiseGenerator) * fog->Turb_Depth));
2890 	}
2891 
2892 	colour = fog->colour;
2893 
2894 	return (exp(-width / fog->Distance));
2895 }
2896 
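// [Illustrative sketch, not part of the original code] The constant-fog transmittance above is
// exp(-width/Distance); turbulence shortens the effective width, and its influence is itself
// damped for long paths via k = exp(-width/Distance). Hypothetical helper, assumes <cmath>:
#if 0
static double ConstantFogTransmittance(double width, double fogDistance, double turbulence, double turbDepth)
{
	double k = exp(-width / fogDistance);
	double turb = turbulence * turbDepth;
	if (turb > 1.0)
		turb = 1.0;
	double effectiveWidth = width * (1.0 - k * turb);
	return exp(-effectiveWidth / fogDistance); // e.g. width == fogDistance, no turbulence -> ~0.37
}
#endif
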
2897 /*****************************************************************************
2898 *   Here is an ascii graph of the ground fog density, it has a maximum
2899 *   density of 1.0 at Y <= 0, and approaches 0.0 as Y goes up:
2900 *
2901 *   ***********************************
2902 *        |           |            |    ****
2903 *        |           |            |        ***
2904 *        |           |            |           ***
2905 *        |           |            |            | ****
2906 *        |           |            |            |     *****
2907 *        |           |            |            |          *******
2908 *   -----+-----------+------------+------------+-----------+-----
2909 *       Y=-2        Y=-1         Y=0          Y=1         Y=2
2910 *
2911 *   ground fog density is 1 / (Y*Y+1) for Y >= 0 and equals 1.0 for Y <= 0.
2912 *   (It behaves like regular fog for Y <= 0.)
2913 *
2914 *   The integral of the density is atan(Y) (for Y >= 0).
2915 ******************************************************************************/
2916 
2917 double Trace::ComputeGroundFogColour(const Ray& ray, double depth, double width, const FOG *fog, Colour& colour)
2918 {
2919 	double fog_density, delta;
2920 	double start, end;
2921 	double y1, y2, k;
2922 	Vector3d p, p1, p2;
2923 
2924 	// Get start point.
2925 	VEvaluateRay(*p1, ray.Origin, depth, ray.Direction);
2926 
2927 	// Get end point.
2928 	p2 = p1 + Vector3d(ray.Direction) * width;
2929 
2930 	// Could perform a transformation here to translate the start and end
2931 	// points into ground-fog space.
2932 	y1 = dot(p1, Vector3d(fog->Up));
2933 	y2 = dot(p2, Vector3d(fog->Up));
2934 
2935 	start = (y1 - fog->Offset) / fog->Alt;
2936 	end   = (y2 - fog->Offset) / fog->Alt;
2937 
2938 	// Get integral along y-axis from start to end.
2939 	if(start <= 0.0)
2940 	{
2941 		if(end <= 0.0)
2942 			fog_density = 1.0;
2943 		else
2944 			fog_density = (atan(end) - start) / (end - start);
2945 	}
2946 	else
2947 	{
2948 		if(end <= 0.0)
2949 			fog_density = (atan(start) - end) / (start - end);
2950 		else
2951 		{
2952 			delta = start - end;
2953 
2954 			if(fabs(delta) > EPSILON)
2955 				fog_density = (atan(start) - atan(end)) / delta;
2956 			else
2957 				fog_density = 1.0 / (Sqr(start) + 1.0);
2958 		}
2959 	}
2960 
2961 	// Apply turbulence.
2962 	if (fog->Turb != NULL)
2963 	{
2964 		p = (p1 + p2) * 0.5;
2965 		VEvaluateEq(*p, fog->Turb->Turbulence);
2966 
2967 		// The further away the less influence turbulence has.
2968 		k = exp(-width / fog->Distance);
2969 		width *= (1.0 - k * min(1.0, Turbulence(*p, fog->Turb, sceneData->noiseGenerator) * fog->Turb_Depth));
2970 	}
2971 
2972 	colour = fog->colour;
2973 
2974 	return (exp(-width * fog_density / fog->Distance));
2975 }
2976 
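// [Illustrative sketch, not part of the original code] The average ground-fog density used above:
// density is 1 for y <= 0 and 1/(y^2+1) above, so its antiderivative is y below zero and atan(y)
// above; the average over [start, end] is the difference of the antiderivatives divided by the
// interval length. Hypothetical helper, assumes <cmath>; start/end are the normalized heights:
#if 0
static double AverageGroundFogDensity(double start, double end)
{
	if (start > end)
	{
		double t = start; start = end; end = t; // integrate over an increasing interval
	}
	if (end <= 0.0)
		return 1.0;                             // entirely below the full-density level
	if (fabs(end - start) < 1.0e-10)
		return 1.0 / (start * start + 1.0);     // degenerate interval: point density
	double lower = (start <= 0.0) ? start : atan(start);
	double upper = atan(end);
	return (upper - lower) / (end - start);
}
#endif
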
2977 void Trace::ComputeShadowMedia(Ray& light_source_ray, Intersection& isect, RGBColour& resultcolour, bool media_attenuation_and_interaction, TraceTicket& ticket)
2978 {
2979 	if((resultcolour.red() < EPSILON) && (resultcolour.green() < EPSILON) && (resultcolour.blue() < EPSILON))
2980 		return;
2981 
2982 	// Calculate participating media effects.
2983 	if(media_attenuation_and_interaction && (qualityFlags & Q_VOLUME) && ((light_source_ray.IsHollowRay() == true) || (isect.Object != NULL && isect.Object->interior != NULL)))
2984 	{
2985 		// we're using general-purpose media and fog handling code, which insists on computing a transmissive component (for alpha channel)
2986 		Colour tmpCol = Colour(resultcolour);
2987 
2988 		media.ComputeMedia(sceneData->atmosphere, light_source_ray, isect, tmpCol, ticket);
2989 
2990 		if((sceneData->fog != NULL) && (light_source_ray.IsHollowRay() == true) && (light_source_ray.IsPhotonRay() == false))
2991 			ComputeFog(light_source_ray, isect, tmpCol);
2992 
2993 		// discard the transmissive component (alpha channel)
2994 		resultcolour = RGBColour(tmpCol);
2995 	}
2996 
2997 	// If the ray is entering from the atmosphere, or is currently *not* inside an object, add the interior;
2998 	// but if it is currently inside an object, the ray is leaving the current object and the interior is removed.
2999 	if((isect.Object != NULL) && ((light_source_ray.GetInteriors().empty()) || (light_source_ray.RemoveInterior(isect.Object->interior) == false)))
3000 		light_source_ray.AppendInterior(isect.Object->interior);
3001 }
3002 
3003 
3004 
3005 void Trace::ComputeRainbow(const Ray& ray, const Intersection& isect, Colour& colour)
3006 {
3007 	int n;
3008 	double dot1, k, ki, index, x, y, l, angle, fade, f;
3009 	Vector3d Temp;
3010 	Colour Cr, Ct;
3011 
3012 	// Nothing to do if no rainbow is defined.
3013 	if(sceneData->rainbow == NULL)
3014 		return;
3015 
3016 	Ct = Colour(0.0, 0.0, 0.0, 1.0, 1.0);
3017 
3018 	n = 0;
3019 
3020 	for(RAINBOW *Rainbow = sceneData->rainbow; Rainbow != NULL; Rainbow = Rainbow->Next)
3021 	{
3022 		if((Rainbow->Pigment != NULL) && (Rainbow->Distance != 0.0) && (Rainbow->Width != 0.0))
3023 		{
3024 			// Get angle between ray direction and rainbow's up vector.
3025 			x = dot(Vector3d(ray.Direction), Vector3d(Rainbow->Right_Vector));
3026 			y = dot(Vector3d(ray.Direction), Vector3d(Rainbow->Up_Vector));
3027 
3028 			l = Sqr(x) + Sqr(y);
3029 
3030 			if(l > 0.0)
3031 			{
3032 				l = sqrt(l);
3033 				y /= l;
3034 			}
3035 
3036 			angle = fabs(acos(y));
3037 
3038 			if(angle <= Rainbow->Arc_Angle)
3039 			{
3040 				// Get dot product between ray direction and antisolar vector.
3041 				dot1 = dot(Vector3d(ray.Direction), Vector3d(Rainbow->Antisolar_Vector));
3042 
3043 				if(dot1 >= 0.0)
3044 				{
3045 					// Get index ([0;1]) into rainbow's colour map.
3046 					index = (acos(dot1) - Rainbow->Angle) / Rainbow->Width;
3047 
3048 					// Jitter index.
3049 					if(Rainbow->Jitter > 0.0)
3050 						index += (2.0 * randomNumberGenerator() - 1.0) * Rainbow->Jitter;
3051 
3052 					if((index >= 0.0) && (index <= 1.0 - EPSILON))
3053 					{
3054 						// Get colour from rainbow's colour map.
3055 						Temp = Vector3d(index, 0.0, 0.0);
3056 						Compute_Pigment(Cr, Rainbow->Pigment, *Temp, &isect, &ray, threadData);
3057 
3058 						// Get fading value for falloff.
3059 						if((Rainbow->Falloff_Width > 0.0) && (angle > Rainbow->Falloff_Angle))
3060 						{
3061 							fade = (angle - Rainbow->Falloff_Angle) / Rainbow->Falloff_Width;
3062 							fade = (3.0 - 2.0 * fade) * fade * fade;
3063 						}
3064 						else
3065 							fade = 0.0;
3066 
3067 						// Get attenuation factor due to distance.
3068 						k = exp(-isect.Depth / Rainbow->Distance);
3069 
3070 						// Colour's transm value is used as minimum attenuation value.
3071 						k = max(k, fade * (1.0 - Cr.transm()) + Cr.transm());
3072 
3073 						// Now interpolate the colours.
3074 						ki = 1.0 - k;
3075 
3076 						// Attenuate filter value.
3077 						f = Cr.filter() * ki;
3078 
3079 						Ct.red()    += k * colour.red()   * ((1.0 - f) + f * Cr.red())   + ki * Cr.red();
3080 						Ct.green()  += k * colour.green() * ((1.0 - f) + f * Cr.green()) + ki * Cr.green();
3081 						Ct.blue()   += k * colour.blue()  * ((1.0 - f) + f * Cr.blue())  + ki * Cr.blue();
3082 						Ct.filter() *= k * Cr.filter();
3083 						Ct.transm() *= k * Cr.transm();
3084 
3085 						n++;
3086 					}
3087 				}
3088 			}
3089 		}
3090 	}
3091 
3092 	if(n > 0)
3093 	{
3094 		COLC tmp = 1.0 / n;
3095 
3096 		colour.red()   = Ct.red()   * tmp;
3097 		colour.green() = Ct.green() * tmp;
3098 		colour.blue()  = Ct.blue()  * tmp;
3099 
3100 		colour.filter() *= Ct.filter();
3101 		colour.transm() *= Ct.transm();
3102 	}
3103 }
3104 
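// [Illustrative sketch, not part of the original code] The colour-map lookup above maps the angle
// between the viewing direction and the antisolar direction linearly onto [0,1] across the
// rainbow's angular width, starting at its base angle. Hypothetical helper, assumes <cmath>:
#if 0
static double RainbowIndex(double cosToAntisolar, double baseAngle, double width)
{
	double angle = acos(cosToAntisolar); // angle to the antisolar point, in radians
	return (angle - baseAngle) / width;  // only results in [0,1) select a colour-map entry
}
#endif
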
3105 bool Trace::TestShadow(const LightSource &lightsource, double& depth, Ray& light_source_ray, const Vector3d& p, RGBColour& colour, TraceTicket& ticket)
3106 {
3107 	ComputeOneLightRay(lightsource, depth, light_source_ray, p, colour);
3108 
3109 	// There's no need to test for shadows if no light
3110 	// is coming from the light source.
3111 	//
3112 	// Test for PURE zero, because we only want to skip this if we're out
3113 	// of the range of a spot light or cylinder light.  Very dim lights
3114 	// should not be ignored.
3115 
3116 	if((fabs(colour.red()) < EPSILON) && (fabs(colour.green()) < EPSILON) && (fabs(colour.blue()) < EPSILON))
3117 	{
3118 		colour.clear();
3119 		return true;
3120 	}
3121 
3122 	// Test for shadows.
3123 	if((qualityFlags & Q_SHADOW) && ((lightsource.Projected_Through_Object != NULL) || (lightsource.Light_Type != FILL_LIGHT_SOURCE)))
3124 	{
3125 		TraceShadowRay(lightsource, depth, light_source_ray, p, colour, ticket);
3126 
3127 		if((fabs(colour.red()) < EPSILON) && (fabs(colour.green()) < EPSILON) && (fabs(colour.blue()) < EPSILON))
3128 		{
3129 			colour.clear();
3130 			return true;
3131 		}
3132 	}
3133 
3134 	return false;
3135 }
3136 
3137 bool Trace::IsObjectInCSG(const ObjectBase* object, const ObjectBase* parent)
3138 {
3139 	bool found = false;
3140 
3141 	if(object == parent)
3142 		return true;
3143 
3144 	if(parent->Type & IS_COMPOUND_OBJECT)
3145 	{
3146 		for(vector<ObjectPtr>::const_iterator Sib = (reinterpret_cast<const CSG *>(parent))->children.begin(); Sib != (reinterpret_cast<const CSG *>(parent))->children.end(); Sib++)
3147 		{
3148 			if(IsObjectInCSG(object, *Sib))
3149 				found = true;
3150 		}
3151 	}
3152 
3153 	return found;
3154 }
3155 
3156 // SSLT code by Sarah Tariq and Lawrence (Lorenzo) Ibarria
3157 
3158 double Trace::ComputeFt(double phi, double eta)
3159 {
3160 #if 0
3161 	double sin_phi   = sin(phi);
3162 	double sin_theta = sin_phi / eta;
3163 	if ((sin_theta < -1.0) || (sin_theta > 1.0))
3164 		return 0; // total reflection, i.e. no transmission at all
3165 
3166 	double  theta = asin(sin_theta);
3167 
3168 	return 1 - 0.5 * (Sqr(sin(phi-theta)) / Sqr(sin(phi+theta)) + Sqr(tan(phi-theta)) / Sqr(tan(phi+theta)));
3169 #elif 0
3170 	/*
3171 	double x = fabs(acos(cos(phi))) / M_PI_2;
3172 
3173 	double Fr = 0.014567225 / Sqr(x - 1.12) - 0.011612903;
3174 	return 1.0 - Fr;
3175 	*/
3176 #else
3177 	double cos_angle = cos(phi);
3178 	double g = sqrt(Sqr(eta) + Sqr(cos_angle) - 1);
3179 	double F = 0.5 * (Sqr(g - cos_angle) / Sqr(g + cos_angle));
3180 	F = F * (1 + Sqr(cos_angle * (g + cos_angle) - 1) / Sqr(cos_angle * (g - cos_angle) + 1));
3181 
3182 	return 1.0 - min(1.0,max(0.0,F));
3183 #endif
3184 }
3185 
3186 void Trace::ComputeSurfaceTangents(const Vector3d& normal, Vector3d& u, Vector3d& v)
3187 {
3188 #if 1
3189 	if (fabs(normal[0]) <= fabs(normal[1]) && fabs(normal[0]) <= fabs(normal[2]))
3190 		// if x co-ordinate is smallest, creating a tangent in the yz plane is a piece of cake;
3191 		// the following code is equivalent to u = cross(normal, Vector3d(1,0,0)).normalized();
3192 		u = Vector3d(0, normal[2], -normal[1]).normalized();
3193 	else if (fabs(normal[1]) <= fabs(normal[2]))
3194 		// if y co-ordinate is smallest, creating a tangent in the xz plane is a piece of cake;
3195 		// the following code is equivalent to u = cross(normal, Vector3d(0,1,0)).normalized();
3196 		u = Vector3d(-normal[2], 0, normal[0]).normalized();
3197 	else
3198 		// if z co-ordinate is smallest, creating a tangent in the xy plane is a piece of cake;
3199 		// the following code is equivalent to u = cross(normal, Vector3d(0,0,1)).normalized();
3200 		u = Vector3d(normal[1], -normal[0], 0).normalized();
3201 #else
3202 	if (fabs(normal[0]) <= fabs(normal[1]) && fabs(normal[0]) <= fabs(normal[2]))
3203 		// if x co-ordinate is smallest, creating a tangent in the yz plane is a piece of cake
3204 		u = cross(normal, Vector3d(1,0,0)).normalized();
3205 	else if (fabs(normal[1]) <= fabs(normal[0]) && fabs(normal[1]) <= fabs(normal[2]))
3206 		// if y co-ordinate is smallest, creating a tangent in the xz plane is a piece of cake
3207 		u = cross(normal, Vector3d(0,1,0)).normalized();
3208 	else
3209 		// if z co-ordinate is smallest, creating a tangent in the xy plane is a piece of cake
3210 		u = cross(normal, Vector3d(0,0,1)).normalized();
3211 #endif
3212 
3213 	v = cross(normal, u);
3214 }
3215 
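// [Illustrative sketch, not part of the original code] The construction above picks the coordinate
// axis least aligned with the normal and builds the first tangent from it; u, v and the normal
// then form an orthonormal basis. A hypothetical check, reusing this file's Vector3d/dot/cross:
#if 0
static bool TangentBasisIsOrthonormal(const Vector3d& normal)
{
	Vector3d n = normal.normalized();
	Vector3d u, v;
	// same selection rule as above: take the axis with the smallest |component|
	if (fabs(n[0]) <= fabs(n[1]) && fabs(n[0]) <= fabs(n[2]))
		u = Vector3d(0, n[2], -n[1]).normalized();
	else if (fabs(n[1]) <= fabs(n[2]))
		u = Vector3d(-n[2], 0, n[0]).normalized();
	else
		u = Vector3d(n[1], -n[0], 0).normalized();
	v = cross(n, u);
	const double eps = 1.0e-9;
	return (fabs(dot(u, n)) < eps) && (fabs(dot(v, n)) < eps) && (fabs(dot(u, v)) < eps) &&
	       (fabs(u.length() - 1.0) < eps) && (fabs(v.length() - 1.0) < eps);
}
#endif
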
3216 void Trace::ComputeSSLTNormal(Intersection& Ray_Intersection)
3217 {
3218 	Vector3d Raw_Normal;
3219 
3220 	/* Get the normal to the surface */
3221 	Ray_Intersection.Object->Normal(*Raw_Normal, &Ray_Intersection, threadData);
3222 	Assign_Vector(Ray_Intersection.INormal, *Raw_Normal);
3223 	Assign_Vector(Ray_Intersection.PNormal, *Raw_Normal); // TODO FIXME - we should possibly take normal perturbation into account
3224 }
3225 
3226 bool Trace::IsSameSSLTObject(const ObjectBase* obj1, const ObjectBase* obj2)
3227 {
3228 	// TODO maybe use something smarter
3229 	return (obj1 && obj2 && obj1->interior == obj2->interior);
3230 }
3231 
3232 void Trace::ComputeDiffuseSampleBase(Vector3d& basePoint, const Intersection& out, const Vector3d& vOut, double avgFreeDist)
3233 {
3234 	Vector3d pOut(out.IPoint);
3235 	Vector3d nOut(out.INormal);
3236 
3237 	// make sure to get the normal right; obviously, the observer must be "outside".
3238 	// for algorithm simplicity, we want the normal to point inward
3239 	double cos_phi = dot(nOut, vOut);
3240 	if (cos_phi > 0)
3241 		nOut = -nOut;
3242 
3243 	// typically, place the base point the average free distance below the surface;
3244 	// however, never place it closer to the "back side" than to the front
3245 	Intersection backSide;
3246 	Ray ray(*pOut, *nOut); // we're shooting from the surface, so SubsurfaceRay would do us no good (as it would potentially "re-discover" the current surface)
3247 	backSide.Depth = avgFreeDist * 2; // max distance we're looking at
3248 	bool found = FindIntersection(backSide, ray);
3249 	if (found)
3250 	{
3251 		if (IsSameSSLTObject(out.Object, backSide.Object))
3252 			basePoint = pOut + nOut * (backSide.Depth / 2);
3253 		else
3254 			basePoint = pOut + nOut * min(avgFreeDist, backSide.Depth - EPSILON);
3255 	}
3256 	else
3257 		basePoint = pOut + nOut * avgFreeDist;
3258 }
3259 
3260 void Trace::ComputeDiffuseSamplePoint(const Vector3d& basePoint, Intersection& in, double& sampleArea, TraceTicket& ticket)
3261 {
3262 	// generate a vector in a random direction
3263 	// TODO FIXME - a suitably weighted distribution (oriented according to the surface normal) would possibly be better
3264 	while (ssltUniformDirectionGenerator.size() <= ticket.subsurfaceRecursionDepth)
3265 		ssltUniformDirectionGenerator.push_back(GetSubRandomDirectionGenerator(0, 32767));
3266 	Vector3d v = (*(ssltUniformDirectionGenerator[ticket.subsurfaceRecursionDepth]))();
3267 
3268 	Ray ray(*basePoint, *v, Ray::SubsurfaceRay);
3269 	bool found = FindIntersection(in, ray);
3270 
3271 	if (found)
3272 	{
3273 		ComputeSSLTNormal(in);
3274 
3275 		Vector3d vDelta = Vector3d(in.IPoint) - basePoint;
3276 		double dist = vDelta.length();
3277 		double cos_phi = std::abs(dot(vDelta / dist, Vector3d(in.INormal)));
3278 		if (cos_phi < 0)
3279 		{
3280 			VScaleEq(in.INormal, -1.0);
3281 			cos_phi = -cos_phi;
3282 		}
3283 		if (cos_phi < 0.01)
3284 			cos_phi = 0.01; // TODO FIXME - rather arbitrary limit
3285 		sampleArea = 4.0 * M_PI * Sqr(dist * sceneData->mmPerUnit) / cos_phi;
3286 	}
3287 	else
3288 	{
3289 		sampleArea = 0.0;
3290 	}
3291 }
3292 
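// [Illustrative sketch, not part of the original code] The sample-area weight above: with
// directions sampled uniformly over the sphere from the base point, a hit at distance 'dist'
// stands for a surface patch whose area grows with the squared distance (in millimetres) and is
// inversely proportional to the cosine of the angle between the connecting vector and the surface
// normal (a grazing patch covers more surface per unit solid angle). Hypothetical helper:
#if 0
static double DiffuseSampleAreaWeight(double dist, double cosPhi, double mmPerUnit)
{
	const double minCos = 0.01;       // same arbitrary clamp as above
	if (cosPhi < minCos)
		cosPhi = minCos;
	double distMM = dist * mmPerUnit; // convert scene units to millimetres
	return 4.0 * M_PI * distMM * distMM / cosPhi;
}
#endif
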
3293 void Trace::ComputeOneSingleScatteringContribution(const LightSource& lightsource, const Intersection& out, double sigma_t_xo, double sigma_s, double s_prime_out,
3294                                                    RGBColour& Lo, double eta, const Vector3d& bend_point, double phi_out, double cos_out_prime, TraceTicket& ticket)
3295 {
3296 	// TODO FIXME - part of this code is very alike to ComputeOneDiffuseLight()
3297 
3298 	// Do Light source to get the correct lightsourceray
3299 	// (note that for now we're mainly interested in the direction)
3300 	Ray lightsourceray(Ray::SubsurfaceRay);
3301 	double lightsourcedepth;
3302 	ComputeOneWhiteLightRay(lightsource, lightsourcedepth, lightsourceray, bend_point);
3303 
3304 	// We're below the surface; determine where a light ray from the source would be intersecting this object's surface
3305 	// (and, more importantly, what the surface normal is there; notice that this intersection is an approximation,
3306 	// ignoring refraction)
3307 	Intersection xi;
3308 	if (!FindIntersection(xi, lightsourceray))
3309 		return;
3310 
3311 	if (!IsSameSSLTObject(xi.Object, out.Object))
3312 		return; // TODO - what if the other object is transparent?
3313 
3314 	ComputeSSLTNormal(xi);
3315 
3316 	// Get a colour and a ray (also recomputes all the lightsourceray stuff).
3317 	RGBColour lightcolour;
3318 	ComputeOneLightRay(lightsource, lightsourcedepth, lightsourceray, Vector3d(xi.IPoint), lightcolour, true);
3319 
3320 	// Don't calculate spotlights when outside of the light's cone.
3321 	if((fabs(lightcolour.red())   < EPSILON) &&
3322 	   (fabs(lightcolour.green()) < EPSILON) &&
3323 	   (fabs(lightcolour.blue())  < EPSILON))
3324 		return;
3325 
3326 	// See if light on far side of surface from camera.
3327 	// [CLi] double_illuminate and diffuse backside illumination don't seem to make much sense with BSSRDF, so we ignore them here.
3328 	// [CLi] BSSRDF always does "full area lighting", so we ignore it here.
3329 	double cos_in = dot(Vector3d(xi.INormal), Vector3d(lightsourceray.Direction));
3330 	// [CLi] we're coming from inside the object, so the surface /must/ be properly oriented towards the camera; if it isn't,
3331 	// it must be the normal's fault
3332 	if (cos_in < 0)
3333 	{
3334 		VScaleEq(xi.INormal, -1.0);
3335 		cos_in = -cos_in;
3336 	}
3337 	// [CLi] light coming in almost parallel to the surface is a problem though
3338 	if(cos_in < EPSILON)
3339 		return;
3340 
3341 	// If the light source was not blocked by any intervening object, then
3342 	// calculate its contribution to the object's overall illumination.
3343 	if ((qualityFlags & Q_SHADOW) && ((lightsource.Projected_Through_Object != NULL) || (lightsource.Light_Type != FILL_LIGHT_SOURCE)))
3344 	{
3345 		// [CLi] Not using lightColorCache because it's unsuited for BSSRDF
3346 		TraceShadowRay(lightsource, lightsourcedepth, lightsourceray, Vector3d(xi.IPoint), lightcolour, ticket);
3347 	}
3348 
3349 	// Don't calculate anything more if we're in full shadow
3350 	if((fabs(lightcolour.red())   < EPSILON) &&
3351 	   (fabs(lightcolour.green()) < EPSILON) &&
3352 	   (fabs(lightcolour.blue())  < EPSILON))
3353 		return;
3354 
3355 	double sigma_t_xi = sigma_t_xo; // TODO FIXME - theoretically this should be taken from point where light comes in
3356 
3357 	double cos_in_sqr       = Sqr(cos_in);
3358 	double sin_in_sqr       = 1 - cos_in_sqr;
3359 	double eta_sqr          = Sqr(eta);
3360 	double sin_in_prime_sqr = sin_in_sqr / eta_sqr;
3361 	double cos_in_prime_sqr = 1 - sin_in_prime_sqr;
3362 	if (cos_in_prime_sqr < 0.0)
3363 		return; // total reflection
3364 	double cos_in_prime  = sqrt(cos_in_prime_sqr);
3365 
3366 	if (cos_in_prime <= EPSILON)
3367 		return; // close enough to total reflection to give us trouble
3368 
3369 	//RGBColour lightColour = RGBColour(lightsource.colour);
3370 	lightcolour *= cos_in; // TODO VERIFY - is this right? Where does this term come from??
3371 
3372 	// compute si
3373 	double si = (bend_point - Vector3d(xi.IPoint)).length() * sceneData->mmPerUnit;
3374 
3375 	// calculate s_prime_i
3376 	double s_prime_i = si * cos_in / cos_in_prime;
3377 
3378 	// calculate F
3379 	double phi_in  = acos(cos_in);
3380 	double F = ComputeFt(phi_in, eta) * ComputeFt(phi_out, eta);
3381 
3382 	// calculate sigma_tc
3383 	double G = fabs(cos_out_prime / cos_in_prime); // TODO FIXME - theoretically this is only valid for comparatively flat surfaces
3384 	double sigma_tc = sigma_t_xo + G * sigma_t_xi;
3385 
3386 	// calculate the phase function
3387 	// NOTE: We're leaving out the 1/pi factor because in POV-Ray, by convention,
3388 	// light intensity is normalized to imply this factor already.
3389 	double p = 1.0 / 4.0; // assume isotropic scattering (normally this would be 1/(4*M_PI))
3390 
3391 	// multiply with the e terms
3392 	double eTerms = exp(-s_prime_i * sigma_t_xi) * exp(-s_prime_out * sigma_t_xo);    // TODO FIXME - theoretically the first sigma_t should be taken from xi.IPoint
3393 
3394 	double factor = (sigma_s * F * p / sigma_tc) * eTerms;
3395 	if (factor >= DBL_MAX)
3396 		factor = DBL_MAX;
3397 	assert ((factor >= 0.0) && (factor <= DBL_MAX)); // verify factor is a non-negative, finite value (no #INF, no #IND, no #NAN)
3398 
3399 	lightcolour *= factor;
3400 
3401 	assert ((lightcolour.red() >= 0) &&
3402 	        (lightcolour.green() >= 0) &&
3403 	        (lightcolour.blue() >= 0));
3404 
3405 	// add up the contribution
3406 	Lo += lightcolour;
3407 }
3408 
3409 // call this once for each color
3410 // out.INormal is calculated
3411 void Trace::ComputeSingleScatteringContribution(const Intersection& out, double dist, double cos_out, const Vector3d& refractedREye, double sigma_prime_t, double sigma_prime_s, RGBColour& Lo, double eta,
3412                                                 TraceTicket& ticket)
3413 {
3414 	double          g = 0; // the mean cosine of the scattering angle; for isotropic scattering, g = 0
3415 	double          sigma_t_xo = sigma_prime_t / (1-g); // TODO FIXME - precompute
3416 	double          sigma_s = sigma_prime_s / (1-g); // TODO FIXME - precompute
3417 	double          epsilon;
3418 	double          s_prime_out;
3419 
3420 	Lo.clear();
3421 
3422 	// TODO FIXME - a significant deal of this only needs to be computed once for any intersection point!
3423 
3424 	// calculate s_prime_out
3425 	while (ssltUniformNumberGenerator.size() <= ticket.subsurfaceRecursionDepth)
3426 		ssltUniformNumberGenerator.push_back(GetRandomDoubleGenerator(0.0, 1.0, 32767));
3427 	epsilon = (*(ssltUniformNumberGenerator[ticket.subsurfaceRecursionDepth]))(); // epsilon is a random floating point value in the range [0,1) {including 0, not including 1}
3428 	s_prime_out = fabs(log(epsilon)) / sigma_t_xo;
3429 
3430 	if (s_prime_out >= dist)
3431 		return; // not within the object - this will be covered by a "zero scattering" term
3432 
3433 	// compute bend_point, which lies s_prime_out away along refractedREye
3434 	Vector3d bend_point = Vector3d(out.IPoint) + refractedREye * (s_prime_out / sceneData->mmPerUnit);
3435 
3436 	double cos_out_prime = sqrt(1 - ((Sqr(1.0 / eta)) * (1 - Sqr(cos_out))));
3437 
3438 	// global light sources, if not turned off for this object
3439 	if((out.Object->Flags & NO_GLOBAL_LIGHTS_FLAG) != NO_GLOBAL_LIGHTS_FLAG)
3440 	{
3441 		for(int i = 0; i < threadData->lightSources.size(); i++)
3442 			ComputeOneSingleScatteringContribution(*threadData->lightSources[i], out, sigma_t_xo, sigma_s, s_prime_out, Lo, eta, bend_point, acos(cos_out), cos_out_prime, ticket);
3443 	}
3444 
3445 	// local light sources from a light group, if any
3446 	if(!out.Object->LLights.empty())
3447 	{
3448 		for(int i = 0; i < out.Object->LLights.size(); i++)
3449 			ComputeOneSingleScatteringContribution(*out.Object->LLights[i], out, sigma_t_xo, sigma_s, s_prime_out, Lo, eta, bend_point, acos(cos_out), cos_out_prime, ticket);
3450 	}
3451 
3452 	assert ((Lo.red()   >= 0) &&
3453 	        (Lo.green() >= 0) &&
3454 	        (Lo.blue()  >= 0));
3455 
3456 	// TODO FIXME - radiosity should also be taken into account
3457 }
3458 
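// [Illustrative sketch, not part of the original code] The scattering depth above is importance-
// sampled from the Beer-Lambert transmittance exp(-sigma_t * s), i.e. s = -ln(epsilon) / sigma_t
// for a uniform epsilon; the code above draws epsilon from [0,1) and takes fabs(log(epsilon)),
// which is the same quantity up to the degenerate endpoints. Hypothetical helper, assumes <cmath>:
#if 0
static double SampleScatteringDepth(double epsilon, double sigma_t)
{
	// epsilon near 1 gives a depth near 0 (scattering right at the surface);
	// epsilon near 0 gives ever larger depths, later rejected if beyond the object.
	return -log(epsilon) / sigma_t;
}
#endif
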
3459 bool Trace::SSLTComputeRefractedDirection(const Vector3d& v, const Vector3d& n, double eta, Vector3d& refracted)
3460 {
3461 	// Phi: angle between normal and -incoming_ray (REye in this case, since it points to Eye)
3462 	// Theta: angle between -normal and outgoing_ray
3463 	double          cosPhi;
3464 	Vector3d        unitV = v.normalized();
3465 	Vector3d        unitN = n.normalized();
3466 
3467 	cosPhi = dot(unitV, unitN);
3468 	if (cosPhi > 0)
3469 		unitN = -unitN;
3470 	else
3471 		cosPhi = -cosPhi;
3472 
3473 	double cosThetaSqr = 1.0 + Sqr(eta) * (Sqr(cosPhi) - 1.0);
3474 	if (cosThetaSqr < 0.0)
3475 		return false;
3476 
3477 	double cosTheta = sqrt(cosThetaSqr);
3478 	refracted = (unitV * eta + unitN * (eta * cosPhi - cosTheta)).normalized();
3479 
3480 	return true;
3481 }
3482 
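// [Illustrative sketch, not part of the original code] With eta = n_incident / n_transmitted, the
// term cosThetaSqr = 1 - eta^2 * (1 - cosPhi^2) above is Snell's law in disguise, and a negative
// value means total internal reflection. A numeric companion (hypothetical helper, assumes <cmath>):
#if 0
static double RefractedAngleDegrees(double incidentAngleDegrees, double eta)
{
	double phi = incidentAngleDegrees * M_PI / 180.0;
	double cosThetaSqr = 1.0 - eta * eta * (1.0 - cos(phi) * cos(phi));
	if (cosThetaSqr < 0.0)
		return -1.0;                               // total internal reflection
	return acos(sqrt(cosThetaSqr)) * 180.0 / M_PI; // e.g. (45.0, 1.0/1.5) -> ~28.1 degrees
}
#endif
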
3483 void Trace::ComputeDiffuseContribution(const Intersection& out, const Vector3d& vOut, const Vector3d& pIn, const Vector3d& nIn, const Vector3d& vIn, double& sd, double sigma_prime_s, double sigma_a, double eta)
3484 {
3485 	// TODO FIXME - a great deal of this can be precomputed
3486 	double  sigma_prime_t = sigma_prime_s + sigma_a;
3487 	double  alpha_prime = sigma_prime_s / sigma_prime_t;
3488 	double  F_dr = FresnelDiffuseReflectance(eta);
3489 	double  Aconst = ((1 + F_dr) / (1 - F_dr));
3490 	double  Rd;
3491 
3492 	double  cos_phi_in  = clip(dot(vIn,  nIn),                      -1.0, 1.0); // (clip values to not run into trouble due to petty precision issues)
3493 	double  cos_phi_out = clip(dot(vOut, Vector3d(out.INormal)),    -1.0, 1.0);
3494 	double  phi_in  = acos(cos_phi_in);
3495 	double  phi_out = acos(cos_phi_out);
3496 
3497 	double  F = ComputeFt(phi_in, eta) * ComputeFt(phi_out, eta);
3498 
3499 #if 1
3500 	// full BSSRDF model
3501 
3502 	double  distSqr = (pIn - Vector3d(out.IPoint)).lengthSqr() * Sqr(sceneData->mmPerUnit);
3503 
3504 	double  Dconst = 1 / (3 * sigma_prime_t);
3505 	double  sigma_tr = sqrt(3 * sigma_a * sigma_prime_t);
3506 
3507 	double  z_r = 1.0 / sigma_prime_t;
3508 	double  dSqr_r = Sqr(z_r) + distSqr;
3509 	double  d_r = sqrt(dSqr_r);
3510 	double  z_v = z_r * (1.0 + Aconst * 4.0/3.0);
3511 	double  dSqr_v = Sqr(z_v) + distSqr;
3512 	double  d_v = sqrt(dSqr_v);
3513 
3514 	double common_term  = alpha_prime / (4.0 * M_PI);               // dimensionless
3515 	double C1           = z_r * (sigma_tr + 1.0/d_r);               // dimensionless
3516 	double C2           = z_v * (sigma_tr + 1.0/d_v);               // dimensionless
3517 	double r_term       = C1 * exp(-sigma_tr * d_r) / dSqr_r;     // dimension 1/area
3518 	double v_term       = C2 * exp(-sigma_tr * d_v) / dSqr_v;     // dimension 1/area
3519 
3520 	Rd = common_term * (r_term + v_term);
3521 
3522 #else
3523 	// uniform illumination BRDF approximation
3524 	// TODO - use this for radiosity?
3525 
3526 	// calculate Rd
3527 	double root_term = sqrt(3.0 * (1.0 - alpha_prime));
3528 	Rd = (alpha_prime / 2.0) * (1 + exp((-4.0/3.0) * Aconst * root_term)) * exp(-root_term);
3529 
3530 #endif
3531 
3532 	// NOTE: We're leaving out the 1/pi factor because in POV-Ray, by convention,
3533 	// light intensity is normalized to imply this factor already.
3534 	sd = F * Rd; // (normally this would be F*Rd/M_PI)
3535 	assert ((sd >= 0.0) && (sd <= DBL_MAX)); // verify sd is a non-negative, finite value (no #INF, no #IND, no #NAN)
3536 }
3537 
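// [Illustrative sketch, not part of the original code] The "full BSSRDF model" branch above is the
// classic dipole diffusion approximation: a real source at depth z_r = 1/sigma'_t and a mirrored
// virtual source at z_v = z_r * (1 + 4A/3), each contributing an exponentially attenuated term that
// falls off with its distance to the exit point. A compact scalar version (hypothetical helper,
// assumes <cmath>; distSqr is the squared entry-to-exit distance in mm^2, as used in this file):
#if 0
static double DipoleRd(double distSqr, double sigma_prime_s, double sigma_a, double A)
{
	double sigma_prime_t = sigma_prime_s + sigma_a;
	double alpha_prime   = sigma_prime_s / sigma_prime_t;
	double sigma_tr      = sqrt(3.0 * sigma_a * sigma_prime_t);
	double z_r = 1.0 / sigma_prime_t;               // depth of the real source below the surface
	double z_v = z_r * (1.0 + A * 4.0 / 3.0);       // depth of the mirrored virtual source
	double d_r = sqrt(z_r * z_r + distSqr);
	double d_v = sqrt(z_v * z_v + distSqr);
	double r_term = z_r * (sigma_tr + 1.0 / d_r) * exp(-sigma_tr * d_r) / (d_r * d_r);
	double v_term = z_v * (sigma_tr + 1.0 / d_v) * exp(-sigma_tr * d_v) / (d_v * d_v);
	return alpha_prime / (4.0 * M_PI) * (r_term + v_term);
}
#endif
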
3538 void Trace::ComputeDiffuseContribution1(const LightSource& lightsource, const Intersection& out, const Vector3d& vOut, const Intersection& in, RGBColour& Total_Colour,
3539                                         const DblRGBColour& sigma_prime_s, const DblRGBColour& sigma_a, double eta, double weight, TraceTicket& ticket)
3540 {
3541 	// TODO FIXME - part of this code is very alike to ComputeOneDiffuseLight()
3542 
3543 	// Get a colour and a ray.
3544 	Ray lightsourceray;
3545 	double lightsourcedepth;
3546 	RGBColour lightcolour;
3547 	ComputeOneLightRay(lightsource, lightsourcedepth, lightsourceray, Vector3d(in.IPoint), lightcolour, true);
3548 
3549 	// Don't calculate spotlights when outside of the light's cone.
3550 	if((fabs(lightcolour.red())   < EPSILON) &&
3551 	   (fabs(lightcolour.green()) < EPSILON) &&
3552 	   (fabs(lightcolour.blue())  < EPSILON))
3553 		return;
3554 
3555 	Vector3d nIn = Vector3d(in.INormal);
3556 
3557 	// See if light on far side of surface from camera.
3558 	// [CLi] double_illuminate and diffuse backside illumination don't seem to make much sense with BSSRDF, so we ignore them here.
3559 	// [CLi] BSSRDF always does "full area lighting", so we ignore it here.
3560 	double cos_in = dot(nIn, Vector3d(lightsourceray.Direction));
3561 	// [CLi] we're coming from inside the object, so the surface /must/ be properly oriented towards the camera; if it isn't,
3562 	// it must be the normal's fault
3563 	if (cos_in < 0)
3564 	{
3565 		nIn    = -nIn;
3566 		cos_in = -cos_in;
3567 	}
3568 	// [CLi] light coming in almost parallel to the surface is a problem though
3569 	if(cos_in < EPSILON)
3570 		return;
3571 
3572 	// If the light source was not blocked by any intervening object, then
3573 	// calculate its contribution to the object's overall illumination.
3574 	if ((qualityFlags & Q_SHADOW) && ((lightsource.Projected_Through_Object != NULL) || (lightsource.Light_Type != FILL_LIGHT_SOURCE)))
3575 	{
3576 		// [CLi] Not using lightColorCache because it's unsuited for BSSRDF
3577 		TraceShadowRay(lightsource, lightsourcedepth, lightsourceray, Vector3d(in.IPoint), lightcolour, ticket);
3578 	}
3579 
3580 	// Don't calculate anything more if we're in full shadow
3581 	if((fabs(lightcolour.red())   < EPSILON) &&
3582 	   (fabs(lightcolour.green()) < EPSILON) &&
3583 	   (fabs(lightcolour.blue())  < EPSILON))
3584 		return;
3585 
3586 	lightcolour *= cos_in;
3587 	for (int j = 0; j < 3; j++)
3588 	{
3589 		double sd;
3590 		ComputeDiffuseContribution(out, vOut, Vector3d(in.IPoint), nIn, Vector3d(lightsourceray.Direction), sd, sigma_prime_s[j], sigma_a[j], eta);
3591 		sd *= weight;
3592 		assert (sd >= 0);
3593 		lightcolour[j] *= sd;
3594 		assert (lightcolour[j] >= 0);
3595 		Total_Colour[j] += lightcolour[j];
3596 	}
3597 }
3598 
3599 void Trace::ComputeDiffuseAmbientContribution1(const Intersection& out, const Vector3d& vOut, const Intersection& in, RGBColour& Total_Colour,
3600                                                const DblRGBColour& sigma_prime_s, const DblRGBColour& sigma_a, double eta, double weight, TraceTicket& ticket)
3601 {
3602 #if 0
3603 	// generate a random direction vector (using a distribution cosine-weighted along the normal)
3604 	Vector3d axisU, axisV;
3605 	ComputeSurfaceTangents(Vector3d(in.INormal), axisU, axisV);
3606 	while (ssltCosWeightedDirectionGenerator.size() <= ticket.subsurfaceRecursionDepth)
3607 		ssltCosWeightedDirectionGenerator.push_back(GetSubRandomCosWeightedDirectionGenerator(2, 32767));
3608 	Vector3d direction = (*(ssltCosWeightedDirectionGenerator[ticket.subsurfaceRecursionDepth]))();
3609 	double cos_in = direction.y(); // cosine of angle between normal and random vector
3610 	Vector3d vIn = Vector3d(in.INormal)*cos_in + axisU*direction.x() + axisV*direction.z();
3611 
3612 	assert(fabs(dot(Vector3d(in.INormal), axisU)) < EPSILON);
3613 	assert(fabs(dot(Vector3d(in.INormal), axisV)) < EPSILON);
3614 	assert(fabs(dot(axisU, axisV)) < EPSILON);
3615 
3616 	// [CLi] light coming in almost parallel to the surface is a problem
3617 	if(cos_in < EPSILON)
3618 		return;
3619 
3620 	Ray ambientray = Ray(in.IPoint, *vIn, Ray::OtherRay); // TODO FIXME - [CLi] check whether ray type is suitable
3621 	Colour ambientcolour;
3622 	TraceRay(ambientray, ambientcolour, weight, ticket, false);
3623 
3624 	// Don't calculate anything more if there's no light input
3625 	if((fabs(ambientcolour.red())   < EPSILON) &&
3626 	   (fabs(ambientcolour.green()) < EPSILON) &&
3627 	   (fabs(ambientcolour.blue())  < EPSILON))
3628 		return;
3629 
3630 	for (int j = 0; j < 3; j++)
3631 	{
3632 		double sd;
3633 		// Note: radiosity data is already cosine-weighted, so we're passing the surface normal as incident light direction
3634 		ComputeDiffuseContribution(out, vOut, Vector3d(in.IPoint), Vector3d(in.INormal),  vIn, sd, sigma_prime_s[j], sigma_a[j], eta);
3635 		sd *= 0.5/cos_in; // the distribution is cosine-weighted, but sd was computed assuming neutral weighting, so compensate
3636 		sd *= weight;
3637 		assert (sd >= 0);
3638 		ambientcolour[j] *= sd;
3639 		assert (ambientcolour[j] >= 0);
3640 		Total_Colour[j] += ambientcolour[j];
3641 	}
3642 #else
3643 	RGBColour ambientcolour;
3644 	// TODO FIXME - should support perturbed normals
3645 	radiosity.ComputeAmbient(Vector3d(in.IPoint), Vector3d(in.INormal), Vector3d(in.INormal), ambientcolour, weight, ticket);
3646 	for (int j = 0; j < 3; j++)
3647 	{
3648 		double sd;
3649 		// Note: radiosity data is already cosine-weighted, so we're passing the surface normal as incident light direction
3650 		ComputeDiffuseContribution(out, vOut, Vector3d(in.IPoint), Vector3d(in.INormal), Vector3d(in.INormal), sd, sigma_prime_s[j], sigma_a[j], eta);
3651 		sd *= weight;
3652 		assert (sd >= 0);
3653 		ambientcolour[j] *= sd;
3654 		assert (ambientcolour[j] >= 0);
3655 		Total_Colour[j] += ambientcolour[j];
3656 	}
3657 #endif
3658 }
3659 
3660 void Trace::ComputeSubsurfaceScattering(const FINISH *Finish, const RGBColour& layer_pigment_colour, const Intersection& out, const Ray& Eye, const Vector3d& Layer_Normal, RGBColour& Final_Colour, double Attenuation, TraceTicket& ticket)
3661 {
3662 	int NumSamplesDiffuse = sceneData->subsurfaceSamplesDiffuse;
3663 	int NumSamplesSingle  = sceneData->subsurfaceSamplesSingle;
3664 
3665 	// TODO FIXME - this is hard-coded for now
3666 	if (ticket.subsurfaceRecursionDepth >= 2)
3667 		return;
3668 	else if (ticket.subsurfaceRecursionDepth == 1)
3669 	{
3670 		NumSamplesDiffuse = 1;
3671 		NumSamplesSingle  = 1;
3672 		//NumSamplesDiffuse = (int)ceil(sqrt(NumSamplesDiffuse));
3673 		//NumSamplesSingle  = (int)ceil(sqrt(NumSamplesSingle));
3674 	}
3675 
3676 	ticket.subsurfaceRecursionDepth++;
3677 
3678 	LightSource Light_Source;
3679 
3680 	Vector3d vOut = -Vector3d(Eye.Direction);
3681 
3682 	RGBColour Total_Colour;
3683 
3684 	double eta;
3685 
3686 	ComputeRelativeIOR(Eye, out.Object->interior, eta);
3687 
3688 #if 0
3689 	// user setting specifies mean free path
3690 	DblRGBColour   alpha_prime      = object->interior->subsurface->GetReducedAlbedo(layer_pigment_colour * Finish->Diffuse);
3691 	DblRGBColour   sigma_tr         = DblRGBColour(1.0) / DblRGBColour(Finish->SubsurfaceTranslucency);
3692 
3693 	DblRGBColour   sigma_prime_t    = sigma_tr / sqrt(3*(RGBColour(1.0)-alpha_prime));
3694 	DblRGBColour   sigma_prime_s    = alpha_prime * sigma_prime_t;
3695 	DblRGBColour   sigma_a          = sigma_prime_t - sigma_prime_s;
3696 	DblRGBColour   sigma_tr_sqr     = sigma_tr * sigma_tr;
3697 #else
3698 	// user setting specifies reduced scattering coefficient
3699 	DblRGBColour   alpha_prime      = out.Object->interior->subsurface->GetReducedAlbedo(layer_pigment_colour * Finish->RawDiffuse);
3700 	DblRGBColour   sigma_prime_s    = DblRGBColour(1.0) / DblRGBColour(Finish->SubsurfaceTranslucency);
3701 
3702 	DblRGBColour   sigma_prime_t    = sigma_prime_s / alpha_prime;
3703 	DblRGBColour   sigma_a          = sigma_prime_t - sigma_prime_s;
3704 	DblRGBColour   sigma_tr_sqr     = sigma_a * sigma_prime_t * 3.0;
3705 	DblRGBColour   sigma_tr         = sqrt(sigma_tr_sqr);
3706 #endif
3707 
3708 #if 1
3709 
3710 	// colour dependent diffuse contribution
3711 
3712 	double      sampleArea;
3713 	double      weight;
3714 	double      weightSum;
3715 	double      sigma_a_mean        = sigma_a.greyscale();
3716 	double      sigma_prime_s_mean  = sigma_prime_s.greyscale();
3717 	double      sigma_prime_t_mean  = sigma_a_mean + sigma_prime_s_mean;
3718 	double      sigma_tr_mean_sqr   = sigma_a_mean * sigma_prime_t_mean * 3.0;
3719 	double      sigma_tr_mean       = sqrt(sigma_tr_mean_sqr);
3720 	int         trueNumSamples;
3721 
3722 	bool radiosity_needed = (sceneData->radiositySettings.radiosityEnabled == true) &&
3723 	                        (sceneData->subsurfaceUseRadiosity == true) &&
3724 	                        (radiosity.CheckRadiosityTraceLevel(ticket) == true) &&
3725 	                        (Test_Flag(out.Object, IGNORE_RADIOSITY_FLAG) == false);
3726 
3727 	Vector3d sampleBase;
3728 	ComputeDiffuseSampleBase(sampleBase, out, vOut, 1.0 / (sigma_prime_t_mean * sceneData->mmPerUnit));
3729 
3730 	weightSum = 0.0;
3731 	trueNumSamples = 0;
3732 
3733 	for (int i = 0; i < NumSamplesDiffuse; i++)
3734 	{
3735 		Intersection in;
3736 		ComputeDiffuseSamplePoint(sampleBase, in, sampleArea, ticket);
3737 
3738 		// avoid pathological cases
3739 		if (sampleArea != 0)
3740 		{
3741 			weight = sampleArea;
3742 			weightSum += weight;
3743 			trueNumSamples ++;
3744 
3745 			if (IsSameSSLTObject(in.Object, out.Object))
3746 			{
3747 				// radiosity-alike ambient illumination
3748 				if (radiosity_needed)
3749 					// shoot just one random ray to account for ambient illumination (we're averaging stuff anyway)
3750 					ComputeDiffuseAmbientContribution1(out, vOut, in, Total_Colour, sigma_prime_s, sigma_a, eta, weight, ticket);
3751 
3752 				// global light sources, if not turned off for this object
3753 				if((out.Object->Flags & NO_GLOBAL_LIGHTS_FLAG) != NO_GLOBAL_LIGHTS_FLAG)
3754 				{
3755 					for(int k = 0; k < threadData->lightSources.size(); k++)
3756 						ComputeDiffuseContribution1(*threadData->lightSources[k], out, vOut, in, Total_Colour, sigma_prime_s, sigma_a, eta, weight, ticket);
3757 				}
3758 
3759 				// local light sources from a light group, if any
3760 				if(!out.Object->LLights.empty())
3761 				{
3762 					for(int k = 0; k < out.Object->LLights.size(); k++)
3763 						ComputeDiffuseContribution1(*out.Object->LLights[k], out, vOut, in, Total_Colour, sigma_prime_s, sigma_a, eta, weight, ticket);
3764 				}
3765 			}
3766 			else
3767 			{
3768 				// TODO - what's the proper thing to do?
3769 			}
3770 		}
3771 	}
3772 	if (trueNumSamples > 0)
3773 		Total_Colour /= trueNumSamples;
3774 
3775 #endif
3776 
3777 	Vector3d refractedEye;
3778 	if (SSLTComputeRefractedDirection(Vector3d(Eye.Direction), Vector3d(out.INormal), 1.0/eta, refractedEye))
3779 	{
3780 		Ray refractedEyeRay(out.IPoint, *refractedEye);
3781 		Intersection unscatteredIn;
3782 
3783 		double dist;
3784 
3785 		// find the intersection of the refracted ray with the object
3786 		// find the distance to this intersection
3787 		bool found = FindIntersection(unscatteredIn, refractedEyeRay);
3788 		if (found)
3789 			dist = (Vector3d(out.IPoint) - Vector3d(unscatteredIn.IPoint)).length() * sceneData->mmPerUnit;
3790 		else
3791 			dist = HUGE_VAL;
3792 
3793 		double cos_out = dot(vOut, Vector3d(out.INormal));
3794 
3795 #if 1
3796 
3797 		// colour dependent single scattering contribution
3798 
3799 		for (int i = 0; i < NumSamplesSingle; i++)
3800 		{
3801 			for (int j = 0; j < 3; j ++)
3802 			{
3803 				RGBColour temp;
3804 				ComputeSingleScatteringContribution(out, dist, cos_out, refractedEye, sigma_prime_t[j], sigma_prime_s[j], temp, eta, ticket);
3805 				Total_Colour[j] += temp[j] / NumSamplesSingle;
3806 			}
3807 		}
3808 
3809 #endif
3810 
3811 #if 1
3812 
3813 		// colour dependent unscattered contribution
3814 
3815 		// Trace refracted ray.
3816 		Colour tempcolor;
3817 
3818 		// TODO FIXME - account for fresnel attenuation at interfaces
3819 		DblRGBColour att = exp(-sigma_prime_t * dist); // TODO should be sigma_t
3820 		weight = max3(att.red(), att.green(), att.blue());
3821 		if (weight > ticket.adcBailout)
3822 		{
3823 			if (!found)
3824 			{
3825 				// TODO - trace the ray to the background?
3826 			}
3827 			else if (IsSameSSLTObject(unscatteredIn.Object, out.Object))
3828 			{
3829 				unscatteredIn.Object->Normal(unscatteredIn.INormal, &unscatteredIn, threadData);
3830 				if (dot(refractedEye, Vector3d(unscatteredIn.INormal)) > 0)
3831 					VScaleEq(unscatteredIn.INormal, -1.0);
3832 				Vector3d doubleRefractedEye;
3833 				if (SSLTComputeRefractedDirection(refractedEye, Vector3d(unscatteredIn.INormal), eta, doubleRefractedEye))
3834 				{
3835 					Ray doubleRefractedEyeRay(refractedEyeRay);
3836 					doubleRefractedEyeRay.SetFlags(Ray::RefractionRay, refractedEyeRay);
3837 					Assign_Vector(doubleRefractedEyeRay.Origin, unscatteredIn.IPoint);
3838 					Assign_Vector(doubleRefractedEyeRay.Direction, *(doubleRefractedEye));
3839 					TraceRay(doubleRefractedEyeRay, tempcolor, weight, ticket, false);
3840 					Total_Colour += RGBColour(DblRGBColour(RGBColour(tempcolor)) * att);
3841 				}
3842 			}
3843 			else
3844 			{
3845 				// TODO - trace the ray into that object (if it is transparent)
3846 			}
3847 		}
3848 
3849 #endif
3850 
3851 	}
3852 
3853 	Final_Colour += Total_Colour;
3854 
3855 	ticket.subsurfaceRecursionDepth--;
3856 }
3857 
3858 } // end of namespace
3859