1-- glslfx version 0.1
2
3//
4// Copyright 2019 Pixar
5//
6// Licensed under the Apache License, Version 2.0 (the "Apache License")
7// with the following modification; you may not use this file except in
8// compliance with the Apache License and the following modification to it:
9// Section 6. Trademarks. is deleted and replaced with:
10//
11// 6. Trademarks. This License does not grant permission to use the trade
12//    names, trademarks, service marks, or product names of the Licensor
13//    and its affiliates, except as required to comply with Section 4(c) of
14//    the License and to reproduce the content of the NOTICE file.
15//
16// You may obtain a copy of the Apache License at
17//
18//     http://www.apache.org/licenses/LICENSE-2.0
19//
20// Unless required by applicable law or agreed to in writing, software
21// distributed under the Apache License with the above modification is
22// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
23// KIND, either express or implied. See the Apache License for the specific
24// language governing permissions and limitations under the Apache License.
25//
26
27--- This is what an import might look like.
28--- #import $TOOLS/hdSt/shaders/volume.glslfx
29
30#import $TOOLS/hdSt/shaders/instancing.glslfx
31#import $TOOLS/hdSt/shaders/pointId.glslfx
32
33--- --------------------------------------------------------------------------
34-- glsl Volume.Vertex
35
// Vertex shader outputs, interpolated across the bounding-box faces
// on their way to the fragment shader.
out VertexData
{
    // Vertex position in eye space.
    // Relying on perspectively correct interpolation.
    vec3 Peye;
} outData;
41
// Transforms a vertex of the volume's bounding box to clip space and
// forwards its eye-space position to the fragment shader.
void main(void)
{
    // Bounding box vertex in local space.
    const vec4 point = vec4(HdGet_points().xyz, 1);

    // Prim transform with per-instance transforms applied on top.
    const MAT4 transform  = ApplyInstanceTransform(HdGet_transform());

    // Bounding box vertex in eye space.
    const vec4 pointEye = vec4(GetWorldToViewMatrix() * transform * point);

    // Dehomogenize for the interpolated eye-space position.
    outData.Peye = pointEye.xyz / pointEye.w;

    gl_Position = vec4(GetProjectionMatrix() * pointEye);

    ProcessPrimvars();
}
58
59--- --------------------------------------------------------------------------
60-- glsl Volume.Fragment
61
// Quality knobs, should eventually be configurable.
//
// We also might have different values for the raymarch
// integrating the pixel value and for the raymarch doing
// the lighting computation.

// Hard cap on the number of ray-marching steps for a single ray.
const int maxNumSteps = 10000;

// Min transmittance (ray marching stops when underrun)
const float minTransmittance = 0.002;

// Minimal scattering amount to ray march to light
const float minScattering = 0.002;

// Interpolated outputs of Volume.Vertex.
in VertexData
{
    // Fragment position in eye space.
    vec3 Peye;
} inData;

// Eye space to local space.
// Used frequently per ray-marching step in both volumeIntegrator
// and accumulatedTransmittance, so computed only once in main.
//
MAT4 instanceModelViewInverse;
// Eye space to volume bounding box space
mat4 eyeToBBox;
// Step sizes for the primary ray march and the secondary (lighting)
// ray march, in eye-space and world-space units respectively.
// All four are resolved once in main().
float resolvedStepSizeEye;
float resolvedStepSizeEyeLighting;
float resolvedStepSizeWorld;
float resolvedStepSizeWorldLighting;
92
// Transform a point by a 4x4 matrix
vec3
transformPoint(mat4 m, vec3 point)
{
    // Promote to homogeneous coordinates, transform, then dehomogenize.
    const vec4 h = vec4(m * vec4(point, 1.0));
    return h.xyz / h.w;
}
100
// Double-precision overload: the product is computed in doubles,
// truncated to single precision, and then dehomogenized in floats.
vec3
transformPoint(dmat4 m, vec3 point)
{
    const vec4 h = vec4(m * vec4(point, 1.0));
    return h.xyz / h.w;
}
107
// Transform a direction by a 4x4 matrix
vec3
transformDir(mat4 m, vec3 dir)
{
    // w = 0 drops the translational part of the transform.
    const vec4 h = vec4(m * vec4(dir, 0.0));
    return h.xyz;
}
115
// Double-precision overload of transformDir; w = 0 drops translation.
vec3
transformDir(dmat4 m, vec3 dir)
{
    const vec4 h = vec4(m * vec4(dir, 0.0));
    return h.xyz;
}
122
// Compute time when a ray starting at pos with direction dir
// exits the axis-aligned box with vertices lMin and lMax.
//
// Assumes that dir.x isn't close to zero.
float
timeRayExitsBoxPreferX(vec3 pos, vec3 dir, vec3 lMin, vec3 lMax)
{
    // Compute the time when the ray exits the region
    // R1 = [xMin, xMax] x [-inf,inf] x [-inf,inf].

    // Depending on whether the ray is going left or right, compute
    // the time when the ray is intersecting the plane containing the
    // left or right face of the box.
    float result = (((dir.x > 0.0) ? lMax.x : lMin.x) - pos.x) / dir.x;

    // Compute the time when the ray exits the region
    // R2 = [xMin, xMax] x [yMin,yMax] x [-inf,inf].

    // We can compute the intersection where the ray left R1 as
    // pos + dir * result.
    // If this intersection is above or below the box, we know that there
    // is an earlier intersection of the ray with the plane containing
    // the top or bottom face of the box, compute that intersection time.
    const float y = pos.y + dir.y * result;
    if (y < lMin.y) {
        result = (lMin.y - pos.y) / dir.y;
    }
    if (y > lMax.y) {
        result = (lMax.y - pos.y) / dir.y;
    }

    // Compute the time when the ray exits the region
    // R3 = [xMin, xMax] x [yMin,yMax] x [zMin,zMax].

    // Analogous procedure to above. Note that z is evaluated using the
    // already y-clipped result, so each stage refines the previous one.
    const float z = pos.z + dir.z * result;
    if (z < lMin.z) {
        result = (lMin.z - pos.z) / dir.z;
    }
    if (z > lMax.z) {
        result = (lMax.z - pos.z) / dir.z;
    }

    return result;
}
168
// Compute time when a ray starting at pos with direction dir
// exits the axis-aligned box with vertices lMin and lMax.
//
// Assumes that dir is normalized.
float
timeRayExitsBox(vec3 pos, vec3 dir, vec3 lMin, vec3 lMax)
{
    // Delegate to timeRayExitsBoxPreferX after cyclically permuting
    // the coordinates so that the x-component is not close to zero.
    //
    // Since dir has unit length, at least one of its components has
    // absolute value larger than 1/2 (as (1/2)^2 * 3 < 1^2).
    const vec3 mag = abs(dir);

    if (mag.x > 0.5) {
        // x already dominates - no permutation needed.
        return timeRayExitsBoxPreferX(
            pos    , dir    , lMin    , lMax    );
    } else if (mag.y > 0.5) {
        // Rotate y into the x slot.
        return timeRayExitsBoxPreferX(
            pos.yzx, dir.yzx, lMin.yzx, lMax.yzx);
    } else {
        // Rotate z into the x slot.
        return timeRayExitsBoxPreferX(
            pos.zxy, dir.zxy, lMin.zxy, lMax.zxy);
    }
}
195
// Given a ray in eye space starting inside a volume, compute the time when it
// exits the volume (assuming rayDirectionEye is normalized).
float
timeRayExitsVolume(vec3 rayStartEye, vec3 rayDirectionEye)
{
    // Transform ray to volume bounding box space
    const vec3 rayStartBBox     = transformPoint(eyeToBBox, rayStartEye);
    const vec3 rayDirectionBBox = transformDir  (eyeToBBox, rayDirectionEye);

    // Compute when ray is leaving the volume bounding box
    return timeRayExitsBox(rayStartBBox,
                           rayDirectionBBox,
                           vec3(HdGet_volumeBBoxLocalMin().xyz),
                           vec3(HdGet_volumeBBoxLocalMax().xyz));

}
212
// Given a ray in eye space, compute the time when it entered the volume
// (assuming rayDirectionEye is normalized).
// Note that it is assumed that the ray point is in the volume and that the
// result will be negative.
float
timeRayEnteredVolume(vec3 rayEndEye, vec3 rayDirectionEye)
{
    // Entering forwards equals exiting along the reversed ray; negating
    // the reversed ray's (positive) exit time yields the entry time.
    const float reversedExitTime =
        timeRayExitsVolume(rayEndEye, -rayDirectionEye);
    return -reversedExitTime;
}
223
// Map an eye-space position into the volume prim's local space.
vec3
coordsToLocalVolumeSpace(vec3 coords)
{
    // instanceModelViewInverse is computed once per fragment in main().
    return transformPoint(instanceModelViewInverse, coords);
}
229
230#if NUM_LIGHTS == 0
231
// With no light sources there is no in-scattering contribution.
vec3
lightingComputation(vec3 rayPointEye, vec3 rayDirectionEye)
{
    return vec3(0.0);
}
237
238#else
239
// Compute the transmittance of the volume from rayStartEye toward a
// light source in the direction rayDirectionEye.
// This integrates the density from rayStartEye to the boundary of
// the volume. The assumption is that the light source is
// out of the volume.
float
accumulatedTransmittance(vec3 rayStartEye, vec3 rayDirectionEye)
{
    // Fixed eye-space increment along the light direction.
    const vec3 rayStepEye = resolvedStepSizeEyeLighting * rayDirectionEye;

    // March only until the ray leaves the volume, capped by maxNumSteps.
    const float rayLength = timeRayExitsVolume(rayStartEye, rayDirectionEye);

    const int numSteps =
        int(floor(min(maxNumSteps, rayLength / resolvedStepSizeEyeLighting)));

    // Riemann sum of extinction along the ray, skipping the sample at
    // the start point itself.
    float totalExtinction = 0.0;
    for (int i = 1; i < numSteps; ++i) {
        totalExtinction += extinctionFunction(rayStartEye + i * rayStepEye);
    }

    // Beer-Lambert: transmittance decays exponentially with optical depth.
    return exp(-totalExtinction * resolvedStepSizeWorldLighting);
}
269
// Computes amount of light arriving at point Peye
// taking attenuation (e.g., by inverse-square law), shadows,
// transmittance by volume into account.
vec3
lightingComputation(vec3 rayPointEye, vec3 rayDirectionEye)
{
    vec3 result = vec3(0.0);
    for (int i = 0; i < NUM_LIGHTS; ++i) {

        const vec4 Plight = lightSource[i].position;

        // w == 0 encodes a directional light; otherwise aim at the
        // light's position.
        const vec3 lightDirectionEye = normalize(
            (Plight.w == 0.0) ? Plight.xyz : Plight.xyz - rayPointEye);

        // Distance and spot-cone attenuation for this light.
        const float atten =
            lightDistanceAttenuation(vec4(rayPointEye,1), i) *
            lightSpotAttenuation(lightDirectionEye, i);

// For now, not using shadows for volumes.
#if USE_SHADOWS && 0
        const float shadow = (lightSource[i].hasShadow) ?
            shadowing(/*lightIndex=*/i, rayPointEye) : 1.0;
#else
        const float shadow = 1.0;
#endif

        // Skip the expensive transmittance march for (nearly) fully
        // shadowed points.
        if (shadow > 0.0001) {
            result +=
                shadow *
                atten *
                // Assuming that light source is outside of volume's
                // bounding box (might integrate extinction along ray
                // beyond light source).
                accumulatedTransmittance(rayPointEye, lightDirectionEye) *
                phaseFunction(-rayDirectionEye, lightDirectionEye) *
                lightSource[i].diffuse.rgb;
        }
    }

    return result;
}
311
312#endif
313
// Result of integrating volume along a ray
// (produced by volumeIntegrator, consumed by main for RenderOutput).
struct VolumeContribution
{
    // Coordinates where ray marching hit the first non-empty voxel
    // in eye space. 0 indicates the ray hit only empty voxels.
    vec3 firstHitPeye;

    // Integrated color
    vec3 color;

    // Integrated transmittance, i.e., what fraction of light from
    // geometry behind the volume is still visible.
    float transmittance;
};
328
// March along the given eye-space ray for rayLength units, accumulating
// emitted and in-scattered light weighted by the running transmittance.
VolumeContribution
volumeIntegrator(vec3 rayStartEye, vec3 rayDirectionEye, float rayLength)
{
    // Start at step 1, skipping the sample at the ray start itself.
    int i = 1;

    VolumeContribution result;
    result.firstHitPeye = vec3(0.0);
    result.color = vec3(0.0);
    result.transmittance = 1.0;

    const vec3 rayStepEye = resolvedStepSizeEye * rayDirectionEye;

    // Number of steps fitting into rayLength, capped by maxNumSteps.
    const int numSteps =
        int(floor(min(maxNumSteps, rayLength / resolvedStepSizeEye)));

    // integrate transmittance and light along ray for bounding box
    while(i < numSteps) {
        const vec3 rayPointEye = rayStartEye + i * rayStepEye;

        // Evaluate volume shader functions to determine extinction,
        // scattering, and emission.
        const float extinctionValue = extinctionFunction(rayPointEye);
        const float scatteringValue = scatteringFunction(rayPointEye);
        const vec3 emissionValue = emissionFunction(rayPointEye);

        // If this is the first time the ray is hitting a non-empty voxel,
        // record the coordinates.
        // (vec3(0.0) doubles as the "no hit yet" sentinel, see
        // VolumeContribution.)
        if (result.firstHitPeye == vec3(0.0)) {
            if (  extinctionValue > 0 ||
                  scatteringValue > 0 ||
                  any(greaterThan(emissionValue, vec3(0)))) {
                result.firstHitPeye = rayPointEye;
            }
        }

        // In scattering contribution, lighting only computed if scattering
        // is non-trivial.
        const vec3 inScattering =
            (resolvedStepSizeWorld * scatteringValue >= minScattering) ?
                (scatteringValue *
                 lightingComputation(rayPointEye, rayDirectionEye))
            : vec3(0.0);

        // In scattering and emission contribution, weighted by the
        // transmittance accumulated so far.
        result.color +=
            (resolvedStepSizeWorld * result.transmittance) *
            (inScattering + emissionValue);

        // Update transmittance
        result.transmittance *= exp(-extinctionValue * resolvedStepSizeWorld);

        // Stop when the volume has become close to opaque.
        if (result.transmittance < minTransmittance) {
            break;
        }

        i+=1;
    }

    return result;
}
390
// Is camera orthographic?
bool
isCameraOrthographic()
{
    // The bottom-right entry of an orthographic projection matrix is 1
    // (a perspective projection has 0 there).
    return abs(GetProjectionMatrix()[3][3] - 1.0) < 1e-5;
}
397
// Convert depth value z in [-1,1] to depth in eye space [-near, -far].
float
NDCtoEyeZ(float z)
{
    // Apply the inverse projection matrix to (0, 0, z, 1) and divide by
    // the resulting w - only the z and w rows matter, so just those
    // entries are read (column-major indexing: m[col][row]).
    const MAT4 m = inverse(GetProjectionMatrix());
    return float((m[2][2] * z + m[3][2]) / (m[2][3] * z + m[3][3]));
}
405
// Compute the z-value of the near clipping plane in eye space.
// Negative by OpenGL convention
float
computeNearZ()
{
    // The near plane sits at NDC depth -1 by OpenGL convention.
    const float ndcNearZ = -1.0;
    return NDCtoEyeZ(ndcNearZ);
}
413
// Compute the near clipping distance. Always returns a positive value.
float
computeNearDistance()
{
    // computeNearZ is negative (OpenGL convention), so take its magnitude.
    const float nearZEye = computeNearZ();
    return abs(nearZEye);
}
420
// Consider the ray from the eye to a given point in eye space.
// Computes the direction of this ray in both cases where the
// camera is orthographic or perspective.
vec3
computeRayDirectionEye(vec3 rayPointEye)
{
    // In NDC space, the ray is always pointing into the z-direction (0,0,1).
    // In clip space, this corresponds to (0,0,1,0).
    // We need to multiply (0,0,1,0) by the inverse projection matrix to
    // get to homogeneous eye space.
    // Or alternatively, we can get the direction in homogeneous eye space
    // by taking the respective column of the inverse projection matrix:
    const vec4 dir = vec4(inverse(GetProjectionMatrix())[2]);

    // To compute the corresponding direction in non-homogeneous eye space,
    // compute the position of the ray after time dt << 1:
    //     vec4 pHomogeneous = vec4(rayPointEye, 1.0) + dt * dir;
    //     vec3 p = pHomogeneous.xyz / pHomogeneous.w;
    //
    // Or equivalently:
    //     vec3 p = (rayPointEye + dt * dir.xyz) / (1.0 + dir.w * dt);
    // And since dt << 1, we have
    //     vec3 p = (rayPointEye + dt * dir.xyz) * (1.0 - dir.w * dt);
    // And dropping higher order terms:
    //     vec3 p = rayPointEye + dt * (dir.xyz - rayPointEye * dir.w);
    // So the new direction is given by:
    //     vec3 d = dir.xyz - rayPointEye * dir.w;
    //
    // (When dir.w is 0 the direction is constant across fragments,
    // matching an orthographic camera; otherwise it varies per fragment.)

    // Normalize direction in eye space.
    return normalize(dir.xyz - rayPointEye * dir.w);
}
452
// Given where the ray is about to leave the volume, compute where we
// should start ray marching: this is either the point where the ray
// would have entered the volume or the intersection with the near
// clipping plane or a sphere about the eye (in the perspective case).
//
vec3
computeRayStartEye(vec3 rayEndEye, vec3 rayDirectionEye)
{
    // Time where ray would have entered volume (negative).
    const float startTime = timeRayEnteredVolume(rayEndEye, rayDirectionEye);

    if (isCameraOrthographic()) {
        // Time where ray would have intersected near plane
        const float nearTime =
            (computeNearZ() - rayEndEye.z)
            / rayDirectionEye.z;
        // Take the latter of the two times for getting the start point
        // (i.e., never start marching in front of the near plane).
        return rayEndEye + max(startTime, nearTime) * rayDirectionEye;
    }

    // Note that we intersect the ray with sphere about the eye with
    // radius equal to the near distance in the perspective case rather
    // than just the above intersection with the near plane.
    //
    // The motivation is that the distance between the eye and the
    // near plane is non-constant across the image. Thus, ray-marching
    // would skip more volume away from the center of the image making
    // the image darker there - so we see opposite vignetting.  To
    // avoid this bias, we use a sphere about the eye.
    //
    // Note that we can use points in front of the near plane
    // since OIT resolution makes no assumptions about the
    // depth value.
    //

    // Compute point where ray would have entered volume
    const vec3 rayStartEye = rayEndEye + startTime * rayDirectionEye;
    // If this point is behind the eye or in the sphere about the eye, ...
    if (rayStartEye.z > 0.0 || length(rayStartEye) < computeNearDistance()) {
        // ... use point on sphere (rayDirectionEye is unit length, so
        // scaling it by the near distance lands on the sphere).
        return normalize(rayDirectionEye) * computeNearDistance();
    }

    return rayStartEye;
}
498
// The depth at which we hit opaque geometry in eye space (negative
// value by OpenGL convention).
float
sampleZBuffer(vec2 fragcoord)
{
#ifdef HD_HAS_depthReadback
    // Sample the z-Buffer at the frag coordinate.
    // NOTE(review): the depth is read from the .z channel of the
    // readback texture - confirm this matches the readback format.
    const float bufferVal = texelFetch(HdGetSampler_depthReadback(),
                                       ivec2(fragcoord),
                                       /* lod = */ 0).z;
#else
    // Assume far-plane if we cannot sample the z-Buffer.
    const float bufferVal = 1.0;
#endif

    // Map [0,1] window-space depth to [-1,1] NDC, then to eye space.
    return NDCtoEyeZ(2.0 * bufferVal - 1.0);
}
516
// Compute how much length we need to ray march.
//
// The ray is encoded through its start point and direction. Its end will be
// determined from two things:
// - the eye space coordinates of this fragment which is part of the back-faces
//   of the volume.
// - the z-Value of the opaque geometry (since we want to stop raymarching
//   once the ray has hit opaque geometry)
float
computeRayLength(vec3 rayStartEye, vec3 rayDirectionEye, vec3 rayEndEye,
                 float opaqueZ)
{
    // The camera looks down the minus z-direction (OpenGL convention),
    // so of the volume's back-face and the opaque geometry, the one
    // with the larger z-value is closer and terminates the march.
    const float clippedEndZ = max(opaqueZ, rayEndEye.z);

    // Solve rayStartEye.z + t * rayDirectionEye.z == clippedEndZ for t.
    return (clippedEndZ - rayStartEye.z) / rayDirectionEye.z;
}
535
// Maximum of three values.
float max3(float a, float b, float c)
{
    return max(max(a, b), c);
}
540
// Computes the inverse of the scaling of an affine transform.
// (The implementation returns the scaling factor of the given matrix m
// itself; both call sites in main() pass an already-inverted matrix,
// which yields the inverse of the forward transform's scaling.)
// Approximately - since most transforms have uniform scaling
// and no shear, this is fine.
float
scaleOfMatrix(MAT4 m)
{
    // Take the maximum of the lengths of the images of the x, y
    // and z vector.
    //
    // A more general, coordinate independent implementation would take
    // the minimum singular value from the singular value decomposition.
    //
    const mat3 affinePart = mat3(m);
    return max3(length(affinePart[0]),
                length(affinePart[1]),
                length(affinePart[2]));
}
558
559
// Entry point: ray march through the volume from the (clipped) ray
// entry point up to this fragment on the volume's back-face,
// accumulating color and transmittance.
void main(void)
{
    // Eye space to local space, shared by the helpers above.
    instanceModelViewInverse =
        ApplyInstanceTransformInverse(HdGet_transformInverse()) *
        GetWorldToViewInverseMatrix();

    // Eye space to volume bounding box space.
    eyeToBBox = mat4(
        MAT4(HdGet_volumeBBoxInverseTransform()) * instanceModelViewInverse);

    ProcessSamplingTransforms(instanceModelViewInverse);

    // Half the volume's sample distance converted by the inverse of the
    // eye-to-local scale. NOTE(review): despite the name this appears to
    // be an eye-space quantity (resolvedStepSizeWorld below applies
    // viewScale on top) - confirm.
    const float halfWorldSampleDistance =
        0.5 * float(HdGet_sampleDistance())
            / scaleOfMatrix(instanceModelViewInverse);

    const float viewScale = scaleOfMatrix(GetWorldToViewInverseMatrix());

    // Resolve step sizes for the primary and lighting ray marches.
    resolvedStepSizeEye =
        HdGet_stepSize() * halfWorldSampleDistance;
    resolvedStepSizeWorld =
        viewScale * resolvedStepSizeEye;
    resolvedStepSizeEyeLighting =
        HdGet_stepSizeLighting() * halfWorldSampleDistance;
    resolvedStepSizeWorldLighting =
        viewScale * resolvedStepSizeEyeLighting;

    // Discard front faces - ray marching stops at fragment eye position
    // and starts at the intersection of ray with volume bounding box or
    // near plane.
    // (A negative determinant means a mirroring transform, which flips
    // which faces the rasterizer reports as front-facing; XOR with
    // gl_FrontFacing compensates.)
    if (gl_FrontFacing ^^ (determinant(instanceModelViewInverse) < 0.0)) {
        discard;
    }

    // camera facing.
    const vec3 Neye = vec3(0, 0, 1);

    // compute ray for ray marching
    const vec3 rayDirectionEye = computeRayDirectionEye(inData.Peye);
    const vec3 rayStartEye = computeRayStartEye(inData.Peye, rayDirectionEye);

    // Use z-value from depth buffer to compute length for ray marching
    const float opaqueZ = sampleZBuffer(gl_FragCoord.xy);
    const float rayLength = computeRayLength(
        rayStartEye, rayDirectionEye, inData.Peye, opaqueZ);

    const VolumeContribution volumeContribution =
        volumeIntegrator(rayStartEye, rayDirectionEye, rayLength);

    // Opacity is the fraction of background light absorbed by the volume.
    const float alpha = 1 - volumeContribution.transmittance;
    const vec4 color = ApplyColorOverrides(vec4(volumeContribution.color, alpha));

    const vec4 patchCoord = vec4(0.0);

    RenderOutput(vec4(volumeContribution.firstHitPeye, 1),
                 Neye, color, patchCoord);
}
615