//
// Copyright 2018 Pixar
//
// Licensed under the Apache License, Version 2.0 (the "Apache License")
// with the following modification; you may not use this file except in
// compliance with the Apache License and the following modification to it:
// Section 6. Trademarks. is deleted and replaced with:
//
// 6. Trademarks. This License does not grant permission to use the trade
//    names, trademarks, service marks, or product names of the Licensor
//    and its affiliates, except as required to comply with Section 4(c) of
//    the License and to reproduce the content of the NOTICE file.
//
// You may obtain a copy of the Apache License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the Apache License with the above modification is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the Apache License for the specific
// language governing permissions and limitations under the Apache License.
//
#include "pxr/pxr.h"

#include "pxr/imaging/hd/renderIndex.h"
#include "pxr/imaging/hd/engine.h"
#include "pxr/imaging/hd/rprimCollection.h"
#include "pxr/imaging/hd/task.h"
#include "pxr/imaging/hd/renderPass.h"
#include "pxr/imaging/hd/renderPassState.h"
#include "pxr/imaging/cameraUtil/screenWindowParameters.h"

#include "pxr/usd/usd/stage.h"
#include "pxr/usd/usd/prim.h"
#include "pxr/usd/sdr/registry.h"
#include "pxr/usd/usdLux/listAPI.h"
#include "pxr/usd/usdGeom/camera.h"
#include "pxr/usd/usdGeom/xformCache.h"
#include "pxr/usd/usdRender/product.h"
#include "pxr/usd/usdRender/settings.h"
#include "pxr/usd/usdRender/spec.h"
#include "pxr/usd/usdRender/var.h"
#include "pxr/usdImaging/usdImaging/delegate.h"

#include "pxr/base/gf/camera.h"
#include "pxr/base/tf/getenv.h"
#include "pxr/base/tf/setenv.h"
#include "pxr/base/tf/pathUtils.h"
#include "pxr/base/tf/registryManager.h"
#include "pxr/base/tf/stopwatch.h"
#include "pxr/base/trace/reporter.h"
#include "pxr/base/work/threadLimits.h"

#include "hdPrman/offlineContext.h"
#include "hdPrman/renderDelegate.h"
#include "hdPrman/rixStrings.h"

#include "RixShadingUtils.h"

#include <fstream>
#include <memory>
#include <stdio.h>
#include <string>

PXR_NAMESPACE_USING_DIRECTIVE

TF_DEFINE_PRIVATE_TOKENS(
    _tokens,

    // Collection Names
    (testCollection)
);
static const RtUString us_A("A");
static const RtUString us_default("default");
static const RtUString us_defaultColor("defaultColor");
static const RtUString us_density("density");
static const RtUString us_densityFloatPrimVar("densityFloatPrimVar");
static const RtUString us_diffuseColor("diffuseColor");
static const RtUString us_displayColor("displayColor");
static const RtUString us_lightA("lightA");
static const RtUString us_lightGroup("lightGroup");
static const RtUString us_main_cam("main_cam");
static const RtUString us_main_cam_projection("main_cam_projection");
static const RtUString us_PathTracer("PathTracer");
static const RtUString us_pv_color("pv_color");
static const RtUString us_pv_color_resultRGB("pv_color:resultRGB");
static const RtUString us_PxrDomeLight("PxrDomeLight");
static const RtUString us_PxrPathTracer("PxrPathTracer");
static const RtUString us_PxrPrimvar("PxrPrimvar");
static const RtUString us_PxrSurface("PxrSurface");
static const RtUString us_PxrVisualizer("PxrVisualizer");
static const RtUString us_PxrVolume("PxrVolume");
static const RtUString us_simpleTestSurface("simpleTestSurface");
static const RtUString us_simpleVolume("simpleVolume");
static const RtUString us_specularEdgeColor("specularEdgeColor");
static const RtUString us_specularFaceColor("specularFaceColor");
static const RtUString us_specularModelType("specularModelType");
static const RtUString us_style("style");
static const RtUString us_traceLightPaths("traceLightPaths");
static const RtUString us_varname("varname");
static const RtUString us_wireframe("wireframe");

static TfStopwatch timer_prmanRender;

// Simple Hydra task to Sync and Render the data provided to this test.
class Hd_DrawTask final : public HdTask
{
public:
    Hd_DrawTask(HdRenderPassSharedPtr const &renderPass,
                HdRenderPassStateSharedPtr const &renderPassState,
                TfTokenVector const &renderTags)
    : HdTask(SdfPath::EmptyPath())
    , _renderPass(renderPass)
    , _renderPassState(renderPassState)
    , _renderTags(renderTags)
    {
    }

    void Sync(HdSceneDelegate* delegate,
              HdTaskContext* ctx,
              HdDirtyBits* dirtyBits) override
    {
        _renderPass->Sync();
        *dirtyBits = HdChangeTracker::Clean;
    }

    void Prepare(HdTaskContext* ctx,
                 HdRenderIndex* renderIndex) override
    {
        _renderPassState->Prepare(renderIndex->GetResourceRegistry());
    }

    void Execute(HdTaskContext* ctx) override
    {
        timer_prmanRender.Start();
        _renderPass->Execute(_renderPassState, _renderTags);
        timer_prmanRender.Stop();
    }

    const TfTokenVector &GetRenderTags() const override
    {
        return _renderTags;
    }

private:
    HdRenderPassSharedPtr _renderPass;
    HdRenderPassStateSharedPtr _renderPassState;
    TfTokenVector _renderTags;
};

void
PrintUsage(const char* cmd, const char *err=nullptr)
{
    if (err) {
        fprintf(stderr, "%s", err);
    }
    fprintf(stderr, "Usage: %s INPUT.usd "
            "[--out OUTPUT] [--frame FRAME] [--freeCamProj CAM_PROJECTION] "
            "[--sceneCamPath CAM_PATH] [--settings RENDERSETTINGS_PATH] "
            "[--sceneCamAspect aspectRatio] "
            "[--visualize STYLE] [--perf PERF] [--trace TRACE]\n"
            "OUTPUT defaults to UsdRenderSettings if not specified.\n"
            "FRAME defaults to 0 if not specified.\n"
            "CAM_PROJECTION defaults to PxrPerspective if not specified.\n"
            "CAM_PATH defaults to empty path if not specified.\n"
            "RENDERSETTINGS_PATH defaults to empty path if not specified.\n"
            "STYLE indicates a PxrVisualizer style to use instead of "
            "      the default integrator\n"
            "PERF indicates a json file to record performance measurements\n"
            "TRACE indicates a text file to record trace measurements\n",
            cmd);
}

////////////////////////////////////////////////////////////////////////

// Helper to convert a dictionary of Hydra settings to Riley params.
static void
_ConvertSettings(VtDictionary const& settings, RtParamList& params)
{
    for (auto const& entry: settings) {
        // Strip "ri:" namespace from USD.
        // Note that some Renderman options have their own "Ri:"
        // prefix, unrelated to USD, which we leave intact.
        RtUString riName;
        if (TfStringStartsWith(entry.first, "ri:")) {
            riName = RtUString(entry.first.c_str()+3);
        } else {
            riName = RtUString(entry.first.c_str());
        }
        if (entry.second.IsHolding<int>()) {
            params.SetInteger(riName, entry.second.UncheckedGet<int>());
        } else if (entry.second.IsHolding<float>()) {
            params.SetFloat(riName, entry.second.UncheckedGet<float>());
        } else if (entry.second.IsHolding<std::string>()) {
            params.SetString(riName,
                RtUString(entry.second.UncheckedGet<std::string>().c_str()));
        } else if (entry.second.IsHolding<VtArray<int>>()) {
            auto const& array = entry.second.UncheckedGet<VtArray<int>>();
            params.SetIntegerArray(riName, &array[0], array.size());
        } else if (entry.second.IsHolding<VtArray<float>>()) {
            auto const& array = entry.second.UncheckedGet<VtArray<float>>();
            params.SetFloatArray(riName, &array[0], array.size());
        } else {
            TF_CODING_ERROR("Unimplemented setting %s of type %s\n",
                            entry.first.c_str(),
                            entry.second.GetTypeName().c_str());
        }
    }
}

int main(int argc, char *argv[])
{
    // Pixar studio config
    TfRegistryManager::GetInstance().SubscribeTo<HdPrman_Context>();

    ////////////////////////////////////////////////////////////////////////
    //
    // Parse args
    //
    if (argc < 2) {
        PrintUsage(argv[0]);
        return -1;
    }

    std::string inputFilename(argv[1]);
    std::string outputFilename;
    std::string perfOutput, traceOutput;

    int frameNum = 0;
    bool isOrthographic = false;
    std::string cameraProjection("PxrPerspective");
    static const std::string PxrOrthographic("PxrOrthographic");
    SdfPath sceneCamPath, renderSettingsPath;
    float sceneCamAspect = -1.0;
    std::string visualizerStyle;

    for (int i=2; i<argc-1; ++i) {
        if (std::string(argv[i]) == "--frame") {
            frameNum = atoi(argv[++i]);
        } else if (std::string(argv[i]) == "--sceneCamPath") {
            sceneCamPath = SdfPath(argv[++i]);
        } else if (std::string(argv[i]) == "--sceneCamAspect") {
            sceneCamAspect = atof(argv[++i]);
        } else if (std::string(argv[i]) == "--freeCamProj") {
            cameraProjection = argv[++i];
            isOrthographic = cameraProjection == PxrOrthographic;
        } else if (std::string(argv[i]) == "--out") {
            outputFilename = argv[++i];
        } else if (std::string(argv[i]) == "--settings") {
            renderSettingsPath = SdfPath(argv[++i]);
        } else if (std::string(argv[i]) == "--visualize") {
            visualizerStyle = argv[++i];
        } else if (std::string(argv[i]) == "--perf") {
            perfOutput = argv[++i];
        } else if (std::string(argv[i]) == "--trace") {
            traceOutput = argv[++i];
        }
    }

    if (!traceOutput.empty()) {
        TraceCollector::GetInstance().SetEnabled(true);
    }

    ////////////////////////////////////////////////////////////////////////
    //
    // USD setup
    //

    TfStopwatch timer_usdOpen;
    timer_usdOpen.Start();
    // Load USD file
    UsdStageRefPtr stage = UsdStage::Open(inputFilename);
    if (!stage) {
        PrintUsage(argv[0], "could not load input file");
        return -1;
    }
    timer_usdOpen.Stop();

    ////////////////////////////////////////////////////////////////////////
    // Render settings
    //

    UsdRenderSpec renderSpec;
    UsdRenderSettings settings;
    if (renderSettingsPath.IsEmpty()) {
        settings = UsdRenderSettings::GetStageRenderSettings(stage);
    } else {
        // If a path was specified, try to use the requested settings prim.
        settings = UsdRenderSettings(stage->GetPrimAtPath(renderSettingsPath));
    }
    if (settings) {
        // If we found USD settings, read those.
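        // Pass the "ri:" namespace so that RenderMan-specific settings
        // authored on the render prims are carried into extraSettings.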
        renderSpec = UsdRenderComputeSpec(settings, frameNum, {"ri:"});
    } else {
        // Otherwise, provide a built-in render specification.
        renderSpec = {
            /* products */
            {
                UsdRenderSpec::Product {
                    TfToken("raster"),
                    TfToken(outputFilename),
                    // camera path
                    SdfPath(),
                    false,
                    GfVec2i(512,512),
                    1.0f,
                    // aspectRatioConformPolicy
                    UsdRenderTokens->expandAperture,
                    // aperture size
                    GfVec2f(2.0, 2.0),
                    // data window
                    GfRange2f(GfVec2f(0.0f), GfVec2f(1.0f)),
                    // renderVarIndices
                    { 0, 1 },
                },
            },
            /* renderVars */
            {
                UsdRenderSpec::RenderVar {
                    SdfPath("/Render/Vars/Ci"), TfToken("color3f"),
                    TfToken("Ci")
                },
                UsdRenderSpec::RenderVar {
                    SdfPath("/Render/Vars/Alpha"), TfToken("float"),
                    TfToken("a")
                }
            }
        };
    }

    // Merge fallback settings specific to testHdPrman.
    VtDictionary defaultSettings;
    defaultSettings["ri:hider:jitter"] = 1;
    defaultSettings["ri:hider:minsamples"] = 32;
    defaultSettings["ri:hider:maxsamples"] = 64;
    defaultSettings["ri:trace:maxdepth"] = 10;
    defaultSettings["ri:Ri:PixelVariance"] = 0.01f;
    defaultSettings["ri:Ri:Shutter"] = VtArray<float>({0.0f, 0.5f});

    // Update product settings.
    for (auto &product: renderSpec.products) {
        // Command line overrides built-in paths.
        if (!sceneCamPath.IsEmpty()) {
            product.cameraPath = sceneCamPath;
        }
        if (sceneCamAspect > 0.0) {
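            // Override the aspect ratio by keeping the width fixed and
            // deriving the height and vertical aperture from it.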
            product.resolution[1] = (int)(product.resolution[0]/sceneCamAspect);
            product.apertureSize[1] = product.apertureSize[0]/sceneCamAspect;
        }
        VtDictionaryOver(&product.extraSettings, defaultSettings);
    }

    ////////////////////////////////////////////////////////////////////////
    //
    // Diagnostic aids
    //

    // These are meant to help keep an eye on how much available
    // concurrency is being used within an automated test environment.
    printf("Current concurrency limit:  %u\n", WorkGetConcurrencyLimit());
    printf("Physical concurrency limit: %u\n",
        WorkGetPhysicalConcurrencyLimit());

    ////////////////////////////////////////////////////////////////////////
    //
    // Render loop for products
    //

    // Since this test uses HdPrman directly (usually we would
    // use the plugin system), it does not use HdPrmanLoader. This means
    // we need to link the test against libPrman; however, our build system
    // inspects the ELF and removes any unused libraries.
    // By calling RixGetContext here, we make sure the symbols are
    // not stripped after compilation.
    RixContext *rix = RixGetContext();
    TF_UNUSED(rix);

    TfStopwatch timer_hydra;

    // XXX In the future, we should be able to produce multiple
    // products directly from one Riley session.
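    // Note: each product is taken by value so that the per-product
    // Ri:Shutter override below does not write back into renderSpec.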
    for (auto product: renderSpec.products) {
        printf("Rendering %s...\n", product.name.GetText());

        std::shared_ptr<HdPrman_OfflineContext> context =
            std::make_shared<HdPrman_OfflineContext>();

        // Find USD camera prim.
        UsdGeomCamera usdCam;
        if (!product.cameraPath.IsEmpty()) {
            UsdPrim prim = stage->GetPrimAtPath(product.cameraPath);
            if (prim && prim.IsA<UsdGeomCamera>()) {
                usdCam = UsdGeomCamera(prim);
            } else {
                TF_WARN("Invalid scene camera at %s. Falling back to the "
                        "free cam.\n", product.cameraPath.GetText());
            }
        }

        // Shutter settings from studio production.
        //
        // XXX Up to RenderMan 22, there is a global Ri:Shutter interval
        // that specifies the time when (all) camera shutters begin opening,
        // and when they (all) finish closing.  This is shutterInterval.
        // Then, per-camera, there is a shutterCurve, which uses normalized
        // (0..1) time relative to the global shutterInterval.  This forces
        // all the cameras to have the same shutter interval, so in the
        // future the shutterInterval will be moved to new attributes on
        // the cameras, and shutterCurve will exist as a UsdRi schema.
        //
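        // Layout of shutterCurve: entries [0] and [1] are the normalized
        // shutter open/close times; the remaining eight values describe
        // the shutter opening curve passed to Riley below.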
        float shutterCurve[10] = {0, 0, 0, 0, 0, 0, 0, 1, 0.3, 0};
        if (usdCam) {
            float interval[2] = {0.0, 0.5};
            // Read both shutter attributes (avoid short-circuiting so the
            // close time is fetched even when the open time is authored).
            const bool hasOpen =
                usdCam.GetShutterOpenAttr().Get(&interval[0], frameNum);
            const bool hasClose =
                usdCam.GetShutterCloseAttr().Get(&interval[1], frameNum);
            if (hasOpen || hasClose) {
                // XXX Scene-wide shutter will change to be per-camera;
                // see RMAN-14078
                product.extraSettings["ri:Ri:Shutter"] =
                    VtArray<float>({interval[0], interval[1]});
            }
        }

        // Use two samples (start and end) of a frame for now.
        std::vector<double> timeSampleOffsets = {0.0, 1.0};

        // Options
        RtParamList rileyOptions;
        {
            // Searchpaths (TEXTUREPATH, etc)
            HdPrman_UpdateSearchPathsFromEnvironment(rileyOptions);

            // Product extraSettings become Riley options.
            _ConvertSettings(product.extraSettings, rileyOptions);

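            // product.resolution is a GfVec2i, so its two ints can be
            // passed directly as an integer array.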
            rileyOptions.SetIntegerArray(RixStr.k_Ri_FormatResolution,
                (int*) &product.resolution, 2);
            rileyOptions.SetFloat(RixStr.k_Ri_FormatPixelAspectRatio,
                product.pixelAspectRatio);

            // Compute screen window from product aperture.
            float screenWindow[4] = { -1.0f, 1.0f, -1.0f, 1.0f };
            if (usdCam) {
                GfCamera gfCam = usdCam.GetCamera(frameNum);
                gfCam.SetHorizontalAperture(product.apertureSize[0]);
                gfCam.SetVerticalAperture(product.apertureSize[1]);
                CameraUtilScreenWindowParameters
                    cuswp(gfCam, GfCamera::FOVVertical);
                GfVec4d screenWindowd = cuswp.GetScreenWindow();
                screenWindow[0] = float(screenWindowd[0]);
                screenWindow[1] = float(screenWindowd[1]);
                screenWindow[2] = float(screenWindowd[2]);
                screenWindow[3] = float(screenWindowd[3]);
            }
            rileyOptions.SetFloatArray(RixStr.k_Ri_ScreenWindow, screenWindow, 4);

            // Crop/Data window.
            float cropWindow[4] = {
                product.dataWindowNDC.GetMin()[0], // xmin
                product.dataWindowNDC.GetMax()[0], // xmax
                product.dataWindowNDC.GetMin()[1], // ymin
                product.dataWindowNDC.GetMax()[1], // ymax
            };
            // RiCropWindow semantics differ from UsdRenderSettings
            // dataWindowNDC in float->int behavior, so compensate here.
            float dx = 0.5f / product.resolution[0];
            float dy = 0.5f / product.resolution[1];
            cropWindow[0] -= dx;
            cropWindow[1] -= dx;
            cropWindow[2] -= dy;
            cropWindow[3] -= dy;
            rileyOptions.SetFloatArray(RixStr.k_Ri_CropWindow, cropWindow, 4);
        }

        // Integrator
        // TODO Figure out how to represent this in UsdRi.
        // Perhaps a UsdRiIntegrator prim, plus an adapter
        // in UsdImaging that adds it as an sprim?
        riley::ShadingNode integratorNode;
        {
            integratorNode = riley::ShadingNode {
                riley::ShadingNode::Type::k_Integrator,
                us_PxrPathTracer,
                us_PathTracer,
                RtParamList() };

            // If PxrVisualizer was requested, configure it.
            if (!visualizerStyle.empty()) {
                integratorNode.name = us_PxrVisualizer;
                integratorNode.params.SetInteger(us_wireframe, 1);
                integratorNode.params.SetString(us_style,
                    RtUString(visualizerStyle.c_str()));
            }
        }

        // Camera
        riley::ShadingNode cameraNode;
        RtUString cameraName = us_main_cam;
        riley::Transform cameraXform;
        RtParamList cameraParams;
        {
            RtParamList projParams;

            // Shutter curve (this is relative to the Shutter interval above).
            cameraParams.SetFloat(RixStr.k_shutterOpenTime, shutterCurve[0]);
            cameraParams.SetFloat(RixStr.k_shutterCloseTime, shutterCurve[1]);
            cameraParams.SetFloatArray(RixStr.k_shutteropening,
                                       shutterCurve+2, 8);

            if (usdCam) {
                GfCamera gfCam = usdCam.GetCamera(frameNum);

                // Clip planes
                GfRange1f clipRange = gfCam.GetClippingRange();
                cameraParams.SetFloat(RixStr.k_nearClip, clipRange.GetMin());
                cameraParams.SetFloat(RixStr.k_farClip, clipRange.GetMax());

                // Projection
                projParams.SetFloat(
                    RixStr.k_fov, gfCam.GetFieldOfView(GfCamera::FOVVertical));
                // Convert parameters that are specified in tenths of a world
                // unit in USD to world units for Riley. See
                // UsdImagingCameraAdapter::UpdateForTime for reference.
                projParams.SetFloat(RixStr.k_focalLength,
                    gfCam.GetFocalLength() / 10.0f);
                projParams.SetFloat(RixStr.k_fStop, gfCam.GetFStop());
                projParams.SetFloat(RixStr.k_focalDistance,
                    gfCam.GetFocusDistance());
                cameraNode = riley::ShadingNode {
                    riley::ShadingNode::Type::k_Projection,
                    RtUString(cameraProjection.c_str()),
                    us_main_cam_projection,
                    projParams
                };

                // Transform
                std::vector<GfMatrix4d> xforms;
                xforms.reserve(timeSampleOffsets.size());
                // Get the xform at each time sample
                for (double const& offset : timeSampleOffsets) {
                    UsdGeomXformCache xfc(frameNum + offset);
                    xforms.emplace_back(xfc.GetLocalToWorldTransform(
                        usdCam.GetPrim()));
                }

                // USD camera looks down -Z (RHS), while
                // Prman camera looks down +Z (RHS)
                GfMatrix4d flipZ(1.0);
                flipZ[2][2] = -1.0;
                RtMatrix4x4 xf_rt_values[HDPRMAN_MAX_TIME_SAMPLES];
                float times[HDPRMAN_MAX_TIME_SAMPLES];
                size_t numNetSamples = std::min(xforms.size(),
                                            (size_t) HDPRMAN_MAX_TIME_SAMPLES);
                for (size_t i=0; i < numNetSamples; i++) {
                    xf_rt_values[i] =
                        HdPrman_GfMatrixToRtMatrix(flipZ * xforms[i]);
                    times[i] = timeSampleOffsets[i];
                }
                cameraXform = {(unsigned) numNetSamples, xf_rt_values, times};
            } else {
                // Projection
                projParams.SetFloat(RixStr.k_fov, 60.0f);
                cameraNode = riley::ShadingNode {
                    riley::ShadingNode::Type::k_Projection,
                    RtUString(cameraProjection.c_str()),
                    us_main_cam_projection,
                    projParams
                };

                // Transform
                float const zerotime = 0.0f;
                RtMatrix4x4 matrix = RixConstants::k_IdentityMatrix;

                // Orthographic camera:
                // XXX In HdPrman RenderPass we apply orthographic
                // projection as a scale onto the viewMatrix. This
                // is because we currently cannot update Renderman's
                // `ScreenWindow` once it is running.
                if (isOrthographic) {
                    matrix.Scale(10,10,10);
                }

                // Translate camera back a bit
                matrix.Translate(0.f, 0.f, -10.0f);
                cameraXform = { 1, &matrix, &zerotime };
            }
        }

        // Displays & Display Channels
        std::vector<HdPrman_OfflineContext::RenderOutput> renderOutputs;
        for (size_t index: product.renderVarIndices) {
            auto const& renderVar = renderSpec.renderVars[index];

            // Map source to Ri name.
            std::string name = renderVar.sourceName;
            if (renderVar.sourceType == UsdRenderTokens->lpe) {
                name = "lpe:" + name;
            }

            // Map dataType from token to Ri enum.
            // XXX use usd tokens?
            riley::RenderOutputType renderOutputType;
            if (renderVar.dataType == TfToken("color3f")) {
                renderOutputType = riley::RenderOutputType::k_Color;
            } else if (renderVar.dataType == TfToken("float")) {
                renderOutputType = riley::RenderOutputType::k_Float;
            } else if (renderVar.dataType == TfToken("int")) {
                renderOutputType = riley::RenderOutputType::k_Integer;
            } else {
                TF_RUNTIME_ERROR("Unimplemented renderVar dataType '%s'; "
                                 "skipping", renderVar.dataType.GetText());
                continue;
            }

            RtParamList params;
            // RenderVar extraSettings become Riley channel params.
            _ConvertSettings(renderVar.extraSettings, params);

            HdPrman_OfflineContext::RenderOutput ro;
            ro.name = RtUString(name.c_str());
            ro.type = renderOutputType;
            ro.params = params;
            renderOutputs.push_back(ro);
        }

        // Only allow "raster" for now.
        TF_VERIFY(product.type == TfToken("raster"));
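        // Riley expects a 3D extent; use a depth of 1 for a 2D raster image.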
        riley::Extent const format = {
            uint32_t(product.resolution[0]), uint32_t(product.resolution[1]), 1};

        // Fallback materials
        std::vector<riley::ShadingNode> materialNodes;
        {
            riley::ShadingNode pxrPrimvar_node;
            pxrPrimvar_node.type = riley::ShadingNode::Type::k_Pattern;
            pxrPrimvar_node.name = us_PxrPrimvar;
            pxrPrimvar_node.handle = us_pv_color;
            pxrPrimvar_node.params.SetString(us_varname, us_displayColor);
            // Note: this 0.5 gray is to match UsdImaging's fallback.
            pxrPrimvar_node.params.SetColor(us_defaultColor,
                                        RtColorRGB(0.5, 0.5, 0.5));
            pxrPrimvar_node.params.SetString(RixStr.k_type, RixStr.k_color);
            materialNodes.push_back(pxrPrimvar_node);

            riley::ShadingNode pxrSurface_node;
            pxrSurface_node.type = riley::ShadingNode::Type::k_Bxdf;
            pxrSurface_node.name = us_PxrSurface;
            pxrSurface_node.handle = us_simpleTestSurface;
            pxrSurface_node.params.SetColorReference(us_diffuseColor,
                                              us_pv_color_resultRGB);
            pxrSurface_node.params.SetInteger(us_specularModelType, 1);
            pxrSurface_node.params.SetColor(us_specularFaceColor,
                                        RtColorRGB(0.04f));
            pxrSurface_node.params.SetColor(us_specularEdgeColor,
                                        RtColorRGB(1.0f));
            materialNodes.push_back(pxrSurface_node);
        }

        // Fallback volume material
        std::vector<riley::ShadingNode> volumeMaterialNodes;
        {
            riley::ShadingNode pxrVolume_node;
            pxrVolume_node.type = riley::ShadingNode::Type::k_Bxdf;
            pxrVolume_node.name = us_PxrVolume;
            pxrVolume_node.handle = us_simpleVolume;
            pxrVolume_node.params.SetString(us_densityFloatPrimVar, us_density);
            volumeMaterialNodes.push_back(pxrVolume_node);
        }

        // Basic configuration
        context->Initialize(
                rileyOptions,
                integratorNode,
                cameraName,
                cameraNode,
                cameraXform,
                cameraParams,
                format,
                product.name,
                materialNodes,
                volumeMaterialNodes,
                renderOutputs);

        // Optionally add a fallback light if no lights are present in the
        // USD file.
        if (UsdLuxListAPI(stage->GetPseudoRoot()).ComputeLightList(
            UsdLuxListAPI::ComputeModeIgnoreCache).empty()) {
            // Light shader
            riley::ShadingNode lightNode {
                riley::ShadingNode::Type::k_Light, // type
                us_PxrDomeLight, // name
                us_lightA, // handle
                RtParamList()
            };
            lightNode.params.SetFloat(RixStr.k_intensity, 1.0f);
            lightNode.params.SetInteger(us_traceLightPaths, 1);
            lightNode.params.SetString(us_lightGroup, us_A);

            // Light instance
            float const zerotime = 0.0f;
            RtMatrix4x4 matrix = RixConstants::k_IdentityMatrix;
            riley::Transform xform = { 1, &matrix, &zerotime };
            RtParamList lightAttributes;
            lightAttributes.SetInteger(RixStr.k_visibility_camera, 0);
            lightAttributes.SetInteger(RixStr.k_visibility_indirect, 1);
            lightAttributes.SetInteger(RixStr.k_visibility_transmission, 1);
            lightAttributes.SetString(RixStr.k_grouping_membership,
                                      us_default);
            context->SetFallbackLight(lightNode, xform, lightAttributes);
        }

        // Hydra setup
        //
        // Assemble a Hydra pipeline to feed USD data to Riley.
        // Scene data flows left-to-right:
        //
        //     => UsdStage
        //       => UsdImagingDelegate (hydra "frontend")
        //         => HdRenderIndex
        //           => HdPrmanRenderDelegate (hydra "backend")
        //             => Riley
        //
        // Note that Hydra is flexible, which means it takes a few steps
        // to configure the details. This may seem out of proportion for a
        // simple usage example like this, but it reflects the range of
        // other scenarios Hydra is meant to handle.
        {
            // Set up frontend -> index -> backend
            // TODO We should configure the render delegate to request
            // the appropriate materialBindingPurposes from the USD scene.
            // We should also configure the scene to filter for the
            // requested includedPurposes.
            HdRenderSettingsMap settingsMap;
            HdPrmanRenderDelegate hdPrmanBackend(context, settingsMap);
            std::unique_ptr<HdRenderIndex> hdRenderIndex(
                HdRenderIndex::New(&hdPrmanBackend, HdDriverVector()));
            UsdImagingDelegate hdUsdFrontend(hdRenderIndex.get(),
                                             SdfPath::AbsoluteRootPath());
            hdUsdFrontend.Populate(stage->GetPseudoRoot());
            hdUsdFrontend.SetTime(frameNum);
            hdUsdFrontend.SetRefineLevelFallback(8); // max refinement
            if (!product.cameraPath.IsEmpty()) {
                hdUsdFrontend.SetCameraForSampling(product.cameraPath);
            }

            TfTokenVector renderTags(1, HdRenderTagTokens->geometry);
            // The collection of scene contents to render
            HdRprimCollection hdCollection(
                _tokens->testCollection,
                HdReprSelector(HdReprTokens->smoothHull));
            HdChangeTracker &tracker = hdRenderIndex->GetChangeTracker();
            tracker.AddCollection(_tokens->testCollection);

            // We don't need multi-pass rendering with a pathtracer,
            // so we use a single, simple render pass.
            HdRenderPassSharedPtr hdRenderPass =
                hdPrmanBackend.CreateRenderPass(hdRenderIndex.get(),
                                                hdCollection);
            HdRenderPassStateSharedPtr hdRenderPassState =
                hdPrmanBackend.CreateRenderPassState();

            // The task execution graph and engine configuration are also
            // simple.
            HdTaskSharedPtrVector tasks = {
                std::make_shared<Hd_DrawTask>(hdRenderPass,
                                              hdRenderPassState,
                                              renderTags)
            };
            HdEngine hdEngine;
            timer_hydra.Start();
            hdEngine.Execute(hdRenderIndex.get(), &tasks);
            timer_hydra.Stop();
        }
        printf("Rendered %s\n", product.name.GetText());
    }

    if (!traceOutput.empty()) {
        std::ofstream outFile(traceOutput);
        TraceCollector::GetInstance().SetEnabled(false);
        TraceReporter::GetGlobalReporter()->Report(outFile);
    }

    if (!perfOutput.empty()) {
        std::ofstream perfResults(perfOutput);
        perfResults << "{'profile': 'usdOpen',"
            << " 'metric': 'time',"
            << " 'value': " << timer_usdOpen.GetSeconds() << ","
            << " 'samples': 1"
            << " }\n";
        perfResults << "{'profile': 'hydraExecute',"
            << " 'metric': 'time',"
            << " 'value': " << timer_hydra.GetSeconds() << ","
            << " 'samples': 1"
            << " }\n";
        perfResults << "{'profile': 'prmanRender',"
            << " 'metric': 'time',"
            << " 'value': " << timer_prmanRender.GetSeconds() << ","
            << " 'samples': 1"
            << " }\n";
    }

    return 0;
}