1 /* -*-c++-*- OpenSceneGraph - Copyright (C) 1998-2006 Robert Osfield
2  *
3  * This library is open source and may be redistributed and/or modified under
4  * the terms of the OpenSceneGraph Public License (OSGPL) version 0.0 or
5  * (at your option) any later version.  The full license is in LICENSE file
6  * included with this distribution, and on the openscenegraph.org website.
7  *
8  * This library is distributed in the hope that it will be useful,
9  * but WITHOUT ANY WARRANTY; without even the implied warranty of
10  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
11  * OpenSceneGraph Public License for more details.
12 */
13 
14 #include <osgViewer/Renderer>
15 #include <osgViewer/View>
16 #include <osgViewer/GraphicsWindow>
17 
18 #include <osg/io_utils>
19 
20 #include <osg/TextureCubeMap>
21 #include <osg/TextureRectangle>
22 #include <osg/Texture1D>
23 #include <osg/TexMat>
24 #include <osg/Stencil>
25 #include <osg/PolygonStipple>
26 #include <osg/ValueObject>
27 
28 #include <osgUtil/Optimizer>
29 #include <osgUtil/ShaderGen>
30 #include <osgUtil/IntersectionVisitor>
31 
32 #include <osgDB/ReadFile>
33 #include <osgDB/WriteFile>
34 
35 // view configurations.
36 #include <osgViewer/config/AcrossAllScreens>
37 #include <osgViewer/config/SingleWindow>
38 #include <osgViewer/config/SingleScreen>
39 #include <osgViewer/config/SphericalDisplay>
40 #include <osgViewer/config/PanoramicSphericalDisplay>
41 #include <osgViewer/config/WoWVxDisplay>
42 
43 
44 #include <iterator>
45 
46 using namespace osgViewer;
47 
getActiveDisplaySetting(osgViewer::View & view) const48 osg::DisplaySettings* ViewConfig::getActiveDisplaySetting(osgViewer::View& view) const
49 {
50     return view.getDisplaySettings()  ? view.getDisplaySettings() : osg::DisplaySettings::instance().get();
51 }
52 
53 class CollectedCoordinateSystemNodesVisitor : public osg::NodeVisitor
54 {
55 public:
56 
CollectedCoordinateSystemNodesVisitor()57     CollectedCoordinateSystemNodesVisitor():
58         NodeVisitor(osg::NodeVisitor::TRAVERSE_ACTIVE_CHILDREN) {}
59 
60     META_NodeVisitor("osgViewer","CollectedCoordinateSystemNodesVisitor")
61 
apply(osg::Node & node)62     virtual void apply(osg::Node& node)
63     {
64         traverse(node);
65     }
66 
apply(osg::CoordinateSystemNode & node)67     virtual void apply(osg::CoordinateSystemNode& node)
68     {
69         if (_pathToCoordinateSystemNode.empty())
70         {
71             OSG_DEBUG<<"Found CoordinateSystemNode node"<<std::endl;
72             OSG_DEBUG<<"     CoordinateSystem = "<<node.getCoordinateSystem()<<std::endl;
73             _pathToCoordinateSystemNode = getNodePath();
74         }
75         else
76         {
77             OSG_DEBUG<<"Found additional CoordinateSystemNode node, but ignoring"<<std::endl;
78             OSG_DEBUG<<"     CoordinateSystem = "<<node.getCoordinateSystem()<<std::endl;
79         }
80         traverse(node);
81     }
82 
83     osg::NodePath _pathToCoordinateSystemNode;
84 };
85 
86 
87 /** callback class to use to allow matrix manipulators to query the application for the local coordinate frame.*/
88 class ViewerCoordinateFrameCallback : public osgGA::CameraManipulator::CoordinateFrameCallback
89 {
90 public:
91 
ViewerCoordinateFrameCallback(osgViewer::View * view)92     ViewerCoordinateFrameCallback(osgViewer::View* view):
93         _view(view) {}
94 
getCoordinateFrame(const osg::Vec3d & position) const95     virtual osg::CoordinateFrame getCoordinateFrame(const osg::Vec3d& position) const
96     {
97         OSG_DEBUG<<"getCoordinateFrame("<<position<<")"<<std::endl;
98 
99         osg::NodePath tmpPath = _view->getCoordinateSystemNodePath();
100 
101         if (!tmpPath.empty())
102         {
103             osg::Matrixd coordinateFrame;
104 
105             osg::CoordinateSystemNode* csn = dynamic_cast<osg::CoordinateSystemNode*>(tmpPath.back());
106             if (csn)
107             {
108                 osg::Vec3 local_position = position*osg::computeWorldToLocal(tmpPath);
109 
110                 // get the coordinate frame in world coords.
111                 coordinateFrame = csn->computeLocalCoordinateFrame(local_position)* osg::computeLocalToWorld(tmpPath);
112 
113                 // keep the position of the coordinate frame to reapply after rescale.
114                 osg::Vec3d pos = coordinateFrame.getTrans();
115 
116                 // compensate for any scaling, so that the coordinate frame is a unit size
117                 osg::Vec3d x(1.0,0.0,0.0);
118                 osg::Vec3d y(0.0,1.0,0.0);
119                 osg::Vec3d z(0.0,0.0,1.0);
120                 x = osg::Matrixd::transform3x3(x,coordinateFrame);
121                 y = osg::Matrixd::transform3x3(y,coordinateFrame);
122                 z = osg::Matrixd::transform3x3(z,coordinateFrame);
123                 coordinateFrame.preMultScale(osg::Vec3d(1.0/x.length(),1.0/y.length(),1.0/z.length()));
124 
125                 // reapply the position.
126                 coordinateFrame.setTrans(pos);
127 
128                 OSG_DEBUG<<"csn->computeLocalCoordinateFrame(position)* osg::computeLocalToWorld(tmpPath)"<<coordinateFrame<<std::endl;
129 
130             }
131             else
132             {
133                 OSG_DEBUG<<"osg::computeLocalToWorld(tmpPath)"<<std::endl;
134                 coordinateFrame =  osg::computeLocalToWorld(tmpPath);
135             }
136             return coordinateFrame;
137         }
138         else
139         {
140             OSG_DEBUG<<"   no coordinate system found, using default orientation"<<std::endl;
141             return osg::Matrixd::translate(position);
142         }
143     }
144 
145 protected:
~ViewerCoordinateFrameCallback()146     virtual ~ViewerCoordinateFrameCallback() {}
147 
148     osg::observer_ptr<osgViewer::View> _view;
149 };
150 
151 
View()152 View::View():
153     _fusionDistanceMode(osgUtil::SceneView::PROPORTIONAL_TO_SCREEN_DISTANCE),
154     _fusionDistanceValue(1.0f)
155 {
156     // OSG_NOTICE<<"Constructing osgViewer::View"<<std::endl;
157 
158     _startTick = 0;
159 
160     _frameStamp = new osg::FrameStamp;
161     _frameStamp->setFrameNumber(0);
162     _frameStamp->setReferenceTime(0);
163     _frameStamp->setSimulationTime(0);
164 
165     _scene = new Scene;
166 
167     // make sure View is safe to reference multi-threaded.
168     setThreadSafeRefUnref(true);
169 
170     // need to attach a Renderer to the master camera which has been default constructed
171     getCamera()->setRenderer(createRenderer(getCamera()));
172 
173     setEventQueue(new osgGA::EventQueue);
174 
175     setStats(new osg::Stats("View"));
176 }
177 
178 
View(const osgViewer::View & view,const osg::CopyOp & copyop)179 View::View(const osgViewer::View& view, const osg::CopyOp& copyop):
180     osg::Object(true),
181     osg::View(view,copyop),
182     osgGA::GUIActionAdapter(),
183     _startTick(0),
184     _fusionDistanceMode(view._fusionDistanceMode),
185     _fusionDistanceValue(view._fusionDistanceValue)
186 {
187     _scene = new Scene;
188 
189     // need to attach a Renderer to the master camera which has been default constructed
190     getCamera()->setRenderer(createRenderer(getCamera()));
191 
192     setEventQueue(new osgGA::EventQueue);
193 
194     setStats(new osg::Stats("View"));
195 }
196 
~View()197 View::~View()
198 {
199     OSG_INFO<<"Destructing osgViewer::View"<<std::endl;
200 }
201 
take(osg::View & rhs)202 void View::take(osg::View& rhs)
203 {
204     osg::View::take(rhs);
205 
206 #if 1
207     osgViewer::View* rhs_osgViewer = dynamic_cast<osgViewer::View*>(&rhs);
208     if (rhs_osgViewer)
209     {
210 
211         // copy across rhs
212         _startTick = rhs_osgViewer->_startTick;
213         _frameStamp = rhs_osgViewer->_frameStamp;
214 
215         if (rhs_osgViewer->getSceneData())
216         {
217             _scene = rhs_osgViewer->_scene;
218         }
219 
220         if (rhs_osgViewer->_cameraManipulator.valid())
221         {
222             _cameraManipulator = rhs_osgViewer->_cameraManipulator;
223         }
224 
225         _eventHandlers.insert(_eventHandlers.end(), rhs_osgViewer->_eventHandlers.begin(), rhs_osgViewer->_eventHandlers.end());
226 
227         _coordinateSystemNodePath = rhs_osgViewer->_coordinateSystemNodePath;
228 
229         _displaySettings = rhs_osgViewer->_displaySettings;
230         _fusionDistanceMode = rhs_osgViewer->_fusionDistanceMode;
231         _fusionDistanceValue = rhs_osgViewer->_fusionDistanceValue;
232 
233 
234         // clear rhs
235         rhs_osgViewer->_frameStamp = 0;
236         rhs_osgViewer->_scene = 0;
237         rhs_osgViewer->_cameraManipulator = 0;
238         rhs_osgViewer->_eventHandlers.clear();
239 
240         rhs_osgViewer->_coordinateSystemNodePath.clearNodePath();
241 
242         rhs_osgViewer->_displaySettings = 0;
243     }
244 #endif
245     computeActiveCoordinateSystemNodePath();
246     assignSceneDataToCameras();
247 }
248 
createRenderer(osg::Camera * camera)249 osg::GraphicsOperation* View::createRenderer(osg::Camera* camera)
250 {
251     Renderer* render = new Renderer(camera);
252     camera->setStats(new osg::Stats("Camera"));
253     return render;
254 }
255 
256 
init()257 void View::init()
258 {
259     OSG_INFO<<"View::init()"<<std::endl;
260 
261     osg::ref_ptr<osgGA::GUIEventAdapter> initEvent = _eventQueue->createEvent();
262     initEvent->setEventType(osgGA::GUIEventAdapter::FRAME);
263 
264     if (_cameraManipulator.valid())
265     {
266         _cameraManipulator->init(*initEvent, *this);
267     }
268 }
269 
setStartTick(osg::Timer_t tick)270 void View::setStartTick(osg::Timer_t tick)
271 {
272     _startTick = tick;
273 
274     for(Devices::iterator eitr = _eventSources.begin();
275         eitr != _eventSources.end();
276         ++eitr)
277     {
278         (*eitr)->getEventQueue()->setStartTick(_startTick);
279     }
280 }
281 
setSceneData(osg::Node * node)282 void View::setSceneData(osg::Node* node)
283 {
284     if (node==_scene->getSceneData()) return;
285 
286     osg::ref_ptr<Scene> scene = Scene::getScene(node);
287 
288     if (scene)
289     {
290         OSG_INFO<<"View::setSceneData() Sharing scene "<<scene.get()<<std::endl;
291         _scene = scene;
292     }
293     else
294     {
295         if (_scene->referenceCount()!=1)
296         {
297             // we are not the only reference to the Scene so we cannot reuse it.
298             _scene = new Scene;
299             OSG_INFO<<"View::setSceneData() Allocating new scene"<<_scene.get()<<std::endl;
300         }
301         else
302         {
303             OSG_INFO<<"View::setSceneData() Reusing existing scene"<<_scene.get()<<std::endl;
304         }
305 
306         _scene->setSceneData(node);
307     }
308 
309     if (getSceneData())
310     {
311         #if defined(OSG_GLES2_AVAILABLE)
312             osgUtil::ShaderGenVisitor sgv;
313             getSceneData()->getOrCreateStateSet();
314             getSceneData()->accept(sgv);
315         #endif
316 
317         // now make sure the scene graph is set up with the correct DataVariance to protect the dynamic elements of
318         // the scene graph from being run in parallel.
319         osgUtil::Optimizer::StaticObjectDetectionVisitor sodv;
320         getSceneData()->accept(sodv);
321 
322         // make sure that existing scene graph objects are allocated with thread safe ref/unref
323         if (getViewerBase() &&
324             getViewerBase()->getThreadingModel()!=ViewerBase::SingleThreaded)
325         {
326             getSceneData()->setThreadSafeRefUnref(true);
327         }
328 
329         // update the scene graph so that it has enough GL object buffer memory for the graphics contexts that will be using it.
330         getSceneData()->resizeGLObjectBuffers(osg::DisplaySettings::instance()->getMaxNumberOfGraphicsContexts());
331     }
332 
333     computeActiveCoordinateSystemNodePath();
334 
335     assignSceneDataToCameras();
336 }
337 
setDatabasePager(osgDB::DatabasePager * dp)338 void View::setDatabasePager(osgDB::DatabasePager* dp)
339 {
340     _scene->setDatabasePager(dp);
341 }
342 
getDatabasePager()343 osgDB::DatabasePager* View::getDatabasePager()
344 {
345     return _scene->getDatabasePager();
346 }
347 
getDatabasePager() const348 const osgDB::DatabasePager* View::getDatabasePager() const
349 {
350     return _scene->getDatabasePager();
351 }
352 
353 
setImagePager(osgDB::ImagePager * dp)354 void View::setImagePager(osgDB::ImagePager* dp)
355 {
356     _scene->setImagePager(dp);
357 }
358 
getImagePager()359 osgDB::ImagePager* View::getImagePager()
360 {
361     return _scene->getImagePager();
362 }
363 
getImagePager() const364 const osgDB::ImagePager* View::getImagePager() const
365 {
366     return _scene->getImagePager();
367 }
368 
369 
setCameraManipulator(osgGA::CameraManipulator * manipulator,bool resetPosition)370 void View::setCameraManipulator(osgGA::CameraManipulator* manipulator, bool resetPosition)
371 {
372     _cameraManipulator = manipulator;
373 
374     if (_cameraManipulator.valid())
375     {
376         _cameraManipulator->setCoordinateFrameCallback(new ViewerCoordinateFrameCallback(this));
377 
378         if (getSceneData()) _cameraManipulator->setNode(getSceneData());
379 
380         if (resetPosition)
381         {
382             osg::ref_ptr<osgGA::GUIEventAdapter> dummyEvent = _eventQueue->createEvent();
383             _cameraManipulator->home(*dummyEvent, *this);
384         }
385     }
386 }
387 
home()388 void View::home()
389 {
390     if (_cameraManipulator.valid())
391     {
392         osg::ref_ptr<osgGA::GUIEventAdapter> dummyEvent = _eventQueue->createEvent();
393         _cameraManipulator->home(*dummyEvent, *this);
394     }
395 }
396 
397 
addEventHandler(osgGA::EventHandler * eventHandler)398 void View::addEventHandler(osgGA::EventHandler* eventHandler)
399 {
400     EventHandlers::iterator itr = std::find(_eventHandlers.begin(), _eventHandlers.end(), eventHandler);
401     if (itr == _eventHandlers.end())
402     {
403         _eventHandlers.push_back(eventHandler);
404     }
405 }
406 
removeEventHandler(osgGA::EventHandler * eventHandler)407 void View::removeEventHandler(osgGA::EventHandler* eventHandler)
408 {
409     EventHandlers::iterator itr = std::find(_eventHandlers.begin(), _eventHandlers.end(), eventHandler);
410     if (itr != _eventHandlers.end())
411     {
412         _eventHandlers.erase(itr);
413     }
414 }
415 
setCoordinateSystemNodePath(const osg::NodePath & nodePath)416 void View::setCoordinateSystemNodePath(const osg::NodePath& nodePath)
417 {
418     _coordinateSystemNodePath.setNodePath(nodePath);
419 }
420 
getCoordinateSystemNodePath() const421 osg::NodePath View::getCoordinateSystemNodePath() const
422 {
423     osg::NodePath nodePath;
424     _coordinateSystemNodePath.getNodePath(nodePath);
425     return nodePath;
426 }
427 
computeActiveCoordinateSystemNodePath()428 void View::computeActiveCoordinateSystemNodePath()
429 {
430     // now search for CoordinateSystemNode's for which we want to track.
431     osg::Node* subgraph = getSceneData();
432 
433     if (subgraph)
434     {
435 
436         CollectedCoordinateSystemNodesVisitor ccsnv;
437         subgraph->accept(ccsnv);
438 
439         if (!ccsnv._pathToCoordinateSystemNode.empty())
440         {
441            setCoordinateSystemNodePath(ccsnv._pathToCoordinateSystemNode);
442            return;
443         }
444     }
445 
446     // otherwise no node path found so reset to empty.
447     setCoordinateSystemNodePath(osg::NodePath());
448 }
449 
450 
apply(ViewConfig * config)451 void View::apply(ViewConfig* config)
452 {
453     if (config)
454     {
455         OSG_INFO<<"Applying osgViewer::ViewConfig : "<<config->className()<<std::endl;
456         config->configure(*this);
457     }
458     _lastAppliedViewConfig = config;
459 }
460 
setUpViewAcrossAllScreens()461 void View::setUpViewAcrossAllScreens()
462 {
463     apply(new osgViewer::AcrossAllScreens());
464 }
465 
setUpViewInWindow(int x,int y,int width,int height,unsigned int screenNum)466 void View::setUpViewInWindow(int x, int y, int width, int height, unsigned int screenNum)
467 {
468     apply(new osgViewer::SingleWindow(x, y, width, height, screenNum));
469 }
470 
setUpViewOnSingleScreen(unsigned int screenNum)471 void View::setUpViewOnSingleScreen(unsigned int screenNum)
472 {
473     apply(new osgViewer::SingleScreen(screenNum));
474 }
475 
setUpViewFor3DSphericalDisplay(double radius,double collar,unsigned int screenNum,osg::Image * intensityMap,const osg::Matrixd & projectorMatrix)476 void View::setUpViewFor3DSphericalDisplay(double radius, double collar, unsigned int screenNum, osg::Image* intensityMap, const osg::Matrixd& projectorMatrix)
477 {
478     apply(new osgViewer::SphericalDisplay(radius, collar, screenNum, intensityMap, projectorMatrix));
479 }
480 
setUpViewForPanoramicSphericalDisplay(double radius,double collar,unsigned int screenNum,osg::Image * intensityMap,const osg::Matrixd & projectorMatrix)481 void View::setUpViewForPanoramicSphericalDisplay(double radius, double collar, unsigned int screenNum, osg::Image* intensityMap, const osg::Matrixd& projectorMatrix)
482 {
483     apply(new osgViewer::PanoramicSphericalDisplay(radius, collar, screenNum, intensityMap, projectorMatrix));
484 }
485 
setUpViewForWoWVxDisplay(unsigned int screenNum,unsigned char wow_content,unsigned char wow_factor,unsigned char wow_offset,float wow_disparity_Zd,float wow_disparity_vz,float wow_disparity_M,float wow_disparity_C)486 void View::setUpViewForWoWVxDisplay(unsigned int screenNum, unsigned char wow_content, unsigned char wow_factor, unsigned char wow_offset, float wow_disparity_Zd, float wow_disparity_vz, float wow_disparity_M, float wow_disparity_C)
487 {
488     apply(new osgViewer::WoWVxDisplay(screenNum, wow_content, wow_factor, wow_offset, wow_disparity_Zd,wow_disparity_vz, wow_disparity_M, wow_disparity_C));
489 }
490 
DepthPartitionSettings(DepthMode mode)491 DepthPartitionSettings::DepthPartitionSettings(DepthMode mode):
492     _mode(mode),
493     _zNear(1.0), _zMid(5.0), _zFar(1000.0)
494 {}
495 
getDepthRange(osg::View & view,unsigned int partition,double & zNear,double & zFar)496 bool DepthPartitionSettings::getDepthRange(osg::View& view, unsigned int partition, double& zNear, double& zFar)
497 {
498     switch(_mode)
499     {
500         case(FIXED_RANGE):
501         {
502             if (partition==0)
503             {
504                 zNear = _zNear;
505                 zFar = _zMid;
506                 return true;
507             }
508             else if (partition==1)
509             {
510                 zNear = _zMid;
511                 zFar = _zFar;
512                 return true;
513             }
514             return false;
515         }
516         case(BOUNDING_VOLUME):
517         {
518             osgViewer::View* view_withSceneData = dynamic_cast<osgViewer::View*>(&view);
519             const osg::Node* node = view_withSceneData ? view_withSceneData->getSceneData() : 0;
520             if (!node) return false;
521 
522             const osg::Camera* masterCamera = view.getCamera();
523             if (!masterCamera) return false;
524 
525             osg::BoundingSphere bs = node->getBound();
526             const osg::Matrixd& viewMatrix = masterCamera->getViewMatrix();
527             //osg::Matrixd& projectionMatrix = masterCamera->getProjectionMatrix();
528 
529             osg::Vec3d lookVectorInWorldCoords = osg::Matrixd::transform3x3(viewMatrix,osg::Vec3d(0.0,0.0,-1.0));
530             lookVectorInWorldCoords.normalize();
531 
532             osg::Vec3d nearPointInWorldCoords = bs.center() - lookVectorInWorldCoords*bs.radius();
533             osg::Vec3d farPointInWorldCoords = bs.center() + lookVectorInWorldCoords*bs.radius();
534 
535             osg::Vec3d nearPointInEyeCoords = nearPointInWorldCoords * viewMatrix;
536             osg::Vec3d farPointInEyeCoords = farPointInWorldCoords * viewMatrix;
537 
538 #if 0
539             OSG_NOTICE<<std::endl;
540             OSG_NOTICE<<"viewMatrix = "<<viewMatrix<<std::endl;
541             OSG_NOTICE<<"lookVectorInWorldCoords = "<<lookVectorInWorldCoords<<std::endl;
542             OSG_NOTICE<<"nearPointInWorldCoords = "<<nearPointInWorldCoords<<std::endl;
543             OSG_NOTICE<<"farPointInWorldCoords = "<<farPointInWorldCoords<<std::endl;
544             OSG_NOTICE<<"nearPointInEyeCoords = "<<nearPointInEyeCoords<<std::endl;
545             OSG_NOTICE<<"farPointInEyeCoords = "<<farPointInEyeCoords<<std::endl;
546 #endif
547             double minZNearRatio = 0.00001;
548 
549 
550             if (masterCamera->getDisplaySettings())
551             {
552                 OSG_NOTICE<<"Has display settings"<<std::endl;
553             }
554 
555             double scene_zNear = -nearPointInEyeCoords.z();
556             double scene_zFar = -farPointInEyeCoords.z();
557             if (scene_zNear<=0.0) scene_zNear = minZNearRatio * scene_zFar;
558 
559             double scene_zMid = sqrt(scene_zFar*scene_zNear);
560 
561 #if 0
562             OSG_NOTICE<<"scene_zNear = "<<scene_zNear<<std::endl;
563             OSG_NOTICE<<"scene_zMid = "<<scene_zMid<<std::endl;
564             OSG_NOTICE<<"scene_zFar = "<<scene_zFar<<std::endl;
565 #endif
566             if (partition==0)
567             {
568                 zNear = scene_zNear;
569                 zFar = scene_zMid;
570                 return true;
571             }
572             else if (partition==1)
573             {
574                 zNear = scene_zMid;
575                 zFar = scene_zFar;
576                 return true;
577             }
578 
579             return false;
580         }
581         default: return false;
582     }
583 }
584 
585 namespace osgDepthPartition {
586 
587 struct MyUpdateSlaveCallback : public osg::View::Slave::UpdateSlaveCallback
588 {
MyUpdateSlaveCallbackosgDepthPartition::MyUpdateSlaveCallback589     MyUpdateSlaveCallback(DepthPartitionSettings* dps, unsigned int partition):_dps(dps), _partition(partition) {}
590 
updateSlaveosgDepthPartition::MyUpdateSlaveCallback591     virtual void updateSlave(osg::View& view, osg::View::Slave& slave)
592     {
593         slave.updateSlaveImplementation(view);
594 
595         if (!_dps) return;
596 
597         osg::Camera* camera = slave._camera.get();
598 
599         double computed_zNear;
600         double computed_zFar;
601         if (!_dps->getDepthRange(view, _partition, computed_zNear, computed_zFar))
602         {
603             OSG_NOTICE<<"Switching off Camera "<<camera<<std::endl;
604             camera->setNodeMask(0x0);
605             return;
606         }
607         else
608         {
609             camera->setNodeMask(0xffffff);
610         }
611 
612         if (camera->getProjectionMatrix()(0,3)==0.0 &&
613             camera->getProjectionMatrix()(1,3)==0.0 &&
614             camera->getProjectionMatrix()(2,3)==0.0)
615         {
616             double left, right, bottom, top, zNear, zFar;
617             camera->getProjectionMatrixAsOrtho(left, right, bottom, top, zNear, zFar);
618             camera->setProjectionMatrixAsOrtho(left, right, bottom, top, computed_zNear, computed_zFar);
619         }
620         else
621         {
622             double left, right, bottom, top, zNear, zFar;
623             camera->getProjectionMatrixAsFrustum(left, right, bottom, top, zNear, zFar);
624 
625             double nr = computed_zNear / zNear;
626             camera->setProjectionMatrixAsFrustum(left * nr, right * nr, bottom * nr, top * nr, computed_zNear, computed_zFar);
627         }
628     }
629 
630     osg::ref_ptr<DepthPartitionSettings> _dps;
631     unsigned int _partition;
632 };
633 
634 
635 typedef std::list< osg::ref_ptr<osg::Camera> > Cameras;
636 
getActiveCameras(osg::View & view)637 Cameras getActiveCameras(osg::View& view)
638 {
639     Cameras activeCameras;
640 
641     if (view.getCamera() && view.getCamera()->getGraphicsContext())
642     {
643         activeCameras.push_back(view.getCamera());
644     }
645 
646     for(unsigned int i=0; i<view.getNumSlaves(); ++i)
647     {
648         osg::View::Slave& slave = view.getSlave(i);
649         if (slave._camera.valid() && slave._camera->getGraphicsContext())
650         {
651             activeCameras.push_back(slave._camera.get());
652         }
653     }
654     return activeCameras;
655 }
656 
657 }
658 
setUpDepthPartitionForCamera(osg::Camera * cameraToPartition,DepthPartitionSettings * incoming_dps)659 bool View::setUpDepthPartitionForCamera(osg::Camera* cameraToPartition, DepthPartitionSettings* incoming_dps)
660 {
661     osg::ref_ptr<osg::GraphicsContext> context = cameraToPartition->getGraphicsContext();
662     if (!context) return false;
663 
664     osg::ref_ptr<osg::Viewport> viewport = cameraToPartition->getViewport();
665     if (!viewport) return false;
666 
667     osg::ref_ptr<DepthPartitionSettings> dps = incoming_dps;
668     if (!dps) dps = new DepthPartitionSettings;
669 
670     bool useMastersSceneData = true;
671     osg::Matrixd projectionOffset;
672     osg::Matrixd viewOffset;
673 
674     if (getCamera()==cameraToPartition)
675     {
676         // replace main camera with depth partition cameras
677         OSG_INFO<<"View::setUpDepthPartitionForCamera(..) Replacing main Camera"<<std::endl;
678     }
679     else
680     {
681         unsigned int i = findSlaveIndexForCamera(cameraToPartition);
682         if (i>=getNumSlaves()) return false;
683 
684         osg::View::Slave& slave = getSlave(i);
685 
686         useMastersSceneData = slave._useMastersSceneData;
687         projectionOffset = slave._projectionOffset;
688         viewOffset = slave._viewOffset;
689 
690         OSG_NOTICE<<"View::setUpDepthPartitionForCamera(..) Replacing slave Camera"<<i<<std::endl;
691         removeSlave(i);
692     }
693 
694     cameraToPartition->setGraphicsContext(0);
695     cameraToPartition->setViewport(0);
696 
697     // far camera
698     {
699         osg::ref_ptr<osg::Camera> camera = new osg::Camera;
700         camera->setGraphicsContext(context.get());
701         camera->setViewport(viewport.get());
702 
703         camera->setDrawBuffer(cameraToPartition->getDrawBuffer());
704         camera->setReadBuffer(cameraToPartition->getReadBuffer());
705 
706         camera->setComputeNearFarMode(osg::Camera::DO_NOT_COMPUTE_NEAR_FAR);
707         camera->setCullingMode(osg::Camera::ENABLE_ALL_CULLING);
708 
709         addSlave(camera.get());
710 
711         osg::View::Slave& slave = getSlave(getNumSlaves()-1);
712 
713         slave._useMastersSceneData = useMastersSceneData;
714         slave._projectionOffset = projectionOffset;
715         slave._viewOffset = viewOffset;
716         slave._updateSlaveCallback =  new osgDepthPartition::MyUpdateSlaveCallback(dps.get(), 1);
717     }
718 
719     // near camera
720     {
721         osg::ref_ptr<osg::Camera> camera = new osg::Camera;
722         camera->setGraphicsContext(context.get());
723         camera->setViewport(viewport.get());
724 
725         camera->setDrawBuffer(cameraToPartition->getDrawBuffer());
726         camera->setReadBuffer(cameraToPartition->getReadBuffer());
727 
728         camera->setComputeNearFarMode(osg::Camera::DO_NOT_COMPUTE_NEAR_FAR);
729         camera->setCullingMode(osg::Camera::ENABLE_ALL_CULLING);
730         camera->setClearMask(GL_DEPTH_BUFFER_BIT);
731 
732         addSlave(camera.get());
733 
734         osg::View::Slave& slave = getSlave(getNumSlaves()-1);
735         slave._useMastersSceneData = useMastersSceneData;
736         slave._projectionOffset = projectionOffset;
737         slave._viewOffset = viewOffset;
738         slave._updateSlaveCallback =  new osgDepthPartition::MyUpdateSlaveCallback(dps.get(), 0);
739     }
740 
741     return true;
742 }
743 
744 
745 
setUpDepthPartition(DepthPartitionSettings * dsp)746 bool View::setUpDepthPartition(DepthPartitionSettings* dsp)
747 {
748     osgDepthPartition::Cameras originalCameras = osgDepthPartition::getActiveCameras(*this);
749     if (originalCameras.empty())
750     {
751         OSG_INFO<<"osgView::View::setUpDepthPartition(,..), no windows assigned, doing view.setUpViewAcrossAllScreens()"<<std::endl;
752         setUpViewAcrossAllScreens();
753 
754         originalCameras = osgDepthPartition::getActiveCameras(*this);
755         if (originalCameras.empty())
756         {
757             OSG_NOTICE<<"osgView::View::setUpDepthPartition(View,..) Unable to set up windows for viewer."<<std::endl;
758             return false;
759         }
760     }
761 
762     bool threadsWereRunning = getViewerBase()->areThreadsRunning();
763     if (threadsWereRunning) getViewerBase()->stopThreading();
764 
765     for(osgDepthPartition::Cameras::iterator itr = originalCameras.begin();
766         itr != originalCameras.end();
767         ++itr)
768     {
769         setUpDepthPartitionForCamera(itr->get(), dsp);
770     }
771 
772     if (threadsWereRunning) getViewerBase()->startThreading();
773 
774     return true;
775 }
776 
777 
assignSceneDataToCameras()778 void View::assignSceneDataToCameras()
779 {
780     // OSG_NOTICE<<"View::assignSceneDataToCameras()"<<std::endl;
781 
782     if (_scene.valid() && _scene->getDatabasePager() && getViewerBase())
783     {
784         _scene->getDatabasePager()->setIncrementalCompileOperation(getViewerBase()->getIncrementalCompileOperation());
785     }
786 
787     osg::Node* sceneData = _scene.valid() ? _scene->getSceneData() : 0;
788 
789     if (_cameraManipulator.valid())
790     {
791         _cameraManipulator->setNode(sceneData);
792 
793         osg::ref_ptr<osgGA::GUIEventAdapter> dummyEvent = _eventQueue->createEvent();
794 
795         _cameraManipulator->home(*dummyEvent, *this);
796     }
797 
798     if (_camera.valid())
799     {
800         _camera->removeChildren(0,_camera->getNumChildren());
801         if (sceneData) _camera->addChild(sceneData);
802 
803         Renderer* renderer = dynamic_cast<Renderer*>(_camera->getRenderer());
804         if (renderer) renderer->setCompileOnNextDraw(true);
805 
806     }
807 
808     for(unsigned i=0; i<getNumSlaves(); ++i)
809     {
810         Slave& slave = getSlave(i);
811         if (slave._camera.valid() && slave._useMastersSceneData)
812         {
813             slave._camera->removeChildren(0,slave._camera->getNumChildren());
814             if (sceneData) slave._camera->addChild(sceneData);
815 
816             Renderer* renderer = dynamic_cast<Renderer*>(slave._camera->getRenderer());
817             if (renderer) renderer->setCompileOnNextDraw(true);
818         }
819     }
820 }
821 
requestRedraw()822 void View::requestRedraw()
823 {
824     if (getViewerBase())
825     {
826         getViewerBase()->_requestRedraw = true;
827     }
828     else
829     {
830         OSG_INFO<<"View::requestRedraw(), No viewer base has been assigned yet."<<std::endl;
831     }
832 }
833 
requestContinuousUpdate(bool flag)834 void View::requestContinuousUpdate(bool flag)
835 {
836     if (getViewerBase())
837     {
838         getViewerBase()->_requestContinousUpdate = flag;
839     }
840     else
841     {
842         OSG_INFO<<"View::requestContinuousUpdate(), No viewer base has been assigned yet."<<std::endl;
843     }
844 }
845 
requestWarpPointer(float x,float y)846 void View::requestWarpPointer(float x,float y)
847 {
848     OSG_INFO<<"View::requestWarpPointer("<<x<<","<<y<<")"<<std::endl;
849 
850     float local_x, local_y;
851     const osg::Camera* camera = getCameraContainingPosition(x, y, local_x, local_y);
852     if (camera)
853     {
854         const osgViewer::GraphicsWindow* gw = dynamic_cast<const osgViewer::GraphicsWindow*>(camera->getGraphicsContext());
855         if (gw)
856         {
857             getEventQueue()->mouseWarped(x,y);
858             if (gw->getEventQueue()->getCurrentEventState()->getMouseYOrientation()==osgGA::GUIEventAdapter::Y_INCREASING_DOWNWARDS)
859             {
860                 local_y = gw->getTraits()->height - local_y;
861             }
862             const_cast<osgViewer::GraphicsWindow*>(gw)->getEventQueue()->mouseWarped(local_x,local_y);
863             const_cast<osgViewer::GraphicsWindow*>(gw)->requestWarpPointer(local_x, local_y);
864         }
865     }
866     else
867     {
868         OSG_INFO<<"View::requestWarpPointer failed no camera containing pointer"<<std::endl;
869     }
870 }
871 
containsCamera(const osg::Camera * camera) const872 bool View::containsCamera(const osg::Camera* camera) const
873 {
874     if (_camera == camera) return true;
875 
876     for(unsigned i=0; i<getNumSlaves(); ++i)
877     {
878         const Slave& slave = getSlave(i);
879         if (slave._camera == camera) return true;
880     }
881     return false;
882 }
883 
884 
getCameraContainingPosition(float x,float y,float & local_x,float & local_y) const885 const osg::Camera* View::getCameraContainingPosition(float x, float y, float& local_x, float& local_y) const
886 {
887     const osgGA::GUIEventAdapter* eventState = getEventQueue()->getCurrentEventState();
888     const osgViewer::GraphicsWindow* gw = dynamic_cast<const osgViewer::GraphicsWindow*>(eventState->getGraphicsContext());
889     bool view_invert_y = eventState->getMouseYOrientation()==osgGA::GUIEventAdapter::Y_INCREASING_DOWNWARDS;
890 
891     // OSG_NOTICE<<"getCameraContainingPosition("<<x<<", "<<y<<") view_invert_y = "<<view_invert_y<<", Xmin() = "<<eventState->getXmin()<<", Xmax() = "<<eventState->getXmax()<<", Ymin() = "<<eventState->getYmin()<<", Ymax() = "<<eventState->getYmax()<<std::endl;
892 
893     double epsilon = 0.5;
894 
895 
896     // if master camera has graphics context and eventState context matches then assume coordinates refer
897     // to master camera
898     bool masterActive = (_camera->getGraphicsContext()!=0 && _camera->getViewport());
899     bool eventStateMatchesMaster = (gw!=0) ? _camera->getGraphicsContext()==gw : false;
900 
901     if (masterActive && eventStateMatchesMaster)
902     {
903         // OSG_NOTICE<<"Event state matches master"<<std::endl;
904         const osg::Viewport* viewport = _camera->getViewport();
905 
906         // rescale mouse x,y first to 0 to 1 range
907         double new_x = (x-eventState->getXmin())/(eventState->getXmax()-eventState->getXmin());
908         double new_y = (y-eventState->getYmin())/(eventState->getYmax()-eventState->getYmin());
909 
910         // flip y if required
911         if (view_invert_y) new_y = 1.0f-new_y;
912 
913         // rescale mouse x, y to window dimensions so we can check against master Camera's viewport
914         new_x *= static_cast<double>(_camera->getGraphicsContext()->getTraits()->width);
915         new_y *= static_cast<double>(_camera->getGraphicsContext()->getTraits()->height);
916 
917         if (new_x >= (viewport->x()-epsilon) && new_y >= (viewport->y()-epsilon) &&
918             new_x < (viewport->x()+viewport->width()-1.0+epsilon) && new_y <= (viewport->y()+viewport->height()-1.0+epsilon) )
919         {
920             local_x = new_x;
921             local_y = new_y;
922 
923             //OSG_NOTICE<<"Returning master camera"<<std::endl;
924 
925             return _camera.get();
926         }
927         else
928         {
929             // OSG_NOTICE<<"master camera viewport not matched."<<std::endl;
930         }
931     }
932 
933     osg::Matrix masterCameraVPW = getCamera()->getViewMatrix() * getCamera()->getProjectionMatrix();
934 
935     // convert to non dimensional
936     x = (x - eventState->getXmin()) * 2.0 / (eventState->getXmax()-eventState->getXmin()) - 1.0;
937     y = (y - eventState->getYmin())* 2.0 / (eventState->getYmax()-eventState->getYmin()) - 1.0;
938 
939     if (view_invert_y) y = - y;
940 
941     for(int i=getNumSlaves()-1; i>=0; --i)
942     {
943         const Slave& slave = getSlave(i);
944         if (slave._camera.valid() &&
945             slave._camera->getAllowEventFocus() &&
946             slave._camera->getRenderTargetImplementation()==osg::Camera::FRAME_BUFFER)
947         {
948             OSG_INFO<<"Testing slave camera "<<slave._camera->getName()<<std::endl;
949 
950             const osg::Camera* camera = slave._camera.get();
951             const osg::Viewport* viewport = camera ? camera->getViewport() : 0;
952 
953             osg::Matrix localCameraVPW = camera->getViewMatrix() * camera->getProjectionMatrix();
954             if (viewport) localCameraVPW *= viewport->computeWindowMatrix();
955 
956             osg::Matrix matrix( osg::Matrix::inverse(masterCameraVPW) * localCameraVPW );
957 
958             osg::Vec3d new_coord = osg::Vec3d(x,y,0.0) * matrix;
959 
960             //OSG_NOTICE<<"  x="<<x<<" y="<<y<<std::endl;;
961             //OSG_NOTICE<<"  eventState->getXmin()="<<eventState->getXmin()<<" eventState->getXmax()="<<eventState->getXmax()<<std::endl;;
962             //OSG_NOTICE<<"  new_coord "<<new_coord<<std::endl;;
963 
964             if (viewport &&
965                 new_coord.x() >= (viewport->x()-epsilon) && new_coord.y() >= (viewport->y()-epsilon) &&
966                 new_coord.x() < (viewport->x()+viewport->width()-1.0+epsilon) && new_coord.y() <= (viewport->y()+viewport->height()-1.0+epsilon) )
967             {
968                 // OSG_NOTICE<<"  in viewport "<<std::endl;;
969 
970                 local_x = new_coord.x();
971                 local_y = new_coord.y();
972 
973                 return camera;
974             }
975             else
976             {
977                 // OSG_NOTICE<<"  not in viewport "<<viewport->x()<<" "<<(viewport->x()+viewport->width())<<std::endl;;
978             }
979 
980         }
981     }
982 
983     local_x = x;
984     local_y = y;
985 
986     return 0;
987 }
988 
computeIntersections(float x,float y,osgUtil::LineSegmentIntersector::Intersections & intersections,osg::Node::NodeMask traversalMask)989 bool View::computeIntersections(float x,float y, osgUtil::LineSegmentIntersector::Intersections& intersections, osg::Node::NodeMask traversalMask)
990 {
991     float local_x, local_y;
992     const osg::Camera* camera = getCameraContainingPosition(x, y, local_x, local_y);
993 
994     OSG_INFO<<"computeIntersections("<<x<<", "<<y<<") local_x="<<local_x<<", local_y="<<local_y<<std::endl;
995 
996     if (camera) return computeIntersections(camera, (camera->getViewport()==0)?osgUtil::Intersector::PROJECTION : osgUtil::Intersector::WINDOW, local_x, local_y, intersections, traversalMask);
997     else return false;
998 }
999 
computeIntersections(float x,float y,const osg::NodePath & nodePath,osgUtil::LineSegmentIntersector::Intersections & intersections,osg::Node::NodeMask traversalMask)1000 bool View::computeIntersections(float x,float y, const osg::NodePath& nodePath, osgUtil::LineSegmentIntersector::Intersections& intersections, osg::Node::NodeMask traversalMask)
1001 {
1002     float local_x, local_y;
1003     const osg::Camera* camera = getCameraContainingPosition(x, y, local_x, local_y);
1004 
1005     OSG_INFO<<"computeIntersections("<<x<<", "<<y<<") local_x="<<local_x<<", local_y="<<local_y<<std::endl;
1006 
1007     if (camera) return computeIntersections(camera, (camera->getViewport()==0)?osgUtil::Intersector::PROJECTION : osgUtil::Intersector::WINDOW, local_x, local_y, nodePath, intersections, traversalMask);
1008     else return false;
1009 }
1010 
computeIntersections(const osgGA::GUIEventAdapter & ea,osgUtil::LineSegmentIntersector::Intersections & intersections,osg::Node::NodeMask traversalMask)1011 bool View::computeIntersections(const osgGA::GUIEventAdapter& ea, osgUtil::LineSegmentIntersector::Intersections& intersections,osg::Node::NodeMask traversalMask)
1012 {
1013 #if 1
1014     if (ea.getNumPointerData()>=1)
1015     {
1016         const osgGA::PointerData* pd = ea.getPointerData(ea.getNumPointerData()-1);
1017         const osg::Camera* camera = dynamic_cast<const osg::Camera*>(pd->object.get());
1018         if (camera)
1019         {
1020             return computeIntersections(camera, osgUtil::Intersector::PROJECTION, pd->getXnormalized(), pd->getYnormalized(), intersections, traversalMask);
1021         }
1022     }
1023 #endif
1024     return computeIntersections(ea.getX(), ea.getY(), intersections, traversalMask);
1025 }
1026 
computeIntersections(const osgGA::GUIEventAdapter & ea,const osg::NodePath & nodePath,osgUtil::LineSegmentIntersector::Intersections & intersections,osg::Node::NodeMask traversalMask)1027 bool View::computeIntersections(const osgGA::GUIEventAdapter& ea, const osg::NodePath& nodePath, osgUtil::LineSegmentIntersector::Intersections& intersections,osg::Node::NodeMask traversalMask)
1028 {
1029 #if 1
1030     if (ea.getNumPointerData()>=1)
1031     {
1032         const osgGA::PointerData* pd = ea.getPointerData(ea.getNumPointerData()-1);
1033         const osg::Camera* camera = dynamic_cast<const osg::Camera*>(pd->object.get());
1034         if (camera)
1035         {
1036             return computeIntersections(camera, osgUtil::Intersector::PROJECTION, pd->getXnormalized(), pd->getYnormalized(), nodePath, intersections, traversalMask);
1037         }
1038     }
1039 #endif
1040     return computeIntersections(ea.getX(), ea.getY(), nodePath, intersections, traversalMask);
1041 }
1042 
computeIntersections(const osg::Camera * camera,osgUtil::Intersector::CoordinateFrame cf,float x,float y,osgUtil::LineSegmentIntersector::Intersections & intersections,osg::Node::NodeMask traversalMask)1043 bool View::computeIntersections(const osg::Camera* camera, osgUtil::Intersector::CoordinateFrame cf, float x,float y, osgUtil::LineSegmentIntersector::Intersections& intersections, osg::Node::NodeMask traversalMask)
1044 {
1045     if (!camera) return false;
1046 
1047     osg::ref_ptr< osgUtil::LineSegmentIntersector > picker = new osgUtil::LineSegmentIntersector(cf, x, y);
1048     osgUtil::IntersectionVisitor iv(picker.get());
1049     iv.setTraversalMask(traversalMask);
1050 
1051     const_cast<osg::Camera*>(camera)->accept(iv);
1052 
1053     if (picker->containsIntersections())
1054     {
1055         intersections = picker->getIntersections();
1056         return true;
1057     }
1058     else
1059     {
1060         intersections.clear();
1061         return false;
1062     }
1063 }
1064 
computeIntersections(const osg::Camera * camera,osgUtil::Intersector::CoordinateFrame cf,float x,float y,const osg::NodePath & nodePath,osgUtil::LineSegmentIntersector::Intersections & intersections,osg::Node::NodeMask traversalMask)1065 bool View::computeIntersections(const osg::Camera* camera, osgUtil::Intersector::CoordinateFrame cf, float x,float y, const osg::NodePath& nodePath, osgUtil::LineSegmentIntersector::Intersections& intersections,osg::Node::NodeMask traversalMask)
1066 {
1067     if (!camera || nodePath.empty()) return false;
1068 
1069     osg::Matrixd matrix;
1070     if (nodePath.size()>1)
1071     {
1072         osg::NodePath prunedNodePath(nodePath.begin(),nodePath.end()-1);
1073         matrix = osg::computeLocalToWorld(prunedNodePath);
1074     }
1075 
1076     matrix.postMult(camera->getViewMatrix());
1077     matrix.postMult(camera->getProjectionMatrix());
1078 
1079     double zNear = -1.0;
1080     double zFar = 1.0;
1081     if (cf==osgUtil::Intersector::WINDOW && camera->getViewport())
1082     {
1083         matrix.postMult(camera->getViewport()->computeWindowMatrix());
1084         zNear = 0.0;
1085         zFar = 1.0;
1086     }
1087 
1088     osg::Matrixd inverse;
1089     inverse.invert(matrix);
1090 
1091     osg::Vec3d startVertex = osg::Vec3d(x,y,zNear) * inverse;
1092     osg::Vec3d endVertex = osg::Vec3d(x,y,zFar) * inverse;
1093 
1094     osg::ref_ptr< osgUtil::LineSegmentIntersector > picker = new osgUtil::LineSegmentIntersector(osgUtil::Intersector::MODEL, startVertex, endVertex);
1095 
1096     osgUtil::IntersectionVisitor iv(picker.get());
1097     iv.setTraversalMask(traversalMask);
1098     nodePath.back()->accept(iv);
1099 
1100     if (picker->containsIntersections())
1101     {
1102         intersections = picker->getIntersections();
1103         return true;
1104     }
1105     else
1106     {
1107         intersections.clear();
1108         return false;
1109     }
1110 }
1111 
addDevice(osgGA::Device * eventSource)1112 void View::addDevice(osgGA::Device* eventSource)
1113 {
1114     Devices::iterator itr = std::find( _eventSources.begin(), _eventSources.end(), eventSource );
1115     if (itr==_eventSources.end())
1116     {
1117         _eventSources.push_back(eventSource);
1118     }
1119 
1120     if (eventSource)
1121         eventSource->getEventQueue()->setStartTick(getStartTick());
1122 }
1123 
removeDevice(osgGA::Device * eventSource)1124 void View::removeDevice(osgGA::Device* eventSource)
1125 {
1126     Devices::iterator itr = std::find( _eventSources.begin(), _eventSources.end(), eventSource );
1127     if (itr!=_eventSources.end())
1128     {
1129         _eventSources.erase(itr);
1130     }
1131 }
1132 
1133 ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
1134 //
1135 // Methods that support Stereo and Keystone correction.
1136 //
createDistortionTexture(int width,int height)1137 osg::Texture* View::createDistortionTexture(int width, int height)
1138 {
1139     osg::ref_ptr<osg::TextureRectangle> texture = new osg::TextureRectangle;
1140 
1141     texture->setTextureSize(width, height);
1142     texture->setInternalFormat(GL_RGB);
1143     texture->setFilter(osg::Texture::MIN_FILTER,osg::Texture::LINEAR);
1144     texture->setFilter(osg::Texture::MAG_FILTER,osg::Texture::LINEAR);
1145     texture->setWrap(osg::Texture::WRAP_S,osg::Texture::CLAMP_TO_EDGE);
1146     texture->setWrap(osg::Texture::WRAP_T,osg::Texture::CLAMP_TO_EDGE);
1147 
1148     return texture.release();
1149 }
1150 
assignRenderToTextureCamera(osg::GraphicsContext * gc,int width,int height,osg::Texture * texture)1151 osg::Camera* View::assignRenderToTextureCamera(osg::GraphicsContext* gc, int width, int height, osg::Texture* texture)
1152 {
1153     osg::ref_ptr<osg::Camera> camera = new osg::Camera;
1154     camera->setName("Render to texture camera");
1155     camera->setGraphicsContext(gc);
1156     camera->setViewport(new osg::Viewport(0,0,width, height));
1157     camera->setDrawBuffer(GL_FRONT);
1158     camera->setReadBuffer(GL_FRONT);
1159     camera->setAllowEventFocus(false);
1160     camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
1161 
1162     // attach the texture and use it as the color buffer.
1163     camera->attach(osg::Camera::COLOR_BUFFER, texture);
1164 
1165     addSlave(camera.get(), osg::Matrixd(), osg::Matrixd());
1166 
1167     return camera.release();
1168 }
1169 
assignKeystoneDistortionCamera(osg::DisplaySettings * ds,osg::GraphicsContext * gc,int x,int y,int width,int height,GLenum buffer,osg::Texture * texture,Keystone * keystone)1170 osg::Camera* View::assignKeystoneDistortionCamera(osg::DisplaySettings* ds, osg::GraphicsContext* gc, int x, int y, int width, int height, GLenum buffer, osg::Texture* texture, Keystone* keystone)
1171 {
1172     double screenDistance = ds->getScreenDistance();
1173     double screenWidth = ds->getScreenWidth();
1174     double screenHeight = ds->getScreenHeight();
1175     double fovy = osg::RadiansToDegrees(2.0*atan2(screenHeight/2.0,screenDistance));
1176     double aspectRatio = screenWidth/screenHeight;
1177 
1178     osg::Geode* geode = keystone->createKeystoneDistortionMesh();
1179 
1180     // new we need to add the texture to the mesh, we do so by creating a
1181     // StateSet to contain the Texture StateAttribute.
1182     osg::StateSet* stateset = geode->getOrCreateStateSet();
1183     stateset->setTextureAttributeAndModes(0, texture,osg::StateAttribute::ON);
1184     stateset->setMode(GL_LIGHTING,osg::StateAttribute::OFF);
1185 
1186     osg::TexMat* texmat = new osg::TexMat;
1187     texmat->setScaleByTextureRectangleSize(true);
1188     stateset->setTextureAttributeAndModes(0, texmat, osg::StateAttribute::ON);
1189 
1190     osg::ref_ptr<osg::Camera> camera = new osg::Camera;
1191     camera->setGraphicsContext(gc);
1192     camera->setClearMask(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT );
1193     camera->setClearColor( osg::Vec4(0.0,0.0,0.0,1.0) );
1194     camera->setViewport(new osg::Viewport(x, y, width, height));
1195     camera->setDrawBuffer(buffer);
1196     camera->setReadBuffer(buffer);
1197     camera->setReferenceFrame(osg::Camera::ABSOLUTE_RF);
1198     camera->setInheritanceMask(camera->getInheritanceMask() & ~osg::CullSettings::CLEAR_COLOR & ~osg::CullSettings::COMPUTE_NEAR_FAR_MODE);
1199     //camera->setComputeNearFarMode(osg::CullSettings::DO_NOT_COMPUTE_NEAR_FAR);
1200 
1201     camera->setViewMatrix(osg::Matrix::identity());
1202     camera->setProjectionMatrixAsPerspective(fovy, aspectRatio, 0.1, 1000.0);
1203 
1204     // add subgraph to render
1205     camera->addChild(geode);
1206 
1207     camera->addChild(keystone->createGrid());
1208 
1209     camera->setName("DistortionCorrectionCamera");
1210 
1211     // camera->addEventCallback(new KeystoneHandler(keystone));
1212 
1213     addSlave(camera.get(), osg::Matrixd(), osg::Matrixd(), false);
1214 
1215     return camera.release();
1216 }
1217 
1218 
1219 
updateSlave(osg::View & view,osg::View::Slave & slave)1220 void View::StereoSlaveCallback::updateSlave(osg::View& view, osg::View::Slave& slave)
1221 {
1222     osg::Camera* camera = slave._camera.get();
1223     osgViewer::View* viewer_view = dynamic_cast<osgViewer::View*>(&view);
1224 
1225     if (_ds.valid() && camera && viewer_view)
1226     {
1227         // inherit any settings applied to the master Camera.
1228         camera->inheritCullSettings(*(view.getCamera()), camera->getInheritanceMask());
1229 
1230         if (_eyeScale<0.0)
1231         {
1232             camera->setCullMask(camera->getCullMaskLeft());
1233         }
1234         else
1235         {
1236             camera->setCullMask(camera->getCullMaskRight());
1237         }
1238 
1239         // set projection matrix
1240         if (_eyeScale<0.0)
1241         {
1242             camera->setProjectionMatrix(_ds->computeLeftEyeProjectionImplementation(view.getCamera()->getProjectionMatrix()));
1243         }
1244         else
1245         {
1246             camera->setProjectionMatrix(_ds->computeRightEyeProjectionImplementation(view.getCamera()->getProjectionMatrix()));
1247         }
1248 
1249         double sd = _ds->getScreenDistance();
1250         double fusionDistance = sd;
1251         switch(viewer_view->getFusionDistanceMode())
1252         {
1253             case(osgUtil::SceneView::USE_FUSION_DISTANCE_VALUE):
1254                 fusionDistance = viewer_view->getFusionDistanceValue();
1255                 break;
1256             case(osgUtil::SceneView::PROPORTIONAL_TO_SCREEN_DISTANCE):
1257                 fusionDistance *= viewer_view->getFusionDistanceValue();
1258                 break;
1259         }
1260         double eyeScale = osg::absolute(_eyeScale) * (fusionDistance/sd);
1261 
1262         if (_eyeScale<0.0)
1263         {
1264             camera->setViewMatrix(_ds->computeLeftEyeViewImplementation(view.getCamera()->getViewMatrix(), eyeScale));
1265         }
1266         else
1267         {
1268             camera->setViewMatrix(_ds->computeRightEyeViewImplementation(view.getCamera()->getViewMatrix(), eyeScale));
1269         }
1270     }
1271     else
1272     {
1273         slave.updateSlaveImplementation(view);
1274     }
1275 }
1276 
assignStereoCamera(osg::DisplaySettings * ds,osg::GraphicsContext * gc,int x,int y,int width,int height,GLenum buffer,double eyeScale)1277 osg::Camera* View::assignStereoCamera(osg::DisplaySettings* ds, osg::GraphicsContext* gc, int x, int y, int width, int height, GLenum buffer, double eyeScale)
1278 {
1279     osg::ref_ptr<osg::Camera> camera = new osg::Camera;
1280 
1281     camera->setGraphicsContext(gc);
1282     camera->setViewport(new osg::Viewport(x,y, width, height));
1283     camera->setDrawBuffer(buffer);
1284     camera->setReadBuffer(buffer);
1285 
1286     // add this slave camera to the viewer, with a shift left of the projection matrix
1287     addSlave(camera.get(), osg::Matrixd::identity(), osg::Matrixd::identity());
1288 
1289     // assign update callback to maintain the correct view and projection matrices
1290     osg::View::Slave& slave = getSlave(getNumSlaves()-1);
1291     slave._updateSlaveCallback =  new StereoSlaveCallback(ds, eyeScale);
1292 
1293     return camera.release();
1294 }
1295 
1296 static const GLubyte patternVertEven[] = {
1297     0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1298     0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1299     0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1300     0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1301     0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1302     0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1303     0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1304     0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1305     0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1306     0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1307     0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1308     0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1309     0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1310     0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1311     0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
1312     0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55};
1313 
1314 
1315 static const GLubyte patternHorzEven[] = {
1316     0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1317     0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1318     0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1319     0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1320     0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1321     0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1322     0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1323     0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1324     0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1325     0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1326     0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1327     0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1328     0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1329     0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1330     0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,
1331     0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00};
1332 
1333 // 32 x 32 bit array every row is a horizontal line of pixels
1334 //  and the (bitwise) columns a vertical line
1335 //  The following is a checkerboard pattern
1336 static const GLubyte patternCheckerboard[] = {
1337     0x55, 0x55, 0x55, 0x55,
1338     0xAA, 0xAA, 0xAA, 0xAA,
1339     0x55, 0x55, 0x55, 0x55,
1340     0xAA, 0xAA, 0xAA, 0xAA,
1341     0x55, 0x55, 0x55, 0x55,
1342     0xAA, 0xAA, 0xAA, 0xAA,
1343     0x55, 0x55, 0x55, 0x55,
1344     0xAA, 0xAA, 0xAA, 0xAA,
1345     0x55, 0x55, 0x55, 0x55,
1346     0xAA, 0xAA, 0xAA, 0xAA,
1347     0x55, 0x55, 0x55, 0x55,
1348     0xAA, 0xAA, 0xAA, 0xAA,
1349     0x55, 0x55, 0x55, 0x55,
1350     0xAA, 0xAA, 0xAA, 0xAA,
1351     0x55, 0x55, 0x55, 0x55,
1352     0xAA, 0xAA, 0xAA, 0xAA,
1353     0x55, 0x55, 0x55, 0x55,
1354     0xAA, 0xAA, 0xAA, 0xAA,
1355     0x55, 0x55, 0x55, 0x55,
1356     0xAA, 0xAA, 0xAA, 0xAA,
1357     0x55, 0x55, 0x55, 0x55,
1358     0xAA, 0xAA, 0xAA, 0xAA,
1359     0x55, 0x55, 0x55, 0x55,
1360     0xAA, 0xAA, 0xAA, 0xAA,
1361     0x55, 0x55, 0x55, 0x55,
1362     0xAA, 0xAA, 0xAA, 0xAA,
1363     0x55, 0x55, 0x55, 0x55,
1364     0xAA, 0xAA, 0xAA, 0xAA,
1365     0x55, 0x55, 0x55, 0x55,
1366     0xAA, 0xAA, 0xAA, 0xAA,
1367     0x55, 0x55, 0x55, 0x55,
1368     0xAA, 0xAA, 0xAA, 0xAA};
1369 
1370 
assignStereoOrKeystoneToCamera(osg::Camera * camera,osg::DisplaySettings * ds)1371 void View::assignStereoOrKeystoneToCamera(osg::Camera* camera, osg::DisplaySettings* ds)
1372 {
1373     if (!camera || camera->getGraphicsContext()==0) return;
1374     if (!ds->getStereo() && !ds->getKeystoneHint()) return;
1375 
1376     ds->setUseSceneViewForStereoHint(false);
1377 
1378     typedef std::vector< osg::ref_ptr<Keystone> > Keystones;
1379     Keystones keystones;
1380     if (ds->getKeystoneHint() && !ds->getKeystones().empty())
1381     {
1382         for(osg::DisplaySettings::Objects::iterator itr = ds->getKeystones().begin();
1383             itr != ds->getKeystones().end();
1384             ++itr)
1385         {
1386             Keystone* keystone = dynamic_cast<Keystone*>(itr->get());
1387             if (keystone) keystones.push_back(keystone);
1388         }
1389     }
1390 
1391     if (ds->getKeystoneHint())
1392     {
1393         while(keystones.size()<2) keystones.push_back(new Keystone);
1394     }
1395 
1396 
1397     // set up view's main camera
1398     {
1399         double height = ds->getScreenHeight();
1400         double width = ds->getScreenWidth();
1401         double distance = ds->getScreenDistance();
1402         double vfov = osg::RadiansToDegrees(atan2(height/2.0f,distance)*2.0);
1403 
1404         camera->setProjectionMatrixAsPerspective( vfov, width/height, 1.0f,10000.0f);
1405     }
1406 
1407 
1408     osg::ref_ptr<osg::GraphicsContext> gc = camera->getGraphicsContext();
1409 
1410     osg::ref_ptr<osg::GraphicsContext::Traits> traits = const_cast<osg::GraphicsContext::Traits*>(camera->getGraphicsContext()->getTraits());
1411 
1412     if (!ds->getStereo())
1413     {
1414         // load or create a Keystone object
1415         osg::ref_ptr<osgViewer::Keystone> keystone = 0;
1416         if (!(ds->getKeystones().empty())) keystone = dynamic_cast<osgViewer::Keystone*>(ds->getKeystones().front().get());
1417         if (!keystone) keystone = new osgViewer::Keystone;
1418 
1419         // create distortion texture
1420         osg::ref_ptr<osg::Texture> texture = createDistortionTexture(traits->width, traits->height);
1421 
1422         // create RTT Camera
1423         assignRenderToTextureCamera(gc.get(), traits->width, traits->height, texture.get());
1424 
1425         // create Keystone distortion camera
1426         osg::ref_ptr<osg::Camera> distortion_camera = assignKeystoneDistortionCamera(ds, gc.get(),
1427                                                                         0, 0, traits->width, traits->height,
1428                                                                         traits->doubleBuffer ? GL_BACK : GL_FRONT,
1429                                                                         texture.get(), keystone.get());
1430         // attach Keystone editing event handler.
1431         distortion_camera->addEventCallback(new KeystoneHandler(keystone.get()));
1432 
1433         camera->setGraphicsContext(0);
1434 
1435         return;
1436     }
1437 
1438     switch(ds->getStereoMode())
1439     {
1440         case(osg::DisplaySettings::QUAD_BUFFER):
1441         {
1442             // disconect the camera from the graphics context.
1443             camera->setGraphicsContext(0);
1444 
1445             // left Camera left buffer
1446             osg::ref_ptr<osg::Camera> left_camera = assignStereoCamera(ds, gc.get(), 0, 0, traits->width, traits->height, traits->doubleBuffer ? GL_BACK_LEFT : GL_FRONT_LEFT, -1.0);
1447             left_camera->setClearMask(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
1448             left_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 0);
1449 
1450             // right Camera right buffer
1451             osg::ref_ptr<osg::Camera> right_camera = assignStereoCamera(ds, gc.get(), 0, 0, traits->width, traits->height, traits->doubleBuffer ? GL_BACK_RIGHT : GL_FRONT_RIGHT, 1.0);
1452             right_camera->setClearMask(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
1453             right_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 1);
1454 
1455             // for keystone:
1456             // left camera to render to left texture
1457             // right camera to render to right texture
1458             // left keystone camera to render to left buffer
1459             // left keystone camera to render to right buffer
1460             // one keystone and editing for the one window
1461 
1462             if (!keystones.empty())
1463             {
1464                 // for keystone:
1465                 // left camera to render to left texture using whole viewport of left texture
1466                 // right camera to render to right texture using whole viewport of right texture
1467                 // left keystone camera to render to left viewport/window
1468                 // right keystone camera to render to right viewport/window
1469                 // two keystone, one for each of the left and right viewports/windows
1470 
1471                 osg::ref_ptr<Keystone> keystone = keystones.front();
1472 
1473                 // create distortion texture
1474                 osg::ref_ptr<osg::Texture> left_texture = createDistortionTexture(traits->width, traits->height);
1475 
1476                 // convert to RTT Camera
1477                 left_camera->setViewport(0, 0, traits->width, traits->height);
1478                 left_camera->setDrawBuffer(GL_FRONT);
1479                 left_camera->setReadBuffer(GL_FRONT);
1480                 left_camera->setAllowEventFocus(true);
1481                 left_camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
1482 
1483                 // attach the texture and use it as the color buffer.
1484                 left_camera->attach(osg::Camera::COLOR_BUFFER, left_texture.get());
1485 
1486 
1487                 // create distortion texture
1488                 osg::ref_ptr<osg::Texture> right_texture = createDistortionTexture(traits->width, traits->height);
1489 
1490                 // convert to RTT Camera
1491                 right_camera->setViewport(0, 0, traits->width, traits->height);
1492                 right_camera->setDrawBuffer(GL_FRONT);
1493                 right_camera->setReadBuffer(GL_FRONT);
1494                 right_camera->setAllowEventFocus(true);
1495                 right_camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
1496 
1497                 // attach the texture and use it as the color buffer.
1498                 right_camera->attach(osg::Camera::COLOR_BUFFER, right_texture.get());
1499 
1500 
1501                 // create Keystone left distortion camera
1502                 keystone->setGridColor(osg::Vec4(1.0f,0.0f,0.0,1.0));
1503                 osg::ref_ptr<osg::Camera> left_keystone_camera = assignKeystoneDistortionCamera(ds, gc.get(),
1504                                                                                 0, 0, traits->width, traits->height,
1505                                                                                 traits->doubleBuffer ? GL_BACK_LEFT : GL_FRONT_LEFT,
1506                                                                                 left_texture.get(), keystone.get());
1507 
1508                 left_keystone_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 2);
1509 
1510                 // attach Keystone editing event handler.
1511                 left_keystone_camera->addEventCallback(new KeystoneHandler(keystone.get()));
1512 
1513 
1514                 // create Keystone right distortion camera
1515                 osg::ref_ptr<osg::Camera> right_keystone_camera = assignKeystoneDistortionCamera(ds, gc.get(),
1516                                                                                 0, 0, traits->width, traits->height,
1517                                                                                 traits->doubleBuffer ? GL_BACK_RIGHT : GL_FRONT_RIGHT,
1518                                                                                 right_texture.get(), keystone.get());
1519 
1520                 right_keystone_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 3);
1521                 right_keystone_camera->setAllowEventFocus(false);
1522 
1523             }
1524 
1525             break;
1526         }
1527         case(osg::DisplaySettings::ANAGLYPHIC):
1528         {
1529             // disconect the camera from the graphics context.
1530             camera->setGraphicsContext(0);
1531 
1532             // left Camera red
1533             osg::ref_ptr<osg::Camera> left_camera = assignStereoCamera(ds, gc.get(), 0, 0, traits->width, traits->height, traits->doubleBuffer ? GL_BACK : GL_FRONT, -1.0);
1534             left_camera->setClearMask(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
1535             left_camera->getOrCreateStateSet()->setAttribute(new osg::ColorMask(true, false, false, true));
1536             left_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 0);
1537 
1538             // right Camera cyan
1539             osg::ref_ptr<osg::Camera> right_camera = assignStereoCamera(ds, gc.get(), 0, 0, traits->width, traits->height, traits->doubleBuffer ? GL_BACK : GL_FRONT, 1.0);
1540             right_camera->setClearMask(GL_DEPTH_BUFFER_BIT);
1541             right_camera->getOrCreateStateSet()->setAttribute(new osg::ColorMask(false, true, true, true));
1542             right_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 1);
1543 
1544             if (!keystones.empty())
1545             {
1546                 // for keystone:
1547                 // left camera to render to texture using red colour mask
1548                 // right camera to render to same texture using cyan colour mask
1549                 // keystone camera to render to whole screen without colour masks
1550                 // one keystone and editing for the one window
1551 
1552                 osg::ref_ptr<Keystone> keystone = keystones.front();
1553 
1554                 bool useTwoTexture = true;
1555 
1556                 if (useTwoTexture)
1557                 {
1558 
1559                     // create left distortion texture
1560                     osg::ref_ptr<osg::Texture> left_texture = createDistortionTexture(traits->width, traits->height);
1561 
1562                     // convert to RTT Camera
1563                     left_camera->setDrawBuffer(GL_FRONT);
1564                     left_camera->setReadBuffer(GL_FRONT);
1565                     left_camera->setAllowEventFocus(false);
1566                     left_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 0);
1567                     left_camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
1568                     left_camera->getOrCreateStateSet()->removeAttribute(osg::StateAttribute::COLORMASK);
1569                     left_camera->setClearMask(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
1570 
1571                     // attach the texture and use it as the color buffer.
1572                     left_camera->attach(osg::Camera::COLOR_BUFFER, left_texture.get());
1573 
1574                     // create left distortion texture
1575                     osg::ref_ptr<osg::Texture> right_texture = createDistortionTexture(traits->width, traits->height);
1576 
1577                     // convert to RTT Camera
1578                     right_camera->setDrawBuffer(GL_FRONT);
1579                     right_camera->setReadBuffer(GL_FRONT);
1580                     right_camera->setAllowEventFocus(false);
1581                     right_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 1);
1582                     right_camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
1583                     right_camera->getOrCreateStateSet()->removeAttribute(osg::StateAttribute::COLORMASK);
1584                     right_camera->setClearMask(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
1585 
1586                     // attach the texture and use it as the color buffer.
1587                     right_camera->attach(osg::Camera::COLOR_BUFFER, right_texture.get());
1588 
1589                     // create Keystone left distortion camera
1590                     osg::ref_ptr<osg::Camera> left_keystone_camera = assignKeystoneDistortionCamera(ds, gc.get(),
1591                                                                                     0, 0, traits->width, traits->height,
1592                                                                                     traits->doubleBuffer ? GL_BACK : GL_FRONT,
1593                                                                                     left_texture.get(), keystone.get());
1594 
1595                     left_keystone_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 2);
1596                     left_keystone_camera->setClearMask(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
1597                     left_keystone_camera->getOrCreateStateSet()->setAttribute(new osg::ColorMask(true, false, false, true));
1598 
1599 
1600                     // create Keystone right distortion camera
1601                     osg::ref_ptr<osg::Camera> right_keystone_camera = assignKeystoneDistortionCamera(ds, gc.get(),
1602                                                                                     0, 0, traits->width, traits->height,
1603                                                                                     traits->doubleBuffer ? GL_BACK : GL_FRONT,
1604                                                                                     right_texture.get(), keystone.get());
1605 
1606                     right_keystone_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 3);
1607                     right_keystone_camera->setClearMask(GL_DEPTH_BUFFER_BIT);
1608                     right_keystone_camera->getOrCreateStateSet()->setAttribute(new osg::ColorMask(false, true, true, true));
1609 
1610                     // attach Keystone editing event handler.
1611                     left_keystone_camera->addEventCallback(new KeystoneHandler(keystone.get()));
1612 
1613                     camera->setAllowEventFocus(false);
1614 
1615                 }
1616                 else
1617                 {
1618                     // create distortion texture
1619                     osg::ref_ptr<osg::Texture> texture = createDistortionTexture(traits->width, traits->height);
1620 
1621                     // convert to RTT Camera
1622                     left_camera->setDrawBuffer(GL_FRONT);
1623                     left_camera->setReadBuffer(GL_FRONT);
1624                     left_camera->setAllowEventFocus(false);
1625                     left_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 0);
1626                     left_camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
1627 
1628                     // attach the texture and use it as the color buffer.
1629                     left_camera->attach(osg::Camera::COLOR_BUFFER, texture.get());
1630 
1631 
1632                     // convert to RTT Camera
1633                     right_camera->setDrawBuffer(GL_FRONT);
1634                     right_camera->setReadBuffer(GL_FRONT);
1635                     right_camera->setAllowEventFocus(false);
1636                     right_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 1);
1637                     right_camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
1638 
1639                     // attach the texture and use it as the color buffer.
1640                     right_camera->attach(osg::Camera::COLOR_BUFFER, texture.get());
1641 
1642 
1643                     // create Keystone distortion camera
1644                     osg::ref_ptr<osg::Camera> distortion_camera = assignKeystoneDistortionCamera(ds, gc.get(),
1645                                                                                     0, 0, traits->width, traits->height,
1646                                                                                     traits->doubleBuffer ? GL_BACK : GL_FRONT,
1647                                                                                     texture.get(), keystone.get());
1648 
1649                     distortion_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 2);
1650 
1651                     // attach Keystone editing event handler.
1652                     distortion_camera->addEventCallback(new KeystoneHandler(keystone.get()));
1653 
1654                     camera->setAllowEventFocus(false);
1655                 }
1656             }
1657 
1658             break;
1659         }
1660         case(osg::DisplaySettings::HORIZONTAL_SPLIT):
1661         {
1662             // disconect the camera from the graphics context.
1663             camera->setGraphicsContext(0);
1664 
1665             bool left_eye_left_viewport = ds->getSplitStereoHorizontalEyeMapping()==osg::DisplaySettings::LEFT_EYE_LEFT_VIEWPORT;
1666             int left_start = (left_eye_left_viewport) ? 0 : traits->width/2;
1667             int right_start = (left_eye_left_viewport) ? traits->width/2 : 0;
1668 
1669             // left viewport Camera
1670             osg::ref_ptr<osg::Camera> left_camera = assignStereoCamera(ds, gc.get(),
1671                                left_start, 0, traits->width/2, traits->height, traits->doubleBuffer ? GL_BACK : GL_FRONT,
1672                                -1.0);
1673 
1674             // right viewport Camera
1675             osg::ref_ptr<osg::Camera> right_camera = assignStereoCamera(ds, gc.get(),
1676                                right_start, 0, traits->width/2, traits->height, traits->doubleBuffer ? GL_BACK : GL_FRONT,
1677                                1.0);
1678 
1679             if (!keystones.empty())
1680             {
1681                 // for keystone:
1682                 // left camera to render to left texture using whole viewport of left texture
1683                 // right camera to render to right texture using whole viewport of right texture
1684                 // left keystone camera to render to left viewport/window
1685                 // right keystone camera to render to right viewport/window
1686                 // two keystone, one for each of the left and right viewports/windows
1687 
1688                 osg::ref_ptr<Keystone> left_keystone = keystones[0];
1689                 osg::ref_ptr<Keystone> right_keystone = keystones[1];
1690 
1691                 // create distortion texture
1692                 osg::ref_ptr<osg::Texture> left_texture = createDistortionTexture(traits->width/2, traits->height);
1693 
1694                 // convert to RTT Camera
1695                 left_camera->setViewport(0, 0, traits->width/2, traits->height);
1696                 left_camera->setDrawBuffer(GL_FRONT);
1697                 left_camera->setReadBuffer(GL_FRONT);
1698                 left_camera->setAllowEventFocus(true);
1699                 left_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 0);
1700                 left_camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
1701 
1702                 // attach the texture and use it as the color buffer.
1703                 left_camera->attach(osg::Camera::COLOR_BUFFER, left_texture.get());
1704 
1705 
1706                 // create distortion texture
1707                 osg::ref_ptr<osg::Texture> right_texture = createDistortionTexture(traits->width/2, traits->height);
1708 
1709                 // convert to RTT Camera
1710                 right_camera->setViewport(0, 0, traits->width/2, traits->height);
1711                 right_camera->setDrawBuffer(GL_FRONT);
1712                 right_camera->setReadBuffer(GL_FRONT);
1713                 right_camera->setAllowEventFocus(true);
1714                 right_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 1);
1715                 right_camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
1716 
1717                 // attach the texture and use it as the color buffer.
1718                 right_camera->attach(osg::Camera::COLOR_BUFFER, right_texture.get());
1719 
1720 
1721                 // create Keystone left distortion camera
1722                 left_keystone->setGridColor(osg::Vec4(1.0f,0.0f,0.0,1.0));
1723                 osg::ref_ptr<osg::Camera> left_keystone_camera = assignKeystoneDistortionCamera(ds, gc.get(),
1724                                                                                 left_start, 0, traits->width/2, traits->height,
1725                                                                                 traits->doubleBuffer ? GL_BACK : GL_FRONT,
1726                                                                                 left_texture.get(), left_keystone.get());
1727 
1728                 left_keystone_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 2);
1729 
1730                 // attach Keystone editing event handler.
1731                 left_keystone_camera->addEventCallback(new KeystoneHandler(left_keystone.get()));
1732 
1733 
1734                 // create Keystone right distortion camera
1735                 right_keystone->setGridColor(osg::Vec4(0.0f,1.0f,0.0,1.0));
1736                 osg::ref_ptr<osg::Camera> right_keystone_camera = assignKeystoneDistortionCamera(ds, gc.get(),
1737                                                                                 right_start, 0, traits->width/2, traits->height,
1738                                                                                 traits->doubleBuffer ? GL_BACK : GL_FRONT,
1739                                                                                 right_texture.get(), right_keystone.get());
1740 
1741                 right_keystone_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 3);
1742 
1743                 // attach Keystone editing event handler.
1744                 right_keystone_camera->addEventCallback(new KeystoneHandler(right_keystone.get()));
1745 
1746                 camera->setAllowEventFocus(false);
1747 
1748             }
1749 
1750             break;
1751         }
1752         case(osg::DisplaySettings::VERTICAL_SPLIT):
1753         {
1754             // disconect the camera from the graphics context.
1755             camera->setGraphicsContext(0);
1756 
1757             bool left_eye_bottom_viewport = ds->getSplitStereoVerticalEyeMapping()==osg::DisplaySettings::LEFT_EYE_BOTTOM_VIEWPORT;
1758             int left_start = (left_eye_bottom_viewport) ? 0 : traits->height/2;
1759             int right_start = (left_eye_bottom_viewport) ? traits->height/2 : 0;
1760 
1761             // bottom viewport Camera
1762             osg::ref_ptr<osg::Camera> left_camera = assignStereoCamera(ds, gc.get(),
1763                                0, left_start, traits->width, traits->height/2, traits->doubleBuffer ? GL_BACK : GL_FRONT,
1764                                -1.0);
1765 
1766             // top vieport camera
1767             osg::ref_ptr<osg::Camera> right_camera = assignStereoCamera(ds, gc.get(),
1768                                0, right_start, traits->width, traits->height/2, traits->doubleBuffer ? GL_BACK : GL_FRONT,
1769                                1.0);
1770 
1771             // for keystone:
1772             // left camera to render to left texture using whole viewport of left texture
1773             // right camera to render to right texture using whole viewport of right texture
1774             // left keystone camera to render to left viewport/window
1775             // right keystone camera to render to right viewport/window
1776             // two keystone, one for each of the left and right viewports/windows
1777 
1778             if (!keystones.empty())
1779             {
1780                 // for keystone:
1781                 // left camera to render to left texture using whole viewport of left texture
1782                 // right camera to render to right texture using whole viewport of right texture
1783                 // left keystone camera to render to left viewport/window
1784                 // right keystone camera to render to right viewport/window
1785                 // two keystone, one for each of the left and right viewports/windows
1786 
1787                 osg::ref_ptr<Keystone> left_keystone = keystones[0];
1788                 osg::ref_ptr<Keystone> right_keystone = keystones[1];
1789 
1790                 // create distortion texture
1791                 osg::ref_ptr<osg::Texture> left_texture = createDistortionTexture(traits->width, traits->height/2);
1792 
1793                 // convert to RTT Camera
1794                 left_camera->setViewport(0, 0, traits->width, traits->height/2);
1795                 left_camera->setDrawBuffer(GL_FRONT);
1796                 left_camera->setReadBuffer(GL_FRONT);
1797                 left_camera->setAllowEventFocus(true);
1798                 left_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 0);
1799                 left_camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
1800 
1801                 // attach the texture and use it as the color buffer.
1802                 left_camera->attach(osg::Camera::COLOR_BUFFER, left_texture.get());
1803 
1804 
1805                 // create distortion texture
1806                 osg::ref_ptr<osg::Texture> right_texture = createDistortionTexture(traits->width, traits->height/2);
1807 
1808                 // convert to RTT Camera
1809                 right_camera->setViewport(0, 0, traits->width, traits->height/2);
1810                 right_camera->setDrawBuffer(GL_FRONT);
1811                 right_camera->setReadBuffer(GL_FRONT);
1812                 right_camera->setAllowEventFocus(true);
1813                 right_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 1);
1814                 right_camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
1815 
1816                 // attach the texture and use it as the color buffer.
1817                 right_camera->attach(osg::Camera::COLOR_BUFFER, right_texture.get());
1818 
1819 
1820                 // create Keystone left distortion camera
1821                 left_keystone->setGridColor(osg::Vec4(1.0f,0.0f,0.0,1.0));
1822                 osg::ref_ptr<osg::Camera> left_keystone_camera = assignKeystoneDistortionCamera(ds, gc.get(),
1823                                                                                 0, left_start, traits->width, traits->height/2,
1824                                                                                 traits->doubleBuffer ? GL_BACK : GL_FRONT,
1825                                                                                 left_texture.get(), left_keystone.get());
1826 
1827                 left_keystone_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 2);
1828 
1829                 // attach Keystone editing event handler.
1830                 left_keystone_camera->addEventCallback(new KeystoneHandler(left_keystone.get()));
1831 
1832 
1833                 // create Keystone right distortion camera
1834                 right_keystone->setGridColor(osg::Vec4(0.0f,1.0f,0.0,1.0));
1835                 osg::ref_ptr<osg::Camera> right_keystone_camera = assignKeystoneDistortionCamera(ds, gc.get(),
1836                                                                                 0, right_start, traits->width, traits->height/2,
1837                                                                                 traits->doubleBuffer ? GL_BACK : GL_FRONT,
1838                                                                                 right_texture.get(), right_keystone.get());
1839 
1840                 right_keystone_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 3);
1841 
1842                 // attach Keystone editing event handler.
1843                 right_keystone_camera->addEventCallback(new KeystoneHandler(right_keystone.get()));
1844 
1845                 camera->setAllowEventFocus(false);
1846 
1847             }
1848 
1849             break;
1850         }
1851         case(osg::DisplaySettings::LEFT_EYE):
1852         {
1853             // disconect the camera from the graphics context.
1854             camera->setGraphicsContext(0);
1855 
1856             // single window, whole window, just left eye offsets
1857             osg::ref_ptr<osg::Camera> left_camera = assignStereoCamera(ds, gc.get(), 0, 0, traits->width, traits->height, traits->doubleBuffer ? GL_BACK : GL_FRONT, -1.0);
1858 
1859             // for keystone:
1860             // treat as standard keystone correction.
1861             // left eye camera to render to texture
1862             // keystone camera then render to window
1863             // one keystone and editing for window
1864 
1865             if (!keystones.empty())
1866             {
1867                 // for keystone:
1868                 // left camera to render to texture using red colour mask
1869                 // right camera to render to same texture using cyan colour mask
1870                 // keystone camera to render to whole screen without colour masks
1871                 // one keystone and editing for the one window
1872 
1873                 osg::ref_ptr<Keystone> keystone = keystones.front();
1874 
1875                 // create distortion texture
1876                 osg::ref_ptr<osg::Texture> texture = createDistortionTexture(traits->width, traits->height);
1877 
1878                 // convert to RTT Camera
1879                 left_camera->setDrawBuffer(GL_FRONT);
1880                 left_camera->setReadBuffer(GL_FRONT);
1881                 left_camera->setAllowEventFocus(false);
1882                 left_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 0);
1883                 left_camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
1884 
1885                 // attach the texture and use it as the color buffer.
1886                 left_camera->attach(osg::Camera::COLOR_BUFFER, texture.get());
1887 
1888 
1889                 // create Keystone distortion camera
1890                 osg::ref_ptr<osg::Camera> distortion_camera = assignKeystoneDistortionCamera(ds, gc.get(),
1891                                                                                 0, 0, traits->width, traits->height,
1892                                                                                 traits->doubleBuffer ? GL_BACK : GL_FRONT,
1893                                                                                 texture.get(), keystone.get());
1894 
1895                 distortion_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 2);
1896 
1897                 // attach Keystone editing event handler.
1898                 distortion_camera->addEventCallback(new KeystoneHandler(keystone.get()));
1899             }
1900             break;
1901         }
1902         case(osg::DisplaySettings::RIGHT_EYE):
1903         {
1904             // disconect the camera from the graphics context.
1905             camera->setGraphicsContext(0);
1906 
1907             // single window, whole window, just right eye offsets
1908             osg::ref_ptr<osg::Camera> right_camera = assignStereoCamera(ds, gc.get(), 0, 0, traits->width, traits->height, traits->doubleBuffer ? GL_BACK : GL_FRONT, 1.0);
1909 
1910             // for keystone:
1911             // treat as standard keystone correction.
1912             // left eye camera to render to texture
1913             // keystone camera then render to window
1914             // one keystone and editing for window
1915 
1916             if (!keystones.empty())
1917             {
1918                 // for keystone:
1919                 // left camera to render to texture using red colour mask
1920                 // right camera to render to same texture using cyan colour mask
1921                 // keystone camera to render to whole screen without colour masks
1922                 // one keystone and editing for the one window
1923 
1924                 osg::ref_ptr<Keystone> keystone = keystones.front();
1925 
1926                 // create distortion texture
1927                 osg::ref_ptr<osg::Texture> texture = createDistortionTexture(traits->width, traits->height);
1928 
1929                 // convert to RTT Camera
1930                 right_camera->setDrawBuffer(GL_FRONT);
1931                 right_camera->setReadBuffer(GL_FRONT);
1932                 right_camera->setAllowEventFocus(false);
1933                 right_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 0);
1934                 right_camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
1935 
1936                 // attach the texture and use it as the color buffer.
1937                 right_camera->attach(osg::Camera::COLOR_BUFFER, texture.get());
1938 
1939                 // create Keystone distortion camera
1940                 osg::ref_ptr<osg::Camera> distortion_camera = assignKeystoneDistortionCamera(ds, gc.get(),
1941                                                                                              0, 0, traits->width, traits->height,
1942                                                                                              traits->doubleBuffer ? GL_BACK : GL_FRONT,
1943                                                                                              texture.get(), keystone.get());
1944 
1945                 distortion_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 1);
1946 
1947                 // attach Keystone editing event handler.
1948                 distortion_camera->addEventCallback(new KeystoneHandler(keystone.get()));
1949             }
1950             break;
1951         }
1952         case(osg::DisplaySettings::HORIZONTAL_INTERLACE):
1953         case(osg::DisplaySettings::VERTICAL_INTERLACE):
1954         case(osg::DisplaySettings::CHECKERBOARD):
1955         {
1956             // disconect the camera from the graphics context.
1957             camera->setGraphicsContext(0);
1958 
1959             // set up the stencil buffer
1960             {
1961                 osg::ref_ptr<osg::Camera> camera = new osg::Camera;
1962                 camera->setGraphicsContext(gc.get());
1963                 camera->setViewport(0, 0, traits->width, traits->height);
1964                 camera->setDrawBuffer(traits->doubleBuffer ? GL_BACK : GL_FRONT);
1965                 camera->setReadBuffer(camera->getDrawBuffer());
1966                 camera->setReferenceFrame(osg::Camera::ABSOLUTE_RF);
1967                 camera->setClearMask(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT|GL_STENCIL_BUFFER_BIT);
1968                 camera->setClearStencil(0);
1969                 camera->setRenderOrder(osg::Camera::NESTED_RENDER, 0);
1970                 addSlave(camera.get(), false);
1971 
1972                 osg::ref_ptr<osg::Geometry> geometry = osg::createTexturedQuadGeometry(osg::Vec3(-1.0f,-1.0f,0.0f), osg::Vec3(2.0f,0.0f,0.0f), osg::Vec3(0.0f,2.0f,0.0f), 0.0f, 0.0f, 1.0f, 1.0f);
1973                 osg::ref_ptr<osg::Geode> geode = new osg::Geode;
1974                 geode->addDrawable(geometry.get());
1975                 camera->addChild(geode.get());
1976 
1977                 geode->setCullingActive(false);
1978 
1979                 osg::ref_ptr<osg::StateSet> stateset = geode->getOrCreateStateSet();
1980 
1981                 // set up stencil
1982                 osg::ref_ptr<osg::Stencil> stencil = new osg::Stencil;
1983                 stencil->setFunction(osg::Stencil::ALWAYS, 1, ~0u);
1984                 stencil->setOperation(osg::Stencil::REPLACE, osg::Stencil::REPLACE, osg::Stencil::REPLACE);
1985                 stencil->setWriteMask(~0u);
1986                 stateset->setAttributeAndModes(stencil.get(), osg::StateAttribute::ON);
1987 
1988                 // set up polygon stipple
1989                 if(ds->getStereoMode() == osg::DisplaySettings::VERTICAL_INTERLACE)
1990                 {
1991                     stateset->setAttributeAndModes(new osg::PolygonStipple(patternVertEven), osg::StateAttribute::ON);
1992                 }
1993                 else if(ds->getStereoMode() == osg::DisplaySettings::HORIZONTAL_INTERLACE)
1994                 {
1995                     stateset->setAttributeAndModes(new osg::PolygonStipple(patternHorzEven), osg::StateAttribute::ON);
1996                 }
1997                 else
1998                 {
1999                     stateset->setAttributeAndModes(new osg::PolygonStipple(patternCheckerboard), osg::StateAttribute::ON);
2000                 }
2001 
2002                 stateset->setMode(GL_LIGHTING, osg::StateAttribute::OFF);
2003                 stateset->setMode(GL_DEPTH_TEST, osg::StateAttribute::OFF);
2004 
2005             }
2006 
2007             // left Camera
2008             {
2009                 osg::ref_ptr<osg::Camera> left_camera = assignStereoCamera(ds, gc.get(), 0, 0, traits->width, traits->height, traits->doubleBuffer ? GL_BACK : GL_FRONT, -1.0);
2010                 left_camera->setClearMask(0);
2011                 left_camera->setClearMask(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
2012                 left_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 1);
2013 
2014                 osg::ref_ptr<osg::Stencil> stencil = new osg::Stencil;
2015                 stencil->setFunction(osg::Stencil::EQUAL, 0, ~0u);
2016                 stencil->setOperation(osg::Stencil::KEEP, osg::Stencil::KEEP, osg::Stencil::KEEP);
2017                 left_camera->getOrCreateStateSet()->setAttributeAndModes(stencil.get(), osg::StateAttribute::ON);
2018             }
2019 
2020             // right Camera
2021             {
2022                 osg::ref_ptr<osg::Camera> right_camera = assignStereoCamera(ds, gc.get(), 0, 0, traits->width, traits->height, traits->doubleBuffer ? GL_BACK : GL_FRONT, 1.0);
2023                 right_camera->setClearMask(GL_DEPTH_BUFFER_BIT);
2024                 right_camera->setRenderOrder(osg::Camera::NESTED_RENDER, 2);
2025 
2026                 osg::ref_ptr<osg::Stencil> stencil = new osg::Stencil;
2027                 stencil->setFunction(osg::Stencil::NOTEQUAL, 0, ~0u);
2028                 stencil->setOperation(osg::Stencil::KEEP, osg::Stencil::KEEP, osg::Stencil::KEEP);
2029                 right_camera->getOrCreateStateSet()->setAttributeAndModes(stencil.get(), osg::StateAttribute::ON);
2030             }
2031             break;
2032         }
2033     }
2034 }
2035