Bike-X 0.8
Util_Render_Stereo.cpp
1 /************************************************************************************
2 
3 Filename : Util_Render_Stereo.cpp
4 Content : Stereo rendering configuration implementation
5 Created : October 22, 2012
6 Authors : Michael Antonov, Andrew Reisse, Tom Forsyth
7 
8 Copyright : Copyright 2014 Oculus VR, Inc. All Rights reserved.
9 
10 Licensed under the Oculus VR Rift SDK License Version 3.1 (the "License");
11 you may not use the Oculus VR Rift SDK except in compliance with the License,
12 which is provided at the time of installation or download, or which
13 otherwise accompanies this software in either electronic or hard copy form.
14 
15 You may obtain a copy of the License at
16 
17 http://www.oculusvr.com/licenses/LICENSE-3.1
18 
19 Unless required by applicable law or agreed to in writing, the Oculus VR SDK
20 distributed under the License is distributed on an "AS IS" BASIS,
21 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
22 See the License for the specific language governing permissions and
23 limitations under the License.
24 
25 *************************************************************************************/
26 
27 #include "Util_Render_Stereo.h"
28 #include "../OVR_SensorFusion.h"
29 
30 namespace OVR { namespace Util { namespace Render {
31 
32 
33 //-----------------------------------------------------------------------------------
34 // **** Useful debug functions.
35 
36 char const* GetDebugNameEyeCupType ( EyeCupType eyeCupType )
37 {
38  switch ( eyeCupType )
39  {
40  case EyeCup_DK1A: return "DK1 A"; break;
41  case EyeCup_DK1B: return "DK1 B"; break;
42  case EyeCup_DK1C: return "DK1 C"; break;
43  case EyeCup_DKHD2A: return "DKHD2 A"; break;
44  case EyeCup_OrangeA: return "Orange A"; break;
45  case EyeCup_RedA: return "Red A"; break;
46  case EyeCup_PinkA: return "Pink A"; break;
47  case EyeCup_BlueA: return "Blue A"; break;
48  case EyeCup_Delilah1A: return "Delilah 1 A"; break;
49  case EyeCup_Delilah2A: return "Delilah 2 A"; break;
50  case EyeCup_JamesA: return "James A"; break;
51  case EyeCup_SunMandalaA: return "Sun Mandala A"; break;
52  case EyeCup_DK2A: return "DK2 A"; break;
53  case EyeCup_LAST: return "LAST"; break;
54  default: OVR_ASSERT ( false ); return "Error"; break;
55  }
56 }
57 
58 char const* GetDebugNameHmdType ( HmdTypeEnum hmdType )
59 {
60  switch ( hmdType )
61  {
62  case HmdType_None: return "None"; break;
63  case HmdType_DK1: return "DK1"; break;
64  case HmdType_DKProto: return "DK1 prototype"; break;
65  case HmdType_DKHDProto: return "DK HD prototype 1"; break;
66  case HmdType_DKHDProto566Mi: return "DK HD prototype 566 Mi"; break;
67  case HmdType_DKHD2Proto: return "DK HD prototype 585"; break;
68  case HmdType_CrystalCoveProto: return "Crystal Cove"; break;
69  case HmdType_DK2: return "DK2"; break;
70  case HmdType_Unknown: return "Unknown"; break;
71  case HmdType_LAST: return "LAST"; break;
72  default: OVR_ASSERT ( false ); return "Error"; break;
73  }
74 }
75 
76 
77 //-----------------------------------------------------------------------------------
78 // **** Internal pipeline functions.
79 
80 struct DistortionAndFov
81 {
82  DistortionRenderDesc Distortion;
83  FovPort Fov;
84 };
85 
86 static DistortionAndFov CalculateDistortionAndFovInternal ( StereoEye eyeType, HmdRenderInfo const &hmd,
87  LensConfig const *pLensOverride = NULL,
88  FovPort const *pTanHalfFovOverride = NULL,
89  float extraEyeRotationInRadians = OVR_DEFAULT_EXTRA_EYE_ROTATION )
90 {
91  // pLensOverride can be NULL, which means no override.
92 
93  DistortionRenderDesc localDistortion = CalculateDistortionRenderDesc ( eyeType, hmd, pLensOverride );
94  FovPort fov = CalculateFovFromHmdInfo ( eyeType, localDistortion, hmd, extraEyeRotationInRadians );
95  // Here the app or the user would optionally clamp this visible fov to a smaller number if
96  // they want more perf or resolution and are willing to give up FOV.
97  // They may also choose to clamp UDLR differently e.g. to get cinemascope-style views.
98  if ( pTanHalfFovOverride != NULL )
99  {
100  fov = *pTanHalfFovOverride;
101  }
102 
103  // Here we could call ClampToPhysicalScreenFov(), but we do want people
104  // to be able to play with larger-than-screen views.
105  // The calling app can always do the clamping itself.
106  DistortionAndFov result;
107  result.Distortion = localDistortion;
108  result.Fov = fov;
109 
110  return result;
111 }
112 
113 
114 static Recti CalculateViewportInternal ( StereoEye eyeType,
115  Sizei const actualRendertargetSurfaceSize,
116  Sizei const requestedRenderedPixelSize,
117  bool bRendertargetSharedByBothEyes,
118  bool bMonoRenderingMode = false )
119 {
120  Recti renderedViewport;
121  if ( bMonoRenderingMode || !bRendertargetSharedByBothEyes || (eyeType == StereoEye_Center) )
122  {
123  // One eye per RT.
124  renderedViewport.x = 0;
125  renderedViewport.y = 0;
126  renderedViewport.w = Alg::Min ( actualRendertargetSurfaceSize.w, requestedRenderedPixelSize.w );
127  renderedViewport.h = Alg::Min ( actualRendertargetSurfaceSize.h, requestedRenderedPixelSize.h );
128  }
129  else
130  {
131  // Both eyes share the RT.
132  renderedViewport.x = 0;
133  renderedViewport.y = 0;
134  renderedViewport.w = Alg::Min ( actualRendertargetSurfaceSize.w/2, requestedRenderedPixelSize.w );
135  renderedViewport.h = Alg::Min ( actualRendertargetSurfaceSize.h, requestedRenderedPixelSize.h );
136  if ( eyeType == StereoEye_Right )
137  {
138  renderedViewport.x = (actualRendertargetSurfaceSize.w+1)/2; // Round up, not down.
139  }
140  }
141  return renderedViewport;
142 }
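A minimal illustrative sketch (not part of the original file) of how the shared-rendertarget branch above divides one surface between the eyes; the surface and request sizes below are hypothetical.

static void ExampleSharedViewportSplit ()
{
    // Hypothetical 2560x800 surface shared by both eyes, with 1280x800 requested per eye.
    Sizei surfaceSize ( 2560, 800 );
    Sizei requestedSize ( 1280, 800 );
    Recti left  = CalculateViewportInternal ( StereoEye_Left,  surfaceSize, requestedSize, true );
    Recti right = CalculateViewportInternal ( StereoEye_Right, surfaceSize, requestedSize, true );
    // Left eye: x = 0; right eye starts at (2560+1)/2 = 1280; both are 1280x800.
    OVR_ASSERT ( ( left.x == 0 ) && ( left.w == 1280 ) );
    OVR_ASSERT ( ( right.x == 1280 ) && ( right.w == 1280 ) );
    OVR_UNUSED ( left );
    OVR_UNUSED ( right );
}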
143 
144 static Recti CalculateViewportDensityInternal ( StereoEye eyeType,
145  DistortionRenderDesc const &distortion,
146  FovPort const &fov,
147  Sizei const &actualRendertargetSurfaceSize,
148  bool bRendertargetSharedByBothEyes,
149  float desiredPixelDensity = 1.0f,
150  bool bMonoRenderingMode = false )
151 {
152  OVR_ASSERT ( actualRendertargetSurfaceSize.w > 0 );
153  OVR_ASSERT ( actualRendertargetSurfaceSize.h > 0 );
154 
155  // What size RT do we need to get 1:1 mapping?
156  Sizei idealPixelSize = CalculateIdealPixelSize ( eyeType, distortion, fov, desiredPixelDensity );
157  // ...but we might not actually get that size.
158  return CalculateViewportInternal ( eyeType,
159  actualRendertargetSurfaceSize,
160  idealPixelSize,
161  bRendertargetSharedByBothEyes, bMonoRenderingMode );
162 }
163 
164 static ViewportScaleAndOffset CalculateViewportScaleAndOffsetInternal (
165  ScaleAndOffset2D const &eyeToSourceNDC,
166  Recti const &renderedViewport,
167  Sizei const &actualRendertargetSurfaceSize )
168 {
169  ViewportScaleAndOffset result;
170  result.RenderedViewport = renderedViewport;
171  result.EyeToSourceUV = CreateUVScaleAndOffsetfromNDCScaleandOffset (
172  eyeToSourceNDC, renderedViewport, actualRendertargetSurfaceSize );
173  return result;
174 }
175 
176 
177 static StereoEyeParams CalculateStereoEyeParamsInternal ( StereoEye eyeType, HmdRenderInfo const &hmd,
178  DistortionRenderDesc const &distortion,
179  FovPort const &fov,
180  Sizei const &actualRendertargetSurfaceSize,
181  Recti const &renderedViewport,
182  bool bRightHanded = true, float zNear = 0.01f, float zFar = 10000.0f,
183  bool bMonoRenderingMode = false,
184  float zoomFactor = 1.0f )
185 {
186  // Generate the projection matrix for intermediate rendertarget.
187  // Z range can also be inserted later by the app (though not in this particular case)
188  float fovScale = 1.0f / zoomFactor;
189  FovPort zoomedFov = fov;
190  zoomedFov.LeftTan *= fovScale;
191  zoomedFov.RightTan *= fovScale;
192  zoomedFov.UpTan *= fovScale;
193  zoomedFov.DownTan *= fovScale;
194  Matrix4f projection = CreateProjection ( bRightHanded, zoomedFov, zNear, zFar );
195 
196  // Find the mapping from TanAngle space to target NDC space.
197  // Note this does NOT take the zoom factor into account because
198  // this is the mapping of actual physical eye FOV (and our eyes do not zoom!)
199  // to screen space.
200  ScaleAndOffset2D eyeToSourceNDC = CreateNDCScaleAndOffsetFromFov ( fov );
201 
202  // The size of the final FB, which is fixed and determined by the physical size of the device display.
203  Recti distortedViewport = GetFramebufferViewport ( eyeType, hmd );
204  Vector3f virtualCameraOffset = CalculateEyeVirtualCameraOffset(hmd, eyeType, bMonoRenderingMode);
205 
206  StereoEyeParams result;
207  result.Eye = eyeType;
208  result.ViewAdjust = Matrix4f::Translation(virtualCameraOffset);
209  result.Distortion = distortion;
210  result.DistortionViewport = distortedViewport;
211  result.Fov = fov;
212  result.RenderedProjection = projection;
213  result.EyeToSourceNDC = eyeToSourceNDC;
214  ViewportScaleAndOffset vsao = CalculateViewportScaleAndOffsetInternal ( eyeToSourceNDC, renderedViewport, actualRendertargetSurfaceSize );
215  result.RenderedViewport = vsao.RenderedViewport;
216  result.EyeToSourceUV = vsao.EyeToSourceUV;
217 
218  return result;
219 }
220 
221 
222 Vector3f CalculateEyeVirtualCameraOffset ( HmdRenderInfo const &hmd,
223  StereoEye eyeType, bool bmonoRenderingMode)
224 {
225  Vector3f virtualCameraOffset(0);
226 
227  if (!bmonoRenderingMode)
228  {
229  float eyeCenterRelief = hmd.GetEyeCenter().ReliefInMeters;
230 
231  if (eyeType == StereoEye_Left)
232  {
233  virtualCameraOffset.x = hmd.EyeLeft.NoseToPupilInMeters;
234  virtualCameraOffset.z = eyeCenterRelief - hmd.EyeLeft.ReliefInMeters;
235  }
236  else if (eyeType == StereoEye_Right)
237  {
238  virtualCameraOffset.x = -hmd.EyeRight.NoseToPupilInMeters;
239  virtualCameraOffset.z = eyeCenterRelief - hmd.EyeRight.ReliefInMeters;
240  }
241  }
242 
243  return virtualCameraOffset;
244 }
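Worked example (illustrative, hypothetical numbers): with NoseToPupilInMeters = 0.032 for both eyes and identical eye relief, the function above returns (+0.032, 0, 0) for the left eye and (-0.032, 0, 0) for the right eye, so the two virtual cameras end up 64mm apart with no fore/aft (z) correction.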
245 
246 
247 //-----------------------------------------------------------------------------------
248 // **** Higher-level utility functions.
249 
250 Sizei CalculateRecommendedTextureSize ( HmdRenderInfo const &hmd,
251  bool bRendertargetSharedByBothEyes,
252  float pixelDensityInCenter /*= 1.0f*/ )
253 {
254  Sizei idealPixelSize[2];
255  for ( int eyeNum = 0; eyeNum < 2; eyeNum++ )
256  {
257  StereoEye eyeType = ( eyeNum == 0 ) ? StereoEye_Left : StereoEye_Right;
258 
259  DistortionAndFov distortionAndFov = CalculateDistortionAndFovInternal ( eyeType, hmd );
260 
261  idealPixelSize[eyeNum] = CalculateIdealPixelSize ( eyeType,
262  distortionAndFov.Distortion,
263  distortionAndFov.Fov,
264  pixelDensityInCenter );
265  }
266 
267  Sizei result;
268  result.w = Alg::Max ( idealPixelSize[0].w, idealPixelSize[1].w );
269  result.h = Alg::Max ( idealPixelSize[0].h, idealPixelSize[1].h );
270  if ( bRendertargetSharedByBothEyes )
271  {
272  result.w *= 2;
273  }
274  return result;
275 }
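Worked example (illustrative, hypothetical sizes): if CalculateIdealPixelSize reports 1182x1461 for each eye, the function above recommends 1182x1461 per separate rendertarget, or 2364x1461 when one rendertarget is shared by both eyes - the width is doubled, the height is not.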
276 
277 StereoEyeParams CalculateStereoEyeParams ( HmdRenderInfo const &hmd,
278  StereoEye eyeType,
279  Sizei const &actualRendertargetSurfaceSize,
280  bool bRendertargetSharedByBothEyes,
281  bool bRightHanded /*= true*/,
282  float zNear /*= 0.01f*/, float zFar /*= 10000.0f*/,
283  Sizei const *pOverrideRenderedPixelSize /* = NULL*/,
284  FovPort const *pOverrideFovport /*= NULL*/,
285  float zoomFactor /*= 1.0f*/ )
286 {
287  DistortionAndFov distortionAndFov = CalculateDistortionAndFovInternal ( eyeType, hmd );
288  if ( pOverrideFovport != NULL )
289  {
290  distortionAndFov.Fov = *pOverrideFovport;
291  }
292 
293  Recti viewport;
294  if ( pOverrideRenderedPixelSize != NULL )
295  {
296  viewport = CalculateViewportInternal ( eyeType, actualRendertargetSurfaceSize, *pOverrideRenderedPixelSize, bRendertargetSharedByBothEyes, false );
297  }
298  else
299  {
300  viewport = CalculateViewportDensityInternal ( eyeType,
301  distortionAndFov.Distortion,
302  distortionAndFov.Fov,
303  actualRendertargetSurfaceSize, bRendertargetSharedByBothEyes, 1.0f, false );
304  }
305 
306  return CalculateStereoEyeParamsInternal (
307  eyeType, hmd,
308  distortionAndFov.Distortion,
309  distortionAndFov.Fov,
310  actualRendertargetSurfaceSize, viewport,
311  bRightHanded, zNear, zFar, false, zoomFactor );
312 }
313 
314 
315 FovPort CalculateRecommendedFov ( HmdRenderInfo const &hmd,
316  StereoEye eyeType,
317  bool bMakeFovSymmetrical /* = false */ )
318 {
319  DistortionAndFov distortionAndFov = CalculateDistortionAndFovInternal ( eyeType, hmd );
320  FovPort fov = distortionAndFov.Fov;
321  if ( bMakeFovSymmetrical )
322  {
323  // Deal with engines that cannot support an off-center projection.
324  // Unfortunately this means they will be rendering pixels that the user can't actually see.
325  float fovTanH = Alg::Max ( fov.LeftTan, fov.RightTan );
326  float fovTanV = Alg::Max ( fov.UpTan, fov.DownTan );
327  fov.LeftTan = fovTanH;
328  fov.RightTan = fovTanH;
329  fov.UpTan = fovTanV;
330  fov.DownTan = fovTanV;
331  }
332  return fov;
333 }
334 
335 ViewportScaleAndOffset ModifyRenderViewport ( StereoEyeParams const &params,
336  Sizei const &actualRendertargetSurfaceSize,
337  Recti const &renderViewport )
338 {
339  return CalculateViewportScaleAndOffsetInternal ( params.EyeToSourceNDC, renderViewport, actualRendertargetSurfaceSize );
340 }
341 
342 ViewportScaleAndOffset ModifyRenderSize ( StereoEyeParams const &params,
343  Sizei const &actualRendertargetSurfaceSize,
344  Sizei const &requestedRenderSize,
345  bool bRendertargetSharedByBothEyes /*= false*/ )
346 {
347  Recti renderViewport = CalculateViewportInternal ( params.Eye, actualRendertargetSurfaceSize, requestedRenderSize, bRendertargetSharedByBothEyes, false );
348  return CalculateViewportScaleAndOffsetInternal ( params.EyeToSourceNDC, renderViewport, actualRendertargetSurfaceSize );
349 }
350 
351 ViewportScaleAndOffset ModifyRenderDensity ( StereoEyeParams const &params,
352  Sizei const &actualRendertargetSurfaceSize,
353  float pixelDensity /*= 1.0f*/,
354  bool bRendertargetSharedByBothEyes /*= false*/ )
355 {
356  Recti renderViewport = CalculateViewportDensityInternal ( params.Eye, params.Distortion, params.Fov, actualRendertargetSurfaceSize, bRendertargetSharedByBothEyes, pixelDensity, false );
357  return CalculateViewportScaleAndOffsetInternal ( params.EyeToSourceNDC, renderViewport, actualRendertargetSurfaceSize );
358 }
359 
360 
361 //-----------------------------------------------------------------------------------
362 // **** StereoConfig Implementation
363 
364 StereoConfig::StereoConfig ( StereoMode mode )
365  : Mode(mode),
366  DirtyFlag(true)
367 {
368  // Initialize "fake" default HMD values for testing without HMD plugged in.
369  // These default values match those returned by DK1
370  // (at least they did at time of writing - certainly good enough for debugging)
372  Hmd.ResolutionInPixels = Sizei(1280, 800);
373  Hmd.ScreenSizeInMeters = Sizef(0.1498f, 0.0936f);
374  Hmd.ScreenGapSizeInMeters = 0.0f;
375  Hmd.CenterFromTopInMeters = 0.0468f;
376  Hmd.LensSeparationInMeters = 0.0635f;
377  Hmd.LensDiameterInMeters = 0.035f;
381  Hmd.Shutter.VsyncToNextVsync = ( 1.0f / 60.0f );
382  Hmd.Shutter.VsyncToFirstScanline = 0.000052f;
384  Hmd.Shutter.PixelSettleTime = 0.015f;
385  Hmd.Shutter.PixelPersistence = ( 1.0f / 60.0f );
389  Hmd.EyeLeft.Distortion.K[0] = 1.0f;
390  Hmd.EyeLeft.Distortion.K[1] = -0.3999f;
391  Hmd.EyeLeft.Distortion.K[2] = 0.2408f;
392  Hmd.EyeLeft.Distortion.K[3] = -0.4589f;
393  Hmd.EyeLeft.Distortion.MaxR = 1.0f;
399  Hmd.EyeLeft.ReliefInMeters = 0.013f;
401 
402  SetViewportMode = SVPM_Density;
403  SetViewportPixelsPerDisplayPixel = 1.0f;
404  // Not used in this mode, but init them anyway.
405  SetViewportSize[0] = Sizei(0,0);
406  SetViewportSize[1] = Sizei(0,0);
407  SetViewport[0] = Recti(0,0,0,0);
408  SetViewport[1] = Recti(0,0,0,0);
409 
410  OverrideLens = false;
411  OverrideTanHalfFov = false;
412  OverrideZeroIpd = false;
413  ExtraEyeRotationInRadians = OVR_DEFAULT_EXTRA_EYE_ROTATION;
414  IsRendertargetSharedByBothEyes = true;
415  RightHandedProjection = true;
416 
417  // This should cause an assert if the app does not call SetRendertargetSize()
418  RendertargetSize = Sizei ( 0, 0 );
419 
420  ZNear = 0.01f;
421  ZFar = 10000.0f;
422 
423  Set2DAreaFov(DegreeToRad(85.0f));
424 }
425 
426 void StereoConfig::SetHmdRenderInfo(const HmdRenderInfo& hmd)
427 {
428  Hmd = hmd;
429  DirtyFlag = true;
430 }
431 
432 void StereoConfig::Set2DAreaFov(float fovRadians)
433 {
434  Area2DFov = fovRadians;
435  DirtyFlag = true;
436 }
437 
438 const StereoEyeParamsWithOrtho& StereoConfig::GetEyeRenderParams(StereoEye eye)
439 {
440  if ( DirtyFlag )
441  {
442  UpdateComputedState();
443  }
444 
445  static const UByte eyeParamIndices[3] = { 0, 0, 1 };
446 
447  OVR_ASSERT(eye < sizeof(eyeParamIndices));
448  return EyeRenderParams[eyeParamIndices[eye]];
449 }
450 
451 void StereoConfig::SetLensOverride ( LensConfig const *pLensOverrideLeft /*= NULL*/,
452  LensConfig const *pLensOverrideRight /*= NULL*/ )
453 {
454  if ( pLensOverrideLeft == NULL )
455  {
456  OverrideLens = false;
457  }
458  else
459  {
460  OverrideLens = true;
461  LensOverrideLeft = *pLensOverrideLeft;
462  LensOverrideRight = *pLensOverrideLeft;
463  if ( pLensOverrideRight != NULL )
464  {
465  LensOverrideRight = *pLensOverrideRight;
466  }
467  }
468  DirtyFlag = true;
469 }
470 
471 void StereoConfig::SetRendertargetSize (Size<int> const rendertargetSize,
472  bool rendertargetIsSharedByBothEyes )
473 {
474  RendertargetSize = rendertargetSize;
475  IsRendertargetSharedByBothEyes = rendertargetIsSharedByBothEyes;
476  DirtyFlag = true;
477 }
478 
479 void StereoConfig::SetFov ( FovPort const *pfovLeft /*= NULL*/,
480  FovPort const *pfovRight /*= NULL*/ )
481 {
482  DirtyFlag = true;
483  if ( pfovLeft == NULL )
484  {
485  OverrideTanHalfFov = false;
486  }
487  else
488  {
489  OverrideTanHalfFov = true;
490  FovOverrideLeft = *pfovLeft;
491  FovOverrideRight = *pfovLeft;
492  if ( pfovRight != NULL )
493  {
494  FovOverrideRight = *pfovRight;
495  }
496  }
497 }
498 
499 
500 void StereoConfig::SetZeroVirtualIpdOverride ( bool enableOverride )
501 {
502  DirtyFlag = true;
503  OverrideZeroIpd = enableOverride;
504 }
505 
506 
507 void StereoConfig::SetZClipPlanesAndHandedness ( float zNear /*= 0.01f*/, float zFar /*= 10000.0f*/, bool rightHandedProjection /*= true*/ )
508 {
509  DirtyFlag = true;
510  ZNear = zNear;
511  ZFar = zFar;
512  RightHandedProjection = rightHandedProjection;
513 }
514 
515 void StereoConfig::SetExtraEyeRotation ( float extraEyeRotationInRadians )
516 {
517  DirtyFlag = true;
518  ExtraEyeRotationInRadians = extraEyeRotationInRadians;
519 }
520 
521 Sizei StereoConfig::CalculateRecommendedTextureSize ( bool rendertargetSharedByBothEyes,
522  float pixelDensityInCenter /*= 1.0f*/ )
523 {
524  return Render::CalculateRecommendedTextureSize ( Hmd, rendertargetSharedByBothEyes, pixelDensityInCenter );
525 }
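An illustrative usage sketch (not part of the original file) of the typical StereoConfig calling sequence; hmdInfo is assumed to come from device enumeration elsewhere in the application.

static void ExampleStereoConfigUsage ( HmdRenderInfo const &hmdInfo )
{
    StereoConfig config ( Stereo_LeftRight_Multipass );
    config.SetHmdRenderInfo ( hmdInfo );

    // Ask for a 1:1-density rendertarget shared by both eyes, then tell the config what
    // was actually allocated (the app is free to clamp the recommendation first).
    Sizei recommended = config.CalculateRecommendedTextureSize ( true, 1.0f );
    config.SetRendertargetSize ( recommended, true );

    // Per-eye parameters used for scene rendering and for the distortion pass.
    StereoEyeParamsWithOrtho const &leftEye  = config.GetEyeRenderParams ( StereoEye_Left );
    StereoEyeParamsWithOrtho const &rightEye = config.GetEyeRenderParams ( StereoEye_Right );
    OVR_UNUSED ( leftEye );
    OVR_UNUSED ( rightEye );
}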
526 
527 
528 
529 void StereoConfig::UpdateComputedState()
530 {
531  int numEyes = 2;
532  StereoEye eyeTypes[2];
533 
534  switch ( Mode )
535  {
536  case Stereo_None:
537  numEyes = 1;
538  eyeTypes[0] = StereoEye_Center;
539  break;
540 
541  case Stereo_LeftRight_Multipass:
542  numEyes = 2;
543  eyeTypes[0] = StereoEye_Left;
544  eyeTypes[1] = StereoEye_Right;
545  break;
546 
547  default:
548  OVR_ASSERT( false ); break;
549  }
550 
551  // If either of these fire, you've probably forgotten to call SetRendertargetSize()
552  OVR_ASSERT ( RendertargetSize.w > 0 );
553  OVR_ASSERT ( RendertargetSize.h > 0 );
554 
555  for ( int eyeNum = 0; eyeNum < numEyes; eyeNum++ )
556  {
557  StereoEye eyeType = eyeTypes[eyeNum];
558  LensConfig *pLensOverride = NULL;
559  if ( OverrideLens )
560  {
561  if ( eyeType == StereoEye_Right )
562  {
563  pLensOverride = &LensOverrideRight;
564  }
565  else
566  {
567  pLensOverride = &LensOverrideLeft;
568  }
569  }
570 
571  FovPort *pTanHalfFovOverride = NULL;
572  if ( OverrideTanHalfFov )
573  {
574  if ( eyeType == StereoEye_Right )
575  {
576  pTanHalfFovOverride = &FovOverrideRight;
577  }
578  else
579  {
580  pTanHalfFovOverride = &FovOverrideLeft;
581  }
582  }
583 
584  DistortionAndFov distortionAndFov =
585  CalculateDistortionAndFovInternal ( eyeType, Hmd,
586  pLensOverride, pTanHalfFovOverride,
587  ExtraEyeRotationInRadians );
588 
589  EyeRenderParams[eyeNum].StereoEye.Distortion = distortionAndFov.Distortion;
590  EyeRenderParams[eyeNum].StereoEye.Fov = distortionAndFov.Fov;
591  }
592 
593  if ( OverrideZeroIpd )
594  {
595  // Take the union of the calculated eye FOVs.
596  FovPort fov;
597  fov.UpTan = Alg::Max ( EyeRenderParams[0].StereoEye.Fov.UpTan, EyeRenderParams[1].StereoEye.Fov.UpTan );
598  fov.DownTan = Alg::Max ( EyeRenderParams[0].StereoEye.Fov.DownTan, EyeRenderParams[1].StereoEye.Fov.DownTan );
599  fov.LeftTan = Alg::Max ( EyeRenderParams[0].StereoEye.Fov.LeftTan, EyeRenderParams[1].StereoEye.Fov.LeftTan );
600  fov.RightTan = Alg::Max ( EyeRenderParams[0].StereoEye.Fov.RightTan, EyeRenderParams[1].StereoEye.Fov.RightTan );
601  EyeRenderParams[0].StereoEye.Fov = fov;
602  EyeRenderParams[1].StereoEye.Fov = fov;
603  }
604 
605  for ( int eyeNum = 0; eyeNum < numEyes; eyeNum++ )
606  {
607  StereoEye eyeType = eyeTypes[eyeNum];
608 
609  DistortionRenderDesc localDistortion = EyeRenderParams[eyeNum].StereoEye.Distortion;
610  FovPort fov = EyeRenderParams[eyeNum].StereoEye.Fov;
611 
612  // Use a placeholder - will be overridden later.
613  Recti tempViewport = Recti ( 0, 0, 1, 1 );
614 
616  eyeType, Hmd, localDistortion, fov,
617  RendertargetSize, tempViewport,
619  OverrideZeroIpd );
620 
621  // We want to create a virtual 2D surface we can draw debug text messages to.
622  // We'd like it to be a fixed distance (OrthoDistance) away,
623  // and to cover a specific FOV (Area2DFov). We need to find the projection matrix for this,
624  // and also to know how large it is in pixels to achieve a 1:1 mapping at the center of the screen.
625  float orthoDistance = 0.8f;
626  float orthoHalfFov = tanf ( Area2DFov * 0.5f );
627  Vector2f unityOrthoPixelSize = localDistortion.PixelsPerTanAngleAtCenter * ( orthoHalfFov * 2.0f );
628  float localInterpupillaryDistance = Hmd.EyeLeft.NoseToPupilInMeters + Hmd.EyeRight.NoseToPupilInMeters;
629  if ( OverrideZeroIpd )
630  {
631  localInterpupillaryDistance = 0.0f;
632  }
633  Matrix4f ortho = CreateOrthoSubProjection ( true, eyeType,
634  orthoHalfFov, orthoHalfFov,
635  unityOrthoPixelSize.x, unityOrthoPixelSize.y,
636  orthoDistance, localInterpupillaryDistance,
637  EyeRenderParams[eyeNum].StereoEye.RenderedProjection, ZNear, ZFar );
638  EyeRenderParams[eyeNum].OrthoProjection = ortho;
639  }
640 
641  // ...and now set up the viewport, scale & offset the way the app wanted.
642  setupViewportScaleAndOffsets();
643 
644  if ( OverrideZeroIpd )
645  {
646  // Monocular rendering has some fragile parts... don't break any by accident.
651  OVR_ASSERT ( EyeRenderParams[0].StereoEye.RenderedProjection.M[0][0] == EyeRenderParams[1].StereoEye.RenderedProjection.M[0][0] );
652  OVR_ASSERT ( EyeRenderParams[0].StereoEye.RenderedProjection.M[1][1] == EyeRenderParams[1].StereoEye.RenderedProjection.M[1][1] );
653  OVR_ASSERT ( EyeRenderParams[0].StereoEye.RenderedProjection.M[0][2] == EyeRenderParams[1].StereoEye.RenderedProjection.M[0][2] );
654  OVR_ASSERT ( EyeRenderParams[0].StereoEye.RenderedProjection.M[1][2] == EyeRenderParams[1].StereoEye.RenderedProjection.M[1][2] );
660  OVR_ASSERT ( EyeRenderParams[0].OrthoProjection.M[0][0] == EyeRenderParams[1].OrthoProjection.M[0][0] );
661  OVR_ASSERT ( EyeRenderParams[0].OrthoProjection.M[1][1] == EyeRenderParams[1].OrthoProjection.M[1][1] );
662  OVR_ASSERT ( EyeRenderParams[0].OrthoProjection.M[0][2] == EyeRenderParams[1].OrthoProjection.M[0][2] );
663  OVR_ASSERT ( EyeRenderParams[0].OrthoProjection.M[1][2] == EyeRenderParams[1].OrthoProjection.M[1][2] );
664  }
665 
666  DirtyFlag = false;
667 }
668 
669 
670 
671 ViewportScaleAndOffsetBothEyes StereoConfig::setupViewportScaleAndOffsets()
672 {
673  for ( int eyeNum = 0; eyeNum < 2; eyeNum++ )
674  {
675  StereoEye eyeType = ( eyeNum == 0 ) ? StereoEye_Left : StereoEye_Right;
676 
677  DistortionRenderDesc localDistortion = EyeRenderParams[eyeNum].StereoEye.Distortion;
678  FovPort fov = EyeRenderParams[eyeNum].StereoEye.Fov;
679 
680  Recti renderedViewport;
681  switch ( SetViewportMode )
682  {
683  case SVPM_Density:
684  renderedViewport = CalculateViewportDensityInternal (
685  eyeType, localDistortion, fov,
686  RendertargetSize, IsRendertargetSharedByBothEyes,
687  SetViewportPixelsPerDisplayPixel, OverrideZeroIpd );
688  break;
689  case SVPM_Size:
690  if ( ( eyeType == StereoEye_Right ) && !OverrideZeroIpd )
691  {
692  renderedViewport = CalculateViewportInternal (
693  eyeType, RendertargetSize,
694  SetViewportSize[1],
695  IsRendertargetSharedByBothEyes, OverrideZeroIpd );
696  }
697  else
698  {
699  renderedViewport = CalculateViewportInternal (
700  eyeType, RendertargetSize,
701  SetViewportSize[0],
702  IsRendertargetSharedByBothEyes, OverrideZeroIpd );
703  }
704  break;
705  case SVPM_Viewport:
706  if ( ( eyeType == StereoEye_Right ) && !OverrideZeroIpd )
707  {
708  renderedViewport = SetViewport[1];
709  }
710  else
711  {
712  renderedViewport = SetViewport[0];
713  }
714  break;
715  default: OVR_ASSERT ( false ); break;
716  }
717 
718  ViewportScaleAndOffset vpsao = CalculateViewportScaleAndOffsetInternal (
719  EyeRenderParams[eyeNum].StereoEye.EyeToSourceNDC,
720  renderedViewport,
721  RendertargetSize );
722  EyeRenderParams[eyeNum].StereoEye.RenderedViewport = vpsao.RenderedViewport;
723  EyeRenderParams[eyeNum].StereoEye.EyeToSourceUV = vpsao.EyeToSourceUV;
724  }
725 
726  ViewportScaleAndOffsetBothEyes result;
727  result.Left.RenderedViewport = EyeRenderParams[0].StereoEye.RenderedViewport;
728  result.Left.EyeToSourceUV = EyeRenderParams[0].StereoEye.EyeToSourceUV;
729  result.Right.RenderedViewport = EyeRenderParams[1].StereoEye.RenderedViewport;
730  result.Right.EyeToSourceUV = EyeRenderParams[1].StereoEye.EyeToSourceUV;
731  return result;
732 }
733 
734 // Specify a pixel density - how many rendered pixels per pixel in the physical display.
735 ViewportScaleAndOffsetBothEyes StereoConfig::SetRenderDensity ( float pixelsPerDisplayPixel )
736 {
737  SetViewportMode = SVPM_Density;
738  SetViewportPixelsPerDisplayPixel = pixelsPerDisplayPixel;
739  return setupViewportScaleAndOffsets();
740 }
741 
742 // Supply the size directly. Will be clamped to the physical rendertarget size.
743 ViewportScaleAndOffsetBothEyes StereoConfig::SetRenderSize ( Sizei const &renderSizeLeft, Sizei const &renderSizeRight )
744 {
745  SetViewportMode = SVPM_Size;
746  SetViewportSize[0] = renderSizeLeft;
747  SetViewportSize[1] = renderSizeRight;
748  return setupViewportScaleAndOffsets();
749 }
750 
751 // Supply the viewport directly. This is not clamped to the physical rendertarget - careful now!
752 ViewportScaleAndOffsetBothEyes StereoConfig::SetRenderViewport ( Recti const &renderViewportLeft, Recti const &renderViewportRight )
753 {
754  SetViewportMode = SVPM_Viewport;
755  SetViewport[0] = renderViewportLeft;
756  SetViewport[1] = renderViewportRight;
757  return setupViewportScaleAndOffsets();
758 }
759 
760 Matrix4f StereoConfig::GetProjectionWithZoom ( StereoEye eye, float fovZoom ) const
761 {
762  int eyeNum = ( eye == StereoEye_Right ) ? 1 : 0;
763  float fovScale = 1.0f / fovZoom;
764  FovPort fovPort = EyeRenderParams[eyeNum].StereoEye.Fov;
765  fovPort.LeftTan *= fovScale;
766  fovPort.RightTan *= fovScale;
767  fovPort.UpTan *= fovScale;
768  fovPort.DownTan *= fovScale;
769  return CreateProjection ( RightHandedProjection, fovPort, ZNear, ZFar );
770 }
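Worked example (illustrative): a fovZoom of 2.0 scales every tangent above by 0.5, so an eye with LeftTan = RightTan = 1.0 (a 90 degree horizontal FOV) is rendered with tangents of 0.5, roughly a 53 degree horizontal FOV - a 2x magnification at the center of the view.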
771 
772 
773 
774 
775 //-----------------------------------------------------------------------------------
776 // ***** Distortion Mesh Rendering
777 
778 
779 // Pow2 for the Morton order to work!
780 // 4 is too low - it is easy to see the "wobbles" in the HMD.
781 // 5 is realllly close but you can see pixel differences with even/odd frame checking.
782 // 6 is indistinguishable on a monitor on even/odd frames.
783 static const int DMA_GridSizeLog2 = 6;
784 static const int DMA_GridSize = 1<<DMA_GridSizeLog2;
785 static const int DMA_NumVertsPerEye = (DMA_GridSize+1)*(DMA_GridSize+1);
786 static const int DMA_NumTrisPerEye = (DMA_GridSize)*(DMA_GridSize)*2;
787 
788 
789 
790 void DistortionMeshDestroy ( DistortionMeshVertexData *pVertices, UInt16 *pTriangleMeshIndices )
791 {
792  OVR_FREE ( pVertices );
793  OVR_FREE ( pTriangleMeshIndices );
794 }
795 
796 void DistortionMeshCreate ( DistortionMeshVertexData **ppVertices, UInt16 **ppTriangleListIndices,
797  int *pNumVertices, int *pNumTriangles,
798  const StereoEyeParams &stereoParams, const HmdRenderInfo &hmdRenderInfo )
799 {
800  bool rightEye = ( stereoParams.Eye == StereoEye_Right );
801  int vertexCount = 0;
802  int triangleCount = 0;
803 
804  // Generate mesh into allocated data and return result.
805  DistortionMeshCreate(ppVertices, ppTriangleListIndices, &vertexCount, &triangleCount,
806  rightEye, hmdRenderInfo, stereoParams.Distortion, stereoParams.EyeToSourceNDC);
807 
808  *pNumVertices = vertexCount;
809  *pNumTriangles = triangleCount;
810 }
811 
812 
813 // Generate distortion mesh for an eye.
814 void DistortionMeshCreate( DistortionMeshVertexData **ppVertices, UInt16 **ppTriangleListIndices,
815  int *pNumVertices, int *pNumTriangles,
816  bool rightEye,
817  const HmdRenderInfo &hmdRenderInfo,
818  const DistortionRenderDesc &distortion, const ScaleAndOffset2D &eyeToSourceNDC )
819 {
820  *pNumVertices = DMA_NumVertsPerEye;
821  *pNumTriangles = DMA_NumTrisPerEye;
822 
823  *ppVertices = (DistortionMeshVertexData*)
824  OVR_ALLOC( sizeof(DistortionMeshVertexData) * (*pNumVertices) );
825  *ppTriangleListIndices = (UInt16*) OVR_ALLOC( sizeof(UInt16) * (*pNumTriangles) * 3 );
826 
827  if (!*ppVertices || !*ppTriangleListIndices)
828  {
829  if (*ppVertices)
830  {
831  OVR_FREE(*ppVertices);
832  }
833  if (*ppTriangleListIndices)
834  {
835  OVR_FREE(*ppTriangleListIndices);
836  }
837  *ppVertices = NULL;
838  *ppTriangleListIndices = NULL;
839  *pNumTriangles = 0;
840  *pNumVertices = 0;
841  return;
842  }
843 
844  // When does the fade-to-black edge start? Chosen heuristically.
845  const float fadeOutBorderFraction = 0.075f;
846 
847 
848  // Populate vertex buffer info
849  float xOffset = 0.0f;
850  float uOffset = 0.0f;
851  OVR_UNUSED(uOffset);
852 
853  if (rightEye)
854  {
855  xOffset = 1.0f;
856  uOffset = 0.5f;
857  }
858 
859  // First pass - build up raw vertex data.
860  DistortionMeshVertexData* pcurVert = *ppVertices;
861 
862  for ( int y = 0; y <= DMA_GridSize; y++ )
863  {
864  for ( int x = 0; x <= DMA_GridSize; x++ )
865  {
866 
867  Vector2f sourceCoordNDC;
868  // NDC texture coords [-1,+1]
869  sourceCoordNDC.x = 2.0f * ( (float)x / (float)DMA_GridSize ) - 1.0f;
870  sourceCoordNDC.y = 2.0f * ( (float)y / (float)DMA_GridSize ) - 1.0f;
871  Vector2f tanEyeAngle = TransformRendertargetNDCToTanFovSpace ( eyeToSourceNDC, sourceCoordNDC );
872 
873  // This is the function that does the really heavy lifting.
874  Vector2f screenNDC = TransformTanFovSpaceToScreenNDC ( distortion, tanEyeAngle, false );
875 
876  // We then need RGB UVs. Since chromatic aberration is generated from screen coords, not
877  // directly from texture NDCs, we can't just use tanEyeAngle, we need to go the long way round.
878  Vector2f tanEyeAnglesR, tanEyeAnglesG, tanEyeAnglesB;
879  TransformScreenNDCToTanFovSpaceChroma ( &tanEyeAnglesR, &tanEyeAnglesG, &tanEyeAnglesB,
880  distortion, screenNDC );
881 
882  pcurVert->TanEyeAnglesR = tanEyeAnglesR;
883  pcurVert->TanEyeAnglesG = tanEyeAnglesG;
884  pcurVert->TanEyeAnglesB = tanEyeAnglesB;
885 
886 
887  HmdShutterTypeEnum shutterType = hmdRenderInfo.Shutter.Type;
888  switch ( shutterType )
889  {
890  case HmdShutter_Global:
891  pcurVert->TimewarpLerp = 0.0f;
892  break;
893  case HmdShutter_RollingLeftToRight:
894  // Retrace is left to right - left eye goes 0.0 -> 0.5, then right goes 0.5 -> 1.0
895  pcurVert->TimewarpLerp = screenNDC.x * 0.25f + 0.25f;
896  if (rightEye)
897  {
898  pcurVert->TimewarpLerp += 0.5f;
899  }
900  break;
901  case HmdShutter_RollingRightToLeft:
902  // Retrace is right to left - right eye goes 0.0 -> 0.5, then left goes 0.5 -> 1.0
903  pcurVert->TimewarpLerp = 0.75f - screenNDC.x * 0.25f;
904  if (rightEye)
905  {
906  pcurVert->TimewarpLerp -= 0.5f;
907  }
908  break;
909  case HmdShutter_RollingTopToBottom:
910  // Retrace is top to bottom on both eyes at the same time.
911  pcurVert->TimewarpLerp = screenNDC.y * 0.5f + 0.5f;
912  break;
913  default: OVR_ASSERT ( false ); break;
914  }
915 
916  // Fade out at texture edges.
917  float edgeFadeIn = ( 1.0f / fadeOutBorderFraction ) *
918  ( 1.0f - Alg::Max ( Alg::Abs ( sourceCoordNDC.x ), Alg::Abs ( sourceCoordNDC.y ) ) );
919  // Also fade out at screen edges.
920  float edgeFadeInScreen = ( 2.0f / fadeOutBorderFraction ) *
921  ( 1.0f - Alg::Max ( Alg::Abs ( screenNDC.x ), Alg::Abs ( screenNDC.y ) ) );
922  edgeFadeIn = Alg::Min ( edgeFadeInScreen, edgeFadeIn );
923 
924  // Don't let verts overlap to the other eye.
925  screenNDC.x = Alg::Max ( -1.0f, Alg::Min ( screenNDC.x, 1.0f ) );
926  screenNDC.y = Alg::Max ( -1.0f, Alg::Min ( screenNDC.y, 1.0f ) );
927 
928  pcurVert->Shade = Alg::Max ( 0.0f, Alg::Min ( edgeFadeIn, 1.0f ) );
929  pcurVert->ScreenPosNDC.x = 0.5f * screenNDC.x - 0.5f + xOffset;
930  pcurVert->ScreenPosNDC.y = -screenNDC.y;
931 
932  pcurVert++;
933  }
934  }
935 
936 
937  // Populate index buffer info
938  UInt16 *pcurIndex = *ppTriangleListIndices;
939 
940  for ( int triNum = 0; triNum < DMA_GridSize * DMA_GridSize; triNum++ )
941  {
942  // Use a Morton order to help locality of FB, texture and vertex cache.
943  // (0.325ms raster order -> 0.257ms Morton order)
944  OVR_ASSERT ( DMA_GridSize <= 256 );
945  int x = ( ( triNum & 0x0001 ) >> 0 ) |
946  ( ( triNum & 0x0004 ) >> 1 ) |
947  ( ( triNum & 0x0010 ) >> 2 ) |
948  ( ( triNum & 0x0040 ) >> 3 ) |
949  ( ( triNum & 0x0100 ) >> 4 ) |
950  ( ( triNum & 0x0400 ) >> 5 ) |
951  ( ( triNum & 0x1000 ) >> 6 ) |
952  ( ( triNum & 0x4000 ) >> 7 );
953  int y = ( ( triNum & 0x0002 ) >> 1 ) |
954  ( ( triNum & 0x0008 ) >> 2 ) |
955  ( ( triNum & 0x0020 ) >> 3 ) |
956  ( ( triNum & 0x0080 ) >> 4 ) |
957  ( ( triNum & 0x0200 ) >> 5 ) |
958  ( ( triNum & 0x0800 ) >> 6 ) |
959  ( ( triNum & 0x2000 ) >> 7 ) |
960  ( ( triNum & 0x8000 ) >> 8 );
961  int FirstVertex = x * (DMA_GridSize+1) + y;
962  // Another twist - we want the top-left and bottom-right quadrants to
963  // have the triangles split one way, the other two split the other.
964  // +---+---+---+---+
965  // |  /|  /|\  |\  |
966  // | / | / | \ | \ |
967  // |/  |/  |  \|  \|
968  // +---+---+---+---+
969  // |  /|  /|\  |\  |
970  // | / | / | \ | \ |
971  // |/  |/  |  \|  \|
972  // +---+---+---+---+
973  // |\  |\  |  /|  /|
974  // | \ | \ | / | / |
975  // |  \|  \|/  |/  |
976  // +---+---+---+---+
977  // |\  |\  |  /|  /|
978  // | \ | \ | / | / |
979  // |  \|  \|/  |/  |
980  // +---+---+---+---+
981  // This way triangle edges don't span long distances over the distortion function,
982  // so linear interpolation works better & we can use fewer tris.
983  if ( ( x < DMA_GridSize/2 ) != ( y < DMA_GridSize/2 ) ) // != is logical XOR
984  {
985  *pcurIndex++ = (UInt16)FirstVertex;
986  *pcurIndex++ = (UInt16)FirstVertex+1;
987  *pcurIndex++ = (UInt16)FirstVertex+(DMA_GridSize+1)+1;
988 
989  *pcurIndex++ = (UInt16)FirstVertex+(DMA_GridSize+1)+1;
990  *pcurIndex++ = (UInt16)FirstVertex+(DMA_GridSize+1);
991  *pcurIndex++ = (UInt16)FirstVertex;
992  }
993  else
994  {
995  *pcurIndex++ = (UInt16)FirstVertex;
996  *pcurIndex++ = (UInt16)FirstVertex+1;
997  *pcurIndex++ = (UInt16)FirstVertex+(DMA_GridSize+1);
998 
999  *pcurIndex++ = (UInt16)FirstVertex+1;
1000  *pcurIndex++ = (UInt16)FirstVertex+(DMA_GridSize+1)+1;
1001  *pcurIndex++ = (UInt16)FirstVertex+(DMA_GridSize+1);
1002  }
1003  }
1004 }
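A sketch (not part of the original file) making the Morton decode used by the unrolled masks above explicit; this generic loop produces the same (x, y) as the hand-expanded version.

static void ExampleMortonDecode ( int mortonIndex, int *pGridX, int *pGridY )
{
    int x = 0;
    int y = 0;
    for ( int bit = 0; bit < 8; bit++ )
    {
        x |= ( ( mortonIndex >> ( 2 * bit     ) ) & 1 ) << bit;  // even bits form x
        y |= ( ( mortonIndex >> ( 2 * bit + 1 ) ) & 1 ) << bit;  // odd bits form y
    }
    // e.g. mortonIndex = 13 (binary 1101) decodes to (x, y) = (3, 2).
    *pGridX = x;
    *pGridY = y;
}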
1005 
1006 //-----------------------------------------------------------------------------------
1007 // ***** Heightmap Mesh Rendering
1008 
1009 
1010 static const int HMA_GridSizeLog2 = 7;
1011 static const int HMA_GridSize = 1<<HMA_GridSizeLog2;
1012 static const int HMA_NumVertsPerEye = (HMA_GridSize+1)*(HMA_GridSize+1);
1013 static const int HMA_NumTrisPerEye = (HMA_GridSize)*(HMA_GridSize)*2;
1014 
1015 
1016 void HeightmapMeshDestroy ( HeightmapMeshVertexData *pVertices, UInt16 *pTriangleMeshIndices )
1017 {
1018  OVR_FREE ( pVertices );
1019  OVR_FREE ( pTriangleMeshIndices );
1020 }
1021 
1022 void HeightmapMeshCreate ( HeightmapMeshVertexData **ppVertices, UInt16 **ppTriangleListIndices,
1023  int *pNumVertices, int *pNumTriangles,
1024  const StereoEyeParams &stereoParams, const HmdRenderInfo &hmdRenderInfo )
1025 {
1026  bool rightEye = ( stereoParams.Eye == StereoEye_Right );
1027  int vertexCount = 0;
1028  int triangleCount = 0;
1029 
1030  // Generate mesh into allocated data and return result.
1031  HeightmapMeshCreate(ppVertices, ppTriangleListIndices, &vertexCount, &triangleCount,
1032  rightEye, hmdRenderInfo, stereoParams.EyeToSourceNDC);
1033 
1034  *pNumVertices = vertexCount;
1035  *pNumTriangles = triangleCount;
1036 }
1037 
1038 
1039 // Generate heightmap mesh for one eye.
1040 void HeightmapMeshCreate( HeightmapMeshVertexData **ppVertices, UInt16 **ppTriangleListIndices,
1041  int *pNumVertices, int *pNumTriangles, bool rightEye,
1042  const HmdRenderInfo &hmdRenderInfo,
1043  const ScaleAndOffset2D &eyeToSourceNDC )
1044 {
1045  *pNumVertices = HMA_NumVertsPerEye;
1046  *pNumTriangles = HMA_NumTrisPerEye;
1047 
1048  *ppVertices = (HeightmapMeshVertexData*) OVR_ALLOC( sizeof(HeightmapMeshVertexData) * (*pNumVertices) );
1049  *ppTriangleListIndices = (UInt16*) OVR_ALLOC( sizeof(UInt16) * (*pNumTriangles) * 3 );
1050 
1051  if (!*ppVertices || !*ppTriangleListIndices)
1052  {
1053  if (*ppVertices)
1054  {
1055  OVR_FREE(*ppVertices);
1056  }
1057  if (*ppTriangleListIndices)
1058  {
1059  OVR_FREE(*ppTriangleListIndices);
1060  }
1061  *ppVertices = NULL;
1062  *ppTriangleListIndices = NULL;
1063  *pNumTriangles = 0;
1064  *pNumVertices = 0;
1065  return;
1066  }
1067 
1068  // Populate vertex buffer info
1069  float xOffset = 0.0f;
1070  float uOffset = 0.0f;
1071 
1072  if (rightEye)
1073  {
1074  xOffset = 1.0f;
1075  uOffset = 0.5f;
1076  }
1077 
1078  // First pass - build up raw vertex data.
1079  HeightmapMeshVertexData* pcurVert = *ppVertices;
1080 
1081  for ( int y = 0; y <= HMA_GridSize; y++ )
1082  {
1083  for ( int x = 0; x <= HMA_GridSize; x++ )
1084  {
1085  Vector2f sourceCoordNDC;
1086  // NDC texture coords [-1,+1]
1087  sourceCoordNDC.x = 2.0f * ( (float)x / (float)HMA_GridSize ) - 1.0f;
1088  sourceCoordNDC.y = 2.0f * ( (float)y / (float)HMA_GridSize ) - 1.0f;
1089  Vector2f tanEyeAngle = TransformRendertargetNDCToTanFovSpace ( eyeToSourceNDC, sourceCoordNDC );
1090 
1091  pcurVert->TanEyeAngles = tanEyeAngle;
1092 
1093  HmdShutterTypeEnum shutterType = hmdRenderInfo.Shutter.Type;
1094  switch ( shutterType )
1095  {
1096  case HmdShutter_Global:
1097  pcurVert->TimewarpLerp = 0.0f;
1098  break;
1099  case HmdShutter_RollingLeftToRight:
1100  // Retrace is left to right - left eye goes 0.0 -> 0.5, then right goes 0.5 -> 1.0
1101  pcurVert->TimewarpLerp = sourceCoordNDC.x * 0.25f + 0.25f;
1102  if (rightEye)
1103  {
1104  pcurVert->TimewarpLerp += 0.5f;
1105  }
1106  break;
1107  case HmdShutter_RollingRightToLeft:
1108  // Retrace is right to left - right eye goes 0.0 -> 0.5, then left goes 0.5 -> 1.0
1109  pcurVert->TimewarpLerp = 0.75f - sourceCoordNDC.x * 0.25f;
1110  if (rightEye)
1111  {
1112  pcurVert->TimewarpLerp -= 0.5f;
1113  }
1114  break;
1115  case HmdShutter_RollingTopToBottom:
1116  // Retrace is top to bottom on both eyes at the same time.
1117  pcurVert->TimewarpLerp = sourceCoordNDC.y * 0.5f + 0.5f;
1118  break;
1119  default: OVR_ASSERT ( false ); break;
1120  }
1121 
1122  // Don't let verts overlap to the other eye.
1123  //sourceCoordNDC.x = Alg::Max ( -1.0f, Alg::Min ( sourceCoordNDC.x, 1.0f ) );
1124  //sourceCoordNDC.y = Alg::Max ( -1.0f, Alg::Min ( sourceCoordNDC.y, 1.0f ) );
1125 
1126  //pcurVert->ScreenPosNDC.x = 0.5f * sourceCoordNDC.x - 0.5f + xOffset;
1127  pcurVert->ScreenPosNDC.x = sourceCoordNDC.x;
1128  pcurVert->ScreenPosNDC.y = -sourceCoordNDC.y;
1129 
1130  pcurVert++;
1131  }
1132  }
1133 
1134 
1135  // Populate index buffer info
1136  UInt16 *pcurIndex = *ppTriangleListIndices;
1137 
1138  for ( int triNum = 0; triNum < HMA_GridSize * HMA_GridSize; triNum++ )
1139  {
1140  // Use a Morton order to help locality of FB, texture and vertex cache.
1141  // (0.325ms raster order -> 0.257ms Morton order)
1142  OVR_ASSERT ( HMA_GridSize < 256 );
1143  int x = ( ( triNum & 0x0001 ) >> 0 ) |
1144  ( ( triNum & 0x0004 ) >> 1 ) |
1145  ( ( triNum & 0x0010 ) >> 2 ) |
1146  ( ( triNum & 0x0040 ) >> 3 ) |
1147  ( ( triNum & 0x0100 ) >> 4 ) |
1148  ( ( triNum & 0x0400 ) >> 5 ) |
1149  ( ( triNum & 0x1000 ) >> 6 ) |
1150  ( ( triNum & 0x4000 ) >> 7 );
1151  int y = ( ( triNum & 0x0002 ) >> 1 ) |
1152  ( ( triNum & 0x0008 ) >> 2 ) |
1153  ( ( triNum & 0x0020 ) >> 3 ) |
1154  ( ( triNum & 0x0080 ) >> 4 ) |
1155  ( ( triNum & 0x0200 ) >> 5 ) |
1156  ( ( triNum & 0x0800 ) >> 6 ) |
1157  ( ( triNum & 0x2000 ) >> 7 ) |
1158  ( ( triNum & 0x8000 ) >> 8 );
1159  int FirstVertex = x * (HMA_GridSize+1) + y;
1160  // Another twist - we want the top-left and bottom-right quadrants to
1161  // have the triangles split one way, the other two split the other.
1162  // +---+---+---+---+
1163  // |  /|  /|\  |\  |
1164  // | / | / | \ | \ |
1165  // |/  |/  |  \|  \|
1166  // +---+---+---+---+
1167  // |  /|  /|\  |\  |
1168  // | / | / | \ | \ |
1169  // |/  |/  |  \|  \|
1170  // +---+---+---+---+
1171  // |\  |\  |  /|  /|
1172  // | \ | \ | / | / |
1173  // |  \|  \|/  |/  |
1174  // +---+---+---+---+
1175  // |\  |\  |  /|  /|
1176  // | \ | \ | / | / |
1177  // |  \|  \|/  |/  |
1178  // +---+---+---+---+
1179  // This way triangle edges don't span long distances over the distortion function,
1180  // so linear interpolation works better & we can use fewer tris.
1181  if ( ( x < HMA_GridSize/2 ) != ( y < HMA_GridSize/2 ) ) // != is logical XOR
1182  {
1183  *pcurIndex++ = (UInt16)FirstVertex;
1184  *pcurIndex++ = (UInt16)FirstVertex+1;
1185  *pcurIndex++ = (UInt16)FirstVertex+(HMA_GridSize+1)+1;
1186 
1187  *pcurIndex++ = (UInt16)FirstVertex+(HMA_GridSize+1)+1;
1188  *pcurIndex++ = (UInt16)FirstVertex+(HMA_GridSize+1);
1189  *pcurIndex++ = (UInt16)FirstVertex;
1190  }
1191  else
1192  {
1193  *pcurIndex++ = (UInt16)FirstVertex;
1194  *pcurIndex++ = (UInt16)FirstVertex+1;
1195  *pcurIndex++ = (UInt16)FirstVertex+(HMA_GridSize+1);
1196 
1197  *pcurIndex++ = (UInt16)FirstVertex+1;
1198  *pcurIndex++ = (UInt16)FirstVertex+(HMA_GridSize+1)+1;
1199  *pcurIndex++ = (UInt16)FirstVertex+(HMA_GridSize+1);
1200  }
1201  }
1202 }
1203 
1204 //-----------------------------------------------------------------------------------
1205 // ***** Prediction and timewarp.
1206 //
1207 
1208 // Calculates the values from the HMD info.
1209 PredictionValues PredictionGetDeviceValues ( const HmdRenderInfo &hmdRenderInfo,
1210  bool withTimewarp /*= true*/,
1211  bool withVsync /*= true*/ )
1212 {
1213  PredictionValues result;
1214 
1215  result.WithTimewarp = withTimewarp;
1216  result.WithVsync = withVsync;
1217 
1218  // For unclear reasons, most graphics systems add an extra frame of latency
1219  // somewhere along the way. In time we'll debug this and figure it out, but
1220  // for now this gets prediction a little bit better.
1221  const float extraFramesOfBufferingKludge = 1.0f;
1222 
1223  if ( withVsync )
1224  {
1225  // These are the times from the Present+Flush to when the middle of the scene is "averagely visible" (without timewarp)
1226  // So if you had no timewarp, this, plus the time until the next vsync, is how much to predict by.
1227  result.PresentFlushToRenderedScene = extraFramesOfBufferingKludge * hmdRenderInfo.Shutter.FirstScanlineToLastScanline;
1228  // Predict to the middle of the screen being scanned out.
1229  result.PresentFlushToRenderedScene += hmdRenderInfo.Shutter.VsyncToFirstScanline + 0.5f * hmdRenderInfo.Shutter.FirstScanlineToLastScanline;
1230  // Time for pixels to get half-way to settling.
1231  result.PresentFlushToRenderedScene += hmdRenderInfo.Shutter.PixelSettleTime * 0.5f;
1232  // Predict to half-way through persistence
1233  result.PresentFlushToRenderedScene += hmdRenderInfo.Shutter.PixelPersistence * 0.5f;
1234 
1235  // The time from the Present+Flush to when the first scanline is "averagely visible".
1236  result.PresentFlushToTimewarpStart = extraFramesOfBufferingKludge * hmdRenderInfo.Shutter.FirstScanlineToLastScanline;
1237  // Predict to the first line being scanned out.
1238  result.PresentFlushToTimewarpStart += hmdRenderInfo.Shutter.VsyncToFirstScanline;
1239  // Time for pixels to get half-way to settling.
1240  result.PresentFlushToTimewarpStart += hmdRenderInfo.Shutter.PixelSettleTime * 0.5f;
1241  // Predict to half-way through persistence
1242  result.PresentFlushToTimewarpStart += hmdRenderInfo.Shutter.PixelPersistence * 0.5f;
1243 
1244  // Time to the last scanline.
1245  result.PresentFlushToTimewarpEnd = result.PresentFlushToTimewarpStart + hmdRenderInfo.Shutter.FirstScanlineToLastScanline;
1246 
1247  // Ideal framerate.
1248  result.PresentFlushToPresentFlush = hmdRenderInfo.Shutter.VsyncToNextVsync;
1249  }
1250  else
1251  {
1252  // Timewarp without vsync is a little odd.
1253  // Currently, we assume that without vsync, we have no idea which scanline
1254  // is currently being sent to the display. So we can't do lerping timewarp,
1255  // we can just do a full-screen late-stage fixup.
1256 
1257  // "PresentFlushToRenderedScene" means the time from the Present+Flush to when the middle of the scene is "averagely visible" (without timewarp)
1258  // So if you had no timewarp, this, plus the time until the next flush (which is usually the time to render the frame), is how much to predict by.
1259  // Time for pixels to get half-way to settling.
1260  result.PresentFlushToRenderedScene = hmdRenderInfo.Shutter.PixelSettleTime * 0.5f;
1261  // Predict to half-way through persistence
1262  result.PresentFlushToRenderedScene += hmdRenderInfo.Shutter.PixelPersistence * 0.5f;
1263 
1264  // Without vsync, you don't know timings, and so can't do anything useful with lerped warping.
1265  result.PresentFlushToTimewarpStart = 0.0f;
1266  result.PresentFlushToTimewarpEnd = 0.0f;
1267 
1268  // There's no concept of "ideal" when vsync is off.
1269  result.PresentFlushToPresentFlush = 0.0f;
1270  }
1271 
1272  return result;
1273 }
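Worked example of the vsync path above (illustrative: the ~16.6ms scan-out duration is a hypothetical rolling-shutter value, the other constants are the DK1-style defaults used earlier in this file): PresentFlushToRenderedScene = 1.0 * 0.0166 (extra buffered frame) + 0.000052 + 0.5 * 0.0166 (middle scanline) + 0.5 * 0.015 (pixel settle) + 0.5 * 0.0167 (pixel persistence) ~ 0.041 seconds, i.e. with vsync on the renderer should predict the head pose roughly 41ms past the Present+Flush time.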
1274 
1275 Matrix4f TimewarpComputePoseDelta ( Matrix4f const &renderedViewFromWorld, Matrix4f const &predictedViewFromWorld, Matrix4f const&eyeViewAdjust )
1276 {
1277  Matrix4f worldFromPredictedView = (eyeViewAdjust * predictedViewFromWorld).InvertedHomogeneousTransform();
1278  Matrix4f matRenderFromNowStart = (eyeViewAdjust * renderedViewFromWorld) * worldFromPredictedView;
1279 
1280  // The sensor-predicted orientations have: X=right, Y=up, Z=backwards.
1281  // The vectors inside the mesh are in NDC to keep the shader simple: X=right, Y=down, Z=forwards.
1282  // So we need to perform a similarity transform on this delta matrix.
1283  // The verbose code would look like this:
1284  /*
1285  Matrix4f matBasisChange;
1286  matBasisChange.SetIdentity();
1287  matBasisChange.M[0][0] = 1.0f;
1288  matBasisChange.M[1][1] = -1.0f;
1289  matBasisChange.M[2][2] = -1.0f;
1290  Matrix4f matBasisChangeInv = matBasisChange.Inverted();
1291  matRenderFromNow = matBasisChangeInv * matRenderFromNow * matBasisChange;
1292  */
1293  // ...but of course all the above is a constant transform and much more easily done.
1294  // We flip the signs of the Y&Z row, then flip the signs of the Y&Z column,
1295  // and of course most of the flips cancel:
1296  // +++ +-- +--
1297  // +++ -> flip Y&Z columns -> +-- -> flip Y&Z rows -> -++
1298  // +++ +-- -++
1299  matRenderFromNowStart.M[0][1] = -matRenderFromNowStart.M[0][1];
1300  matRenderFromNowStart.M[0][2] = -matRenderFromNowStart.M[0][2];
1301  matRenderFromNowStart.M[1][0] = -matRenderFromNowStart.M[1][0];
1302  matRenderFromNowStart.M[2][0] = -matRenderFromNowStart.M[2][0];
1303  matRenderFromNowStart.M[1][3] = -matRenderFromNowStart.M[1][3];
1304  matRenderFromNowStart.M[2][3] = -matRenderFromNowStart.M[2][3];
1305 
1306  return matRenderFromNowStart;
1307 }
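A small sketch (not part of the original file) spelling out the constant similarity transform described above; the basis-change matrix is diag(1,-1,-1,1), which is its own inverse, so the sign-flip shortcut is simply B * M * B.

static Matrix4f ExampleTimewarpBasisChangeLongWay ( Matrix4f const &matRenderFromNow )
{
    Matrix4f matBasisChange;
    matBasisChange.SetIdentity();
    matBasisChange.M[1][1] = -1.0f;
    matBasisChange.M[2][2] = -1.0f;
    // Flips the signs of the Y & Z rows and columns - the same cells that
    // TimewarpComputePoseDelta negates by hand above.
    return matBasisChange * matRenderFromNow * matBasisChange;
}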
1308 
1309 Matrix4f TimewarpComputePoseDeltaPosition ( Matrix4f const &renderedViewFromWorld, Matrix4f const &predictedViewFromWorld, Matrix4f const&eyeViewAdjust )
1310 {
1311  Matrix4f worldFromPredictedView = (eyeViewAdjust * predictedViewFromWorld).InvertedHomogeneousTransform();
1312  Matrix4f matRenderXform = (eyeViewAdjust * renderedViewFromWorld) * worldFromPredictedView;
1313 
1314  return matRenderXform.Inverted();
1315 }
1316 
1317 TimewarpMachine::TimewarpMachine()
1318 {
1319  for ( int i = 0; i < 2; i++ )
1320  {
1321  EyeRenderPoses[i] = Transformf();
1322  }
1323  DistortionTimeCount = 0;
1324  VsyncEnabled = false;
1325 }
1326 
1327 void TimewarpMachine::Reset(HmdRenderInfo& renderInfo, bool vsyncEnabled, double timeNow)
1328 {
1329  RenderInfo = renderInfo;
1330  VsyncEnabled = vsyncEnabled;
1331  CurrentPredictionValues = PredictionGetDeviceValues ( RenderInfo, true, VsyncEnabled );
1332 
1333  DistortionTimeCount = 0;
1334  DistortionTimeAverage = 0.0f;
1335  LastFramePresentFlushTime = timeNow;
1336  AfterPresentAndFlush(timeNow);
1337 }
1338 
1339 void TimewarpMachine::AfterPresentAndFlush(double timeNow)
1340 {
1342  LastFramePresentFlushTime = timeNow;
1343  NextFramePresentFlushTime = timeNow + (double)CurrentPredictionValues.PresentFlushToPresentFlush;
1344 }
1345 
1346 double TimewarpMachine::GetViewRenderPredictionTime()
1347 {
1348  // Note that PredictionGetDeviceValues() did all the vsync-dependent thinking for us.
1349  return LastFramePresentFlushTime + CurrentPredictionValues.PresentFlushToRenderedScene;
1350 }
1351 
1352 Transformf TimewarpMachine::GetViewRenderPredictionPose(SensorFusion &sfusion)
1353 {
1354  double predictionTime = GetViewRenderPredictionTime();
1355  return sfusion.GetPoseAtTime(predictionTime);
1356 }
1357 
1358 double TimewarpMachine::GetVisiblePixelTimeStart()
1359 {
1360  // Note that PredictionGetDeviceValues() did all the vsync-dependent thinking for us.
1361  return LastFramePresentFlushTime + CurrentPredictionValues.PresentFlushToTimewarpStart;
1362 }
1363 double TimewarpMachine::GetVisiblePixelTimeEnd()
1364 {
1365  // Note that PredictionGetDeviceValues() did all the vsync-dependent thinking for us.
1366  return LastFramePresentFlushTime + CurrentPredictionValues.PresentFlushToTimewarpEnd;
1367 }
1368 Transformf TimewarpMachine::GetPredictedVisiblePixelPoseStart(SensorFusion &sfusion)
1369 {
1370  double predictionTime = GetVisiblePixelTimeStart();
1371  return sfusion.GetPoseAtTime(predictionTime);
1372 }
1373 Transformf TimewarpMachine::GetPredictedVisiblePixelPoseEnd(SensorFusion &sfusion)
1374 {
1375  double predictionTime = GetVisiblePixelTimeEnd();
1376  return sfusion.GetPoseAtTime(predictionTime);
1377 }
1378 Matrix4f TimewarpMachine::GetTimewarpDeltaStart(SensorFusion &sfusion, Transformf const &renderedPose)
1379 {
1380  Transformf visiblePose = GetPredictedVisiblePixelPoseStart ( sfusion );
1381  Matrix4f visibleMatrix(visiblePose);
1382  Matrix4f renderedMatrix(renderedPose);
1383  Matrix4f identity; // doesn't matter for orientation-only timewarp
1384  return TimewarpComputePoseDelta ( renderedMatrix, visibleMatrix, identity );
1385 }
1386 Matrix4f TimewarpMachine::GetTimewarpDeltaEnd(SensorFusion &sfusion, Transformf const &renderedPose)
1387 {
1388  Transformf visiblePose = GetPredictedVisiblePixelPoseEnd ( sfusion );
1389  Matrix4f visibleMatrix(visiblePose);
1390  Matrix4f renderedMatrix(renderedPose);
1391  Matrix4f identity; // doesn't matter for orientation-only timewarp
1392  return TimewarpComputePoseDelta ( renderedMatrix, visibleMatrix, identity );
1393 }
1394 
1395 
1396 // What time should the app wait until before starting distortion?
1397 double TimewarpMachine::JustInTime_GetDistortionWaitUntilTime()
1398 {
1399  if ( !VsyncEnabled || ( DistortionTimeAverage <= 0.0f ) )
1400  {
1401  // Don't wait.
1402  return LastFramePresentFlushTime;
1403  }
1404 
1405  const float fudgeFactor = 0.002f; // Found heuristically - 1ms is too short because of timing granularity - may need further tweaking!
1406  float howLongBeforePresent = DistortionTimeAverage + fudgeFactor;
1407  // Subtlety here. Technically, the correct time is NextFramePresentFlushTime - howLongBeforePresent.
1408  // However, if the app drops a frame, this then perpetuates it,
1409  // i.e. if the display is running at 60fps, but the last frame was slow,
1410  // (e.g. because of swapping or whatever), then NextFramePresentFlushTime is
1411  // 33ms in the future, not 16ms. Since this function supplies the
1412  // time to wait until, the app will indeed wait until 32ms, so the framerate
1413  // drops to 30fps and never comes back up!
1414  // So we return the *ideal* framerate, not the *actual* framerate.
1415  return LastFramePresentFlushTime + (float)( CurrentPredictionValues.PresentFlushToPresentFlush - howLongBeforePresent );
1416 }
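An illustrative frame-loop sketch (not part of the original file) of how an app might drive TimewarpMachine around its distortion pass; clock sampling and draw calls are stand-ins, and renderedPose is assumed to be the pose the frame was rendered with.

static void ExampleTimewarpFrame ( TimewarpMachine &timewarp, SensorFusion &sfusion,
                                   Transformf const &renderedPose,
                                   double timeDistortionStart, double timeDistortionEnd,
                                   double timePresentFlush )
{
    // (The scene itself would have been rendered with timewarp.GetViewRenderPredictionPose.)
    // Fetch warp deltas for the first and last visible scanlines; the distortion shader
    // lerps between them using the per-vertex TimewarpLerp written into the mesh above.
    Matrix4f deltaStart = timewarp.GetTimewarpDeltaStart ( sfusion, renderedPose );
    Matrix4f deltaEnd   = timewarp.GetTimewarpDeltaEnd   ( sfusion, renderedPose );
    OVR_UNUSED ( deltaStart );
    OVR_UNUSED ( deltaEnd );

    // Bracket the distortion draw so the machine can learn how long the pass takes.
    timewarp.JustInTime_BeforeDistortionTimeMeasurement ( timeDistortionStart );
    // ... issue the distortion + timewarp draw call here ...
    timewarp.JustInTime_AfterDistortionTimeMeasurement ( timeDistortionEnd );

    // Feed the Present+Flush time back so next frame's predictions line up.
    timewarp.AfterPresentAndFlush ( timePresentFlush );
}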
1417 
1418 
1419 bool TimewarpMachine::JustInTime_NeedDistortionTimeMeasurement() const
1420 {
1421  if (!VsyncEnabled)
1422  {
1423  return false;
1424  }
1425  return ( DistortionTimeCount < NumDistortionTimes );
1426 }
1427 
1428 void TimewarpMachine::JustInTime_BeforeDistortionTimeMeasurement(double timeNow)
1429 {
1430  DistortionTimeCurrentStart = timeNow;
1431 }
1432 
1433 void TimewarpMachine::JustInTime_AfterDistortionTimeMeasurement(double timeNow)
1434 {
1435  float timeDelta = (float)( timeNow - DistortionTimeCurrentStart );
1436  if ( DistortionTimeCount < NumDistortionTimes )
1437  {
1438  DistortionTimes[DistortionTimeCount] = timeDelta;
1439  DistortionTimeCount++;
1440  if ( DistortionTimeCount == NumDistortionTimes )
1441  {
1442  // Median.
1443  float distortionTimeMedian = 0.0f;
1444  for ( int i = 0; i < NumDistortionTimes/2; i++ )
1445  {
1446  // Find the maximum time of those remaining.
1447  float maxTime = DistortionTimes[0];
1448  int maxIndex = 0;
1449  for ( int j = 1; j < NumDistortionTimes; j++ )
1450  {
1451  if ( maxTime < DistortionTimes[j] )
1452  {
1453  maxTime = DistortionTimes[j];
1454  maxIndex = j;
1455  }
1456  }
1457  // Zero that max time, so we'll find the next-highest time.
1458  DistortionTimes[maxIndex] = 0.0f;
1459  distortionTimeMedian = maxTime;
1460  }
1461  DistortionTimeAverage = distortionTimeMedian;
1462  }
1463  }
1464  else
1465  {
1466  OVR_ASSERT ( !"Really didn't need more measurements, thanks" );
1467  }
1468 }
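Worked note (illustrative, assuming a hypothetical NumDistortionTimes of 10): the loop above zeroes the largest NumDistortionTimes/2 samples one at a time, so the last maximum it finds - the 5th largest of the 10 - is kept as the "median", i.e. the upper median of the collected distortion times.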
1469 
1470 
1471 }}} // OVR::Util::Render
1472 