CAPI_FrameTimeManager.cpp
/************************************************************************************

Filename    :   CAPI_FrameTimeManager.cpp
Content     :   Manage frame timing and pose prediction for rendering
Created     :   November 30, 2013
Authors     :   Volga Aksoy, Michael Antonov

Copyright   :   Copyright 2014 Oculus VR, Inc. All Rights reserved.

Licensed under the Oculus VR Rift SDK License Version 3.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.

You may obtain a copy of the License at

http://www.oculusvr.com/licenses/LICENSE-3.1

Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

************************************************************************************/

#include "CAPI_FrameTimeManager.h"


namespace OVR { namespace CAPI {


//-------------------------------------------------------------------------------------
// ***** FrameLatencyTracker


FrameLatencyTracker::FrameLatencyTracker()
{
    Reset();
}

void FrameLatencyTracker::Reset()
{
    TrackerEnabled         = true;
    LatencyRecordTime      = 0.0;
    FrameIndex             = 0;
    MatchCount             = 0;
    RenderLatencySeconds   = 0.0;
    TimewarpLatencySeconds = 0.0;

    FrameDeltas.Clear();
}


unsigned char FrameLatencyTracker::GetNextDrawColor()
{
    if (!TrackerEnabled || (MatchCount >= FramesTracked) ||
        (FrameIndex >= FramesTracked))
    {
        return (unsigned char)Util::FrameTimeRecord::ReadbackIndexToColor(0);
    }

    OVR_ASSERT(FrameIndex < FramesTracked);
    return (unsigned char)Util::FrameTimeRecord::ReadbackIndexToColor(FrameIndex + 1);
}


void FrameLatencyTracker::SaveDrawColor(unsigned char drawColor, double endFrameTime,
                                        double renderIMUTime, double timewarpIMUTime)
{
    if (!TrackerEnabled || (MatchCount >= FramesTracked))
        return;

    if (FrameIndex < FramesTracked)
    {
        OVR_ASSERT(Util::FrameTimeRecord::ReadbackIndexToColor(FrameIndex + 1) == drawColor);
        OVR_UNUSED(drawColor);

        // Saves {color, endFrame time}.
        FrameEndTimes[FrameIndex].ReadbackIndex          = FrameIndex + 1;
        FrameEndTimes[FrameIndex].TimeSeconds            = endFrameTime;
        FrameEndTimes[FrameIndex].RenderIMUTimeSeconds   = renderIMUTime;
        FrameEndTimes[FrameIndex].TimewarpIMUTimeSeconds = timewarpIMUTime;
        FrameEndTimes[FrameIndex].MatchedRecord          = false;
        FrameIndex++;
    }
    else
    {
        // If the request was outstanding for too long, switch to zero mode to restart.
        if (endFrameTime > (FrameEndTimes[FrameIndex - 1].TimeSeconds + 0.15))
        {
            if (MatchCount == 0)
            {
                // If nothing was matched, we have no latency reading.
                RenderLatencySeconds   = 0.0;
                TimewarpLatencySeconds = 0.0;
            }

            MatchCount = 0;
            FrameIndex = 0;
        }
    }
}


void FrameLatencyTracker::MatchRecord(const Util::FrameTimeRecordSet &r)
{
    if (!TrackerEnabled)
        return;

    if (FrameIndex == 0)
    {
        // Do we have all zeros?
        if (r.IsAllZeroes())
        {
            OVR_ASSERT(FrameIndex == 0);
            FrameIndex = 0;
            MatchCount = 0;
        }
        return;
    }

    // We are in Match Mode. Wait until all colors are matched or timeout,
    // at which point we go back to zeros.

    for (int i = 0; i < FrameIndex; i++)
    {
        int recordIndex      = 0;
        int consecutiveMatch = 0;

        OVR_ASSERT(FrameEndTimes[i].ReadbackIndex != 0);

        if (r.FindReadbackIndex(&recordIndex, FrameEndTimes[i].ReadbackIndex))
        {
            // Advance forward to see that we have several more matches.
            int ri = recordIndex + 1;
            int j  = i + 1;

            consecutiveMatch++;

            for (; (j < FrameIndex) && (ri < Util::FrameTimeRecordSet::RecordCount); j++, ri++)
            {
                if (r[ri].ReadbackIndex != FrameEndTimes[j].ReadbackIndex)
                    break;
                consecutiveMatch++;
            }

            // Match at least 2 items in a row, to avoid accidentally matching a color.
            if (consecutiveMatch > 1)
            {
                // Record latency values for all but the last samples. Keep the last 2
                // samples for the future to simplify matching.
                for (int q = 0; q < consecutiveMatch; q++)
                {
                    const Util::FrameTimeRecord &scanoutFrame = r[recordIndex + q];
                    FrameTimeRecordEx           &renderFrame  = FrameEndTimes[i + q];

                    if (!renderFrame.MatchedRecord)
                    {
                        double deltaSeconds = scanoutFrame.TimeSeconds - renderFrame.TimeSeconds;
                        if (deltaSeconds > 0.0)
                        {
                            FrameDeltas.AddTimeDelta(deltaSeconds);
                            LatencyRecordTime      = scanoutFrame.TimeSeconds;
                            RenderLatencySeconds   = scanoutFrame.TimeSeconds - renderFrame.RenderIMUTimeSeconds;
                            TimewarpLatencySeconds = (renderFrame.TimewarpIMUTimeSeconds == 0.0) ? 0.0 :
                                                     (scanoutFrame.TimeSeconds - renderFrame.TimewarpIMUTimeSeconds);
                        }

                        renderFrame.MatchedRecord = true;
                        MatchCount++;
                    }
                }

                // Exit for.
                break;
            }
        }
    } // for ( i => FrameIndex )

    // If we matched all frames, start over.
    if (MatchCount == FramesTracked)
    {
        MatchCount = 0;
        FrameIndex = 0;
    }
}


void FrameLatencyTracker::GetLatencyTimings(float latencies[3])
{
    if (ovr_GetTimeInSeconds() > (LatencyRecordTime + 2.0))
    {
        latencies[0] = 0.0f;
        latencies[1] = 0.0f;
        latencies[2] = 0.0f;
    }
    else
    {
        latencies[0] = (float)RenderLatencySeconds;
        latencies[1] = (float)TimewarpLatencySeconds;
        latencies[2] = (float)FrameDeltas.GetMedianTimeDelta();
    }
}
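
// Illustrative usage sketch (added for exposition; not part of the original
// SDK source). Shows how a renderer could drive FrameLatencyTracker over one
// frame; `recordSet` stands in for the color records read back from the DK2
// latency-tester hardware, and the IMU sample times are simplified.
#if 0
void ExampleLatencyTrackingFrame(FrameLatencyTracker& tracker,
                                 const Util::FrameTimeRecordSet& recordSet)
{
    // 1. Ask which color to draw into the latency-test area this frame.
    unsigned char drawColor = tracker.GetNextDrawColor();

    // ... render the scene, drawing drawColor into the test quad ...

    // 2. After Present & sync, record the frame-end time and the IMU sample
    //    times used for rendering and timewarp (0.0 = no timewarp sample).
    double frameEndTime = ovr_GetTimeInSeconds();
    tracker.SaveDrawColor(drawColor, frameEndTime, frameEndTime, 0.0);

    // 3. Feed back the scan-out records so draw colors can be matched
    //    against the times the display actually showed them.
    tracker.MatchRecord(recordSet);

    // 4. Read the smoothed results: render latency, timewarp latency,
    //    and the median post-present delta.
    float latencies[3];
    tracker.GetLatencyTimings(latencies);
}
#endif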


//-------------------------------------------------------------------------------------

FrameTimeManager::FrameTimeManager(bool vsyncEnabled)
    : VsyncEnabled(vsyncEnabled), DynamicPrediction(true), SdkRender(false),
      FrameTiming()
{
    RenderIMUTimeSeconds   = 0.0;
    TimewarpIMUTimeSeconds = 0.0;

    // HACK: SyncToScanoutDelay observed close to 1 frame in video cards.
    //       Overwritten by dynamic latency measurement on DK2.
    VSyncToScanoutDelay   = 0.013f;
    NoVSyncToScanoutDelay = 0.004f;
}

void FrameTimeManager::Init(HmdRenderInfo& renderInfo)
{
    // Set up prediction distances.
    // With-Vsync timings.
    RenderInfo = renderInfo;

    ScreenSwitchingDelay = RenderInfo.Shutter.PixelSettleTime * 0.5f +
                           RenderInfo.Shutter.PixelPersistence * 0.5f;
}

void FrameTimeManager::ResetFrameTiming(unsigned frameIndex,
                                        bool dynamicPrediction,
                                        bool sdkRender)
{
    DynamicPrediction = dynamicPrediction;
    SdkRender         = sdkRender;

    FrameTimeDeltas.Clear();
    DistortionRenderTimes.Clear();
    ScreenLatencyTracker.Reset();

    FrameTiming.FrameIndex               = frameIndex;
    FrameTiming.NextFrameTime            = 0.0;
    FrameTiming.ThisFrameTime            = 0.0;
    FrameTiming.Inputs.FrameDelta        = calcFrameDelta();
    FrameTiming.Inputs.ScreenDelay       = calcScreenDelay();
    FrameTiming.Inputs.TimewarpWaitDelta = 0.0;

    LocklessTiming.SetState(FrameTiming);
}


double FrameTimeManager::calcFrameDelta() const
{
    // Timing difference between frames is tracked by FrameTimeDeltas, or
    // is a hard-coded value of 1/FrameRate.
    double frameDelta;

    if (!VsyncEnabled)
    {
        frameDelta = 0.0;
    }
    else if (FrameTimeDeltas.GetCount() > 3)
    {
        frameDelta = FrameTimeDeltas.GetMedianTimeDelta();
        if (frameDelta > (RenderInfo.Shutter.VsyncToNextVsync + 0.001))
            frameDelta = RenderInfo.Shutter.VsyncToNextVsync;
    }
    else
    {
        frameDelta = RenderInfo.Shutter.VsyncToNextVsync;
    }

    return frameDelta;
}
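
// Worked example (illustrative): at DK2's 75 Hz refresh, VsyncToNextVsync is
// 1/75 ~= 0.0133s. If recent frame-to-frame deltas were {0.0134, 0.0133,
// 0.0401, 0.0133, 0.0134} -- one frame missed vsync -- the median is 0.0134,
// so a single hitch does not skew the estimate, and the +0.001 guard above
// clamps a runaway median back to the nominal vsync interval.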


double FrameTimeManager::calcScreenDelay() const
{
    double screenDelay = ScreenSwitchingDelay;
    double measuredVSyncToScanout;

    // Use the real-time DK2 latency tester HW for prediction if it is working.
    // Do a sanity check: the measured value must be under 60 ms.
    if (!VsyncEnabled)
    {
        screenDelay += NoVSyncToScanoutDelay;
    }
    else if ( DynamicPrediction &&
              (ScreenLatencyTracker.FrameDeltas.GetCount() > 3) &&
              (measuredVSyncToScanout = ScreenLatencyTracker.FrameDeltas.GetMedianTimeDelta(),
               (measuredVSyncToScanout > 0.0001) && (measuredVSyncToScanout < 0.06)) )
    {
        screenDelay += measuredVSyncToScanout;
    }
    else
    {
        screenDelay += VSyncToScanoutDelay;
    }

    return screenDelay;
}
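
// Worked example (illustrative; the magnitudes are assumptions): with vsync
// on and no dynamic measurement yet, a ScreenSwitchingDelay of ~0.004s plus
// the hard-coded VSyncToScanoutDelay gives 0.004 + 0.013 = 0.017s. Once the
// DK2 latency tester has more than 3 samples and its median vsync-to-scanout
// falls inside (0.0001, 0.06)s, that measured value replaces the 13 ms guess.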


double FrameTimeManager::calcTimewarpWaitDelta() const
{
    // If timewarp timing hasn't been calculated, we should wait.
    if (!VsyncEnabled)
        return 0.0;

    if (SdkRender)
    {
        if (NeedDistortionTimeMeasurement())
            return 0.0;
        return -(DistortionRenderTimes.GetMedianTimeDelta() + 0.002);
    }

    // Just a hard-coded "high" value for game-drawn code.
    // TBD: Just return 0 and let users calculate this themselves?
    return -0.003;
}
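
// Worked example (illustrative): the delta is negative because it is added
// to NextFrameTime. If the median distortion-render time is 0.0015s, timewarp
// starts at NextFrameTime - (0.0015 + 0.002), i.e. 3.5 ms before the expected
// vsync: just enough to finish distortion rendering plus a 2 ms safety margin.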



void FrameTimeManager::Timing::InitTimingFromInputs(const FrameTimeManager::TimingInputs& inputs,
                                                    HmdShutterTypeEnum shutterType,
                                                    double thisFrameTime, unsigned int frameIndex)
{
    // ThisFrameTime comes from the end of last frame, unless it changed.
    double nextFrameBase;
    double frameDelta = inputs.FrameDelta;

    FrameIndex = frameIndex;

    ThisFrameTime     = thisFrameTime;
    NextFrameTime     = ThisFrameTime + frameDelta;
    nextFrameBase     = NextFrameTime + inputs.ScreenDelay;
    MidpointTime      = nextFrameBase + frameDelta * 0.5;
    TimewarpPointTime = (inputs.TimewarpWaitDelta == 0.0) ?
                        0.0 : (NextFrameTime + inputs.TimewarpWaitDelta);

    // Calculate absolute points in time when eye rendering or corresponding time-warp
    // screen edges will become visible.
    // This only matters with VSync.
    switch (shutterType)
    {
    case HmdShutter_RollingTopToBottom:
        EyeRenderTimes[0]           = MidpointTime;
        EyeRenderTimes[1]           = MidpointTime;
        TimeWarpStartEndTimes[0][0] = nextFrameBase;
        TimeWarpStartEndTimes[0][1] = nextFrameBase + frameDelta;
        TimeWarpStartEndTimes[1][0] = nextFrameBase;
        TimeWarpStartEndTimes[1][1] = nextFrameBase + frameDelta;
        break;
    case HmdShutter_RollingLeftToRight:
        EyeRenderTimes[0] = nextFrameBase + frameDelta * 0.25;
        EyeRenderTimes[1] = nextFrameBase + frameDelta * 0.75;

        /*
        // TBD: MA: It is probably better if the mesh sets it up per-eye.
        // Would apply if the screen is 0 -> 1 for each eye mesh:
        TimeWarpStartEndTimes[0][0] = nextFrameBase;
        TimeWarpStartEndTimes[0][1] = MidpointTime;
        TimeWarpStartEndTimes[1][0] = MidpointTime;
        TimeWarpStartEndTimes[1][1] = nextFrameBase + frameDelta;
        */

        // Mesh is set up to vary from the edge of the screen 0 -> 1 across both eyes.
        TimeWarpStartEndTimes[0][0] = nextFrameBase;
        TimeWarpStartEndTimes[0][1] = nextFrameBase + frameDelta;
        TimeWarpStartEndTimes[1][0] = nextFrameBase;
        TimeWarpStartEndTimes[1][1] = nextFrameBase + frameDelta;

        break;
    case HmdShutter_RollingRightToLeft:

        EyeRenderTimes[0] = nextFrameBase + frameDelta * 0.75;
        EyeRenderTimes[1] = nextFrameBase + frameDelta * 0.25;

        // This is *Correct* with Tom's distortion mesh organization.
        TimeWarpStartEndTimes[0][0] = nextFrameBase;
        TimeWarpStartEndTimes[0][1] = nextFrameBase + frameDelta;
        TimeWarpStartEndTimes[1][0] = nextFrameBase;
        TimeWarpStartEndTimes[1][1] = nextFrameBase + frameDelta;
        break;
    case HmdShutter_Global:
        // TBD
        EyeRenderTimes[0]           = MidpointTime;
        EyeRenderTimes[1]           = MidpointTime;
        TimeWarpStartEndTimes[0][0] = MidpointTime;
        TimeWarpStartEndTimes[0][1] = MidpointTime;
        TimeWarpStartEndTimes[1][0] = MidpointTime;
        TimeWarpStartEndTimes[1][1] = MidpointTime;
        break;
    default:
        break;
    }
}
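
// Worked example (illustrative numbers): with frameDelta ~= 0.0133s (75 Hz),
// inputs.ScreenDelay = 0.017s, and thisFrameTime = T:
//   NextFrameTime = T + 0.0133
//   nextFrameBase = T + 0.0303
//   MidpointTime  = T + 0.0370
// For HmdShutter_RollingRightToLeft, eye 0 is predicted at
// nextFrameBase + 0.75 * frameDelta ~= T + 0.0403 and eye 1 at
// nextFrameBase + 0.25 * frameDelta ~= T + 0.0337, matching the order in
// which scan-out reaches the two eyes.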


double FrameTimeManager::BeginFrame(unsigned frameIndex)
{
    RenderIMUTimeSeconds   = 0.0;
    TimewarpIMUTimeSeconds = 0.0;

    // ThisFrameTime comes from the end of last frame, unless it changed.
    double thisFrameTime = (FrameTiming.NextFrameTime != 0.0) ?
                           FrameTiming.NextFrameTime : ovr_GetTimeInSeconds();

    // We are starting to process a new frame...
    FrameTiming.InitTimingFromInputs(FrameTiming.Inputs, RenderInfo.Shutter.Type,
                                     thisFrameTime, frameIndex);

    return FrameTiming.ThisFrameTime;
}


void FrameTimeManager::EndFrame()
{
    // Record timing since last frame; must be called after Present & sync.
    FrameTiming.NextFrameTime = ovr_GetTimeInSeconds();
    if (FrameTiming.ThisFrameTime > 0.0)
    {
        FrameTimeDeltas.AddTimeDelta(FrameTiming.NextFrameTime - FrameTiming.ThisFrameTime);
        FrameTiming.Inputs.FrameDelta = calcFrameDelta();
    }

    // Write to the lock-less state so other threads can read consistent timing.
    LocklessTiming.SetState(FrameTiming);
}
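
// Illustrative per-frame call sequence (added for exposition; not part of
// the original source), assuming SDK-rendered distortion and a configured
// FrameTimeManager:
#if 0
void ExampleFrameLoop(FrameTimeManager& manager, ovrHmd hmd, unsigned frameIndex)
{
    manager.BeginFrame(frameIndex);

    for (int eyeIndex = 0; eyeIndex < 2; eyeIndex++)
    {
        ovrEyeType eye  = (ovrEyeType)eyeIndex;
        Transformf pose = manager.GetEyePredictionPose(hmd, eye);
        // ... render the eye view from pose ...
    }

    // ... distortion rendering, Present and vsync happen here ...

    manager.EndFrame();
}
#endif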



// Thread-safe function to query timing for a future frame.

FrameTimeManager::Timing FrameTimeManager::GetFrameTiming(unsigned frameIndex)
{
    Timing frameTiming = LocklessTiming.GetState();

    if (frameTiming.ThisFrameTime == 0.0)
    {
        // If timing hasn't been initialized, starting based on "now" is the best guess.
        frameTiming.InitTimingFromInputs(frameTiming.Inputs, RenderInfo.Shutter.Type,
                                         ovr_GetTimeInSeconds(), frameIndex);
    }
    else if (frameIndex > frameTiming.FrameIndex)
    {
        unsigned frameDelta    = frameIndex - frameTiming.FrameIndex;
        double   thisFrameTime = frameTiming.NextFrameTime +
                                 double(frameDelta - 1) * frameTiming.Inputs.FrameDelta;
        // Don't run away too far into the future beyond rendering.
        OVR_ASSERT(frameDelta < 6);

        frameTiming.InitTimingFromInputs(frameTiming.Inputs, RenderInfo.Shutter.Type,
                                         thisFrameTime, frameIndex);
    }

    return frameTiming;
}
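
// Illustrative cross-thread query (added for exposition; not part of the
// original source): because LocklessTiming publishes a consistent snapshot,
// a worker thread can ask for a future frame's timing without locks.
#if 0
double ExampleQueryMidpoint(FrameTimeManager& manager, unsigned nextFrameIndex)
{
    FrameTimeManager::Timing t = manager.GetFrameTiming(nextFrameIndex);
    return t.MidpointTime; // absolute time the frame's midpoint should scan out
}
#endif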


double FrameTimeManager::GetEyePredictionTime(ovrEyeType eye)
{
    if (VsyncEnabled)
    {
        return FrameTiming.EyeRenderTimes[eye];
    }

    // No VSync: Best guess for the near future.
    return ovr_GetTimeInSeconds() + ScreenSwitchingDelay + NoVSyncToScanoutDelay;
}

Transformf FrameTimeManager::GetEyePredictionPose(ovrHmd hmd, ovrEyeType eye)
{
    double         eyeRenderTime = GetEyePredictionTime(eye);
    ovrSensorState eyeState      = ovrHmd_GetSensorState(hmd, eyeRenderTime);

//  EyeRenderPoses[eye] = eyeState.Predicted.Pose;

    // Record view pose sampling time for latency reporting.
    if (RenderIMUTimeSeconds == 0.0)
        RenderIMUTimeSeconds = eyeState.Recorded.TimeInSeconds;

    return eyeState.Predicted.Pose;
}


void FrameTimeManager::GetTimewarpPredictions(ovrEyeType eye, double timewarpStartEnd[2])
{
    if (VsyncEnabled)
    {
        timewarpStartEnd[0] = FrameTiming.TimeWarpStartEndTimes[eye][0];
        timewarpStartEnd[1] = FrameTiming.TimeWarpStartEndTimes[eye][1];
        return;
    }

    // Free-running, so this will be displayed immediately.
    // Unfortunately we have no idea which bit of the screen is actually going to be displayed.
    // TODO: Guess which bit of the screen is being displayed!
    // (e.g. use DONOTWAIT on Present and see when the return isn't WASSTILLWAITING?)

    // We have no idea where scan-out currently is, so we can't usefully warp the screen spatially.
    timewarpStartEnd[0] = ovr_GetTimeInSeconds() + ScreenSwitchingDelay + NoVSyncToScanoutDelay;
    timewarpStartEnd[1] = timewarpStartEnd[0];
}


void FrameTimeManager::GetTimewarpMatrices(ovrHmd hmd, ovrEyeType eyeId,
                                           ovrPosef renderPose, ovrMatrix4f twmOut[2])
{
    if (!hmd)
    {
        return;
    }

    double timewarpStartEnd[2] = { 0.0, 0.0 };
    GetTimewarpPredictions(eyeId, timewarpStartEnd);

    ovrSensorState startState = ovrHmd_GetSensorState(hmd, timewarpStartEnd[0]);
    ovrSensorState endState   = ovrHmd_GetSensorState(hmd, timewarpStartEnd[1]);

    if (TimewarpIMUTimeSeconds == 0.0)
        TimewarpIMUTimeSeconds = startState.Recorded.TimeInSeconds;

    Quatf quatFromStart = startState.Predicted.Pose.Orientation;
    Quatf quatFromEnd   = endState.Predicted.Pose.Orientation;
    Quatf quatFromEye   = renderPose.Orientation; //EyeRenderPoses[eyeId].Orientation;
    quatFromEye.Invert();

    Quatf timewarpStartQuat = quatFromEye * quatFromStart;
    Quatf timewarpEndQuat   = quatFromEye * quatFromEnd;

    Matrix4f timewarpStart(timewarpStartQuat);
    Matrix4f timewarpEnd(timewarpEndQuat);


    // The real-world orientations have: X=right, Y=up, Z=backwards.
    // The vectors inside the mesh are in NDC to keep the shader simple: X=right, Y=down, Z=forwards.
    // So we need to perform a similarity transform on this delta matrix.
    // The verbose code would look like this:
    /*
    Matrix4f matBasisChange;
    matBasisChange.SetIdentity();
    matBasisChange.M[0][0] =  1.0f;
    matBasisChange.M[1][1] = -1.0f;
    matBasisChange.M[2][2] = -1.0f;
    Matrix4f matBasisChangeInv = matBasisChange.Inverted();
    matRenderFromNow = matBasisChangeInv * matRenderFromNow * matBasisChange;
    */
    // ...but of course all the above is a constant transform and much more easily done.
    // We flip the signs of the Y&Z row, then flip the signs of the Y&Z column,
    // and of course most of the flips cancel:
    // +++                        +--                     +--
    // +++ -> flip Y&Z columns -> +-- -> flip Y&Z rows -> -++
    // +++                        +--                     -++
    timewarpStart.M[0][1] = -timewarpStart.M[0][1];
    timewarpStart.M[0][2] = -timewarpStart.M[0][2];
    timewarpStart.M[1][0] = -timewarpStart.M[1][0];
    timewarpStart.M[2][0] = -timewarpStart.M[2][0];

    timewarpEnd  .M[0][1] = -timewarpEnd  .M[0][1];
    timewarpEnd  .M[0][2] = -timewarpEnd  .M[0][2];
    timewarpEnd  .M[1][0] = -timewarpEnd  .M[1][0];
    timewarpEnd  .M[2][0] = -timewarpEnd  .M[2][0];

    twmOut[0] = timewarpStart;
    twmOut[1] = timewarpEnd;
}
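
// Sanity-check sketch (added for exposition; not part of the original
// source): for a rotation-only matrix the four sign flips above match the
// verbose similarity transform with B = diag(1, -1, -1, 1), which is its
// own inverse.
#if 0
static Matrix4f ExampleVerboseBasisChange(const Matrix4f& m)
{
    Matrix4f b;            // Matrix4f default-constructs to identity
    b.M[1][1] = -1.0f;
    b.M[2][2] = -1.0f;     // b == b.Inverted()
    return b * m * b;      // equals the sign-flipped m when m has no translation
}
#endif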


// Used by the renderer to determine if it should time distortion rendering.
bool FrameTimeManager::NeedDistortionTimeMeasurement() const
{
    if (!VsyncEnabled)
        return false;
    return DistortionRenderTimes.GetCount() < 10;
}


void FrameTimeManager::AddDistortionTimeMeasurement(double distortionTimeSeconds)
{
    DistortionRenderTimes.AddTimeDelta(distortionTimeSeconds);

    // If timewarp timing changes based on this sample, update it.
    double newTimewarpWaitDelta = calcTimewarpWaitDelta();
    if (newTimewarpWaitDelta != FrameTiming.Inputs.TimewarpWaitDelta)
    {
        FrameTiming.Inputs.TimewarpWaitDelta = newTimewarpWaitDelta;
        LocklessTiming.SetState(FrameTiming);
    }
}


void FrameTimeManager::UpdateFrameLatencyTrackingAfterEndFrame(
    unsigned char frameLatencyTestColor,
    const Util::FrameTimeRecordSet& rs)
{
    // FrameTiming.NextFrameTime in this context (after EndFrame) is the end frame time.
    ScreenLatencyTracker.SaveDrawColor(frameLatencyTestColor,
                                       FrameTiming.NextFrameTime,
                                       RenderIMUTimeSeconds,
                                       TimewarpIMUTimeSeconds);

    ScreenLatencyTracker.MatchRecord(rs);

    // If screen delay changed, update timing.
    double newScreenDelay = calcScreenDelay();
    if (newScreenDelay != FrameTiming.Inputs.ScreenDelay)
    {
        FrameTiming.Inputs.ScreenDelay = newScreenDelay;
        LocklessTiming.SetState(FrameTiming);
    }
}


//-----------------------------------------------------------------------------------
// ***** TimeDeltaCollector

void TimeDeltaCollector::AddTimeDelta(double timeSeconds)
{
    // Avoid adding invalid timing values.
    if (timeSeconds < 0.0)
        return;

    // Buffer full: shift everything down one slot, dropping the oldest sample.
    if (Count == Capacity)
    {
        for (int i = 0; i < Count - 1; i++)
            TimeBufferSeconds[i] = TimeBufferSeconds[i + 1];
        Count--;
    }
    TimeBufferSeconds[Count++] = timeSeconds;
}

double TimeDeltaCollector::GetMedianTimeDelta() const
{
    double SortedList[Capacity];
    bool   used[Capacity];

    memset(used, 0, sizeof(used));
    SortedList[0] = 0.0; // In case Count was 0...

    // Selection sort: repeatedly pick the smallest unused sample.
    // Probably the slowest way to find the median...
    for (int i = 0; i < Count; i++)
    {
        double smallestDelta = 1000000.0;
        int    index         = 0;

        for (int j = 0; j < Count; j++)
        {
            if (!used[j])
            {
                if (TimeBufferSeconds[j] < smallestDelta)
                {
                    smallestDelta = TimeBufferSeconds[j];
                    index = j;
                }
            }
        }

        // Mark as used
        used[index]   = true;
        SortedList[i] = smallestDelta;
    }

    return SortedList[Count / 2];
}
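
// Equivalent sketch (added for exposition; not part of the original source):
// the same median could be found without the O(n^2) selection loop by using
// std::nth_element on a scratch copy. kMaxSamples is a hypothetical bound
// standing in for the collector's Capacity.
#if 0
#include <algorithm>

double ExampleMedian(const double* samples, int count)
{
    const int kMaxSamples = 12;
    double scratch[kMaxSamples] = { 0.0 }; // [0] stays 0.0 when count == 0
    std::copy(samples, samples + count, scratch);
    std::nth_element(scratch, scratch + count / 2, scratch + count);
    return scratch[count / 2];
}
#endif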


}} // namespace OVR::CAPI