PlusLib  2.9.0
Software library for tracked ultrasound image acquisition, calibration, and processing.
nvSDIin.cpp
Go to the documentation of this file.
1 /***************************************************************************\
2 |* *|
3 |* Copyright 2007 NVIDIA Corporation. All rights reserved. *|
4 |* *|
5 |* NOTICE TO USER: *|
6 |* *|
7 |* This source code is subject to NVIDIA ownership rights under U.S. *|
8 |* and international Copyright laws. Users and possessors of this *|
9 |* source code are hereby granted a nonexclusive, royalty-free *|
10 |* license to use this code in individual and commercial software. *|
11 |* *|
12 |* NVIDIA MAKES NO REPRESENTATION ABOUT THE SUITABILITY OF THIS SOURCE *|
13 |* CODE FOR ANY PURPOSE. IT IS PROVIDED "AS IS" WITHOUT EXPRESS OR *|
14 |* IMPLIED WARRANTY OF ANY KIND. NVIDIA DISCLAIMS ALL WARRANTIES WITH *|
15 |* REGARD TO THIS SOURCE CODE, INCLUDING ALL IMPLIED WARRANTIES OF *|
16 |* MERCHANTABILITY, NONINFRINGEMENT, AND FITNESS FOR A PARTICULAR *|
17 |* PURPOSE. IN NO EVENT SHALL NVIDIA BE LIABLE FOR ANY SPECIAL, *|
18 |* INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES *|
19 |* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN *|
20 |* AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING *|
21 |* OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOURCE *|
22 |* CODE. *|
23 |* *|
24 |* U.S. Government End Users. This source code is a "commercial item" *|
25 |* as that term is defined at 48 C.F.R. 2.101 (OCT 1995), consisting *|
26 |* of "commercial computer software" and "commercial computer software *|
27 |* documentation" as such terms are used in 48 C.F.R. 12.212 (SEPT 1995) *|
28 |* and is provided to the U.S. Government only as a commercial end item. *|
29 |* Consistent with 48 C.F.R.12.212 and 48 C.F.R. 227.7202-1 through *|
30 |* 227.7202-4 (JUNE 1995), all U.S. Government End Users acquire the *|
31 |* source code with only those rights set forth herein. *|
32 |* *|
33 |* Any use of this source code in individual and commercial software must *|
34 |* include, in the user documentation and internal comments to the code, *|
35 |* the above Disclaimer and U.S. Government End Users Notice. *|
36 |* *|
37 |* *|
38 \***************************************************************************/
39 
40 #include "PlusCommon.h"
41 #include "glExtensions.h"
42 #include "nvCommon.h"
43 #include "nvGPUutil.h"
44 #include "nvSDIin.h"
45 #include "nvSDIutil.h"
46 
47 //----------------------------------------------------------------------------
49 {
50  m_bInitialized = false;
51  m_nDevice = 0;
52  if( Init() )
53  {
54  m_bInitialized = true;
55  }
56 }
57 
58 //----------------------------------------------------------------------------
60 {
61  if( !m_bInitialized )
62  {
63  return;
64  }
65  for( int i = 0; i < m_nDevice; i++ )
66  {
67  delete m_lDevice[i];
68  m_lDevice[i] = NULL;
69  }
70 }
71 
72 //----------------------------------------------------------------------------
74 {
75  static CNvSDIinTopology instance;
76  return instance;
77 }
78 
79 //----------------------------------------------------------------------------
81 {
82  return m_nDevice;
83 }
84 
85 //----------------------------------------------------------------------------
86 NVVIOTOPOLOGYTARGET* CNvSDIinTopology::GetDevice( int index )
87 {
88  if( index >= 0 && index < m_nDevice )
89  {
90  return m_lDevice[index];
91  }
92  return NULL;
93 }
94 
95 //----------------------------------------------------------------------------
97 {
98  if( m_bInitialized )
99  {
100  return true;
101  }
102 
103 #if WIN32
104  HWND hWnd;
105  HGLRC hGLRC;
106  if( CreateDummyGLWindowWin32( &hWnd, &hGLRC ) == false )
107  {
108  return false;
109  }
110 #elif __linux__
111 
112 #endif
113 
114  //load all the required extensions:
115  //video capture
117  {
118  LOG_ERROR( "Could not load OpenGL Video Capture extension." );
119  return false;
120  }
121  //timer query
122  if( !loadTimerQueryExtension() )
123  {
124  LOG_ERROR( "Could not load OpenGL timer query extension." );
125  return false;
126  }
127  //timer query
129  {
130  LOG_ERROR( "Could not load OpenGL buffer object extension." );
131  return false;
132  }
133 
134  NVVIOCAPS l_vioCaps;
135  NVVIOTOPOLOGY l_vioTopos;
136  NvAPI_Status ret = NVAPI_OK;
137 
138  // NVAPI Initialization of Video Capture Device.
139  if ( NvAPI_Initialize() != NVAPI_OK )
140  {
141  LOG_ERROR( "Error Initializing NVAPI." );
142  return false;
143  }
144 
145  // Query Available Video I/O Topologies
146  memset( &l_vioTopos, 0, sizeof( l_vioTopos ) );
147  l_vioTopos.version = NVVIOTOPOLOGY_VER;
148  if ( NvAPI_VIO_QueryTopology( &l_vioTopos ) != NVAPI_OK )
149  {
150  LOG_ERROR( "Video I/O Unsupported." );
151  return false;
152  }
153 
154  // Cycle through all SDI topologies looking for the first
155  // available SDI input device.
156  unsigned int i = 0;
157  m_nDevice = 0;
158  while ( i < l_vioTopos.vioTotalDeviceCount )
159  {
160  // Get video I/O capabilities for current video I/O target.
161  memset( &l_vioCaps, 0, sizeof( l_vioCaps ) );
162  l_vioCaps.version = NVVIOCAPS_VER;
163  if ( NvAPI_VIO_GetCapabilities( l_vioTopos.vioTarget[i].hVioHandle, &l_vioCaps ) != NVAPI_OK )
164  {
165  ++i;
166  continue;
167  }
168 
169  // If video input device found, set flag.
170  if ( l_vioCaps.adapterCaps & NVVIOCAPS_VIDIN_SDI )
171  {
172  m_lDevice[m_nDevice] = new NVVIOTOPOLOGYTARGET;
173  *m_lDevice[m_nDevice] = l_vioTopos.vioTarget[i];
174  m_nDevice++;
175  }
176 
177  i++;
178  }
179 
180  m_bInitialized = true;
181 
182 #if WIN32
183  // We can kill the dummy window now
184  if( DestroyGLWindowWin32( &hWnd, &hGLRC ) == false )
185  {
186  return false;
187  }
188 #elif __linux__
189 
190 #endif
191 
192  return true;
193 }
194 
195 //----------------------------------------------------------------------------
197 {
198  m_device = NULL;
199  m_hDC = NULL;
200  m_vioHandle = NULL;
201  m_videoSlot = 0;
202  // Setup CSC for each stream.
203 
204  float scale = 1.0f;
205 
206  m_cscMax[0] = 5000;
207  m_cscMax[1] = 5000;
208  m_cscMax[2] = 5000;
209  m_cscMax[3] = 5000;
210  m_cscMin[0] = 0;
211  m_cscMin[1] = 0;
212  m_cscMin[2] = 0;
213  m_cscMin[3] = 0;
214 
215  // Initialize matrix to the identity.
216  m_cscMat[0][0] = scale;
217  m_cscMat[0][1] = 0;
218  m_cscMat[0][2] = 0;
219  m_cscMat[0][3] = 0;
220  m_cscMat[1][0] = 0;
221  m_cscMat[1][1] = scale;
222  m_cscMat[1][2] = 0;
223  m_cscMat[1][3] = 0;
224  m_cscMat[2][0] = 0;
225  m_cscMat[2][1] = 0;
226  m_cscMat[2][2] = scale;
227  m_cscMat[2][3] = 0;
228  m_cscMat[3][0] = 0;
229  m_cscMat[3][1] = 0;
230  m_cscMat[3][2] = 0;
231  m_cscMat[3][3] = scale;
232 
233  m_bCaptureStarted = false;
234 }
235 
236 //----------------------------------------------------------------------------
238 {
239 }
240 
241 //----------------------------------------------------------------------------
242 void CNvSDIin::SetCSCParams( GLfloat* cscMat, GLfloat* cscOffset, GLfloat* cscMin, GLfloat* cscMax )
243 {
244  memcpy( m_cscMat, cscMat, sizeof( GLfloat ) * 16 );
245  memcpy( m_cscOffset, cscOffset, sizeof( GLfloat ) * 4 );
246  memcpy( m_cscMin, cscMin, sizeof( GLfloat ) * 4 );
247  memcpy( m_cscMax, cscMax, sizeof( GLfloat ) * 4 );
248 }
249 
250 //----------------------------------------------------------------------------
252 {
253  return m_vioHandle;
254 }
255 
256 //----------------------------------------------------------------------------
257 unsigned int CNvSDIin::GetWidth()
258 {
259  return m_videoWidth;
260 }
261 
262 //----------------------------------------------------------------------------
263 unsigned int CNvSDIin::GetHeight()
264 {
265  return m_videoHeight;
266 }
267 
268 //----------------------------------------------------------------------------
269 void CNvSDIin::DumpChannelStatus( NVVIOCHANNELSTATUS jack )
270 {
271  LOG_INFO( "Link ID: " << LinkIDToString( jack.linkID ).c_str() );
272  LOG_INFO( "SMPTE 352: " << jack.smpte352 );
273  LOG_INFO( "Signal Format: " << SignalFormatToString( jack.signalFormat ).c_str() );
274  LOG_INFO( "Sampling Format: " << ComponentSamplingFormatToString( jack.samplingFormat ).c_str() );
275  LOG_INFO( "Color Space: " << ColorSpaceToString( jack.colorSpace ).c_str() );
276  LOG_INFO( "Bits Per Component: " << jack.bitsPerComponent );
277  LOG_INFO( "SMPTE 352 Payload ID: " << jack.smpte352 );
278 }
279 
280 //----------------------------------------------------------------------------
281 void CNvSDIin::DumpStreamStatus( NVVIOSTREAM stream )
282 {
283  LOG_INFO( "Links: " );
284  for( unsigned int i = 0; i < stream.numLinks; i++ )
285  {
286  LOG_INFO( "Jack: " << stream.links[i].jack << " Channel: " << stream.links[i].channel );
287  }
288  LOG_INFO( "Sampling: " << ComponentSamplingFormatToString( stream.sampling ).c_str() );
289  LOG_INFO( "Expansion Enable: " << stream.expansionEnable );
290  LOG_INFO( "Bits Per Component: " << stream.bitsPerComponent );
291 }
292 
293 //----------------------------------------------------------------------------
294 HRESULT CNvSDIin::Init( nvOptions* options )
295 {
296  if( options )
297  {
298  m_bDualLink = options->dualLink;
299  m_Sampling = options->sampling;
302  if( m_Sampling == NVVIOCOMPONENTSAMPLING_444 || m_Sampling == NVVIOCOMPONENTSAMPLING_4444 )
303  {
304  m_ExpansionEnable = false;
305  }
306  }
307  else
308  {
309  m_bDualLink = false;
310  m_Sampling = NVVIOCOMPONENTSAMPLING_422;
311  m_BitsPerComponent = 8;
312  m_ExpansionEnable = true;
313  }
314 
315  return S_OK;
316 }
317 
318 //----------------------------------------------------------------------------
319 HRESULT CNvSDIin::GetVideoInState( NVVIOCONFIG_V1* vioConfig, NVVIOSTATUS* vioStatus )
320 {
321  NvAPI_Status ret = NVAPI_OK;
322 
323  memset( vioStatus, 0, sizeof( NVVIOSTATUS ) );
324  vioStatus->version = NVVIOSTATUS_VER;
325  if ( NvAPI_VIO_Status( m_vioHandle, vioStatus ) != NVAPI_OK )
326  {
327  LOG_ERROR( "Cannot get status of SDI input device." );
328  return E_FAIL;
329  }
330 
331  // Cycle through the jacks and display the status of each active channel.
332  for ( unsigned int i = 0; i < NVAPI_MAX_VIO_JACKS; i++ )
333  {
334  for ( unsigned int j = 0; j < NVAPI_MAX_VIO_CHANNELS_PER_JACK; j++ )
335  {
336  DumpChannelStatus( vioStatus->vioStatus.inStatus.vidIn[i][j] );
337  }
338  }
339 
340  // Get stream configuration
341  memset( vioConfig, 0, sizeof( NVVIOCONFIG_V1 ) );
342  vioConfig->version = NVVIOCONFIG_VER1;
343  vioConfig->nvvioConfigType = NVVIOCONFIGTYPE_IN;
344  vioConfig->fields = NVVIOCONFIG_SIGNALFORMAT | NVVIOCONFIG_STREAMS;
345  if ( NvAPI_VIO_GetConfig( m_vioHandle, ( NVVIOCONFIG* )vioConfig ) != NVAPI_OK )
346  {
347  LOG_ERROR( "Cannot get configuration of SDI input device." );
348  return E_FAIL;
349  }
350 
351  // Display stream configuration of input device.
352  LOG_DEBUG( "Number of Streams: " << vioConfig->vioConfig.inConfig.numStreams );
353  LOG_DEBUG( "Signal Format: " << SignalFormatToString( vioConfig->vioConfig.inConfig.signalFormat ).c_str() );
354 
355  // Display the configuration of each stream.
356  for ( unsigned int i = 0; i < vioConfig->vioConfig.inConfig.numStreams; i++ )
357  {
358  DumpStreamStatus( vioConfig->vioConfig.inConfig.streams[i] );
359  }
360 
361  return S_OK;
362 }
363 
364 //----------------------------------------------------------------------------
366 {
367  NVVIOCONFIG_V1 l_vioConfig;
368  NVVIOSTATUS l_vioStatus;
369  // Get initial device state.
370  GetVideoInState( &l_vioConfig, &l_vioStatus );
371 
372  // Calculate the number of active streams. For now, this is the
373  // number of streams that we will draw. 3G formats can have upto
374  // two channels per jack. For now, only look at the first channel.
375  m_numStreams = 0;
376  for ( unsigned int i = 0; i < NVAPI_MAX_VIO_JACKS; i++ )
377  {
378  if ( l_vioStatus.vioStatus.inStatus.vidIn[i][0].signalFormat != NVVIOSIGNALFORMAT_NONE )
379  {
381  m_numStreams++;
382  }
383  }
384 
385  // Return an error if there are no active streams detected.
386  if ( m_numStreams == 0 )
387  {
388  LOG_ERROR( "No active video input input streams detected." );
389  return E_FAIL;
390  }
391 
392  // Now, set the config that we really want here.
393  memset( &l_vioConfig, 0, sizeof( l_vioConfig ) );
394  l_vioConfig.version = NVVIOCONFIG_VER1;
395  l_vioConfig.nvvioConfigType = NVVIOCONFIGTYPE_IN;
396 
397  // Set signal format for capture to the detected signal format.
398  // on the first channel of the first active jack.
399  for ( unsigned int i = 0; i < NVAPI_MAX_VIO_JACKS; i++ )
400  {
401  for ( unsigned int j = 0; j < NVAPI_MAX_VIO_CHANNELS_PER_JACK; j++ )
402  {
403  if ( l_vioStatus.vioStatus.inStatus.vidIn[i][j].signalFormat != NVVIOSIGNALFORMAT_NONE )
404  {
405  l_vioConfig.vioConfig.inConfig.signalFormat = l_vioStatus.vioStatus.inStatus.vidIn[i][j].signalFormat;
406  }
407  }
408  }
409 
410  int numLinks = 1;
411  if( m_bDualLink )
412  {
413  m_numStreams >>= 1;
414  numLinks = 2;
415  }
416 
417  l_vioConfig.fields = NVVIOCONFIG_SIGNALFORMAT;
418 
419  // Define streams.
420  l_vioConfig.vioConfig.inConfig.numStreams = m_numStreams;
421 
422  l_vioConfig.fields |= NVVIOCONFIG_STREAMS;
423  l_vioConfig.vioConfig.inConfig.numRawCaptureImages = NVAPI_GVI_DEFAULT_RAW_CAPTURE_IMAGES;
424 
425  switch( l_vioConfig.vioConfig.inConfig.signalFormat )
426  {
427  case NVVIOSIGNALFORMAT_1080P_50_00_SMPTE274_3G_LEVEL_A:
428  case NVVIOSIGNALFORMAT_1080P_59_94_SMPTE274_3G_LEVEL_A:
429  case NVVIOSIGNALFORMAT_1080P_60_00_SMPTE274_3G_LEVEL_A:
430  case NVVIOSIGNALFORMAT_1080P_60_00_SMPTE274_3G_LEVEL_B:
431  case NVVIOSIGNALFORMAT_1080I_60_00_SMPTE274_3G_LEVEL_B:
432  case NVVIOSIGNALFORMAT_2048I_60_00_SMPTE372_3G_LEVEL_B:
433  case NVVIOSIGNALFORMAT_1080P_50_00_SMPTE274_3G_LEVEL_B:
434  case NVVIOSIGNALFORMAT_1080I_50_00_SMPTE274_3G_LEVEL_B:
435  case NVVIOSIGNALFORMAT_2048I_50_00_SMPTE372_3G_LEVEL_B:
436  case NVVIOSIGNALFORMAT_1080P_30_00_SMPTE274_3G_LEVEL_B:
437  case NVVIOSIGNALFORMAT_2048P_30_00_SMPTE372_3G_LEVEL_B:
438  case NVVIOSIGNALFORMAT_1080P_25_00_SMPTE274_3G_LEVEL_B:
439  case NVVIOSIGNALFORMAT_2048P_25_00_SMPTE372_3G_LEVEL_B:
440  case NVVIOSIGNALFORMAT_1080P_24_00_SMPTE274_3G_LEVEL_B:
441  case NVVIOSIGNALFORMAT_2048P_24_00_SMPTE372_3G_LEVEL_B:
442  case NVVIOSIGNALFORMAT_1080I_48_00_SMPTE274_3G_LEVEL_B:
443  case NVVIOSIGNALFORMAT_2048I_48_00_SMPTE372_3G_LEVEL_B:
444  case NVVIOSIGNALFORMAT_1080P_59_94_SMPTE274_3G_LEVEL_B:
445  case NVVIOSIGNALFORMAT_1080I_59_94_SMPTE274_3G_LEVEL_B:
446  case NVVIOSIGNALFORMAT_2048I_59_94_SMPTE372_3G_LEVEL_B:
447  case NVVIOSIGNALFORMAT_1080P_29_97_SMPTE274_3G_LEVEL_B:
448  case NVVIOSIGNALFORMAT_2048P_29_97_SMPTE372_3G_LEVEL_B:
449  case NVVIOSIGNALFORMAT_1080P_23_98_SMPTE274_3G_LEVEL_B:
450  case NVVIOSIGNALFORMAT_2048P_23_98_SMPTE372_3G_LEVEL_B:
451  case NVVIOSIGNALFORMAT_1080I_47_96_SMPTE274_3G_LEVEL_B:
452  case NVVIOSIGNALFORMAT_2048I_47_96_SMPTE372_3G_LEVEL_B:
453  // Verify 3G configuration. 3G is only supported on physical jacks 1 and 3,
454  // logical jacks 0 and 2. Display a warning message if a 3G signal is
455  // connected to jacks other than those jacks.
456  if ( ( m_activeJacks[0] == 1 ) || ( m_activeJacks[0] == 3 ) ||
457  ( m_activeJacks[1] == 1 ) || ( m_activeJacks[1] == 3 ) )
458  {
459  LOG_WARNING( "3G capture supported on Jacks 1 and 3 only." );
460  }
461 
462  for ( unsigned int i = 0; i < m_numStreams; i++ )
463  {
464  l_vioConfig.vioConfig.inConfig.streams[i].sampling = m_Sampling;
465  l_vioConfig.vioConfig.inConfig.streams[i].bitsPerComponent = m_BitsPerComponent;
466  l_vioConfig.vioConfig.inConfig.streams[i].expansionEnable = m_ExpansionEnable;
467  l_vioConfig.vioConfig.inConfig.streams[i].numLinks = 2;
468  l_vioConfig.vioConfig.inConfig.streams[i].links[0].jack = m_activeJacks[i];
469  l_vioConfig.vioConfig.inConfig.streams[i].links[0].channel = 0;
470  l_vioConfig.vioConfig.inConfig.streams[i].links[1].jack = m_activeJacks[i];
471  l_vioConfig.vioConfig.inConfig.streams[i].links[1].channel = 1;
472  }
473 
474  break;
475  default:
476  switch( m_numStreams )
477  {
478  case 4: //then the signal cannot be dual link
479  l_vioConfig.vioConfig.inConfig.streams[3].sampling = m_Sampling;
480  l_vioConfig.vioConfig.inConfig.streams[3].bitsPerComponent = m_BitsPerComponent;
481  l_vioConfig.vioConfig.inConfig.streams[3].expansionEnable = m_ExpansionEnable;
482  l_vioConfig.vioConfig.inConfig.streams[3].numLinks = 1;
483  l_vioConfig.vioConfig.inConfig.streams[3].links[0].jack = 3;
484  l_vioConfig.vioConfig.inConfig.streams[3].links[0].channel = 0;
485  case 3:
486  l_vioConfig.vioConfig.inConfig.streams[2].sampling = m_Sampling;
487  l_vioConfig.vioConfig.inConfig.streams[2].bitsPerComponent = m_BitsPerComponent;
488  l_vioConfig.vioConfig.inConfig.streams[2].expansionEnable = m_ExpansionEnable;
489  l_vioConfig.vioConfig.inConfig.streams[2].numLinks = 1;
490  l_vioConfig.vioConfig.inConfig.streams[2].links[0].jack = m_activeJacks[2];
491  l_vioConfig.vioConfig.inConfig.streams[2].links[0].channel = 0;
492  case 2:
493  l_vioConfig.vioConfig.inConfig.streams[1].sampling = m_Sampling;
494  l_vioConfig.vioConfig.inConfig.streams[1].bitsPerComponent = m_BitsPerComponent;
495  l_vioConfig.vioConfig.inConfig.streams[1].expansionEnable = m_ExpansionEnable;
496  l_vioConfig.vioConfig.inConfig.streams[1].numLinks = numLinks;
497  if( numLinks == 2 )
498  {
499  l_vioConfig.vioConfig.inConfig.streams[1].links[1].jack = 3;
500  l_vioConfig.vioConfig.inConfig.streams[1].links[1].channel = 0;
501  l_vioConfig.vioConfig.inConfig.streams[1].links[0].jack = 2;
502  l_vioConfig.vioConfig.inConfig.streams[1].links[0].channel = 0;
503  }
504  else
505  {
506  l_vioConfig.vioConfig.inConfig.streams[1].links[0].jack = m_activeJacks[1];
507  l_vioConfig.vioConfig.inConfig.streams[1].links[0].channel = 0;
508  }
509  case 1:
510  l_vioConfig.vioConfig.inConfig.streams[0].sampling = m_Sampling;
511  l_vioConfig.vioConfig.inConfig.streams[0].bitsPerComponent = m_BitsPerComponent;
512  l_vioConfig.vioConfig.inConfig.streams[0].expansionEnable = m_ExpansionEnable;
513  l_vioConfig.vioConfig.inConfig.streams[0].numLinks = numLinks;
514  if( numLinks == 2 )
515  {
516  l_vioConfig.vioConfig.inConfig.streams[0].links[1].jack = 1;
517  l_vioConfig.vioConfig.inConfig.streams[0].links[1].channel = 0;
518  l_vioConfig.vioConfig.inConfig.streams[0].links[0].jack = 0;
519  l_vioConfig.vioConfig.inConfig.streams[0].links[0].channel = 0;
520  }
521  else
522  {
523  l_vioConfig.vioConfig.inConfig.streams[0].links[0].jack = m_activeJacks[0];
524  l_vioConfig.vioConfig.inConfig.streams[0].links[0].channel = 0;
525  }
526  break;
527  case 0:
528  default:
529  LOG_ERROR( "Cannot configure streams, no active inputs detected." );
530  return E_FAIL;
531  break;
532  } // switch
533  break;
534  } // switch
535 
536  NvAPI_Status stat = NVAPI_OK;
537  stat = NvAPI_VIO_SetConfig( m_vioHandle, ( NVVIOCONFIG* )&l_vioConfig );
538  if ( stat != NVAPI_OK )
539  {
540  LOG_ERROR( "Cannot set configuration of SDI input device." );
541  return E_FAIL;
542  }
543 
544  if ( ( NvAPI_VIO_GetConfig( m_vioHandle, ( NVVIOCONFIG* )&l_vioConfig ) != NVAPI_OK ) )
545  {
546  LOG_ERROR( "Cannot get configuration of SDI input device." );
547  return E_FAIL;
548  }
549 
550  return S_OK;
551 }
552 
553 //----------------------------------------------------------------------------
554 HRESULT CNvSDIin::SetupDevice( int deviceNumber )
555 {
556  NVVIOCONFIG_V1 l_vioConfig;
557  NVVIOSTATUS l_vioStatus;
558  NvAPI_Status ret = NVAPI_OK;
559 
560  if( CNvSDIinTopology::Instance().GetNumDevice() == 0 ) //just in case the app failed to scan the topology
561  {
562  LOG_ERROR( "No SDI video input devices found." );
563  return E_FAIL;
564  }
565  m_deviceNumber = deviceNumber;
566  NVVIOTOPOLOGYTARGET* device = CNvSDIinTopology::Instance().GetDevice( deviceNumber );
567  if( device == NULL )
568  {
569  LOG_ERROR( "Unable to set the selected device." );
570  return E_FAIL;
571  }
572 
573  m_vioID = device->vioId;
574  m_vioHandle = device->hVioHandle;
575 
576  // Get initial device state.
577  if FAILED( SetVideoConfig() )
578  {
579  return E_FAIL;
580  }
581 
582  // TODO: At this point, need to query the status again and confirm that
583  // all ports are configured for the same signal format. SDI capture cannot
584  // succeed if all input ports are not configured and detecting the same
585  // signal format.
586  GetVideoInState( &l_vioConfig, &l_vioStatus );
587 
588  // Get width and height of video signal format. Long term this
589  // may be queried from OpenGL, but for now, need to get this from
590  // the control API.
591  NVVIOSIGNALFORMATDETAIL l_vioSignalFormatDetail;
592  memset( &l_vioSignalFormatDetail, 0, sizeof( l_vioSignalFormatDetail ) );
593 
594  ULONG l_vioSignalFormatIndex = ( ULONG )NVVIOSIGNALFORMAT_NONE;
595 
596  // Enumerate all signal formats until we find the one we are looking
597  // for, the enumeration ends, or there is an error.
598  while( 1 )
599  {
600  ret = NvAPI_VIO_EnumSignalFormats( m_vioHandle,
601  l_vioSignalFormatIndex,
602  &l_vioSignalFormatDetail );
603  if ( ret == NVAPI_END_ENUMERATION || ret != NVAPI_OK )
604  {
605  return E_FAIL;
606  }
607  l_vioSignalFormatIndex++;
608 
609  // We found the signal format that we were looking for so break.
610  if ( l_vioSignalFormatDetail.signalFormat ==
611  l_vioConfig.vioConfig.inConfig.signalFormat )
612  {
613  m_signalFormatDetail = l_vioSignalFormatDetail;
614  break;
615  }
616  }
617 
618  m_videoFormat = l_vioSignalFormatDetail.signalFormat;
619 
620  // Cache framerate so that it may later be compared with the actual
621  // achievable framerate.
622  if ( l_vioSignalFormatDetail.videoMode.interlaceMode == NVVIOINTERLACEMODE_INTERLACE )
623  {
624  m_fFrameRate = l_vioSignalFormatDetail.videoMode.fFrameRate / 2.0f;
625  }
626  else
627  {
628  m_fFrameRate = l_vioSignalFormatDetail.videoMode.fFrameRate;
629  }
630 
631  // Get video width and height. Should be the same for all streams.
632 
633  m_videoWidth = l_vioSignalFormatDetail.videoMode.horizontalPixels;
634  m_videoHeight = l_vioSignalFormatDetail.videoMode.verticalLines;
635 
636 
637  return S_OK;
638 }
639 
640 //----------------------------------------------------------------------------
641 HRESULT CNvSDIin::BindDevice( GLuint videoSlot, HDC hDC )
642 {
643  BOOL bRet;
644  HVIDEOINPUTDEVICENV* videoDevices;
645  m_hDC = hDC;
646 
647  UINT numDevices = wglEnumerateVideoCaptureDevicesNV( m_hDC, NULL );
648  if ( numDevices <= 0 )
649  {
650  LOG_ERROR( "wglEnumerateVideoDevicesNV() did not return any devices." );
651  return E_FAIL;
652  }
653 
654  assert( glGetError() == GL_NO_ERROR );
655 
656  videoDevices = ( HVIDEOINPUTDEVICENV* )malloc( numDevices *
657  sizeof( videoDevices[0] ) );
658 
659  if ( !videoDevices )
660  {
661  LOG_ERROR( "Memory allocation failed." );
662  return E_FAIL;
663  }
664 
665  assert( glGetError() == GL_NO_ERROR );
666 
667  if ( numDevices != wglEnumerateVideoCaptureDevicesNV( m_hDC, videoDevices ) )
668  {
669  free( videoDevices );
670  LOG_ERROR( "Inconsistent results from wglEnumerateVideoDevicesNV()" );
671  return E_FAIL;
672  }
673 
674  // find a device that matches the m_vioID that we got from nvapi and lock it
675  for ( UINT i = 0; i < numDevices; ++i )
676  {
677  int uniqueID;
678  bRet = wglQueryVideoCaptureDeviceNV( m_hDC, videoDevices[i],
679  WGL_UNIQUE_ID_NV, &uniqueID );
680  if( bRet && uniqueID == m_vioID )
681  {
682  BOOL bLocked;
683  bLocked = wglLockVideoCaptureDeviceNV( m_hDC, videoDevices[i] );
684  assert( glGetError() == GL_NO_ERROR );
685  if ( bLocked )
686  {
687  m_device = videoDevices[i];
688  break;
689  }
690  }
691  }
692 
693  free( videoDevices );
694  int numSlots;
695  wglQueryCurrentContextNV( WGL_NUM_VIDEO_CAPTURE_SLOTS_NV, &numSlots );
696  if ( m_device == NULL )
697  {
698  // No lockable devices found
699  LOG_ERROR( "No lockable video capture device found." );
700  return E_FAIL;
701  }
702  m_videoSlot = videoSlot;
703 
704  // wglBindVideoCaptureDeviceNV needs a context current
706  GLenum err = glGetError();
707 
708  assert( bRet && "Failed trying to bind the video capture device!" );
709  return bRet ? S_OK : E_FAIL;
710 }
711 
712 //----------------------------------------------------------------------------
713 /*
714 HRESULT CNvSDIin::BindVideoObjects()
715 {
716  if(m_signalFormatDetail.videoMode.interlaceMode != NVVIOINTERLACEMODE_INTERLACE)
717  {
718  m_bCaptureFields = false;
719  m_bCaptureStackedFields = false;
720  }
721  if(m_bCaptureFields)
722  {
723  m_videoHeight = m_signalFormatDetail.videoMode.verticalLines>>1;
724  }
725 
726  m_numVideoObjects = m_configOptions.numStreams;
727  if(m_bCaptureFields)
728  m_numVideoObjects <<= 1;
729 
730  if(m_bCaptureBuffers)
731  {
732  // Create video buffer objects for each stream
733  glGenBuffersARB(m_numVideoObjects, m_vidBufObj);
734  assert(glGetError() == GL_NO_ERROR);
735  for (UINT i = 0; i < m_configOptions.numStreams; i++) {
736  glVideoCaptureStreamParameterivNV(m_videoSlot, i,
737  GL_VIDEO_BUFFER_INTERNAL_FORMAT_NV,
738  &m_captureInternalFormat);
739  assert(glGetError() == GL_NO_ERROR);
740  UINT objInd = i;
741  if(m_bCaptureFields) //setup 2 objects for capturing fields
742  {
743  //setup upper field object
744  objInd = 2*i;
745  glGetVideoCaptureStreamivNV(m_videoSlot, i, GL_VIDEO_BUFFER_PITCH_NV,
746  &m_bufPitch[i]);
747  assert(glGetError() == GL_NO_ERROR);
748 
749  glBindBufferARB(GL_VIDEO_BUFFER_NV, m_vidBufObj[objInd]);
750  assert(glGetError() == GL_NO_ERROR);
751  // Allocate required space in video capture buffer
752  //in the case of the following format the upper field has one extra line!!!
753  if(m_signalFormatDetail.signalFormat == NVVIOSIGNALFORMAT_487I_59_94_SMPTE259_NTSC)
754  glBufferDataARB(GL_VIDEO_BUFFER_NV, m_bufPitch[i] * (m_videoHeight+1),
755  NULL, GL_STREAM_COPY);
756  else
757  glBufferDataARB(GL_VIDEO_BUFFER_NV, m_bufPitch[i] * m_videoHeight,
758  NULL, GL_STREAM_COPY);
759 
760  assert(glGetError() == GL_NO_ERROR);
761  glBindVideoCaptureStreamBufferNV(m_videoSlot, i, GL_FIELD_UPPER_NV, 0);
762  assert(glGetError() == GL_NO_ERROR);
763  //setup lower field object
764  objInd = 2*i+1;
765  glGetVideoCaptureStreamivNV(m_videoSlot, i, GL_VIDEO_BUFFER_PITCH_NV,
766  &m_bufPitch[objInd]);
767  glBindBufferARB(GL_VIDEO_BUFFER_NV, m_vidBufObj[objInd]);
768  assert(glGetError() == GL_NO_ERROR);
769  // Allocate required space in video capture buffer
770  glBufferDataARB(GL_VIDEO_BUFFER_NV, m_bufPitch[i] * m_videoHeight,
771  NULL, GL_STREAM_COPY);
772  assert(glGetError() == GL_NO_ERROR);
773  glBindVideoCaptureStreamBufferNV(m_videoSlot, i, GL_FIELD_LOWER_NV, 0);
774  assert(glGetError() == GL_NO_ERROR);
775  }
776  else
777  {
778  if(m_bCaptureStackedFields)
779  {
780  glGetVideoCaptureStreamivNV(m_videoSlot, i, GL_VIDEO_BUFFER_PITCH_NV,
781  &m_bufPitch[objInd]);
782 
783  glBindBufferARB(GL_VIDEO_BUFFER_NV, m_vidBufObj[objInd]);
784  assert(glGetError() == GL_NO_ERROR);
785  // Allocate required space in video capture buffer
786  glBufferDataARB(GL_VIDEO_BUFFER_NV, m_bufPitch[i] * m_videoHeight,
787  NULL, GL_STREAM_COPY);
788  assert(glGetError() == GL_NO_ERROR);
789  if(m_signalFormatDetail.signalFormat == NVVIOSIGNALFORMAT_487I_59_94_SMPTE259_NTSC)
790  {
791  glBindVideoCaptureStreamBufferNV(m_videoSlot, i, GL_FIELD_UPPER_NV, 0);
792  glBindVideoCaptureStreamBufferNV(m_videoSlot, i, GL_FIELD_LOWER_NV, m_bufPitch[i] *((m_videoHeight>>1)+1));
793  }
794  else
795  {
796  glBindVideoCaptureStreamBufferNV(m_videoSlot, i, GL_FIELD_UPPER_NV, 0);
797  glBindVideoCaptureStreamBufferNV(m_videoSlot, i, GL_FIELD_LOWER_NV, m_bufPitch[i] *(m_videoHeight>>1));
798  }
799  assert(glGetError() == GL_NO_ERROR);
800  }
801  else
802  {
803  glGetVideoCaptureStreamivNV(m_videoSlot, i, GL_VIDEO_BUFFER_PITCH_NV,
804  &m_bufPitch[objInd]);
805 
806  glBindBufferARB(GL_VIDEO_BUFFER_NV, m_vidBufObj[objInd]);
807  assert(glGetError() == GL_NO_ERROR);
808  // Allocate required space in video capture buffer
809  glBufferDataARB(GL_VIDEO_BUFFER_NV, m_bufPitch[i] * m_videoHeight,
810  NULL, GL_STREAM_COPY);
811  assert(glGetError() == GL_NO_ERROR);
812  glBindVideoCaptureStreamBufferNV(m_videoSlot, i, GL_FRAME_NV, 0);
813  assert(glGetError() == GL_NO_ERROR);
814  }
815  }
816  }
817  if(m_bStreamToTextures)
818  {
819  //create the textures to go with the buffers
820  glGenTextures(m_numVideoObjects, m_videoTextures);
821  assert(glGetError() == GL_NO_ERROR);
822 
823  for (unsigned int i = 0; i < m_numVideoObjects; i++) {
824  glBindTexture(GL_TEXTURE_RECTANGLE_NV, m_videoTextures[i]);
825  assert(glGetError() == GL_NO_ERROR);
826  glTexParameterf(GL_TEXTURE_RECTANGLE_NV, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
827  assert(glGetError() == GL_NO_ERROR);
830  if(m_captureInternalFormat == GL_LUMINANCE8)
831  {
832  glPixelStorei(GL_PACK_ROW_LENGTH,m_bufPitch[i]);
833  glPixelStorei(GL_UNPACK_ROW_LENGTH,m_bufPitch[i]);
834  }
835  glTexImage2D(GL_TEXTURE_RECTANGLE_NV, 0, m_textureInternalFormat, m_videoWidth, m_videoHeight,
836  0, m_pixelFormat, GL_UNSIGNED_BYTE, NULL);
837  assert(glGetError() == GL_NO_ERROR);
838  }
839 
840  }
841  return S_OK;
842  }
843  else //capture to texture directly
844  {
845  glGenTextures(m_numVideoObjects, m_videoTextures);
846  assert(glGetError() == GL_NO_ERROR);
847 
848  for (UINT i = 0; i < m_configOptions.numStreams; i++) {
849  UINT objInd = i;
850  if(m_bCaptureFields)//setup 2 objects for capturing fields
851  {
852  //setup upper field object
853  objInd = 2*i;
854  glBindTexture(GL_TEXTURE_RECTANGLE_NV, m_videoTextures[objInd]);
855  assert(glGetError() == GL_NO_ERROR);
856  glTexParameterf(GL_TEXTURE_RECTANGLE_NV, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
857  assert(glGetError() == GL_NO_ERROR);
858  //in the case of the following format the upper field has one extra line!!!
859  if(m_signalFormatDetail.signalFormat == NVVIOSIGNALFORMAT_487I_59_94_SMPTE259_NTSC && m_bCaptureFields)
860  glTexImage2D(GL_TEXTURE_RECTANGLE_NV, 0, m_textureInternalFormat, m_videoWidth, m_videoHeight+1,
861  0, m_pixelFormat, GL_UNSIGNED_BYTE, NULL);
862  else
863  glTexImage2D(GL_TEXTURE_RECTANGLE_NV, 0, m_textureInternalFormat, m_videoWidth, m_videoHeight,
864  0, m_pixelFormat, GL_UNSIGNED_BYTE, NULL);
865 
866  assert(glGetError() == GL_NO_ERROR);
867  glBindVideoCaptureStreamTextureNV(m_videoSlot, i, GL_FIELD_UPPER_NV, GL_TEXTURE_RECTANGLE_NV,m_videoTextures[objInd]);
868  assert(glGetError() == GL_NO_ERROR);
869  //setup lower field object
870  objInd = 2*i+1;
871  glBindTexture(GL_TEXTURE_RECTANGLE_NV, m_videoTextures[objInd]);
872  assert(glGetError() == GL_NO_ERROR);
873  glTexParameterf(GL_TEXTURE_RECTANGLE_NV, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
874  assert(glGetError() == GL_NO_ERROR);
875 
876  glTexImage2D(GL_TEXTURE_RECTANGLE_NV, 0, m_textureInternalFormat, m_videoWidth, m_videoHeight,
877  0, m_pixelFormat, GL_UNSIGNED_BYTE, NULL);
878  assert(glGetError() == GL_NO_ERROR);
879  glBindVideoCaptureStreamTextureNV(m_videoSlot, i, GL_FIELD_LOWER_NV, GL_TEXTURE_RECTANGLE_NV,m_videoTextures[objInd]);
880  assert(glGetError() == GL_NO_ERROR);
881  }
882  else
883  {
884  glBindTexture(GL_TEXTURE_RECTANGLE_NV, m_videoTextures[objInd]);
885  assert(glGetError() == GL_NO_ERROR);
886  glTexParameterf(GL_TEXTURE_RECTANGLE_NV, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
887  assert(glGetError() == GL_NO_ERROR);
888 
889  glTexImage2D(GL_TEXTURE_RECTANGLE_NV, 0, m_textureInternalFormat, m_videoWidth, m_videoHeight,
890  0, m_pixelFormat, GL_UNSIGNED_BYTE, NULL);
891  assert(glGetError() == GL_NO_ERROR);
892  glBindVideoCaptureStreamTextureNV(m_videoSlot, i, GL_FRAME_NV, GL_TEXTURE_RECTANGLE_NV,m_videoTextures[objInd]);
893  assert(glGetError() == GL_NO_ERROR);
894  }
895  }
896  return S_OK;
897  }
898 }
899 */
900 
901 //----------------------------------------------------------------------------
902 HRESULT CNvSDIin::BindVideoTexture( const GLuint videoTexture, unsigned int stream )
903 {
904  if( stream >= 0 && stream < m_numStreams )
905  {
906  glBindVideoCaptureStreamTextureNV( m_videoSlot, stream, GL_FRAME_NV, GL_TEXTURE_RECTANGLE_NV, videoTexture );
907  assert( glGetError() == GL_NO_ERROR );
908  }
909  return S_OK;
910 }
911 
912 //----------------------------------------------------------------------------
913 HRESULT CNvSDIin::UnbindVideoTexture( unsigned int stream )
914 {
915  if( stream >= 0 && stream < m_numStreams )
916  {
917  glBindVideoCaptureStreamTextureNV( m_videoSlot, stream, GL_FRAME_NV, GL_TEXTURE_RECTANGLE_NV, 0 );
918  assert( glGetError() == GL_NO_ERROR );
919  }
920  return S_OK;
921 }
922 
923 //----------------------------------------------------------------------------
924 HRESULT CNvSDIin::BindVideoFrameBuffer( GLuint videoBuffer, GLint videoBufferFormat, unsigned int stream )
925 {
926  if( stream >= 0 && stream < m_numStreams )
927  {
928  //set the buffer format
930  GL_VIDEO_BUFFER_INTERNAL_FORMAT_NV,
931  &videoBufferFormat );
932  assert( glGetError() == GL_NO_ERROR );
933  glBindBuffer( GL_VIDEO_BUFFER_NV, videoBuffer );
934  assert( glGetError() == GL_NO_ERROR );
935  glBindVideoCaptureStreamBufferNV( m_videoSlot, stream, GL_FRAME_NV, 0 );
936  assert( glGetError() == GL_NO_ERROR );
937  }
938 
939  return S_OK;
940 }
941 
942 //----------------------------------------------------------------------------
943 int CNvSDIin::GetBufferObjectPitch( unsigned int streamIndex )
944 {
945  GLint bufferPitch = 0;
946  if( streamIndex >= 0 && streamIndex < m_numStreams )
947  {
948  // Get the video buffer pitch
949  glGetVideoCaptureStreamivNV( m_videoSlot, streamIndex, GL_VIDEO_BUFFER_PITCH_NV,
950  &bufferPitch );
951  assert( glGetError() == GL_NO_ERROR );
952  }
953  return bufferPitch;
954 }
955 
956 //----------------------------------------------------------------------------
958 {
959  return m_numStreams;
960 }
961 
//----------------------------------------------------------------------------
// Accessor: returns the cached incoming video signal format (m_videoFormat).
// Presumably populated during device setup/state query — confirm in SetupDevice.
NVVIOSIGNALFORMAT CNvSDIin::GetSignalFormat()
{
  return m_videoFormat;
}
967 
//----------------------------------------------------------------------------
// Currently a no-op: the `stream` parameter is unused and nothing is unbound.
// Kept for interface symmetry with BindVideoFrameBuffer; always returns S_OK.
HRESULT CNvSDIin::UnbindVideoFrameBuffer( unsigned int stream )
{
  return S_OK;
}
973 
974 //----------------------------------------------------------------------------
976 {
977  for ( NvU32 i = 0; i < m_numStreams; i++ )
978  {
979  // Set the buffer object capture data format for each stream.
980 
982  GL_VIDEO_COLOR_CONVERSION_MATRIX_NV,
983  &m_cscMat[0][0] );
984  assert( glGetError() == GL_NO_ERROR );
985 
987  GL_VIDEO_COLOR_CONVERSION_MAX_NV, &m_cscMax[0] );
988  assert( glGetError() == GL_NO_ERROR );
989 
991  GL_VIDEO_COLOR_CONVERSION_MIN_NV, &m_cscMin[0] );
992  assert( glGetError() == GL_NO_ERROR );
993 
995  GL_VIDEO_COLOR_CONVERSION_OFFSET_NV, &m_cscOffset[0] );
996  assert( glGetError() == GL_NO_ERROR );
997  }
998 
999  // Start video capture
1002  GLenum err = glGetError();
1003  assert( err == GL_NO_ERROR );
1004  if( err == GL_NO_ERROR )
1005  {
1006  m_bCaptureStarted = true;
1007  }
1008  return S_OK;
1009 }
1010 
//----------------------------------------------------------------------------
// Captures the next video frame into the currently bound capture targets
// (textures or buffer objects). Outputs the driver-assigned sequence number
// and capture timestamp through the pointer arguments and returns the raw
// glVideoCaptureNV status (e.g. success / partial / no-signal — see the
// NV_video_capture extension for the enum values).
// When MEASURE_PERFORMANCE is defined, m_gviTime is set to the seconds
// elapsed between the hardware capture timestamp and the post-call GPU
// clock; m_gpuTime stays 0 here (the timer-query path is commented out).
GLenum CNvSDIin::Capture( GLuint* sequenceNum, GLuint64EXT* captureTime )
{
  GLenum ret;
#ifdef MEASURE_PERFORMANCE
  GLuint64EXT captureTimeStart;
  GLuint64EXT captureTimeEnd;
#endif

  // Capture the video to a buffer object
#ifdef MEASURE_PERFORMANCE
  //glBeginQuery(GL_TIME_ELAPSED_EXT,m_captureTimeQuery);
  glGetInteger64v( GL_CURRENT_TIME_NV, ( GLint64* )&captureTimeStart );
#endif

  ret = glVideoCaptureNV( m_videoSlot, sequenceNum, captureTime );

  // Timing results default to 0 unless performance measurement is enabled.
  m_gviTime = 0;
  m_gpuTime = 0;
#ifdef MEASURE_PERFORMANCE
  glGetInteger64v( GL_CURRENT_TIME_NV, ( GLint64* )&captureTimeEnd );

  // GL_CURRENT_TIME_NV is in nanoseconds; convert the delta to seconds.
  // NOTE(review): assumes captureTime is non-NULL — it is dereferenced here.
  m_gviTime = ( captureTimeEnd - *captureTime ) * .000000001;

  // Timer-query measurement is disabled above, so this is always 0.
  GLuint64EXT timeElapsed = 0;
  m_gpuTime = timeElapsed * .000000001;
#endif
  assert( glGetError() == GL_NO_ERROR );
  return ret;
}
1041 
1042 //----------------------------------------------------------------------------
1044 {
1045  if( m_bCaptureStarted )
1046  {
1049  assert( glGetError() == GL_NO_ERROR );
1050  m_bCaptureStarted = false;
1051  }
1052  return S_OK;
1053 }
1054 
1055 //----------------------------------------------------------------------------
1057 {
1058  if( m_hDC )
1059  {
1060  BOOL bRet;
1061  bRet = wglBindVideoCaptureDeviceNV( m_videoSlot, NULL );
1062  assert( bRet && "Failed trying to unbind the video capture device!" );
1064  assert( bRet && "Failed trying to release the video capture device!" );
1065  m_hDC = NULL;
1066  }
1067  return S_OK;
1068 }
1069 
1070 //----------------------------------------------------------------------------
1072 {
1073  EndCapture();
1074  UnbindDevice();
1075  return S_OK;
1076 }
1077 
1078 //----------------------------------------------------------------------------
1079 HRESULT CNvSDIin::GetFrameRate( float* rate )
1080 {
1081  *rate = m_fFrameRate;
1082 
1083  return S_OK;
1084 }
1085 
1086 //----------------------------------------------------------------------------
1088 {
1089  return m_deviceNumber;
1090 }
PFNGLVIDEOCAPTURENVPROC glVideoCaptureNV
HRESULT StartCapture()
Definition: nvSDIin.cpp:975
HRESULT Cleanup()
Definition: nvSDIin.cpp:1071
HRESULT GetFrameRate(float *rate)
Definition: nvSDIin.cpp:1079
NvU32 m_vioID
Definition: nvSDIin.h:149
int bitsPerComponent
Definition: nvCommon.h:112
PFNGLBEGINVIDEOCAPTURENVPROC glBeginVideoCaptureNV
int GetDeviceNumber()
Definition: nvSDIin.cpp:1087
NVVIOCOMPONENTSAMPLING m_Sampling
Definition: nvSDIin.h:132
virtual ~CNvSDIinTopology()
Definition: nvSDIin.cpp:59
PFNGLVIDEOCAPTURESTREAMPARAMETERFVNVPROC glVideoCaptureStreamParameterfvNV
std::string LinkIDToString(NVVIOLINKID id)
Definition: nvSDIutil.cpp:524
PFNGLGETINTEGER64VPROC glGetInteger64v
GLenum Capture(GLuint *sequenceNum, GLuint64EXT *captureTime)
Definition: nvSDIin.cpp:1012
unsigned int GetHeight()
Definition: nvSDIin.cpp:263
PFNGLENDVIDEOCAPTURENVPROC glEndVideoCaptureNV
HRESULT UnbindVideoFrameBuffer(unsigned int stream)
Definition: nvSDIin.cpp:969
float m_gviTime
Definition: nvSDIin.h:85
PFNGLGENQUERIESPROC glGenQueries
GLfloat m_cscMax[4]
Definition: nvSDIin.h:155
static CNvSDIinTopology & Instance()
Definition: nvSDIin.cpp:73
for i
double * timeElapsed
Definition: phidget22.h:3939
unsigned int m_videoWidth
Definition: nvSDIin.h:137
HRESULT BindDevice(GLuint videoSlot, HDC hDC)
Definition: nvSDIin.cpp:641
int GetNumDevice()
Definition: nvSDIin.cpp:80
GLfloat m_cscMat[4][4]
Definition: nvSDIin.h:154
bool m_bCaptureStarted
Definition: nvSDIin.h:159
HRESULT SetupDevice(int deviceNumber=0)
Definition: nvSDIin.cpp:554
bool m_bInitialized
Definition: nvSDIin.h:76
HRESULT GetVideoInState(NVVIOCONFIG_V1 *vioConfig, NVVIOSTATUS *vioStatus)
Definition: nvSDIin.cpp:319
NVVIOSIGNALFORMAT GetSignalFormat()
Definition: nvSDIin.cpp:963
unsigned int m_videoHeight
Definition: nvSDIin.h:138
NvVioHandle GetVioHandle()
Definition: nvSDIin.cpp:251
PFNWGLLOCKVIDEOCAPTUREDEVICENVPROC wglLockVideoCaptureDeviceNV
PFNGLBINDVIDEOCAPTURESTREAMTEXTURENVPROC glBindVideoCaptureStreamTextureNV
bool loadBufferObjectExtension()
NVVIOSIGNALFORMATDETAIL m_signalFormatDetail
Definition: nvSDIin.h:143
NVVIOCOMPONENTSAMPLING sampling
Definition: nvCommon.h:111
float m_fFrameRate
Definition: nvSDIin.h:139
bool loadTimerQueryExtension()
CNvSDIin()
Definition: nvSDIin.cpp:196
void DumpChannelStatus(NVVIOCHANNELSTATUS jack)
Definition: nvSDIin.cpp:269
PFNGLGETVIDEOCAPTURESTREAMIVNVPROC glGetVideoCaptureStreamivNV
HRESULT BindVideoFrameBuffer(GLuint videoBuffer, GLint videoBufferFormat, unsigned int stream)
Definition: nvSDIin.cpp:924
HRESULT Init(nvOptions *options=NULL)
Definition: nvSDIin.cpp:294
PFNGLVIDEOCAPTURESTREAMPARAMETERIVNVPROC glVideoCaptureStreamParameterivNV
std::string ColorSpaceToString(NVVIOCOLORSPACE space)
Definition: nvSDIutil.cpp:495
float m_gpuTime
Definition: nvSDIin.h:86
~CNvSDIin()
Definition: nvSDIin.cpp:237
unsigned int m_activeJacks[NVAPI_MAX_VIO_JACKS]
Definition: nvSDIin.h:142
int m_BitsPerComponent
Definition: nvSDIin.h:133
HRESULT SetVideoConfig()
Definition: nvSDIin.cpp:365
NvVioHandle m_vioHandle
Definition: nvSDIin.h:150
NVVIOTOPOLOGYTARGET * m_lDevice[NVAPI_MAX_VIO_DEVICES]
Definition: nvSDIin.h:75
PFNWGLENUMERATEVIDEOCAPTUREDEVICESNVPROC wglEnumerateVideoCaptureDevicesNV
GLuint m_captureTimeQuery
Definition: nvSDIin.h:161
bool m_bDualLink
Definition: nvSDIin.h:131
GLfloat m_cscMin[4]
Definition: nvSDIin.h:156
bool dualLink
Definition: nvCommon.h:110
PFNWGLRELEASEVIDEOCAPTUREDEVICENVPROC wglReleaseVideoCaptureDeviceNV
PFNGLDELETEQUERIESPROC glDeleteQueries
unsigned int GetWidth()
Definition: nvSDIin.cpp:257
unsigned long ULONG
Definition: ATC3DGm.h:432
PFNWGLQUERYVIDEOCAPTUREDEVICENVPROC wglQueryVideoCaptureDeviceNV
HRESULT UnbindVideoTexture(unsigned int stream)
Definition: nvSDIin.cpp:913
HRESULT BindVideoTexture(GLuint videoTexture, unsigned int stream)
Definition: nvSDIin.cpp:902
GLfloat m_cscOffset[4]
Definition: nvSDIin.h:157
unsigned int GetNumStreams()
Definition: nvSDIin.cpp:957
void SetCSCParams(GLfloat *cscMat, GLfloat *cscOffset, GLfloat *cscMin, GLfloat *cscMax)
Definition: nvSDIin.cpp:242
std::string SignalFormatToString(NVVIOSIGNALFORMAT format)
Definition: nvSDIutil.cpp:42
bool loadCaptureVideoExtension()
std::string ComponentSamplingFormatToString(NVVIOCOMPONENTSAMPLING sampling)
Definition: nvSDIutil.cpp:473
unsigned int m_numStreams
Definition: nvSDIin.h:141
int GetBufferObjectPitch(unsigned int streamIndex)
Definition: nvSDIin.cpp:943
PFNWGLQUERYCURRENTCONTEXTNVPROC wglQueryCurrentContextNV
bool expansionEnable
Definition: nvCommon.h:113
HDC m_hDC
Definition: nvSDIin.h:145
NVVIOSIGNALFORMAT m_videoFormat
Definition: nvSDIin.h:140
PFNGLBINDVIDEOCAPTURESTREAMBUFFERNVPROC glBindVideoCaptureStreamBufferNV
NVVIOTOPOLOGYTARGET * GetDevice(int index)
Definition: nvSDIin.cpp:86
int BOOL
Definition: ATC3DGm.h:446
HRESULT EndCapture()
Definition: nvSDIin.cpp:1043
PFNGLBINDBUFFERARBPROC glBindBuffer
void DumpStreamStatus(NVVIOSTREAM stream)
Definition: nvSDIin.cpp:281
GLuint m_videoSlot
Definition: nvSDIin.h:146
HRESULT UnbindDevice()
Definition: nvSDIin.cpp:1056
int m_deviceNumber
Definition: nvSDIin.h:151
PFNWGLBINDVIDEOCAPTUREDEVICENVPROC wglBindVideoCaptureDeviceNV
HVIDEOINPUTDEVICENV m_device
Definition: nvSDIin.h:148
int m_ExpansionEnable
Definition: nvSDIin.h:134