我正在尝试使用 Windows Directshow API 使用 UVC 相机捕获静态图像。我正在使用 Microsoft 的这篇文章(https://learn.microsoft.com/en-us/windows/win32/directshow/capturing-an-image-from-a-still-image-pin)来实现此目的。我希望能够捕捉不同曝光的静态图像,以便稍后使用 OpenCV 创建 HDR 图像。我知道 Directshow 是一项遗留功能。我最终使用了它,因为它是唯一一个具有如何为 UVC 相机实现静态图像捕获示例的 API。
我尝试使用 IAMCameraControl 接口(https://learn.microsoft.com/en-us/windows/win32/api/strmif/nn-strmif-iamcameracontrol)在触发静态图像之前设置相机曝光,然后再调用 pAMVidControl->SetMode(pPin, VideoControlFlag_Trigger) 触发拍摄。我可以在预览画面中看到曝光变化,但在发出 SetMode 触发命令时曝光会被重置为某个值 x(不确定具体是多少)。因此捕获的静态图像的曝光是 x,而不是我设置的值。是否存在其他接口可用于为静态图像拍摄设置相机曝光?
这是我正在尝试做的事情的示例。不幸的是,由于我必须添加其他内容才能使 directshow 正常工作,因此它很长。
#include <dshow.h>
#include <iostream>
// Forward declarations of DirectShow filter CLSIDs; the actual definitions
// are resolved from strmiids.lib at link time.
EXTERN_C const CLSID CLSID_NullRenderer;
EXTERN_C const CLSID CLSID_SampleGrabber;
// IID of ISampleGrabber ({6B652FFF-11FE-4FCE-92AD-0266B5D7C78F}), declared
// locally because qedit.h was removed from recent Windows SDKs.
static
const
IID IID_ISampleGrabber = { 0x6B652FFF, 0x11FE, 0x4fce, { 0x92, 0xAD, 0x02, 0x66, 0xB5, 0xD7, 0xC7, 0x8F } };
// Dummy class carrying the Sample Grabber filter CLSID via __declspec(uuid),
// so __uuidof(SampleGrabber) can be used with CoCreateInstance.
class __declspec(uuid("{C1F400A0-3F08-11D3-9F0B-006008039E37}")) SampleGrabber;
#pragma region SampleGrabber
// Local redeclarations of the deprecated qedit.h Sample Grabber interfaces.
// NOTE(review): the order of the pure-virtual methods below defines the COM
// vtable layout and must match the original qedit.h declarations exactly —
// do not reorder or insert methods.
struct __declspec(uuid("0579154a-2b53-4994-b0d0-e773148eff85"))
ISampleGrabberCB : IUnknown
{
//
// Raw methods provided by interface
//
// Invoked with the original media sample (callback mode 0).
virtual HRESULT __stdcall SampleCB (
double SampleTime,
struct IMediaSample * pSample ) = 0;
// Invoked with a copy of the sample buffer (callback mode 1).
virtual HRESULT __stdcall BufferCB (
double SampleTime,
unsigned char * pBuffer,
long BufferLen ) = 0;
};
struct __declspec(uuid("6b652fff-11fe-4fce-92ad-0266b5d7c78f"))
ISampleGrabber : IUnknown
{
//
// Raw methods provided by interface
//
// TRUE = stop the graph after one sample is received.
virtual HRESULT __stdcall SetOneShot (
long OneShot ) = 0;
// Restrict the media type the grabber will accept when connecting.
virtual HRESULT __stdcall SetMediaType (
struct _AMMediaType * pType ) = 0;
// Retrieve the fully-specified media type after the pin is connected.
virtual HRESULT __stdcall GetConnectedMediaType (
struct _AMMediaType * pType ) = 0;
// TRUE = keep an internal copy of each sample for GetCurrentBuffer.
virtual HRESULT __stdcall SetBufferSamples (
long BufferThem ) = 0;
virtual HRESULT __stdcall GetCurrentBuffer (
/*[in,out]*/ long * pBufferSize,
/*[out]*/ long * pBuffer ) = 0;
virtual HRESULT __stdcall GetCurrentSample (
/*[out,retval]*/ struct IMediaSample * * ppSample ) = 0;
// Register a callback; WhichMethodToCallback: 0 = SampleCB, 1 = BufferCB.
virtual HRESULT __stdcall SetCallback (
struct ISampleGrabberCB * pCallback,
long WhichMethodToCallback ) = 0;
};
struct __declspec(uuid("c1f400a0-3f08-11d3-9f0b-006008039e37"))
SampleGrabber;
// [ default ] interface ISampleGrabber
#pragma endregion
// DirectShow objects.
// Kept at file scope so exit_message() can release whatever has been
// created so far, in any error path.
HRESULT hr;
ICreateDevEnum *pDevEnum = NULL;        // system device enumerator
IEnumMoniker *pEnum = NULL;             // enumerator over video input devices
IMoniker *pMoniker = NULL;              // moniker of the selected device
IPropertyBag *pPropBag = NULL;          // property bag (friendly-name lookup)
IGraphBuilder *pGraph = NULL;           // the filter graph
ICaptureGraphBuilder2 *pBuilder = NULL; // capture graph builder helper
IBaseFilter *pCap = NULL;               // camera capture filter
IBaseFilter *pSampleGrabberFilter = NULL;      // grabber on the capture stream
IBaseFilter *pSampleGrabberStillFilter = NULL; // grabber on the still stream
ISampleGrabber *pSampleGrabber = NULL;         // ISampleGrabber of capture grabber
ISampleGrabber *pSampleGrabberStill = NULL;    // ISampleGrabber of still grabber
IBaseFilter *pNullRenderer = NULL;      // sink for the capture stream
IBaseFilter *pNullRendererStill = NULL; // sink for the still stream
IMediaControl *pMediaControl = NULL;    // run/stop control for the graph
IAMVideoControl *pAMVidControl = NULL;  // used to trigger the still pin
IAMCameraControl *pAMVCamControl = NULL;// used to set/query exposure
int img_index = 0;                      // sequence number for saved BMP files
AM_MEDIA_TYPE g_StillMediaType;         // media type of the connected still pin
// Find the still pin.
IPin *pPin = NULL;                      // the camera's still-image output pin
char *pBuffer = NULL;                   // scratch buffer freed in exit_message
// Print an error message, release every DirectShow/COM object created so
// far, uninitialise COM and terminate the process.
//
// Parameters:
//   error_message - text written to stderr (may be the empty string)
//   error         - process exit code (0 = success, non-zero = failure)
//
// Never returns.
void exit_message(const char* error_message, int error)
{
    // Print via "%s" so that '%' characters in the message are not
    // interpreted as conversion specifiers (the original passed the
    // message directly as the format string — a format-string bug).
    fprintf(stderr, "%s\n", error_message);
    // Clean up DirectShow / COM objects in (roughly) reverse creation order.
    if (pBuffer != NULL) delete[] pBuffer;
    if (pMediaControl != NULL) pMediaControl->Release();
    if (pNullRenderer != NULL) pNullRenderer->Release();
    if (pNullRendererStill != NULL) pNullRendererStill->Release();
    if (pSampleGrabber != NULL) pSampleGrabber->Release();
    if (pSampleGrabberStill != NULL) pSampleGrabberStill->Release();
    if (pSampleGrabberFilter != NULL) pSampleGrabberFilter->Release();
    if (pSampleGrabberStillFilter != NULL) pSampleGrabberStillFilter->Release();
    if (pCap != NULL) pCap->Release();
    if (pBuilder != NULL) pBuilder->Release();
    if (pGraph != NULL) pGraph->Release();
    if (pPropBag != NULL) pPropBag->Release();
    if (pMoniker != NULL) pMoniker->Release();
    if (pEnum != NULL) pEnum->Release();
    if (pDevEnum != NULL) pDevEnum->Release();
    if (pPin != NULL) pPin->Release();
    if (pAMVidControl != NULL) pAMVidControl->Release();
    if (pAMVCamControl != NULL) pAMVCamControl->Release();
    CoUninitialize();
    // Exit the program
    exit(error);
}
// Receives still-pin sample buffers from the Sample Grabber and writes each
// one to disk as a 24-bit BMP file ("still/Image_Still_<n>.bmp").
//
// The object is used as a single global with a do-nothing reference count,
// so COM never attempts to destroy it.
class SampleGrabberCallback : public ISampleGrabberCB
{
public:
    // Fake reference counting: lifetime is managed by the global instance.
    STDMETHODIMP_(ULONG) AddRef() { return 1; }
    STDMETHODIMP_(ULONG) Release() { return 2; }
    char filename[100]; // path of the BMP file currently being written
    // Standard COM interface lookup for IUnknown / ISampleGrabberCB.
    STDMETHODIMP QueryInterface(REFIID riid, void **ppvObject)
    {
        if (NULL == ppvObject) return E_POINTER;
        if (riid == __uuidof(IUnknown))
        {
            *ppvObject = static_cast<IUnknown*>(this);
            return S_OK;
        }
        if (riid == __uuidof(ISampleGrabberCB))
        {
            *ppvObject = static_cast<ISampleGrabberCB*>(this);
            return S_OK;
        }
        // COM rules require E_NOINTERFACE (not E_NOTIMPL) and a NULL
        // out-pointer when the requested interface is not supported.
        *ppvObject = NULL;
        return E_NOINTERFACE;
    }
    // Not used: the grabber is registered with WhichMethodToCallback = 1,
    // so only BufferCB is ever invoked.
    STDMETHODIMP SampleCB(double Time, IMediaSample *pSample)
    {
        fprintf(stderr, "Callback triggered SampleCB\n");
        return E_NOTIMPL;
    }
    // Invoked for every buffered still sample; writes the RGB24 pixel
    // buffer to a new BMP file and advances the global image counter.
    STDMETHODIMP BufferCB(double Time, BYTE *pBuffer, long BufferLen)
    {
        // This simple writer only handles uncompressed RGB24 video with a
        // VIDEOINFOHEADER format block.
        if ((g_StillMediaType.majortype != MEDIATYPE_Video) ||
            (g_StillMediaType.formattype != FORMAT_VideoInfo) ||
            (g_StillMediaType.cbFormat < sizeof(VIDEOINFOHEADER)) ||
            (g_StillMediaType.pbFormat == NULL) ||
            (g_StillMediaType.subtype != MEDIASUBTYPE_RGB24)
            )
        {
            return VFW_E_INVALIDMEDIATYPE;
        }
        fprintf(stderr, "Save file\n");
        // snprintf guards against overflowing `filename` (sprintf did not).
        snprintf(filename, sizeof(filename),
                 "still/Image_Still_%d.bmp", img_index);
        img_index++;
        HANDLE hf = CreateFile(filename, GENERIC_WRITE,
            FILE_SHARE_WRITE, NULL, CREATE_ALWAYS, 0, NULL);
        if (hf == INVALID_HANDLE_VALUE)
        {
            return E_FAIL;
        }
        // The BITMAPINFOHEADER starts SIZE_PREHEADER bytes into the
        // VIDEOINFOHEADER format block; everything from there on is
        // written verbatim after the file header.
        long cbBitmapInfoSize = g_StillMediaType.cbFormat - SIZE_PREHEADER;
        VIDEOINFOHEADER *pVideoHeader =
            (VIDEOINFOHEADER*)g_StillMediaType.pbFormat;
        BITMAPFILEHEADER bfh;
        ZeroMemory(&bfh, sizeof(bfh));
        bfh.bfType = 0x4d42; // "BM" little-endian (well-defined, unlike 'MB')
        bfh.bfSize = sizeof(bfh) + BufferLen + cbBitmapInfoSize;
        bfh.bfOffBits = sizeof(BITMAPFILEHEADER) + cbBitmapInfoSize;
        // Write: file header, bitmap info header, then the pixel data.
        DWORD dwWritten = 0;
        WriteFile(hf, &bfh, sizeof(bfh), &dwWritten, NULL);
        WriteFile(hf, HEADER(pVideoHeader), cbBitmapInfoSize, &dwWritten, NULL);
        WriteFile(hf, pBuffer, BufferLen, &dwWritten, NULL);
        CloseHandle(hf);
        return S_OK;
    }
};
// Global instance of the callback, handed to ISampleGrabber::SetCallback.
SampleGrabberCallback g_StillCapCB;
// Entry point.
//
// Builds a DirectShow graph for a UVC camera with two branches:
//   capture stream : camera -> Sample Grabber -> Null Renderer
//   still stream   : camera -> Sample Grabber (still) -> Null Renderer
// plus an optional live preview branch (enabled with /preview).
//
// The still pin is fired with IAMVideoControl::SetMode(VideoControlFlag_Trigger);
// each triggered frame is written to a BMP file by g_StillCapCB::BufferCB.
// Camera exposure is set and read back through IAMCameraControl.
//
// Command line:
//   /preview  - also render a preview window
int main(int argc, char **argv)
{
    // Capture settings
    int show_preview_window = 0;
    int device_number = 2;      // 1-based index of the capture device to open
    char device_name[100];
    device_name[0] = '\0';      // was printed uninitialised in an error path

    // Parse command line arguments. Available options:
    //   /preview
    int n = 1;
    while (n < argc)
    {
        if (strcmp(argv[n], "/preview") == 0)
        {
            // Enable preview window
            show_preview_window = 1;
        }
        else
        {
            // Unknown command line argument
            fprintf(stderr, "Unrecognised option: %s\n", argv[n]);
            exit_message("", 1);
        }
        n++;
    }

    // Initialise COM
    hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
    if (hr != S_OK)
        exit_message("Could not initialise COM", 1);

    // Create filter graph
    hr = CoCreateInstance(CLSID_FilterGraph, NULL,
            CLSCTX_INPROC_SERVER, IID_IGraphBuilder,
            (void**)&pGraph);
    if (hr != S_OK)
        exit_message("Could not create filter graph", 1);

    // Create capture graph builder
    hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL,
            CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2,
            (void **)&pBuilder);
    if (hr != S_OK)
        exit_message("Could not create capture graph builder", 1);

    // Attach capture graph builder to graph
    hr = pBuilder->SetFiltergraph(pGraph);
    if (hr != S_OK)
        exit_message("Could not attach capture graph builder to graph", 1);

    // Create system device enumerator
    hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL,
            CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pDevEnum));
    if (hr != S_OK)
        exit_message("Could not create system device enumerator", 1);

    // Video input device enumerator
    hr = pDevEnum->CreateClassEnumerator(
            CLSID_VideoInputDeviceCategory, &pEnum, 0);
    if (hr != S_OK)
        exit_message("No video devices found", 1);

    // Advance the enumerator to the requested device (1-based index).
    VARIANT var;
    n = 0;
    while (1)
    {
        // Access next device
        hr = pEnum->Next(1, &pMoniker, NULL);
        if (hr == S_OK)
        {
            n++; // increment device count
        }
        else
        {
            // Ran out of devices before reaching device_number.
            if (device_number == 0)
            {
                fprintf(stderr,
                    "Video capture device %s not found\n",
                    device_name);
            }
            else
            {
                fprintf(stderr,
                    "Video capture device %d not found\n",
                    device_number);
            }
            exit_message("", 1);
        }
        if (n >= device_number)
        {
            break;
        }
    }

    // Print the friendly name of the selected device.
    hr = pMoniker->BindToStorage(0, 0, IID_PPV_ARGS(&pPropBag));
    VariantInit(&var);
    hr = pPropBag->Read(L"FriendlyName", &var, 0);
    // Only dereference bstrVal when the read succeeded (the original
    // printed an indeterminate pointer on failure).
    if (hr == S_OK)
        fprintf(stderr, "Capture device: %ls\n", var.bstrVal);
    VariantClear(&var);

    // Create capture filter and add to graph
    hr = pMoniker->BindToObject(0, 0,
            IID_IBaseFilter, (void**)&pCap);
    if (hr != S_OK) exit_message("Could not create capture filter", 1);

    // Add capture filter to graph
    hr = pGraph->AddFilter(pCap, L"Capture Filter");
    if (hr != S_OK) exit_message("Could not add capture filter to graph", 1);

    // Create sample grabber filter for the capture stream
    hr = CoCreateInstance(CLSID_SampleGrabber, NULL,
            CLSCTX_INPROC_SERVER, IID_IBaseFilter,
            (void**)&pSampleGrabberFilter);
    if (hr != S_OK)
        exit_message("Could not create Sample Grabber filter", 1);

    // Query the ISampleGrabber interface of the sample grabber filter
    hr = pSampleGrabberFilter->QueryInterface(
            IID_ISampleGrabber, (void**)&pSampleGrabber);
    if (hr != S_OK)
        exit_message("Could not get ISampleGrabber interface to sample grabber filter", 1);

    // Enable sample buffering in the sample grabber filter
    hr = pSampleGrabber->SetBufferSamples(TRUE);
    if (hr != S_OK)
        exit_message("Could not enable sample buffering in the sample grabber", 1);

    // Request uncompressed RGB24 video from both grabbers.
    AM_MEDIA_TYPE mt;
    ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
    mt.majortype = MEDIATYPE_Video;
    mt.subtype = MEDIASUBTYPE_RGB24;

    // Set media type in sample grabber filter
    hr = pSampleGrabber->SetMediaType(&mt);
    if (hr != S_OK)
        exit_message("Could not set media type in sample grabber", 1);

    // Add sample grabber filter to filter graph
    hr = pGraph->AddFilter(pSampleGrabberFilter, L"SampleGrab");
    if (hr != S_OK)
        exit_message("Could not add Sample Grabber to filter graph", 1);

    // Create Null Renderer filter
    hr = CoCreateInstance(CLSID_NullRenderer, NULL,
            CLSCTX_INPROC_SERVER, IID_IBaseFilter,
            (void**)&pNullRenderer);
    if (hr != S_OK)
        exit_message("Could not create Null Renderer filter", 1);

    // Add Null Renderer filter to filter graph
    hr = pGraph->AddFilter(pNullRenderer, L"NullRender");
    if (hr != S_OK)
        exit_message("Could not add Null Renderer to filter graph", 1);

    // Connect up the filter graph's capture stream
    hr = pBuilder->RenderStream(
            &PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
            pCap, pSampleGrabberFilter, pNullRenderer);
    if (hr != S_OK)
        exit_message("Could not render capture video stream", 1);

    // Connect up the filter graph's preview stream (optional)
    if (show_preview_window > 0)
    {
        hr = pBuilder->RenderStream(
                &PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video,
                pCap, NULL, NULL);
        if (hr != S_OK && hr != VFW_S_NOPREVIEWPIN)
            exit_message("Could not render preview video stream", 1);
    }

    // ------------STILL-IMAGE-CAPTURE-SETUP---------------
    // Create sample grabber filter for the still stream
    hr = CoCreateInstance(CLSID_SampleGrabber, NULL,
            CLSCTX_INPROC_SERVER, IID_IBaseFilter,
            (void**)&pSampleGrabberStillFilter);
    if (hr != S_OK)
        exit_message("Could not create Sample Grabber filter", 1);

    // Add sample grabber filter to filter graph for still
    hr = pGraph->AddFilter(pSampleGrabberStillFilter, L"SampleGrabStill");
    if (hr != S_OK)
        exit_message("Could not add Sample Grabber to filter graph still", 1);

    // Create Null Renderer filter for still
    hr = CoCreateInstance(CLSID_NullRenderer, NULL,
            CLSCTX_INPROC_SERVER, IID_IBaseFilter,
            (void**)&pNullRendererStill);
    if (hr != S_OK)
        exit_message("Could not create Null Renderer filter", 1);

    // Add Null Renderer filter to filter graph for still
    hr = pGraph->AddFilter(pNullRendererStill, L"NullRender Still");
    if (hr != S_OK)
        exit_message("Could not add Null Renderer to filter graph", 1);

    // Query the ISampleGrabber interface of the still sample grabber
    hr = pSampleGrabberStillFilter->QueryInterface(
            IID_ISampleGrabber, (void**)&pSampleGrabberStill);
    if (hr != S_OK)
        exit_message("Could not get ISampleGrabber interface to sample grabber filter", 1);

    // Set media type for the still sample grabber
    hr = pSampleGrabberStill->SetMediaType(&mt);
    if (hr != S_OK)
        exit_message("Could not set media type in sample grabber", 1);

    // Connect up the filter graph's still-image stream
    hr = pBuilder->RenderStream(
            &PIN_CATEGORY_STILL, &MEDIATYPE_Video,
            pCap, pSampleGrabberStillFilter, pNullRendererStill);
    if (hr != S_OK)
        exit_message("Could not render capture video stream still", 1);

    // Keep the graph running after each still sample (not one-shot)
    hr = pSampleGrabberStill->SetOneShot(FALSE);
    if (hr != S_OK)
        exit_message("Set oneshot to false for sample", 1);

    // Enable sample buffering in the still sample grabber
    hr = pSampleGrabberStill->SetBufferSamples(TRUE);
    if (hr != S_OK)
        exit_message("Could not enable sample buffering in the sample grabber", 1);

    // Register the BMP-writing callback; 1 = deliver via BufferCB.
    hr = pSampleGrabberStill->SetCallback(&g_StillCapCB, 1);
    if (hr != S_OK)
        exit_message("Could not set sample grabber callback", 1);

    // Get media control interface of the graph builder object
    hr = pGraph->QueryInterface(IID_IMediaControl,
            (void**)&pMediaControl);
    if (hr != S_OK) exit_message("Could not get media control interface", 1);

    // Run graph
    while (1)
    {
        hr = pMediaControl->Run();
        // Hopefully, the return value was S_OK or S_FALSE
        if (hr == S_OK) break;      // graph is now running
        if (hr == S_FALSE) continue; // graph still preparing to run
        // Anything else is a real failure. Print the HRESULT in hex with
        // the correct length modifier (hr is a long, not unsigned int).
        fprintf(stderr, "Error: 0x%08lx\n", (unsigned long)hr);
        exit_message("Could not run filter graph", 1);
    }

    // Query the capture filter for IAMVideoControl (still-pin trigger)
    hr = pCap->QueryInterface(IID_IAMVideoControl, (void**)&pAMVidControl);
    if (hr != S_OK) exit_message("Could not get IAMVideoControl", 1);

    // Query the capture filter for IAMCameraControl (exposure)
    hr = pCap->QueryInterface(IID_IAMCameraControl, (void**)&pAMVCamControl);
    if (hr != S_OK) exit_message("Could not get IAMCameraControl", 1);

    long Min, Max, Step, Default, Flags, Val;
    // Get the exposure range and default value.
    hr = pAMVCamControl->GetRange(CameraControl_Exposure, &Min, &Max, &Step,
            &Default, &Flags);
    // Set default exposure in manual mode.
    hr = pAMVCamControl->Set(CameraControl_Exposure, Default, CameraControl_Flags_Manual);
    if (hr != S_OK) exit_message("Could not set exposure", 1);
    hr = pAMVCamControl->Get(CameraControl_Exposure, &Val, &Flags);
    if (hr != S_OK) exit_message("Could not get current camera exposure", 1);

    // Find the camera's still-image output pin.
    hr = pBuilder->FindPin(pCap, PINDIR_OUTPUT, &PIN_CATEGORY_STILL, NULL, FALSE, 0, &pPin);
    if (hr != S_OK) exit_message("Could not get Pin of category StillImage", 1);
    fprintf(stderr, "Found Pin\n");

    // Get the media type from the still sample grabber filter
    hr = pSampleGrabberStill->GetConnectedMediaType(
            &g_StillMediaType);
    if (hr != S_OK) exit_message("Could not get media type", 1);

    int count = 0;
    // Poll loop: trigger one still at iteration 10, change exposure at
    // iteration 20, and log the exposure value/flags every 500 ms.
    while (true)
    {
        fprintf(stderr, "Image = %d\n", count);
        if (count == 30)
        {
            break;
        }
        if (count == 10)
        {
            hr = pAMVidControl->SetMode(pPin, VideoControlFlag_Trigger);
            if (hr != S_OK) exit_message("Could not set mode trigger", 1);
            pPin->Release();
            // NULL the pointer so exit_message() does not Release() it a
            // second time (double-release bug in the original).
            pPin = NULL;
        }
        if (count == 20)
        {
            fprintf(stderr, "Set New expo\n");
            Val = -11;
            Flags = CameraControl_Flags_Manual;
            hr = pAMVCamControl->Set(CameraControl_Exposure, Val, Flags);
            if (hr != S_OK) exit_message("Could not set exposure", 1);
        }
        count++;
        hr = pAMVCamControl->Get(CameraControl_Exposure, &Val, &Flags);
        if (hr != S_OK) exit_message("Could not get exposure", 1);
        fprintf(stderr, "Expo Val: %ld\n", Val);
        fprintf(stderr, "Expo Flags: %ld\n", Flags);
        Sleep(500);
    }

    // Stop the graph
    pMediaControl->Stop();

    // Free the format block copied into g_StillMediaType by
    // GetConnectedMediaType (exit_message does not know about it).
    if (g_StillMediaType.pbFormat != NULL)
    {
        CoTaskMemFree(g_StillMediaType.pbFormat);
        g_StillMediaType.pbFormat = NULL;
        g_StillMediaType.cbFormat = 0;
    }

    // Clean up and exit
    exit_message("", 0);
}
上述做法本身是正确的。问题最终被确认为硬件故障,更换另一台相机后即得到解决。