DirectX screen capture and output to a video file

I am doing desktop screen capture and want to output it as a video file. Currently I have some code, taken from here, that outputs PNG images. I modified the code slightly so it writes JPEG files instead, and then used OpenCV 3.0.0 to convert those into an AVI video. The reason I need JPEG files as output is that I am running Windows 8.1, and OpenCV's VideoWriter::fourcc('M','J','P','G') is the only option that works for me.
The PNG output works perfectly, but the JPEG output does not: the images have vertical lines, and producing the JPEG output takes relatively long.
I see two options:
1. Improve the JPEG output so that it is faster and clean.
2. Work around the OpenCV 3.0.0 issue, take the PNG files as input, and output a video file (preferably AVI/MP4).
Either solution works for me. Please help. Thanks.
My code:
#include "stdafx.h"
#include <Wincodec.h>             // we use WIC for saving images
#include <d3d9.h>                 // DirectX 9 header
#include <opencv\cv.h>
#include <opencv\cxcore.hpp>
#include <opencv2\highgui\highgui.hpp>
#pragma comment(lib, "d3d9.lib")  // link to DirectX 9 library

using namespace cv;

#define WIDEN2(x) L ## x
#define WIDEN(x) WIDEN2(x)
#define __WFILE__ WIDEN(__FILE__)
#define HRCHECK(__expr) {hr=(__expr);if(FAILED(hr)){wprintf(L"FAILURE 0x%08X (%i)\n\tline: %u file: '%s'\n\texpr: '" WIDEN(#__expr) L"'\n",hr, hr, __LINE__,__WFILE__);goto cleanup;}}
#define RELEASE(__p) {if(__p!=nullptr){__p->Release();__p=nullptr;}}

HRESULT Direct3D9TakeScreenshots(UINT adapter, UINT count);
HRESULT SavePixelsToFile32bppPBGRA(UINT width, UINT height, UINT stride, LPBYTE pixels, LPWSTR filePath, const GUID &format);


int _tmain(int argc, _TCHAR* argv[])
{
    HRESULT hr = Direct3D9TakeScreenshots(D3DADAPTER_DEFAULT, 10);
    return 0;
}


HRESULT Direct3D9TakeScreenshots(UINT adapter, UINT count)
{
    HRESULT hr = S_OK;
    IDirect3D9 *d3d = nullptr;
    IDirect3DDevice9 *device = nullptr;
    IDirect3DSurface9 *surface = nullptr;
    D3DPRESENT_PARAMETERS parameters = { 0 };
    D3DDISPLAYMODE mode;
    D3DLOCKED_RECT rc;
    UINT pitch;
    SYSTEMTIME st;
    LPBYTE *shots = nullptr;

    // init D3D and get screen size
    d3d = Direct3DCreate9(D3D_SDK_VERSION);
    HRCHECK(d3d->GetAdapterDisplayMode(adapter, &mode));

    parameters.Windowed = TRUE;
    parameters.BackBufferCount = 1;
    parameters.BackBufferHeight = mode.Height;
    parameters.BackBufferWidth = mode.Width;
    parameters.SwapEffect = D3DSWAPEFFECT_DISCARD;
    parameters.hDeviceWindow = NULL;

    // create device & capture surface
    HRCHECK(d3d->CreateDevice(adapter, D3DDEVTYPE_HAL, NULL, D3DCREATE_SOFTWARE_VERTEXPROCESSING, &parameters, &device));
    HRCHECK(device->CreateOffscreenPlainSurface(mode.Width, mode.Height, D3DFMT_A8R8G8B8, D3DPOOL_SYSTEMMEM, &surface, nullptr));

    // compute the required buffer size
    HRCHECK(surface->LockRect(&rc, NULL, 0));
    pitch = rc.Pitch;
    HRCHECK(surface->UnlockRect());

    // allocate screenshots buffers
    shots = new LPBYTE[count];
    for (UINT i = 0; i < count; i++)
    {
        shots[i] = new BYTE[pitch * mode.Height];
    }

    GetSystemTime(&st); // measure the time we spend doing <count> captures
    wprintf(L"START Capture--> %i:%i:%i.%i\n", st.wHour, st.wMinute, st.wSecond, st.wMilliseconds);
    for (UINT i = 0; i < count; i++)
    {
        // get the data
        HRCHECK(device->GetFrontBufferData(0, surface));

        // copy it into our buffers
        HRCHECK(surface->LockRect(&rc, NULL, 0));
        CopyMemory(shots[i], rc.pBits, rc.Pitch * mode.Height);
        HRCHECK(surface->UnlockRect());
    }
    GetSystemTime(&st);
    wprintf(L"END Capture--> %i:%i:%i.%i\n", st.wHour, st.wMinute, st.wSecond, st.wMilliseconds);

    // save all screenshots
    for (UINT i = 0; i < count; i++)
    {
        WCHAR file[100];
        wsprintf(file, L"cap%i.jpg", i);
        HRCHECK(SavePixelsToFile32bppPBGRA(mode.Width, mode.Height, pitch, shots[i], file, GUID_ContainerFormatJpeg));
    }

    cleanup:
    if (shots != nullptr)
    {
        for (UINT i = 0; i < count; i++)
        {
            delete[] shots[i];
        }
        delete[] shots;
    }

    RELEASE(surface);
    RELEASE(device);
    RELEASE(d3d);
    return hr;
}

HRESULT SavePixelsToFile32bppPBGRA(UINT width, UINT height, UINT stride, LPBYTE pixels, LPWSTR filePath, const GUID &format)
{
    if (!filePath || !pixels)
        return E_INVALIDARG;

    HRESULT hr = S_OK;
    IWICImagingFactory *factory = nullptr;
    IWICBitmapEncoder *encoder = nullptr;
    IWICBitmapFrameEncode *frame = nullptr;
    IWICStream *stream = nullptr;
    GUID pf = GUID_WICPixelFormat32bppPBGRA;
    BOOL coInit = CoInitialize(nullptr);

    HRCHECK(CoCreateInstance(CLSID_WICImagingFactory, nullptr, CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&factory)));
    HRCHECK(factory->CreateStream(&stream));
    HRCHECK(stream->InitializeFromFilename(filePath, GENERIC_WRITE));
    HRCHECK(factory->CreateEncoder(format, nullptr, &encoder));
    HRCHECK(encoder->Initialize(stream, WICBitmapEncoderNoCache));
    HRCHECK(encoder->CreateNewFrame(&frame, nullptr)); // we don't use options here
    HRCHECK(frame->Initialize(nullptr)); // we don't use any options here
    HRCHECK(frame->SetSize(width, height));
    HRCHECK(frame->SetPixelFormat(&pf));
    HRCHECK(frame->WritePixels(height, stride, stride * height, pixels));
    HRCHECK(frame->Commit());
    HRCHECK(encoder->Commit());

    cleanup:
    RELEASE(stream);
    RELEASE(frame);
    RELEASE(encoder);
    RELEASE(factory);
    if (coInit) CoUninitialize();

    // This part encodes the saved JPEG sequence (cap%d.jpg) into a video file
    VideoCapture in_capture("cap%d.jpg");

    Mat img;

    VideoWriter out_capture("video.avi", CV_FOURCC('M','J','P','G'), 1, Size(1920,1080));

    while (true)
    {
        in_capture >> img;
        if(img.empty())
            break;

        out_capture.write(img);
    }

    return hr;
}

On my Windows 10 machine I cannot reproduce the longer JPEG save times. In any case, the "vertical lines" issue probably comes from the fact that the DX off-screen surface is in ARGB (32bpp) format while JPEG is RGB-only (24bpp). Since we do a raw memory-to-memory copy, that cannot work. If you really want JPG, you have to convert the DX surface memory to RGB (which will take some time and lower performance...). You could also do it from the saved PNG files (WIC can do that). (A rough sketch of that conversion follows these comments.) - Simon Mourier
Does that mean I need to convert the images from PNG to JPEG? Could you show how to do that? - Fei Hap Lee
What about a DirectX file-writer filter? Could that be used to save the video? - xmedeko
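
A minimal sketch of the raw ARGB (32bpp) to RGB (24bpp) conversion Simon describes, assuming src, pitch, width and height come from the locked capture surface (the helper name and signature are hypothetical, not from the original post):

#include <vector>
#include <cstdint>

// Drop the alpha channel: pack each BGRA pixel (4 bytes) into BGR (3 bytes),
// respecting the surface pitch, so the result can be fed to a 24bpp JPEG encoder.
std::vector<uint8_t> StripAlpha(const uint8_t *src, UINT pitch, UINT width, UINT height)
{
    std::vector<uint8_t> bgr(width * height * 3);

    for (UINT y = 0; y < height; ++y)
    {
        const uint8_t *row = src + y * pitch;   // source rows are 'pitch' bytes apart
        uint8_t *out = &bgr[y * width * 3];     // destination rows are tightly packed

        for (UINT x = 0; x < width; ++x)
        {
            out[x * 3 + 0] = row[x * 4 + 0]; // B
            out[x * 3 + 1] = row[x * 4 + 1]; // G
            out[x * 3 + 2] = row[x * 4 + 2]; // R (the alpha byte is discarded)
        }
    }
    return bgr;
}
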
1 Answer


Your image is incorrect because the JPEG encoder (see JPEG Native Codec) only encodes three pixel formats (a conversion sketch follows this list):

  • GUID_WICPixelFormat8bppGray
  • GUID_WICPixelFormat24bppBGR
  • GUID_WICPixelFormat32bppCMYK
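
For illustration, a minimal sketch of letting WIC perform that conversion inside SavePixelsToFile32bppPBGRA; it reuses the factory, frame, width, height, stride and pixels variables and the HRCHECK/cleanup pattern from the question, and replaces the SetPixelFormat/WritePixels pair. This is an assumption on my side, not tested code:

IWICBitmap *bitmap = nullptr;
IWICFormatConverter *converter = nullptr;

// Wrap the raw 32bpp PBGRA capture buffer in a WIC bitmap.
HRCHECK(factory->CreateBitmapFromMemory(width, height,
    GUID_WICPixelFormat32bppPBGRA, stride, stride * height, pixels, &bitmap));

// Let WIC convert PBGRA -> 24bpp BGR, one of the formats the JPEG encoder accepts.
HRCHECK(factory->CreateFormatConverter(&converter));
HRCHECK(converter->Initialize(bitmap, GUID_WICPixelFormat24bppBGR,
    WICBitmapDitherTypeNone, nullptr, 0.0, WICBitmapPaletteTypeCustom));

// Write the converted pixels instead of the raw 32bpp buffer.
HRCHECK(frame->WriteSource(converter, nullptr));

// (remember to RELEASE(bitmap) and RELEASE(converter) in the cleanup block)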

Have you tried D3DXSaveSurfaceToFile? Is it too slow?
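
For reference, a rough sketch of that D3DXSaveSurfaceToFile approach (it assumes the legacy DirectX SDK, which provides d3dx9tex.h and d3dx9.lib; the wrapper function name is mine):

#include <d3dx9tex.h>
#pragma comment(lib, "d3dx9.lib")

// Save the off-screen plain surface filled by GetFrontBufferData directly as JPEG;
// D3DX handles the A8R8G8B8 -> JPEG conversion internally.
HRESULT SaveSurfaceAsJpeg(IDirect3DSurface9 *surface, LPCWSTR path)
{
    return D3DXSaveSurfaceToFileW(path, D3DXIFF_JPG, surface, nullptr, nullptr);
}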

EDIT

For your second option, I have written a program that uses Media Foundation to encode the screen capture to H.264:

  • It works on Windows Seven at a 1280 * 1024 resolution with reasonable performance, but I don't know whether it meets your requirements.
  • This program does not capture in real time; you may need to correct the timestamps passed to the sink writer. Consider using the Media Foundation clock (see the sketch after this list).
  • You can try changing the encoding format: MF_TRANSCODE_CONTAINERTYPE.
  • Note that encoders have resolution limits.
  • Changing MF_MT_AVG_BITRATE trades performance for quality. Set that value according to your requirements.
  • On my system the captured image is upside down, so I have to copy it the right way up (see #define REVERSE_IMAGE). It is perhaps slower than MFCopyImage, but after testing both implementations I am not really sure.
  • We do not need WIC, because the Media Foundation encoder handles the format coming from the d3d9 capture.
  • On Windows Seven, consider using IDirect3DDevice9Ex instead of IDirect3DDevice9.
  • I mixed your d3d9 capture code with this tutorial: Using the Sink Writer to Encode Video. The overall design of the program could also be improved.
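
As a rough illustration of the timestamp remark in the list above (an assumption on my side, not part of the tested program), the fixed rtStart increment in main() below could be replaced by times taken from the system clock, e.g. MFGetSystemTime, which returns 100-ns units:

MFTIME captureStart = MFGetSystemTime();

for (DWORD i = 0; i < VIDEO_FRAME_COUNT; ++i)
{
    // Timestamp of this frame = elapsed wall-clock time since capture started.
    LONGLONG rtStart = MFGetSystemTime() - captureStart;

    hr = WriteFrame(pDevice, pSurface, pSinkWriter, stream, rtStart, uiWidth, uiHeight);

    if (FAILED(hr))
        break;
}
// Note: WriteFrame still sets a fixed VIDEO_FRAME_DURATION on each sample,
// so the durations remain approximate with this scheme.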

Here is the code:

#include <Windows.h>
#include <mfapi.h>
#include <mfidl.h>
#include <Mfreadwrite.h>
#include <mferror.h>
#include <d3d9.h>

#pragma comment(lib, "mfreadwrite")
#pragma comment(lib, "mfplat")
#pragma comment(lib, "mfuuid")
#pragma comment(lib, "d3d9.lib")

template <class T> void SafeRelease(T **ppT){

    if(*ppT){
        (*ppT)->Release();
        *ppT = NULL;
    }
}

#define REVERSE_IMAGE

// Format constants
const UINT32 VIDEO_FPS = 30;
const UINT64 VIDEO_FRAME_DURATION = 10 * 1000 * 1000 / VIDEO_FPS; // sample duration in 100-ns units
const UINT32 VIDEO_BIT_RATE = 2000000;
const GUID   VIDEO_ENCODING_FORMAT = MFVideoFormat_H264;
const GUID   VIDEO_INPUT_FORMAT = MFVideoFormat_RGB32;
const UINT32 VIDEO_FRAME_COUNT = 5 * VIDEO_FPS;

HRESULT InitializeDirect3D9(IDirect3DDevice9** ppDevice, IDirect3DSurface9** ppSurface, UINT32& uiWidth, UINT32& uiHeight){

    IDirect3D9* d3d = NULL;

    d3d = Direct3DCreate9(D3D_SDK_VERSION);

    if(d3d == NULL)
        return E_POINTER;

    D3DDISPLAYMODE mode;
    HRESULT hr = d3d->GetAdapterDisplayMode(D3DADAPTER_DEFAULT, &mode);

    if(FAILED(hr)){
        SafeRelease(&d3d);
        return hr;
    }

    D3DPRESENT_PARAMETERS parameters = {0};

    parameters.Windowed = TRUE;
    parameters.BackBufferCount = 1;
    uiHeight = parameters.BackBufferHeight = mode.Height;
    uiWidth = parameters.BackBufferWidth = mode.Width;
    parameters.SwapEffect = D3DSWAPEFFECT_DISCARD;
    parameters.hDeviceWindow = NULL;

    hr = d3d->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, NULL, D3DCREATE_SOFTWARE_VERTEXPROCESSING, &parameters, ppDevice);

    if(FAILED(hr)){
        SafeRelease(&d3d);
        return hr;
    }

    hr = (*ppDevice)->CreateOffscreenPlainSurface(mode.Width, mode.Height, D3DFMT_A8R8G8B8, D3DPOOL_SYSTEMMEM, ppSurface, nullptr);

    SafeRelease(&d3d);

    return hr;
}

HRESULT InitializeSinkWriter(IMFSinkWriter **ppWriter, DWORD *pStreamIndex, const UINT32 uiWidth, const UINT32 uiHeight){

    *ppWriter = NULL;
    *pStreamIndex = NULL;

    IMFSinkWriter   *pSinkWriter = NULL;
    IMFMediaType    *pMediaTypeOut = NULL;
    IMFMediaType    *pMediaTypeIn = NULL;
    DWORD           streamIndex;

    HRESULT hr = MFCreateSinkWriterFromURL(L"output.mp4", NULL, NULL, &pSinkWriter);

    // Set the output media type.
    if(SUCCEEDED(hr)){
        hr = MFCreateMediaType(&pMediaTypeOut);
    }
    if(SUCCEEDED(hr)){
        hr = pMediaTypeOut->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
    }
    if(SUCCEEDED(hr)){
        hr = pMediaTypeOut->SetGUID(MF_MT_SUBTYPE, VIDEO_ENCODING_FORMAT);
    }
    if(SUCCEEDED(hr)){
        hr = pMediaTypeOut->SetUINT32(MF_MT_AVG_BITRATE, VIDEO_BIT_RATE);
    }
    if(SUCCEEDED(hr)){
        hr = pMediaTypeOut->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
    }
    if(SUCCEEDED(hr)){
        hr = MFSetAttributeSize(pMediaTypeOut, MF_MT_FRAME_SIZE, uiWidth, uiHeight);
    }
    if(SUCCEEDED(hr)){
        hr = MFSetAttributeRatio(pMediaTypeOut, MF_MT_FRAME_RATE, VIDEO_FPS, 1);
    }
    if(SUCCEEDED(hr)){
        hr = MFSetAttributeRatio(pMediaTypeOut, MF_MT_PIXEL_ASPECT_RATIO, 1, 1);
    }
    if(SUCCEEDED(hr)){
        hr = pSinkWriter->AddStream(pMediaTypeOut, &streamIndex);
    }

    // Set the input media type.
    if(SUCCEEDED(hr)){
        hr = MFCreateMediaType(&pMediaTypeIn);
    }
    if(SUCCEEDED(hr)){
        hr = pMediaTypeIn->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
    }
    if(SUCCEEDED(hr)){
        hr = pMediaTypeIn->SetGUID(MF_MT_SUBTYPE, VIDEO_INPUT_FORMAT);
    }
    if(SUCCEEDED(hr)){
        hr = pMediaTypeIn->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
    }
    if(SUCCEEDED(hr)){
        hr = MFSetAttributeSize(pMediaTypeIn, MF_MT_FRAME_SIZE, uiWidth, uiHeight);
    }
    if(SUCCEEDED(hr)){
        hr = MFSetAttributeRatio(pMediaTypeIn, MF_MT_FRAME_RATE, VIDEO_FPS, 1);
    }
    if(SUCCEEDED(hr)){
        hr = MFSetAttributeRatio(pMediaTypeIn, MF_MT_PIXEL_ASPECT_RATIO, 1, 1);
    }
    if(SUCCEEDED(hr)){
        hr = pSinkWriter->SetInputMediaType(streamIndex, pMediaTypeIn, NULL);
    }

    // Tell the sink writer to start accepting data.
    if(SUCCEEDED(hr)){
        hr = pSinkWriter->BeginWriting();
    }

    // Return the pointer to the caller.
    if(SUCCEEDED(hr)){

        *ppWriter = pSinkWriter;
        (*ppWriter)->AddRef();
        *pStreamIndex = streamIndex;
    }

    SafeRelease(&pSinkWriter);
    SafeRelease(&pMediaTypeOut);
    SafeRelease(&pMediaTypeIn);
    return hr;
}

HRESULT WriteFrame(IDirect3DDevice9* pDevice, IDirect3DSurface9* pSurface, IMFSinkWriter* pWriter, DWORD streamIndex, const LONGLONG& rtStart, const UINT32 uiWidth, const UINT32 uiHeight){

    HRESULT hr = pDevice->GetFrontBufferData(0, pSurface);

    if(FAILED(hr)){
        return hr;
    }

    D3DLOCKED_RECT rc;
    hr = pSurface->LockRect(&rc, NULL, 0);

    if(FAILED(hr)){
        return hr;
    }

    IMFSample *pSample = NULL;
    IMFMediaBuffer *pBuffer = NULL;

    const LONG cbWidth = 4 * uiWidth;
    const DWORD cbBuffer = cbWidth * uiHeight;

    BYTE *pData = NULL;

    // Create a new memory buffer.
    hr = MFCreateMemoryBuffer(cbBuffer, &pBuffer);

    // Lock the buffer and copy the video frame to the buffer.
    if(SUCCEEDED(hr)){
        hr = pBuffer->Lock(&pData, NULL, NULL);
    }

    if(SUCCEEDED(hr)){

#ifdef REVERSE_IMAGE
        for(int i = 0, j = uiHeight - 1; i < uiHeight; i++, j--)
            for(int k = 0; k < cbWidth; k++)
                    pData[(i * cbWidth) + k] = ((BYTE*)rc.pBits)[(j * cbWidth) + k];
#else
        hr = MFCopyImage(pData, cbWidth, (BYTE*)rc.pBits, rc.Pitch, cbWidth, uiHeight);
#endif
    }

    if(pBuffer){
        pBuffer->Unlock();
    }

    // Set the data length of the buffer.
    if(SUCCEEDED(hr)){
        hr = pBuffer->SetCurrentLength(cbBuffer);
    }

    // Create a media sample and add the buffer to the sample.
    if(SUCCEEDED(hr)){
        hr = MFCreateSample(&pSample);
    }

    if(SUCCEEDED(hr)){
        hr = pSample->AddBuffer(pBuffer);
    }

    // Set the time stamp and the duration.
    if(SUCCEEDED(hr)){
        hr = pSample->SetSampleTime(rtStart);
    }

    if(SUCCEEDED(hr)){
        hr = pSample->SetSampleDuration(VIDEO_FRAME_DURATION);
    }

    // Send the sample to the Sink Writer.
    if(SUCCEEDED(hr)){
        hr = pWriter->WriteSample(streamIndex, pSample);
    }

    hr = pSurface->UnlockRect();

    SafeRelease(&pSample);
    SafeRelease(&pBuffer);
    return hr;
}

void main(){

    HRESULT hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED);

    if(SUCCEEDED(hr)){

        hr = MFStartup(MF_VERSION);

        if(SUCCEEDED(hr)){

            UINT32 uiWidth = 0;
            UINT32 uiHeight = 0;

            IDirect3DDevice9* pDevice = NULL;
            IDirect3DSurface9* pSurface = NULL;

            hr = InitializeDirect3D9(&pDevice, &pSurface, uiWidth, uiHeight);

            if(SUCCEEDED(hr)){

                IMFSinkWriter *pSinkWriter = NULL;
                DWORD stream;

                hr = InitializeSinkWriter(&pSinkWriter, &stream, uiWidth, uiHeight);

                if(SUCCEEDED(hr)){

                    LONGLONG rtStart = 0;

                    for(DWORD i = 0; i < VIDEO_FRAME_COUNT; ++i){

                        hr = WriteFrame(pDevice, pSurface, pSinkWriter, stream, rtStart, uiWidth, uiHeight);

                        if(FAILED(hr)){
                            break;
                        }

                        rtStart += VIDEO_FRAME_DURATION;
                    }
                }

                if(SUCCEEDED(hr)){
                    hr = pSinkWriter->Finalize();
                }

                SafeRelease(&pSinkWriter);
            }

            SafeRelease(&pDevice);
            SafeRelease(&pSurface);
            MFShutdown();
        }

        CoUninitialize();
    }
}

Just a small improvement: you can set MF_MT_INTERLACE_MODE to MFVideoInterlace_FieldInterleavedUpperFirst and remove #ifdef REVERSE_IMAGE to improve performance! - thedemons
Are you sure? MFVideoInterlace_FieldInterleavedUpperFirst relates to interlaced modes and does not flip the image. - mofo77
I don't know, I'm not familiar with this, but it worked for me. - thedemons
