#include <Windows.h>
#include <stdio.h>
#include <stdint.h>
#include <d3d9.h>
//#include <D3dx9tex.h>
extern "C" {
#include "libswscale/swscale.h"
#include "libavutil/pixdesc.h"
#include "libavutil/imgutils.h"
#include "libavformat/avformat.h"
}
#pragma comment(lib, "D3d9.lib")
//#pragma comment(lib, "D3dx9.lib")
#pragma comment(lib, "winmm.lib")
#pragma comment(lib, "avformat.lib")
#pragma comment(lib, "avcodec.lib")
#pragma comment(lib, "avutil.lib")
#pragma comment(lib, "swscale.lib")
void SaveFrameToJpg(AVFrame *frame)
{
AVFormatContext *ofmtCtx = NULL;
AVCodec *codec = NULL;
AVCodecContext *codecCtx = NULL;
AVStream *stream = NULL;
int ret;
/* allocate the output media context */
avformat_alloc_output_context2(&ofmtCtx, NULL, NULL, "aa.jpg");
if (!ofmtCtx) {
return;
}
codec = avcodec_find_encoder(AV_CODEC_ID_MJPEG);
if (!codec) {
return;
}
stream = avformat_new_stream(ofmtCtx, NULL);
if (!stream) {
return;
}
codecCtx = avcodec_alloc_context3(codec);
if (!codecCtx) {
return;
}
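/* MJPEG expects the full-range JPEG variant of YUV 4:2:0 (AV_PIX_FMT_YUVJ420P) */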
codecCtx->pix_fmt = AV_PIX_FMT_YUVJ420P;
codecCtx->bit_rate = 400000;
codecCtx->width = frame->width;
codecCtx->height = frame->height;
codecCtx->time_base.num = 1;
codecCtx->time_base.den = 25;
/* open the codec */
ret = avcodec_open2(codecCtx, codec, NULL);
if (ret < 0) {
return;
}
/* copy the codec parameters to the stream so the muxer knows what it is writing */
ret = avcodec_parameters_from_context(stream->codecpar, codecCtx);
if (ret < 0) {
return;
}
/* open the output file, unless the muxer manages its own I/O (AVFMT_NOFILE) */
if (!(ofmtCtx->oformat->flags & AVFMT_NOFILE)) {
ret = avio_open(&ofmtCtx->pb, "aa.jpg", AVIO_FLAG_WRITE);
if (ret < 0) {
return;
}
}
/* Write the stream header, if any. */
ret = avformat_write_header(ofmtCtx, NULL);
if (ret < 0) {
return;
}
AVPacket avpkt = { 0 };
av_init_packet(&avpkt);
/* encode the image */
ret = avcodec_send_frame(codecCtx, frame);
if (ret < 0) {
return;
}
ret = avcodec_receive_packet(codecCtx, &avpkt);
if (ret < 0) {
return;
}
av_write_frame(ofmtCtx, &avpkt);
av_packet_unref(&avpkt);
/* Write the trailer, if any. The trailer must be written before the codec
* context that was open when the header was written is freed; otherwise
* av_write_trailer() may try to use memory that has already been released. */
av_write_trailer(ofmtCtx);
/* Close the output file. */
avio_closep(&ofmtCtx->pb);
avcodec_free_context(&codecCtx);
/* free the stream */
avformat_free_context(ofmtCtx);
return;
}
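/*
* capture: copy the front buffer (the visible desktop) into a system-memory
* surface, convert its BGRA pixels to planar YUV 4:2:0 with libswscale,
* and hand the result to SaveFrameToJpg().
*/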
HRESULT capture(IDirect3DDevice9* d3ddev9)
{
IDirect3DSurface9 *destTarget = NULL;
HRESULT hr;
int screenWidth = GetSystemMetrics(SM_CXSCREEN);
int screenHeight = GetSystemMetrics(SM_CYSCREEN);
/* GetFrontBufferData requires an offscreen plain surface created in D3DPOOL_SYSTEMMEM */
hr = d3ddev9->CreateOffscreenPlainSurface(screenWidth, screenHeight, D3DFMT_A8R8G8B8,
D3DPOOL_SYSTEMMEM, &destTarget, NULL);
if (FAILED(hr))
{
return hr;
}
hr = d3ddev9->GetFrontBufferData(0, destTarget);
//D3DXSaveSurfaceToFile(L"Desktop.jpg", D3DXIFF_JPG, destTarget, NULL, NULL);
D3DLOCKED_RECT lr;
ZeroMemory(&lr, sizeof(D3DLOCKED_RECT));
hr = destTarget->LockRect(&lr, 0, D3DLOCK_READONLY);
if (FAILED(hr))
{
destTarget->Release();
return hr;
}
if (lr.pBits)
{
uint8_t *dst_data[4] = { 0 };
int dst_linesize[4];
uint8_t *src_data[4] = { 0 };
int src_linesize[4];
struct SwsContext *sws_ctx = NULL;
AVFrame *frame = NULL;
int ret;
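/* the locked surface is 32-bit BGRA; convert it to full-range 4:2:0 YUV for the MJPEG encoder */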
sws_ctx = sws_getContext(screenWidth, screenHeight, AV_PIX_FMT_BGRA,
screenWidth, screenHeight, AV_PIX_FMT_YUVJ420P,
SWS_BILINEAR, NULL, NULL, NULL);
if ((ret = av_image_alloc(dst_data, dst_linesize,
screenWidth, screenHeight, AV_PIX_FMT_YUVJ420P, 1)) < 0)
{
fprintf(stderr, "Could not allocate destination image\n");
sws_freeContext(sws_ctx);
destTarget->UnlockRect();
destTarget->Release();
return E_FAIL;
}
frame = av_frame_alloc();
frame->format = AV_PIX_FMT_YUVJ420P;
frame->width = screenWidth;
frame->height = screenHeight;
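/* the locked rect is a single packed BGRA plane; lr.Pitch is the row stride in bytes */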
src_data[0] = (uint8_t*)lr.pBits;
src_linesize[0] = lr.Pitch;
sws_scale(sws_ctx, src_data,
src_linesize, 0, screenHeight, dst_data, dst_linesize);
frame->data[0] = dst_data[0];
frame->data[1] = dst_data[1];
frame->data[2] = dst_data[2];
frame->linesize[0] = dst_linesize[0];
frame->linesize[1] = dst_linesize[1];
frame->linesize[2] = dst_linesize[2];
SaveFrameToJpg(frame);
av_freep(&dst_data[0]);
av_frame_free(&frame);
sws_freeContext(sws_ctx);
}
hr = destTarget->UnlockRect();
destTarget->Release();
return hr;
}
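/*
* CaptureLoop: create a windowed D3D9 device on the given window, take one
* screenshot with capture(), then pump messages until WM_QUIT arrives.
*/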
int CaptureLoop(HWND hwnd)
{
IDirect3D9* d3d9 = NULL;
IDirect3DDevice9* d3ddev9 = NULL;
D3DPRESENT_PARAMETERS d3dpp = { 0 };
d3d9 = Direct3DCreate9(D3D_SDK_VERSION);
if (!d3d9) return 0;
D3DDISPLAYMODE dispMode = { 0 };
d3d9->GetAdapterDisplayMode(D3DADAPTER_DEFAULT, &dispMode);
d3dpp.Windowed = TRUE;
d3dpp.SwapEffect = D3DSWAPEFFECT_DISCARD;
d3dpp.hDeviceWindow = hwnd;
d3dpp.BackBufferFormat = dispMode.Format;
HRESULT hr = d3d9->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, hwnd, D3DCREATE_SOFTWARE_VERTEXPROCESSING, &d3dpp, &d3ddev9);
if (FAILED(hr) || !d3ddev9)
{
d3d9->Release();
return 0;
}
capture(d3ddev9);
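/* standard message pump: runs until WindowProcedure posts WM_QUIT */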
MSG msg;
memset(&msg, 0, sizeof(msg));
while (msg.message != WM_QUIT)
{
if (PeekMessage(&msg, 0, 0, 0, PM_REMOVE))
{
TranslateMessage(&msg);
DispatchMessage(&msg);
}
}
if (d3ddev9) d3ddev9->Release();
if (d3d9) d3d9->Release();
return (int)msg.wParam;
}
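/* minimal window procedure: post WM_QUIT when the window is destroyed */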
LRESULT CALLBACK WindowProcedure(HWND hwnd, UINT message, WPARAM wParam, LPARAM lParam)
{
switch (message)
{
case WM_DESTROY:
PostQuitMessage(0);
break;
default:
return DefWindowProc(hwnd, message, wParam, lParam);
}
return 0;
}
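/*
* WinMain: register the window class, create the host window, initialize
* libavformat (av_register_all), and enter CaptureLoop().
*/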
int WINAPI WinMain(HINSTANCE hThisInstance, HINSTANCE hPrevInstance, LPSTR lpszArgument, int nCmdShow)
{
WNDCLASSEX wincl;
wincl.hInstance = hThisInstance;
wincl.lpszClassName = L"D3DWindow";
wincl.lpfnWndProc = WindowProcedure;
wincl.style = CS_DBLCLKS;
wincl.cbSize = sizeof(WNDCLASSEX);
wincl.hIcon = LoadIcon(NULL, IDI_APPLICATION);
wincl.hIconSm = LoadIcon(NULL, IDI_APPLICATION);
wincl.hCursor = LoadCursor(NULL, IDC_ARROW);
wincl.lpszMenuName = NULL;
wincl.cbClsExtra = 0;
wincl.cbWndExtra = 0;
wincl.hbrBackground = (HBRUSH)(COLOR_BACKGROUND + 1);
if (!RegisterClassEx(&wincl)) return 0;
HWND hwnd = CreateWindowEx(0, L"D3DWindow", L"D3D9: Window", WS_OVERLAPPEDWINDOW, CW_USEDEFAULT, CW_USEDEFAULT, 544, 375, HWND_DESKTOP, NULL, hThisInstance, NULL);
if (!hwnd) return 0;
ShowWindow(hwnd, nCmdShow);
av_register_all();
return CaptureLoop(hwnd);
}
References
Some common screen-capture techniques for Windows desktop sharing
http://www.cppblog.com/weiym/archive/2013/12/01/204536.html
https://www.codeproject.com/Articles/5051/Various-methods-for-capturing-the-screen
Direct download links for some of the DirectX SDK headers and library files
https://github.com/brian9206/docker-dxsdk/tree/master/DXSDK