在OSX中编程实现自动截屏

6
我将把一些针对Linux的屏幕截图代码(C++)移植到OSX。当前解决方案在xvfb中运行图形应用程序,然后使用xlib从显示器中获取屏幕截图。(如果我们没有xvfb也可以支持这种方式)
因此,我了解到OSX正在远离X11,我的问题是除了xlib之外还有什么实现方法?我找到了Quartz Display Services。现在使用它是否合适?它能与xvfb配合使用吗?
3个回答

4

是的,您可以将 Application Services 框架链接到您的 C++ 工具,从而调用像 CGDisplayCreateImage 这样的函数(这里为您提供了文档链接)。


它能截取Xvfb的屏幕吗?未来版本中Xvfb会存在吗? - Zitrax
我不能百分之百确定,但是那个特定函数的描述说“返回一个包含指定显示内容的图像。”所以我的理解是,无论屏幕上显示什么,无论是xvfb、Safari还是Photoshop等,最终都会出现在捕获的图像缓冲区中。 - Michael Dautermann
我可以补充一下,截图Xvfb并没有起作用。所以也许在Mac上还有另一个可以使用的帧缓冲区。但目前我可以不用它来管理。 - Zitrax
我正在使用CGDisplayCreateImage(CGMainDisplayID())来捕获屏幕。当我们快速切换到另一个用户时,它仍然从第一个用户那里捕获屏幕。在第二个用户上,CGMainDisplayID()返回1104977152。有任何想法为什么会发生这种情况吗? - Seema Kadavan
这听起来像是一个独立的、新的问题 @SeemaKadavan - Michael Dautermann
发布了新的查询 - http://stackoverflow.com/questions/31475656/issues-with-screen-capture-on-os-x-cgdisplaycreateimage - Seema Kadavan

1
我已经编写了一个示例,用于捕获电脑显示屏并转换为OpenCV Mat。
#include <iostream>
#include <opencv2/opencv.hpp>
#include <unistd.h>
#include <stdio.h>
#include <ApplicationServices/ApplicationServices.h>

using namespace std;
using namespace cv;

int main (int argc, char * const argv[])
{
    size_t width = CGDisplayPixelsWide(CGMainDisplayID());
    size_t height = CGDisplayPixelsHigh(CGMainDisplayID());

    Mat im(cv::Size(width,height), CV_8UC4);
    Mat bgrim(cv::Size(width,height), CV_8UC3);
    Mat resizedim(cv::Size(width,height), CV_8UC3);

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef contextRef = CGBitmapContextCreate(
                                                    im.data, im.cols, im.rows,
                                                    8, im.step[0],
                                                    colorSpace,    kCGImageAlphaPremultipliedLast|kCGBitmapByteOrderDefault);

    while (true)
    {
        CGImageRef imageRef = CGDisplayCreateImage(CGMainDisplayID());
        CGContextDrawImage(contextRef,
                           CGRectMake(0, 0, width, height),
                           imageRef);
        cvtColor(im, bgrim, CV_RGBA2BGR);
        resize(bgrim, resizedim,cv::Size(),0.5,0.5);
        imshow("test", resizedim);
        cvWaitKey(10);
        CGImageRelease(imageRef);
    }

//    CGContextRelease(contextRef);
//    CGColorSpaceRelease(colorSpace);

    return 0;
}

然后,结果就在这里了。(此处原文附有结果截图)

我本来以为会捕获我当前显示的屏幕内容,但实际上只捕获到了桌面壁纸。CGMainDisplayID() 所指向的显示器可能正是问题的线索。

无论如何,我希望这可以稍微接近你的目标。


0
// Capture the main display via Quartz and dump the raw pixel bytes to a file.
// `path` defaults to the originally hard-coded location, so existing callers
// are unaffected; new callers may choose a destination.
//
// Fixes over the original:
//  - the original used the non-standard `uint8` type and an uncast malloc(),
//    which does not compile as C++; we write straight from the CFData buffer
//    instead, removing the extra allocation and copy entirely;
//  - "wb" instead of "w+": the data is raw binary, not text;
//  - NULL checks on the capture, the pixel copy, and fopen().
void captureScreen(const char *path = "/Users/username/Desktop/screencap.raw")
{
    CGImageRef image_ref = CGDisplayCreateImage(CGMainDisplayID());
    if (image_ref == NULL)
        return;  // capture failed; nothing to write

    CGDataProviderRef provider = CGImageGetDataProvider(image_ref);
    CFDataRef dataref = CGDataProviderCopyData(provider);
    if (dataref == NULL)
    {
        CGImageRelease(image_ref);
        return;
    }

    size_t width  = CGImageGetWidth(image_ref);
    size_t height = CGImageGetHeight(image_ref);
    size_t bpp    = CGImageGetBitsPerPixel(image_ref) / 8;

    // NOTE(review): like the original, this writes width*height*bpp bytes and
    // assumes rows are tightly packed; if CGImageGetBytesPerRow > width*bpp
    // the output will be skewed -- confirm for the displays you target.
    FILE *stream = fopen(path, "wb");
    if (stream != NULL)
    {
        fwrite(CFDataGetBytePtr(dataref), bpp, width * height, stream);
        fclose(stream);
    }

    CFRelease(dataref);
    CGImageRelease(image_ref);
}

或者在C#中:

// https://dev59.com/AknSa4cB1Zd3GeqPQLXO
// https://github.com/Acollie/C-Screenshot-OSX/blob/master/C%2B%2B-screenshot/C%2B%2B-screenshot/main.cpp
// https://github.com/ScreenshotMonitor/ScreenshotCapture/blob/master/src/Pranas.ScreenshotCapture/ScreenshotCapture.cs
// https://screenshotmonitor.com/blog/capturing-screenshots-in-net-and-mono/
namespace rtaStreamingServer
{

    // https://github.com/xamarin/xamarin-macios


    // https://qiita.com/shimshimkaz/items/18bcf4767143ea5897c7

    /// <summary>
    /// Captures the main macOS screen from C#/Xamarin.Mac by P/Invoking the
    /// CoreGraphics framework's CGDisplayCreateImage, then converts the result
    /// to PNG bytes via AppKit's NSBitmapImageRep.
    /// </summary>
    public static class OSxScreenshot
    {

        // Path of the CoreGraphics native library inside the framework bundle.
        private const string LIBCOREGRAPHICS = "/System/Library/Frameworks/CoreGraphics.framework/CoreGraphics";

        // Native: CGImageRef CGDisplayCreateImage(CGDirectDisplayID displayID);
        // Returns a +1 retained image containing the contents of the display.
        [System.Runtime.InteropServices.DllImport(LIBCOREGRAPHICS)]
        private static extern System.IntPtr CGDisplayCreateImage(System.UInt32 displayId);

        // Native: void CFRelease(CFTypeRef cf); -- drops one CoreFoundation retain.
        [System.Runtime.InteropServices.DllImport(LIBCOREGRAPHICS)]
        private static extern void CFRelease(System.IntPtr handle);


        /// <summary>
        /// Demo entry point: captures the main screen, encodes it as PNG and
        /// copies the PNG bytes into a managed byte array.
        /// </summary>
        public static void TestCapture()
        {
            // The "NSScreenNumber" entry of DeviceDescription holds the screen's
            // CGDirectDisplayID, which is what CGDisplayCreateImage expects.
            Foundation.NSNumber mainScreen = (Foundation.NSNumber)AppKit.NSScreen.MainScreen.DeviceDescription["NSScreenNumber"];

            using (CoreGraphics.CGImage cgImage = CreateImage(mainScreen.UInt32Value))
            {
                // https://stackoverflow.com/questions/17334786/get-pixel-from-the-screen-screenshot-in-max-osx/17343305#17343305

                // Get byte-array from CGImage
                // https://gist.github.com/zhangao0086/5fafb1e1c0b5d629eb76

                AppKit.NSBitmapImageRep bitmapRep = new AppKit.NSBitmapImageRep(cgImage);

                // Swift equivalent of the call below:
                // var imageData = bitmapRep.representationUsingType(NSBitmapImageFileType.NSPNGFileType, properties: [:])
                Foundation.NSData imageData = bitmapRep.RepresentationUsingTypeProperties(AppKit.NSBitmapImageFileType.Png);

                // Pin a managed array and let NSData copy the PNG bytes into it.
                long len = imageData.Length;
                byte[] bytes = new byte[len];
                System.Runtime.InteropServices.GCHandle pinnedArray = System.Runtime.InteropServices.GCHandle.Alloc(bytes, System.Runtime.InteropServices.GCHandleType.Pinned);
                System.IntPtr pointer = pinnedArray.AddrOfPinnedObject();
                // Do your stuff...
                imageData.GetBytes(pointer, new System.IntPtr(len));
                pinnedArray.Free();

                // NOTE(review): the NSImage below is created but never used beyond
                // the commented-out experiments -- it can likely be removed.
                using (AppKit.NSImage nsImage = new AppKit.NSImage(cgImage, new System.Drawing.SizeF(cgImage.Width, cgImage.Height)))
                {
                    // ImageView.Image = nsImage;
                    // And now ? How to get the image bytes ? 

                    // https://theconfuzedsourcecode.wordpress.com/2016/02/24/convert-android-bitmap-image-and-ios-uiimage-to-byte-array-in-xamarin/
                    // https://dev59.com/uVbTa4cB1Zd3GeqP8Cq9
                    // https://stackoverflow.com/questions/53060723/nsimage-source-from-byte-array-cocoa-app-xamarin-c-sharp
                    // https://gist.github.com/zhangao0086/5fafb1e1c0b5d629eb76
                    // https://www.quora.com/What-is-a-way-to-convert-UIImage-to-a-byte-array-in-Swift?share=1
                    // https://dev59.com/hWQn5IYBdhLWcg3wETzj

                } // End Using nsImage 

            } // End Using cgImage 

        } // End Sub TestCapture 


        /// <summary>
        /// Calls CGDisplayCreateImage and wraps the native handle in a managed CGImage.
        /// </summary>
        /// <param name="displayId">The CGDirectDisplayID of the display to capture.</param>
        /// <returns>A managed CGImage; dispose it when finished.</returns>
        public static CoreGraphics.CGImage CreateImage(System.UInt32 displayId)
        {
            System.IntPtr handle = System.IntPtr.Zero;

            try
            {
                handle = CGDisplayCreateImage(displayId);
                // NOTE(review): this pattern assumes the CGImage(IntPtr) binding
                // retains the handle, so the CFRelease in the finally block balances
                // CGDisplayCreateImage's +1. If the binding does NOT retain, the
                // image is released while still in use -- confirm against the
                // Xamarin CoreGraphics binding before relying on this.
                return new CoreGraphics.CGImage(handle);
            }
            finally
            {
                if (handle != System.IntPtr.Zero)
                {
                    CFRelease(handle);
                }
            }
        } // End Sub CreateImage 


    } // End Class OSxScreenshot 


} // End Namespace rtaStreamingServer 

网页内容由 Stack Overflow 提供,点击上面的原文链接可以查看英文原文。