Is there a way to capture Metal output as a bitmap or other format suitable for printing?
I'm converting an application from OpenGL to Metal using MTKit. The original app lets the user print the OpenGL-generated screen they are viewing (I also did this with Core Graphics). I'd like to do the same thing in Metal. I assumed it would be easy to find a solution, or at least a way to work around the problem, but I'm stuck. Has anyone solved this, or does anyone know what I should be looking for?
I'm an experienced Mac programmer (I'd prefer an Objective-C solution, but I've dabbled in just about everything else) and an advanced beginner at Metal.
- macOS 10.15.3, all MacBooks.
- Swift 5.
- Xcode 11.2.1.
I assume you want to save an image from an MTKView, but this function should work with any texture. Also, don't forget to set the framebufferOnly = false property on the view.
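For example, wherever the MTKView is configured (the view variable below is illustrative, not from the original; the pixel format line is included only because the BGRA-to-RGBA swap in SaveTexture assumes bgra8Unorm):

// Illustrative setup code (names are placeholders, not from the original answer)
MTKView *mtkView = (MTKView *)self.view;
mtkView.framebufferOnly = NO;                        // allow getBytes: readback of the drawable's texture
mtkView.colorPixelFormat = MTLPixelFormatBGRA8Unorm; // SaveTexture's channel swap assumes BGRA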
// Set to true to capture the next rendered frame. Declared outside the method
// so the completion handler below can read and modify it.
bool takeScreenshot = true;

/// Called whenever the view needs to render a frame
- (void)drawInMTKView:(nonnull MTKView *)view
{
    // Create a new command buffer for each render pass to the current drawable
    id<MTLCommandBuffer> commandBuffer = [_commandQueue commandBuffer];
    commandBuffer.label = @"MyCommand";

    // Obtain a renderPassDescriptor generated from the view's drawable textures
    MTLRenderPassDescriptor *renderPassDescriptor = view.currentRenderPassDescriptor;

    id<MTLTexture> currentSwapChainTexture = view.currentDrawable.texture;

    // Your render code...

    [commandBuffer addCompletedHandler:^(id<MTLCommandBuffer> cb)
    {
        if (takeScreenshot)
        {
            SaveTexture(currentSwapChainTexture);
            takeScreenshot = false;
        }
    }];

    // Finalize rendering here & push the command buffer to the GPU
    [commandBuffer commit];

    // CPU <-> GPU synchronization: block until the frame (and the screenshot) is done
    if (takeScreenshot) [commandBuffer waitUntilCompleted];
}
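The answer doesn't show how takeScreenshot gets set; a typical trigger would be an action wired to a menu item or toolbar button, along these lines (the action name is a placeholder):

// Hypothetical trigger (not from the original answer): the next completed frame
// will then be handed to SaveTexture() by the addCompletedHandler block above.
- (IBAction)captureCurrentFrame:(id)sender
{
    takeScreenshot = true;
}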
The SaveTexture function:
// Needs <Metal/Metal.h>, <Accelerate/Accelerate.h>, <Photos/Photos.h> and <Cocoa/Cocoa.h>
void SaveTexture(id<MTLTexture> texture)
{
    int width = (int) texture.width;
    int height = (int) texture.height;
    int bytePerPixel = 4;                               // BGRA8Unorm: 4 bytes per pixel
    int bytesPerRow = width * bytePerPixel;
    int bytesCount = width * height * bytePerPixel;
    int bitsPerComponent = 8;

    void *imageBytes = malloc(bytesCount);
    void *destBytes = malloc(bytesCount);

    // Read the texture contents back to the CPU
    MTLRegion mtlregion = MTLRegionMake2D(0, 0, width, height);
    [texture getBytes:imageBytes bytesPerRow:bytesPerRow fromRegion:mtlregion mipmapLevel:0];

    vImage_Buffer src;
    src.data = imageBytes;
    src.width = width;
    src.height = height;
    src.rowBytes = bytesPerRow;

    vImage_Buffer dest;
    dest.data = destBytes;
    dest.width = width;
    dest.height = height;
    dest.rowBytes = bytesPerRow;

    // BGRA -> RGBA (swap the red and blue channels)
    const uint8_t map[4] = {2, 1, 0, 3};
    vImagePermuteChannels_ARGB8888(&src, &dest, map, kvImageNoFlags);

    // Wrap the pixels in a CGImage; use kCGColorSpaceSRGB instead if you render in sRGB
    CGColorSpaceRef cgColorSpaceRef = CGColorSpaceCreateWithName(kCGColorSpaceDisplayP3);
    CGBitmapInfo bitmapInfo = kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big;
    CGContextRef context = CGBitmapContextCreate(destBytes, width, height, bitsPerComponent, bytesPerRow, cgColorSpaceRef, bitmapInfo);
    CGImageRef cgImage = CGBitmapContextCreateImage(context);

    // Your NSImage
    NSImage *image = [[NSImage alloc] initWithCGImage:cgImage size:NSZeroSize];

    // Save to the Photos library; the image is retained by the block, so it survives
    // until the asynchronous request finishes
    [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^
    {
        [PHAssetCreationRequest creationRequestForAssetFromImage:image];
    }
    completionHandler:^(BOOL success, NSError *error)
    {
        if (success) printf("Success\n");
    }];

    free(imageBytes);
    free(destBytes);
    CGColorSpaceRelease(cgColorSpaceRef);
    CGContextRelease(context);
    CGImageRelease(cgImage);
}
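Since the original question is about printing rather than saving to Photos, a minimal sketch of handing the resulting NSImage to the standard AppKit print pipeline might look like the following. This is not part of the answer above; the method name and pagination settings are assumptions, and because the Metal completion handler runs off the main thread, you would dispatch the call onto the main queue before showing the print panel.

// Hypothetical printing helper (not from the original answer): wraps the captured
// NSImage in an NSImageView and runs a standard print operation on it.
- (void)printCapturedImage:(NSImage *)image
{
    NSImageView *imageView = [[NSImageView alloc]
        initWithFrame:NSMakeRect(0, 0, image.size.width, image.size.height)];
    imageView.image = image;
    imageView.imageScaling = NSImageScaleProportionallyDown;

    NSPrintInfo *printInfo = [[NSPrintInfo sharedPrintInfo] copy];
    printInfo.horizontalPagination = NSFitPagination;   // scale the view to fit one page
    printInfo.verticalPagination = NSFitPagination;

    NSPrintOperation *op = [NSPrintOperation printOperationWithView:imageView
                                                          printInfo:printInfo];
    [op runOperation];   // shows the print panel; use the modal-for-window variant for a sheet
}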