Camera view does not appear in a Swift/Objective-C++ (OpenCV) project - iOS 10.3, Xcode 8
I would like to link OpenCV into a Swift/Objective-C++ project to develop applications for iOS. I found that CocoaPods works reasonably well with the OpenCV pod, so I used it as a starting point and successfully tried some image-stitching examples. However, when I try to capture images from the camera, I cannot see any output on the display. The code runs and keeps looping through the captureOutput function, but the camera image is never displayed. It is as if the code were running in the background:

Objective-C++ code:
@interface VideoSource () <AVCaptureVideoDataOutputSampleBufferDelegate>

@property (strong, nonatomic) AVCaptureVideoPreviewLayer *previewLayer;
@property (strong, nonatomic) AVCaptureSession *captureSession;

@end

@implementation VideoSource

- (void)setTargetView:(UIView *)targetView {
    if (self.previewLayer == nil) {
        return;
    }
    self.previewLayer.contentsGravity = kCAGravityResizeAspectFill;
    self.previewLayer.frame = targetView.bounds;
    self.previewLayer.affineTransform = CGAffineTransformMakeRotation(M_PI / 2);
    [targetView.layer addSublayer:self.previewLayer];
    std::cout << "VideoSource setTargetView ... done " << std::endl;
}
- (instancetype)init
{
    self = [super init];
    if (self) {
        _captureSession = [[AVCaptureSession alloc] init];
        _captureSession.sessionPreset = AVCaptureSessionPreset640x480;

        AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        NSError *error = nil;
        AVCaptureDeviceInput *input = [[AVCaptureDeviceInput alloc] initWithDevice:device error:&error];
        [_captureSession addInput:input];

        AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
        output.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
        output.alwaysDiscardsLateVideoFrames = YES;
        [_captureSession addOutput:output];

        dispatch_queue_t queue = dispatch_queue_create("VideoQueue", DISPATCH_QUEUE_SERIAL);
        [output setSampleBufferDelegate:self queue:queue];

        _previewLayer = [AVCaptureVideoPreviewLayer layer];
        std::cout << "VideoSource init ... done " << std::endl;
    }
    return self;
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    uint8_t *base = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    int width = (int)CVPixelBufferGetWidth(imageBuffer);
    int height = (int)CVPixelBufferGetHeight(imageBuffer);
    int bytesPerRow = (int)CVPixelBufferGetBytesPerRow(imageBuffer);

    // Pass the row stride explicitly; pixel buffers may pad rows beyond width * 4
    Mat mat = Mat(height, width, CV_8UC4, base, bytesPerRow);

    //Processing here
    [self.delegate processFrame:mat];

    CGImageRef imageRef = [self CGImageFromCVMat:mat];
    dispatch_sync(dispatch_get_main_queue(), ^{
        self.previewLayer.contents = (__bridge id)imageRef;
    });
    CGImageRelease(imageRef);

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    std::cout << "VideoSource captureOutput ... done " << std::endl;
}
- (void)start {
    [self.captureSession startRunning];
    std::cout << "VideoSource start ... done " << std::endl;
}
- (CGImageRef)CGImageFromCVMat:(Mat)cvMat {
    if (cvMat.elemSize() == 4) {
        cv::cvtColor(cvMat, cvMat, COLOR_BGRA2RGBA);
    }
    NSData *data = [NSData dataWithBytes:cvMat.data length:cvMat.elemSize() * cvMat.total()];

    CGColorSpaceRef colorSpace;
    if (cvMat.elemSize() == 1) {
        colorSpace = CGColorSpaceCreateDeviceGray();
    } else {
        colorSpace = CGColorSpaceCreateDeviceRGB();
    }
    CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);

    // Creating CGImage from cv::Mat
    CGImageRef imageRef = CGImageCreate(cvMat.cols,                  // width
                                        cvMat.rows,                  // height
                                        8,                           // bits per component
                                        8 * cvMat.elemSize(),        // bits per pixel
                                        cvMat.step[0],               // bytes per row
                                        colorSpace,                  // color space
                                        kCGImageAlphaNone | kCGBitmapByteOrderDefault, // bitmap info
                                        provider,                    // data provider
                                        NULL,                        // decode
                                        false,                       // should interpolate
                                        kCGRenderingIntentDefault);  // rendering intent

    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);
    //std::cout << "VideoSource CGImageFromCVMat ... done " << std::endl;
    return imageRef;
}
@end
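Swift cannot import this Objective-C++ file directly; it sees it through a plain Objective-C header (free of C++ types) listed in the project's bridging header. Below is a minimal sketch of what the Wrapper used later might expose; the exact interface is an assumption inferred from the Swift calls that follow, not the original project's header:

// Wrapper.h -- plain Objective-C interface, safe to import from Swift
// (assumed shape; the real project may wrap VideoSource differently)
#import <UIKit/UIKit.h>

@interface Wrapper : NSObject
- (void)setTargetView:(UIView *)targetView;  // hands the preview container to VideoSource
- (void)start;                               // starts the capture session
@end

// Project-Bridging-Header.h
#import "Wrapper.h"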
Swift side:
@IBOutlet var spinner: UIActivityIndicatorView!
@IBOutlet weak var previewView: UIView!

let wrapper = Wrapper()
Then, in the calling function:
override func viewDidAppear(_ animated: Bool) {
    super.viewDidAppear(animated)
    self.view.backgroundColor = UIColor.darkGray
    self.wrapper.setTargetView(self.previewView)
    self.wrapper.start()
}
I solved the problem. The solution was simply to connect the UI (main.storyboard) to ViewController.swift by dragging the specific UI components into place.
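In hindsight the symptom makes sense: with the outlet unconnected, previewView is nil; the wrapper's parameter carries no nullability annotation, so Swift passes the nil straight through, and in Objective-C every message to nil (here [targetView.layer addSublayer:...]) is a silent no-op. Frames keep flowing while nothing appears on screen. A defensive variant of setTargetView that would have surfaced this immediately (a sketch, not the original code):

- (void)setTargetView:(UIView *)targetView {
    if (targetView == nil) {
        // An unconnected IBOutlet arrives here as nil -- fail loudly instead of silently
        std::cerr << "VideoSource setTargetView: target view is nil (outlet not connected?)" << std::endl;
        return;
    }
    self.previewLayer.contentsGravity = kCAGravityResizeAspectFill;
    self.previewLayer.frame = targetView.bounds;
    self.previewLayer.affineTransform = CGAffineTransformMakeRotation(M_PI / 2);
    [targetView.layer addSublayer:self.previewLayer];
}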
Both paradigms work:
1. The source code posted above, adapted from https://github.com/akira108/MinimumOpenCVLiveCamera. This requires connecting the UIView of main.storyboard to the previewView (UIView) in ViewController.swift (just drag and drop to create the connection).

2. Involving the CvVideoCameraDelegate protocol in the Swift view controller (see Video processing with OpenCV in IOS Swift project). Here, I inserted a UIImageView object in main.storyboard and connected it to the previewImage outlet in the ViewController. Because this example requires a specific OpenCV header within Swift (cap_ios.h), I only tested it with OpenCV 2.4. A sketch of this second setup follows below.
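The CvVideoCameraDelegate callback receives a cv::Mat&, which Swift cannot implement directly, so the camera logic has to live in an Objective-C++ wrapper that the Swift view controller drives. A minimal sketch assuming OpenCV 2.4's cap_ios.h; the class and file names here are illustrative, not taken from the original project:

// CvCameraWrapper.mm -- minimal sketch built on OpenCV 2.4's CvVideoCamera
#import <opencv2/highgui/cap_ios.h>

@interface CvCameraWrapper : NSObject <CvVideoCameraDelegate>
@property (strong, nonatomic) CvVideoCamera *videoCamera;
- (instancetype)initWithImageView:(UIImageView *)imageView;
- (void)start;
@end

@implementation CvCameraWrapper

- (instancetype)initWithImageView:(UIImageView *)imageView {
    self = [super init];
    if (self) {
        // CvVideoCamera drives the AVCaptureSession and renders frames into its parent view
        _videoCamera = [[CvVideoCamera alloc] initWithParentView:imageView];
        _videoCamera.defaultAVCaptureDevicePosition = AVCaptureDevicePositionBack;
        _videoCamera.defaultAVCaptureSessionPreset = AVCaptureSessionPreset640x480;
        _videoCamera.defaultAVCaptureVideoOrientation = AVCaptureVideoOrientationPortrait;
        _videoCamera.defaultFPS = 30;
        _videoCamera.delegate = self;
    }
    return self;
}

- (void)start {
    [self.videoCamera start];
}

// CvVideoCameraDelegate: called once per frame on a background thread
- (void)processImage:(cv::Mat &)image {
    // Per-frame OpenCV processing goes here; modifications to `image` are displayed
}

@end

As with the first paradigm, the UIImageView must actually be wired to the view controller's outlet in main.storyboard; otherwise the parent view passed in here is nil and, again, nothing appears.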