OpenCV install in xcode

后端 未结 6 1014
你的背包
你的背包 2020-12-30 18:21

I used the link below to install OpenCV in my project, but I don't know how to generate the commands in Terminal — can anybody help me? http://aptogo.co.uk/2011/09/opencv-framework-f

相关标签:
6条回答
  • You can write all those class methods, or you can simply include the ios.h file. It has two methods already written for image processing.

    Here is my code.

    Sorry for all the comments, I include them to show the progress of my research.

    #import "JmBViewController.h"
    
    @interface JmBViewController ()
    
    @end
    
    @implementation JmBViewController
    
    /// Loads a test image, detects circles on a blurred grayscale copy via the
    /// Hough transform, draws the detected circles onto the original image, and
    /// shows the annotated result in the image view.
    - (void)viewDidLoad {
        [super viewDidLoad];
        _imgtest = [UIImage imageNamed:@"IMG_0424.PNG"];
    
        cv::Mat cvImage;
        UIImageToMat(_imgtest, cvImage);
        if (!cvImage.empty()) {
            // UIImageToMat yields a 4-channel mat, hence BGRA -> gray.
            cv::Mat gray;
            cv::cvtColor(cvImage, gray, CV_BGRA2GRAY);
    
            // Smooth before detection to suppress noise-induced false circles.
            cv::GaussianBlur(gray, gray, cv::Size(9, 9), 1.5, 1.5);
    
            // Note: no explicit cv::Canny pass is needed here —
            // CV_HOUGH_GRADIENT performs its own internal edge detection
            // (the previous explicit Canny result was computed but never used).
            cv::vector<cv::Vec3f> circles;
            cv::HoughCircles(gray, circles, CV_HOUGH_GRADIENT, 1, 30, 50, 20, 10, 25);
    
            for (size_t i = 0; i < circles.size(); i++) {
                cv::Point center(cvRound(circles[i][0]), cvRound(circles[i][1]));
                int radius = cvRound(circles[i][2]);
                cv::circle(cvImage, center, 5, cv::Scalar::all(200), -1, 8, 0);     // center dot
                cv::circle(cvImage, center, radius, cv::Scalar::all(255), 3, 8, 0); // outline
                // Cast to match %lu: size_t is unsigned long on 64-bit targets.
                NSLog(@"Circles: %lu", (unsigned long)(i + 1));
            }
    
            _imgView.image = MatToUIImage(cvImage);
        }
    }
    
    - (void)didReceiveMemoryWarning
    {
        [super didReceiveMemoryWarning];
        // Dispose of any resources that can be recreated.
    }
    
    @end

    I hope this helps!

    Here are all my #includes in the Viewer header file.

    #import <UIKit/UIKit.h>
    //  #import "UIImageCVMatConverter.h"
    #import <opencv2/highgui/highgui_c.h>
    #import <opencv2/highgui/highgui.hpp>
    #import <opencv2/imgproc/imgproc_c.h>
    #import <opencv2/imgproc/imgproc.hpp>
    #import <opencv2/highgui/ios.h>
    #import <opencv2/core/core_c.h>
    #import <opencv2/core/core.hpp>
    
    @interface JmBViewController : UIViewController
    
    // Image view that displays the processed (annotated) result.
    @property (weak, nonatomic) IBOutlet UIImageView *imgView;
    
    // Source image for circle detection. Declared strong (was weak): a weak
    // reference to an image with no other owner is released immediately after
    // assignment, leaving the property nil before it can be processed.
    @property (strong, nonatomic) UIImage *imgtest;
    
    @end
    

    No need to compile or make your own framework: simply download the version you want from OpenCV's website and drag it into your project under Frameworks, making sure to select "copy all files to destination" when Xcode asks for confirmation (if you are using iOS). This is the easiest way I have found to include the framework in a project, without any terminal commands or CMake.

    0 讨论(0)
  • 2020-12-30 18:53

    As mentioned in link please open Terminal application located at

    /Applications/Utilities/Terminal.app

    in your Mac system and execute the mentioned commands.

    0 讨论(0)
  • 2020-12-30 18:56

    @Nims, as @moosgummi says, it works, but also I've made the following steps:

    • Add the library libc++.dylib
    • In "Build Settings" - "Apple LLVM compiler XX - Language" - "Compile Sources As" - Objective-C++
    0 讨论(0)
  • 2020-12-30 19:00

    Don't forget to convert all of your .m files into .mm files; otherwise nothing will work.

    0 讨论(0)
  • 2020-12-30 19:04

    Use MacPorts to download the OpenCV libraries, and follow the instructions in the link given below.

    https://www.dropbox.com/s/foipmm7q9n8aaht/How%20to%20get%20OpenCV%20working%20under%20Mac%20OS%20X%20Lion%20with%20XCode%204.1%20%C2%AB%20Salem%27s%20Log.pdf

    0 讨论(0)
  • 2020-12-30 19:07

    If you want to use OpenCV on iOS you should go with the official framework provided by OpenCV (as of version 2.4.2).

    Get the latest version here: OpenCV for iOS. Drop it into your project and add this to your project's prefix header:

    ExampleApp-Prefix.pch:

    #ifdef __cplusplus
        #import <opencv2/opencv.hpp>
    #endif
    

    You'll also have to "convert" a UIImage to a cv::Mat to use it with OpenCV.

    UIImageCVMatConverter.h:

    //
    //  UIImageCVMatConverter.h
    //
    //  Utility class with class methods for converting between UIImage and
    //  cv::Mat, plus helpers that bake camera orientation into the pixel data.
    //
    
    #import <Foundation/Foundation.h>
    
    @interface UIImageCVMatConverter : NSObject {
    
    }
    
    /// Wraps the mat's pixel buffer in a CGImage and returns it as a UIImage
    /// (grayscale when the mat has 1-byte elements, RGB otherwise).
    + (UIImage *)UIImageFromCVMat:(cv::Mat)cvMat;
    /// Renders `image` into a new bitmap context and returns the rendered copy.
    /// NOTE(review): the implementation does not read cvMat — confirm intent.
    + (UIImage *)UIImageFromCVMat:(cv::Mat)cvMat withUIImage:(UIImage*)image;
    /// Draws `image` into a fresh 4-channel (CV_8UC4) cv::Mat and returns it.
    + (cv::Mat)cvMatFromUIImage:(UIImage *)image;
    /// Returns a single-channel (CV_8UC1) grayscale cv::Mat for `image`.
    + (cv::Mat)cvMatGrayFromUIImage:(UIImage *)image;
    /// Scales a front-camera shot to at most 640px on the longest side and
    /// applies its EXIF orientation (mirrored for the front camera).
    + (UIImage *)scaleAndRotateImageFrontCamera:(UIImage *)image;
    /// Scales a back-camera shot to at most 640px on the longest side and
    /// applies its EXIF orientation.
    + (UIImage *)scaleAndRotateImageBackCamera:(UIImage *)image;
    
    @end

    UIImageCVMatConverter.mm:

    //
    //  UIImageCVMatConverter.mm
    //
    
    #import "UIImageCVMatConverter.h"
    
    @implementation UIImageCVMatConverter
    
    /// Renders `image` into a new RGB bitmap context and returns the rendered copy.
    /// NOTE(review): the cvMat parameter is never read — only the UIImage argument
    /// is drawn. Confirm with callers whether the mat contents were meant to be
    /// rendered instead.
    + (UIImage *)UIImageFromCVMat:(cv::Mat)cvMat withUIImage:(UIImage *)image
    {
        // Get rule: CGImageGetColorSpace returns a reference we do NOT own,
        // so it must not be released in this method.
        CGColorSpaceRef colorSpace = CGImageGetColorSpace( image.CGImage );
        CGFloat cols = image.size.width;
        CGFloat rows = image.size.height;
        CGFloat widthStep = image.size.width;
        CGContextRef contextRef = CGBitmapContextCreate( NULL, cols, rows, 8, widthStep * 4, colorSpace, kCGImageAlphaNoneSkipLast | kCGBitmapByteOrderDefault );
        CGContextDrawImage( contextRef, CGRectMake(0, 0, cols, rows), image.CGImage );
        CGContextSetRGBStrokeColor( contextRef, 1, 0, 0, 1 );
        CGImageRef cgImage = CGBitmapContextCreateImage( contextRef );
        UIImage *result = [UIImage imageWithCGImage:cgImage];
        CGImageRelease( cgImage );
        CGContextRelease( contextRef );
        // Fixed: removed CGColorSpaceRelease(colorSpace). Releasing a Get-rule
        // reference we never owned was an over-release that could crash when
        // the image is later torn down.
        return result;
    }
    
    /// Wraps the mat's pixel buffer in a CGImage and returns it as a UIImage.
    /// Single-byte-element mats become grayscale images; anything else is
    /// treated as RGB with elemSize() bytes per pixel.
    + (UIImage *)UIImageFromCVMat:(cv::Mat)cvMat
    {
        NSData *data = [NSData dataWithBytes:cvMat.data length:cvMat.elemSize() * cvMat.total()];
        CGColorSpaceRef colorSpace;
        if ( cvMat.elemSize() == 1 ) {
            colorSpace = CGColorSpaceCreateDeviceGray();
        }
        else {
            colorSpace = CGColorSpaceCreateDeviceRGB();
        }
        CGDataProviderRef provider = CGDataProviderCreateWithCFData( (__bridge CFDataRef)data );
        CGImageRef imageRef = CGImageCreate( cvMat.cols, cvMat.rows, 8, 8 * cvMat.elemSize(), cvMat.step[0], colorSpace, kCGImageAlphaNone | kCGBitmapByteOrderDefault, provider, NULL, false, kCGRenderingIntentDefault );
        UIImage *finalImage = [UIImage imageWithCGImage:imageRef];
        // All three references below were Create'd above (Create rule), so
        // releasing them here is correct.
        CGImageRelease( imageRef );
        CGDataProviderRelease( provider );
        CGColorSpaceRelease( colorSpace );
        return finalImage;
    }
    
    /// Draws `image` into a fresh 4-channel mat (RGBA layout, alpha skipped)
    /// and returns it.
    + (cv::Mat)cvMatFromUIImage:(UIImage *)image
    {
        // Get rule: not owned — must not be released here.
        CGColorSpaceRef colorSpace = CGImageGetColorSpace( image.CGImage );
        CGFloat cols = image.size.width;
        CGFloat rows = image.size.height;
        cv::Mat cvMat( rows, cols, CV_8UC4 );
        CGContextRef contextRef = CGBitmapContextCreate( cvMat.data, cols, rows, 8, cvMat.step[0], colorSpace, kCGImageAlphaNoneSkipLast | kCGBitmapByteOrderDefault );
        CGContextDrawImage( contextRef, CGRectMake(0, 0, cols, rows), image.CGImage );
        CGContextRelease( contextRef );
        // Fixed: removed CGColorSpaceRelease(colorSpace) — over-release of a
        // Get-rule reference obtained from CGImageGetColorSpace.
        return cvMat;
    }
    
    /// Returns a single-channel grayscale mat for `image`. If the converted
    /// mat is already single-channel it is returned as-is.
    + (cv::Mat)cvMatGrayFromUIImage:(UIImage *)image
    {
        cv::Mat cvMat = [UIImageCVMatConverter cvMatFromUIImage:image];
        cv::Mat grayMat;
        if ( cvMat.channels() == 1 ) {
            grayMat = cvMat;
        }
        else {
            grayMat = cv::Mat( cvMat.rows, cvMat.cols, CV_8UC1 );
            // Fixed: cvMatFromUIImage produces a 4-channel RGBA mat
            // (kCGImageAlphaNoneSkipLast over an RGB color space), but
            // CV_BGR2GRAY expects 3 channels and throws at runtime.
            // CV_RGBA2GRAY matches the actual channel layout.
            cv::cvtColor( cvMat, grayMat, CV_RGBA2GRAY );
        }
        return grayMat;
    }
    
    /// Scales a back-camera shot down to at most kMaxResolution pixels on its
    /// longest side and bakes the EXIF orientation into the pixel data, so the
    /// returned image is upright with orientation "up".
    + (UIImage *)scaleAndRotateImageBackCamera:(UIImage *)image
    {
        static const int kMaxResolution = 640;
        CGImageRef imgRef = image.CGImage;
        CGFloat width = CGImageGetWidth( imgRef );
        CGFloat height = CGImageGetHeight( imgRef );
        CGAffineTransform transform = CGAffineTransformIdentity;
        CGRect bounds = CGRectMake( 0, 0, width, height );
        // Shrink (preserving aspect ratio) only when either side exceeds the cap.
        if ( width > kMaxResolution || height > kMaxResolution ) {
            CGFloat ratio = width / height;
            if ( ratio > 1 ) {
                bounds.size.width = kMaxResolution;
                bounds.size.height = bounds.size.width / ratio;
            }
            else {
                bounds.size.height = kMaxResolution;
                bounds.size.width = bounds.size.height * ratio;
            }
        }
        CGFloat scaleRatio = bounds.size.width / width;
        CGSize imageSize = CGSizeMake( CGImageGetWidth(imgRef), CGImageGetHeight(imgRef) );
        CGFloat boundHeight;
        UIImageOrientation orient = image.imageOrientation;
        // Build the affine transform that maps the stored pixels to an
        // upright image for each EXIF orientation; the 90-degree cases also
        // swap the output bounds.
        switch ( orient ) {
            case UIImageOrientationUp:
                transform = CGAffineTransformIdentity;
                break;
            case UIImageOrientationUpMirrored:
                transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0);
                transform = CGAffineTransformScale(transform, -1.0, 1.0);
                break;
            case UIImageOrientationDown:
                transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height);
                transform = CGAffineTransformRotate(transform, M_PI);
                break;
            case UIImageOrientationDownMirrored:
                transform = CGAffineTransformMakeTranslation(0.0, imageSize.height);
                transform = CGAffineTransformScale(transform, 1.0, -1.0);
                break;
            case UIImageOrientationLeftMirrored:
                boundHeight = bounds.size.height;
                bounds.size.height = bounds.size.width;
                bounds.size.width = boundHeight;
                transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width);
                transform = CGAffineTransformScale(transform, -1.0, 1.0);
                transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
                break;
            case UIImageOrientationLeft:
                boundHeight = bounds.size.height;
                bounds.size.height = bounds.size.width;
                bounds.size.width = boundHeight;
                transform = CGAffineTransformMakeTranslation(0.0, imageSize.width);
                transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
                break;
            case UIImageOrientationRightMirrored:
                boundHeight = bounds.size.height;
                bounds.size.height = bounds.size.width;
                bounds.size.width = boundHeight;
                transform = CGAffineTransformMakeScale(-1.0, 1.0);
                transform = CGAffineTransformRotate(transform, M_PI / 2.0);
                break;
            case UIImageOrientationRight:
                boundHeight = bounds.size.height;
                bounds.size.height = bounds.size.width;
                bounds.size.width = boundHeight;
                transform = CGAffineTransformMakeTranslation(imageSize.height, 0.0);
                transform = CGAffineTransformRotate(transform, M_PI / 2.0);
                break;
            default:
                [NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"];
        }
        UIGraphicsBeginImageContext( bounds.size );
        CGContextRef context = UIGraphicsGetCurrentContext();
        // The 90-degree cases need a flipped horizontal scale plus a shift to
        // keep the drawing inside the (swapped) bounds.
        if ( orient == UIImageOrientationRight || orient == UIImageOrientationLeft ) {
            CGContextScaleCTM( context, -scaleRatio, scaleRatio );
            CGContextTranslateCTM( context, -height, 0 );
        }
        else {
            CGContextScaleCTM( context, scaleRatio, -scaleRatio );
            CGContextTranslateCTM( context, 0, -height );
        }
        CGContextConcatCTM( context, transform );
        CGContextDrawImage( UIGraphicsGetCurrentContext(), CGRectMake(0, 0, width, height), imgRef );
        UIImage *returnImage = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();
        return returnImage;
    }
    
    /// Front-camera variant of scaleAndRotateImageBackCamera:. Identical except
    /// that UIImageOrientationRight falls through to the mirrored transform,
    /// compensating for the front camera's mirrored capture.
    + (UIImage *)scaleAndRotateImageFrontCamera:(UIImage *)image
    {
        static const int kMaxResolution = 640;
        CGImageRef imgRef = image.CGImage;
        CGFloat width = CGImageGetWidth(imgRef);
        CGFloat height = CGImageGetHeight(imgRef);
        CGAffineTransform transform = CGAffineTransformIdentity;
        CGRect bounds = CGRectMake( 0, 0, width, height );
        // Shrink (preserving aspect ratio) only when either side exceeds the cap.
        if ( width > kMaxResolution || height > kMaxResolution ) {
            CGFloat ratio = width / height;
            if ( ratio > 1 ) {
                bounds.size.width = kMaxResolution;
                bounds.size.height = bounds.size.width / ratio;
            } else {
                bounds.size.height = kMaxResolution;
                bounds.size.width = bounds.size.height * ratio;
            }
        }
    
        CGFloat scaleRatio = bounds.size.width / width;
        CGSize imageSize = CGSizeMake(CGImageGetWidth(imgRef), CGImageGetHeight(imgRef));
        CGFloat boundHeight;
        UIImageOrientation orient = image.imageOrientation;
        switch ( orient ) {
            case UIImageOrientationUp:
                transform = CGAffineTransformIdentity;
                break;
            case UIImageOrientationUpMirrored:
                transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0);
                transform = CGAffineTransformScale(transform, -1.0, 1.0);
                break;
            case UIImageOrientationDown:
                transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height);
                transform = CGAffineTransformRotate(transform, M_PI);
                break;
            case UIImageOrientationDownMirrored:
                transform = CGAffineTransformMakeTranslation(0.0, imageSize.height);
                transform = CGAffineTransformScale(transform, 1.0, -1.0);
                break;
            case UIImageOrientationLeftMirrored:
                boundHeight = bounds.size.height;
                bounds.size.height = bounds.size.width;
                bounds.size.width = boundHeight;
                transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width);
                transform = CGAffineTransformScale(transform, -1.0, 1.0);
                transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
                break;
            case UIImageOrientationLeft:
                boundHeight = bounds.size.height;
                bounds.size.height = bounds.size.width;
                bounds.size.width = boundHeight;
                transform = CGAffineTransformMakeTranslation(0.0, imageSize.width);
                transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
                break;
            // Deliberate fall-through: front-camera "Right" shots are handled
            // with the mirrored transform.
            case UIImageOrientationRight:
            case UIImageOrientationRightMirrored:
                boundHeight = bounds.size.height;
                bounds.size.height = bounds.size.width;
                bounds.size.width = boundHeight;
                transform = CGAffineTransformMakeScale(-1.0, 1.0);
                transform = CGAffineTransformRotate(transform, M_PI / 2.0);
                break;
            default:
                [NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"];
        }
        UIGraphicsBeginImageContext( bounds.size );
        CGContextRef context = UIGraphicsGetCurrentContext();
        if ( orient == UIImageOrientationRight || orient == UIImageOrientationLeft ) {
            CGContextScaleCTM(context, -scaleRatio, scaleRatio);
            CGContextTranslateCTM(context, -height, 0);
        }
        else {
            CGContextScaleCTM(context, scaleRatio, -scaleRatio);
            CGContextTranslateCTM(context, 0, -height);
        }
        CGContextConcatCTM( context, transform );
        CGContextDrawImage( UIGraphicsGetCurrentContext(), CGRectMake(0, 0, width, height), imgRef );
        UIImage *returnImage = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();
        return returnImage;
    }
    
    @end
    

    Rename your view controller implementation file to *.mm

    MyViewController.m -> MyViewController.mm
    

    And import the UIImageCVMatConverter in your view controller:

    #import "UIImageCVMatConverter.h"
    

    Now you can mix Objective-C and C++ OpenCV code inside your view controller:

    cv::Mat img = [UIImageCVMatConverter cvMatFromUIImage:[UIImage imageNamed:@"my_image.png"]];
    ...
    

    Have fun!

    0 讨论(0)
提交回复
热议问题