objective c - How to find the pixel position from live camera preview using AVCapture preview layer in iOS?


I am building a sample application that reads the pixel color from the live camera preview.

Here is the code:

let session = AVCaptureSession()
session.sessionPreset = AVCaptureSessionPresetMedium

let captureVideoPreviewLayer = AVCaptureVideoPreviewLayer(session: session)
captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
captureVideoPreviewLayer.bounds = previewView.layer.bounds
captureVideoPreviewLayer.position = CGPointMake(CGRectGetMidX(previewView.layer.bounds), CGRectGetMidY(previewView.layer.bounds))
self.previewView.layer.addSublayer(captureVideoPreviewLayer)

let device = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)

do {
    input = try AVCaptureDeviceInput(device: device)
} catch {
    print("input nil")
    input = nil
}

output = AVCaptureVideoDataOutput()
var myQueue: dispatch_queue_t? = dispatch_queue_create("myQueue", nil)

if input != nil {
    session.addInput(input!)
}

if output != nil {
    output!.setSampleBufferDelegate(self, queue: myQueue)
    output!.alwaysDiscardsLateVideoFrames = true
    myQueue = nil
    output!.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString) : NSNumber(unsignedInt: kCVPixelFormatType_32BGRA)]
    session.addOutput(output!)
}

if session.running {
    session.stopRunning()
}
session.startRunning()

and I am getting the sample buffer in the delegate method:

func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!)

I am converting the sample buffer to a CGImage like this:

let cgImg = getImageFromSampleBuffer(sampleBuffer)

and rotating the image to portrait:

let image = UIImage(CGImage: cgImg, scale: 1, orientation: UIImageOrientation.Right)
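getImageFromSampleBuffer(_:) is my own helper and its body is not shown here. A minimal sketch of that kind of conversion (not my exact code), assuming the 32BGRA format configured on the output and returning an optional CGImage, would be:

func getImageFromSampleBuffer(sampleBuffer: CMSampleBuffer) -> CGImage? {
    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return nil }

    CVPixelBufferLockBaseAddress(pixelBuffer, 0)
    defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, 0) }

    let baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer)
    let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
    let width       = CVPixelBufferGetWidth(pixelBuffer)
    let height      = CVPixelBufferGetHeight(pixelBuffer)

    let colorSpace = CGColorSpaceCreateDeviceRGB()
    // 32BGRA corresponds to little-endian 32-bit with premultiplied alpha first in Core Graphics terms.
    let bitmapInfo = CGBitmapInfo.ByteOrder32Little.rawValue | CGImageAlphaInfo.PremultipliedFirst.rawValue

    guard let context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                              bytesPerRow, colorSpace, bitmapInfo) else { return nil }
    return CGBitmapContextCreateImage(context)
}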

Here is the problem:

I am running the application on an iPhone 5c.

Here are the coordinates:

previewLayer frame = (0.0, 0.0, 320.0, 568.0), image size = (360.0, 480.0)

If I touch at location (100, 200) on the view, how do I map that touch to the exact pixel in the image so I can read its color?
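To make the question concrete, this is the kind of mapping I am after; a minimal sketch, assuming the preview layer keeps AVLayerVideoGravityResizeAspectFill and the image has already been rotated to portrait (imagePointForViewPoint is only an illustrative name, not code I have verified):

// Map a touch point in the preview view to a pixel coordinate in the image,
// assuming aspect-fill: the image is scaled up to cover the view, then cropped.
func imagePointForViewPoint(viewPoint: CGPoint, viewSize: CGSize, imageSize: CGSize) -> CGPoint {
    // Aspect-fill uses the larger of the two scale factors so the image covers the view.
    let scale = max(viewSize.width / imageSize.width, viewSize.height / imageSize.height)
    let scaledSize = CGSize(width: imageSize.width * scale, height: imageSize.height * scale)

    // The overflow is cropped equally on both sides of the overflowing dimension.
    let xOffset = (scaledSize.width  - viewSize.width)  / 2
    let yOffset = (scaledSize.height - viewSize.height) / 2

    // Undo the crop offset, then undo the scale, to land in image pixel coordinates.
    return CGPoint(x: (viewPoint.x + xOffset) / scale,
                   y: (viewPoint.y + yOffset) / scale)
}

With my numbers that would be imagePointForViewPoint(CGPointMake(100, 200), viewSize: CGSizeMake(320, 568), imageSize: CGSizeMake(360, 480)), but I am not sure this is the right approach.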

Note: I have used the code below to read the pixel color, but it gives the wrong color.

- (UIColor *)getPixelColorAtLocation:(CGPoint)point andImage:(UIImage *)image {
    UIColor *color = nil;
    CGImageRef inImage = image.CGImage;
    // Create an off-screen bitmap context to draw the image into. Format ARGB, 4 bytes per pixel: alpha, red, green, blue.
    CGContextRef cgctx = [self createARGBBitmapContextFromImage:inImage];
    if (cgctx == NULL) { return nil; /* error */ }

    size_t w = CGImageGetWidth(inImage);
    size_t h = CGImageGetHeight(inImage);
    CGRect rect = {{0,0},{w,h}};

    // Draw the image into the bitmap context. Once drawn, the memory
    // allocated for the context rendering will contain the
    // raw image data in the specified color space.
    CGContextDrawImage(cgctx, rect, inImage);

    // Now we can get a pointer to the image data associated with the bitmap context.
    unsigned char *data = CGBitmapContextGetData(cgctx);
    if (data != NULL) {
        // offset locates the pixel in the data from x,y.
        // 4 for 4 bytes of data per pixel, w is the width of one row of data.
        int offset = 4*((w*round(point.y))+round(point.x));
        int alpha = data[offset];
        int red   = data[offset+1];
        int green = data[offset+2];
        int blue  = data[offset+3];
//        NSLog(@"offset: %i colors: RGBA %i %i %i %i", offset, red, green, blue, alpha);
        color = [UIColor colorWithRed:(red/255.0f) green:(green/255.0f) blue:(blue/255.0f) alpha:(alpha/255.0f)];
    }

    // When finished, release the context
//    CGContextRelease(cgctx);
    // Free the image data memory for the context
    if (data) { free(data); }

    return color;
}

- (CGContextRef)createARGBBitmapContextFromImage:(CGImageRef)inImage {
    CGContextRef    context = NULL;
    CGColorSpaceRef colorSpace;
    void *          bitmapData;
    int             bitmapByteCount;
    int             bitmapBytesPerRow;

    // Get the image width and height. We'll use the entire image.
    size_t pixelsWide = CGImageGetWidth(inImage);
    size_t pixelsHigh = CGImageGetHeight(inImage);

    // Declare the number of bytes per row. Each pixel in the bitmap in this
    // example is represented by 4 bytes; 8 bits each of red, green, blue, and alpha.
    bitmapBytesPerRow = (pixelsWide * 4);
    bitmapByteCount   = (bitmapBytesPerRow * pixelsHigh);

    // Use the generic RGB color space.
    colorSpace = CGColorSpaceCreateDeviceRGB();
    if (colorSpace == NULL)
    {
        fprintf(stderr, "Error allocating color space\n");
        return NULL;
    }

    // Allocate memory for the image data. This is the destination in memory
    // where any drawing to the bitmap context will be rendered.
    bitmapData = malloc(bitmapByteCount);
    if (bitmapData == NULL)
    {
        fprintf(stderr, "Memory not allocated!");
        CGColorSpaceRelease(colorSpace);
        return NULL;
    }

    // Create the bitmap context. We want pre-multiplied ARGB, 8 bits
    // per component. Regardless of the source image format
    // (CMYK, grayscale, and so on) it will be converted to the format
    // specified here by CGBitmapContextCreate.
    context = CGBitmapContextCreate(bitmapData,
                                    pixelsWide,
                                    pixelsHigh,
                                    8,      // bits per component
                                    bitmapBytesPerRow,
                                    colorSpace,
                                    kCGImageAlphaPremultipliedFirst);
    if (context == NULL)
    {
        free(bitmapData);
        fprintf(stderr, "Context not created!");
    }

    // Make sure to release the colorspace before returning.
    CGColorSpaceRelease(colorSpace);

    return context;
}

Thanks in advance.

