swift iOS:如何使用相机选择颜色
我想用相机选取颜色,但获取到的颜色与我在相机画面中触摸位置的实际颜色不一致。相关代码如下:摄像机设置(swift)、捕获输出(swift)、获取触摸点(swift)、按点取色(Objective-C):
/// Returns the color of the pixel at `atPoint` (expressed in the image's
/// point coordinate system), or nil when the point falls outside the bitmap
/// or the pixel format is not recognized.
///
/// @param atPoint Point in the image, in points; it is multiplied by
///                self.scale to reach pixel coordinates.
/// @param format  Layout of the backing bitmap. Only
///                kCVPixelFormatType_32RGBA and kCVPixelFormatType_32BGRA
///                are handled; any other value yields nil.
/// @return The sampled UIColor, or nil.
- (UIColor *)pickColorWithPoint:(CGPoint)atPoint pixelFormatType:(UInt32)format {
    UIColor *color = nil;
    CGImageRef cgImage = self.CGImage;
    size_t width = CGImageGetWidth(cgImage);
    size_t height = CGImageGetHeight(cgImage);
    // Convert points -> pixels BEFORE truncating; flooring first discards the
    // fractional point coordinate on 2x/3x screens and lands on the wrong pixel.
    NSUInteger x = (NSUInteger)floor(atPoint.x * self.scale);
    NSUInteger y = (NSUInteger)floor(atPoint.y * self.scale);
    if ((x < width) && (y < height)) {
        CGDataProviderRef provider = CGImageGetDataProvider(cgImage);
        CFDataRef bitmapData = CGDataProviderCopyData(provider);
        if (bitmapData != NULL) {
            const UInt8 *data = CFDataGetBytePtr(bitmapData);
            // Rows may be padded for alignment, so index with the image's real
            // bytes-per-row instead of assuming width * 4. Assuming width * 4
            // reads a shifted pixel whenever padding exists — the classic
            // reason the picked color does not match the touched color.
            size_t bytesPerRow = CGImageGetBytesPerRow(cgImage);
            size_t offset = (bytesPerRow * y) + (x * 4);
            if (format == kCVPixelFormatType_32RGBA) {
                UInt8 red   = data[offset];
                UInt8 green = data[offset + 1];
                UInt8 blue  = data[offset + 2];
                UInt8 alpha = data[offset + 3];
                color = [UIColor colorWithRed:red / 255.0f
                                        green:green / 255.0f
                                         blue:blue / 255.0f
                                        alpha:alpha / 255.0f];
            } else if (format == kCVPixelFormatType_32BGRA) {
                UInt8 blue  = data[offset + 0];
                UInt8 green = data[offset + 1];
                UInt8 red   = data[offset + 2];
                // Alpha from the camera is premultiplied-first; treat as opaque.
                color = [UIColor colorWithRed:red / 255.0f
                                        green:green / 255.0f
                                         blue:blue / 255.0f
                                        alpha:1.0f];
            }
            CFRelease(bitmapData);
        }
    }
    return color;
}
- (UIColor*)pickColorWithPoint:(CGPoint)atPoint pixelFormatType:(UInt32)format {
    UIColor* color = nil;
    CGImageRef cgImage = self.CGImage;
    size_t width = CGImageGetWidth(cgImage);
    size_t height = CGImageGetHeight(cgImage);
    NSUInteger x = (NSUInteger)floor(atPoint.x) * self.scale;
    NSUInteger y = (NSUInteger)floor(atPoint.y) * self.scale;
    if ((x < width) && (y < height)) {
        CGDataProviderRef provider = CGImageGetDataProvider(cgImage);
        CFDataRef bitmapData = CGDataProviderCopyData(provider);
        const UInt8 *data = CFDataGetBytePtr(bitmapData);
        size_t offset = ((width * y) + x) * 4;
        if (format == kCVPixelFormatType_32RGBA) {
            UInt8 red = data[offset];
            UInt8 green = data[offset + 1];
            UInt8 blue = data[offset + 2];
            UInt8 alpha = data[offset + 3];
            color = [UIColor colorWithRed:red/255.0f green:green/255.0f blue:blue/255.0f alpha:alpha/255.0f];
        } else if (format == kCVPixelFormatType_32BGRA) {
            UInt8 blue = data[offset + 0];
            UInt8 green = data[offset + 1];
            UInt8 red = data[offset + 2];
            color = [UIColor colorWithRed:red/255.0f green:green/255.0f blue:blue/255.0f alpha:1.0f];
        }
        CFRelease(bitmapData);
    }
    return color;
}
你找到答案了吗?
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    // Sample a pixel only while a touch location is pending.
    guard let touchPoint = self.point else {
        return
    }
    let frameImage = UIImage(fromSampleBuffer: sampleBuffer)
    let imagePoint = frameImage.convertPoint(touchPoint, fromView: self.view)
    let pickedColor = frameImage.pickColorWithPoint(imagePoint, pixelFormatType: kCVPixelFormatType_32BGRA)
    // UI must be touched on the main queue; weak self avoids retaining the
    // controller from the capture queue.
    dispatch_async(dispatch_get_main_queue()) { [weak self] in
        self?.colorView.backgroundColor = pickedColor
    }
    self.point = nil
}
override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?) {
    super.touchesBegan(touches, withEvent: event)
    // Record where the user touched; the capture callback samples this point.
    self.point = touches.first.map { $0.locationInView(self.view) }
}
override func touchesMoved(touches: Set<UITouch>, withEvent event: UIEvent?) {
    super.touchesMoved(touches, withEvent: event)
    // Keep following the finger so the sampled point tracks the drag.
    self.point = touches.first.map { $0.locationInView(self.view) }
}
/// Maps a point expressed in `view`'s coordinate space onto the image's own
/// coordinate space, undoing whatever scaling/alignment the view's
/// contentMode applied when displaying the image.
///
/// @param viewPoint Point in the view's coordinate system.
/// @param view      The view the image is displayed in; its bounds and
///                  contentMode drive the inverse transform.
/// @return The corresponding point in image coordinates. May fall outside
///         the image bounds for aspect-fit letterbox areas — callers should
///         bounds-check before sampling.
///
/// NOTE(review): this works in the image's point size (self.size) and does
/// not account for self.scale; the pixel lookup is expected to apply scale
/// itself — confirm against the picker implementation.
- (CGPoint)convertPoint:(CGPoint)viewPoint fromView:(UIView *)view {
CGPoint imagePoint = viewPoint;
CGSize imageSize = self.size;
CGSize viewSize = view.bounds.size;
// Per-axis ratio of view size to image size (how much each axis was scaled).
CGFloat ratioX = viewSize.width / imageSize.width;
CGFloat ratioY = viewSize.height / imageSize.height;
UIViewContentMode contentMode = view.contentMode;
switch (contentMode) {
case UIViewContentModeScaleToFill:
case UIViewContentModeRedraw:
{
// Each axis was stretched independently; invert each ratio.
imagePoint.x /= ratioX;
imagePoint.y /= ratioY;
break;
}
case UIViewContentModeScaleAspectFit:
case UIViewContentModeScaleAspectFill:
{
// Uniform scale: fit uses the smaller ratio (letterboxed),
// fill uses the larger (cropped).
CGFloat scale;
if (contentMode == UIViewContentModeScaleAspectFit) {
scale = MIN(ratioX, ratioY);
}
else /*if (contentMode == UIViewContentModeScaleAspectFill)*/ {
scale = MAX(ratioX, ratioY);
}
// Remove the x or y margin added in FitMode
imagePoint.x -= (viewSize.width - imageSize.width * scale) / 2.0f;
imagePoint.y -= (viewSize.height - imageSize.height * scale) / 2.0f;
imagePoint.x /= scale;
imagePoint.y /= scale;
break;
}
// The remaining modes display the image unscaled; only subtract the
// alignment margin the mode introduces on each axis.
case UIViewContentModeCenter:
{
imagePoint.x -= (viewSize.width - imageSize.width) / 2.0f;
imagePoint.y -= (viewSize.height - imageSize.height) / 2.0f;
break;
}
case UIViewContentModeTop:
{
// Top-aligned vertically (no y margin), centered horizontally.
imagePoint.x -= (viewSize.width - imageSize.width) / 2.0f;
break;
}
case UIViewContentModeBottom:
{
imagePoint.x -= (viewSize.width - imageSize.width) / 2.0f;
imagePoint.y -= (viewSize.height - imageSize.height);
break;
}
case UIViewContentModeLeft:
{
// Left-aligned horizontally (no x margin), centered vertically.
imagePoint.y -= (viewSize.height - imageSize.height) / 2.0f;
break;
}
case UIViewContentModeRight:
{
imagePoint.x -= (viewSize.width - imageSize.width);
imagePoint.y -= (viewSize.height - imageSize.height) / 2.0f;
break;
}
case UIViewContentModeTopRight:
{
imagePoint.x -= (viewSize.width - imageSize.width);
break;
}
case UIViewContentModeBottomLeft:
{
imagePoint.y -= (viewSize.height - imageSize.height);
break;
}
case UIViewContentModeBottomRight:
{
imagePoint.x -= (viewSize.width - imageSize.width);
imagePoint.y -= (viewSize.height - imageSize.height);
break;
}
case UIViewContentModeTopLeft:
default:
{
// Top-left alignment: origins coincide, no adjustment needed.
break;
}
}
return imagePoint;
}
/// Builds a UIImage from a camera sample buffer.
///
/// Assumes the buffer is a 32-bit BGRA CVPixelBuffer (as configured by the
/// capture session's video settings — confirm against the session setup).
///
/// @param sampleBuffer The buffer delivered by AVCaptureVideoDataOutput.
/// @return A UIImage wrapping the frame's pixels, or nil if no image buffer
///         or bitmap context could be obtained.
+ (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    // Get the Core Video image buffer backing the sample.
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (imageBuffer == NULL) {
        return nil;
    }
    // Lock the base address so the pixel memory stays valid while we read it.
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    // BGRA little-endian with premultiplied alpha matches the camera's
    // kCVPixelFormatType_32BGRA layout.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
        bytesPerRow, colorSpace,
        kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    if (context == NULL) {
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        CGColorSpaceRelease(colorSpace);
        return nil;
    }
    // BUG FIX: the original called CGContextRotateCTM(context, M_PI_2) here
    // hoping to rotate the frame 90°. Rotating the CTM only affects
    // SUBSEQUENT drawing; CGBitmapContextCreateImage snapshots the raw
    // buffer, so the call had no effect on the produced image and only
    // misled readers. Handle orientation at display/conversion time instead,
    // e.g. [UIImage imageWithCGImage:scale:orientation:] with
    // UIImageOrientationRight.
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    // Done reading the pixel memory.
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    UIImage *image = [UIImage imageWithCGImage:quartzImage];
    CGImageRelease(quartzImage);
    return image;
}
/// Returns the color of the pixel at `atPoint` (expressed in the image's
/// point coordinate system), or nil when the point falls outside the bitmap
/// or the pixel format is not recognized.
///
/// @param atPoint Point in the image, in points; it is multiplied by
///                self.scale to reach pixel coordinates.
/// @param format  Layout of the backing bitmap. Only
///                kCVPixelFormatType_32RGBA and kCVPixelFormatType_32BGRA
///                are handled; any other value yields nil.
/// @return The sampled UIColor, or nil.
- (UIColor *)pickColorWithPoint:(CGPoint)atPoint pixelFormatType:(UInt32)format {
    UIColor *color = nil;
    CGImageRef cgImage = self.CGImage;
    size_t width = CGImageGetWidth(cgImage);
    size_t height = CGImageGetHeight(cgImage);
    // Convert points -> pixels BEFORE truncating; flooring first discards the
    // fractional point coordinate on 2x/3x screens and lands on the wrong pixel.
    NSUInteger x = (NSUInteger)floor(atPoint.x * self.scale);
    NSUInteger y = (NSUInteger)floor(atPoint.y * self.scale);
    if ((x < width) && (y < height)) {
        CGDataProviderRef provider = CGImageGetDataProvider(cgImage);
        CFDataRef bitmapData = CGDataProviderCopyData(provider);
        if (bitmapData != NULL) {
            const UInt8 *data = CFDataGetBytePtr(bitmapData);
            // Rows may be padded for alignment, so index with the image's real
            // bytes-per-row instead of assuming width * 4. Assuming width * 4
            // reads a shifted pixel whenever padding exists — the classic
            // reason the picked color does not match the touched color.
            size_t bytesPerRow = CGImageGetBytesPerRow(cgImage);
            size_t offset = (bytesPerRow * y) + (x * 4);
            if (format == kCVPixelFormatType_32RGBA) {
                UInt8 red   = data[offset];
                UInt8 green = data[offset + 1];
                UInt8 blue  = data[offset + 2];
                UInt8 alpha = data[offset + 3];
                color = [UIColor colorWithRed:red / 255.0f
                                        green:green / 255.0f
                                         blue:blue / 255.0f
                                        alpha:alpha / 255.0f];
            } else if (format == kCVPixelFormatType_32BGRA) {
                UInt8 blue  = data[offset + 0];
                UInt8 green = data[offset + 1];
                UInt8 red   = data[offset + 2];
                // Alpha from the camera is premultiplied-first; treat as opaque.
                color = [UIColor colorWithRed:red / 255.0f
                                        green:green / 255.0f
                                         blue:blue / 255.0f
                                        alpha:1.0f];
            }
            CFRelease(bitmapData);
        }
    }
    return color;
}