@Xiaojun-Jin
2015-03-30T02:10:33.000000Z
字数 13560
阅读 2659
iOS
UIImage
// Bitmap container used by the imaging SDK (ArcSoft-style).
// NOTE(review): MDWord/MLong/MByte and MPAF_MAX_PLANES are SDK typedefs
// declared elsewhere in the project — confirm against the SDK headers.
typedef struct __tag_MBITMAP
{
MDWord dwPixelArrayFormat; // pixel layout tag, e.g. DISPLAY32PIXEL or MPAF_RGB24_B8G8R8
MLong lWidth; // width in pixels
MLong lHeight; // height in pixels
MLong lPitch[MPAF_MAX_PLANES]; // bytes per row, one entry per plane
MByte *pPlane[MPAF_MAX_PLANES]; // plane pixel data, owned by whoever allocated it
}MBITMAP, *LPMBITMAP;
/// Allocates a zero-initialized 32-bit (DISPLAY32PIXEL) MBITMAP of the given
/// pixel dimensions. The caller owns the returned struct and its plane buffer;
/// release with +releaseBitmap:.
/// @return the new bitmap, or NULL when either allocation fails.
+ (MBITMAP *)mallocEmptyMBitmap32WithWidth:(MLong)lWidth height:(MLong)lHeight
{
    // calloc replaces the malloc+memset pair and zeroes all planes/pitches.
    MBITMAP *pBitmapData = (MBITMAP *)calloc(1, sizeof(MBITMAP));
    if (!pBitmapData) return NULL;   // NULL, not nil: this is a C pointer

    pBitmapData->dwPixelArrayFormat = DISPLAY32PIXEL;
    pBitmapData->lWidth = lWidth;
    pBitmapData->lHeight = lHeight;
    pBitmapData->lPitch[0] = LINE_BYTES(pBitmapData->lWidth, 32);

    // Single interleaved plane: pitch * height bytes.
    pBitmapData->pPlane[0] = (MByte *)malloc(pBitmapData->lPitch[0] * pBitmapData->lHeight);
    if (pBitmapData->pPlane[0] == NULL)
    {
        free(pBitmapData);
        pBitmapData = NULL;
    }
    return pBitmapData;
}
/// Renders `image` into the pre-allocated 32-bit bitmap `pBitmap`.
/// The bitmap's dimensions must match the image's pixel dimensions exactly.
/// @return MOK on success, MERR_INVALID_PARAM on bad/mismatched input,
///         MERR_UNKNOWN when the bitmap context cannot be created.
+ (MRESULT)convertUIImage:(UIImage *)image toMBitmap:(MBITMAP *)pBitmap
{
    if (!image || !pBitmap) return MERR_INVALID_PARAM;

    // BUGFIX: a UIImage backed by a CIImage (or other storage) has no CGImage;
    // the NULL case was previously unchecked.
    CGImageRef imgRef = [image CGImage];
    if (!imgRef ||
        CGImageGetWidth(imgRef) != pBitmap->lWidth ||
        CGImageGetHeight(imgRef) != pBitmap->lHeight)
    {
        return MERR_INVALID_PARAM;
    }

    // Create the color space only after validation succeeds.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    // Draw straight into plane 0: RGBA, 8 bits/channel, big-endian byte order.
    CGContextRef context = CGBitmapContextCreate(pBitmap->pPlane[0],
                                                 pBitmap->lWidth,
                                                 pBitmap->lHeight,
                                                 8,
                                                 pBitmap->lPitch[0],
                                                 colorSpace,
                                                 kCGImageAlphaPremultipliedLast |
                                                 kCGBitmapByteOrder32Big);
    CGColorSpaceRelease(colorSpace);
    if (!context) return MERR_UNKNOWN;

    CGRect rect = {{0, 0}, {pBitmap->lWidth, pBitmap->lHeight}};
    CGContextDrawImage(context, rect, imgRef);
    CGContextRelease(context);
    return MOK;
}
/// Converts a 32-bit RGBA MBITMAP (plane 0, interleaved) into a UIImage.
/// @param bitmap      source bitmap produced by this class's helpers.
/// @param orientation orientation applied to the resulting UIImage.
+ (UIImage *)convertMBitmap32:(MBITMAP *)bitmap
              withOrientation:(UIImageOrientation)orientation
{
    if (!bitmap || !bitmap->pPlane[0]) return nil;

    size_t width = bitmap->lWidth;
    size_t height = bitmap->lHeight;
    // BUGFIX: honor the bitmap's own row pitch instead of assuming 4*width.
    size_t bytesPerRow = bitmap->lPitch[0];
    size_t bufferLength = bytesPerRow * height;
    unsigned char *buffer = bitmap->pPlane[0];

    CGDataProviderRef provider =
        CGDataProviderCreateWithData(NULL, buffer, bufferLength, NULL);
    CGColorSpaceRef colorSpaceRef = CGColorSpaceCreateDeviceRGB();
    if (colorSpaceRef == NULL)
    {
        CGDataProviderRelease(provider);
        return nil;
    }

    // BUGFIX: plane 0 is RGBA (alpha last). The previous
    // kCGBitmapByteOrderDefault alone implies kCGImageAlphaNone, which is not
    // a valid 32-bpp RGB combination and makes CGImageCreate fail.
    CGImageRef iref = CGImageCreate(width,
                                    height,
                                    8,             // bits per component
                                    32,            // bits per pixel
                                    bytesPerRow,
                                    colorSpaceRef,
                                    kCGImageAlphaPremultipliedLast |
                                        kCGBitmapByteOrder32Big,
                                    provider,
                                    NULL,
                                    YES,
                                    kCGRenderingIntentDefault);

    uint32_t *pixels = (uint32_t *)malloc(bufferLength);
    if (pixels == NULL)
    {
        CGDataProviderRelease(provider);
        CGColorSpaceRelease(colorSpaceRef);
        CGImageRelease(iref);
        return nil;
    }

    UIImage *image = nil;
    CGContextRef context = CGBitmapContextCreate(pixels,
                                                 width,
                                                 height,
                                                 8,
                                                 bytesPerRow,
                                                 colorSpaceRef,
                                                 kCGImageAlphaPremultipliedLast |
                                                     kCGBitmapByteOrder32Big);
    if (context)
    {
        CGContextDrawImage(context, CGRectMake(0.0f, 0.0f, width, height), iref);
        CGImageRef imageRef = CGBitmapContextCreateImage(context);
        // Support both iPad 3.2 and iPhone 4 Retina displays with the correct scale.
        if ([UIImage respondsToSelector:@selector(imageWithCGImage:scale:orientation:)])
        {
            // BUGFIX: the caller-supplied orientation was previously ignored
            // (UIImageOrientationUp was always used).
            image = [UIImage imageWithCGImage:imageRef
                                        scale:1.f
                                  orientation:orientation];
        }
        else
        {
            image = [UIImage imageWithCGImage:imageRef];
        }
        CGImageRelease(imageRef);
        CGContextRelease(context);
    }
    else
    {
        NSLog(@"Error context not created");
    }

    CGColorSpaceRelease(colorSpaceRef);
    CGImageRelease(iref);
    CGDataProviderRelease(provider);
    free(pixels);
    return image;
}
释放MBITMAP:
/// Frees a bitmap created by +mallocEmptyMBitmap32WithWidth:height:
/// (plane buffer first, then the struct). Safe to call with NULL.
/// BUGFIX: the struct itself is now freed even when pPlane[0] is NULL —
/// previously free(bitmap) was nested inside the plane check, leaking the
/// struct whenever the plane was absent.
+ (void)releaseBitmap:(MBITMAP *)bitmap
{
    if (!bitmap) return;

    if (bitmap->pPlane[0])
    {
        free(bitmap->pPlane[0]);
        bitmap->pPlane[0] = NULL;
    }
    free(bitmap);
}
/// Extracts a tightly packed 24-bit B8G8R8 buffer from a UIImage.
/// The image is first rendered as premultiplied ARGB, then alpha is dropped
/// and channels re-ordered to B,G,R (matches MPAF_RGB24_B8G8R8).
/// The caller owns the returned buffer and must free() it; returns NULL on failure.
/// NOTE(review): image.size is in points, not pixels — for @2x/@3x images this
/// under-samples; confirm callers only pass scale-1 images.
- (unsigned char *)bitmap24FromUIImage:(UIImage *)image
{
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    if (colorSpace == NULL) return NULL;

    void *bitmapData = malloc(image.size.width * image.size.height * 4);
    if (bitmapData == NULL)
    {
        CGColorSpaceRelease(colorSpace);
        return NULL;
    }

    // ARGB, 8 bits per channel, default (big-endian) byte order.
    CGContextRef context = CGBitmapContextCreate(bitmapData,
                                                 image.size.width,
                                                 image.size.height,
                                                 8,
                                                 image.size.width * 4,
                                                 colorSpace,
                                                 kCGImageAlphaPremultipliedFirst);
    CGColorSpaceRelease(colorSpace);
    if (context == NULL)
    {
        free(bitmapData);
        return NULL;
    }

    CGRect rect = CGRectMake(0.0f, 0.0f, image.size.width, image.size.height);
    CGContextDrawImage(context, rect, image.CGImage);
    unsigned char *data = CGBitmapContextGetData(context);
    CGContextRelease(context);   // data == bitmapData, still owned by us

    long lWidth = image.size.width;
    long lHeight = image.size.height;
    unsigned char *pRstData = malloc(lWidth * lHeight * 3);
    if (pRstData == NULL)   // BUGFIX: this allocation was previously unchecked
    {
        free(data);
        return NULL;
    }

    // ARGB (A=+0, R=+1, G=+2, B=+3)  ->  BGR (B=+0, G=+1, R=+2)
    for (long j = 0; j < lHeight; j++)
    {
        for (long i = 0; i < lWidth; i++)
        {
            *(pRstData + j*3*lWidth + i*3)     = *(data + j*4*lWidth + i*4 + 3);
            *(pRstData + j*3*lWidth + i*3 + 1) = *(data + j*4*lWidth + i*4 + 2);
            *(pRstData + j*3*lWidth + i*3 + 2) = *(data + j*4*lWidth + i*4 + 1);
        }
    }

    free(data);
    return pRstData;
}
转换示例代码:
// Example: wrap a UIImage's pixels in a stack-allocated 24-bit MBITMAP.
MBITMAP offImageOri = {0};
unsigned char *pImgData = [self bitmap24FromUIImage:newImage]; // heap BGR buffer, owned here
long lWidth = newImage.size.width; long lHeight = newImage.size.height;
offImageOri.dwPixelArrayFormat = MPAF_RGB24_B8G8R8; // matches bitmap24FromUIImage: output
offImageOri.lWidth = lWidth;
offImageOri.lHeight = lHeight;
offImageOri.lPitch[0] = lWidth*3; // tightly packed, 3 bytes per pixel
offImageOri.pPlane[0] = pImgData;
// Release: only the plane is heap-allocated; the struct lives on the stack.
if (offImageOri.pPlane[0]) free(offImageOri.pPlane[0]);
/// Builds a UIImage from a 24-bit B8G8R8 MBITMAP by expanding each pixel to
/// ARGB (alpha forced to 255).
/// BUGFIX: lWidth/lHeight were previously used without being declared in this
/// method (they leaked in from the surrounding example snippet, so the method
/// would not compile standalone); they are now taken from the bitmap itself.
- (UIImage *)createUIImageFrom24MBitmap:(MBITMAP)offImageRst
{
    long lWidth = offImageRst.lWidth;
    long lHeight = offImageRst.lHeight;

    unsigned char *pRstData = malloc(lWidth * lHeight * 4);
    if (pRstData == NULL) return nil;   // BUGFIX: malloc was unchecked

    // BGR (B=+0, G=+1, R=+2)  ->  ARGB (A=255, then R, G, B)
    for (long j = 0; j < lHeight; j++)
    {
        for (long i = 0; i < lWidth; i++)
        {
            *(pRstData + j*4*lWidth + i*4)     = 255;
            *(pRstData + j*4*lWidth + i*4 + 1) = *(offImageRst.pPlane[0] + j*3*lWidth + i*3 + 2);
            *(pRstData + j*4*lWidth + i*4 + 2) = *(offImageRst.pPlane[0] + j*3*lWidth + i*3 + 1);
            *(pRstData + j*4*lWidth + i*4 + 3) = *(offImageRst.pPlane[0] + j*3*lWidth + i*3 + 0);
        }
    }

    // imageWithBits:withSize: takes ownership of pRstData and frees it.
    UIImage *resultImage = [self imageWithBits:pRstData
                                      withSize:CGSizeMake(offImageRst.lWidth,
                                                          offImageRst.lHeight)];
    return resultImage;
}
/// Creates a UIImage from a malloc'd ARGB pixel buffer.
/// Takes ownership of `bits`: the buffer is freed on every path.
- (UIImage *)imageWithBits:(unsigned char *)bits withSize:(CGSize)size
{
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    // BUGFIX: the original `if (colorSpace == NULL) free(bits); return nil;`
    // had no braces, so `return nil` executed unconditionally and the method
    // could never produce an image.
    if (colorSpace == NULL)
    {
        free(bits);
        return nil;
    }

    CGContextRef context = CGBitmapContextCreate(bits,
                                                 size.width,
                                                 size.height,
                                                 8,
                                                 size.width * 4,
                                                 colorSpace,
                                                 kCGImageAlphaPremultipliedFirst);
    CGColorSpaceRelease(colorSpace);
    if (context == NULL)
    {
        free(bits);
        return nil;
    }

    CGImageRef ref = CGBitmapContextCreateImage(context);
    // CGBitmapContextGetData returns the buffer we passed in (== bits);
    // the snapshot above made its own copy, so freeing here is safe.
    free(CGBitmapContextGetData(context));
    CGContextRelease(context);

    UIImage *img = [UIImage imageWithCGImage:ref];
    CGImageRelease(ref);
    return img;
}
// Off-screen image container used by the ASVL SDK; supports up to 4 planes
// (e.g. plane 0 = Y, plane 1 = interleaved CbCr for NV12).
// NOTE(review): MUInt32/MInt32/MUInt8 are SDK typedefs declared elsewhere.
typedef struct __tag_ASVL_OFFSCREEN
{
MUInt32 u32PixelArrayFormat; // pixel layout tag, e.g. ASVL_PAF_NV12 or ASVL_PAF_RGB32_A8R8G8B8
MInt32 i32Width; // width in pixels
MInt32 i32Height; // height in pixels
MUInt8* ppu8Plane[4]; // per-plane pixel data (not owned by the struct)
MInt32 pi32Pitch[4]; // per-plane bytes per row
}ASVLOFFSCREEN, *LPASVLOFFSCREEN;
/// Renders `image` into a freshly malloc'd premultiplied-ARGB pixel buffer
/// (8 bits per channel, image.size.width * 4 bytes per row).
/// @return the buffer — the caller must free() it — or NULL on failure.
- (unsigned char *)createASVLImageFromUIImage:(UIImage *)image
{
    CGColorSpaceRef deviceRGB = CGColorSpaceCreateDeviceRGB();
    if (deviceRGB == NULL) return NULL;

    CGFloat pixelWidth = image.size.width;
    CGFloat pixelHeight = image.size.height;

    void *backing = malloc(pixelWidth * pixelHeight * 4);
    if (backing == NULL)
    {
        CGColorSpaceRelease(deviceRGB);
        return NULL;
    }

    CGContextRef ctx = CGBitmapContextCreate(backing,
                                             pixelWidth,
                                             pixelHeight,
                                             8,
                                             pixelWidth * 4,
                                             deviceRGB,
                                             kCGImageAlphaPremultipliedFirst);
    CGColorSpaceRelease(deviceRGB);
    if (ctx == NULL)
    {
        free(backing);
        return NULL;
    }

    CGContextDrawImage(ctx,
                       CGRectMake(0.0f, 0.0f, pixelWidth, pixelHeight),
                       image.CGImage);

    // CGBitmapContextGetData hands back the buffer we supplied (== backing),
    // so it remains valid after the context is released.
    unsigned char *pixelBytes = CGBitmapContextGetData(ctx);
    CGContextRelease(ctx);
    return pixelBytes;
}
转换示例代码:
// Example: wrap the ARGB buffer in an ASVLOFFSCREEN header.
unsigned char *pImgData = [self createASVLImageFromUIImage:newImage]; // heap buffer, owned here
long lWidth = newImage.size.width; long lHeight = newImage.size.height;
// NOTE(review): m_offscreenOri is not zero-initialized, so planes/pitches
// 1..3 remain indeterminate — memset to 0 first if the SDK inspects them.
ASVLOFFSCREEN m_offscreenOri;
m_offscreenOri.u32PixelArrayFormat = ASVL_PAF_RGB32_A8R8G8B8;
m_offscreenOri.i32Width = lWidth;
m_offscreenOri.i32Height = lHeight;
m_offscreenOri.pi32Pitch[0] = lWidth*4; // 4 bytes per pixel, tightly packed
m_offscreenOri.ppu8Plane[0] = pImgData;
/// Converts an NV12 (4:2:0 bi-planar) ASVLOFFSCREEN into a UIImage via a
/// CVPixelBuffer and Core Image.
/// @param pImg source image: Y plane in ppu8Plane[0], interleaved CbCr plane
///             in ppu8Plane[1], with per-plane pitches in pi32Pitch.
- (UIImage *)convertASVLImageToUIImage:(ASVLOFFSCREEN *)pImg
{
    CVPixelBufferRef pBuffer = NULL;
    int bufferWidth = pImg->i32Width;
    int bufferHeight = pImg->i32Height;
    OSType pixelFormatType = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
    NSDictionary *mAttrs = [NSDictionary dictionaryWithObject:[NSDictionary dictionary]
                                                       forKey:(id)kCVPixelBufferIOSurfacePropertiesKey];
    CVReturn ret = CVPixelBufferCreate(kCFAllocatorDefault,
                                       bufferWidth,
                                       bufferHeight,
                                       pixelFormatType,
                                       (CFDictionaryRef)mAttrs,
                                       &pBuffer);
    // BUGFIX: the create result was ignored before; a failed allocation would
    // crash on the lock below.
    if (ret != kCVReturnSuccess || pBuffer == NULL) return nil;

    CVPixelBufferLockBaseAddress(pBuffer, 0);
    UInt8 *pPlanY = (UInt8 *)CVPixelBufferGetBaseAddressOfPlane(pBuffer, 0);
    UInt8 *pPlanUV = (UInt8 *)CVPixelBufferGetBaseAddressOfPlane(pBuffer, 1);
    // Rows may be padded for alignment, so do not use bufferWidth directly.
    // BUGFIX: these two calls previously referenced `pixelBuffer`, an
    // undeclared identifier (the local is `pBuffer`) — a compile error.
    size_t rowBytesY = CVPixelBufferGetBytesPerRowOfPlane(pBuffer, 0);
    size_t rowBytesUV = CVPixelBufferGetBytesPerRowOfPlane(pBuffer, 1);

    // Copy row by row because source pitch and destination row bytes can differ.
    for (int i = 0; i < pImg->i32Height; ++i)
    {
        memcpy(pPlanY + i*rowBytesY, pImg->ppu8Plane[0] + i*pImg->pi32Pitch[0], pImg->i32Width);
    }
    // CbCr plane has half the rows; each row carries width bytes of interleaved Cb/Cr.
    for (int i = 0; i < pImg->i32Height/2; ++i)
    {
        memcpy(pPlanUV + i*rowBytesUV, pImg->ppu8Plane[1] + i*pImg->pi32Pitch[1], pImg->i32Width);
    }
    CVPixelBufferUnlockBaseAddress(pBuffer, 0);

    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pBuffer];
    CIContext *context = [CIContext contextWithOptions:nil];
    CGImageRef myImage = [context createCGImage:ciImage
                                       fromRect:CGRectMake(0, 0,
                                                           CVPixelBufferGetWidth(pBuffer),
                                                           CVPixelBufferGetHeight(pBuffer))];
    UIImage *nImage = [UIImage imageWithCGImage:myImage];
    CGImageRelease(myImage);
    CVPixelBufferRelease(pBuffer);
    return nImage;
}
CIContext 对渲染图片的大小有限制, 可以用inputImageMaximumSize和outputImageMaximumSize查看最大值(4096*4096)
// Demonstrates filling an ASVLOFFSCREEN header (NV12) straight from a
// CVPixelBuffer's planes — no pixel data is copied.
// NOTE(review): the buffer is never locked here; CVPixelBufferGetBaseAddressOfPlane
// requires CVPixelBufferLockBaseAddress first — confirm the caller holds the lock.
// NOTE(review): fullSrcImage and eResult are local and discarded, so this
// method as written has no observable effect; it is example scaffolding.
- (void)createASVLImageFromCVPixelBufferRef:(CVPixelBufferRef)pixelBuffer
{
int width = CVPixelBufferGetWidth(pixelBuffer);
int height = CVPixelBufferGetHeight(pixelBuffer);
// Plane 0: Y samples, one byte per pixel (rows may be padded).
UInt8 *pPlanY = (UInt8 *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
size_t nPlanYRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
// Plane 1: interleaved CbCr samples at half vertical resolution.
UInt8 *pPlanUV = (UInt8 *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
size_t nPlanUVRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
MRESULT eResult = MOK; ASVLOFFSCREEN fullSrcImage;
memset(&fullSrcImage, 0, sizeof(ASVLOFFSCREEN));
fullSrcImage.i32Width =width;
fullSrcImage.i32Height =height;
fullSrcImage.u32PixelArrayFormat =ASVL_PAF_NV12;
fullSrcImage.pi32Pitch[0] =nPlanYRow;
fullSrcImage.ppu8Plane[0] =pPlanY;
fullSrcImage.pi32Pitch[1] =nPlanUVRow;
fullSrcImage.ppu8Plane[1] =pPlanUV;
}
/// Converts an NV12 ASVLOFFSCREEN to a UIImage by performing the YCbCr -> RGB
/// conversion on the CPU, then wrapping the RGBA buffer in a bitmap context.
- (UIImage *)imageFromASVLImage:(ASVLOFFSCREEN *)pImg
{
    uint8_t *rgbBuffer = malloc(pImg->i32Height * pImg->i32Width * 4);
    if (rgbBuffer == NULL) return nil;   // BUGFIX: malloc was unchecked

    uint8_t *yBuffer = pImg->ppu8Plane[0];
    uint8_t *cbCrBuffer = pImg->ppu8Plane[1];

    for (int row = 0; row < pImg->i32Height; row++)
    {
        uint8_t *rgbBufferLine = &rgbBuffer[row * pImg->i32Width * 4];
        uint8_t *yBufferLine = &yBuffer[row * pImg->pi32Pitch[0]];
        // CbCr plane is half vertical resolution: one CbCr row per 2 Y rows.
        uint8_t *cbCrBufferLine = &cbCrBuffer[(row >> 1) * pImg->pi32Pitch[1]];
        for (int x = 0; x < pImg->i32Width; x++)
        {
            // BUGFIX: the luma variable previously shadowed the outer loop
            // index `y`, which compiled but was a latent-bug magnet.
            uint8_t lum = yBufferLine[x];
            uint8_t cb = cbCrBufferLine[x & ~1];   // even byte of the pair: Cb
            uint8_t cr = cbCrBufferLine[x | 1];    // odd byte of the pair: Cr
            uint8_t *rgbOutput = &rgbBufferLine[x * 4];

            int r = lum + 1.4075 * (cr - 128);
            int g = lum - 0.3455 * (cb - 128) - 0.7169 * (cr - 128);
            int b = lum + 1.779 * (cb - 128);
            r = r < 0 ? 0 : (r > 255 ? 255 : r);
            g = g < 0 ? 0 : (g > 255 ? 255 : g);
            b = b < 0 ? 0 : (b > 255 ? 255 : b);
            rgbOutput[0] = r;
            rgbOutput[1] = g;
            rgbOutput[2] = b;
            rgbOutput[3] = 255;   // fully opaque
        }
    }

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    if (!colorSpace)
    {
        NSLog(@"CGColorSpaceCreateDeviceRGB failure");
        free(rgbBuffer);
        return nil;   // BUGFIX: previously continued with a NULL color space
    }
    CGContextRef context = CGBitmapContextCreate(rgbBuffer,
                                                 pImg->i32Width,
                                                 pImg->i32Height,
                                                 8,
                                                 pImg->i32Width * 4,
                                                 colorSpace,
                                                 kCGBitmapByteOrder32Big |
                                                     kCGImageAlphaPremultipliedLast);
    CGColorSpaceRelease(colorSpace);
    if (!context)
    {
        free(rgbBuffer);
        return nil;   // BUGFIX: NULL context was previously used anyway
    }

    CGImageRef cgiMage = CGBitmapContextCreateImage(context);
    CGContextRelease(context);
    free(rgbBuffer);   // snapshot above made its own copy

    UIImage *img = [UIImage imageWithCGImage:cgiMage];
    CGImageRelease(cgiMage);
    return img;
}
/// Renders the pixel buffer inside a CMSampleBuffer to a UIImage via Core Image.
- (UIImage *)imageFromCMSampleBuffer:(CMSampleBufferRef)sampleBufferRef
{
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBufferRef);
    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
    CIContext *context = [CIContext contextWithOptions:nil];
    CGImageRef myImage = [context createCGImage:ciImage
                                       fromRect:CGRectMake(0, 0,
                                                           CVPixelBufferGetWidth(pixelBuffer),
                                                           CVPixelBufferGetHeight(pixelBuffer))];
    UIImage *uiImage = [UIImage imageWithCGImage:myImage];
    // BUGFIX: createCGImage:fromRect: follows the Create rule; the returned
    // CGImage was never released before, leaking one image per call.
    CGImageRelease(myImage);
    return uiImage;
}
// Scales a BGRA camera frame to `finalSize` and wraps the result in a new
// CMSampleBuffer (written to *sampleBuffer; caller releases it).
// NOTE(review): assumes cameraFrame is 32-bit BGRA (non-planar) — the byte
// order/alpha flags below match that; confirm the capture session's format.
// NOTE(review): `stillImageDataReleaseCallback` is defined elsewhere and is
// assumed to free(imageData) when the pixel buffer is destroyed.
void resizeSampleBuffer(CVPixelBufferRef cameraFrame,
CGSize finalSize,
CMSampleBufferRef *sampleBuffer)
{
// CVPixelBufferCreateWithPlanarBytes for YUV input
CGSize originalSize = CGSizeMake(CVPixelBufferGetWidth(cameraFrame),
CVPixelBufferGetHeight(cameraFrame));
// Lock the source so its base address stays valid for the whole CG pipeline;
// the matching unlock is at the end, after all reads have finished.
CVPixelBufferLockBaseAddress(cameraFrame, 0);
GLubyte *sourceImageBytes = CVPixelBufferGetBaseAddress(cameraFrame);
// Wrap the locked bytes without copying (NULL release callback: no ownership).
CGDataProviderRef dataProvider = CGDataProviderCreateWithData(NULL, sourceImageBytes,
CVPixelBufferGetBytesPerRow(cameraFrame) * originalSize.height, NULL);
CGColorSpaceRef genericRGBColorspace = CGColorSpaceCreateDeviceRGB();
// View the camera bytes as a CGImage (BGRA little-endian, alpha first).
CGImageRef cgImageFromBytes = CGImageCreate((int)originalSize.width,
(int)originalSize.height,
8,
32,
CVPixelBufferGetBytesPerRow(cameraFrame),
genericRGBColorspace,
kCGBitmapByteOrder32Little |
kCGImageAlphaPremultipliedFirst,
dataProvider,
NULL,
NO,
kCGRenderingIntentDefault);
// Destination pixels; ownership passes to the pixel buffer's release callback below.
GLubyte *imageData = (GLubyte *) calloc(1, (int)finalSize.width * (int)finalSize.height * 4);
CGContextRef imageContext = CGBitmapContextCreate(imageData,
(int)finalSize.width,
(int)finalSize.height,
8,
(int)finalSize.width * 4,
genericRGBColorspace,
kCGBitmapByteOrder32Little |
kCGImageAlphaPremultipliedFirst);
// Drawing into the full target rect performs the actual scaling.
CGContextDrawImage(imageContext,
CGRectMake(0.0, 0.0, finalSize.width, finalSize.height),
cgImageFromBytes);
CGImageRelease(cgImageFromBytes);
CGContextRelease(imageContext);
CGColorSpaceRelease(genericRGBColorspace);
CGDataProviderRelease(dataProvider);
// Hand imageData to a new pixel buffer; the callback frees it on destruction.
CVPixelBufferRef pixel_buffer = NULL;
CVPixelBufferCreateWithBytes(kCFAllocatorDefault,
finalSize.width,
finalSize.height,
kCVPixelFormatType_32BGRA,
imageData,
finalSize.width * 4,
stillImageDataReleaseCallback, NULL, NULL, &pixel_buffer);
// Build the format description + timing the sample buffer needs.
CMVideoFormatDescriptionRef videoInfo = NULL;
CMVideoFormatDescriptionCreateForImageBuffer(NULL, pixel_buffer, &videoInfo);
CMTime frameTime = CMTimeMake(1, 30);
CMSampleTimingInfo timing = {frameTime, frameTime, kCMTimeInvalid};
CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault,
pixel_buffer,
YES,
NULL,
NULL,
videoInfo,
&timing,
sampleBuffer);
// Source reads are done; release the lock taken at the top.
CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
CFRelease(videoInfo);
// The sample buffer retains pixel_buffer; drop our reference.
CVPixelBufferRelease(pixel_buffer);
}