I'm trying to test the performance of converting the YUV images Vuforia produces into UIImage objects using the iOS Accelerate framework's vImage calls. In the current state of the code I'm just trying to get the conversion to work, but it yields a dark, striped image. Are there any published details about how Vuforia lays out the YUV data in its implementation? My initial assumption was that it uses the bi-planar 4:2:0 format (NV12) that iOS devices use, although the code below treats the buffer as three separate planes (I420).
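For reference, here is a minimal sketch contrasting the two candidate memory layouts (my own illustration, assuming 4:2:0 subsampling with no row padding; nothing here is documented Vuforia behavior):

// Planar I420 (what the test code below assumes):
//   [ Y: width*height ][ Cb: (width/2)*(height/2) ][ Cr: (width/2)*(height/2) ]
// Bi-planar NV12 (what iOS camera buffers typically deliver):
//   [ Y: width*height ][ CbCr interleaved: width*(height/2) ]
static const uint8_t *i420CbPlane(const uint8_t *base, int width, int height)
{
    return base + (size_t)width * height;                // Cb starts right after the luma plane
}

static const uint8_t *i420CrPlane(const uint8_t *base, int width, int height)
{
    return i420CbPlane(base, width, height) + (size_t)(width / 2) * (height / 2); // Cr follows Cb
}

The relevant test code follows.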
UIImage *imageWithQCARCameraImage(const QCAR::Image *cameraImage)
{
    UIImage *image = nil;
    if (cameraImage) {
        QCAR::PIXEL_FORMAT pixelFormat = cameraImage->getFormat();
        CGColorSpaceRef colorSpace = NULL;
        switch (pixelFormat) {
            case QCAR::YUV:
            case QCAR::RGB888:
                colorSpace = CGColorSpaceCreateDeviceRGB();
                break;
            case QCAR::GRAYSCALE:
                colorSpace = CGColorSpaceCreateDeviceGray();
                break;
            case QCAR::RGB565:
            case QCAR::RGBA8888:
            case QCAR::INDEXED:
                std::cerr << "Image format conversion not implemented." << std::endl;
                break;
            case QCAR::UNKNOWN_FORMAT:
                std::cerr << "Image format unknown." << std::endl;
                break;
        }
        int bitsPerComponent = 8;
        int width = cameraImage->getWidth();
        int height = cameraImage->getHeight();
        const void *baseAddress = cameraImage->getPixels();
        size_t totalBytes = QCAR::getBufferSize(width, height, pixelFormat);
        CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaNone;
        CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
        CGImageRef imageRef = NULL;
        if (pixelFormat == QCAR::YUV) {
            int bytesPerPixel = 4;
            uint8_t *imageData = (uint8_t *)malloc(width * height * bytesPerPixel); // destination ARGB storage
            uint8_t *sourceDataAddress = (uint8_t *)baseAddress;
            static vImage_Buffer srcYp = {
                .width = static_cast<vImagePixelCount>(width),
                .height = static_cast<vImagePixelCount>(height),
                .data = const_cast<void *>(baseAddress)
            };
            size_t lumaBytes = width * height;
            size_t chrominanceBytes = totalBytes - lumaBytes;
            static vImage_Buffer srcCb = {
                .data = static_cast<void *>(sourceDataAddress + lumaBytes)
            };
            static vImage_Buffer srcCr = {
                .data = static_cast<void *>(sourceDataAddress + lumaBytes + (chrominanceBytes / 2))
            };
            static vImage_Buffer dest = {
                .width = static_cast<vImagePixelCount>(width),
                .height = static_cast<vImagePixelCount>(height),
                .data = imageData
            };
            //uint8_t permuteMap[] = { 1, 2, 3, 0 };
            vImage_YpCbCrPixelRange pixelRange = (vImage_YpCbCrPixelRange){ 0, 128, 255, 255, 255, 1, 255, 0 }; // full-range 8-bit values
            vImage_YpCbCrToARGB info;
            vImage_Error error;
            error = vImageConvert_YpCbCrToARGB_GenerateConversion(kvImage_YpCbCrToARGBMatrix_ITU_R_601_4,
                                                                  &pixelRange,
                                                                  &info,
                                                                  kvImage420Yp8_Cb8_Cr8,
                                                                  kvImageARGB8888,
                                                                  kvImagePrintDiagnosticsToConsole);
            error = vImageConvert_420Yp8_Cb8_Cr8ToARGB8888(&srcYp,
                                                           &srcCb,
                                                           &srcCr,
                                                           &dest,
                                                           &info,
                                                           NULL,
                                                           1,
                                                           kvImageNoFlags);
            vImage_CGImageFormat format = {
                .bitsPerComponent = static_cast<uint32_t>(bitsPerComponent),
                .bitsPerPixel = static_cast<uint32_t>(3 * bitsPerComponent),
                .colorSpace = colorSpace,
                .bitmapInfo = bitmapInfo,
                .version = 0,
                .decode = NULL,
                .renderingIntent = renderingIntent
            };
            imageRef = vImageCreateCGImageFromBuffer(&dest,
                                                     &format,
                                                     NULL,
                                                     NULL,
                                                     kvImageNoFlags,
                                                     &error);
            if (error) {
                std::cerr << "Err." << std::endl;
            }
        } else {
            int bitsPerPixel = QCAR::getBitsPerPixel(pixelFormat);
            int bytesPerRow = cameraImage->getStride();
            CGDataProviderRef provider = CGDataProviderCreateWithData(NULL,
                                                                      baseAddress,
                                                                      totalBytes,
                                                                      NULL);
            imageRef = CGImageCreate(width,
                                     height,
                                     bitsPerComponent,
                                     bitsPerPixel,
                                     bytesPerRow,
                                     colorSpace,
                                     bitmapInfo,
                                     provider,
                                     NULL,
                                     false,
                                     renderingIntent);
            CGDataProviderRelease(provider);
        }
        if (imageRef != NULL) {
            image = [UIImage imageWithCGImage:imageRef];
            CGImageRelease(imageRef);
        }
        if (colorSpace != NULL) {
            CGColorSpaceRelease(colorSpace);
        }
    }
    return image;
}
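The reworked YUV branch below produces a correct image (it stands alone here, assuming buffer points at the frame's pixel data and width and height are the frame dimensions). The differences from the code above are the ones that matter: every source vImage_Buffer is fully initialized with width, height, and rowBytes instead of leaving those fields zeroed; the destination buffer is allocated by vImageBuffer_Init; the pixel range uses video-range (ITU-R BT.601) levels; and a permute map reorders the output to BGRA, for which the conversion requires a non-NULL map on iOS.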
void *baseAddress = buffer;
size_t totalBytes = width * height * 3 / 2; // planar 4:2:0: full-size Y plane plus quarter-size Cb and Cr planes
uint8_t *sourceDataAddress = (uint8_t *)baseAddress;
vImage_Buffer srcYp = {
    .width = static_cast<vImagePixelCount>(width),
    .height = static_cast<vImagePixelCount>(height),
    .rowBytes = static_cast<size_t>(width),
    .data = baseAddress,
};
size_t lumaBytes = width * height;
size_t chrominanceBytes = totalBytes - lumaBytes;
vImage_Buffer srcCb = {
    .width = static_cast<vImagePixelCount>(width) / 2,
    .height = static_cast<vImagePixelCount>(height) / 2,
    .rowBytes = static_cast<size_t>(width) / 2,
    .data = static_cast<void *>(sourceDataAddress + lumaBytes),
};
vImage_Buffer srcCr = {
    .width = static_cast<vImagePixelCount>(width) / 2,
    .height = static_cast<vImagePixelCount>(height) / 2,
    .rowBytes = static_cast<size_t>(width) / 2,
    .data = static_cast<void *>(sourceDataAddress + lumaBytes + (chrominanceBytes / 2)),
};
vImage_Buffer dest;
dest.data = NULL;
vImage_Error error = kvImageNoError;
error = vImageBuffer_Init(&dest, height, width, 32, kvImageNoFlags); // let vImage allocate the destination and choose rowBytes
// vImage_YpCbCrPixelRange pixelRange = (vImage_YpCbCrPixelRange){ 0, 128, 255, 255, 255, 1, 255, 0 }; // full range
vImage_YpCbCrPixelRange pixelRange = { 16, 128, 235, 240, 255, 0, 255, 0 }; // video range (ITU-R BT.601)
vImage_YpCbCrToARGB info;
error = kvImageNoError;
error = vImageConvert_YpCbCrToARGB_GenerateConversion(kvImage_YpCbCrToARGBMatrix_ITU_R_601_4,
                                                      &pixelRange,
                                                      &info,
                                                      kvImage420Yp8_Cb8_Cr8,
                                                      kvImageARGB8888,
                                                      kvImagePrintDiagnosticsToConsole);
error = kvImageNoError;
uint8_t permuteMap[4] = {3, 2, 1, 0}; // reorder ARGB -> BGRA; iOS only supports BGRA
error = vImageConvert_420Yp8_Cb8_Cr8ToARGB8888(&srcYp,
                                               &srcCb,
                                               &srcCr,
                                               &dest,
                                               &info,
                                               permuteMap, // must not be NULL on iOS (it may be on macOS)
                                               255,        // opaque alpha
                                               kvImageNoFlags);
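The snippet stops at the converted BGRA buffer. As a hypothetical continuation (not part of the original answer), the buffer can be wrapped in a CGImage the same way the question's code attempts, with two corrections: bitsPerPixel must be 32 for ARGB8888 output rather than 24, and the memory vImageBuffer_Init allocated must be freed once the image exists:

CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
vImage_CGImageFormat format = {
    .bitsPerComponent = 8,
    .bitsPerPixel = 32,                                                     // four 8-bit channels
    .colorSpace = rgbColorSpace,
    .bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipFirst,  // BGRX byte order in memory
    .version = 0,
    .decode = NULL,
    .renderingIntent = kCGRenderingIntentDefault
};
CGImageRef imageRef = vImageCreateCGImageFromBuffer(&dest, &format, NULL, NULL, kvImageNoFlags, &error);
UIImage *image = nil;
if (imageRef != NULL) {
    image = [UIImage imageWithCGImage:imageRef];
    CGImageRelease(imageRef);
}
CGColorSpaceRelease(rgbColorSpace);
free(dest.data); // safe: vImageCreateCGImageFromBuffer copies the pixels unless kvImageNoAllocate is passed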