Wow Blackmagic Raw SDK ++

I was wondering why a big chunk of the URSA manual was just APIs, and now I think I understand.

https://www.blackmagicdesign.com/developer/products/braw/sdk-and-software

Installed it for macOS. Went to “/Applications/Blackmagic RAW/Blackmagic RAW SDK/Mac”, took the “ExtractFrame.cpp” Xcode project, sprinkled in some Codex, and boom — now I have a command that extracts frames from Blackmagic RAW into side-by-side (SBS) images.

itunes@ituness-MacBook-Pro Desktop % for f in /Volumes/Expansion2/BLACKMAGIC/2026-02-february/*.braw; do [ -e "$f" ] || continue; ./ExtractFrame "$f"; done
Created /Users/itunes/Desktop/ExtractFrame_SBS_A001_02121815_C001_frame0_221414_20260323.jpg
Created /Users/itunes/Desktop/ExtractFrame_SBS_A001_02121821_C002_frame0_221420_20260323.jpg
Created /Users/itunes/Desktop/ExtractFrame_SBS_A001_02121941_C007_frame0_221426_20260323.jpg
Created /Users/itunes/Desktop/ExtractFrame_SBS_A001_02122019_C012_frame0_221432_20260323.jpg
Created /Users/itunes/Desktop/ExtractFrame_SBS_A001_02122025_C013_frame0_221439_20260323.jpg
itunes@ituness-MacBook-Pro Desktop % for f in /Volumes/Expansion2/BLACKMAGIC/2026-03-march/*.braw; do [ -e "$f" ] || continue; ./ExtractFrame "$f"; done   
Created /Users/itunes/Desktop/ExtractFrame_SBS_A002_03100815_C001_frame0_221540_20260323.jpg
Created /Users/itunes/Desktop/ExtractFrame_SBS_A002_03100900_C002_frame0_221546_20260323.jpg
Created /Users/itunes/Desktop/ExtractFrame_SBS_A002_03100904_C003_frame0_221553_20260323.jpg
Created /Users/itunes/Desktop/ExtractFrame_SBS_A002_03100925_C004_frame0_221559_20260323.jpg
Created /Users/itunes/Desktop/ExtractFrame_SBS_A002_03101036_C005_frame0_221605_20260323.jpg

And now you have the left and right frames exported from the .braw! Hooray!

Why do this? To investigate a suspected stereo issue.

#include "BlackmagicRawAPI.h"

#include <cerrno>
#include <cctype>
#include <cstring>
#include <cstdlib>
#include <ctime>
#include <iomanip>
#include <iostream>
#include <limits>
#include <mutex>
#include <pwd.h>
#include <string>
#include <unistd.h>
#include <vector>

#include <CoreServices/CoreServices.h>
#include <ImageIO/ImageIO.h>

#ifdef DEBUG
	#include <cassert>
	#define VERIFY(condition) assert(SUCCEEDED(condition))
#else
	#define VERIFY(condition) condition
#endif

// Decoded frames are requested as 8-bit interleaved RGBA.
static const BlackmagicRawResourceFormat s_resourceFormat = blackmagicRawResourceFormatRGBAU8;
// Default install location of the Blackmagic RAW SDK dynamic libraries on macOS.
static const CFStringRef s_blackmagicRawLibrariesPath = CFSTR("/Applications/Blackmagic RAW/Blackmagic RAW SDK/Mac/Libraries");

// Describes one supported output image encoding: the name accepted on the
// command line, the file extension written, the ImageIO uniform type
// identifier, and whether a lossy compression-quality property applies.
struct OutputFormat
{
	const char* cliName;        // name accepted on the command line (e.g. "png")
	const char* fileExtension;  // extension appended to output file names
	CFStringRef imageType;      // ImageIO UTI passed to CGImageDestinationCreateWithURL
	bool isLossy;               // true -> attach a lossy compression-quality property
};

// How the two decoded eyes are written out: combined into a single
// side-by-side image, or as two separate left/right files.
enum class LayoutMode
{
	SideBySide,
	SeparateEyes
};

// Identifies which eye of a stereo (immersive) clip an image belongs to.
enum class EyeIndex
{
	Left,
	Right
};

// Pixel payload captured for a single eye by the decode callback.
struct EyeImageData
{
	uint32_t width = 0;              // pixel width
	uint32_t height = 0;             // pixel height
	size_t rowBytes = 0;             // row stride in bytes (may exceed width * 4 when padded)
	std::vector<uint8_t> imageData;  // rowBytes * height bytes of RGBA data
	bool isReady = false;            // set once imageData has been populated
};

// Shared state for one extraction run. The SDK decode callbacks may run on
// other threads, so mutable members are guarded by `mutex`.
struct ExtractionState
{
	LayoutMode layoutMode = LayoutMode::SideBySide;   // requested output layout
	const OutputFormat* outputFormat = nullptr;       // selected encoder (points at a static descriptor)
	CFStringRef sideBySideOutputFileName = nullptr;   // output path for the combined SBS image
	CFStringRef leftOutputFileName = nullptr;         // output path for the separate left-eye image
	CFStringRef rightOutputFileName = nullptr;        // output path for the separate right-eye image
	EyeImageData leftEyeImage;                        // captured left-eye pixels
	EyeImageData rightEyeImage;                       // captured right-eye pixels
	bool hasError = false;                            // latched on the first reported failure
	std::string errorMessage;                         // description of the first failure
	std::mutex mutex;                                 // guards the members above during decoding
};

// Per-job user data attached to SDK read/decode jobs so completion callbacks
// can tell which eye a job belongs to and where to record results.
struct EyeJobData
{
	EyeIndex eye = EyeIndex::Left;               // which eye this job decodes
	ExtractionState* extractionState = nullptr;  // shared run state (non-owning)
};

// Result of command-line parsing; populated by ParseArguments.
struct CommandLineOptions
{
	const char* clipPath;              // path to the input .braw clip (first positional argument)
	const OutputFormat* outputFormat;  // selected output encoder descriptor
	LayoutMode layoutMode;             // side-by-side vs separate eye files
	uint64_t frameIndex;               // zero-based frame to extract
	bool showInfo;                     // true -> print clip info (--info)
};

// Outcome of ParseArguments: proceed with extraction, print usage and exit
// cleanly (--help), or fail with an error message.
enum class ParseArgumentsResult
{
	Success,
	Help,
	Error
};

// The supported output encoders. JPEG is the default and the only lossy one.
static const OutputFormat s_pngOutputFormat = { "png", "png", kUTTypePNG, false };
static const OutputFormat s_jpegOutputFormat = { "jpeg", "jpg", kUTTypeJPEG, true };
static const OutputFormat s_tiffOutputFormat = { "tiff", "tiff", kUTTypeTIFF, false };

// Print the supported invocation forms to the given stream.
static void PrintUsage(std::ostream& stream, const char* executableName)
{
	stream << "Usage:\n";
	stream << "  " << executableName << " clipName.braw [--frame frameIndex] [--layout sbs|separate] [--format png|jpeg|jpg|tiff]\n";
	stream << "  " << executableName << " clipName.braw [sbs|separate] [png|jpeg|jpg|tiff]\n";
	stream << "  " << executableName << " --info clipName.braw\n";
}

// Parse a strictly non-negative base-10 integer into *parsedValue.
// Returns false for null/empty input, leading whitespace, sign characters
// ('+'/'-'), trailing garbage, or values that overflow unsigned long long.
// Note: strtoull on its own skips leading whitespace and accepts a sign
// (silently wrapping negatives), so inputs such as " -5" would previously
// slip past the bare value[0] == '-' check; requiring the first character
// to be a digit closes that hole.
static bool ParseUInt64(const char* value, uint64_t* parsedValue)
{
	if (value == nullptr || parsedValue == nullptr)
		return false;

	// Reject anything strtoull would tolerate but we should not: whitespace,
	// '+', '-', or an empty string.
	if (! std::isdigit(static_cast<unsigned char>(value[0])))
		return false;

	char* end = nullptr;
	errno = 0;
	const unsigned long long parsedUnsignedLongLong = std::strtoull(value, &end, 10);
	if (errno != 0 || end == value || *end != '\0')
		return false;

	*parsedValue = static_cast<uint64_t>(parsedUnsignedLongLong);
	return true;
}

// Convert a CFString to a UTF-8 std::string; returns "" for null input or
// on conversion failure.
static std::string CFStringToUTF8String(CFStringRef value)
{
	if (value == nullptr)
		return std::string();

	// Fast path: some CFStrings expose an internal UTF-8 buffer directly.
	if (const char* internalBuffer = CFStringGetCStringPtr(value, kCFStringEncodingUTF8))
		return std::string(internalBuffer);

	// Slow path: copy into a worst-case sized temporary buffer.
	const CFIndex characterCount = CFStringGetLength(value);
	const CFIndex bufferSize = CFStringGetMaximumSizeForEncoding(characterCount, kCFStringEncodingUTF8) + 1;
	std::vector<char> conversionBuffer(static_cast<size_t>(bufferSize), '\0');
	if (! CFStringGetCString(value, conversionBuffer.data(), bufferSize, kCFStringEncodingUTF8))
		return std::string();

	return std::string(conversionBuffer.data());
}

// Resolve the current user's home directory: prefer $HOME, then fall back to
// the password-database entry. Returns "" when neither source yields a path.
static std::string GetHomeDirectoryPath()
{
	if (const char* homeFromEnvironment = std::getenv("HOME"))
	{
		if (homeFromEnvironment[0] != '\0')
			return std::string(homeFromEnvironment);
	}

	if (const passwd* passwordEntry = getpwuid(getuid()))
	{
		if (passwordEntry->pw_dir != nullptr && passwordEntry->pw_dir[0] != '\0')
			return std::string(passwordEntry->pw_dir);
	}

	return std::string();
}

// Reduce a clip path to its base name without directories or extension;
// falls back to "output" when the input is null or nothing usable remains.
static std::string GetClipStem(const char* clipPath)
{
	std::string stem = (clipPath != nullptr) ? std::string(clipPath) : std::string("output");

	// Drop any directory prefix (handles both POSIX and Windows separators).
	const std::string::size_type separatorPosition = stem.find_last_of("/\\");
	if (separatorPosition != std::string::npos)
		stem.erase(0, separatorPosition + 1);

	// Drop the trailing extension, if present.
	const std::string::size_type dotPosition = stem.find_last_of('.');
	if (dotPosition != std::string::npos)
		stem.erase(dotPosition);

	return stem.empty() ? std::string("output") : stem;
}

// Replace every character outside [alphanumeric '-' '_' '.'] with an
// underscore so the component can be embedded safely in a file name.
static void SanitizeFileNameComponent(std::string& fileNameComponent)
{
	for (std::string::size_type index = 0; index < fileNameComponent.size(); ++index)
	{
		const unsigned char candidate = static_cast<unsigned char>(fileNameComponent[index]);
		const bool isAllowed = std::isalnum(candidate) || candidate == '-' || candidate == '_' || candidate == '.';
		if (! isAllowed)
			fileNameComponent[index] = '_';
	}
}

// Format the current local time as "HHMMSS_YYYYMMDD" for use in output file
// names; returns "unknown_time" if the time cannot be obtained or formatted.
static std::string CreateTimeDateString()
{
	std::tm brokenDownTime = {};
	const std::time_t now = std::time(nullptr);
	if (localtime_r(&now, &brokenDownTime) == nullptr)
		return "unknown_time";

	char formatted[32] = {};
	const size_t written = std::strftime(formatted, sizeof(formatted), "%H%M%S_%Y%m%d", &brokenDownTime);
	return (written != 0) ? std::string(formatted) : std::string("unknown_time");
}

// Return a lower-cased copy of a C string; a null pointer maps to "".
static std::string ToLowerASCII(const char* value)
{
	if (value == nullptr)
		return std::string();

	std::string result(value);
	for (std::string::size_type index = 0; index < result.size(); ++index)
		result[index] = static_cast<char>(std::tolower(static_cast<unsigned char>(result[index])));

	return result;
}

// Lower-case eye name used in log and error messages.
static const char* GetEyeLabel(EyeIndex eye)
{
	switch (eye)
	{
		case EyeIndex::Left:
			return "left";
		default:
			return "right";
	}
}

// Upper-case eye tag embedded in output file names.
static const char* GetEyeOutputName(EyeIndex eye)
{
	switch (eye)
	{
		case EyeIndex::Left:
			return "LEFT";
		default:
			return "RIGHT";
	}
}

// Map a user-supplied layout name onto a LayoutMode. A null name selects the
// side-by-side default; unrecognized names return false and leave *layoutMode
// untouched.
static bool ParseLayoutMode(const char* layoutName, LayoutMode* layoutMode)
{
	if (layoutMode == nullptr)
		return false;

	if (layoutName == nullptr)
	{
		*layoutMode = LayoutMode::SideBySide;
		return true;
	}

	const std::string name = ToLowerASCII(layoutName);
	const bool isSideBySide = (name == "sbs") || (name == "side-by-side") || (name == "sidebyside");
	const bool isSeparate = (name == "separate") || (name == "eyes") || (name == "separate-eyes");

	if (! isSideBySide && ! isSeparate)
		return false;

	*layoutMode = isSideBySide ? LayoutMode::SideBySide : LayoutMode::SeparateEyes;
	return true;
}

// Resolve a format name to one of the static OutputFormat descriptors.
// A null name selects the JPEG default; unknown names yield nullptr.
static const OutputFormat* ParseOutputFormat(const char* formatName)
{
	if (formatName == nullptr)
		return &s_jpegOutputFormat;

	const std::string name = ToLowerASCII(formatName);
	if (name == "jpeg" || name == "jpg")
		return &s_jpegOutputFormat;
	if (name == "png")
		return &s_pngOutputFormat;
	if (name == "tiff" || name == "tif")
		return &s_tiffOutputFormat;

	return nullptr;
}

// Parse command-line arguments into *options.
// Supported forms: --help/-h, --info, two-token options (--frame N,
// --format NAME, --layout NAME), single-token "--opt=value" variants, plus
// positionals: the first non-option token is the clip path, and later
// positionals may name a format or a layout.
// Returns Success, Help (usage requested), or Error with *errorMessage set.
static ParseArgumentsResult ParseArguments(int argc, const char* argv[], CommandLineOptions* options, std::string* errorMessage)
{
	if (options == nullptr || errorMessage == nullptr)
		return ParseArgumentsResult::Error;

	// Defaults: frame 0 of the clip, written as a side-by-side JPEG.
	options->clipPath = nullptr;
	options->outputFormat = &s_jpegOutputFormat;
	options->layoutMode = LayoutMode::SideBySide;
	options->frameIndex = 0;
	options->showInfo = false;
	errorMessage->clear();

	for (int argumentIndex = 1; argumentIndex < argc; ++argumentIndex)
	{
		const std::string argument = argv[argumentIndex];

		if (argument == "--help" || argument == "-h")
			return ParseArgumentsResult::Help;

		if (argument == "--info")
		{
			options->showInfo = true;
			continue;
		}

		// Two-token options: the value is the next argv entry.
		if (argument == "--frame" || argument == "--format" || argument == "--layout")
		{
			if (argumentIndex + 1 >= argc)
			{
				*errorMessage = "Missing value for " + argument + ".";
				return ParseArgumentsResult::Error;
			}

			const char* value = argv[++argumentIndex];
			if (argument == "--frame")
			{
				if (! ParseUInt64(value, &options->frameIndex))
				{
					*errorMessage = "Invalid frame index: " + std::string(value) + ".";
					return ParseArgumentsResult::Error;
				}
			}
			else if (argument == "--format")
			{
				options->outputFormat = ParseOutputFormat(value);
				if (options->outputFormat == nullptr)
				{
					*errorMessage = "Unsupported output format: " + std::string(value) + ".";
					return ParseArgumentsResult::Error;
				}
			}
			else if (! ParseLayoutMode(value, &options->layoutMode))
			{
				*errorMessage = "Unsupported layout mode: " + std::string(value) + ".";
				return ParseArgumentsResult::Error;
			}
			continue;
		}

		// "--frame=N" variant; 8 == strlen("--frame=").
		if (argument.rfind("--frame=", 0) == 0)
		{
			const char* value = argument.c_str() + 8;
			if (! ParseUInt64(value, &options->frameIndex))
			{
				*errorMessage = "Invalid frame index: " + std::string(value) + ".";
				return ParseArgumentsResult::Error;
			}
			continue;
		}

		// "--format=NAME" variant; 9 == strlen("--format=").
		if (argument.rfind("--format=", 0) == 0)
		{
			const char* value = argument.c_str() + 9;
			options->outputFormat = ParseOutputFormat(value);
			if (options->outputFormat == nullptr)
			{
				*errorMessage = "Unsupported output format: " + std::string(value) + ".";
				return ParseArgumentsResult::Error;
			}
			continue;
		}

		// "--layout=NAME" variant; 9 == strlen("--layout=").
		if (argument.rfind("--layout=", 0) == 0)
		{
			const char* value = argument.c_str() + 9;
			if (! ParseLayoutMode(value, &options->layoutMode))
			{
				*errorMessage = "Unsupported layout mode: " + std::string(value) + ".";
				return ParseArgumentsResult::Error;
			}
			continue;
		}

		// The first positional argument is the clip path.
		if (options->clipPath == nullptr)
		{
			options->clipPath = argv[argumentIndex];
			continue;
		}

		// Any later positional may be a bare format name ("png") ...
		const OutputFormat* positionalOutputFormat = ParseOutputFormat(argument.c_str());
		if (positionalOutputFormat != nullptr)
		{
			options->outputFormat = positionalOutputFormat;
			continue;
		}

		// ... or a bare layout name ("sbs", "separate").
		LayoutMode positionalLayoutMode = LayoutMode::SideBySide;
		if (ParseLayoutMode(argument.c_str(), &positionalLayoutMode))
		{
			options->layoutMode = positionalLayoutMode;
			continue;
		}

		*errorMessage = "Unrecognized argument: " + argument + ".";
		return ParseArgumentsResult::Error;
	}

	if (options->clipPath == nullptr)
	{
		*errorMessage = "Missing clip path.";
		return ParseArgumentsResult::Error;
	}

	return ParseArgumentsResult::Success;
}

// Build "~/Desktop/ExtractFrame_<outputName>_<clipStem>_frame<N>_<timestamp>.<ext>"
// as a CFString the caller is responsible for releasing. Returns nullptr when
// the home directory cannot be resolved.
static CFStringRef CreateOutputFileName(const char* clipPath, const char* outputName, uint64_t frameIndex, const std::string& timeDateString, const OutputFormat& outputFormat)
{
	std::string homePath = GetHomeDirectoryPath();
	if (homePath.empty())
		return nullptr;

	if (homePath.back() != '/')
		homePath.push_back('/');

	std::string sanitizedStem = GetClipStem(clipPath);
	SanitizeFileNameComponent(sanitizedStem);

	const std::string fullPath = homePath + "Desktop/ExtractFrame_" + outputName + "_" + sanitizedStem
		+ "_frame" + std::to_string(frameIndex) + "_" + timeDateString + "." + outputFormat.fileExtension;
	return CFStringCreateWithCString(kCFAllocatorDefault, fullPath.c_str(), kCFStringEncodingUTF8);
}

// Print a human-readable summary of the clip to stdout: dimensions, frame
// rate, frame count, valid frame range, duration, and first/last timecodes.
// Fields that cannot be queried are silently omitted.
static void PrintClipInfo(const char* clipPath, IBlackmagicRawClip* clip)
{
	if (clipPath != nullptr)
		std::cout << "Clip: " << clipPath << std::endl;

	uint32_t width = 0;
	uint32_t height = 0;
	if (clip->GetWidth(&width) == S_OK && clip->GetHeight(&height) == S_OK)
		std::cout << "Dimensions: " << width << "x" << height << std::endl;

	float frameRate = 0.0f;
	const bool hasFrameRate = (clip->GetFrameRate(&frameRate) == S_OK);
	if (hasFrameRate)
	{
		// Save and restore cout's formatting state around the fixed-precision output.
		const std::streamsize originalPrecision = std::cout.precision();
		const std::ios::fmtflags originalFlags = std::cout.flags();
		std::cout << std::fixed << std::setprecision(3);
		std::cout << "Frame rate: " << frameRate << std::endl;
		std::cout.flags(originalFlags);
		std::cout.precision(originalPrecision);
	}

	uint64_t frameCount = 0;
	if (clip->GetFrameCount(&frameCount) == S_OK)
	{
		std::cout << "Frame count: " << frameCount << std::endl;
		if (frameCount > 0)
		{
			std::cout << "Valid frame range: 0-" << (frameCount - 1) << std::endl;

			if (hasFrameRate && frameRate > 0.0f)
			{
				// Duration derived from frame count / frame rate; formatting state
				// is again saved and restored around the fixed-precision output.
				const std::streamsize originalPrecision = std::cout.precision();
				const std::ios::fmtflags originalFlags = std::cout.flags();
				const double durationSeconds = static_cast<double>(frameCount) / static_cast<double>(frameRate);
				std::cout << std::fixed << std::setprecision(3);
				std::cout << "Duration: " << durationSeconds << " seconds" << std::endl;
				std::cout.flags(originalFlags);
				std::cout.precision(originalPrecision);
			}

			// Timecode strings are returned as owned CFStrings; release after use.
			CFStringRef firstTimecode = nullptr;
			if (clip->GetTimecodeForFrame(0, &firstTimecode) == S_OK && firstTimecode != nullptr)
			{
				std::cout << "First frame timecode: " << CFStringToUTF8String(firstTimecode) << std::endl;
				CFRelease(firstTimecode);
			}

			CFStringRef lastTimecode = nullptr;
			if (clip->GetTimecodeForFrame(frameCount - 1, &lastTimecode) == S_OK && lastTimecode != nullptr)
			{
				std::cout << "Last frame timecode: " << CFStringToUTF8String(lastTimecode) << std::endl;
				CFRelease(lastTimecode);
			}
		}
	}
}

// JPEG output is detected by its ImageIO type identifier rather than by name,
// so both "jpeg" and "jpg" spellings match.
static bool IsJPEGOutputFormat(const OutputFormat& outputFormat)
{
	const bool isJPEG = CFEqual(outputFormat.imageType, kUTTypeJPEG);
	return isJPEG;
}

// Compute the packed row stride (width * 4) for 8-bit RGBA pixels.
// Returns false for a null output pointer, zero width, or size_t overflow.
static bool ComputeRGBABytesPerRow(uint32_t width, size_t* rowBytes)
{
	constexpr size_t kBytesPerPixel = 4U;

	if (rowBytes == nullptr || width == 0)
		return false;

	const size_t widthAsSizeT = static_cast<size_t>(width);
	if (widthAsSizeT > std::numeric_limits<size_t>::max() / kBytesPerPixel)
		return false;

	*rowBytes = widthAsSizeT * kBytesPerPixel;
	return true;
}

// Compute the total buffer size (rowBytes * height) with overflow checking.
// Returns false for a null output pointer, zero inputs, or size_t overflow.
static bool ComputeImageSizeFromRowBytes(size_t rowBytes, uint32_t height, size_t* sizeBytes)
{
	if (sizeBytes == nullptr || rowBytes == 0 || height == 0)
		return false;

	const size_t rowCount = static_cast<size_t>(height);
	if (rowCount > std::numeric_limits<size_t>::max() / rowBytes)
		return false;

	*sizeBytes = rowCount * rowBytes;
	return true;
}

// Total packed RGBA buffer size (width * 4 * height), overflow-checked.
static bool ComputeRGBAImageSizeBytes(uint32_t width, uint32_t height, size_t* sizeBytes)
{
	size_t packedRowBytes = 0;
	return ComputeRGBABytesPerRow(width, &packedRowBytes)
		&& ComputeImageSizeFromRowBytes(packedRowBytes, height, sizeBytes);
}

// Determine the row stride and copyable byte count of a decoded RGBA buffer.
// If sizeBytes divides evenly into rows at least as wide as the packed stride,
// the buffer is treated as row-padded and copied whole; otherwise any extra
// trailing bytes are ignored and a tightly packed layout is assumed.
static bool ComputeCapturedImageLayout(uint32_t width, uint32_t height, size_t sizeBytes, size_t* rowBytes, size_t* copySizeBytes)
{
	if (rowBytes == nullptr || copySizeBytes == nullptr || width == 0 || height == 0 || sizeBytes == 0)
		return false;

	size_t packedRowBytes = 0;
	size_t packedSizeBytes = 0;
	const bool packedLayoutValid = ComputeRGBABytesPerRow(width, &packedRowBytes)
		&& ComputeRGBAImageSizeBytes(width, height, &packedSizeBytes);
	if (! packedLayoutValid || sizeBytes < packedSizeBytes)
		return false;

	const size_t rowCount = static_cast<size_t>(height);
	const bool dividesEvenly = ((sizeBytes % rowCount) == 0);
	if (dividesEvenly && (sizeBytes / rowCount) >= packedRowBytes)
	{
		// Row-padded interpretation: every row carries the same stride.
		*rowBytes = sizeBytes / rowCount;
		*copySizeBytes = sizeBytes;
	}
	else
	{
		// Some clips appear to expose extra trailing bytes without padding every
		// row; fall back to a tightly packed RGBA interpretation in that case.
		*rowBytes = packedRowBytes;
		*copySizeBytes = packedSizeBytes;
	}

	return true;
}

// Record the first error on the shared state; subsequent errors are dropped.
// Without a state object, the message is written straight to stderr.
static void SetExtractionError(ExtractionState* extractionState, const std::string& message)
{
	if (extractionState == nullptr)
	{
		std::cerr << message << std::endl;
		return;
	}

	std::lock_guard<std::mutex> stateLock(extractionState->mutex);
	if (extractionState->hasError)
		return;

	extractionState->hasError = true;
	extractionState->errorMessage = message;
}

// Retrieve the EyeJobData pointer stashed on a job via SetUserData;
// returns nullptr when the job, the call, or the user data is absent.
static EyeJobData* GetEyeJobData(IBlackmagicRawJob* job)
{
	void* userData = nullptr;
	if (job == nullptr || job->GetUserData(&userData) != S_OK)
		return nullptr;

	return (userData != nullptr) ? static_cast<EyeJobData*>(userData) : nullptr;
}

// Compose "<action> the [<eye> eye] image." with fallbacks for null inputs.
static std::string CreateEyeErrorMessage(const char* action, const EyeJobData* eyeJobData)
{
	std::string message = (action != nullptr) ? action : "Failed to process";
	if (eyeJobData == nullptr)
	{
		message += " the extracted image.";
	}
	else
	{
		message += " the ";
		message += GetEyeLabel(eyeJobData->eye);
		message += " eye image.";
	}

	return message;
}

static std::string CreateEyeCaptureFailureMessage(const EyeJobData* eyeJobData, uint32_t width, uint32_t height, size_t sizeBytes)
{
	const std::string eyeLabel = (eyeJobData != nullptr) ? std::string(GetEyeLabel(eyeJobData->eye)) : std::string("unknown");
	return "Failed to capture the " + eyeLabel + " eye image. width=" + std::to_string(width) + ", height=" + std::to_string(height) + ", sizeBytes=" + std::to_string(sizeBytes) + ".";
}

// Copy one decoded eye's pixels into the shared state under the lock.
// The stride and copy size are derived from the reported buffer layout;
// returns false when the inputs are null or the layout is implausible.
static bool CaptureEyeImage(ExtractionState* extractionState, EyeIndex eye, uint32_t width, uint32_t height, size_t sizeBytes, const void* imageData)
{
	if (extractionState == nullptr || imageData == nullptr)
		return false;

	size_t resolvedRowBytes = 0;
	size_t resolvedCopyBytes = 0;
	if (! ComputeCapturedImageLayout(width, height, sizeBytes, &resolvedRowBytes, &resolvedCopyBytes))
		return false;

	const uint8_t* sourceBytes = static_cast<const uint8_t*>(imageData);

	std::lock_guard<std::mutex> stateLock(extractionState->mutex);
	EyeImageData& destination = (eye == EyeIndex::Left) ? extractionState->leftEyeImage : extractionState->rightEyeImage;
	destination.width = width;
	destination.height = height;
	destination.rowBytes = resolvedRowBytes;
	destination.imageData.assign(sourceBytes, sourceBytes + resolvedCopyBytes);
	destination.isReady = true;
	return true;
}

// Verify a captured eye image is present and internally consistent: it was
// captured, its stride is wide enough for its width, and its buffer size
// matches stride * height. On failure, optionally reports a readable reason.
static bool ValidateEyeImage(const EyeImageData& eyeImage, const char* eyeLabel, std::string* errorMessage)
{
	const auto reportFailure = [&](const char* reason) {
		if (errorMessage != nullptr)
			*errorMessage = "The " + std::string(eyeLabel) + reason;
		return false;
	};

	if (! eyeImage.isReady)
		return reportFailure(" eye image was not captured.");

	size_t requiredSizeBytes = 0;
	if (! ComputeImageSizeFromRowBytes(eyeImage.rowBytes, eyeImage.height, &requiredSizeBytes))
		return reportFailure(" eye image dimensions are invalid.");

	size_t packedRowBytes = 0;
	if (! ComputeRGBABytesPerRow(eyeImage.width, &packedRowBytes) || eyeImage.rowBytes < packedRowBytes)
		return reportFailure(" eye image row stride is invalid.");

	if (eyeImage.imageData.size() != requiredSizeBytes)
		return reportFailure(" eye image buffer size does not match its row stride.");

	return true;
}

// Encode an 8-bit RGBA buffer to the given file path using CoreGraphics +
// ImageIO. Logs "Created <path>" on success or "Failed to create <path>!" on
// any failure. The pixel buffer is not copied: the data provider wraps it
// directly (null release callback), so imageData must stay valid until this
// function returns.
static bool OutputImage(CFStringRef outputFileName, const OutputFormat& outputFormat, uint32_t width, uint32_t height, size_t bytesPerRow, size_t sizeBytes, const void* imageData)
{
	bool success = false;
	const std::string outputFileNameAsString = CFStringToUTF8String(outputFileName);

	CFURLRef file = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, outputFileName, kCFURLPOSIXPathStyle, false);
	if (file != nullptr)
	{
		const size_t bitsPerComponent	= 8;
		const size_t bitsPerPixel		= 32;

		CGColorSpaceRef space			= CGColorSpaceCreateWithName(kCGColorSpaceSRGB);
		// NoneSkipLast: the 4th byte of each RGBA pixel is ignored as padding.
		CGBitmapInfo bitmapInfo			= kCGImageAlphaNoneSkipLast | kCGImageByteOrderDefault;
		// Wraps imageData without copying; nullptr release callback means the
		// provider does not take ownership of the buffer.
		CGDataProviderRef provider		= CGDataProviderCreateWithData(nullptr, imageData, sizeBytes, nullptr);
		const CGFloat* decode			= nullptr;
		bool shouldInterpolate			= false;
		CGColorRenderingIntent intent	= kCGRenderingIntentDefault;

		if (space != nullptr && provider != nullptr)
		{
			CGImageRef imageRef = CGImageCreate(width, height, bitsPerComponent, bitsPerPixel, bytesPerRow, space, bitmapInfo, provider, decode, shouldInterpolate, intent);
			if (imageRef != nullptr)
			{
				CGImageDestinationRef destination = CGImageDestinationCreateWithURL(file, outputFormat.imageType, 1, nullptr);
				if (destination)
				{
					CFDictionaryRef imageProperties = nullptr;
					CFNumberRef compressionQualityNumber = nullptr;

					// Lossy formats (JPEG) get an explicit compression quality.
					if (outputFormat.isLossy)
					{
						const float compressionQuality = 0.92f;
						compressionQualityNumber = CFNumberCreate(kCFAllocatorDefault, kCFNumberFloatType, &compressionQuality);
						if (compressionQualityNumber != nullptr)
						{
							const void* propertyKeys[] = { kCGImageDestinationLossyCompressionQuality };
							const void* propertyValues[] = { compressionQualityNumber };
							imageProperties = CFDictionaryCreate(kCFAllocatorDefault, propertyKeys, propertyValues, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
						}
					}

					CGImageDestinationAddImage(destination, imageRef, imageProperties);
					// Finalize actually writes the file; its result decides success.
					success = CGImageDestinationFinalize(destination);

					if (imageProperties != nullptr)
						CFRelease(imageProperties);

					if (compressionQualityNumber != nullptr)
						CFRelease(compressionQualityNumber);

					CFRelease(destination);

					if (success)
						std::cout << "Created " << outputFileNameAsString << std::endl;
				}

				CGImageRelease(imageRef);
			}
		}

		// Release everything created above regardless of which step failed.
		if (provider != nullptr)
			CGDataProviderRelease(provider);

		if (space != nullptr)
			CGColorSpaceRelease(space);

		CFRelease(file);
	}

	if (! success)
		std::cerr << "Failed to create " << outputFileNameAsString << "!" << std::endl;

	return success;
}

// Write the captured eye images to disk according to the requested layout.
// Validates both eyes first, then either writes two separate files or
// composes a single side-by-side image (left eye on the left). Returns false
// on any validation or encoding failure, printing the reason to stderr.
// Expected to be called after all decode jobs have completed, so the state
// is read without taking the mutex.
static bool WriteCapturedOutputs(const ExtractionState& extractionState)
{
	if (extractionState.outputFormat == nullptr)
	{
		std::cerr << "Failed to resolve output format!" << std::endl;
		return false;
	}

	// An error latched by the callbacks aborts the write entirely.
	if (extractionState.hasError)
	{
		if (! extractionState.errorMessage.empty())
			std::cerr << extractionState.errorMessage << std::endl;
		return false;
	}

	std::string validationError;
	if (! ValidateEyeImage(extractionState.leftEyeImage, "left", &validationError) || ! ValidateEyeImage(extractionState.rightEyeImage, "right", &validationError))
	{
		std::cerr << validationError << std::endl;
		return false;
	}

	const EyeImageData& leftEyeImage = extractionState.leftEyeImage;
	const EyeImageData& rightEyeImage = extractionState.rightEyeImage;
	if (leftEyeImage.width != rightEyeImage.width || leftEyeImage.height != rightEyeImage.height)
	{
		std::cerr << "Left and right eye images have different dimensions." << std::endl;
		return false;
	}

	// Separate-eyes layout: write both files; both must succeed. Note that the
	// right eye is still attempted even if the left fails, so the user gets
	// whatever output is salvageable.
	if (extractionState.layoutMode == LayoutMode::SeparateEyes)
	{
		if (extractionState.leftOutputFileName == nullptr || extractionState.rightOutputFileName == nullptr)
		{
			std::cerr << "Failed to resolve separate-eye output file names!" << std::endl;
			return false;
		}

		const bool leftSuccess = OutputImage(extractionState.leftOutputFileName, *extractionState.outputFormat, leftEyeImage.width, leftEyeImage.height, leftEyeImage.rowBytes, leftEyeImage.imageData.size(), leftEyeImage.imageData.data());
		const bool rightSuccess = OutputImage(extractionState.rightOutputFileName, *extractionState.outputFormat, rightEyeImage.width, rightEyeImage.height, rightEyeImage.rowBytes, rightEyeImage.imageData.size(), rightEyeImage.imageData.data());
		return leftSuccess && rightSuccess;
	}

	// Side-by-side layout from here on.
	if (extractionState.sideBySideOutputFileName == nullptr)
	{
		std::cerr << "Failed to resolve the side-by-side output file name!" << std::endl;
		return false;
	}

	// Guard the uint32_t width addition against overflow.
	if (rightEyeImage.width > (std::numeric_limits<uint32_t>::max() - leftEyeImage.width))
	{
		std::cerr << "Side-by-side output width exceeds supported image dimensions." << std::endl;
		return false;
	}

	const uint32_t combinedWidth = leftEyeImage.width + rightEyeImage.width;
	const uint32_t combinedHeight = leftEyeImage.height;
	// The JPEG format caps each axis at 65535 pixels.
	if (IsJPEGOutputFormat(*extractionState.outputFormat) && (combinedWidth > 65535U || combinedHeight > 65535U))
	{
		std::cerr << "Side-by-side JPEG dimensions exceed the 65535-pixel limit per axis." << std::endl;
		return false;
	}

	size_t combinedSizeBytes = 0;
	if (! ComputeRGBAImageSizeBytes(combinedWidth, combinedHeight, &combinedSizeBytes))
	{
		std::cerr << "Side-by-side output size exceeds supported memory limits." << std::endl;
		return false;
	}

	std::vector<uint8_t> combinedImage(combinedSizeBytes);
	size_t eyePixelBytesPerRow = 0;
	size_t combinedBytesPerRow = 0;
	if (! ComputeRGBABytesPerRow(leftEyeImage.width, &eyePixelBytesPerRow) || ! ComputeRGBABytesPerRow(combinedWidth, &combinedBytesPerRow))
	{
		std::cerr << "Side-by-side output row stride exceeds supported memory limits." << std::endl;
		return false;
	}

	// Compose row by row: left eye pixels, then right eye pixels. Only the
	// packed pixel bytes are copied, so any per-eye row padding is dropped.
	for (uint32_t row = 0; row < combinedHeight; ++row)
	{
		uint8_t* combinedRow = combinedImage.data() + (static_cast<size_t>(row) * combinedBytesPerRow);
		const uint8_t* leftRow = leftEyeImage.imageData.data() + (static_cast<size_t>(row) * leftEyeImage.rowBytes);
		const uint8_t* rightRow = rightEyeImage.imageData.data() + (static_cast<size_t>(row) * rightEyeImage.rowBytes);
		std::memcpy(combinedRow, leftRow, eyePixelBytesPerRow);
		std::memcpy(combinedRow + eyePixelBytesPerRow, rightRow, eyePixelBytesPerRow);
	}

	return OutputImage(extractionState.sideBySideOutputFileName, *extractionState.outputFormat, combinedWidth, combinedHeight, combinedBytesPerRow, combinedImage.size(), combinedImage.data());
}

// SDK completion callback that chains read -> decode/process -> capture for
// one eye per job. Errors at any stage are latched onto the shared
// ExtractionState via SetExtractionError. COM-style reference counting is
// deliberately not implemented (AddRef/Release return 0) — presumably the
// instance outlives all jobs; confirm against the owning code.
class CameraCodecCallback : public IBlackmagicRawCallback
{
public:
	explicit CameraCodecCallback() = default;
	virtual ~CameraCodecCallback() = default;

	// Called when a frame read finishes. On success, configures the frame's
	// resource format, creates the decode+process job, forwards the per-eye
	// user data to it, and submits it. The read job is always released.
	virtual void ReadComplete(IBlackmagicRawJob* readJob, HRESULT result, IBlackmagicRawFrame* frame)
	{
		EyeJobData* eyeJobData = GetEyeJobData(readJob);
		IBlackmagicRawJob* decodeAndProcessJob = nullptr;

		if (result == S_OK)
			result = (frame != nullptr) ? frame->SetResourceFormat(s_resourceFormat) : E_FAIL;

		if (result == S_OK)
			result = frame->CreateJobDecodeAndProcessFrame(nullptr, nullptr, &decodeAndProcessJob);

		// Propagate the eye tag so ProcessComplete knows which eye this is.
		if (result == S_OK)
		{
			if (eyeJobData != nullptr)
				result = decodeAndProcessJob->SetUserData(eyeJobData);
			else
				result = E_FAIL;
		}

		if (result == S_OK)
			result = decodeAndProcessJob->Submit();

		// On any failure, release the unsubmitted job and latch the error.
		if (result != S_OK)
		{
			if (decodeAndProcessJob)
				decodeAndProcessJob->Release();

			SetExtractionError(eyeJobData != nullptr ? eyeJobData->extractionState : nullptr, CreateEyeErrorMessage("Failed to decode and process", eyeJobData));
		}

		readJob->Release();
	}

	// Called when decode+process finishes. Queries the processed image's
	// dimensions and pixel buffer and copies it into the shared state for the
	// job's eye. The job is always released.
	virtual void ProcessComplete(IBlackmagicRawJob* job, HRESULT result, IBlackmagicRawProcessedImage* processedImage)
	{
		EyeJobData* eyeJobData = GetEyeJobData(job);
		unsigned int width = 0;
		unsigned int height = 0;
		unsigned int sizeBytes = 0;
		void* imageData = nullptr;

		if (result == S_OK)
			result = (processedImage != nullptr) ? processedImage->GetWidth(&width) : E_FAIL;

		if (result == S_OK)
			result = processedImage->GetHeight(&height);

		if (result == S_OK)
			result = processedImage->GetResourceSizeBytes(&sizeBytes);

		if (result == S_OK)
			result = processedImage->GetResource(&imageData);

		// Copy the pixels out before the SDK reclaims the resource.
		if (result == S_OK && eyeJobData != nullptr)
		{
			if (! CaptureEyeImage(eyeJobData->extractionState, eyeJobData->eye, width, height, sizeBytes, imageData))
				result = E_FAIL;
		}
		else if (result == S_OK)
			result = E_FAIL;

		if (result != S_OK)
			SetExtractionError(eyeJobData != nullptr ? eyeJobData->extractionState : nullptr, CreateEyeCaptureFailureMessage(eyeJobData, width, height, sizeBytes));

		job->Release();
	}

	// Unused IBlackmagicRawCallback notifications.
	virtual void DecodeComplete(IBlackmagicRawJob*, HRESULT) {}
	virtual void TrimProgress(IBlackmagicRawJob*, float) {}
	virtual void TrimComplete(IBlackmagicRawJob*, HRESULT) {}
	virtual void SidecarMetadataParseWarning(IBlackmagicRawClip*, CFStringRef, uint32_t, CFStringRef) {}
	virtual void SidecarMetadataParseError(IBlackmagicRawClip*, CFStringRef, uint32_t, CFStringRef) {}
	virtual void PreparePipelineComplete(void*, HRESULT) {}

	// Minimal COM plumbing: interface queries are unsupported and reference
	// counts are not tracked.
	virtual HRESULT STDMETHODCALLTYPE QueryInterface(REFIID, LPVOID*)
	{
		return E_NOTIMPL;
	}

	virtual ULONG STDMETHODCALLTYPE AddRef(void)
	{
		return 0;
	}

	virtual ULONG STDMETHODCALLTYPE Release(void)
	{
		return 0;
	}
};

// Extracts the left and right eye of one frame from an immersive Blackmagic
// RAW clip and writes either a single side-by-side image or two per-eye files
// (chosen by --layout). Returns 0 on success, 1 on any failure.
//
// Fixes vs. previous revision:
//  - factory-creation failure used to leave result at S_OK, so main() exited
//    with status 0 despite printing an error; it now fails properly.
//  - the exit code is normalized to 0/1 instead of returning a raw HRESULT
//    (whose low byte could collide with arbitrary shell statuses).
int main(int argc, const char* argv[])
{
	CommandLineOptions options = {};
	std::string argumentError;
	const ParseArgumentsResult parseArgumentsResult = ParseArguments(argc, argv, &options, &argumentError);
	if (parseArgumentsResult == ParseArgumentsResult::Help)
	{
		PrintUsage(std::cout, argv[0]);
		return 0;
	}

	if (parseArgumentsResult != ParseArgumentsResult::Success)
	{
		if (! argumentError.empty())
			std::cerr << argumentError << std::endl;
		PrintUsage(std::cerr, argv[0]);
		return 1;
	}

	CFStringRef clipName = CFStringCreateWithCString(NULL, options.clipPath, kCFStringEncodingUTF8);
	if (clipName == nullptr)
	{
		std::cerr << "Failed to create clip path." << std::endl;
		return 1;
	}

	HRESULT result = S_OK;

	// SDK objects and CF strings acquired inside the do/while(0) block below;
	// everything is released at the single cleanup point after the loop.
	IBlackmagicRawFactory* factory = nullptr;
	IBlackmagicRaw* codec = nullptr;
	IBlackmagicRawClip* clip = nullptr;
	IBlackmagicRawClipImmersiveVideo* immersiveClip = nullptr;
	IBlackmagicRawJob* leftReadJob = nullptr;
	IBlackmagicRawJob* rightReadJob = nullptr;
	CFStringRef sideBySideOutputFileName = nullptr;
	CFStringRef leftOutputFileName = nullptr;
	CFStringRef rightOutputFileName = nullptr;

	ExtractionState extractionState = {};
	extractionState.layoutMode = options.layoutMode;
	extractionState.outputFormat = options.outputFormat;

	// Per-eye user data attached to the read jobs; the codec callback uses it
	// to route each decoded frame into extractionState. Lives on this stack
	// frame, so jobs must be flushed before main() returns.
	EyeJobData leftEyeJobData = { EyeIndex::Left, &extractionState };
	EyeJobData rightEyeJobData = { EyeIndex::Right, &extractionState };

	CameraCodecCallback callback;
	bool hasSubmittedJobs = false;

	do
	{
		factory = CreateBlackmagicRawFactoryInstanceFromPath(s_blackmagicRawLibrariesPath);
		if (factory == nullptr)
		{
			std::cerr << "Failed to create IBlackmagicRawFactory!" << std::endl;
			result = E_FAIL;	// previously left at S_OK: main() reported success on this failure
			break;
		}

		result = factory->CreateCodec(&codec);
		if (result != S_OK)
		{
			std::cerr << "Failed to create IBlackmagicRaw!" << std::endl;
			break;
		}

		result = codec->OpenClip(clipName, &clip);
		if (result != S_OK)
		{
			std::cerr << "Failed to open IBlackmagicRawClip!" << std::endl;
			break;
		}

		// --info mode: print clip details and exit successfully.
		if (options.showInfo)
		{
			PrintClipInfo(options.clipPath, clip);
			break;
		}

		uint64_t frameCount = 0;
		result = clip->GetFrameCount(&frameCount);
		if (result != S_OK)
		{
			std::cerr << "Failed to query frame count!" << std::endl;
			break;
		}

		if (frameCount == 0)
		{
			std::cerr << "Clip contains no frames." << std::endl;
			result = E_FAIL;
			break;
		}

		if (options.frameIndex >= frameCount)
		{
			std::cerr << "Requested frame index " << options.frameIndex << " is out of range. Valid range: 0-" << (frameCount - 1) << "." << std::endl;
			result = E_INVALIDARG;
			break;
		}

		result = codec->SetCallback(&callback);
		if (result != S_OK)
		{
			std::cerr << "Failed to set IBlackmagicRawCallback!" << std::endl;
			break;
		}

		// Per-eye read jobs require the immersive-video clip interface.
		result = clip->QueryInterface(IID_IBlackmagicRawClipImmersiveVideo, reinterpret_cast<void**>(&immersiveClip));
		if (result != S_OK || immersiveClip == nullptr)
		{
			std::cerr << "Clip does not expose IBlackmagicRawClipImmersiveVideo!" << std::endl;
			break;
		}

		// Timestamp is computed once so all output files of a run share it.
		const std::string timeDateString = CreateTimeDateString();

		if (options.layoutMode == LayoutMode::SideBySide)
		{
			sideBySideOutputFileName = CreateOutputFileName(options.clipPath, "SBS", options.frameIndex, timeDateString, *options.outputFormat);
			if (sideBySideOutputFileName == nullptr)
			{
				std::cerr << "Failed to create side-by-side output file path." << std::endl;
				result = E_FAIL;
				break;
			}

			extractionState.sideBySideOutputFileName = sideBySideOutputFileName;
		}
		else
		{
			leftOutputFileName = CreateOutputFileName(options.clipPath, GetEyeOutputName(EyeIndex::Left), options.frameIndex, timeDateString, *options.outputFormat);
			if (leftOutputFileName == nullptr)
			{
				std::cerr << "Failed to create left output file path." << std::endl;
				result = E_FAIL;
				break;
			}

			rightOutputFileName = CreateOutputFileName(options.clipPath, GetEyeOutputName(EyeIndex::Right), options.frameIndex, timeDateString, *options.outputFormat);
			if (rightOutputFileName == nullptr)
			{
				std::cerr << "Failed to create right output file path." << std::endl;
				result = E_FAIL;
				break;
			}

			extractionState.leftOutputFileName = leftOutputFileName;
			extractionState.rightOutputFileName = rightOutputFileName;
		}

		// Left eye: create, tag with user data, submit. After a successful
		// Submit the SDK owns the job, so the local pointer is cleared.
		result = immersiveClip->CreateJobImmersiveReadFrame(blackmagicRawImmersiveVideoTrackLeft, options.frameIndex, &leftReadJob);
		if (result != S_OK)
		{
			std::cerr << "Failed to create left immersive read job!" << std::endl;
			break;
		}

		result = leftReadJob->SetUserData(&leftEyeJobData);
		if (result != S_OK)
		{
			std::cerr << "Failed to attach left eye job data!" << std::endl;
			break;
		}

		result = leftReadJob->Submit();
		if (result != S_OK)
		{
			std::cerr << "Failed to submit left immersive read job!" << std::endl;
			break;
		}
		leftReadJob = nullptr;
		hasSubmittedJobs = true;

		// Right eye: same sequence.
		result = immersiveClip->CreateJobImmersiveReadFrame(blackmagicRawImmersiveVideoTrackRight, options.frameIndex, &rightReadJob);
		if (result != S_OK)
		{
			std::cerr << "Failed to create right immersive read job!" << std::endl;
			break;
		}

		result = rightReadJob->SetUserData(&rightEyeJobData);
		if (result != S_OK)
		{
			std::cerr << "Failed to attach right eye job data!" << std::endl;
			break;
		}

		result = rightReadJob->Submit();
		if (result != S_OK)
		{
			std::cerr << "Failed to submit right immersive read job!" << std::endl;
			break;
		}
		rightReadJob = nullptr;

		// Block until both eyes have been decoded and captured.
		codec->FlushJobs();
		hasSubmittedJobs = false;

		if (! WriteCapturedOutputs(extractionState))
		{
			result = E_FAIL;
			break;
		}

	} while(0);

	// If we bailed out after submitting a job but before flushing, wait for it
	// here so callbacks cannot fire against state destroyed on this frame.
	if (hasSubmittedJobs && codec != nullptr)
		codec->FlushJobs();

	if (leftReadJob != nullptr)
		leftReadJob->Release();

	if (rightReadJob != nullptr)
		rightReadJob->Release();

	if (immersiveClip != nullptr)
		immersiveClip->Release();

	if (clip != nullptr)
		clip->Release();

	if (codec != nullptr)
		codec->Release();

	if (factory != nullptr)
		factory->Release();

	if (sideBySideOutputFileName != nullptr)
		CFRelease(sideBySideOutputFileName);

	if (rightOutputFileName != nullptr)
		CFRelease(rightOutputFileName);

	if (leftOutputFileName != nullptr)
		CFRelease(leftOutputFileName);

	if (clipName != nullptr)
		CFRelease(clipName);

	return (result == S_OK) ? 0 : 1;
}

/* -LICENSE-START-
 ** Copyright (c) 2018 Blackmagic Design
 **
 ** Permission is hereby granted, free of charge, to any person or organization
 ** obtaining a copy of the software and accompanying documentation covered by
 ** this license (the "Software") to use, reproduce, display, distribute,
 ** execute, and transmit the Software, and to prepare derivative works of the
 ** Software, and to permit third-parties to whom the Software is furnished to
 ** do so, all subject to the following:
 **
 ** The copyright notices in the Software and this entire statement, including
 ** the above license grant, this restriction and the following disclaimer,
 ** must be included in all copies of the Software, in whole or in part, and
 ** all derivative works of the Software, unless such copies or derivative
 ** works are solely in the form of machine-executable object code generated by
 ** a source language processor.
 **
 ** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 ** IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 ** FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
 ** SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
 ** FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
 ** ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 ** DEALINGS IN THE SOFTWARE.
 ** -LICENSE-END-
 */