Vuforia SDK + remote video streaming on iOS

- (BOOL)loadMediaURL:(NSURL*)url
{
    BOOL ret = NO;
    asset = [[AVURLAsset alloc] initWithURL:url options:nil];

    if (nil != asset) {
        // We can now attempt to load the media, so report success. We will
        // discover if the load actually completes successfully when we are
        // called back by the system
        ret = YES;

        [asset loadValuesAsynchronouslyForKeys:@[kTracksKey] completionHandler: ^{
            // Completion handler block (dispatched on main queue when loading
            // completes)
            dispatch_async(dispatch_get_main_queue(), ^{
                NSError *error = nil;
                AVKeyValueStatus status = [asset statusOfValueForKey:kTracksKey error:&error];

                // Create the video output that will hand us decoded pixel
                // buffers for the (possibly remote) player item
                NSDictionary *settings = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
                AVPlayerItemVideoOutput *output = [[[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:settings] autorelease];
                self.videoOutput = output;

                if (status == AVKeyValueStatusLoaded) {
                    // Asset loaded, retrieve info and prepare for playback
                    if (![self prepareAssetForPlayback]) {
                        mediaState = ERROR;
                    }
                }
                else {
                    // Error
                    mediaState = ERROR;
                }
            });
        }];
    }

    return ret;
}
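For completeness, this is roughly how the helper would be driven from the app side. A minimal sketch only; the videoPlayerHelper instance name and the stream URL are placeholders, not part of the Vuforia sample:

// Hypothetical call site: start loading a remote HLS stream.
// loadMediaURL: returns immediately; READY or ERROR is reported later
// through mediaState once the asynchronous key loading completes.
NSURL *streamURL = [NSURL URLWithString:@"https://example.com/stream/playlist.m3u8"];
if (![videoPlayerHelper loadMediaURL:streamURL]) {
    NSLog(@"Could not start loading the media URL");
}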
// Update the OpenGL video texture with the latest available video data
- (GLuint)updateVideoData
{
    GLuint textureID = 0;

    // If currently playing on texture
    if (PLAYING == mediaState && PLAYER_TYPE_ON_TEXTURE == playerType) {
        [latestSampleBufferLock lock];

        playerCursorPosition = CACurrentMediaTime() - mediaStartTime;

        unsigned char* pixelBufferBaseAddress = NULL;
        CVPixelBufferRef pixelBuffer = NULL;

        // Instead of reading a CMSampleBuffer from an AVAssetReader (as the
        // original sample does for local files), pull the latest decoded
        // frame from the AVPlayerItemVideoOutput, which also works for
        // remote items
        pixelBuffer = [self.videoOutput copyPixelBufferForItemTime:player.currentItem.currentTime itemTimeForDisplay:nil];

        if (NULL != pixelBuffer) {
            // We have a valid buffer: lock its base address before reading
            CVPixelBufferLockBaseAddress(pixelBuffer, 0);
            pixelBufferBaseAddress = (unsigned char*)CVPixelBufferGetBaseAddress(pixelBuffer);
        }
        else {
            // No video frame available: we may have been asked to provide
            // one before any are available, or we may have read all
            // available frames
        }

        if (NULL != pixelBufferBaseAddress) {
            // If we haven't created the video texture, do so now
            if (0 == videoTextureHandle) {
                videoTextureHandle = [self createVideoTexture];
            }

            glBindTexture(GL_TEXTURE_2D, videoTextureHandle);
            const size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);

            if (bytesPerRow / BYTES_PER_TEXEL == videoSize.width) {
                // No padding between lines of decoded video
                glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (GLsizei)videoSize.width, (GLsizei)videoSize.height, 0, GL_BGRA, GL_UNSIGNED_BYTE, pixelBufferBaseAddress);
            }
            else {
                // Decoded video contains padding between lines. We must not
                // upload it to graphics memory as we do not want to display it

                // Allocate storage for the texture (correctly sized)
                glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (GLsizei)videoSize.width, (GLsizei)videoSize.height, 0, GL_BGRA, GL_UNSIGNED_BYTE, NULL);

                // Now upload each line of texture data as a sub-image
                for (int i = 0; i < videoSize.height; ++i) {
                    GLubyte* line = pixelBufferBaseAddress + i * bytesPerRow;
                    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, i, (GLsizei)videoSize.width, 1, GL_BGRA, GL_UNSIGNED_BYTE, line);
                }
            }

            glBindTexture(GL_TEXTURE_2D, 0);

            // Unlock the pixel buffer
            CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

            textureID = videoTextureHandle;
        }

        if (pixelBuffer) {
            CFRelease(pixelBuffer);
        }

        [latestSampleBufferLock unlock];
    }

    return textureID;
}
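On the rendering side nothing changes from the stock sample: once per frame the renderer asks the helper for the latest texture and, if one is available, binds it while drawing the video quad over the detected target. A rough sketch only; videoPlayerHelper and the draw call for the quad stand in for whatever your EAGLView renderer already does:

// Hypothetical excerpt from the renderFrame loop of the EAGLView
GLuint videoTexture = [videoPlayerHelper updateVideoData];

if (0 != videoTexture) {
    // A fresh frame was uploaded: bind it and draw the textured quad
    // that sits on top of the image target
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, videoTexture);
    // ... issue the draw call for the video quad here ...
}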
// Prepare the AVURLAsset for playback
- (BOOL)prepareAssetForPlayback
{
    // Get video properties
    NSArray *videoTracks = [self.asset tracksWithMediaType:AVMediaTypeVideo];
    AVAssetTrack *videoTrack = videoTracks[0];
    self.videoSize = videoTrack.naturalSize;

    self.videoLengthSeconds = CMTimeGetSeconds([self.asset duration]);

    // Start playback at time 0.0
    self.playerCursorStartPosition = kCMTimeZero;

    // Start playback at full volume (audio mix level, not system volume level)
    self.currentVolume = PLAYER_VOLUME_DEFAULT;

    // Create asset tracks for reading
    BOOL ret = [self prepareAssetForReading:self.playerCursorStartPosition];

    if (ret) {
        // Prepare the AVPlayer to play the audio
        [self prepareAVPlayer];
        // Inform our client that the asset is ready to play
        self.mediaState = READY;
    }

    return ret;
}
// Prepare the AVURLAsset for reading so we can obtain video frame data from it
- (BOOL)prepareAssetForReading:(CMTime)startTime
{
    BOOL ret = YES;

    // ===== Audio =====
    // Get the first audio track
    NSArray *arrayTracks = [self.asset tracksWithMediaType:AVMediaTypeAudio];
    if (0 < [arrayTracks count]) {
        self.playAudio = YES;
        AVAssetTrack* assetTrackAudio = arrayTracks[0];

        AVMutableAudioMixInputParameters* audioInputParams = [AVMutableAudioMixInputParameters audioMixInputParameters];
        [audioInputParams setVolume:self.currentVolume atTime:self.playerCursorStartPosition];
        [audioInputParams setTrackID:[assetTrackAudio trackID]];

        NSArray* audioParams = @[audioInputParams];
        AVMutableAudioMix* audioMix = [AVMutableAudioMix audioMix];
        [audioMix setInputParameters:audioParams];

        AVPlayerItem* item = [self.player currentItem];
        [item setAudioMix:audioMix];
    }

    return ret;
}
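One piece not shown above is how the AVPlayerItemVideoOutput created in loadMediaURL: gets attached to the player item, so that copyPixelBufferForItemTime: in updateVideoData has something to read from. A minimal sketch of what prepareAVPlayer could do under that assumption (not the exact code from the sample):

// Sketch of prepareAVPlayer, assuming the AVPlayerItemVideoOutput created
// in loadMediaURL: is stored in self.videoOutput
- (void)prepareAVPlayer
{
    // Create a player item from the (local or remote) asset
    AVPlayerItem *item = [AVPlayerItem playerItemWithAsset:self.asset];

    // Attach the video output so copyPixelBufferForItemTime: can later
    // hand us decoded frames for the texture upload
    [item addOutput:self.videoOutput];

    // Create the player that drives playback (audio directly, video via the output)
    AVPlayer *newPlayer = [[[AVPlayer alloc] initWithPlayerItem:item] autorelease];
    [self setPlayer:newPlayer];
}

Since prepareAssetForReading: sets the audio mix on the player's current item, in practice you would either make sure the player item exists before that call or reapply the audio mix once it has been created.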
- (BOOL)isPlayableOnTexture
{
    // We can render local files on texture
    return YES;
}
