void render(int width, int height, int screenRotation)
{
    // Run callbacks that were queued on the render-thread scheduler.
    while ([scheduler runOne]) {
    }

    glViewport(0, 0, width, height);
    glClearColor(0.f, 0.f, 0.f, 1.f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    // Bail out early if there is no output frame (or no camera calibration) to render yet.
    easyar_OutputFrame * oFrame = [outputFrameBuffer peek];
    if (oFrame == nil) { return; }
    easyar_InputFrame * iFrame = [oFrame inputFrame];
    if (iFrame == nil) { return; }
    if (![iFrame hasCameraParameters]) { return; }

    easyar_CameraParameters * cameraParameters = [iFrame cameraParameters];
    float viewport_aspect_ratio = (float)width / (float)height;
    easyar_Matrix44F * projection = [cameraParameters projection:0.01f farPlane:1000.f viewportAspectRatio:viewport_aspect_ratio screenRotation:screenRotation combiningFlip:true manualHorizontalFlip:false];
    easyar_Matrix44F * imageProjection = [cameraParameters imageProjection:viewport_aspect_ratio screenRotation:screenRotation combiningFlip:true manualHorizontalFlip:false];

    // Upload the camera image to the background renderer only when a new input frame arrives.
    easyar_Image * image = [iFrame image];
    if ([iFrame index] != previousInputFrameIndex) {
        [bgRenderer upload:[image format] width:[image width] height:[image height] bufferData:[[image buffer] data]];
        previousInputFrameIndex = [iFrame index];
    }
    [bgRenderer render:imageProjection];

    NSArray<easyar_FrameFilterResult *> * results = [oFrame results];
    for (int i = 0; i < [results count]; i++) {
        easyar_FrameFilterResult * _result = results[i];
        easyar_ImageTrackerResult * result = [_result isEqual:[NSNull null]] ? nil : (easyar_ImageTrackerResult *)_result;
        if (result == nil) {
            continue;
        }

        // Draw a box over every image target that is currently being tracked.
        for (easyar_TargetInstance * targetInstance in [result targetInstances]) {
            easyar_TargetStatus status = [targetInstance status];
            if (status == easyar_TargetStatus_Tracked) {
                easyar_Target * target = [targetInstance target];
                easyar_ImageTarget * imagetarget = [target isKindOfClass:[easyar_ImageTarget class]] ? (easyar_ImageTarget *)target : nil;
                if (imagetarget == nil) {
                    continue;
                }
                easyar_Vec2F * scale = [easyar_Vec2F create:@[@([imagetarget scale]), @([imagetarget scale] / [imagetarget aspectRatio])]];
                [boxRenderer render:projection cameraview:[targetInstance pose] size:scale];
            }
        }

        // Video playback is driven by the first target instance of this result.
        NSArray<easyar_TargetInstance *> * targetInstances = [result targetInstances];
        if ([targetInstances count] > 0) {
            easyar_TargetInstance * targetInstance = [targetInstances objectAtIndex:0];
            easyar_Target * target = [targetInstance target];
            easyar_TargetStatus status = [targetInstance status];
            if (status == easyar_TargetStatus_Tracked) {
                int runtimeID = [target runtimeID];
                // The tracker switched to a different target: stop the previous video.
                if (active_target != 0 && active_target != runtimeID) {
                    [video onLost];
                    video = nil;
                    tracked_target = 0;
                    active_target = 0;
                }
                // First time this target is seen: build the video player from the
                // target's JSON metadata and start streaming playback.
                if (tracked_target == 0) {
                    if (video == nil && [video_renderers count] > 0) {
                        NSData * data = [target_meta dataUsingEncoding:NSUTF8StringEncoding];
                        id json = [NSJSONSerialization JSONObjectWithData:data options:0 error:nil];
                        video = [[ARVideo alloc] init];
                        [video openStreamingVideo:[json objectForKey:@"video_url"] texid:[[video_renderers objectAtIndex:2] texid] scheduler:scheduler];
                        current_video_renderer = [video_renderers objectAtIndex:2];
                    }
                    if (video != nil) {
                        videoPlaying_status = [video onFound];
                        tracked_target = runtimeID;
                        active_target = runtimeID; // keep the target-switch check above meaningful
                    }
                }
                easyar_ImageTarget * imagetarget = [target isKindOfClass:[easyar_ImageTarget class]] ? (easyar_ImageTarget *)target : nil;
                if (imagetarget == nil) {
                    continue;
                }
                easyar_Vec2F * scale = [easyar_Vec2F create:@[@([imagetarget scale]), @([imagetarget scale] / [imagetarget aspectRatio])]];

                // Render the decoded video texture onto the target once frames are available.
                if (current_video_renderer != nil) {
                    [video update];
                    if ([video isRenderTextureAvailable]) {
                        [current_video_renderer render:projection cameraview:[targetInstance pose] size:scale];
                    }
                }
            } else {
                // The target is no longer tracked: notify the video player.
                if (tracked_target != 0) {
                    [video onLost];
                    tracked_target = 0;
                }
            }
        }
    }
}
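
// Usage sketch (an assumption, not part of the listing above): in the EasyAR iOS
// samples a per-frame render function like this is driven from a GLKView draw
// callback, roughly as follows. The hosting view controller and its screenRotation
// variable are hypothetical placeholders.
//
// - (void)glkView:(GLKView *)view drawInRect:(CGRect)rect
// {
//     render((int)view.drawableWidth, (int)view.drawableHeight, screenRotation);
// }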