OpenCV — extracting frames from an HTTP Live Stream AVAsset
I am implementing an HTTP Live Streaming player on OS X using AVPlayer. I am able to stream, seek, and get duration/timing, etc. Now I want to take screenshots and process the individual frames using OpenCV, so I went with AVAssetImageGenerator. But there are no audio or video tracks on the AVAsset associated with player.currentItem.
The tracks only appear in player.currentItem.tracks, so I am not able to use AVAssetImageGenerator. Can anyone suggest a solution to extract screenshots and individual frames in such a scenario?
Please find below the code showing how I initiate the HTTP live stream.
Thanks in advance.
// Set up an AVPlayer for an HTTP Live Streaming (HLS) URL and attach its
// AVPlayerLayer to the player view. KVO observers track item status,
// playback rate, and layer readiness.
// NOTE(review): the original page text was case-mangled by scraping; this is
// the reconstructed, compilable form with Cocoa casing restored.
NSURL *url = [NSURL URLWithString:@"http://devimages.apple.com/iphone/samples/bipbop/bipbopall.m3u8"];
playerItem = [AVPlayerItem playerItemWithURL:url];
[playerItem addObserver:self
             forKeyPath:@"status"
                options:0
                context:AVSPPlayerStatusContext];
[self setPlayer:[AVPlayer playerWithPlayerItem:playerItem]];
[self addObserver:self
       forKeyPath:@"player.rate"
          options:NSKeyValueObservingOptionNew
          context:AVSPPlayerRateContext];
[self addObserver:self
       forKeyPath:@"player.currentItem.status"
          options:NSKeyValueObservingOptionNew
          context:AVSPPlayerItemStatusContext];
AVPlayerLayer *newPlayerLayer = [AVPlayerLayer playerLayerWithPlayer:[self player]];
[newPlayerLayer setFrame:[[[self playerView] layer] bounds]];
[newPlayerLayer setAutoresizingMask:kCALayerWidthSizable | kCALayerHeightSizable];
// Hidden until the layer reports readyForDisplay (observed below).
[newPlayerLayer setHidden:YES];
[[[self playerView] layer] addSublayer:newPlayerLayer];
[self setPlayerLayer:newPlayerLayer];
[self addObserver:self
       forKeyPath:@"playerLayer.readyForDisplay"
          options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
          context:AVSPPlayerLayerReadyForDisplay];
[self.player play];
The following is how I check whether a video track is present in the asset:
case AVPlayerItemStatusReadyToPlay:
    // Drive the time slider and log track counts 10 times per second.
    // NOTE(review): the block captures self strongly; if this controller can
    // outlive the observer, remove the observer in teardown or use a weak
    // reference to avoid a retain cycle.
    [self setTimeObserverToken:
        [[self player] addPeriodicTimeObserverForInterval:CMTimeMake(1, 10)
                                                    queue:dispatch_get_main_queue()
                                               usingBlock:^(CMTime time) {
        [[self timeSlider] setDoubleValue:CMTimeGetSeconds(time)];
        NSLog(@"%f,%f,%f", [self currentTime], [self duration], [[self player] rate]);
        AVPlayerItem *item = playerItem;
        if (item.status == AVPlayerItemStatusReadyToPlay) {
            AVAsset *asset = (AVAsset *)item.asset;
            // NOTE(review): for an HLS stream the asset-level track arrays are
            // typically empty; the tracks surface on player.currentItem.tracks
            // instead — which is exactly the problem described above.
            long audiotracks = [[asset tracks] count];
            long videotracks = [[asset availableMediaCharacteristicsWithMediaSelectionOptions] count];
            NSLog(@"track info audio = %ld,video=%ld", audiotracks, videotracks);
        }
    }]];
    AVPlayerItem *item = self.player.currentItem;
    if (item.status != AVPlayerItemStatusReadyToPlay)
        return;
    AVURLAsset *asset = (AVURLAsset *)item.asset;
    long audiotracks = [[asset tracksWithMediaType:AVMediaTypeAudio] count];
    long videotracks = [[asset tracksWithMediaType:AVMediaTypeVideo] count];
    NSLog(@"track info audio = %ld,video=%ld", audiotracks, videotracks);
This is an older question, but in case anyone still needs an answer:
// Extract every frame of the asset as a CGImage using AVAssetImageGenerator.
// Zero time tolerance forces exact-frame extraction (slower, but accurate).
// NOTE(review): the scraped original dropped the loop variable
// ("for (float64 = 0; < ...)"); reconstructed here, and a NULL check added
// before wrapping/releasing the CGImage to avoid a crash on failed frames.
AVURLAsset *asset = /* your asset here */;
AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
generator.requestedTimeToleranceAfter = kCMTimeZero;
generator.requestedTimeToleranceBefore = kCMTimeZero;
for (Float64 i = 0; i < CMTimeGetSeconds(asset.duration) * /* put FPS of source video here */; i++) {
    // Each iteration creates autoreleased temporaries; drain per frame so
    // memory stays bounded over long videos.
    @autoreleasepool {
        CMTime time = CMTimeMake(i, /* put FPS of source video here */);
        NSError *err;
        CMTime actualTime;
        CGImageRef image = [generator copyCGImageAtTime:time actualTime:&actualTime error:&err];
        if (image) {
            // Do whatever you want with the image — for example, wrap it
            // in a UIImage and save it.
            UIImage *generatedImage = [[UIImage alloc] initWithCGImage:image];
            CGImageRelease(image);
        }
    }
}
You can get the FPS of the video using this code:
// Read the nominal frame rate from the asset's first video track.
// Uses firstObject (nil-safe) instead of [0], which would throw an
// NSRangeException when the asset has no video tracks — exactly the HLS
// case discussed above.
float fps = 0.00f;
if (asset) {
    AVAssetTrack *videoATrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (videoATrack) {
        fps = [videoATrack nominalFrameRate];
    }
}
Hope this helps — you were asking how to get all frames from a video, or specific frames (with CMTime, for example). Please bear in mind that saving all frames into an array can impact memory heavily!
Comments
Post a Comment