CustomVideoPlayer.mm

#include "CustomVideoPlayer.h"
#include "CVTextureCache.h"
#include "CMVideoSampling.h"
#include "GlesHelper.h"

#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>
#import <Foundation/Foundation.h>
#import <OpenGLES/ES2/gl.h>
#import <OpenGLES/ES2/glext.h>
static void* _ObserveItemStatusContext = (void*)0x1;
static void* _ObservePlayerItemContext = (void*)0x2;

// set from the audio-route-change notification handler, consumed in curFrameTexture;
// declared up front so every method that touches it sees the declaration
static bool _AudioRouteWasChanged = false;
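
// CustomVideoPlayerView is a thin UIView whose backing layer is an AVPlayerLayer,
// so assigning .player is all that is needed for on-screen playback.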
@implementation CustomVideoPlayerView

+ (Class)layerClass
{
    return [AVPlayerLayer class];
}

- (AVPlayer*)player
{
    return [(AVPlayerLayer*)[self layer] player];
}

- (void)setPlayer:(AVPlayer*)player
{
    [(AVPlayerLayer*)[self layer] setPlayer:player];
}

- (void)dealloc
{
    self.player = nil;
}

@end
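
// CustomVideoPlayer feeds video frames to the caller through two paths:
//  - non-file (streaming) URLs: frames are pulled from an AVPlayerItemVideoOutput in curFrameTexture
//  - local file URLs: frames are decoded by an AVAssetReader / AVAssetReaderTrackOutput pair
// Either way the current frame ends up exposed as a texture handle (CMVideoSampling or GL).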
@implementation CustomVideoPlayer
{
    AVPlayerItem*               _playerItem;
    AVPlayer*                   _player;

    AVAssetReader*              _reader;
    AVAssetReaderTrackOutput*   _videoOut;
    AVPlayerItemVideoOutput*    videoOutput;

    CMSampleBufferRef           _cmSampleBuffer;
    CMSampleBufferRef           _cmSampleBuffer2;

    CMVideoSampling             _videoSampling;
    CMVideoSampling             _videoSamplingDummy;
    unsigned int                _videoTexture;

    CMTime                      _duration;
    CMTime                      _curTime;
    CMTime                      _curFrameTimestamp;
    CMTime                      _lastFrameTimestamp;
    CGSize                      _videoSize;

    BOOL                        _playerReady;
    intptr_t                    _textureID;

    // we need to have both because the order of asset/item getting ready is not strict
    BOOL                        _assetReady;
    BOOL                        _itemReady;
    BOOL                        _error;
    BOOL                        _evertoon_was_unloaded;

    int                         _audioTrackId;
    int                         _audioTrackInfo;
    float                       _fVolume;
}
@synthesize delegate;

- (int)audioTrack       { return _audioTrackId; }
- (int)getAudioTrack    { return _audioTrackInfo; }
- (void)setAudioTrack:(int)trackId
{
    if (!_playerReady)
        return;
    if (_player == nil)
        return;

    AVMediaSelectionGroup* group = [_player.currentItem.asset mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    if (trackId < 0 || trackId >= [group.options count])
        return;

    AVMediaSelectionOption* option = [group.options objectAtIndex:trackId];
    [_player.currentItem selectMediaOption:option inMediaSelectionGroup:group];
    _audioTrackId = trackId;
    [self setAudioVolume:[_player volume]];
}
- (NSTimeInterval)availableDuration
{
    if (!_playerReady)
        return 0;

    NSArray* loadedTimeRanges = [_playerItem loadedTimeRanges];
    if (loadedTimeRanges != nil && [loadedTimeRanges count] > 0)
    {
        CMTimeRange timeRange = [[loadedTimeRanges objectAtIndex:0] CMTimeRangeValue];
        Float64 startSeconds    = CMTimeGetSeconds(timeRange.start);
        Float64 durationSeconds = CMTimeGetSeconds(timeRange.duration);
        NSTimeInterval result = startSeconds + durationSeconds;
        return result;
    }
    return 0;
}
- (BOOL)readyToPlay     { return _playerReady; }
- (CGSize)videoSize     { return _videoSize; }
- (CMTime)duration      { return _duration; }
- (float)durationSeconds { return CMTIME_IS_VALID(_duration) ? (float)CMTimeGetSeconds(_duration) : 0.0f; }
- (float)curTimeSeconds  { return CMTIME_IS_VALID(_curTime) ? (float)CMTimeGetSeconds(_curTime) : 0.0f; }
// npot textures are supported on gles1 only with ext.
// so we support only gles2 for the sake of sanity;
+ (BOOL)CanPlayToTexture:(NSURL*)url { return /*IsRunningWithGLES2() && [url isFileURL];*/ true; }
- (void)reportError:(NSError*)error category:(const char*)category
{
    ::printf_console("[%s]Error: %s\n", category, [[error localizedDescription] UTF8String]);
    ::printf_console("%s\n", [[error localizedFailureReason] UTF8String]);
}

- (void)reportErrorWithString:(const char*)error category:(const char*)category
{
    ::printf_console("[%s]Error: %s\n", category, error);
}
- (id)init
{
    if ((self = [super init]))
    {
        _duration = _curTime = kCMTimeZero;
        _curFrameTimestamp = _lastFrameTimestamp = kCMTimeZero;
        _evertoon_was_unloaded = false;
        _fVolume = 1.0f;
    }
    return self;
}
- (void)cleanCache
{
    if (_videoSamplingDummy.cvTextureCache)
    {
        CFRelease(_videoSamplingDummy.cvTextureCache);
        _videoSamplingDummy.cvTextureCache = 0;
    }
}
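
// Releases the pending sample buffers and the CoreVideo texture objects of the current
// sampling state. The texture cache itself is not destroyed here: it is parked in
// _videoSamplingDummy and only released later by cleanCache, presumably so a texture that
// is still in flight on the render thread stays valid until the next cleanup.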
- (void)cleanupCVTextureCache
{
    if (_cmSampleBuffer)
    {
        CFRelease(_cmSampleBuffer);
        _cmSampleBuffer = 0;
    }
    if (_cmSampleBuffer2)
    {
        CFRelease(_cmSampleBuffer2);
        _cmSampleBuffer2 = 0;
    }
    if (_videoSampling.cvImageBuffer)
    {
        CFRelease(_videoSampling.cvImageBuffer);
        _videoSampling.cvImageBuffer = 0;
    }
    if (_videoSampling.cvTextureCacheTexture)
    {
        CFRelease(_videoSampling.cvTextureCacheTexture);
        _videoSampling.cvTextureCacheTexture = 0;
    }
    if (_videoSampling.cvTextureCache != 0)
    {
        if (_videoSamplingDummy.cvTextureCache)
        {
            CFRelease(_videoSamplingDummy.cvTextureCache);
            _videoSamplingDummy.cvTextureCache = 0;
        }
        _videoSamplingDummy.cvTextureCache = _videoSampling.cvTextureCache;
        _videoSampling.cvTextureCache = 0;
    }
    //CMVideoSampling_Uninitialize(&_videoSampling);
}
- (void)cleanupAssetReader
{
    if (_reader)
        [_reader cancelReading];

    _reader = nil;
    _videoOut = nil;
}
- (void)cleanupPlayer
{
    if (_player)
    {
        [[NSNotificationCenter defaultCenter] removeObserver:self name:AVAudioSessionRouteChangeNotification object:nil];
        [_player.currentItem removeObserver:self forKeyPath:@"status"];
        [_player removeObserver:self forKeyPath:@"currentItem"];
        [_player pause];
        _player = nil;
    }

    if (_playerItem)
    {
        [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:_playerItem];
        _playerItem = nil;
    }
}
- (void)unloadPlayer
{
    _evertoon_was_unloaded = true;
    [self cleanupCVTextureCache];
    [self cleanupAssetReader];
    [self cleanupPlayer];

    videoOutput = nil;

    if (_videoTexture != 0)
    {
        GLuint handle = _videoTexture;
        glDeleteTextures(1, &handle);
        _videoTexture = 0;
    }

    _videoSize = CGSizeMake(0, 0);
    _duration = _curTime = kCMTimeZero;
    _curFrameTimestamp = _lastFrameTimestamp = kCMTimeZero;
    _playerReady = _assetReady = _itemReady = NO;
}
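
// Starts asynchronous loading of the asset's "tracks" and "playable" keys; once loaded,
// prepareAsset:withKeys:: is dispatched onto the main queue to build the player item,
// the AVPlayer and the KVO / notification observers.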
- (BOOL)loadVideo:(NSURL*)url
{
    _evertoon_was_unloaded = false;
    _error = false;
    _audioTrackId = 0;

    AVURLAsset* asset = [AVURLAsset URLAssetWithURL:url options:nil];
    if (!asset)
        return NO;

    NSArray* requestedKeys = @[@"tracks", @"playable"];
    [asset loadValuesAsynchronouslyForKeys:requestedKeys completionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            [self prepareAsset:asset withKeys:requestedKeys :url];
        });
    }];
    return YES;
}
- (BOOL)_play:(CustomVideoPlayerView*)view
{
    if (!_playerReady)
        return NO;

    if (view)
        view.player = _player;
    else
        [self prepareReader];

    // do not do seekTo and setRate here, it seems that http streaming may hang sometimes if you do so. go figure
    _curFrameTimestamp = _lastFrameTimestamp = kCMTimeZero;
    [_player play];
    [self setAudioVolume:_fVolume];
    return YES;
}
- (BOOL)_playloop:(CustomVideoPlayerView*)view
{
    if (!_playerReady)
        return NO;

    // do not do seekTo and setRate here, it seems that http streaming may hang sometimes if you do so. go figure
    _curFrameTimestamp = _lastFrameTimestamp = kCMTimeZero;
    [_player play];
    return YES;
}
- (void)setTextureID:(intptr_t)id { _textureID = id; }

- (BOOL)playToView:(CustomVideoPlayerView*)view { return [self _play:view]; }
- (BOOL)playToTexture       { return [self _play:nil]; }
- (BOOL)playToTextureloop   { return [self _playloop:nil]; }

- (BOOL)isPlaying   { return _playerReady && _player.rate != 0.0f; }
- (BOOL)getError    { return _error; }

- (void)pause   { if (_playerReady && _player.rate != 0.0f) [_player pause]; }
- (void)resume  { if (_playerReady && _player.rate == 0.0f) [_player play]; }
- (void)rewind  { [self seekToTimestamp:kCMTimeZero]; }
- (void)seekTo:(float)timeSeconds { [self seekToTimestamp:CMTimeMakeWithSeconds(timeSeconds, _player.currentItem.duration.timescale)]; }
- (void)setSpeed:(float)fSpeed { [_player setRate:fSpeed]; }
- (void)seekToTimestamp:(CMTime)time
{
    if (!CMTIME_IS_INVALID(time))
    {
        [_player seekToTime:time
            toleranceBefore:kCMTimeZero
             toleranceAfter:kCMTimeZero
          completionHandler:^(BOOL finished) {}];
    }
    [self prepareReaderPos:time];
    _curFrameTimestamp = _lastFrameTimestamp = time;
}
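
// Returns the texture handle for the most recently decoded frame.
// Non-file (streaming) URLs: the frame is pulled from the AVPlayerItemVideoOutput and either
// mapped through the CoreVideo texture cache (Metal/GLES) or uploaded with glTexImage2D.
// File URLs: the AVAssetReader output is advanced until its presentation timestamp catches up
// with the player clock, then the sample is turned into a texture via CMVideoSampling.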
- (intptr_t)curFrameTexture
{
    AVURLAsset* asset = (AVURLAsset*)_playerItem.asset;
    if (![asset.URL isFileURL])
    {
        intptr_t curTex = _videoTexture;

        CMTime time = [_player currentTime];
        if (CMTimeCompare(time, _curTime) == 0)
            return curTex;

        // if we have changed audio route and due to current category apple decided to pause playback - resume automatically
        if (_AudioRouteWasChanged)
        {
            _AudioRouteWasChanged = false;
            [_player setRate:1.0f];
        }
        _curTime = time;

        CVPixelBufferRef pixelBuffer = [videoOutput copyPixelBufferForItemTime:_player.currentItem.currentTime itemTimeForDisplay:nil];
        if (pixelBuffer == NULL)
            return curTex;

        CVPixelBufferLockBaseAddress(pixelBuffer, 0);
        unsigned char* pixelBufferBaseAddress = (unsigned char*)CVPixelBufferGetBaseAddress(pixelBuffer);

        size_t w = CVPixelBufferGetWidth(pixelBuffer);
        size_t h = CVPixelBufferGetHeight(pixelBuffer);
        CGSize size = CGSizeMake(w, h);
        if (_videoSize.width != size.width)
            _videoSize = size;

        if (NULL != pixelBufferBaseAddress)
        {
            if (UnitySelectedRenderingAPI() == apiMetal || UnitySelectedRenderingAPI() == apiOpenGLES3 || UnitySelectedRenderingAPI() == apiOpenGLES2)
            {
                // map the pixel buffer into a texture through the CoreVideo texture cache
                if (_videoSampling.cvTextureCacheTexture)
                {
                    CFRelease(_videoSampling.cvTextureCacheTexture);
                    FlushCVTextureCache(_videoSampling.cvTextureCache);
                }
                _videoSampling.cvTextureCacheTexture = CreateTextureFromCVTextureCache(_videoSampling.cvTextureCache, pixelBuffer, w, h);
                if (_videoSampling.cvTextureCacheTexture)
                    curTex = GetTextureFromCVTextureCache(_videoSampling.cvTextureCacheTexture);

                if (UnitySelectedRenderingAPI() == apiOpenGLES2 || UnitySelectedRenderingAPI() == apiOpenGLES3)
                {
                    glBindTexture(GL_TEXTURE_2D, (GLuint)curTex);
                    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
                    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
                    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
                    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
                    glBindTexture(GL_TEXTURE_2D, 0);
                }
                CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
            }
            else
            {
                // fallback: upload the pixel data into a GL texture we own
                if (0 == _videoTexture)
                {
                    GLuint handle;
                    glGenTextures(1, &handle);
                    glBindTexture(GL_TEXTURE_2D, handle);
                    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
                    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
                    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
                    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
                    glBindTexture(GL_TEXTURE_2D, 0);
                    _videoTexture = handle;
                }

                glBindTexture(GL_TEXTURE_2D, _videoTexture);
                const size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
                if (bytesPerRow / 4 == (GLsizei)w)
                {
                    // tightly packed rows: upload in one call
                    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (GLsizei)w, (GLsizei)h, 0, GL_BGRA, GL_UNSIGNED_BYTE, pixelBufferBaseAddress);
                }
                else
                {
                    // rows are padded: upload line by line
                    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (GLsizei)w, (GLsizei)h, 0, GL_BGRA, GL_UNSIGNED_BYTE, NULL);
                    for (int i = 0; i < (int)h; ++i)
                    {
                        GLubyte* line = pixelBufferBaseAddress + i * bytesPerRow;
                        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, i, (GLsizei)w, 1, GL_BGRA, GL_UNSIGNED_BYTE, line);
                    }
                }
                glBindTexture(GL_TEXTURE_2D, 0);
                CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
                curTex = _videoTexture;
            }
        }
        else
        {
            CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        }

        CFRelease(pixelBuffer);
        return curTex;
    }
    else
    {
        if (!_reader)
            return 0;

        intptr_t curTex = CMVideoSampling_LastSampledTexture(&_videoSampling);

        CMTime time = [_player currentTime];
        if (CMTimeCompare(time, _curTime) == 0 || _reader.status != AVAssetReaderStatusReading)
            return curTex;

        // if we have changed audio route and due to current category apple decided to pause playback - resume automatically
        if (_AudioRouteWasChanged)
        {
            _AudioRouteWasChanged = false;
            [_player setRate:1.0f];
        }
        _curTime = time;

        while (_reader.status == AVAssetReaderStatusReading && CMTimeCompare(_curFrameTimestamp, _curTime) <= 0)
        {
            if (_cmSampleBuffer2)
                CFRelease(_cmSampleBuffer2);
            _cmSampleBuffer2 = _cmSampleBuffer;

            // TODO: properly handle ending
            _cmSampleBuffer = [_videoOut copyNextSampleBuffer];
            if (_cmSampleBuffer == 0)
                return 0;

            _curFrameTimestamp = CMSampleBufferGetPresentationTimeStamp(_cmSampleBuffer);
        }

        if (CMTimeCompare(_lastFrameTimestamp, _curFrameTimestamp) < 0)
        {
            _lastFrameTimestamp = _curFrameTimestamp;
            if (_cmSampleBuffer)
            {
                size_t w = 0, h = 0;
                curTex = CMVideoSampling_SampleBuffer(&_videoSampling, _cmSampleBuffer, &w, &h);
                _videoSize = CGSizeMake(w, h);
            }
        }
        return curTex;
    }
}
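
// Applies the volume to the AVPlayer and, through an AVMutableAudioMix, mutes every audio
// track except the currently selected one (_audioTrackId).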
- (BOOL)setAudioVolume:(float)volume
{
    if (!_playerReady)
        return NO;

    if (_player != nil)
        [_player setVolume:volume];
    _fVolume = volume;

    NSArray* arrayTracks = [_playerItem.asset tracksWithMediaType:AVMediaTypeAudio];
    NSMutableArray* params = [NSMutableArray array];
    for (int i = 0; i < [arrayTracks count]; i++)
    {
        AVAssetTrack* assetTrackAudio = arrayTracks[i];
        AVMutableAudioMixInputParameters* inputParams = [AVMutableAudioMixInputParameters audioMixInputParameters];
        float trackVolume = (i == _audioTrackId) ? volume : 0.0f;
        [inputParams setVolume:trackVolume atTime:kCMTimeZero];
        [inputParams setTrackID:[assetTrackAudio trackID]];
        [params addObject:inputParams];
    }

    AVMutableAudioMix* audioMix = [AVMutableAudioMix audioMix];
    [audioMix setInputParameters:params];
    [_playerItem setAudioMix:audioMix];
    return YES;
}
- (void)playerItemDidReachEnd:(NSNotification*)notification
{
    [delegate onPlayerDidFinishPlayingVideo];
}
- (void)audioRouteChanged:(NSNotification*)notification
{
    _AudioRouteWasChanged = true;
}
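
// KVO callback. Readiness is tracked with two flags because the item's "status" and the
// player's "currentItem" changes can arrive in either order; onPlayerReady is reported only
// once both have fired.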
- (void)observeValueForKeyPath:(NSString*)path ofObject:(id)object change:(NSDictionary*)change context:(void*)context
{
    BOOL reportPlayerReady = NO;

    if (context == _ObserveItemStatusContext)
    {
        AVPlayerStatus status = (AVPlayerStatus)[[change objectForKey:NSKeyValueChangeNewKey] integerValue];
        switch (status)
        {
            case AVPlayerStatusUnknown:
                break;

            case AVPlayerStatusReadyToPlay:
            {
                NSArray* video = [_playerItem.asset tracksWithMediaType:AVMediaTypeVideo];
                if ([video count])
                    _videoSize = [(AVAssetTrack*)[video objectAtIndex:0] naturalSize];

                _duration = [_playerItem duration];
                _assetReady = YES;
                reportPlayerReady = _itemReady;
            }
            break;

            case AVPlayerStatusFailed:
            {
                AVPlayerItem* playerItem = (AVPlayerItem*)object;
                [self reportError:playerItem.error category:"prepareAsset"];
                _error = true;
            }
            break;
        }
    }
    else if (context == _ObservePlayerItemContext)
    {
        if ([change objectForKey:NSKeyValueChangeNewKey] != (id)[NSNull null])
        {
            _itemReady = YES;
            reportPlayerReady = _assetReady;
        }
    }
    else
    {
        [super observeValueForKeyPath:path ofObject:object change:change context:context];
    }

    if (reportPlayerReady)
    {
        _playerReady = YES;
        [delegate onPlayerReady];
    }
}
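
// Runs on the main queue after the requested asset keys have loaded. Validates the asset,
// (re)creates the AVPlayerItem and, on first use, the AVPlayer, then hooks up the status /
// currentItem observers plus the end-of-playback and audio-route-change notifications.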
- (void)prepareAsset:(AVAsset*)asset withKeys:(NSArray*)requestedKeys :(NSURL*)url
{
    if (_evertoon_was_unloaded)
    {
        NSLog(@"### Video already unloaded - ignoring.");
        return;
    }

    for (NSString* key in requestedKeys)
    {
        NSError* error = nil;
        AVKeyValueStatus keyStatus = [asset statusOfValueForKey:key error:&error];
        if (keyStatus == AVKeyValueStatusFailed)
        {
            [self reportError:error category:"prepareAsset"];
            _error = true;
            return;
        }
    }

    if (!asset.playable)
    {
        [self reportErrorWithString:"Item cannot be played" category:"prepareAsset"];
        _error = true;
        return;
    }

    if (_playerItem)
    {
        [_playerItem removeObserver:self forKeyPath:@"status"];
        [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:_playerItem];
        _playerItem = nil;
    }

    _playerItem = [AVPlayerItem playerItemWithAsset:asset];
    [_playerItem addObserver:self forKeyPath:@"status"
                     options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
                     context:_ObserveItemStatusContext
    ];
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(playerItemDidReachEnd:)
                                                 name:AVPlayerItemDidPlayToEndTimeNotification object:_playerItem
    ];

    if (!_player)
    {
        _player = [AVPlayer playerWithPlayerItem:_playerItem];
        [_player addObserver:self forKeyPath:@"currentItem"
                     options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
                     context:_ObservePlayerItemContext
        ];
        [_player setAllowsExternalPlayback:NO];

        // we want to subscribe to route change notifications, for that we need audio session active
        // and in case FMOD wasn't used up to this point it is still not active
        [[AVAudioSession sharedInstance] setActive:YES error:nil];
        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(audioRouteChanged:)
                                                     name:AVAudioSessionRouteChangeNotification object:nil
        ];
    }

    if (_player.currentItem != _playerItem)
        [_player replaceCurrentItemWithPlayerItem:_playerItem];
    else
        [_player seekToTime:kCMTimeZero];
}
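
// Prepares the frame source used by playToTexture. Non-file URLs get an AVPlayerItemVideoOutput
// attached to the current item (together with an audio mix that selects _audioTrackId); file
// URLs get a fresh AVAssetReader with a BGRA track output that is started immediately.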
- (BOOL)prepareReader
{
    if (!_playerReady)
        return NO;

    [self cleanupAssetReader];

    AVURLAsset* asset = (AVURLAsset*)_playerItem.asset;
    if (![asset.URL isFileURL])
    {
        NSDictionary* settings = @{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)};
        AVPlayerItemVideoOutput* output = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:settings];
        videoOutput = output;

        if ([_playerItem tracks] == nil)
        {
            [self reportErrorWithString:"_playerItem tracks == null" category:"prepareReader"];
            _error = true;
            return NO;
        }
        if ([[_playerItem tracks] count] <= 0)
        {
            [self reportErrorWithString:"tracks count == 0" category:"prepareReader"];
            _error = true;
            return NO;
        }

        AVAssetTrack* videoTrack = [[[_playerItem tracks] objectAtIndex:0] assetTrack];
        _videoSize = videoTrack.naturalSize;
        printf("\nprepareReader %d %d", (GLsizei)_videoSize.width, (GLsizei)_videoSize.height);
        _duration = [asset duration];

        NSArray* arrayTracks = [asset tracksWithMediaType:AVMediaTypeAudio];
        NSMutableArray* params = [NSMutableArray array];
        _audioTrackInfo = (int)[arrayTracks count];
        for (int i = 0; i < [arrayTracks count]; i++)
        {
            AVAssetTrack* assetTrackAudio = arrayTracks[i];
            AVMutableAudioMixInputParameters* inputParams = [AVMutableAudioMixInputParameters audioMixInputParameters];
            float trackVolume = (i == _audioTrackId) ? 1.0f : 0.0f;
            [inputParams setVolume:trackVolume atTime:kCMTimeZero];
            [inputParams setTrackID:[assetTrackAudio trackID]];
            [params addObject:inputParams];
        }

        AVMutableAudioMix* audioMix = [AVMutableAudioMix audioMix];
        [audioMix setInputParameters:params];

        AVPlayerItem* item = [_player currentItem];
        [item setAudioMix:audioMix];
        [item addOutput:videoOutput];
    }
    else
    {
        NSError* error = nil;
        _reader = [AVAssetReader assetReaderWithAsset:_playerItem.asset error:&error];
        if (error)
            [self reportError:error category:"prepareReader"];

        _reader.timeRange = CMTimeRangeMake(kCMTimeZero, _duration);

        AVAssetTrack* videoTrack = [[_playerItem.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        NSDictionary* options = @{(NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)};
        _videoOut = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:options];
        _videoOut.alwaysCopiesSampleData = NO;

        NSArray* arrayTracks = [_playerItem.asset tracksWithMediaType:AVMediaTypeAudio];
        NSMutableArray* params = [NSMutableArray array];
        _audioTrackInfo = (int)[arrayTracks count];
        for (int i = 0; i < [arrayTracks count]; i++)
        {
            AVAssetTrack* assetTrackAudio = arrayTracks[i];
            AVMutableAudioMixInputParameters* inputParams = [AVMutableAudioMixInputParameters audioMixInputParameters];
            float trackVolume = (i == _audioTrackId) ? 1.0f : 0.0f;
            [inputParams setVolume:trackVolume atTime:kCMTimeZero];
            [inputParams setTrackID:[assetTrackAudio trackID]];
            [params addObject:inputParams];
        }

        if (![_reader canAddOutput:_videoOut])
        {
            [self reportErrorWithString:"canAddOutput returned false" category:"prepareReader"];
            _error = true;
            return NO;
        }
        [_reader addOutput:_videoOut];

        if (![_reader startReading])
        {
            [self reportError:[_reader error] category:"prepareReader"];
            _error = true;
            return NO;
        }
    }

    [self cleanupCVTextureCache];
    CMVideoSampling_Initialize(&_videoSampling);
    return YES;
}
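
// Rebuilds the AVAssetReader starting at the given time; called from seekToTimestamp so the
// file-based decode path resumes from the seek position. The streaming path is left untouched
// here (the AVPlayerItemVideoOutput simply follows the player's own clock).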
- (BOOL)prepareReaderPos:(CMTime)time
{
    if (!_playerReady)
        return NO;

    [self cleanupAssetReader];

    AVURLAsset* asset = (AVURLAsset*)_playerItem.asset;
    if (![asset.URL isFileURL])
    {
        // non-file (streaming) playback: nothing to rebuild here
        //[self reportErrorWithString:"non-file url. no video to texture." category:"prepareReader"];
        //return NO;
        /*
        NSDictionary *settings = @{(id) kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
        AVPlayerItemVideoOutput *output = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:settings];
        videoOutput = output;

        NSArray *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
        AVAssetTrack *videoTrack = [[[_playerItem tracks] objectAtIndex:0] assetTrack];
        _videoSize = videoTrack.naturalSize;
        _duration = [asset duration];

        // ===== Audio =====
        // Get the first audio track
        NSArray * arrayTracks = [asset tracksWithMediaType:AVMediaTypeAudio];
        if (0 < [arrayTracks count]) {
            AVAssetTrack* assetTrackAudio = arrayTracks[0];
            AVMutableAudioMixInputParameters* audioInputParams = [AVMutableAudioMixInputParameters audioMixInputParameters];
            [audioInputParams setVolume:1.0f atTime:time];
            [audioInputParams setTrackID:[assetTrackAudio trackID]];

            NSArray* audioParams = @[audioInputParams];
            AVMutableAudioMix* audioMix = [AVMutableAudioMix audioMix];
            [audioMix setInputParameters:audioParams];

            AVPlayerItem* item = [_player currentItem];
            [item setAudioMix:audioMix];
        }
        [[_player currentItem] addOutput:videoOutput];
        */
    }
    else
    {
        NSError* error = nil;
        _reader = [AVAssetReader assetReaderWithAsset:_playerItem.asset error:&error];
        if (error)
        {
            [self reportError:error category:"prepareReader"];
            _error = true;
        }

        _reader.timeRange = CMTimeRangeMake(time, _duration);

        AVAssetTrack* videoTrack = [[_playerItem.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        NSDictionary* options = @{(NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)};
        _videoOut = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:options];
        _videoOut.alwaysCopiesSampleData = NO;

        if (![_reader canAddOutput:_videoOut])
        {
            [self reportErrorWithString:"canAddOutput returned false" category:"prepareReader"];
            _error = true;
            return NO;
        }
        [_reader addOutput:_videoOut];

        if (![_reader startReading])
        {
            [self reportError:[_reader error] category:"prepareReader"];
            _error = true;
            return NO;
        }
    }

    //[self cleanupCVTextureCache];
    //CustomCMVideoSampling_Initialize(&_videoSampling);
    return YES;
}
@end