// NOTE: Only tested with mp4 files captured on the local device (本機(jī)拍攝的mp4文件).
- (void)setFileName:(NSString *)fileName{
    _fileName = [fileName copy];
    // Build the file URL for the video inside the app's Documents sandbox.
    // For a remote resource use [NSURL URLWithString:]; a local file path
    // must go through [NSURL fileURLWithPath:].
    NSString *documentsPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
    NSString *recordsPath = [documentsPath stringByAppendingPathComponent:VideoRecordsDirectory];
    NSString *videoPath = [recordsPath stringByAppendingPathComponent:_fileName];
    _videoUrl = [NSURL fileURLWithPath:videoPath];
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:_videoUrl options:nil];
    // Cache a frame as the thumbnail for this recording — here the frame at t = 0.
    _videoImage = [self getthumImageForAsset:asset atTime:0];
}
/// Grabs a single frame of the asset as a UIImage.
/// @param asset The asset to snapshot. Must not be nil.
/// @param time  The capture position in seconds.
/// @return The frame image, or nil if generation failed.
- (UIImage *)getthumImageForAsset:(AVURLAsset *)asset atTime:(NSTimeInterval)time{
    NSParameterAssert(asset);
    AVAssetImageGenerator *assetImageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
    // Apply the track's preferredTransform so rotated recordings come out upright.
    assetImageGenerator.appliesPreferredTrackTransform = YES;
    assetImageGenerator.apertureMode = AVAssetImageGeneratorApertureModeEncodedPixels;
    // For a frame-exact grab, uncomment the tolerances below; by default the
    // generator may return the nearest keyframe rather than the exact time.
    // assetImageGenerator.requestedTimeToleranceAfter = kCMTimeZero;
    // assetImageGenerator.requestedTimeToleranceBefore = kCMTimeZero;
    NSError *thumImageGenerationError = nil;
    // `time` is a duration in seconds, so build the CMTime with
    // CMTimeMakeWithSeconds (600 is the conventional video timescale).
    // The previous CMTimeMake(time, 60) treated `time` as a frame index at
    // 60 fps, i.e. requested time/60 seconds — wrong for any nonzero time.
    CGImageRef thumImageRef = [assetImageGenerator copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600)
                                                          actualTime:NULL
                                                               error:&thumImageGenerationError];
    if (!thumImageRef) {
        NSLog(@"thumImageGenerationError %@", thumImageGenerationError);
        return nil;
    }
    UIImage *thumImage = [[UIImage alloc] initWithCGImage:thumImageRef];
    // copyCGImageAtTime: follows the CF Create rule — we own the CGImageRef
    // and must release it here (the original implementation leaked it).
    CGImageRelease(thumImageRef);
    return thumImage;
}
/** Asynchronously reads the display size of the video at URL.
 *  The completion block is always invoked once, on the main queue.
 *  videoSize is CGSizeZero when the asset has no video track (or the
 *  tracks failed to load).
 */
+ (void)getVideoSizeWithURL:(NSURL *)URL complete:(void(^)(CGSize videoSize))complete{
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:URL options:nil];
    // loadValuesAsynchronouslyForKeys: is the official way to load tracks
    // without blocking the calling thread — reading asset.tracks eagerly
    // is a potentially slow, synchronous operation.
    [asset loadValuesAsynchronouslyForKeys:@[@"tracks"] completionHandler:^{
        CGSize videoSize = CGSizeZero;
        // An asset typically carries several tracks (AVMediaTypeVideo,
        // AVMediaTypeAudio, ...); pick the first video track and read its
        // naturalSize.
        for (AVAssetTrack *track in asset.tracks) {
            if ([track.mediaType isEqualToString:AVMediaTypeVideo]) {
                // naturalSize is pre-rotation; apply preferredTransform to
                // get the display orientation. A 90°/270° rotation yields
                // negative components, so take absolute values.
                CGSize transformed = CGSizeApplyAffineTransform(track.naturalSize, track.preferredTransform);
                videoSize = CGSizeMake(fabs(transformed.width), fabs(transformed.height));
                break;
            }
        }
        // Always report back. The original only fired the callback when
        // asset.playable was YES, silently dropping it (and hanging any
        // waiting caller) for unplayable or failed assets.
        dispatch_async(dispatch_get_main_queue(), ^{
            if (complete) complete(videoSize);
        });
    }];
}