CGAffineTransform 视频旋转 —— 记录一下使用 CGAffineTransform 对视频进行旋转/镜像处理的方法。
// Demonstrates rotating / mirroring a video with AVFoundation: a
// CGAffineTransform is applied through an AVMutableVideoComposition layer
// instruction, and the result is re-exported with AVAssetExportSession.

- (void)test:(NSURL *)url transformUrl:(NSURL *)exportUrl {
    [self rotateVideoAssetWithFileURL:url dstFileURL:exportUrl];
}

/// Re-encodes the asset at `fileURL` into `dstFileURL`, applying the transform
/// produced by -videoAssetTrackTransform:. Logs and returns early on any failure.
- (void)rotateVideoAssetWithFileURL:(NSURL *)fileURL dstFileURL:(NSURL *)dstFileURL {
    NSDictionary *options = @{AVURLAssetPreferPreciseDurationAndTimingKey : @YES};
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:fileURL options:options];

    // -firstObject is nil-safe. The original used objectAtIndex:0, which throws
    // NSRangeException on an asset with no video (or no audio) track, so the
    // nil check below could never actually run.
    AVAssetTrack *videoAssetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    AVAssetTrack *audioAssetTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    if (videoAssetTrack == nil || audioAssetTrack == nil) {
        NSLog(@"error is %@", @"video or audio assetTrack is nil");
        return;
    }

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.frameDuration = videoAssetTrack.minFrameDuration;
    // Width and height are swapped because the render target is the rotated frame.
    CGSize renderSize = CGSizeMake(videoAssetTrack.naturalSize.height,
                                   videoAssetTrack.naturalSize.width);
    videoComposition.renderSize = renderSize;

    // One instruction covering the whole asset.
    AVMutableVideoCompositionInstruction *videoCompositionInstruction =
        [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);

    AVMutableVideoCompositionLayerInstruction *videoCompositionLayerInstruction =
        [AVMutableVideoCompositionLayerInstruction
            videoCompositionLayerInstructionWithAssetTrack:videoAssetTrack];
    // Affine transforms here use the iOS screen coordinate system:
    // +x to the right, +y downwards.
    CGAffineTransform transform = [self videoAssetTrackTransform:videoAssetTrack];
    [videoCompositionLayerInstruction setTransform:transform atTime:kCMTimeZero];

    videoCompositionInstruction.layerInstructions = @[ videoCompositionLayerInstruction ];
    videoComposition.instructions = @[ videoCompositionInstruction ];

    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
    // NOTE: with any preset other than AVAssetExportPresetPassthrough, the video
    // track must be inserted before the audio track; inserting audio first makes
    // the export fail with error -11841 (invalid video composition).
    AVMutableCompositionTrack *videoCompositionTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    NSError *error = nil;
    // Per Cocoa convention, check the BOOL return value rather than the error
    // pointer (the pointer is only meaningful on failure).
    if (![videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                                        ofTrack:videoAssetTrack
                                         atTime:kCMTimeZero
                                          error:&error]) {
        NSLog(@"error is %@", error);
        return;
    }

    error = nil;
    AVMutableCompositionTrack *audioCompositionTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    if (![audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                                        ofTrack:audioAssetTrack
                                         atTime:kCMTimeZero
                                          error:&error]) {
        NSLog(@"error is %@", error);
        return;
    }

    NSLog(@"the assetDuration is %lld", asset.duration.value / asset.duration.timescale);

    AVAssetExportSession *assetExportSession =
        [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                         presetName:AVAssetExportPresetHighestQuality];
    assetExportSession.shouldOptimizeForNetworkUse = YES;
    assetExportSession.videoComposition = videoComposition;
    assetExportSession.outputURL = dstFileURL;
    assetExportSession.outputFileType = AVFileTypeMPEG4;

    __weak AVAssetExportSession *weakAssetExportSession = assetExportSession;
    __weak typeof(self) weakSelf = self;
    [assetExportSession exportAsynchronouslyWithCompletionHandler:^{
        if ([weakAssetExportSession status] != AVAssetExportSessionStatusCompleted) {
            NSLog(@"the error is %@", [weakAssetExportSession error]);
            NSLog(@"the status is %ld", (long)[weakAssetExportSession status]);
            NSLog(@"the outPutPath is %@", [weakAssetExportSession.outputURL absoluteString]);
            NSLog(@"the error is %@", [weakAssetExportSession error].userInfo);
        }
        dispatch_async(dispatch_get_main_queue(), ^{
            // Pass the captured dstFileURL rather than
            // weakAssetExportSession.outputURL: the session's outputURL is
            // sometimes nil in the completion handler even though the exported
            // file exists on disk.
            [weakSelf exportDidFinish:dstFileURL];
        });
    }];
}

/// Builds the rotation/mirror transform to apply to the video track.
/// Currently hard-coded to -180 (horizontal mirror); the intended production
/// path is to derive the angle from the track's preferredTransform via
/// -degressFromVideoFileWithVideoAssetTrack:.
- (CGAffineTransform)videoAssetTrackTransform:(AVAssetTrack *)videoAssetTrack {
    // TODO: use [self degressFromVideoFileWithVideoAssetTrack:videoAssetTrack]
    // instead of the hard-coded test value.
    int degrees = -180;
    CGAffineTransform transform = CGAffineTransformIdentity;
    if (degrees == 0) {
        return transform;
    }

    CGSize naturalSize = videoAssetTrack.naturalSize;
    if (degrees == 90) {
        // Rotate 90° clockwise: translate right first so the rotated frame
        // lands inside the render rect.
        CGAffineTransform translateToCenter =
            CGAffineTransformMakeTranslation(naturalSize.height, 0.0);
        transform = CGAffineTransformRotate(translateToCenter, M_PI_2);
    } else if (degrees == 180) {
        // Rotate 180° clockwise.
        CGAffineTransform translateToCenter =
            CGAffineTransformMakeTranslation(naturalSize.width, naturalSize.height);
        transform = CGAffineTransformRotate(translateToCenter, M_PI);
    } else if (degrees == 270) {
        // Rotate 270° clockwise.
        CGAffineTransform translateToCenter =
            CGAffineTransformMakeTranslation(0.0, naturalSize.width);
        transform = CGAffineTransformRotate(translateToCenter, M_PI_2 + M_PI);
    } else if (degrees == -180) {
        // Horizontal mirror (flip about the vertical axis). Scaling x by -1
        // reflects the frame off-screen to the left, so translate it back by
        // the full width. Coordinates: +x right, +y down.
        transform = CGAffineTransformScale(transform, -1.0, 1.0);
        transform = CGAffineTransformTranslate(transform, -naturalSize.width, 0);
    }
    return transform;
}

/// Maps the track's preferredTransform matrix to the clockwise rotation (in
/// degrees) recorded at capture time. Returns -180 as a sentinel for a frame
/// flipped about the x-axis, 0 when the matrix matches no known orientation.
- (int)degressFromVideoFileWithVideoAssetTrack:(AVAssetTrack *)videoAssetTrack {
    int degress = 0;
    CGAffineTransform t = videoAssetTrack.preferredTransform;
    if (t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) {
        degress = 90;   // Portrait
    } else if (t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
        degress = 270;  // PortraitUpsideDown
    } else if (t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0) {
        degress = 0;    // LandscapeRight
    } else if (t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
        // LandscapeLeft. The original listed this exact condition twice; the
        // second, unreachable copy has been removed.
        degress = 180;
    } else if (t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
        degress = -180; // Flipped about the x-axis
    }
    return degress;
}

/// Completion path for the export: compresses the rotated file at `fileURL`
/// into a lower-quality temp file and records the result on self.
- (void)exportDidFinish:(NSURL *)fileURL {
    NSLog(@"fileURL is %@", fileURL);
    dispatch_async(dispatch_get_main_queue(), ^{
        if ([XCFileManager isExistsAtPath:[self.videoUrl path]]) {
            NSURL *outputURL = [NSURL fileURLWithPath:
                [NSTemporaryDirectory() stringByAppendingPathComponent:@"temp.mov"]];
            [self convertVideoToLowQuailtyWithInputURL:fileURL
                                             outputURL:outputURL
                                               handler:^(AVAssetExportSession *exportSession) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    self.recordState = FMRecordStateFinish;
                });
                if (exportSession.status == AVAssetExportSessionStatusCompleted) {
                    self.videoUrl = outputURL;
                    // FIXME(review): `firstImage` is not declared anywhere in
                    // this excerpt — presumably a property or captured local in
                    // the original class; confirm against the full source.
                    self.videoFirstImage = firstImage;
                } else {
                    printf("error\n");
                }
            }];
        }
    });
}
转载:https://blog.csdn.net/jeffasd/article/details/51887064