How to make slow motion video in iOS


Answers

I know I am late to this thread, but I have managed to add slow motion to a video, including the audio, and with the correct output orientation. Hope this helps someone.

- (void)SlowMotion:(NSURL *)URl
{
    AVURLAsset *videoAsset = [AVURLAsset URLAssetWithURL:URl options:nil]; //self.inputAsset;

    AVAsset *currentAsset = [AVAsset assetWithURL:URl];
    AVAssetTrack *vdoTrack = [[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    // Create a mutable composition with one video and one audio track.
    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

    // Copy the source video track into the composition.
    NSError *videoInsertError = nil;
    BOOL videoInsertResult = [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                                            ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                                             atTime:kCMTimeZero
                                                              error:&videoInsertError];
    if (!videoInsertResult || videoInsertError != nil) {
        // Handle error
        return;
    }

    // Copy the source audio track into the composition.
    NSError *audioInsertError = nil;
    BOOL audioInsertResult = [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                                            ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                                             atTime:kCMTimeZero
                                                              error:&audioInsertError];
    if (!audioInsertResult || audioInsertError != nil) {
        // Handle error
        return;
    }

    // Slow down the whole video by a factor of 2.0: stretching both tracks
    // to twice their duration halves the playback rate.
    double videoScaleFactor = 2.0;
    CMTime videoDuration = videoAsset.duration;

    [compositionVideoTrack scaleTimeRange:CMTimeRangeMake(kCMTimeZero, videoDuration)
                               toDuration:CMTimeMake(videoDuration.value * videoScaleFactor, videoDuration.timescale)];
    [compositionAudioTrack scaleTimeRange:CMTimeRangeMake(kCMTimeZero, videoDuration)
                               toDuration:CMTimeMake(videoDuration.value * videoScaleFactor, videoDuration.timescale)];

    // Keep the source orientation in the output.
    [compositionVideoTrack setPreferredTransform:vdoTrack.preferredTransform];

    NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *docsDir = [dirPaths objectAtIndex:0];
    NSString *outputFilePath = [docsDir stringByAppendingPathComponent:@"slowMotion.mov"];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
    NSURL *_filePath = [NSURL fileURLWithPath:outputFilePath];

    // Export the composition.
    AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                         presetName:AVAssetExportPresetLowQuality];
    assetExport.outputURL = _filePath;
    assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    assetExport.shouldOptimizeForNetworkUse = YES;
    [assetExport exportAsynchronouslyWithCompletionHandler:^{
        switch ([assetExport status]) {
            case AVAssetExportSessionStatusFailed:
            {
                NSLog(@"Export session failed with error: %@", [assetExport error]);
                dispatch_async(dispatch_get_main_queue(), ^{
                    // completion(nil);
                });
            }
                break;
            case AVAssetExportSessionStatusCompleted:
            {
                NSLog(@"Successful");
                NSURL *outputURL = assetExport.outputURL;

                ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
                if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {
                    [self writeExportedVideoToAssetsLibrary:outputURL];
                }
                dispatch_async(dispatch_get_main_queue(), ^{
                    // completion(_filePath);
                });
            }
                break;
            default:
                break;
        }
    }];
}

- (void)writeExportedVideoToAssetsLibrary:(NSURL *)url
{
    NSURL *exportURL = url;
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:exportURL]) {
        [library writeVideoAtPathToSavedPhotosAlbum:exportURL completionBlock:^(NSURL *assetURL, NSError *error) {
            dispatch_async(dispatch_get_main_queue(), ^{
                if (error) {
                    UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:[error localizedDescription]
                                                                        message:[error localizedRecoverySuggestion]
                                                                       delegate:nil
                                                              cancelButtonTitle:@"OK"
                                                              otherButtonTitles:nil];
                    [alertView show];
                } else {
                    // [activityView setHidden:YES];
                    UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:@"Success"
                                                                        message:@"Video added to gallery successfully"
                                                                       delegate:nil
                                                              cancelButtonTitle:@"OK"
                                                              otherButtonTitles:nil];
                    [alertView show];
                }
#if !TARGET_IPHONE_SIMULATOR
                // Remove the temporary file once it has been saved to the library.
                [[NSFileManager defaultManager] removeItemAtURL:exportURL error:nil];
#endif
            });
        }];
    } else {
        NSLog(@"Video could not be exported to assets library.");
    }
}
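
One caveat with this approach: scaleTimeRange stretches the audio samples as well, which lowers their pitch. On iOS 7 and later, AVAssetExportSession exposes an audioTimePitchAlgorithm property that controls how time-stretched audio is rendered. A minimal Swift sketch, assuming an export session like the ones in this post:

    import AVFoundation

    // Illustrative helper (not part of the original answer): choose how
    // time-stretched audio is rendered on export. Spectral keeps the
    // original pitch at the new rate; Varispeed lets the pitch drop with
    // the speed (the classic slowed-down sound).
    func configurePitch(for exportSession: AVAssetExportSession, keepPitch: Bool) {
        exportSession.audioTimePitchAlgorithm = keepPitch
            ? AVAudioTimePitchAlgorithmSpectral
            : AVAudioTimePitchAlgorithmVarispeed
    }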
Question

I need to do "slow motion" in a video file, along with its audio, in between some frames, and I need to store the ramped video as a new video.

Ref: http://www.youtube.com/watch?v=BJ3_xMGzauk (watch from 0 to 10 s)

From my analysis, I found that the AVFoundation framework can be helpful.

Ref: http://developer.apple.com/library/ios/#documentation/AudioVideo/Conceptual/AVFoundationPG/Articles/00_Introduction.html

Copy and paste from the above link:

"Editing: AV Foundation uses compositions to create new assets from existing pieces of media (typically, one or more video and audio tracks). You use a mutable composition to add and remove tracks and adjust their temporal orderings. You can also set the relative volumes and ramping of audio tracks, and set the opacity and opacity ramps of video tracks. A composition is an assemblage of pieces of media held in memory. When you export a composition using an export session, it is collapsed to a file. As of iOS 4.1, you can also create an asset from media such as sample buffers or still images using an asset writer."

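To illustrate the "ramping" the quote mentions, here is a minimal Swift sketch of an audio volume ramp applied at export time. The audioCompositionTrack and exportSession names refer to objects like those in the examples below, and the two-second fade is an arbitrary illustrative value:

    import AVFoundation

    // Fade the composition's audio from full volume to silence over the
    // first two seconds (illustrative values only).
    let audioMix = AVMutableAudioMix()
    let params = AVMutableAudioMixInputParameters(track: audioCompositionTrack)
    params.setVolumeRamp(fromStartVolume: 1.0,
                         toEndVolume: 0.0,
                         timeRange: CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(2, 600)))
    audioMix.inputParameters = [params]
    exportSession.audioMix = audioMix // applied when the file is written
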
Questions: Can I make the video/audio file "slow motion" using the AVFoundation framework? Or is another package available? If I want to handle the audio and video separately, please guide me on how to do it.
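
For the "between some frames" part: AVMutableCompositionTrack's scaleTimeRange(_:toDuration:) can be applied to just a sub-range of a track, which slows only that segment and shifts everything after it. A minimal Swift sketch, assuming composition tracks built as in the answer above (the helper name and the 2 to 4 second range are made up for illustration):

    import AVFoundation

    // Hypothetical helper: slow down only [startSeconds, endSeconds] of a
    // composition track that already contains the inserted media.
    func slowDown(track: AVMutableCompositionTrack,
                  from startSeconds: Float64,
                  to endSeconds: Float64,
                  by factor: Int32) {
        let start = CMTimeMakeWithSeconds(startSeconds, 600)
        let end = CMTimeMakeWithSeconds(endSeconds, 600)
        let range = CMTimeRangeFromTimeToTime(start, end)
        // Stretching the range to factor times its length plays it at
        // 1/factor speed; media before the range is untouched, media
        // after it is pushed later.
        track.scaleTimeRange(range, toDuration: CMTimeMultiply(range.duration, factor))
    }

    // e.g. half speed between seconds 2 and 4, applied to both tracks so
    // audio and video stay in sync:
    // slowDown(track: videoCompositionTrack, from: 2, to: 4, by: 2)
    // slowDown(track: audioCompositionTrack, from: 2, to: 4, by: 2)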

Update: code for the AV export session:

    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *outputURL = paths[0];
    NSFileManager *manager = [NSFileManager defaultManager];
    [manager createDirectoryAtPath:outputURL withIntermediateDirectories:YES attributes:nil error:nil];
    outputURL = [outputURL stringByAppendingPathComponent:@"output.mp4"];
    // Remove existing file
    [manager removeItemAtPath:outputURL error:nil];
    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:self.inputAsset presetName:AVAssetExportPresetLowQuality];
    exportSession.outputURL = [NSURL fileURLWithPath:outputURL]; // output path
    exportSession.outputFileType = AVFileTypeMPEG4; // match the .mp4 extension
    [exportSession exportAsynchronouslyWithCompletionHandler:^(void) {
        if (exportSession.status == AVAssetExportSessionStatusCompleted) {
            [self writeVideoToPhotoLibrary:[NSURL fileURLWithPath:outputURL]];
            ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
            [library writeVideoAtPathToSavedPhotosAlbum:[NSURL fileURLWithPath:outputURL] completionBlock:^(NSURL *assetURL, NSError *error) {
                if (error) {
                    NSLog(@"Video could not be saved");
                }
            }];
        } else {
            NSLog(@"error: %@", [exportSession error]);
        }
    }];



An example in Swift:

I

var asset: AVAsset?

func configureAssets() {

    let videoAsset = AVURLAsset(url: Bundle.main.url(forResource: "sample", withExtension: "m4v")!)
    let audioAsset = AVURLAsset(url: Bundle.main.url(forResource: "sample", withExtension: "m4a")!)
    // let audioAsset2 = AVURLAsset(url: Bundle.main.url(forResource: "audio2", withExtension: "m4a")!)

    let comp = AVMutableComposition()

    let videoAssetSourceTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo).first! as AVAssetTrack
    // Note: this takes the audio track embedded in the video file; use
    // audioAsset here instead if the audio should come from the separate m4a.
    let audioAssetSourceTrack = videoAsset.tracks(withMediaType: AVMediaTypeAudio).first! as AVAssetTrack
    // let audioAssetSourceTrack2 = audioAsset2.tracks(withMediaType: AVMediaTypeAudio).first! as AVAssetTrack

    let videoCompositionTrack = comp.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
    let audioCompositionTrack = comp.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)

    do {
        // Insert the first nine seconds of video and audio into the composition.
        try videoCompositionTrack.insertTimeRange(
            CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(9, 600)),
            of: videoAssetSourceTrack,
            at: kCMTimeZero)

        try audioCompositionTrack.insertTimeRange(
            CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(9, 600)),
            of: audioAssetSourceTrack,
            at: kCMTimeZero)

        // try audioCompositionTrack.insertTimeRange(
        //     CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(3, 600)),
        //     of: audioAssetSourceTrack2,
        //     at: CMTimeMakeWithSeconds(7, 600))

        // Stretch both tracks to twice their duration, i.e. half speed.
        let videoScaleFactor = Int64(2.0)
        let videoDuration: CMTime = videoAsset.duration

        videoCompositionTrack.scaleTimeRange(CMTimeRangeMake(kCMTimeZero, videoDuration), toDuration: CMTimeMake(videoDuration.value * videoScaleFactor, videoDuration.timescale))
        audioCompositionTrack.scaleTimeRange(CMTimeRangeMake(kCMTimeZero, videoDuration), toDuration: CMTimeMake(videoDuration.value * videoScaleFactor, videoDuration.timescale))
        videoCompositionTrack.preferredTransform = videoAssetSourceTrack.preferredTransform

    } catch { print(error) }

    asset = comp
}

II

func createFileFromAsset(_ asset: AVAsset) {

    let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] as URL

    let filePath = documentsDirectory.appendingPathComponent("rendered-audio.m4v")
    deleteFile(filePath)

    if let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetLowQuality) {

        exportSession.canPerformMultiplePassesOverSourceMediaData = true
        exportSession.outputURL = filePath
        exportSession.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration)
        exportSession.outputFileType = AVFileTypeQuickTimeMovie
        exportSession.exportAsynchronously {
            print("finished: \(filePath) : \(exportSession.status.rawValue)")
        }
    }
}

func deleteFile(_ filePath: URL) {
    guard FileManager.default.fileExists(atPath: filePath.path) else {
        return
    }

    do {
        try FileManager.default.removeItem(atPath: filePath.path)
    } catch {
        fatalError("Unable to delete file: \(error) : \(#function).")
    }
}
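
Putting the two pieces together, a hedged usage sketch (assuming both functions and the asset property live on the same object):

    // Build the half-speed composition, then export it to Documents.
    configureAssets()
    if let slowAsset = asset {
        createFileFromAsset(slowAsset)
    }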