about 10 years ago
Here is an iOS method that can clip an audio file (i.e., an MPMediaItem) to a given time interval.
In the method below, the time interval is specified by two parameters: `startinterval`, the offset (in seconds) at which clipping should begin, and `totalTimeInterval`, the duration (in seconds) of the clip. The file path of the clipped audio is delivered through the completion block.
// Clips an audio item from the iPod library to the range
// [startinterval, startinterval + totalTimeInterval) and writes it as
// mono 16-bit LPCM (CAF container) into the Documents directory.
//
// song              the library item to clip. DRM-protected items expose
//                   no asset URL and are rejected.
// startinterval     offset, in seconds, at which the clip begins.
// totalTimeInterval duration, in seconds, of the clip.
// callback          invoked (on a private serial queue) with the path of
//                   the finished file, or nil on failure. Only called
//                   after the writer has fully finalized the file.
//
// NOTE(review): assumes ARC. The original code sent -release to
// autoreleased factory objects and to exportPath (never owned) —
// guaranteed over-release crashes under MRC; those calls are removed.
- (void)convertAudioFileWithFile:(MPMediaItem *)song
                   startInterval:(NSTimeInterval)startinterval
               totalTimeInterval:(NSTimeInterval)totalTimeInterval
                      completion:(void (^)(NSString *urlString))callback {
    // Set up an AVAssetReader to read from the iPod library.
    NSURL *assetURL = [song valueForProperty:MPMediaItemPropertyAssetURL];
    if (!assetURL) {
        NSLog(@"item has no asset URL (DRM-protected?)");
        if (callback) callback(nil);
        return;
    }
    AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];

    NSError *assetError = nil;
    AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:songAsset
                                                               error:&assetError];
    // Check the nil return, not the error pointer (which may be stale).
    if (!assetReader) {
        NSLog(@"error: %@", assetError);
        if (callback) callback(nil);
        return;
    }

    // Build the clip range. CMTimeMakeWithSeconds preserves fractional
    // seconds (the original assigned timescale * NSTimeInterval into the
    // integer .value field, truncating). CMTimeRangeMake takes
    // (start, DURATION) — not (start, end).
    CMTimeScale timescale = songAsset.duration.timescale;
    CMTime start = CMTimeMakeWithSeconds(startinterval, timescale);
    CMTime duration = CMTimeMakeWithSeconds(totalTimeInterval, timescale);
    assetReader.timeRange = CMTimeRangeMake(start, duration);

    AVAssetReaderOutput *assetReaderOutput =
        [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:songAsset.tracks
                                                                audioSettings:nil];
    if (![assetReader canAddOutput:assetReaderOutput]) {
        NSLog(@"can't add reader output... die!");
        if (callback) callback(nil);
        return;
    }
    [assetReader addOutput:assetReaderOutput];

    // Export to Documents/EXPORT_NAME, replacing any previous export.
    NSArray *dirs = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,
                                                        NSUserDomainMask, YES);
    NSString *exportPath = [dirs.firstObject stringByAppendingPathComponent:EXPORT_NAME];
    if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath]) {
        [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
    }
    NSURL *exportURL = [NSURL fileURLWithPath:exportPath];

    AVAssetWriter *assetWriter = [AVAssetWriter assetWriterWithURL:exportURL
                                                          fileType:AVFileTypeCoreAudioFormat
                                                             error:&assetError];
    if (!assetWriter) {
        NSLog(@"error: %@", assetError);
        if (callback) callback(nil);
        return;
    }

    // Output format: mono, 16-bit, little-endian, interleaved integer
    // LPCM at 12 kHz (12000 is an integer sample rate; the original
    // funneled the literal 12000.0 through numberWithInt:).
    AudioChannelLayout channelLayout;
    memset(&channelLayout, 0, sizeof(channelLayout));
    channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
    NSDictionary *outputSettings = @{
        AVFormatIDKey               : @(kAudioFormatLinearPCM),
        AVSampleRateKey             : @12000,
        AVNumberOfChannelsKey       : @1,
        AVChannelLayoutKey          : [NSData dataWithBytes:&channelLayout
                                                     length:sizeof(channelLayout)],
        AVLinearPCMBitDepthKey      : @16,
        AVLinearPCMIsNonInterleaved : @NO,
        AVLinearPCMIsFloatKey       : @NO,
        AVLinearPCMIsBigEndianKey   : @NO,
    };

    AVAssetWriterInput *assetWriterInput =
        [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                           outputSettings:outputSettings];
    if (![assetWriter canAddInput:assetWriterInput]) {
        NSLog(@"can't add asset writer input... die!");
        if (callback) callback(nil);
        return;
    }
    [assetWriter addInput:assetWriterInput];
    assetWriterInput.expectsMediaDataInRealTime = NO;  // offline transcode

    [assetWriter startWriting];
    [assetReader startReading];
    [assetWriter startSessionAtSourceTime:kCMTimeZero];

    dispatch_queue_t mediaInputQueue =
        dispatch_queue_create("mediaInputQueue", DISPATCH_QUEUE_SERIAL);
    [assetWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^{
        while (assetWriterInput.readyForMoreMediaData) {
            CMSampleBufferRef nextBuffer = [assetReaderOutput copyNextSampleBuffer];
            if (nextBuffer) {
                [assetWriterInput appendSampleBuffer:nextBuffer];
                // copyNextSampleBuffer returns a +1 retained buffer; the
                // original never released it and leaked every buffer.
                CFRelease(nextBuffer);
            } else {
                // Reader exhausted (or failed): finish the writer and
                // report only once the file is fully written. The original
                // invoked the callback immediately after *scheduling*
                // finishWritingWithCompletionHandler:, handing back a
                // possibly-incomplete file.
                [assetWriterInput markAsFinished];
                [assetWriter finishWritingWithCompletionHandler:^{
                    BOOL ok = (assetWriter.status == AVAssetWriterStatusCompleted &&
                               assetReader.status == AVAssetReaderStatusCompleted);
                    if (ok) {
                        NSDictionary *attrs = [[NSFileManager defaultManager]
                            attributesOfItemAtPath:exportPath error:nil];
                        NSLog(@"done. file size is %llu", [attrs fileSize]);
                    } else {
                        NSLog(@"export failed: %@",
                              assetWriter.error ?: assetReader.error);
                    }
                    if (callback) callback(ok ? exportPath : nil);
                }];
                break;
            }
        }
    }];
}
// Clips an audio item from the iPod library to the range
// [startinterval, startinterval + totalTimeInterval) and writes it as
// mono 16-bit LPCM (CAF container) into the Documents directory.
//
// song              the library item to clip. DRM-protected items expose
//                   no asset URL and are rejected.
// startinterval     offset, in seconds, at which the clip begins.
// totalTimeInterval duration, in seconds, of the clip.
// callback          invoked (on a private serial queue) with the path of
//                   the finished file, or nil on failure. Only called
//                   after the writer has fully finalized the file.
//
// NOTE(review): assumes ARC. The original code sent -release to
// autoreleased factory objects and to exportPath (never owned) —
// guaranteed over-release crashes under MRC; those calls are removed.
- (void)convertAudioFileWithFile:(MPMediaItem *)song
                   startInterval:(NSTimeInterval)startinterval
               totalTimeInterval:(NSTimeInterval)totalTimeInterval
                      completion:(void (^)(NSString *urlString))callback {
    // Set up an AVAssetReader to read from the iPod library.
    NSURL *assetURL = [song valueForProperty:MPMediaItemPropertyAssetURL];
    if (!assetURL) {
        NSLog(@"item has no asset URL (DRM-protected?)");
        if (callback) callback(nil);
        return;
    }
    AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];

    NSError *assetError = nil;
    AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:songAsset
                                                               error:&assetError];
    // Check the nil return, not the error pointer (which may be stale).
    if (!assetReader) {
        NSLog(@"error: %@", assetError);
        if (callback) callback(nil);
        return;
    }

    // Build the clip range. CMTimeMakeWithSeconds preserves fractional
    // seconds (the original assigned timescale * NSTimeInterval into the
    // integer .value field, truncating). CMTimeRangeMake takes
    // (start, DURATION) — not (start, end).
    CMTimeScale timescale = songAsset.duration.timescale;
    CMTime start = CMTimeMakeWithSeconds(startinterval, timescale);
    CMTime duration = CMTimeMakeWithSeconds(totalTimeInterval, timescale);
    assetReader.timeRange = CMTimeRangeMake(start, duration);

    AVAssetReaderOutput *assetReaderOutput =
        [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:songAsset.tracks
                                                                audioSettings:nil];
    if (![assetReader canAddOutput:assetReaderOutput]) {
        NSLog(@"can't add reader output... die!");
        if (callback) callback(nil);
        return;
    }
    [assetReader addOutput:assetReaderOutput];

    // Export to Documents/EXPORT_NAME, replacing any previous export.
    NSArray *dirs = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,
                                                        NSUserDomainMask, YES);
    NSString *exportPath = [dirs.firstObject stringByAppendingPathComponent:EXPORT_NAME];
    if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath]) {
        [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
    }
    NSURL *exportURL = [NSURL fileURLWithPath:exportPath];

    AVAssetWriter *assetWriter = [AVAssetWriter assetWriterWithURL:exportURL
                                                          fileType:AVFileTypeCoreAudioFormat
                                                             error:&assetError];
    if (!assetWriter) {
        NSLog(@"error: %@", assetError);
        if (callback) callback(nil);
        return;
    }

    // Output format: mono, 16-bit, little-endian, interleaved integer
    // LPCM at 12 kHz (12000 is an integer sample rate; the original
    // funneled the literal 12000.0 through numberWithInt:).
    AudioChannelLayout channelLayout;
    memset(&channelLayout, 0, sizeof(channelLayout));
    channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
    NSDictionary *outputSettings = @{
        AVFormatIDKey               : @(kAudioFormatLinearPCM),
        AVSampleRateKey             : @12000,
        AVNumberOfChannelsKey       : @1,
        AVChannelLayoutKey          : [NSData dataWithBytes:&channelLayout
                                                     length:sizeof(channelLayout)],
        AVLinearPCMBitDepthKey      : @16,
        AVLinearPCMIsNonInterleaved : @NO,
        AVLinearPCMIsFloatKey       : @NO,
        AVLinearPCMIsBigEndianKey   : @NO,
    };

    AVAssetWriterInput *assetWriterInput =
        [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                           outputSettings:outputSettings];
    if (![assetWriter canAddInput:assetWriterInput]) {
        NSLog(@"can't add asset writer input... die!");
        if (callback) callback(nil);
        return;
    }
    [assetWriter addInput:assetWriterInput];
    assetWriterInput.expectsMediaDataInRealTime = NO;  // offline transcode

    [assetWriter startWriting];
    [assetReader startReading];
    [assetWriter startSessionAtSourceTime:kCMTimeZero];

    dispatch_queue_t mediaInputQueue =
        dispatch_queue_create("mediaInputQueue", DISPATCH_QUEUE_SERIAL);
    [assetWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^{
        while (assetWriterInput.readyForMoreMediaData) {
            CMSampleBufferRef nextBuffer = [assetReaderOutput copyNextSampleBuffer];
            if (nextBuffer) {
                [assetWriterInput appendSampleBuffer:nextBuffer];
                // copyNextSampleBuffer returns a +1 retained buffer; the
                // original never released it and leaked every buffer.
                CFRelease(nextBuffer);
            } else {
                // Reader exhausted (or failed): finish the writer and
                // report only once the file is fully written. The original
                // invoked the callback immediately after *scheduling*
                // finishWritingWithCompletionHandler:, handing back a
                // possibly-incomplete file.
                [assetWriterInput markAsFinished];
                [assetWriter finishWritingWithCompletionHandler:^{
                    BOOL ok = (assetWriter.status == AVAssetWriterStatusCompleted &&
                               assetReader.status == AVAssetReaderStatusCompleted);
                    if (ok) {
                        NSDictionary *attrs = [[NSFileManager defaultManager]
                            attributesOfItemAtPath:exportPath error:nil];
                        NSLog(@"done. file size is %llu", [attrs fileSize]);
                    } else {
                        NSLog(@"export failed: %@",
                              assetWriter.error ?: assetReader.error);
                    }
                    if (callback) callback(ok ? exportPath : nil);
                }];
                break;
            }
        }
    }];
}
0 Comment(s)