On iOS, recording is done with AVAudioRecorder, and AVPlayer works well for playback; there are many tutorials online. I won't go over the basics here — the code is attached below.
First, the requirements: the project needs recording, uploading, and playback features. Details: a prompt tone must play before recording starts; while recording, an animation reflects the current sound intensity; and there is a timer that tracks the recording duration.
Let me talk about the pitfalls I ran into while building this feature.
The audio recording code below uses SpectrumView (see the linked project). It provides a sound-intensity animation effect, which can be removed if you don't need it.
Record audio code:
1. First, talk about the format of recording audio.
As a result, I recommend recording in the AAC format. AAC (Advanced Audio Coding) produces small files with good sound quality and — most importantly — is compatible with Android.
2. Uploading to Qiniu Cloud. The big problem I hit here: when I uploaded the audio to Qiniu, the format was unrecognized and reported as application/octet-stream. I first tried the .mp4 format; after uploading, it was recognized as video/mp4 and played normally, but I later found that recordings made on Android could not be played. I spent an afternoon browsing blogs for a solution — fruitless. In the end I settled on storing the file locally and playing it back from there, which solved the problem. (The recordings here are small, no more than 30 seconds.)
Attached here are some demo fragments, which need to be taken by oneself.
Recording section:

#import "AudioRecorderVC.h"
#import <AVFoundation/AVFoundation.h>

// File name of the recording inside Documents/AudioData.
#define kRecordAudioFile @"myRecord.aac"

// Maximum recording length in seconds; recording auto-stops at this limit.
static const int kMaxRecordSeconds = 30;

@interface AudioRecorderVC () <AVAudioRecorderDelegate>

@property (strong, nonatomic) SpectrumView *spectrumView;      // sound-intensity animation view
@property (nonatomic, strong) AVAudioRecorder *audioRecorder;  // the audio recorder
@property (nonatomic, strong) AVAudioPlayer *bellplayer;       // short prompt-tone player
@property (nonatomic, assign) int sCountup;                    // elapsed seconds while recording
@property (nonatomic, strong) NSTimer *mTimer;                 // one-second count-up timer
@property (nonatomic, assign) BOOL recorderButtonConfigured;   // guards one-time target wiring

@end

@implementation AudioRecorderVC

#pragma mark - Controller view methods

- (void)viewDidLoad {
    [super viewDidLoad];
    [self.naView setHidden:YES];
    [self.statusTip setHidden:YES];
    [self.view createBordersWithColor:[UIColor clearColor] withCornerRadius:6 andWidth:1];
    [self.labReminder createBordersWithColor:[UIColor clearColor] withCornerRadius:4 andWidth:1];
    [self.labReminder setTextColor:MCOLOR_FFFFFF];
    [self addTapGesture];
    [self addSpectrumView];
    [self labToSize];

    // Warm up the audio hardware once so the first real recording starts
    // without a noticeable delay, then discard the throwaway file.
    // FIX: this used to live in -viewDidLayoutSubviews, which can run many
    // times and would stop an in-progress recording and delete its file.
    [self.audioRecorder record];
    [self.audioRecorder stop];
    [self removeFile];
}

- (void)dealloc {
    // A scheduled NSTimer retains its target; make sure it cannot outlive us.
    [_mTimer invalidate];
}

- (void)addSpectrumView {
    if (!self.spectrumView) {
        __weak AudioRecorderVC *weakSelf = self;
        self.spectrumView = [[SpectrumView alloc] initWithFrame:CGRectMake(CGRectGetMidX(self.view.bounds) - 150, 240, 300, 60.0)];
        self.spectrumView.hidden = YES;
        self.spectrumView.text = [NSString stringWithFormat:@"%d", 0];
        // Weak references on both sides so the callback creates no retain cycle.
        __weak SpectrumView *weakSpectrum = self.spectrumView;
        self.spectrumView.itemLevelCallback = ^() {
            [weakSelf.audioRecorder updateMeters];
            // Channel 0 average power; the range is -160 dB (silence) to 0 dB.
            float power = [weakSelf.audioRecorder averagePowerForChannel:0];
            weakSpectrum.level = power;
        };
        [self.view addSubview:self.spectrumView];
    }
}

- (void)addTapGesture {
    // Tapping anywhere on the background dismisses the popup via the block.
    UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(clickPop)];
    [self.view addGestureRecognizer:tapGesture];
}

- (void)clickPop {
    if (_Block) {
        _Block(nil);
    }
}

#pragma mark - Getters (lazy loading)

- (UIButton *)btnRecorder {
    // FIX: wire up the control events exactly once; the original added the
    // same targets on every access of this getter.
    if (!_recorderButtonConfigured && _btnRecorder) {
        _recorderButtonConfigured = YES;
        // Press down: start recording.
        [_btnRecorder addTarget:self action:@selector(recordStart:) forControlEvents:UIControlEventTouchDown];
        // Lift inside the button: finish recording.
        [_btnRecorder addTarget:self action:@selector(recordFinish:) forControlEvents:UIControlEventTouchUpInside];
    }
    return _btnRecorder;
}

/**
 * Lazily creates the recorder with the shared session, save path, and
 * AAC settings.
 *
 * @return The AVAudioRecorder instance, or nil if creation failed.
 */
- (AVAudioRecorder *)audioRecorder {
    if (!_audioRecorder) {
        [self setAudioSession];
        NSURL *url = [self getSavePath];                 // destination file
        NSDictionary *setting = [self getAudioSetting];  // AAC format settings
        NSError *error = nil;
        _audioRecorder = [[AVAudioRecorder alloc] initWithURL:url settings:setting error:&error];
        if (error) {
            NSLog(@"An error occurred while creating a recorder object. Error message:%@", error.localizedDescription);
            _audioRecorder = nil;  // FIX: don't cache a half-initialized recorder
            return nil;
        }
        _audioRecorder.delegate = self;
        // Metering must be enabled for averagePowerForChannel: to work.
        _audioRecorder.meteringEnabled = YES;
    }
    return _audioRecorder;
}

#pragma mark - Layout

- (void)viewDidLayoutSubviews {
    [super viewDidLayoutSubviews];
    CGFloat width = self.view.bounds.size.width;
    CGFloat height = self.view.bounds.size.height;
    self.btnRecorder.frame = CGRectMake(width / 2.f - 50.f, height - 180.f, 100.f, 100.f);
}

#pragma mark - Control events

- (void)recordStart:(UIButton *)button {
    if (![self.audioRecorder isRecording]) {
        NSLog(@"Recording begins");
        [self startScount];
        [self playthebell];
        [self.audioRecorder record];
        [self startAnimate];
        self.labReminder.hidden = YES;
        self.spectrumView.hidden = NO;
    }
}

- (void)recordFinish:(UIButton *)button {
    if ([self.audioRecorder isRecording]) {
        NSLog(@"complete");
        [self.audioRecorder stop];
        [self stopAnimate];
        self.spectrumView.hidden = NO;
        [self judgePushAudio];
    }
}

- (void)startAnimate {
    [self.spectrumView start];
}

- (void)stopAnimate {
    [self.spectrumView stop];
    [self.mTimer invalidate];
    self.mTimer = nil;
}

- (void)setAudioSession {
    AVAudioSession *session = [AVAudioSession sharedInstance];
    NSError *sessionError = nil;
    // PlayAndRecord lets the same session both record and play the prompt tone.
    // FIX: check the method's return value — the original tested the (never
    // nil) singleton instead of whether setCategory:error: succeeded.
    if (![session setCategory:AVAudioSessionCategoryPlayAndRecord error:&sessionError]) {
        NSLog(@"Error creating session: %@", [sessionError description]);
    } else {
        [session setActive:YES error:nil];
    }
}

/**
 * Recording format settings.
 *
 * @return AAC, 8 kHz (telephone quality — enough for short voice memos), mono.
 *         FIX: the linear-PCM-only keys the original set (8-bit depth plus
 *         float sampling, a contradictory pair) are ignored for AAC and have
 *         been removed.
 */
- (NSDictionary *)getAudioSetting {
    NSMutableDictionary *dicM = [NSMutableDictionary dictionary];
    // AAC: small files, good quality, playable on Android as well.
    [dicM setObject:@(kAudioFormatMPEG4AAC) forKey:AVFormatIDKey];
    // 8000 Hz is the telephone sampling rate.
    [dicM setObject:@(8000) forKey:AVSampleRateKey];
    // Mono channel.
    [dicM setObject:@(1) forKey:AVNumberOfChannelsKey];
    return dicM;
}

/**
 * Path of the recording file, creating Documents/AudioData if needed.
 *
 * @return file URL of Documents/AudioData/myRecord.aac
 */
- (NSURL *)getSavePath {
    NSString *path = [[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject] stringByAppendingPathComponent:@"AudioData"];
    NSFileManager *fileManager = [NSFileManager defaultManager];
    BOOL isDir = NO;
    BOOL isDirExist = [fileManager fileExistsAtPath:path isDirectory:&isDir];
    if (!(isDirExist && isDir)) {
        BOOL bCreateDir = [fileManager createDirectoryAtPath:path withIntermediateDirectories:YES attributes:nil error:nil];
        if (!bCreateDir) {
            NSLog(@"Failed to create folder!");
        }
        NSLog(@"Create folder successfully, file path%@", path);
    }
    path = [path stringByAppendingPathComponent:kRecordAudioFile];
    NSLog(@"file path:%@", path);
    return [NSURL fileURLWithPath:path];
}

- (void)removeFile {
    NSString *path = [[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject] stringByAppendingPathComponent:@"AudioData"];
    path = [path stringByAppendingPathComponent:kRecordAudioFile];
    NSFileManager *fileManager = [NSFileManager defaultManager];
    // FIX: only attempt the delete when the file exists, so a fresh launch
    // does not log a spurious "Unable to delete file" error.
    if (![fileManager fileExistsAtPath:path]) {
        return;
    }
    NSError *error = nil;
    if (![fileManager removeItemAtPath:path error:&error]) {
        NSLog(@"Unable to delete file: %@", [error localizedDescription]);
    }
}

- (void)judgePushAudio {
    if (_sCountup < 1) {
        // Too short to be useful — discard it.
        [self showToast:@"The recording time is too short, please try again!"];
        [self removeFile];
    } else if (_sCountup >= 1 && _sCountup <= kMaxRecordSeconds) {
        // Hand the finished file back to the caller.
        if (_Block) {
            _Block([self getSavePath]);
        }
    }
}

- (void)labToSize {
    // Re-render the reminder label with extra line spacing, then size to fit.
    NSMutableAttributedString *attributedString = [[NSMutableAttributedString alloc] initWithString:self.labReminder.text];
    NSMutableParagraphStyle *paragraphStyle = [[NSMutableParagraphStyle alloc] init];
    [paragraphStyle setLineSpacing:20.0f];
    [attributedString addAttribute:NSParagraphStyleAttributeName value:paragraphStyle range:NSMakeRange(0, [self.labReminder.text length])];
    self.labReminder.attributedText = attributedString;
    [self.labReminder sizeToFit];
}

#pragma mark - Prompt sound

- (void)playthebell {
    // Play a short "recording started" blip, cut off after 0.2 s.
    NSString *filePath = [[NSBundle mainBundle] pathForResource:@"talkroom_begin" ofType:@"mp3"];
    if (!filePath) {
        return;  // resource missing — fail quietly rather than crash on a nil URL
    }
    self.bellplayer = [[AVAudioPlayer alloc] initWithContentsOfURL:[NSURL fileURLWithPath:filePath] error:nil];
    self.bellplayer.volume = 1.0;
    self.bellplayer.numberOfLoops = 0;  // FIX: one-shot tone; -1 (loop forever) was unnecessary
    [self.bellplayer prepareToPlay];
    [self.bellplayer play];
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.2 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
        [self.bellplayer stop];
    });
}

#pragma mark - Timer

- (void)startScount {
    self.sCountup = 0;
    // FIX: schedule the timer directly — the original wrapped this in a
    // pointless UIView animation block.
    self.mTimer = [NSTimer scheduledTimerWithTimeInterval:1 target:self selector:@selector(startCountUp) userInfo:nil repeats:YES];
}

- (void)startCountUp {
    _sCountup++;
    self.spectrumView.timeLabel.text = [NSString stringWithFormat:@"%ds", _sCountup];
    if (_sCountup == kMaxRecordSeconds) {
        // Hard cap: auto-finish after 30 seconds.
        [self recordFinish:nil];
    }
}

@end
3. Playback section:
#pragma mark - Playing back the recording

/// Adds the tappable voice button (with its three-frame "speaking" animation)
/// to the given cell.
- (void)addVoiceButton:(DoorVoucherCell *)cell {
    UIButton *voiceBtn = [[UIButton alloc] initWithFrame:CGRectMake(cell.contentValue.left, 0, 150, 50)];
    [voiceBtn addTarget:self action:@selector(playRecoderVoice) forControlEvents:UIControlEventTouchUpInside];
    voice = [[UIImageView alloc] initWithFrame:CGRectMake(0, 12, 25, 25)];
    // Still image shown before the animation starts.
    voice.image = [UIImage imageNamed:@"chat_animation_white3"];
    // The three animation frames, in playback order.
    voice.animationImages = @[[UIImage imageNamed:@"chat_animation_white1"],
                              [UIImage imageNamed:@"chat_animation_white2"],
                              [UIImage imageNamed:@"chat_animation_white3"]];
    voice.animationDuration = 1;     // one full cycle per second
    voice.animationRepeatCount = 0;  // 0 = repeat indefinitely
    voice.userInteractionEnabled = NO;
    voice.backgroundColor = [UIColor clearColor];
    [voiceBtn addSubview:voice];
    [cell addSubview:voiceBtn];
}

/// Downloads the remote audio and writes it to Documents/AudioData/myRecord.aac.
/// Playing the local copy avoids the cross-platform streaming problems
/// described above.
///
/// NOTE(review): -dataWithContentsOfURL: is a synchronous network call and
/// will block the calling thread; move it off the main thread for larger files.
///
/// @return file URL of the locally cached recording.
- (NSURL *)writeRecoderToFile {
    NSURL *url = [NSURL URLWithString:self.dataModel.voice];
    NSData *audioData = [NSData dataWithContentsOfURL:url];
    NSString *dirPath = [[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject] stringByAppendingPathComponent:@"AudioData"];
    // FIX: make sure the directory exists; the original silently assumed the
    // recording screen had already created it, so writeToFile: could fail.
    [[NSFileManager defaultManager] createDirectoryAtPath:dirPath withIntermediateDirectories:YES attributes:nil error:nil];
    NSString *path = [dirPath stringByAppendingPathComponent:@"myRecord.aac"];
    // FIX: guard against a failed download — writing nil data was undefined.
    if (audioData) {
        [audioData writeToFile:path atomically:YES];
    }
    return [NSURL fileURLWithPath:path];
}

- (void)playRecoderVoice {
    [self setAudioPlayer];
    [self.audioPlayer play];
    // Start the speaking animation shortly after playback begins.
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(1.0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
        [voice startAnimating];
    });
}

/// Called when the player item finishes: play the end tone and reset the icon.
- (void)playerItemDidReachEnd {
    [self playthebell];
    [voice stopAnimating];
    voice.image = [UIImage imageNamed:@"chat_animation_white3"];
}

/// Builds a fresh AVPlayer for the locally cached recording and registers for
/// its end-of-playback notification.
- (AVPlayer *)setAudioPlayer {
    AVPlayerItem *songItem = [[AVPlayerItem alloc] initWithURL:[self writeRecoderToFile]];
    AVAudioSession *session = [AVAudioSession sharedInstance];
    // FIX: set the category before activating the session (the original did
    // it the other way round).
    [session setCategory:AVAudioSessionCategoryPlayback error:nil];
    [session setActive:YES error:nil];
    [[UIApplication sharedApplication] beginReceivingRemoteControlEvents];
    _audioPlayer = [[AVPlayer alloc] initWithPlayerItem:songItem];
    // FIX: remove any previous observer before re-registering, and scope the
    // observation to this item. The original registered with object:nil on
    // every play, accumulating duplicate observers that fire for any item.
    // (Also removed: a local NSError that was never assigned, followed by a
    // dead `if (error)` check.)
    [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:nil];
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(playerItemDidReachEnd) name:AVPlayerItemDidPlayToEndTimeNotification object:songItem];
    return _audioPlayer;
}

#pragma mark - Prompt sound

/// Short "playback finished" blip, cut off after 0.2 s.
- (void)playthebell {
    NSString *filePath = [[NSBundle mainBundle] pathForResource:@"talkroom_up" ofType:@"mp3"];
    if (!filePath) {
        return;  // resource missing — nothing to play
    }
    self.bellplayer = [[AVAudioPlayer alloc] initWithContentsOfURL:[NSURL fileURLWithPath:filePath] error:nil];
    self.bellplayer.volume = 1.0;
    self.bellplayer.numberOfLoops = 0;  // FIX: one-shot tone; -1 looped forever
    [self.bellplayer prepareToPlay];
    [self.bellplayer play];
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.2 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
        [self.bellplayer stop];
    });
}
If you have any questions, you are welcome to ask questions. If you like, please give some praise, Star. Thank you.