Our project needs an instant-messaging chat feature similar to QQ and WeChat, with real-time message delivery handled over WebSocket. This article covers the voice-chat part, which really consists of two pieces: recording audio and playing it back. A simple voice-chat feature works as described below.
Recording
The AVFoundation framework provides the AVAudioRecorder class, which specializes in recording and supports multiple audio formats. Like AVAudioPlayer, you can think of it as a sound-recorder controller class. Here are its common properties and methods:
Common AVAudioRecorder properties:
@property (readonly, getter=isRecording) BOOL recording; // whether recording is in progress
@property (readonly) NSDictionary<NSString *, id> *settings; // recording configuration
@property (readonly) NSURL *url; // URL where the recording file is stored
@property (readonly) NSTimeInterval currentTime; // elapsed recording time
@property (getter=isMeteringEnabled) BOOL meteringEnabled; // whether audio-level metering is enabled
Common Object methods:
- (BOOL)prepareToRecord; // prepare the recording buffers
- (BOOL)record; // start recording; resumes recording when called after a pause
- (BOOL)recordAtTime:(NSTimeInterval)time; // start recording after the specified delay
- (BOOL)recordForDuration:(NSTimeInterval)duration; // record for the specified duration
- (BOOL)recordAtTime:(NSTimeInterval)time forDuration:(NSTimeInterval)duration; // the two above combined
- (void)pause; // pause recording
- (void)stop; // stop recording
- (BOOL)deleteRecording; // delete the recording; you must stop recording before deleting
Common delegate methods:
// Called when recording finishes
- (void)audioRecorderDidFinishRecording:(AVAudioRecorder *)recorder successfully:(BOOL)flag;
// Called when an encoding error occurs during recording
- (void)audioRecorderEncodeErrorDidOccur:(AVAudioRecorder *)recorder error:(NSError *)error;
Playback
If you need to play larger audio files or want precise control over playback, the System Sound Services are usually not enough; the common choice is AVAudioPlayer from AVFoundation.framework. AVAudioPlayer can be viewed as a player that supports many audio formats and offers control over progress, volume, playback rate, and so on.
The use of Avaudioplayer is relatively simple:
1. Initializes the Avaudioplayer object, which typically specifies the local file path.
2. Set the player properties, such as the number of repetitions, volume size, and so on.
3. Call play method playback.
Specific implementation code
#import <AVFoundation/AVFoundation.h>

// File-name suffix appended to each recording's date-based title.
static NSString * const kRecordAudioFile = @"MYRECORD.CAF";

@interface ViewController () <AVAudioRecorderDelegate> {
    NSString *dateName; // Date-formatted title of the recording currently in progress.
}

@property (weak, nonatomic) IBOutlet UITableView *table;
// Audio recorder used to capture voice messages.
@property (nonatomic, strong) AVAudioRecorder *audioRecorder;
// Audio player used to play back recorded files.
@property (nonatomic, strong) AVAudioPlayer *audioPlayer;
// Backing store of Model objects, one per recorded message.
@property (nonatomic, strong) NSMutableArray *spaceData;

@end

@implementation ViewController

#pragma mark - Private methods

/// Configures the shared audio session for simultaneous recording and
/// playback, so a recording can be played back right after it is captured.
- (void)setAudioSession {
    AVAudioSession *audioSession = [AVAudioSession sharedInstance];
    // PlayAndRecord lets us play the recording without re-configuring the session.
    [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
    [audioSession setActive:YES error:nil];
}

/// Builds the recorder configuration dictionary.
/// @return Linear-PCM recording settings suitable for voice messages.
- (NSDictionary *)getAudioSetting {
    NSMutableDictionary *dicM = [NSMutableDictionary dictionary];
    // Record as uncompressed linear PCM.
    dicM[AVFormatIDKey] = @(kAudioFormatLinearPCM);
    // 8000 Hz is telephone quality — sufficient for ordinary voice recording.
    dicM[AVSampleRateKey] = @(8000);
    // Mono: a single channel is enough for speech and halves the file size.
    dicM[AVNumberOfChannelsKey] = @(1);
    // 16-bit integer samples. (8-bit depth combined with float sampling is
    // not a valid linear-PCM configuration; float PCM requires 32-bit.)
    dicM[AVLinearPCMBitDepthKey] = @(16);
    dicM[AVLinearPCMIsFloatKey] = @(NO);
    // ... other settings could be added here.
    return dicM;
}
/// Builds the file URL where a recording with the given title is stored.
/// @param title Date-based title that uniquely identifies the recording.
/// @return File URL inside the app's Documents directory.
- (NSURL *)getPlayPath:(NSString *)title {
    NSString *urlStr = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject];
    urlStr = [urlStr stringByAppendingPathComponent:[NSString stringWithFormat:@"%@%@", title, kRecordAudioFile]];
    NSLog(@"Play file path: %@", urlStr);
    NSURL *url = [NSURL fileURLWithPath:urlStr];
    return url;
}

/// Formats the current date as a string used to title a new recording.
/// @return The current date formatted as "yyyy-MM-dd HH:mm:ss".
- (NSString *)convertDateFromString {
    NSDate *date = [NSDate date];
    NSLog(@"%@--ASKL", date);
    NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
    // "MM" is month-of-year (lowercase "mm" would be minutes). There is no
    // "zzz" specifier, so the string carries no time-zone information.
    [dateFormatter setDateFormat:@"yyyy-MM-dd HH:mm:ss"];
    NSString *destDateString = [dateFormatter stringFromDate:date];
    return destDateString;
}
Long-press to record; release to stop:
#pragma mark - Recording

/// Long-press handler: @"Begin" starts a new recording; any other value stops
/// the current recording and appends it to the table.
/// @param aState @"Begin" to start recording; anything else to stop.
- (void)setClikeSpaceState:(NSString *)aState {
    NSLog(@"click voice---");
    if ([aState isEqualToString:@"Begin"]) {
        NSLog(@"Begin---");
        // Title the recording with the current date so each file is unique.
        dateName = [self convertDateFromString];
        // Destination file for the new recording.
        NSURL *url = [self getPlayPath:dateName];
        // Recording-format settings.
        NSDictionary *setting = [self getAudioSetting];
        NSError *error = nil;
        _audioRecorder = [[AVAudioRecorder alloc] initWithURL:url settings:setting error:&error];
        if (!_audioRecorder) {
            // Surface a recorder-creation failure instead of silently messaging nil.
            NSLog(@"Failed to create recorder: %@", error.localizedDescription);
            return;
        }
        _audioRecorder.delegate = self;
        // Metering must be enabled to monitor sound levels.
        _audioRecorder.meteringEnabled = YES;
        if (![self.audioRecorder isRecording]) {
            // On first use, calling record prompts the user for microphone permission.
            [self.audioRecorder record];
            NSLog(@"111");
        }
    } else {
        NSLog(@"End---");
        // Stop recording.
        [self.audioRecorder stop];
        // Re-open the finished file to read its duration for the table cell.
        NSURL *url = [self getPlayPath:dateName];
        AVAudioPlayer *audioPlayer1 = [[AVAudioPlayer alloc] initWithContentsOfURL:url error:nil];
        Model *model = [[Model alloc] init];
        model.duration = [NSString stringWithFormat:@"%.f", audioPlayer1.duration];
        model.spacePath = dateName;
        // Refresh the table with the new message.
        [self.spaceData addObject:model];
        [self.table reloadData];
        // Scroll so the newly added row is visible.
        [self.table selectRowAtIndexPath:[NSIndexPath indexPathForRow:(self.spaceData.count - 1) inSection:0]
                                animated:YES
                          scrollPosition:UITableViewScrollPositionTop];
    }
}
Tap a table row to play the recording:
#pragma mark - UITableViewDelegate

/// Plays the recording for the tapped row and animates the cell's speaker
/// icon for the duration of the clip.
- (void)tableView:(UITableView *)tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath {
    Model *model = self.spaceData[indexPath.row];
    // Play back the recording stored for this row.
    NSURL *url = [self getPlayPath:model.spacePath];
    NSError *error = nil;
    _audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:url error:&error];
    _audioPlayer.numberOfLoops = 0; // Play once, no looping.
    [_audioPlayer prepareToPlay];
    [self.audioPlayer play];
    NSLog(@"%.0f---aaaa", _audioPlayer.duration);

    // Frames for the "speaking" animation on the cell's speaker image view.
    // NOTE(review): asset name reconstructed from garbled text — confirm the
    // exact image names in the asset catalog.
    NSMutableArray *imgData = [NSMutableArray array];
    for (int i = 0; i < 4; i++) {
        UIImage *aImage = [UIImage imageNamed:[NSString stringWithFormat:@"chat_receiver_audio_playing00%d", i]];
        if (aImage) { // imageNamed: returns nil for a missing asset; adding nil would crash.
            [imgData addObject:aImage];
        }
    }
    TwoTableViewCell *twoCell = (TwoTableViewCell *)[self.table cellForRowAtIndexPath:indexPath];
    // Run the tap animation once, lasting one second.
    [twoCell.speak setAnimationImages:imgData];
    [twoCell.speak setAnimationRepeatCount:1];
    [twoCell.speak setAnimationDuration:1];
    [twoCell.speak startAnimating];
    // Stop the animation once the clip has finished playing.
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)([model.duration intValue] * NSEC_PER_SEC)),
                   dispatch_get_main_queue(), ^{
        [twoCell.speak stopAnimating];
    });
}
That is the entire content of this article. I hope it helps with your learning — thanks for reading.