In iOS, speech recognition first requires capturing the audio — this post shows how to record it.
#import "GetAudioViewController.h"
#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>
#import <ImageIO/ImageIO.h>
#import <MobileCoreServices/MobileCoreServices.h>
#import <QuartzCore/QuartzCore.h>
// Class extension: private ivars for recording, playback, metering,
// and the animated waveform layer. Outlets are connected in the storyboard/xib.
@interface GetAudioViewController ()
{
    AVAudioPlayer *_player;            // plays back the recorded file
    AVAudioRecorder *_audioRecorder;   // active recorder (metering enabled)
    NSTimer *_timerForPitch;           // 10 ms timer polling the audio meters
    CAShapeLayer *_shapeLayer;         // layer whose path draws the waveform
    CADisplayLink *_displayLink;       // drives the waveform animation per frame

    __weak IBOutlet UIProgressView *_audioPower;  // shows current audio level
    __weak IBOutlet UIButton *_record;
    __weak IBOutlet UIButton *_pause;
    __weak IBOutlet UIButton *_resume;
    __weak IBOutlet UIButton *_stop;
    __weak IBOutlet UIView *_viewForWave;         // host view for the waveform layer

    float _pitch;                      // normalized level (0..~1), scales the wave amplitude
    NSInteger _recordEncoding;         // 1=AAC, 2=ALAC, 3=IMA4, 4=iLBC, 5=µLaw, 6=Linear PCM
    CFTimeInterval _firstTimestamp;    // timestamp of the first display-link tick
    NSInteger _loopCount;              // number of display-link ticks so far
}
@end
@implementation GetAudioViewController

#pragma mark - Lifecycle

- (void)viewDidLoad {
    [super viewDidLoad];
}

#pragma mark - Helpers

/// Creates the directory at `path` (with intermediates) if it does not exist.
- (void)createPath:(NSString *)path {
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if (![fileManager fileExistsAtPath:path]) {
        [fileManager createDirectoryAtPath:path
               withIntermediateDirectories:YES
                                attributes:nil
                                     error:nil];
    }
}

/// Builds the animated "wave" path for the given elapsed time.
/// The curve is a horizontal cubic bezier across _viewForWave whose control
/// points oscillate vertically with sin(), scaled by the current _pitch level.
- (UIBezierPath *)pathAtInterval:(NSTimeInterval)interval {
    UIBezierPath *path = [UIBezierPath bezierPath];
    CGFloat width = _viewForWave.bounds.size.width;
    CGFloat midY = _viewForWave.bounds.size.height / 2.0;

    [path moveToPoint:CGPointMake(0, midY)];
    // Fractional part of the second makes the wave cycle once per second.
    CGFloat fractionOfSecond = interval - floor(interval);
    CGFloat yOffset = _viewForWave.bounds.size.height * sin(fractionOfSecond * M_PI * _pitch * 8);
    [path addCurveToPoint:CGPointMake(width, midY)
            controlPoint1:CGPointMake(width / 2.0, midY - yOffset)
            controlPoint2:CGPointMake(width / 2.0, midY + yOffset)];
    return path;
}

/// Installs the waveform shape layer into _viewForWave.
- (void)addShapeLayer {
    _shapeLayer = [CAShapeLayer layer];
    _shapeLayer.path = [[self pathAtInterval:2.0] CGPath];
    _shapeLayer.fillColor = [[UIColor redColor] CGColor];
    _shapeLayer.lineWidth = 1.0;
    _shapeLayer.strokeColor = [[UIColor whiteColor] CGColor];
    [_viewForWave.layer addSublayer:_shapeLayer];
}

/// Display-link callback: regenerates the wave path from elapsed time.
- (void)handleDisplayLink:(CADisplayLink *)displayLink {
    if (!_firstTimestamp) {
        _firstTimestamp = displayLink.timestamp;
    }
    _loopCount++;
    NSTimeInterval elapsed = displayLink.timestamp - _firstTimestamp;
    _shapeLayer.path = [[self pathAtInterval:elapsed] CGPath];
}

- (void)startDisplayLink {
    _displayLink = [CADisplayLink displayLinkWithTarget:self
                                               selector:@selector(handleDisplayLink:)];
    [_displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
}

- (void)stopDisplayLink {
    [_displayLink invalidate];
    _displayLink = nil;
}

#pragma mark - Actions

/// Starts recording to Documents/recordTest.caf, shows the waveform, and
/// begins polling the meters every 10 ms to drive the level UI.
- (IBAction)recordClick:(id)sender {
    _viewForWave.hidden = NO;
    [self addShapeLayer];
    [self startDisplayLink];
    NSLog(@"startRecording");
    _audioRecorder = nil;

    AVAudioSession *audioSession = [AVAudioSession sharedInstance];
    [audioSession setCategory:AVAudioSessionCategoryRecord error:nil];

    NSMutableDictionary *recordSettings = [[NSMutableDictionary alloc] initWithCapacity:10];
    if (_recordEncoding == 6) {
        // Uncompressed 16-bit stereo linear PCM at 44.1 kHz.
        [recordSettings setObject:[NSNumber numberWithInt:kAudioFormatLinearPCM] forKey:AVFormatIDKey];
        [recordSettings setObject:[NSNumber numberWithFloat:44100.0] forKey:AVSampleRateKey];
        [recordSettings setObject:[NSNumber numberWithInt:2] forKey:AVNumberOfChannelsKey];
        [recordSettings setObject:[NSNumber numberWithInt:16] forKey:AVLinearPCMBitDepthKey];
        [recordSettings setObject:[NSNumber numberWithBool:NO] forKey:AVLinearPCMIsBigEndianKey];
        [recordSettings setObject:[NSNumber numberWithBool:NO] forKey:AVLinearPCMIsFloatKey];
    } else {
        // Compressed formats share the rate/channel/bitrate settings below.
        NSNumber *formatObject;
        switch (_recordEncoding) {
            case 1:
                formatObject = [NSNumber numberWithInt:kAudioFormatMPEG4AAC];
                break;
            case 2:
                formatObject = [NSNumber numberWithInt:kAudioFormatAppleLossless];
                break;
            case 3:
                formatObject = [NSNumber numberWithInt:kAudioFormatAppleIMA4];
                break;
            case 4:
                formatObject = [NSNumber numberWithInt:kAudioFormatiLBC];
                break;
            case 5:
                formatObject = [NSNumber numberWithInt:kAudioFormatULaw];
                break;
            default:
                formatObject = [NSNumber numberWithInt:kAudioFormatAppleIMA4];
                break;
        }
        [recordSettings setObject:formatObject forKey:AVFormatIDKey];
        [recordSettings setObject:[NSNumber numberWithFloat:44100.0] forKey:AVSampleRateKey];
        [recordSettings setObject:[NSNumber numberWithInt:2] forKey:AVNumberOfChannelsKey];
        [recordSettings setObject:[NSNumber numberWithInt:12800] forKey:AVEncoderBitRateKey];
        [recordSettings setObject:[NSNumber numberWithInt:16] forKey:AVLinearPCMBitDepthKey];
        [recordSettings setObject:[NSNumber numberWithInt:AVAudioQualityHigh] forKey:AVEncoderAudioQualityKey];
    }

    NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *docsDir = [dirPaths objectAtIndex:0];
    NSString *soundFilePath = [docsDir stringByAppendingPathComponent:@"recordTest.caf"];
    NSURL *url = [NSURL fileURLWithPath:soundFilePath];

    NSError *error = nil;
    _audioRecorder = [[AVAudioRecorder alloc] initWithURL:url settings:recordSettings error:&error];
    _audioRecorder.meteringEnabled = YES;
    if ([_audioRecorder prepareToRecord]) {
        [_audioRecorder record];
        // 10 ms poll of the meters; invalidated in pauseClick:.
        _timerForPitch = [NSTimer scheduledTimerWithTimeInterval:0.01
                                                          target:self
                                                        selector:@selector(levelTimerCallback:)
                                                        userInfo:nil
                                                         repeats:YES];
    } else {
        // Four-char-code errors are easier to read big-endian.
        int errorCode = CFSwapInt32HostToBig((uint32_t)[error code]);
        NSLog(@"Error: %@ [%4.4s]", [error localizedDescription], (char *)&errorCode);
    }
}

/// Timer callback: converts average power (dB) to a linear 0..1 level,
/// updates the level UI and the elapsed-time label.
- (void)levelTimerCallback:(NSTimer *)timer {
    [_audioRecorder updateMeters];
    // dB -> linear amplitude: 10^(dB/20).
    float averageLinear = pow(10, [_audioRecorder averagePowerForChannel:0] / 20);
    if (averageLinear > 0.03) {
        _pitch = averageLinear + 0.20;
    } else {
        _pitch = 0.0;
    }
    // NOTE(review): this overwrites the thresholded value above, making that
    // branch dead — kept to preserve the original's observed behavior.
    _pitch = averageLinear;
    NSLog(@"pitch==%f", _pitch);
    // _customRangeBar and statusLabel are presumably declared in
    // GetAudioViewController.h — confirm against the header.
    _customRangeBar.value = _pitch;
    [_audioPower setProgress:_pitch];

    float minutes = floor(_audioRecorder.currentTime / 60);
    float seconds = _audioRecorder.currentTime - (minutes * 60);
    NSString *time = [NSString stringWithFormat:@"%0.0f.%0.0f", minutes, seconds];
    [self.statusLabel setText:[NSString stringWithFormat:@"%@ sec", time]];
    NSLog(@"recording");
}

/// Stops recording, hides the waveform, and tears down the timers.
- (IBAction)pauseClick:(id)sender {
    NSLog(@"stopRecording");
    // NOTE(review): kSeconds appears to be an elapsed-time counter declared
    // elsewhere (likely the header) — confirm.
    kSeconds = 0.0;
    _viewForWave.hidden = YES;
    [_audioRecorder stop];
    [self stopDisplayLink];
    _shapeLayer.path = [[self pathAtInterval:0] CGPath];
    [_timerForPitch invalidate];
    _timerForPitch = nil;
}

/// Plays back the recorded file (despite the name, this starts playback
/// from the beginning rather than resuming a paused recording).
- (IBAction)resumeClick:(id)sender {
    AVAudioSession *audioSession = [AVAudioSession sharedInstance];
    [audioSession setCategory:AVAudioSessionCategoryPlayback error:nil];

    NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *docsDir = [dirPaths objectAtIndex:0];
    NSString *soundFilePath = [docsDir stringByAppendingPathComponent:@"recordTest.caf"];
    NSURL *url = [NSURL fileURLWithPath:soundFilePath];

    NSError *error = nil;
    _player = [[AVAudioPlayer alloc] initWithContentsOfURL:url error:&error];
    _player.numberOfLoops = 0;  // play once
    [_player play];
}

/// Stops playback.
- (IBAction)stopClick:(id)sender {
    [_player stop];
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
}

@end
That is all the code. The _viewForWave view hosts an animated waveform that reacts to the audio level. Interested readers can extend and restyle it themselves, using my implementation as a reference — and feel free to leave me a message with questions.
iOS audio recording