A Step-by-Step Guide to Siri-Style Speech Recognition with iOS 10 and Xcode 8 (in Objective-C)
Since iOS 10, third-party developers can add Siri-style voice recognition to their own apps. Under the hood this uses the same speech recognition framework that powers Siri: the Speech framework. Let's walk through the main code. All we need on screen is a UITextView to display the transcription and a UIButton to start and stop recording.
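One prerequisite before any of the code below will run on a device: iOS 10 requires usage-description entries in the app's Info.plist for both speech recognition and the microphone, otherwise the authorization request in Step 2 will fail. The two keys below are the real system keys; the description strings are placeholders you should replace with your own wording:

<key>NSSpeechRecognitionUsageDescription</key>
<string>Speech recognition is used to transcribe what you say.</string>
<key>NSMicrophoneUsageDescription</key>
<string>The microphone records your voice so it can be transcribed.</string>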
Step 1: Define the properties
#import <Speech/Speech.h>
#import <AVFoundation/AVFoundation.h>

@interface ViewController () <SFSpeechRecognizerDelegate>

@property (strong, nonatomic) UIButton *siriBtu;
@property (strong, nonatomic) UITextView *siriTextView;
@property (strong, nonatomic) SFSpeechRecognitionTask *recognitionTask;
@property (strong, nonatomic) SFSpeechRecognizer *speechRecognizer;
@property (strong, nonatomic) SFSpeechAudioBufferRecognitionRequest *recognitionRequest;
@property (strong, nonatomic) AVAudioEngine *audioEngine;

@end
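The article never shows where siriBtu and siriTextView come from. As a minimal sketch (assuming programmatic layout; the frames, font, and the helper name setupUI are illustrative, and microphoneTap: is the action defined in Step 3), the two views could be created like this and the method called at the top of viewDidLoad:

// Hypothetical helper that builds the two views used throughout this article.
- (void)setupUI {
    // Text view that displays the live transcription.
    self.siriTextView = [[UITextView alloc] initWithFrame:CGRectMake(20, 80, self.view.bounds.size.width - 40, 300)];
    self.siriTextView.font = [UIFont systemFontOfSize:17];
    [self.view addSubview:self.siriTextView];

    // Button that starts and stops recording (its action is implemented in Step 3).
    self.siriBtu = [UIButton buttonWithType:UIButtonTypeSystem];
    self.siriBtu.frame = CGRectMake(20, 400, self.view.bounds.size.width - 40, 44);
    [self.siriBtu setTitle:@"Start Recording" forState:UIControlStateNormal];
    [self.siriBtu addTarget:self action:@selector(microphoneTap:) forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:self.siriBtu];
}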
Step 2: Check and request speech recognition authorization
- (void)viewDidLoad {
    [super viewDidLoad];
    // Create a recognizer for Simplified Chinese; use another locale identifier for other languages.
    NSLocale *cale = [[NSLocale alloc] initWithLocaleIdentifier:@"zh-CN"];
    self.speechRecognizer = [[SFSpeechRecognizer alloc] initWithLocale:cale];
    // Keep the button disabled until the user grants permission.
    self.siriBtu.enabled = NO;
    _speechRecognizer.delegate = self;
    [SFSpeechRecognizer requestAuthorization:^(SFSpeechRecognizerAuthorizationStatus status) {
        BOOL isButtonEnabled = NO;
        switch (status) {
            case SFSpeechRecognizerAuthorizationStatusAuthorized:
                isButtonEnabled = YES;
                NSLog(@"Speech recognition authorized");
                break;
            case SFSpeechRecognizerAuthorizationStatusDenied:
                isButtonEnabled = NO;
                NSLog(@"User denied access to speech recognition");
                break;
            case SFSpeechRecognizerAuthorizationStatusRestricted:
                isButtonEnabled = NO;
                NSLog(@"Speech recognition is restricted on this device");
                break;
            case SFSpeechRecognizerAuthorizationStatusNotDetermined:
                isButtonEnabled = NO;
                NSLog(@"Speech recognition not yet authorized");
                break;
            default:
                break;
        }
        // The authorization callback is not guaranteed to arrive on the main thread,
        // so hop back before touching the UI.
        dispatch_async(dispatch_get_main_queue(), ^{
            self.siriBtu.enabled = isButtonEnabled;
        });
    }];
    self.audioEngine = [[AVAudioEngine alloc] init];
}
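Since viewDidLoad makes the view controller the recognizer's delegate, it is also worth implementing the single SFSpeechRecognizerDelegate callback so the button tracks availability, for example when the network connection drops. A minimal sketch:

#pragma mark - SFSpeechRecognizerDelegate

// Called when the recognizer becomes available or unavailable.
- (void)speechRecognizer:(SFSpeechRecognizer *)speechRecognizer availabilityDidChange:(BOOL)available {
    dispatch_async(dispatch_get_main_queue(), ^{
        self.siriBtu.enabled = available;
    });
}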
Step 3: The button's tap handler
- (void)microphoneTap:(UIButton *)sender {
    if ([self.audioEngine isRunning]) {
        // Already recording: stop the engine and tell the request no more audio is coming,
        // which lets the recognizer deliver its final result.
        [self.audioEngine stop];
        [self.recognitionRequest endAudio];
        self.siriBtu.enabled = YES;
        [self.siriBtu setTitle:@"Start Recording" forState:UIControlStateNormal];
    } else {
        [self startRecording];
        [self.siriBtu setTitle:@"Stop Recording" forState:UIControlStateNormal];
    }
}
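One optional addition, not in the original: because startRecording (Step 4) activates a record-only audio session, you may want to hand audio back to other apps when recording stops. A hedged sketch of what could go in the stop branch above:

// Optional: deactivate the session so background audio (music, podcasts) can resume.
NSError *deactivateError = nil;
[[AVAudioSession sharedInstance] setActive:NO
                               withOptions:AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation
                                     error:&deactivateError];
if (deactivateError) {
    NSLog(@"Failed to deactivate audio session: %@", deactivateError.localizedDescription);
}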
Step 4: Start recording and convert speech to text
- (void)startRecording {
    // Cancel any recognition task that is still running.
    if (self.recognitionTask) {
        [self.recognitionTask cancel];
        self.recognitionTask = nil;
    }

    // Configure the shared audio session for recording.
    AVAudioSession *audioSession = [AVAudioSession sharedInstance];
    BOOL audioBool = [audioSession setCategory:AVAudioSessionCategoryRecord error:nil];
    BOOL audioBool1 = [audioSession setMode:AVAudioSessionModeMeasurement error:nil];
    BOOL audioBool2 = [audioSession setActive:YES withOptions:AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation error:nil];
    if (audioBool && audioBool1 && audioBool2) {
        NSLog(@"Audio session configured");
    } else {
        NSLog(@"Some audio session settings are not supported");
    }

    self.recognitionRequest = [[SFSpeechAudioBufferRecognitionRequest alloc] init];
    AVAudioInputNode *inputNode = self.audioEngine.inputNode;
    // Report partial results so the text view updates while the user is still speaking.
    self.recognitionRequest.shouldReportPartialResults = YES;

    self.recognitionTask = [self.speechRecognizer recognitionTaskWithRequest:self.recognitionRequest resultHandler:^(SFSpeechRecognitionResult * _Nullable result, NSError * _Nullable error) {
        BOOL isFinal = NO;
        if (result) {
            // Show the best transcription so far.
            self.siriTextView.text = [[result bestTranscription] formattedString];
            isFinal = [result isFinal];
        }
        if (error || isFinal) {
            // Recognition finished (or failed): tear everything down and re-enable the button.
            [self.audioEngine stop];
            [inputNode removeTapOnBus:0];
            self.recognitionRequest = nil;
            self.recognitionTask = nil;
            self.siriBtu.enabled = YES;
        }
    }];

    // Feed microphone audio into the recognition request.
    AVAudioFormat *recordingFormat = [inputNode outputFormatForBus:0];
    [inputNode installTapOnBus:0 bufferSize:1024 format:recordingFormat block:^(AVAudioPCMBuffer * _Nonnull buffer, AVAudioTime * _Nonnull when) {
        [self.recognitionRequest appendAudioPCMBuffer:buffer];
    }];

    [self.audioEngine prepare];
    BOOL audioEngineBool = [self.audioEngine startAndReturnError:nil];
    NSLog(@"%d", audioEngineBool);
    self.siriTextView.text = @"I'm XiaoBing!";
}
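The recognizer in this article is hard-coded to zh-CN. Before starting a recording you may also want to confirm that the chosen locale is supported and that recognition is currently usable; +[SFSpeechRecognizer supportedLocales] lists every supported locale. A minimal sketch, with a hypothetical helper name:

// Hypothetical helper: returns YES if speech recognition can start for the given locale identifier.
- (BOOL)canRecognizeSpeechForLocale:(NSString *)localeIdentifier {
    SFSpeechRecognizer *recognizer = [[SFSpeechRecognizer alloc] initWithLocale:[NSLocale localeWithLocaleIdentifier:localeIdentifier]];
    // initWithLocale: returns nil for unsupported locales; isAvailable is NO when recognition
    // is temporarily unusable (for example, no network connection).
    return recognizer != nil && recognizer.isAvailable;
}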