# AV Foundation
## Main Frameworks
- CoreAudio
  - The audio processing framework
  - Further reading: *Learning Core Audio*
- CoreVideo
- CoreMedia
  - Provides the low-level data types and interfaces for audio and video processing, such as CMTime (a short sketch follows this list)
- CoreAnimation
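CMTime, mentioned above, represents time as a rational number (value over timescale) rather than a float, which avoids drift in media timelines. A minimal sketch of the arithmetic (not from the original notes; the 600 timescale is just a common convention):

```objc
#import <CoreMedia/CoreMedia.h>

// CMTime stores time as value/timescale; 300/600 = 0.5 seconds.
CMTime half = CMTimeMake(300, 600);
CMTime one  = CMTimeMake(600, 600);
CMTime sum  = CMTimeAdd(half, one);          // 1.5 seconds
NSLog(@"%f seconds", CMTimeGetSeconds(sum)); // prints 1.500000
```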
## AV Foundation Overview
- Audio playback and recording (a playback sketch follows this list)
  - AVAudioPlayer
  - AVAudioRecorder
- Media asset inspection
- Video playback
- Media capture
- Media editing
- Media processing
  - AVAssetReader
  - AVAssetWriter
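As a taste of the playback piece, here is a minimal AVAudioPlayer sketch; the file name `sample.mp3` is hypothetical and stands in for any audio file bundled with the app:

```objc
#import <AVFoundation/AVFoundation.h>

// "sample.mp3" is a hypothetical bundled audio file.
NSURL *url = [[NSBundle mainBundle] URLForResource:@"sample" withExtension:@"mp3"];
NSError *error = nil;
AVAudioPlayer *player = [[AVAudioPlayer alloc] initWithContentsOfURL:url error:&error];
if (player) {
    [player prepareToPlay]; // preloads buffers to reduce playback latency
    [player play];
} else {
    NSLog(@"Failed to create player: %@", error);
}
```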
## Text-to-Speech
- Text-to-speech is handled primarily by the AVSpeechSynthesizer class
- It encapsulates the common speech operations, including speaking, pausing, and stopping
- Using AV Foundation requires importing its header:

```objc
#import <AVFoundation/AVFoundation.h>
```
- Declare an AVSpeechSynthesizer instance:

```objc
#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>

@interface ViewController () <AVSpeechSynthesizerDelegate>
@property (nonatomic, strong) AVSpeechSynthesizer *synthesizer;
@property (nonatomic, copy) NSArray *voices;
@property (nonatomic, copy) NSArray *speechStrings;
@property (nonatomic, assign) NSInteger currentIndex;
// Label showing the current line; referenced by the delegate callback below
@property (nonatomic, weak) IBOutlet UILabel *currentLabel;
@end
```
```objc
// 1. Create the AVSpeechSynthesizer object
_synthesizer = [[AVSpeechSynthesizer alloc] init];
// Set the delegate so the speechSynthesizer:didStart... callbacks below fire
_synthesizer.delegate = self;
```
- Set the voice to use; the speechVoices class method returns every voice the system supports:

```objc
// Log all voices the system supports
NSLog(@"%@", [AVSpeechSynthesisVoice speechVoices]);
// Keep a Mandarin Chinese voice for the utterances below
_voices = @[ [AVSpeechSynthesisVoice voiceWithLanguage:@"zh-CN"] ];
```
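If a voice for another language is needed, one possible approach (not from the original notes) is to scan speechVoices for a matching language code; the `en` prefix below is purely illustrative:

```objc
// Illustrative only: pick the first voice whose BCP-47 code starts with "en"
for (AVSpeechSynthesisVoice *voice in [AVSpeechSynthesisVoice speechVoices]) {
    if ([voice.language hasPrefix:@"en"]) {
        NSLog(@"Found an English voice: %@", voice.language);
        break;
    }
}
```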
```objc
// Load the text to speak: one string per line of test.txt in the app bundle
- (void)read {
    NSString *path = [[NSBundle mainBundle] pathForResource:@"test" ofType:@"txt"];
    _speechStrings = [[NSString stringWithContentsOfFile:path
                                                encoding:NSUTF8StringEncoding
                                                   error:nil] componentsSeparatedByString:@"\n"];
}
```
```objc
// Queue one utterance per line, starting from startIndex
// (every call site passes _currentIndex, so the method takes the index)
- (void)beginConversation:(NSInteger)startIndex {
    for (NSInteger i = startIndex; i < self.speechStrings.count; i++) {
        AVSpeechUtterance *utterance = [[AVSpeechUtterance alloc] initWithString:self.speechStrings[i]];
        utterance.voice = self.voices[0];
        utterance.rate = 0.4f;               // speaking rate
        utterance.pitchMultiplier = 0.8f;    // pitch, valid range 0.5–2.0
        utterance.postUtteranceDelay = 0.1f; // pause after each line
        [_synthesizer speakUtterance:utterance];
    }
}
```
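For reference, the rate value is constrained by the framework: AVFoundation exposes AVSpeechUtteranceMinimumSpeechRate, AVSpeechUtteranceDefaultSpeechRate, and AVSpeechUtteranceMaximumSpeechRate. A small sketch for keeping a desired rate in range (`clampedRate` is a hypothetical helper, not part of the notes above):

```objc
#import <AVFoundation/AVFoundation.h>

// Hypothetical helper: clamp a desired rate into the range the synthesizer accepts
static float clampedRate(float desired) {
    return MAX(AVSpeechUtteranceMinimumSpeechRate,
               MIN(desired, AVSpeechUtteranceMaximumSpeechRate));
}
```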
## Playback Controls
```objc
- (IBAction)play:(id)sender {
    if (_currentIndex == 0) {
        // Not started yet: queue utterances from the beginning
        [self beginConversation:_currentIndex];
    } else {
        // Otherwise resume from a pause
        [_synthesizer continueSpeaking];
    }
}

- (IBAction)stop:(id)sender {
    _currentIndex = 0;
    [_synthesizer stopSpeakingAtBoundary:AVSpeechBoundaryImmediate];
}

- (IBAction)pause:(id)sender {
    [_synthesizer pauseSpeakingAtBoundary:AVSpeechBoundaryImmediate];
}

- (IBAction)previous:(id)sender {
    // didStartSpeechUtterance: has already advanced past the current line,
    // so stepping back one line means subtracting 2
    _currentIndex -= 2;
    if (_currentIndex <= 0) {
        _currentIndex = 0;
    } else if (_currentIndex >= _speechStrings.count) {
        _currentIndex = _speechStrings.count;
    }
    [_synthesizer stopSpeakingAtBoundary:AVSpeechBoundaryImmediate];
    [self beginConversation:_currentIndex];
}

- (IBAction)next:(id)sender {
    if (_currentIndex <= 0) {
        _currentIndex = 0;
    } else if (_currentIndex >= _speechStrings.count) {
        _currentIndex = _speechStrings.count;
    }
    [_synthesizer stopSpeakingAtBoundary:AVSpeechBoundaryImmediate];
    [self beginConversation:_currentIndex];
}

#pragma mark - AVSpeechSynthesizerDelegate

- (void)speechSynthesizer:(AVSpeechSynthesizer *)synthesizer didStartSpeechUtterance:(AVSpeechUtterance *)utterance {
    // Track which line is being spoken and show it in the label
    _currentIndex++;
    if (_currentIndex <= 0) {
        _currentIndex = 0;
    } else if (_currentIndex >= _speechStrings.count) {
        _currentIndex = _speechStrings.count;
    }
    self.currentLabel.text = [NSString stringWithFormat:@"%zd", _currentIndex];
}

- (void)speechSynthesizer:(AVSpeechSynthesizer *)synthesizer didFinishSpeechUtterance:(AVSpeechUtterance *)utterance {
}
```