Audio Recording
Audio recording uses AVAudioRecorder.
Initialization
- (AVAudioRecorder *)audioRecorder{
    if (!_audioRecorder) {
        // Must be a file URL; URLWithString: would not produce a valid file URL here
        NSURL *url = [NSURL fileURLWithPath:self.cafPathStr];
        // Build the recording settings dictionary (see below)
        NSDictionary *setting = [self getAudioSetting];
        // Create the recorder
        NSError *error = nil;
        _audioRecorder = [[AVAudioRecorder alloc] initWithURL:url settings:setting error:&error];
        // Set the delegate
        _audioRecorder.delegate = self;
        // Must be YES if you want to meter the audio levels
        _audioRecorder.meteringEnabled = YES;
        if (error) {
            return nil;
        }
    }
    return _audioRecorder;
}
Audio quality settings
- (NSDictionary *)getAudioSetting{
    // Linear PCM is a lossless encoding on iOS, but the files are large
    NSMutableDictionary *recordSettings = [[NSMutableDictionary alloc] init];
    // Recording format
    [recordSettings setValue:[NSNumber numberWithInt:kAudioFormatLinearPCM] forKey:AVFormatIDKey];
    // Sample rate
    [recordSettings setValue:[NSNumber numberWithFloat:11025.0] forKey:AVSampleRateKey]; // or 44100.0
    // Number of channels
    [recordSettings setValue:[NSNumber numberWithInt:2] forKey:AVNumberOfChannelsKey];
    // Linear PCM bit depth
    //[recordSettings setValue:[NSNumber numberWithInt:16] forKey:AVLinearPCMBitDepthKey];
    // Encoder / sampling quality
    [recordSettings setValue:[NSNumber numberWithInt:AVAudioQualityMedium] forKey:AVEncoderAudioQualityKey];
    return recordSettings;
}
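Since Linear PCM keeps quality at the price of size, a lossy format is a common alternative when file size matters more. Below is a minimal sketch using AAC; the keys are standard AVFoundation settings keys, but the helper name getAACAudioSetting and the chosen values are illustrative:
- (NSDictionary *)getAACAudioSetting{
    return @{
        AVFormatIDKey: @(kAudioFormatMPEG4AAC),   // AAC instead of Linear PCM
        AVSampleRateKey: @44100.0f,
        AVNumberOfChannelsKey: @2,
        AVEncoderAudioQualityKey: @(AVAudioQualityMedium)
    };
}
Recording straight to AAC (into an .m4a file) would also remove the need for the CAF-to-MP3 conversion step shown below, at the cost of no longer producing an MP3.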
Start recording
// Start recording
- (void)startSpeech {
    // Remove any leftover recordings from a previous session
    [MSCServiceFile manageSandBoxData:nil WithManageType:Delete Path:Document FolderName:@"audio" SuffixName:@"myRecord.caf"];
    [MSCServiceFile manageSandBoxData:nil WithManageType:Delete Path:Document FolderName:@"audio" SuffixName:@"myRecord.mp3"];
    // Build the paths where the recording will be saved
    self.cafPathStr = [MSCServiceFile getSandBoxFileToPath:Document FolderName:@"audio" SuffixName:@"myRecord.caf"];
    self.mp3PathStr = [MSCServiceFile getSandBoxFileToPath:Document FolderName:@"audio" SuffixName:@"myRecord.mp3"];
    // Request speech-recognition authorization (first check that the device supports speech recognition)
    [SFSpeechRecognizer requestAuthorization:^(SFSpeechRecognizerAuthorizationStatus status) {
        if (status == SFSpeechRecognizerAuthorizationStatusAuthorized) {
            if ([self.audioRecorder isRecording]) {
                [self.audioRecorder stop];
                self.audioRecorder = nil;
            }
            // Delete any previous recording file
            [self.audioRecorder deleteRecording];
            // Configure the audio session
            AVAudioSession *audioSession = [AVAudioSession sharedInstance];
            // A category that allows both playback and recording
            [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
            NSError *error;
            [audioSession overrideOutputAudioPort:AVAudioSessionPortOverrideSpeaker error:&error];
            if (![self.audioRecorder isRecording]) {
                // On first use, calling record prompts the user for microphone access
                [self.audioRecorder record];
            }
        }
    }];
}
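Note that SFSpeechRecognizer authorization only covers speech recognition; microphone access is a separate permission that, as the comment above notes, is prompted on the first record call. A sketch of requesting it explicitly instead:
[[AVAudioSession sharedInstance] requestRecordPermission:^(BOOL granted) {
    if (!granted) {
        // Microphone access denied; e.g. guide the user to Settings
    }
}];
Either way, NSMicrophoneUsageDescription must be present in Info.plist.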
End recording
// End recording
- (void)endSpeech{
    [self.audioRecorder stop];
    self.audioRecorder = nil;
    [self transformCAFToMP3];
}
Audio format conversion
// Convert to MP3
- (void)transformCAFToMP3 {
    @try {
        int read, write;
        // Source: the CAF file to convert. Use the file-system representation of the
        // path; passing a URL's absoluteString to fopen does not work reliably.
        FILE *pcm = fopen([self.cafPathStr fileSystemRepresentation], "rb");
        if (!pcm) return;
        fseek(pcm, 4*1024, SEEK_CUR); // skip the file header
        // Output: where the generated MP3 is written
        FILE *mp3 = fopen([self.mp3PathStr fileSystemRepresentation], "wb");
        if (!mp3) { fclose(pcm); return; }
        const int PCM_SIZE = 8192;
        const int MP3_SIZE = 8192;
        short int pcm_buffer[PCM_SIZE*2];
        unsigned char mp3_buffer[MP3_SIZE];
        lame_t lame = lame_init();
        // Must match the AVSampleRateKey used when recording (11025 Hz above)
        lame_set_in_samplerate(lame, 11025);
        lame_set_VBR(lame, vbr_default);
        lame_init_params(lame);
        do {
            read = (int)fread(pcm_buffer, 2*sizeof(short int), PCM_SIZE, pcm);
            if (read == 0) {
                write = lame_encode_flush(lame, mp3_buffer, MP3_SIZE);
            } else {
                write = lame_encode_buffer_interleaved(lame, pcm_buffer, read, mp3_buffer, MP3_SIZE);
            }
            fwrite(mp3_buffer, write, 1, mp3);
        } while (read != 0);
        lame_close(lame);
        fclose(mp3);
        fclose(pcm);
    }
    @catch (NSException *exception) {
        NSLog(@"%@", [exception description]);
    }
    @finally {
        NSString *audioBase64Str = [self mp3ToBASE64];
        if (self.recordAudioOver) {
            NSNumber *totalTime = [NSNumber numberWithDouble:[self audioDurationFromURL:self.mp3PathStr]];
            NSDictionary *dict = @{@"audio": audioBase64Str, @"totalTime": totalTime};
            self.recordAudioOver(dict);
        }
        NSLog(@"MP3 generated successfully");
        [MSCServiceFile manageSandBoxData:nil WithManageType:Delete Path:Document FolderName:@"audio" SuffixName:@"myRecord.caf"];
        [MSCServiceFile manageSandBoxData:nil WithManageType:Delete Path:Document FolderName:@"audio" SuffixName:@"myRecord.mp3"];
    }
}
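The mp3ToBASE64 helper called in the @finally block is not shown in this article; a minimal sketch consistent with how it is used (read the generated MP3 and Base64-encode it):
- (NSString *)mp3ToBASE64 {
    NSData *mp3Data = [NSData dataWithContentsOfFile:self.mp3PathStr];
    return [mp3Data base64EncodedStringWithOptions:0];
}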
Delegate
AVAudioRecorder's delegate protocol is <AVAudioRecorderDelegate>.
Common callbacks
Called when recording finishes:
- (void)audioRecorderDidFinishRecording:(AVAudioRecorder *)recorder successfully:(BOOL)flag;
Called when an encoding error occurs:
- (void)audioRecorderEncodeErrorDidOccur:(AVAudioRecorder *)recorder error:(NSError * __nullable)error;
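A minimal sketch of implementing these two callbacks in the recorder class above; what to do inside them depends on the app, so the bodies are illustrative:
- (void)audioRecorderDidFinishRecording:(AVAudioRecorder *)recorder successfully:(BOOL)flag {
    if (!flag) {
        NSLog(@"recording did not finish successfully");
    }
}
- (void)audioRecorderEncodeErrorDidOccur:(AVAudioRecorder *)recorder error:(NSError * __nullable)error {
    NSLog(@"encode error: %@", error);
    [recorder stop];
}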
Audio Playback
Use AVAudioPlayer to play local audio.
Use AVPlayer to play online audio.
The simplest playback code
NSURL *url = [[NSBundle mainBundle] URLForResource:@"一.mp3" withExtension:nil];
weakSelf.myPlay = [[AVAudioPlayer alloc] initWithContentsOfURL:url error:nil];
[weakSelf.myPlay play];
If playback is too quiet (audio routed to the earpiece), override the output to the speaker:
[audioSession overrideOutputAudioPort:AVAudioSessionPortOverrideSpeaker error:nil];
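The line above assumes an already-configured audioSession; a more complete sketch of the routing fix:
AVAudioSession *audioSession = [AVAudioSession sharedInstance];
// PlayAndRecord routes output to the earpiece by default, which is why playback sounds quiet
[audioSession setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
// Force output to the built-in speaker
[audioSession overrideOutputAudioPort:AVAudioSessionPortOverrideSpeaker error:nil];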
Play a local recording
// Play the recording
- (void)playSpeechWithData:(NSString *)data{
    NSData *audioData = [[NSData alloc] initWithBase64EncodedString:data options:NSDataBase64DecodingIgnoreUnknownCharacters];
    NSError *error;
    self.mscplayer = [[AVAudioPlayer alloc] initWithData:audioData error:&error];
    self.mscplayer.delegate = self;
    // Read the system output volume
    AVAudioSession *audioSession = [AVAudioSession sharedInstance];
    CGFloat currentVol = audioSession.outputVolume;
    // Set the player volume
    self.mscplayer.volume = currentVol;
    // Playback rate (enableRate must be YES before prepareToPlay for non-1.0 rates)
    self.mscplayer.enableRate = YES;
    self.mscplayer.rate = 1.0;
    // Loop count; a negative value loops indefinitely
    // self.mscplayer.numberOfLoops = 1;
    // Preload the resource
    [self.mscplayer prepareToPlay];
    // Poll every 0.5 s with a timer
    _timer = [NSTimer timerWithTimeInterval:0.5f target:self selector:@selector(getAudioCurrentTime) userInfo:nil repeats:YES];
    [[NSRunLoop mainRunLoop] addTimer:_timer forMode:NSRunLoopCommonModes];
    if ([self.mscplayer isPlaying]) {
        [self.mscplayer stop];
        [_timer setFireDate:[NSDate distantFuture]];
    }
    [self.mscplayer play];
    [_timer setFireDate:[NSDate distantPast]];
}
AVAudioPlayer has no real-time playback callback, so a repeating timer polls the player to update the progress bar.
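A sketch of the getAudioCurrentTime method the timer above fires; recordAudioCurrentTime is assumed here to be a progress block on this class, matching the callback used for online playback below:
- (void)getAudioCurrentTime {
    if (self.mscplayer.duration <= 0) return;
    float progress = self.mscplayer.currentTime / self.mscplayer.duration;
    if (self.recordAudioCurrentTime) {
        self.recordAudioCurrentTime(MIN(progress, 1.0f)); // drive the progress bar
    }
}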
Play online audio
- (void)playSpeechWithUrl:(NSString *)url{
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:[NSURL URLWithString:url] options:nil];
    AVPlayerItem *audioItem = [[AVPlayerItem alloc] initWithAsset:asset];
    self.onlineAudioPlayer = [[AVPlayer alloc] initWithPlayerItem:audioItem];
    [audioItem addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:nil];
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(playFinished:) name:AVPlayerItemDidPlayToEndTimeNotification
                                               object:self.onlineAudioPlayer.currentItem];
    __weak typeof(self) weakSelf = self;
    _timeObserve = [self.onlineAudioPlayer addPeriodicTimeObserverForInterval:CMTimeMake(1, 1) queue:dispatch_get_main_queue() usingBlock:^(CMTime time) {
        // Current playback time
        float current = CMTimeGetSeconds(time);
        // Total duration
        float total = CMTimeGetSeconds(audioItem.duration);
        if (current) {
            float progress = (current / total) > 1 ? 1.0 : (current / total);
            // Update the playback progress bar
            if (weakSelf.recordAudioCurrentTime) {
                weakSelf.recordAudioCurrentTime(progress);
            }
        }
    }];
}
The progress bar is updated via the callback inside the block.
Pause & resume playback
// Pause playback
- (void)pauseSpeech
{
    if (self.onlineAudioPlayer){
        [self.onlineAudioPlayer pause];
    }
    if (self.mscplayer && [self.mscplayer isPlaying]) {
        self.currentPlayTime = self.mscplayer.currentTime;
        [self.mscplayer pause];
        [_timer setFireDate:[NSDate distantFuture]];
    }
}
- (void)restartSpeech
{
    if (self.onlineAudioPlayer){
        [self.onlineAudioPlayer play];
    }
    if (self.mscplayer && ![self.mscplayer isPlaying]) {
        self.mscplayer.currentTime = self.currentPlayTime;
        [self.mscplayer play];
        [_timer setFireDate:[NSDate distantPast]];
    }
}
AVPlayer resumes with a plain play call, whereas for AVAudioPlayer this code restores the saved playback position first.
Monitoring playback status
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary<NSKeyValueChangeKey,id> *)change context:(void *)context {
    if ([keyPath isEqualToString:@"status"]) {
        AVPlayerItem *item = (AVPlayerItem *)object;
        if (item.status == AVPlayerItemStatusReadyToPlay) {
            [self.onlineAudioPlayer play];
            // Update the playback UI here: duration, progress, etc.
        } else if (item.status == AVPlayerItemStatusFailed) {
            [BSZ_APP_WINDOW makeToast:@"录音内容加载失败"]; // "Failed to load the recording"
        }
    }
}
Video Recording
Video recording uses CCCameraManager.
Initialization
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position{
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices) {
        if (device.position == position) {
            return device;
        }
    }
    return nil;
}
- (AVCaptureDevice *)activeCamera{
    return _deviceInput.device;
}
- (AVCaptureDevice *)inactiveCamera{
    AVCaptureDevice *device = nil;
    if ([[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] count] > 1) {
        if ([self activeCamera].position == AVCaptureDevicePositionBack) {
            device = [self cameraWithPosition:AVCaptureDevicePositionFront];
        } else {
            device = [self cameraWithPosition:AVCaptureDevicePositionBack];
        }
    }
    return device;
}
Configuration
#pragma mark - Configuration
/// Session
- (void)setupSession:(NSError **)error{
    _session = [[AVCaptureSession alloc] init];
    _session.sessionPreset = AVCaptureSessionPresetHigh;
    [self setupSessionInputs:error];
    [self setupSessionOutputs:error];
}
/// Inputs
- (void)setupSessionInputs:(NSError **)error{
    // Video input
    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:error];
    if (videoInput) {
        if ([_session canAddInput:videoInput]){
            [_session addInput:videoInput];
        }
    }
    _deviceInput = videoInput;
    // Audio input
    AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    AVCaptureDeviceInput *audioIn = [[AVCaptureDeviceInput alloc] initWithDevice:audioDevice error:error];
    if ([_session canAddInput:audioIn]){
        [_session addInput:audioIn];
    }
}
/// Outputs
- (void)setupSessionOutputs:(NSError **)error{
    dispatch_queue_t captureQueue = dispatch_queue_create("com.cc.captureQueue", DISPATCH_QUEUE_SERIAL);
    // Video output
    AVCaptureVideoDataOutput *videoOut = [[AVCaptureVideoDataOutput alloc] init];
    [videoOut setAlwaysDiscardsLateVideoFrames:YES];
    [videoOut setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]}];
    [videoOut setSampleBufferDelegate:self queue:captureQueue];
    if ([_session canAddOutput:videoOut]){
        [_session addOutput:videoOut];
    }
    _videoOutput = videoOut;
    _videoConnection = [videoOut connectionWithMediaType:AVMediaTypeVideo];
    // Audio output
    AVCaptureAudioDataOutput *audioOut = [[AVCaptureAudioDataOutput alloc] init];
    [audioOut setSampleBufferDelegate:self queue:captureQueue];
    if ([_session canAddOutput:audioOut]){
        [_session addOutput:audioOut];
    }
    _audioConnection = [audioOut connectionWithMediaType:AVMediaTypeAudio];
}
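The configuration above does not show a preview. Typically an AVCaptureVideoPreviewLayer is attached to the camera view so the user can see what is being captured; a sketch, where previewView is an assumed host view:
AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:_session];
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
previewLayer.frame = self.previewView.bounds;
[self.previewView.layer addSublayer:previewLayer];
// Start the flow of data through the session
// (in production, call startRunning on a background queue since it blocks)
[_session startRunning];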
Recording video
#pragma mark - Recording video
- (void)startOrStopRecordVideoAction{
    if (_recording == NO) {
        // Start recording
        _recording = YES;
        [self.timer setFireDate:[NSDate distantPast]];
        [self.startButton setImage:[UIImage imageNamed:@"videoStop"] forState:UIControlStateNormal];
        _movieManager.currentDevice = [self activeCamera];
        [_movieManager start:^(NSError * _Nonnull error) {
            NSLog(@"error:%@", error);
        }];
    } else {
        // Stop recording
        _recording = NO;
        [self.timer setFireDate:[NSDate distantFuture]];
        [self.startButton setImage:[UIImage imageNamed:@"videoPlay"] forState:UIControlStateNormal];
        [_movieManager stop:^(NSURL * _Nonnull url, NSError * _Nonnull error) {
            if (error) {
                NSLog(@"error:%@", error);
            } else {
                // Ask the user to confirm uploading the recorded clip
                [[MSCVAlert AlertWith:@"温馨提示" CancelBtn:@"取消" ConfirmBtn:@"确认" Content:@"请问确认上传此段录制视频么?" Category:DefaultAlert ResultBolck:^(NSInteger clickIndex) {
                    if (clickIndex == 1) {
                        [self toPreviewRecordingVCWithDataUrl:url];
                    } else {
                        self.timeCount = 0;
                        [[NSFileManager defaultManager] removeItemAtPath:[url path] error:nil];
                    }
                }] show];
            }
        }];
    }
}
Video playback with AVPlayer
Why AVPlayer
On iOS there are generally four options for playing video, each with its own role and capabilities:
(Image: comparison of common video playback frameworks)
As the comparison shows, if we are not building live-streaming features, AVPlayer is the best choice.
AVPlayer is also a full-featured audio/video player for the formats the system media stack understands.
Supported video containers include MP4, MOV, M4V, and 3GP.
Supported audio formats include MP3, AAC, ALAC, WAV, and AIFF. Containers such as MKV, RMVB, WMV, and FLV are not supported natively.
How to use it
AVPlayer lives in the AVFoundation framework, so we need to import it:
#import <AVFoundation/AVFoundation.h>
Related playback classes
Before building a player we need to know a few related classes:
- AVPlayer: controls playback: play, pause, and playback rate
- AVPlayerItem: manages the asset and controls the playback data source
- AVPlayerLayer: displays the video; without it there is sound but no picture
- AVURLAsset: an AVAsset subclass created from a URL; an instance carries all the information about the video at that URL
The simplest player
From the descriptions above, AVPlayer is the only required piece, so a minimal player is:
NSURL *playUrl = [NSURL URLWithString:@"http://120.25.226.186:32812/resources/videos/minion_02.mp4"];
self.player = [[AVPlayer alloc] initWithURL:playUrl];
[self.player play];
Add an AVPlayerLayer to get the picture.
Add an AVPlayerItem to be able to switch between videos.
The fleshed-out version:
NSURL *playUrl = [NSURL URLWithString:@"http://120.25.226.186:32812/resources/videos/minion_02.mp4"];
self.playerItem = [AVPlayerItem playerItemWithURL:playUrl];
// To switch videos, call AVPlayer's replaceCurrentItemWithPlayerItem: method
self.player = [AVPlayer playerWithPlayerItem:_playerItem];
self.playerLayer = [AVPlayerLayer playerLayerWithPlayer:self.player];
self.playerLayer.frame = self.backgroundView.bounds;
// The view that hosts the player layer
[self.backgroundView.layer addSublayer:self.playerLayer];
[_player play];
More features
As a video player this still falls short in many ways: there is no pause, no seeking, no variable-rate playback, and we also want a failure message for bad URLs plus a notification when playback finishes.
To implement these we need a closer look at AVPlayerItem and AVPlayerLayer.
Controlling AVPlayer
As described above, this class controls playback behavior, and it is simple to use.
Play:
[self.player play];
Pause:
[self.player pause];
Change the playback rate:
self.player.rate = 1.5; // note: changing the rate only takes effect after playback has started
For the remaining controls, jump to the system API headers.
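Seeking, mentioned among the missing features above, also goes through AVPlayer. A minimal sketch (the method name is illustrative):
// Jump to a given second, then resume playing
- (void)seekToSeconds:(Float64)seconds {
    CMTime target = CMTimeMakeWithSeconds(seconds, 600); // 600 is a common video timescale
    [self.player seekToTime:target completionHandler:^(BOOL finished) {
        if (finished) {
            [self.player play];
        }
    }];
}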
Controlling AVPlayerItem
As the asset-management object, AVPlayerItem controls the item's many states from creation to destruction.
1. Playback status: status
typedef NS_ENUM(NSInteger, AVPlayerItemStatus) {
    AVPlayerItemStatusUnknown,      // unknown
    AVPlayerItemStatusReadyToPlay,  // ready to play
    AVPlayerItemStatusFailed        // failed
};
We use KVO to observe playerItem.status and react to status changes:
[self.playerItem addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:nil];
In the observation callback:
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary<NSKeyValueChangeKey,id> *)change context:(void *)context{
    if ([object isKindOfClass:[AVPlayerItem class]]) {
        if ([keyPath isEqualToString:@"status"]) {
            switch (_playerItem.status) {
                case AVPlayerItemStatusReadyToPlay:
                    // The recommended place to start playback
                    [self play];
                    break;
                case AVPlayerItemStatusUnknown:
                    NSLog(@"AVPlayerItemStatusUnknown");
                    break;
                case AVPlayerItemStatusFailed:
                    NSLog(@"AVPlayerItemStatusFailed");
                    break;
                default:
                    break;
            }
        }
    }
}
Although we could call [self.player play] right after configuring the player, it is more robust to start playback when the callback reports AVPlayerItemStatusReadyToPlay.
2. Time information
AVPlayer represents time with a dedicated struct, CMTime:
typedef struct {
    CMTimeValue value;     // number of frames
    CMTimeScale timescale; // frames per second
    CMTimeFlags flags;
    CMTimeEpoch epoch;
} CMTime;
CMTime expresses time as a fraction: value is the numerator, timescale the denominator, and flags is a bit mask describing the state of the time.
To get the current playback time, divide value by timescale:
float currentTime = (float)self.playerItem.currentTime.value / self.playerItem.currentTime.timescale;
There is also a system-provided helper; here it fetches the total duration:
float totalTime = CMTimeGetSeconds(self.playerItem.duration);
To keep a label updated with the current playback progress, there is a system method:
- (id)addPeriodicTimeObserverForInterval:(CMTime)interval queue:(nullable dispatch_queue_t)queue usingBlock:(void (^)(CMTime time))block;
As the name says, this "adds a periodic time observer". Parameter 1, interval, is a CMTime; parameter 2, queue, is a serial queue (passing NULL means the main queue); parameter 3 is a block that receives a CMTime.
In short: the block runs once every interval.
For example, with interval set to CMTimeMake(1, 10) and a label refresh inside the block, the label updates 10 times per second.
For watching playback progress, once per second is enough, so we can write:
__weak typeof(self) weakSelf = self;
[self.player addPeriodicTimeObserverForInterval:CMTimeMake(1, 1) queue:NULL usingBlock:^(CMTime time) {
    AVPlayerItem *item = weakSelf.playerItem;
    NSInteger currentTime = item.currentTime.value / item.currentTime.timescale;
    NSLog(@"current playback time: %ld", (long)currentTime);
}];
3. Buffered time: loadedTimeRanges
To track buffering we observe the playerItem's loadedTimeRanges property:
[self.playerItem addObserver:self forKeyPath:@"loadedTimeRanges" options:NSKeyValueObservingOptionNew context:nil];
In the KVO callback:
if ([keyPath isEqualToString:@"loadedTimeRanges"]){
NSArray *array = _playerItem.loadedTimeRanges;
CMTimeRange timeRange = [array.firstObject CMTimeRangeValue];//本次缓冲时间范围
float startSeconds = CMTimeGetSeconds(timeRange.start);
float durationSeconds = CMTimeGetSeconds(timeRange.duration);
NSTimeInterval totalBuffer = startSeconds + durationSeconds;//缓冲总长度
NSLog(@"当前缓冲时间:%f",totalBuffer);
}
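To drive a buffer progress bar from this, the buffered length is usually divided by the total duration; a sketch, where bufferProgressView is an assumed UIProgressView:
CMTime duration = _playerItem.duration;
if (CMTIME_IS_NUMERIC(duration) && CMTimeGetSeconds(duration) > 0) {
    float bufferProgress = totalBuffer / CMTimeGetSeconds(duration);
    self.bufferProgressView.progress = MIN(bufferProgress, 1.0f);
}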
4. playbackBufferEmpty
Observing playbackBufferEmpty tells us when the buffer has run dry and the video cannot continue loading:
[self.playerItem addObserver:self forKeyPath:@"playbackBufferEmpty" options:NSKeyValueObservingOptionNew context:nil];
In the KVO callback:
if ([keyPath isEqualToString:@"playbackBufferEmpty"]) {
    // show a loading indicator here
}
5. playbackLikelyToKeepUp
playbackLikelyToKeepUp is the counterpart of playbackBufferEmpty; it signals that enough is buffered to keep playing:
[self.playerItem addObserver:self forKeyPath:@"playbackLikelyToKeepUp" options:NSKeyValueObservingOptionNew context:nil];
/* ... */
if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"]) {
    // AVPlayer pauses automatically when the buffer runs dry, so once
    // enough is buffered we must call play manually to resume
    [_player play];
}
AVURLAsset
Being able to play a video with nothing but a URL is insecure: anyone can capture the request and hotlink the stream. To guard against this we can attach request-header authentication to the video URL, which AVURLAsset makes possible.
AVURLAssetPreferPreciseDurationAndTimingKey takes a boolean value indicating whether the asset should prepare up front for precise duration reporting and random-access reads.
Beyond that officially documented option, AVURLAsset can also set HTTP request headers, something of a hidden feature:
NSMutableDictionary *headers = [NSMutableDictionary dictionary];
[headers setObject:@"yourHeader" forKey:@"User-Agent"];
self.urlAsset = [AVURLAsset URLAssetWithURL:self.videoURL options:@{@"AVURLAssetHTTPHeaderFieldsKey" : headers}];
// Initialize the playerItem
self.playerItem = [AVPlayerItem playerItemWithAsset:self.urlAsset];
Playback-related notifications
1. Audio:
// Audio interrupted (e.g., an incoming call)
AVAudioSessionInterruptionNotification
// Headphones plugged in or unplugged
AVAudioSessionRouteChangeNotification
Inspect userInfo to determine the exact state.
2. Playback:
// Played to the end
AVPlayerItemDidPlayToEndTimeNotification
// Failed to play to the end
AVPlayerItemFailedToPlayToEndTimeNotification
// Playback stalled
AVPlayerItemPlaybackStalledNotification
For the played-to-end notification we can write:
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(playerMovieFinish:) name:AVPlayerItemDidPlayToEndTimeNotification object:[self.player currentItem]];
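A sketch of the matching handler (the selector name playerMovieFinish: comes from the registration above; rewinding for replay is one common choice):
- (void)playerMovieFinish:(NSNotification *)notification {
    AVPlayerItem *item = notification.object;
    // Rewind so a later play call replays from the start
    [item seekToTime:kCMTimeZero completionHandler:nil];
    // Update the UI here, e.g. reset the progress bar or show a replay button
}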
3. App state:
// Entering the background
UIApplicationWillResignActiveNotification
// Returning to the foreground
UIApplicationDidBecomeActiveNotification
Tip: every notification observer and KVO observer must be removed when no longer needed.
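A sketch of that cleanup, assuming the observers registered throughout this article and the periodic time observer stored in _timeObserve:
- (void)dealloc {
    [[NSNotificationCenter defaultCenter] removeObserver:self];
    [self.playerItem removeObserver:self forKeyPath:@"status"];
    [self.playerItem removeObserver:self forKeyPath:@"loadedTimeRanges"];
    [self.playerItem removeObserver:self forKeyPath:@"playbackBufferEmpty"];
    [self.playerItem removeObserver:self forKeyPath:@"playbackLikelyToKeepUp"];
    if (_timeObserve) {
        [self.player removeTimeObserver:_timeObserve];
        _timeObserve = nil;
    }
}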