這里簡單闡述一下開發需求:用戶上直播課之前要測試麥克風是否可用,所以首先我們要使用錄音類進行錄音,對聲音的分貝進行監控,並在圖形化界面上為用戶展示聲音的變化。
一、首先我們看一張UI圖,補充了解一下上述需求

- 上面是一個麥克風的開關,負責開啟測試和關閉測試
- 下面是一個簡單的分貝圖,實時展示聲音的高低(這個進度條類似 mac 設置里面輸入設備測試的進度條)
二、開始編寫
- 首先是申請系統(tǒng)權限,(別忘了在plist文件里面設置相關權限)
// Check the microphone (audio) permission before starting the mic test.
// (Remember to add NSMicrophoneUsageDescription to Info.plist.)
AVAuthorizationStatus audioAuthStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeAudio];
if (audioAuthStatus == AVAuthorizationStatusNotDetermined) {
    [[AVAudioSession sharedInstance] requestRecordPermission:^void(BOOL granted) {
        // NOTE: this callback arrives on an arbitrary queue; dispatch any UI
        // update (e.g. enabling the test switch) back to the main queue.
        if (!granted) {
            // User refused — the mic test cannot run; surface a hint here.
        }
    }];
} else if (audioAuthStatus == AVAuthorizationStatusDenied ||
           audioAuthStatus == AVAuthorizationStatusRestricted) {
    // Permission was refused earlier; guide the user to Settings instead of
    // silently starting a recorder that will capture nothing.
}
- 定義兩個全局變量,如下:
@property (nonatomic , strong) AVAudioRecorder *audioRecorder;// recorder used only for metering; the audio itself is discarded (/dev/null)
@property (nonatomic , strong) NSTimer *timer;// repeating meter-polling timer; must be invalidated when the test stops or the owner is torn down
- 初始化這兩個類,然后開始監(jiān)聽
// Configure the shared audio session so we can record (and still play audio).
[[AVAudioSession sharedInstance] setCategory: AVAudioSessionCategoryPlayAndRecord error: nil];
// Recorder settings: 44.1 kHz, 2-channel Apple Lossless, max encoder quality.
NSDictionary *settings = @{
    AVSampleRateKey : @44100.0f,
    AVFormatIDKey : @(kAudioFormatAppleLossless),
    AVNumberOfChannelsKey : @2,
    AVEncoderAudioQualityKey : @(AVAudioQualityMax),
};
// We only meter the input and never need the data, so write to /dev/null.
NSURL *url = [NSURL fileURLWithPath:@"/dev/null"];
NSError *error = nil;
_audioRecorder = [[AVAudioRecorder alloc] initWithURL:url settings:settings error:&error];
if (!_audioRecorder) {
    // Recorder creation can fail (bad settings / session state) — bail out
    // instead of messaging nil and silently showing a dead meter.
    NSLog(@"Failed to create AVAudioRecorder: %@", error);
    return;
}
_audioRecorder.meteringEnabled = YES; // metering is off by default
[_audioRecorder prepareToRecord];
[self.audioRecorder record];
// A repeating NSTimer retains its block; capturing self strongly would create
// a retain cycle (self -> timer -> block -> self), so weakify here.
__weak typeof(self) weakSelf = self;
_timer = [NSTimer timerWithTimeInterval:0.1 repeats:YES block:^(NSTimer * _Nonnull timer) {
    __strong typeof(weakSelf) strongSelf = weakSelf;
    if (!strongSelf || !strongSelf.audioRecorder.isRecording) {
        return;
    }
    // Must refresh the meter values before reading them.
    [strongSelf.audioRecorder updateMeters];
    // peakPowerForChannel: returns dBFS in [-160, 0]. Background noise sits
    // below -40 dB in testing, so map [-40, 0] linearly onto [0, 1].
    float peakPower = [strongSelf.audioRecorder peakPowerForChannel:0];
    if (peakPower <= -40) {
        strongSelf.toolView.volumeView.level = 0.01;
    } else if (peakPower >= 0) {
        // >= instead of an exact float == 0 comparison, which almost never hits.
        strongSelf.toolView.volumeView.level = 1.0;
    } else {
        strongSelf.toolView.volumeView.level = (peakPower + 40) * 2.5 / 100.0;
    }
}];
// Common modes so the meter keeps updating while the user scrolls / tracks UI.
[[NSRunLoop currentRunLoop] addTimer:_timer forMode:NSRunLoopCommonModes];
peakPower 的取值范圍是 -160 到 0(單位:dB),但經過測試,背景噪音一般在 -40 dB 以下,所以我這里只取 -40 到 0 之間。
錄音監(jiān)控的部分結(jié)束了,下面分享三角形進度條的代碼,有需要的小伙伴可以直接復制使用
SHLiveCourseVolumeDBView.h
#import <UIKit/UIKit.h>
NS_ASSUME_NONNULL_BEGIN
/// A triangular "volume wedge" level meter (like the input-level indicator in
/// macOS sound settings). The whole view is masked to a right triangle and the
/// filled portion grows from the left edge as `level` rises.
@interface SHLiveCourseVolumeDBView : UIView
@property (nonatomic , strong) UIView *selectView;// the highlighted (filled) part of the wedge
@property (nonatomic , assign) CGFloat level;// fill ratio in [0, 1]; setting it resizes selectView
@end
NS_ASSUME_NONNULL_END
SHLiveCourseVolumeDBView.m
#import "SHLiveCourseVolumeDBView.h"

@implementation SHLiveCourseVolumeDBView

/// Builds the triangular wedge: the view is masked to a right triangle whose
/// hypotenuse rises from bottom-left to top-right, and the fill view is added
/// on top. The original code also called -addClip here, which is invalid
/// outside -drawRect: (there is no current graphics context, so UIKit logs an
/// invalid-context warning and the call does nothing) — the CAShapeLayer mask
/// alone performs the clipping, so -addClip was removed.
- (instancetype)initWithFrame:(CGRect)frame
{
    if (self = [super initWithFrame:frame]) {
        self.backgroundColor = UIColor.grayColor;
        // Triangle: bottom-left -> top-right -> bottom-right (implicitly closed).
        UIBezierPath *bezierPath = [UIBezierPath bezierPath];
        [bezierPath moveToPoint:CGPointMake(0, frame.size.height)];
        [bezierPath addLineToPoint:CGPointMake(frame.size.width, 0)];
        [bezierPath addLineToPoint:CGPointMake(frame.size.width, frame.size.height)];
        CAShapeLayer *maskLayer = [[CAShapeLayer alloc] init];
        maskLayer.frame = self.bounds;
        maskLayer.path = bezierPath.CGPath;
        self.layer.mask = maskLayer;
        [self addSubview:self.selectView];
    }
    return self;
}

/// Sets the fill ratio and resizes the highlighted view accordingly.
/// The value is clamped to [0, 1] so out-of-range callers cannot push the
/// fill outside the wedge.
- (void)setLevel:(CGFloat)level
{
    _level = MIN(MAX(level, 0.0), 1.0);
    CGRect rect = self.selectView.frame;
    rect.origin = CGPointZero; // the fill always grows from the left edge
    rect.size.width = self.bounds.size.width * _level;
    self.selectView.frame = rect;
}

/// Lazily creates the fill view. It carries its own triangle mask, fixed at
/// creation time in this view's coordinate space, so that as its frame narrows
/// only the triangular region remains visible. As in -initWithFrame:, the
/// invalid -addClip call was removed; the layer mask is what clips.
- (UIView *)selectView
{
    if (!_selectView) {
        _selectView = [[UIView alloc] initWithFrame:self.bounds];
        _selectView.backgroundColor = UIColor.lightGrayColor;
        UIBezierPath *bezierPath = [UIBezierPath bezierPath];
        [bezierPath moveToPoint:CGPointMake(0, _selectView.frame.size.height)];
        [bezierPath addLineToPoint:CGPointMake(_selectView.frame.size.width, 0)];
        [bezierPath addLineToPoint:CGPointMake(_selectView.frame.size.width, _selectView.frame.size.height)];
        CAShapeLayer *maskLayer = [[CAShapeLayer alloc] init];
        maskLayer.frame = self.bounds;
        maskLayer.path = bezierPath.CGPath;
        _selectView.layer.mask = maskLayer;
    }
    return _selectView;
}

@end
完美