Revision History
Version | Date
---|---
V1.0 | 2021.05.18 (Tuesday)
Preface
AVFoundation is one of the most important frameworks in iOS: virtually all software and hardware control related to audio and video lives in it. The next several posts introduce and explain this framework. If you are interested, take a look at my previous posts:
1. AVFoundation Framework Analysis (1): Basic Overview
2. AVFoundation Framework Analysis (2): Implementing Video Preview, Recording, and Saving to the Photo Album
3. AVFoundation Framework Analysis (3): Several Key Questions, a Deep Summary of the Framework
4. AVFoundation Framework Analysis (4): Several Key Questions, Exploring AVFoundation (Part 1)
5. AVFoundation Framework Analysis (5): Several Key Questions, Exploring AVFoundation (Part 2)
6. AVFoundation Framework Analysis (6): Composing Video and Audio (Part 1)
7. AVFoundation Framework Analysis (7): Debugging Video Composition and Audio Mixing
8. AVFoundation Framework Analysis (8): Optimizing the User Playback Experience
9. AVFoundation Framework Analysis (9): Changes in AVFoundation (Part 1)
10. AVFoundation Framework Analysis (10): Changes in AVFoundation (Part 2)
11. AVFoundation Framework Analysis (11): Changes in AVFoundation (Part 3)
12. AVFoundation Framework Analysis (12): Changes in AVFoundation (Part 4)
13. AVFoundation Framework Analysis (13): Building a Basic Playback Application
14. AVFoundation Framework Analysis (14): Timecode Support in AVAssetWriter and AVAssetReader (Part 1)
15. AVFoundation Framework Analysis (15): Timecode Support in AVAssetWriter and AVAssetReader (Part 2)
16. AVFoundation Framework Analysis (16): A Simple Example of Playing, Recording, and Mixing Video (Part 1)
17. AVFoundation Framework Analysis (17): A Simple Example of Playing, Recording, and Mixing Video, Source Code and Demo (Part 2)
18. AVFoundation Framework Analysis (18): AVAudioEngine, Basic Overview (Part 1)
19. AVFoundation Framework Analysis (19): AVAudioEngine, Detailed Walkthrough and a Simple Example (Part 2)
20. AVFoundation Framework Analysis (20): AVAudioEngine, Detailed Walkthrough and Simple Example Source Code (Part 3)
21. AVFoundation Framework Analysis (21): A Simple Video Stream Preview and Playback Example, Analysis (Part 1)
22. AVFoundation Framework Analysis (22): A Simple Video Stream Preview and Playback Example, Source Code (Part 2)
23. AVFoundation Framework Analysis (23): Adding Overlays and Animations to a Video Layer (Part 1)
24. AVFoundation Framework Analysis (24): Adding Overlays and Animations to a Video Layer (Part 2)
25. AVFoundation Framework Analysis (25): A Simple Example of Playing, Recording, and Merging Video (Part 1)
26. AVFoundation Framework Analysis (26): A Simple Example of Playing, Recording, and Merging Video (Part 2)
27. AVFoundation Framework Analysis (27): A Simple Usage Example Based on AVAudioEngine (Part 1)
Source Code
1. Swift
First, take a look at how the project is organized. The source code itself follows, file by file.
1. AppMain.swift
import SwiftUI
@main
struct AppMain: App {
var body: some Scene {
WindowGroup {
PlayerView()
}
}
}
2. Color+Additions.swift
import SwiftUI
extension Color {
static let rwGreen = Color("rw-green")
static let groupedBackground = Color(.systemGroupedBackground)
}
3. Image+Additions.swift
import SwiftUI
extension Image {
static let artwork = Image("artwork")
static let play = Image(systemName: "play.fill")
static let pause = Image(systemName: "pause.fill")
static let forward = Image(systemName: "goforward.10")
static let backward = Image(systemName: "gobackward.10")
}
4. PlaybackValue.swift
import Foundation
struct PlaybackValue: Identifiable {
let value: Double
let label: String
var id: String {
return "\(label)-\(value)"
}
}
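Because `PlaybackValue` conforms to `Identifiable`, it can drive SwiftUI lists and pickers directly, without an explicit `id` key path. Here is a small illustrative sketch; the `RatePickerDemo` view is hypothetical and not part of the project:

```swift
import SwiftUI

// Hypothetical demo view: Identifiable lets PlaybackValue drive List/ForEach
// without specifying an explicit id key path.
struct RatePickerDemo: View {
  let rates: [PlaybackValue] = [
    .init(value: 0.5, label: "0.5x"),
    .init(value: 1, label: "1x"),
    .init(value: 2, label: "2x")
  ]

  var body: some View {
    List(rates) { rate in
      Text(rate.label)
    }
  }
}
```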
5. PlayerTime.swift
import Foundation
struct PlayerTime {
let elapsedText: String
let remainingText: String
static let zero: PlayerTime = .init(elapsedTime: 0, remainingTime: 0)
init(elapsedTime: Double, remainingTime: Double) {
elapsedText = PlayerTime.formatted(time: elapsedTime)
remainingText = PlayerTime.formatted(time: remainingTime)
}
private static func formatted(time: Double) -> String {
var seconds = Int(ceil(time))
var hours = 0
var mins = 0
if seconds >= TimeConstant.secsPerHour {
hours = seconds / TimeConstant.secsPerHour
seconds -= hours * TimeConstant.secsPerHour
}
if seconds >= TimeConstant.secsPerMin {
mins = seconds / TimeConstant.secsPerMin
seconds -= mins * TimeConstant.secsPerMin
}
var formattedString = ""
if hours > 0 {
formattedString = "\(String(format: "%02d", hours)):"
}
formattedString += "\(String(format: "%02d", mins)):\(String(format: "%02d", seconds))"
return formattedString
}
}
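To make the formatting rules concrete, here is a quick illustrative check (the values are arbitrary):

```swift
let short = PlayerTime(elapsedTime: 75, remainingTime: 3525)
print(short.elapsedText)   // "01:15"
print(short.remainingText) // "58:45"

let long = PlayerTime(elapsedTime: 3700, remainingTime: 0)
print(long.elapsedText)    // "01:01:40"
```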
6. PlayerViewModel.swift
import SwiftUI
import AVFoundation
// swiftlint:disable:next type_body_length
class PlayerViewModel: NSObject, ObservableObject {
// MARK: Public properties
var isPlaying = false {
willSet {
withAnimation {
objectWillChange.send()
}
}
}
var isPlayerReady = false {
willSet {
objectWillChange.send()
}
}
var playbackRateIndex: Int = 1 {
willSet {
objectWillChange.send()
}
didSet {
updateForRateSelection()
}
}
var playbackPitchIndex: Int = 1 {
willSet {
objectWillChange.send()
}
didSet {
updateForPitchSelection()
}
}
var playerProgress: Double = 0 {
willSet {
objectWillChange.send()
}
}
var playerTime: PlayerTime = .zero {
willSet {
objectWillChange.send()
}
}
var meterLevel: Float = 0 {
willSet {
objectWillChange.send()
}
}
let allPlaybackRates: [PlaybackValue] = [
.init(value: 0.5, label: "0.5x"),
.init(value: 1, label: "1x"),
.init(value: 1.25, label: "1.25x"),
.init(value: 2, label: "2x")
]
let allPlaybackPitches: [PlaybackValue] = [
.init(value: -0.5, label: "-½"),
.init(value: 0, label: "0"),
.init(value: 0.5, label: "+½")
]
// MARK: Private properties
private let engine = AVAudioEngine()
private let player = AVAudioPlayerNode()
private let timeEffect = AVAudioUnitTimePitch()
private var displayLink: CADisplayLink?
private var needsFileScheduled = true
private var audioFile: AVAudioFile?
private var audioSampleRate: Double = 0
private var audioLengthSeconds: Double = 0
private var seekFrame: AVAudioFramePosition = 0
private var currentPosition: AVAudioFramePosition = 0
private var audioLengthSamples: AVAudioFramePosition = 0
private var currentFrame: AVAudioFramePosition {
guard
let lastRenderTime = player.lastRenderTime,
let playerTime = player.playerTime(forNodeTime: lastRenderTime)
else {
return 0
}
return playerTime.sampleTime
}
// MARK: - Public
override init() {
super.init()
setupAudio()
setupDisplayLink()
}
func playOrPause() {
isPlaying.toggle()
if player.isPlaying {
displayLink?.isPaused = true
disconnectVolumeTap()
player.pause()
} else {
displayLink?.isPaused = false
connectVolumeTap()
if needsFileScheduled {
scheduleAudioFile()
}
player.play()
}
}
func skip(forwards: Bool) {
let timeToSeek: Double
if forwards {
timeToSeek = 10
} else {
timeToSeek = -10
}
seek(to: timeToSeek)
}
// MARK: - Private
private func setupAudio() {
guard let fileURL = Bundle.main.url(forResource: "Intro", withExtension: "mp3") else {
return
}
do {
let file = try AVAudioFile(forReading: fileURL)
let format = file.processingFormat
audioLengthSamples = file.length
audioSampleRate = format.sampleRate
audioLengthSeconds = Double(audioLengthSamples) / audioSampleRate
audioFile = file
configureEngine(with: format)
} catch {
print("Error reading the audio file: \(error.localizedDescription)")
}
}
private func configureEngine(with format: AVAudioFormat) {
engine.attach(player)
engine.attach(timeEffect)
engine.connect(
player,
to: timeEffect,
format: format)
engine.connect(
timeEffect,
to: engine.mainMixerNode,
format: format)
engine.prepare()
do {
try engine.start()
scheduleAudioFile()
isPlayerReady = true
} catch {
print("Error starting the player: \(error.localizedDescription)")
}
}
private func scheduleAudioFile() {
guard
let file = audioFile,
needsFileScheduled
else {
return
}
needsFileScheduled = false
seekFrame = 0
player.scheduleFile(file, at: nil) {
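// Called once the player is finished with the file (or is stopped),
// typically on a non-main thread; flag that a fresh schedule is needed.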
self.needsFileScheduled = true
}
}
// MARK: Audio adjustments
private func seek(to time: Double) {
guard let audioFile = audioFile else {
return
}
let offset = AVAudioFramePosition(time * audioSampleRate)
seekFrame = currentPosition + offset
seekFrame = max(seekFrame, 0)
seekFrame = min(seekFrame, audioLengthSamples)
currentPosition = seekFrame
let wasPlaying = player.isPlaying
player.stop()
if currentPosition < audioLengthSamples {
updateDisplay()
needsFileScheduled = false
let frameCount = AVAudioFrameCount(audioLengthSamples - seekFrame)
player.scheduleSegment(
audioFile,
startingFrame: seekFrame,
frameCount: frameCount,
at: nil
) {
self.needsFileScheduled = true
}
if wasPlaying {
player.play()
}
}
}
private func updateForRateSelection() {
let selectedRate = allPlaybackRates[playbackRateIndex]
timeEffect.rate = Float(selectedRate.value)
}
private func updateForPitchSelection() {
let selectedPitch = allPlaybackPitches[playbackPitchIndex]
// 1 octave = 1200 cents
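// e.g. a value of 0.5 maps to +600 cents, half an octave up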
timeEffect.pitch = 1200 * Float(selectedPitch.value)
}
// MARK: Audio metering
private func scaledPower(power: Float) -> Float {
guard power.isFinite else {
return 0.0
}
let minDb: Float = -80
if power < minDb {
return 0.0
} else if power >= 1.0 {
return 1.0
} else {
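// Linearly interpolate between minDb (meter empty) and 0 dB (meter full)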
return (abs(minDb) - abs(power)) / abs(minDb)
}
}
private func connectVolumeTap() {
let format = engine.mainMixerNode.outputFormat(forBus: 0)
engine.mainMixerNode.installTap(
onBus: 0,
bufferSize: 1024,
format: format
) { buffer, _ in
guard let channelData = buffer.floatChannelData else {
return
}
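// Read samples from the first channel only; enough for a simple level meter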
let channelDataValue = channelData.pointee
let channelDataValueArray = stride(
from: 0,
to: Int(buffer.frameLength),
by: buffer.stride)
.map { channelDataValue[$0] }
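// Root mean square (RMS) of the buffer's samples, a measure of signal power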
let rms = sqrt(channelDataValueArray.map {
return $0 * $0
}
.reduce(0, +) / Float(buffer.frameLength))
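// Convert the RMS amplitude to decibels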
let avgPower = 20 * log10(rms)
let meterLevel = self.scaledPower(power: avgPower)
DispatchQueue.main.async {
self.meterLevel = self.isPlaying ? meterLevel : 0
}
}
}
private func disconnectVolumeTap() {
engine.mainMixerNode.removeTap(onBus: 0)
meterLevel = 0
}
// MARK: Display updates
private func setupDisplayLink() {
displayLink = CADisplayLink(
target: self,
selector: #selector(updateDisplay))
displayLink?.add(to: .current, forMode: .default)
displayLink?.isPaused = true
}
@objc private func updateDisplay() {
currentPosition = currentFrame + seekFrame
currentPosition = max(currentPosition, 0)
currentPosition = min(currentPosition, audioLengthSamples)
if currentPosition >= audioLengthSamples {
player.stop()
seekFrame = 0
currentPosition = 0
isPlaying = false
displayLink?.isPaused = true
disconnectVolumeTap()
}
playerProgress = Double(currentPosition) / Double(audioLengthSamples)
let time = Double(currentPosition) / audioSampleRate
playerTime = PlayerTime(
elapsedTime: time,
remainingTime: audioLengthSeconds - time)
}
}
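The core of the view model is the node graph assembled in `configureEngine(with:)`: the player node feeds an `AVAudioUnitTimePitch` effect, which feeds the engine's main mixer. Stripped of all UI state, a minimal sketch of the same chain looks like this (it assumes the same bundled "Intro.mp3" resource as the project above):

```swift
import AVFoundation

// Minimal sketch of the node graph used above:
// AVAudioPlayerNode -> AVAudioUnitTimePitch -> mainMixerNode -> output.
let engine = AVAudioEngine()
let player = AVAudioPlayerNode()
let timeEffect = AVAudioUnitTimePitch()

engine.attach(player)
engine.attach(timeEffect)

if let url = Bundle.main.url(forResource: "Intro", withExtension: "mp3"),
   let file = try? AVAudioFile(forReading: url) {
  let format = file.processingFormat
  engine.connect(player, to: timeEffect, format: format)
  engine.connect(timeEffect, to: engine.mainMixerNode, format: format)

  engine.prepare()
  do {
    try engine.start()
    player.scheduleFile(file, at: nil, completionHandler: nil)
    player.play()
  } catch {
    print("Engine failed to start: \(error)")
  }
}
```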
7. TimeConstant.swift
import Foundation
enum TimeConstant {
static let secsPerMin = 60
static let secsPerHour = TimeConstant.secsPerMin * 60
}
8. PlayerView.swift
import SwiftUI
struct PlayerView: View {
@StateObject var viewModel = PlayerViewModel()
var body: some View {
VStack {
Image.artwork
.resizable()
.aspectRatio(
nil,
contentMode: .fit)
.padding()
.layoutPriority(1)
controlsView
.padding(.bottom)
}
}
private var controlsView: some View {
VStack {
ProgressView(value: viewModel.playerProgress)
.progressViewStyle(
LinearProgressViewStyle(tint: .rwGreen))
.padding(.bottom, 8)
HStack {
Text(viewModel.playerTime.elapsedText)
Spacer()
Text(viewModel.playerTime.remainingText)
}
.font(.system(size: 14, weight: .semibold))
Spacer()
audioControlButtons
.disabled(!viewModel.isPlayerReady)
.padding(.bottom)
Spacer()
adjustmentControlsView
}
.padding(.horizontal)
}
private var adjustmentControlsView: some View {
VStack {
HStack {
Text("Playback speed")
.font(.system(size: 16, weight: .bold))
Spacer()
}
Picker("Select a rate", selection: $viewModel.playbackRateIndex) {
ForEach(0..<viewModel.allPlaybackRates.count) {
Text(viewModel.allPlaybackRates[$0].label)
}
}
.pickerStyle(SegmentedPickerStyle())
.disabled(!viewModel.isPlayerReady)
.padding(.bottom, 20)
HStack {
Text("Pitch adjustment")
.font(.system(size: 16, weight: .bold))
Spacer()
}
Picker("Select a pitch", selection: $viewModel.playbackPitchIndex) {
ForEach(0..<viewModel.allPlaybackPitches.count) {
Text(viewModel.allPlaybackPitches[$0].label)
}
}
.pickerStyle(SegmentedPickerStyle())
.disabled(!viewModel.isPlayerReady)
}
.padding()
.background(
RoundedRectangle(cornerRadius: 5)
.fill(Color.groupedBackground))
}
private var audioControlButtons: some View {
HStack(spacing: 20) {
Spacer()
Button {
viewModel.skip(forwards: false)
} label: {
Image.backward
}
.font(.system(size: 32))
Spacer()
Button {
viewModel.playOrPause()
} label: {
ZStack {
Color.rwGreen
.frame(
width: 10,
height: 35 * CGFloat(viewModel.meterLevel))
.opacity(0.5)
viewModel.isPlaying ? Image.pause : Image.play
}
}
.frame(width: 40)
.font(.system(size: 45))
Spacer()
Button {
viewModel.skip(forwards: true)
} label: {
Image.forward
}
.font(.system(size: 32))
Spacer()
}
.foregroundColor(.primary)
.padding(.vertical, 20)
.frame(height: 58)
}
}
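One small addition that helps while tweaking this layout is an Xcode canvas preview; the following is not in the original source:

```swift
import SwiftUI

// Hypothetical preview provider for Xcode's canvas.
struct PlayerView_Previews: PreviewProvider {
  static var previews: some View {
    PlayerView()
  }
}
```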
Afterword
This post walked through a simple usage example based on AVAudioEngine. If you found it interesting, please leave a like or a follow~~~