diff --git a/Demo/SwiftUI/Shared/Defaults.swift b/Demo/SwiftUI/Shared/Defaults.swift index 76c646806..e67e435ba 100644 --- a/Demo/SwiftUI/Shared/Defaults.swift +++ b/Demo/SwiftUI/Shared/Defaults.swift @@ -11,11 +11,6 @@ import SwiftUI public class Defaults: ObservableObject { @AppStorage("showRecentPlayList") public var showRecentPlayList = false - @AppStorage("isUseAudioRenderer") public var isUseAudioRenderer = KSOptions.isUseAudioRenderer { - didSet { - KSOptions.isUseAudioRenderer = isUseAudioRenderer - } - } @AppStorage("hardwareDecode") public var hardwareDecode = KSOptions.hardwareDecode { @@ -31,85 +26,99 @@ public class Defaults: ObservableObject { } } - @AppStorage("isUseDisplayLayer") public var isUseDisplayLayer = MEOptions.isUseDisplayLayer { + @AppStorage("isUseDisplayLayer") + public var isUseDisplayLayer = MEOptions.isUseDisplayLayer { didSet { MEOptions.isUseDisplayLayer = isUseDisplayLayer } } - @AppStorage("preferredForwardBufferDuration") public var preferredForwardBufferDuration = KSOptions.preferredForwardBufferDuration { + @AppStorage("preferredForwardBufferDuration") + public var preferredForwardBufferDuration = KSOptions.preferredForwardBufferDuration { didSet { KSOptions.preferredForwardBufferDuration = preferredForwardBufferDuration } } - @AppStorage("maxBufferDuration") public var maxBufferDuration = KSOptions.maxBufferDuration { + @AppStorage("maxBufferDuration") + public var maxBufferDuration = KSOptions.maxBufferDuration { didSet { KSOptions.maxBufferDuration = maxBufferDuration } } - @AppStorage("isLoopPlay") public var isLoopPlay = KSOptions.isLoopPlay { + @AppStorage("isLoopPlay") + public var isLoopPlay = KSOptions.isLoopPlay { didSet { KSOptions.isLoopPlay = isLoopPlay } } - @AppStorage("canBackgroundPlay") public var canBackgroundPlay = true { + @AppStorage("canBackgroundPlay") + public var canBackgroundPlay = true { didSet { KSOptions.canBackgroundPlay = canBackgroundPlay } } - @AppStorage("isAutoPlay") public var isAutoPlay = true { + @AppStorage("isAutoPlay") + public var isAutoPlay = true { didSet { KSOptions.isAutoPlay = isAutoPlay } } - @AppStorage("isSecondOpen") public var isSecondOpen = true { + @AppStorage("isSecondOpen") + public var isSecondOpen = true { didSet { KSOptions.isSecondOpen = isSecondOpen } } - @AppStorage("isAccurateSeek") public var isAccurateSeek = true { + @AppStorage("isAccurateSeek") + public var isAccurateSeek = true { didSet { KSOptions.isAccurateSeek = isAccurateSeek } } - @AppStorage("isPipPopViewController") public var isPipPopViewController = true { + @AppStorage("isPipPopViewController") + public var isPipPopViewController = true { didSet { KSOptions.isPipPopViewController = isPipPopViewController } } - @AppStorage("textFontSize") public var textFontSize = SubtitleModel.textFontSize { + @AppStorage("textFontSize") + public var textFontSize = SubtitleModel.textFontSize { didSet { SubtitleModel.textFontSize = textFontSize } } - @AppStorage("textBold") public var textBold = SubtitleModel.textBold { + @AppStorage("textBold") + public var textBold = SubtitleModel.textBold { didSet { SubtitleModel.textBold = textBold } } - @AppStorage("textItalic") public var textItalic = SubtitleModel.textItalic { + @AppStorage("textItalic") + public var textItalic = SubtitleModel.textItalic { didSet { SubtitleModel.textItalic = textItalic } } - @AppStorage("textColor") public var textColor = SubtitleModel.textColor { + @AppStorage("textColor") + public var textColor = SubtitleModel.textColor { didSet { SubtitleModel.textColor = textColor 
} } - @AppStorage("textBackgroundColor") public var textBackgroundColor = SubtitleModel.textBackgroundColor { + @AppStorage("textBackgroundColor") + public var textBackgroundColor = SubtitleModel.textBackgroundColor { didSet { SubtitleModel.textBackgroundColor = textBackgroundColor } @@ -164,9 +173,15 @@ public class Defaults: ObservableObject { } } + @AppStorage("audioPlayerType") + public var audioPlayerType = NSStringFromClass(KSOptions.audioPlayerType) { + didSet { + KSOptions.audioPlayerType = NSClassFromString(audioPlayerType) as! any AudioOutput.Type + } + } + public static let shared = Defaults() private init() { - KSOptions.isUseAudioRenderer = isUseAudioRenderer KSOptions.hardwareDecode = hardwareDecode MEOptions.isUseDisplayLayer = isUseDisplayLayer SubtitleModel.textFontSize = textFontSize @@ -188,6 +203,7 @@ public class Defaults: ObservableObject { KSOptions.isAccurateSeek = isAccurateSeek KSOptions.isPipPopViewController = isPipPopViewController MEOptions.yadifMode = yadifMode + KSOptions.audioPlayerType = NSClassFromString(audioPlayerType) as! any AudioOutput.Type } } diff --git a/Demo/SwiftUI/Shared/MovieModel.swift b/Demo/SwiftUI/Shared/MovieModel.swift index d1d10c4d8..5e4763235 100644 --- a/Demo/SwiftUI/Shared/MovieModel.swift +++ b/Demo/SwiftUI/Shared/MovieModel.swift @@ -13,7 +13,11 @@ import KSPlayer import UIKit #endif class MEOptions: KSOptions { + #if os(iOS) static var isUseDisplayLayer = true + #else + static var isUseDisplayLayer = false + #endif static var yadifMode = 1 override init() { super.init() diff --git a/Demo/SwiftUI/Shared/SettingView.swift b/Demo/SwiftUI/Shared/SettingView.swift index 71c61bfbe..b1fe0bf6a 100644 --- a/Demo/SwiftUI/Shared/SettingView.swift +++ b/Demo/SwiftUI/Shared/SettingView.swift @@ -45,12 +45,17 @@ struct SettingGeneralView: View { } struct SettingAudioView: View { - @Default(\.isUseAudioRenderer) - private var isUseAudioRenderer + @Default(\.audioPlayerType) + private var audioPlayerType init() {} var body: some View { Form { - Toggle("Use Audio Renderer", isOn: $isUseAudioRenderer) + Picker("audio Player Type", selection: $audioPlayerType) { + Text("AUGraph").tag(NSStringFromClass(AudioGraphPlayer.self)) + Text("AudioUnit").tag(NSStringFromClass(AudioUnitPlayer.self)) + Text("AVAudioEngine").tag(NSStringFromClass(AudioEnginePlayer.self)) + Text("AVSampleBufferAudioRenderer").tag(NSStringFromClass(AudioRendererPlayer.self)) + } } } } diff --git a/Demo/SwiftUI/Shared/TracyApp.swift b/Demo/SwiftUI/Shared/TracyApp.swift index 6ed334b04..874e4edb0 100644 --- a/Demo/SwiftUI/Shared/TracyApp.swift +++ b/Demo/SwiftUI/Shared/TracyApp.swift @@ -14,10 +14,11 @@ import UserNotifications @main struct TracyApp: App { #if os(macOS) - @NSApplicationDelegateAdaptor private var appDelegate: AppDelegate + @NSApplicationDelegateAdaptor #else - @UIApplicationDelegateAdaptor private var appDelegate: AppDelegate + @UIApplicationDelegateAdaptor #endif + private var appDelegate: AppDelegate private let appModel = APPModel() init() { let arguments = ProcessInfo.processInfo.arguments.dropFirst() diff --git a/Sources/KSPlayer/AVPlayer/KSAVPlayer.swift b/Sources/KSPlayer/AVPlayer/KSAVPlayer.swift index 5f4bc0c37..9812f9941 100644 --- a/Sources/KSPlayer/AVPlayer/KSAVPlayer.swift +++ b/Sources/KSPlayer/AVPlayer/KSAVPlayer.swift @@ -126,7 +126,8 @@ public class KSAVPlayer { public private(set) var duration: TimeInterval = 0 public private(set) var fileSize: Double = 0 public private(set) var playableTime: TimeInterval = 0 - + public var audioBitrate: Int = 0 + 
public var videoBitrate: Int = 0 public var playbackRate: Float = 1 { didSet { if playbackState == .playing { diff --git a/Sources/KSPlayer/AVPlayer/KSOptions.swift b/Sources/KSPlayer/AVPlayer/KSOptions.swift index 627eafcbb..eb15e6984 100644 --- a/Sources/KSPlayer/AVPlayer/KSOptions.swift +++ b/Sources/KSPlayer/AVPlayer/KSOptions.swift @@ -70,8 +70,6 @@ open class KSOptions { // audio public var audioFilters = [String]() public var syncDecodeAudio = false - /// true: AVSampleBufferAudioRenderer false: AVAudioEngine - public var isUseAudioRenderer = KSOptions.isUseAudioRenderer // Locale(identifier: "en-US") Locale(identifier: "zh-CN") public var audioLocale: Locale? // subtitle @@ -244,7 +242,11 @@ open class KSOptions { } open func isUseDisplayLayer() -> Bool { + #if os(iOS) display == .plane + #else + false + #endif } open func io(log: String) { @@ -486,24 +488,24 @@ public extension KSOptions { } #if !os(macOS) - static func isSpatialAudioEnabled() -> Bool { + static func isSpatialAudioEnabled(channelCount: AVAudioChannelCount) -> Bool { if #available(tvOS 15.0, iOS 15.0, *) { let isSpatialAudioEnabled = AVAudioSession.sharedInstance().currentRoute.outputs.contains { $0.isSpatialAudioEnabled } - try? AVAudioSession.sharedInstance().setSupportsMultichannelContent(isSpatialAudioEnabled) + try? AVAudioSession.sharedInstance().setSupportsMultichannelContent(channelCount > 2) return isSpatialAudioEnabled } else { return false } } - static func outputNumberOfChannels(channelCount: AVAudioChannelCount, isUseAudioRenderer: Bool) -> AVAudioChannelCount { + static func outputNumberOfChannels(channelCount: AVAudioChannelCount) -> AVAudioChannelCount { let maximumOutputNumberOfChannels = AVAudioChannelCount(AVAudioSession.sharedInstance().maximumOutputNumberOfChannels) let preferredOutputNumberOfChannels = AVAudioChannelCount(AVAudioSession.sharedInstance().preferredOutputNumberOfChannels) KSLog("[audio] maximumOutputNumberOfChannels: \(maximumOutputNumberOfChannels)") KSLog("[audio] preferredOutputNumberOfChannels: \(preferredOutputNumberOfChannels)") - setAudioSession() - let isSpatialAudioEnabled = isSpatialAudioEnabled() + let isSpatialAudioEnabled = isSpatialAudioEnabled(channelCount: channelCount) KSLog("[audio] isSpatialAudioEnabled: \(isSpatialAudioEnabled)") + let isUseAudioRenderer = KSOptions.audioPlayerType == AudioRendererPlayer.self KSLog("[audio] isUseAudioRenderer: \(isUseAudioRenderer)") var channelCount = channelCount if channelCount > 2 { @@ -512,14 +514,15 @@ public extension KSOptions { try? AVAudioSession.sharedInstance().setPreferredOutputNumberOfChannels(Int(minChannels)) KSLog("[audio] set preferredOutputNumberOfChannels: \(minChannels)") } - // Don't read maxRouteChannelsCount from currentRoute; it can be inaccurate and make multichannel devices report only 2 channels (easily triggered when a 2-channel stream plays first) -// if !(isUseAudioRenderer && isSpatialAudioEnabled) { -// let maxRouteChannelsCount = AVAudioSession.sharedInstance().currentRoute.outputs.compactMap { -// $0.channels?.count -// }.max() ?? 2 -// KSLog("[audio] currentRoute max channels: \(maxRouteChannelsCount)") + if !(isUseAudioRenderer && isSpatialAudioEnabled) { + let maxRouteChannelsCount = AVAudioSession.sharedInstance().currentRoute.outputs.compactMap { + $0.channels?.count + }.max() ??
2 + KSLog("[audio] currentRoute max channels: \(maxRouteChannelsCount)") + // 不要用maxRouteChannelsCount来panduan,有可能会不准。导致多音道设备也返回2(一开始播放一个2声道,就容易出现) // channelCount = AVAudioChannelCount(min(AVAudioSession.sharedInstance().outputNumberOfChannels, maxRouteChannelsCount)) -// } + channelCount = AVAudioChannelCount(AVAudioSession.sharedInstance().outputNumberOfChannels) + } } else { channelCount = 2 } diff --git a/Sources/KSPlayer/AVPlayer/MediaPlayerProtocol.swift b/Sources/KSPlayer/AVPlayer/MediaPlayerProtocol.swift index 9e767ccd5..0a0ed68ee 100644 --- a/Sources/KSPlayer/AVPlayer/MediaPlayerProtocol.swift +++ b/Sources/KSPlayer/AVPlayer/MediaPlayerProtocol.swift @@ -18,6 +18,8 @@ public protocol MediaPlayback: AnyObject { var fileSize: Double { get } var metadata: [String: String] { get } var naturalSize: CGSize { get } + var audioBitrate: Int { get } + var videoBitrate: Int { get } var currentPlaybackTime: TimeInterval { get } func prepareToPlay() func shutdown() @@ -127,6 +129,23 @@ public enum FFmpegFieldOrder: UInt8 { case bt // < Bottom coded first, top displayed first } +extension FFmpegFieldOrder: CustomStringConvertible { + public var description: String { + switch self { + case .unknown, .progressive: + return "progressive" + case .tt: + return "top first" + case .bb: + return "bottom first" + case .tb: + return "top coded first (swapped)" + case .bt: + return "bottom coded first (swapped)" + } + } +} + // swiftlint:enable identifier_name public extension MediaPlayerTrack { var codecType: FourCharCode { diff --git a/Sources/KSPlayer/MEPlayer/AVFoundationExtension.swift b/Sources/KSPlayer/MEPlayer/AVFoundationExtension.swift index a1fb17cb6..5aa7b09d5 100644 --- a/Sources/KSPlayer/MEPlayer/AVFoundationExtension.swift +++ b/Sources/KSPlayer/MEPlayer/AVFoundationExtension.swift @@ -166,6 +166,10 @@ extension AVAudioChannelLayout { KSLog("[audio] out mask: \(outChannel.u.mask) nb_channels: \(outChannel.nb_channels)") return outChannel } + + public var channelDescriptions: String { + "tag: \(layoutTag), channelDescriptions: \(layout.channelDescriptions)" + } } extension AVAudioFormat { diff --git a/Sources/KSPlayer/MEPlayer/AudioEnginePlayer.swift b/Sources/KSPlayer/MEPlayer/AudioEnginePlayer.swift index 6d5271c6e..8b280c966 100644 --- a/Sources/KSPlayer/MEPlayer/AudioEnginePlayer.swift +++ b/Sources/KSPlayer/MEPlayer/AudioEnginePlayer.swift @@ -12,71 +12,76 @@ public protocol AudioOutput: FrameOutput { var playbackRate: Float { get set } var volume: Float { get set } var isMuted: Bool { get set } - var attackTime: Float { get set } - var releaseTime: Float { get set } - var threshold: Float { get set } - var expansionRatio: Float { get set } - var overallGain: Float { get set } init() func prepare(audioFormat: AVAudioFormat) func play(time: TimeInterval) } -public final class AudioEnginePlayer: AudioOutput { - public var attackTime: Float { +public protocol AudioDynamicsProcessor { + var audioUnitForDynamicsProcessor: AudioUnit { get } +} + +public extension AudioDynamicsProcessor { + var attackTime: Float { get { var value = AudioUnitParameterValue(1.0) - AudioUnitGetParameter(dynamicsProcessor.audioUnit, kDynamicsProcessorParam_AttackTime, kAudioUnitScope_Global, 0, &value) + AudioUnitGetParameter(audioUnitForDynamicsProcessor, kDynamicsProcessorParam_AttackTime, kAudioUnitScope_Global, 0, &value) return value } set { - AudioUnitSetParameter(dynamicsProcessor.audioUnit, kDynamicsProcessorParam_AttackTime, kAudioUnitScope_Global, 0, AudioUnitParameterValue(newValue), 0) + 
AudioUnitSetParameter(audioUnitForDynamicsProcessor, kDynamicsProcessorParam_AttackTime, kAudioUnitScope_Global, 0, AudioUnitParameterValue(newValue), 0) } } - public var releaseTime: Float { + var releaseTime: Float { get { var value = AudioUnitParameterValue(1.0) - AudioUnitGetParameter(dynamicsProcessor.audioUnit, kDynamicsProcessorParam_ReleaseTime, kAudioUnitScope_Global, 0, &value) + AudioUnitGetParameter(audioUnitForDynamicsProcessor, kDynamicsProcessorParam_ReleaseTime, kAudioUnitScope_Global, 0, &value) return value } set { - AudioUnitSetParameter(dynamicsProcessor.audioUnit, kDynamicsProcessorParam_ReleaseTime, kAudioUnitScope_Global, 0, AudioUnitParameterValue(newValue), 0) + AudioUnitSetParameter(audioUnitForDynamicsProcessor, kDynamicsProcessorParam_ReleaseTime, kAudioUnitScope_Global, 0, AudioUnitParameterValue(newValue), 0) } } - public var threshold: Float { + var threshold: Float { get { var value = AudioUnitParameterValue(1.0) - AudioUnitGetParameter(dynamicsProcessor.audioUnit, kDynamicsProcessorParam_Threshold, kAudioUnitScope_Global, 0, &value) + AudioUnitGetParameter(audioUnitForDynamicsProcessor, kDynamicsProcessorParam_Threshold, kAudioUnitScope_Global, 0, &value) return value } set { - AudioUnitSetParameter(dynamicsProcessor.audioUnit, kDynamicsProcessorParam_Threshold, kAudioUnitScope_Global, 0, AudioUnitParameterValue(newValue), 0) + AudioUnitSetParameter(audioUnitForDynamicsProcessor, kDynamicsProcessorParam_Threshold, kAudioUnitScope_Global, 0, AudioUnitParameterValue(newValue), 0) } } - public var expansionRatio: Float { + var expansionRatio: Float { get { var value = AudioUnitParameterValue(1.0) - AudioUnitGetParameter(dynamicsProcessor.audioUnit, kDynamicsProcessorParam_ExpansionRatio, kAudioUnitScope_Global, 0, &value) + AudioUnitGetParameter(audioUnitForDynamicsProcessor, kDynamicsProcessorParam_ExpansionRatio, kAudioUnitScope_Global, 0, &value) return value } set { - AudioUnitSetParameter(dynamicsProcessor.audioUnit, kDynamicsProcessorParam_ExpansionRatio, kAudioUnitScope_Global, 0, AudioUnitParameterValue(newValue), 0) + AudioUnitSetParameter(audioUnitForDynamicsProcessor, kDynamicsProcessorParam_ExpansionRatio, kAudioUnitScope_Global, 0, AudioUnitParameterValue(newValue), 0) } } - public var overallGain: Float { + var overallGain: Float { get { var value = AudioUnitParameterValue(1.0) - AudioUnitGetParameter(dynamicsProcessor.audioUnit, kDynamicsProcessorParam_OverallGain, kAudioUnitScope_Global, 0, &value) + AudioUnitGetParameter(audioUnitForDynamicsProcessor, kDynamicsProcessorParam_OverallGain, kAudioUnitScope_Global, 0, &value) return value } set { - AudioUnitSetParameter(dynamicsProcessor.audioUnit, kDynamicsProcessorParam_OverallGain, kAudioUnitScope_Global, 0, AudioUnitParameterValue(newValue), 0) + AudioUnitSetParameter(audioUnitForDynamicsProcessor, kDynamicsProcessorParam_OverallGain, kAudioUnitScope_Global, 0, AudioUnitParameterValue(newValue), 0) } } +} + +public final class AudioEnginePlayer: AudioOutput, AudioDynamicsProcessor { + public var audioUnitForDynamicsProcessor: AudioUnit { + dynamicsProcessor.audioUnit + } private let engine = AVAudioEngine() private var sourceNode: AVAudioSourceNode? 
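The extracted AudioDynamicsProcessor protocol above turns the five compressor knobs (attackTime, releaseTime, threshold, expansionRatio, overallGain) into a capability rather than a required part of AudioOutput: any output that can hand back its dynamics-processor audio unit inherits the accessors from the extension. A minimal sketch of a conformer — `MyOutput` and `someUnit` are hypothetical, not part of this change:

```swift
import AudioToolbox

// Exposing the unit is the entire conformance; the protocol extension's
// accessors forward to AudioUnitGetParameter/AudioUnitSetParameter on it.
final class MyOutput: AudioDynamicsProcessor {
    let audioUnitForDynamicsProcessor: AudioUnit
    init(unit: AudioUnit) { audioUnitForDynamicsProcessor = unit }
}
// var output = MyOutput(unit: someUnit)
// output.overallGain = 6 // kDynamicsProcessorParam_OverallGain, in dB
```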
@@ -146,8 +151,7 @@ public final class AudioEnginePlayer: AudioOutput { sourceNodeAudioFormat = audioFormat KSLog("[audio] outputFormat AudioFormat: \(audioFormat)") if let channelLayout = audioFormat.channelLayout { - KSLog("[audio] outputFormat tag: \(channelLayout.layoutTag)") - KSLog("[audio] outputFormat channelDescriptions: \(channelLayout.layout.channelDescriptions)") + KSLog("[audio] outputFormat channelLayout \(channelLayout.channelDescriptions)") } let isRunning = engine.isRunning engine.stop() diff --git a/Sources/KSPlayer/MEPlayer/AudioGraphPlayer.swift b/Sources/KSPlayer/MEPlayer/AudioGraphPlayer.swift index 3db850e83..10fa4f824 100644 --- a/Sources/KSPlayer/MEPlayer/AudioGraphPlayer.swift +++ b/Sources/KSPlayer/MEPlayer/AudioGraphPlayer.swift @@ -9,11 +9,11 @@ import AudioToolbox import AVFAudio import CoreAudio -public final class AudioGraphPlayer: AudioOutput { +public final class AudioGraphPlayer: AudioOutput, AudioDynamicsProcessor { + public private(set) var audioUnitForDynamicsProcessor: AudioUnit private let graph: AUGraph private var audioUnitForMixer: AudioUnit! private var audioUnitForTimePitch: AudioUnit! - private var audioUnitForDynamicsProcessor: AudioUnit! private var audioUnitForOutput: AudioUnit! private var currentRenderReadOffset = UInt32(0) private var sourceNodeAudioFormat: AVAudioFormat? @@ -93,61 +93,6 @@ public final class AudioGraphPlayer: AudioOutput { } } - public var attackTime: Float { - get { - var value = AudioUnitParameterValue(1.0) - AudioUnitGetParameter(audioUnitForDynamicsProcessor, kDynamicsProcessorParam_AttackTime, kAudioUnitScope_Global, 0, &value) - return value - } - set { - AudioUnitSetParameter(audioUnitForDynamicsProcessor, kDynamicsProcessorParam_AttackTime, kAudioUnitScope_Global, 0, AudioUnitParameterValue(newValue), 0) - } - } - - public var releaseTime: Float { - get { - var value = AudioUnitParameterValue(1.0) - AudioUnitGetParameter(audioUnitForDynamicsProcessor, kDynamicsProcessorParam_ReleaseTime, kAudioUnitScope_Global, 0, &value) - return value - } - set { - AudioUnitSetParameter(audioUnitForDynamicsProcessor, kDynamicsProcessorParam_ReleaseTime, kAudioUnitScope_Global, 0, AudioUnitParameterValue(newValue), 0) - } - } - - public var threshold: Float { - get { - var value = AudioUnitParameterValue(1.0) - AudioUnitGetParameter(audioUnitForDynamicsProcessor, kDynamicsProcessorParam_Threshold, kAudioUnitScope_Global, 0, &value) - return value - } - set { - AudioUnitSetParameter(audioUnitForDynamicsProcessor, kDynamicsProcessorParam_Threshold, kAudioUnitScope_Global, 0, AudioUnitParameterValue(newValue), 0) - } - } - - public var expansionRatio: Float { - get { - var value = AudioUnitParameterValue(1.0) - AudioUnitGetParameter(audioUnitForDynamicsProcessor, kDynamicsProcessorParam_ExpansionRatio, kAudioUnitScope_Global, 0, &value) - return value - } - set { - AudioUnitSetParameter(audioUnitForDynamicsProcessor, kDynamicsProcessorParam_ExpansionRatio, kAudioUnitScope_Global, 0, AudioUnitParameterValue(newValue), 0) - } - } - - public var overallGain: Float { - get { - var value = AudioUnitParameterValue(1.0) - AudioUnitGetParameter(audioUnitForDynamicsProcessor, kDynamicsProcessorParam_OverallGain, kAudioUnitScope_Global, 0, &value) - return value - } - set { - AudioUnitSetParameter(audioUnitForDynamicsProcessor, kDynamicsProcessorParam_OverallGain, kAudioUnitScope_Global, 0, AudioUnitParameterValue(newValue), 0) - } - } - public init() { var newGraph: AUGraph! 
NewAUGraph(&newGraph) @@ -189,12 +134,18 @@ public final class AudioGraphPlayer: AudioOutput { AUGraphConnectNodeInput(graph, nodeForDynamicsProcessor, 0, nodeForMixer, 0) AUGraphConnectNodeInput(graph, nodeForMixer, 0, nodeForOutput, 0) AUGraphNodeInfo(graph, nodeForTimePitch, &descriptionForTimePitch, &audioUnitForTimePitch) + var audioUnitForDynamicsProcessor: AudioUnit? AUGraphNodeInfo(graph, nodeForDynamicsProcessor, &descriptionForDynamicsProcessor, &audioUnitForDynamicsProcessor) + self.audioUnitForDynamicsProcessor = audioUnitForDynamicsProcessor! AUGraphNodeInfo(graph, nodeForMixer, &descriptionForMixer, &audioUnitForMixer) AUGraphNodeInfo(graph, nodeForOutput, &descriptionForOutput, &audioUnitForOutput) - var inputCallbackStruct = renderCallbackStruct() - AUGraphSetNodeInputCallback(graph, nodeForTimePitch, 0, &inputCallbackStruct) addRenderNotify(audioUnit: audioUnitForOutput) + var value = UInt32(1) + AudioUnitSetProperty(audioUnitForTimePitch, + kAudioOutputUnitProperty_EnableIO, + kAudioUnitScope_Output, 0, + &value, + UInt32(MemoryLayout.size)) } public func prepare(audioFormat: AVAudioFormat) { @@ -205,20 +156,19 @@ public final class AudioGraphPlayer: AudioOutput { sampleSize = audioFormat.sampleSize var audioStreamBasicDescription = audioFormat.formatDescription.audioStreamBasicDescription let audioStreamBasicDescriptionSize = UInt32(MemoryLayout.size) - var audioPlayerMaximumFramesPerSlice = AVAudioFrameCount(4096) - let inDataSize = UInt32(MemoryLayout.size(ofValue: audioPlayerMaximumFramesPerSlice)) + let channelLayout = audioFormat.channelLayout?.layout [audioUnitForTimePitch, audioUnitForDynamicsProcessor, audioUnitForMixer, audioUnitForOutput].forEach { unit in guard let unit else { return } - AudioUnitSetProperty(unit, - kAudioUnitProperty_MaximumFramesPerSlice, - kAudioUnitScope_Global, 0, - &audioPlayerMaximumFramesPerSlice, - inDataSize) AudioUnitSetProperty(unit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &audioStreamBasicDescription, audioStreamBasicDescriptionSize) + AudioUnitSetProperty(unit, + kAudioUnitProperty_AudioChannelLayout, + kAudioUnitScope_Input, 0, + channelLayout, + UInt32(MemoryLayout.size)) if unit != audioUnitForOutput { AudioUnitSetProperty(unit, kAudioUnitProperty_StreamFormat, @@ -226,6 +176,14 @@ public final class AudioGraphPlayer: AudioOutput { &audioStreamBasicDescription, audioStreamBasicDescriptionSize) } + if unit == audioUnitForTimePitch { + var inputCallbackStruct = renderCallbackStruct() + AudioUnitSetProperty(unit, + kAudioUnitProperty_SetRenderCallback, + kAudioUnitScope_Input, 0, + &inputCallbackStruct, + UInt32(MemoryLayout.size)) + } } AUGraphInitialize(graph) } diff --git a/Sources/KSPlayer/MEPlayer/AudioRendererPlayer.swift b/Sources/KSPlayer/MEPlayer/AudioRendererPlayer.swift index f855f4444..cdf1244a3 100644 --- a/Sources/KSPlayer/MEPlayer/AudioRendererPlayer.swift +++ b/Sources/KSPlayer/MEPlayer/AudioRendererPlayer.swift @@ -35,16 +35,6 @@ public class AudioRendererPlayer: AudioOutput { } } - public var attackTime: Float = 0 - - public var releaseTime: Float = 0 - - public var threshold: Float = 0 - - public var expansionRatio: Float = 0 - - public var overallGain: Float = 0 - public weak var renderSource: OutputRenderSourceDelegate? private var periodicTimeObserver: Any? 
private let renderer = AVSampleBufferAudioRenderer() diff --git a/Sources/KSPlayer/MEPlayer/AudioUnitPlayer.swift b/Sources/KSPlayer/MEPlayer/AudioUnitPlayer.swift new file mode 100644 index 000000000..3a09cb061 --- /dev/null +++ b/Sources/KSPlayer/MEPlayer/AudioUnitPlayer.swift @@ -0,0 +1,221 @@ +// +// AudioUnitPlayer.swift +// KSPlayer +// +// Created by kintan on 2018/3/16. +// + +import AudioToolbox +import AVFAudio +import CoreAudio + +public final class AudioUnitPlayer: AudioOutput { + private var audioUnitForOutput: AudioUnit! + private var currentRenderReadOffset = UInt32(0) + private var sourceNodeAudioFormat: AVAudioFormat? + private var sampleSize = UInt32(MemoryLayout<Float>.size) + #if os(macOS) + private var volumeBeforeMute: Float = 0.0 + #endif + public weak var renderSource: OutputRenderSourceDelegate? + private var currentRender: AudioFrame? { + didSet { + if currentRender == nil { + currentRenderReadOffset = 0 + } + } + } + + private var isPlaying = false + public func play(time _: TimeInterval) { + if !isPlaying { + isPlaying = true + AudioOutputUnitStart(audioUnitForOutput) + } + } + + public func pause() { + if isPlaying { + isPlaying = false + AudioOutputUnitStop(audioUnitForOutput) + } + } + + public var playbackRate: Float { + get { + var playbackRate = AudioUnitParameterValue(0.0) + AudioUnitGetParameter(audioUnitForOutput, kNewTimePitchParam_Rate, kAudioUnitScope_Global, 0, &playbackRate) + return playbackRate + } + set { + AudioUnitSetParameter(audioUnitForOutput, kNewTimePitchParam_Rate, kAudioUnitScope_Global, 0, newValue, 0) + } + } + + public var volume: Float { + get { + var volume = AudioUnitParameterValue(0.0) + #if os(macOS) + let inID = kStereoMixerParam_Volume + #else + let inID = kMultiChannelMixerParam_Volume + #endif + AudioUnitGetParameter(audioUnitForOutput, inID, kAudioUnitScope_Input, 0, &volume) + return volume + } + set { + #if os(macOS) + let inID = kStereoMixerParam_Volume + #else + let inID = kMultiChannelMixerParam_Volume + #endif + AudioUnitSetParameter(audioUnitForOutput, inID, kAudioUnitScope_Input, 0, newValue, 0) + } + } + + public var isMuted: Bool = false + public var latency = Float64(0) + public init() { + var descriptionForOutput = AudioComponentDescription() + descriptionForOutput.componentType = kAudioUnitType_Output + descriptionForOutput.componentManufacturer = kAudioUnitManufacturer_Apple + #if os(macOS) + descriptionForOutput.componentSubType = kAudioUnitSubType_HALOutput + #else + descriptionForOutput.componentSubType = kAudioUnitSubType_RemoteIO + #endif + let nodeForOutput = AudioComponentFindNext(nil, &descriptionForOutput) + AudioComponentInstanceNew(nodeForOutput!, &audioUnitForOutput) + var value = UInt32(1) + AudioUnitSetProperty(audioUnitForOutput, + kAudioOutputUnitProperty_EnableIO, + kAudioUnitScope_Output, 0, + &value, + UInt32(MemoryLayout<UInt32>.size)) + } + + public func prepare(audioFormat: AVAudioFormat) { + if sourceNodeAudioFormat == audioFormat { + return + } + sourceNodeAudioFormat = audioFormat + sampleSize = audioFormat.sampleSize + var audioStreamBasicDescription = audioFormat.formatDescription.audioStreamBasicDescription + let audioStreamBasicDescriptionSize = UInt32(MemoryLayout<AudioStreamBasicDescription>.size) + AudioUnitSetProperty(audioUnitForOutput, + kAudioUnitProperty_StreamFormat, + kAudioUnitScope_Input, 0, + &audioStreamBasicDescription, + audioStreamBasicDescriptionSize) + var inputCallbackStruct = renderCallbackStruct() + AudioUnitSetProperty(audioUnitForOutput, + kAudioUnitProperty_SetRenderCallback, + kAudioUnitScope_Input,
0, + &inputCallbackStruct, + UInt32(MemoryLayout.size)) + addRenderNotify(audioUnit: audioUnitForOutput) + let channelLayout = audioFormat.channelLayout?.layout + AudioUnitSetProperty(audioUnitForOutput, + kAudioUnitProperty_AudioChannelLayout, + kAudioUnitScope_Input, 0, + channelLayout, + UInt32(MemoryLayout.size)) + AudioUnitInitialize(audioUnitForOutput) + var size = UInt32(MemoryLayout.size) + AudioUnitGetProperty(audioUnitForOutput, + kAudioUnitProperty_Latency, + kAudioUnitScope_Global, 0, + &latency, + &size) + } + + public func flush() { + currentRender = nil + } + + deinit {} +} + +extension AudioUnitPlayer { + private func renderCallbackStruct() -> AURenderCallbackStruct { + var inputCallbackStruct = AURenderCallbackStruct() + inputCallbackStruct.inputProcRefCon = Unmanaged.passUnretained(self).toOpaque() + inputCallbackStruct.inputProc = { refCon, _, _, _, inNumberFrames, ioData in + guard let ioData else { + return noErr + } + let `self` = Unmanaged.fromOpaque(refCon).takeUnretainedValue() + self.audioPlayerShouldInputData(ioData: UnsafeMutableAudioBufferListPointer(ioData), numberOfFrames: inNumberFrames) + return noErr + } + return inputCallbackStruct + } + + private func addRenderNotify(audioUnit: AudioUnit) { + AudioUnitAddRenderNotify(audioUnit, { refCon, ioActionFlags, inTimeStamp, _, _, _ in + let `self` = Unmanaged.fromOpaque(refCon).takeUnretainedValue() + autoreleasepool { + if ioActionFlags.pointee.contains(.unitRenderAction_PostRender) { + self.audioPlayerDidRenderSample(sampleTimestamp: inTimeStamp.pointee) + } + } + return noErr + }, Unmanaged.passUnretained(self).toOpaque()) + } + + private func audioPlayerShouldInputData(ioData: UnsafeMutableAudioBufferListPointer, numberOfFrames: UInt32) { + var ioDataWriteOffset = 0 + var numberOfSamples = numberOfFrames + while numberOfSamples > 0 { + if currentRender == nil { + currentRender = renderSource?.getAudioOutputRender() + } + guard let currentRender else { + break + } + let residueLinesize = currentRender.numberOfSamples - currentRenderReadOffset + guard residueLinesize > 0 else { + self.currentRender = nil + continue + } + if sourceNodeAudioFormat != currentRender.audioFormat { + runInMainqueue { [weak self] in + guard let self else { + return + } + self.prepare(audioFormat: currentRender.audioFormat) + } + return + } + let framesToCopy = min(numberOfSamples, residueLinesize) + let bytesToCopy = Int(framesToCopy * sampleSize) + let offset = Int(currentRenderReadOffset * sampleSize) + for i in 0 ..< min(ioData.count, currentRender.data.count) { + if isMuted { + memset(ioData[i].mData! + ioDataWriteOffset, 0, bytesToCopy) + } else { + (ioData[i].mData! + ioDataWriteOffset).copyMemory(from: currentRender.data[i]! + offset, byteCount: bytesToCopy) + } + } + numberOfSamples -= framesToCopy + ioDataWriteOffset += bytesToCopy + currentRenderReadOffset += framesToCopy + } + let sizeCopied = (numberOfFrames - numberOfSamples) * sampleSize + for i in 0 ..< ioData.count { + let sizeLeft = Int(ioData[i].mDataByteSize - sizeCopied) + if sizeLeft > 0 { + memset(ioData[i].mData! 
+ Int(sizeCopied), 0, sizeLeft) + } + } + } + + private func audioPlayerDidRenderSample(sampleTimestamp _: AudioTimeStamp) { + if let currentRender { + let currentPreparePosition = currentRender.position + currentRender.duration * Int64(currentRenderReadOffset) / Int64(currentRender.numberOfSamples) + if currentPreparePosition > 0 { + renderSource?.setAudio(time: currentRender.timebase.cmtime(for: currentPreparePosition)) + } + } + } +} diff --git a/Sources/KSPlayer/MEPlayer/KSMEPlayer.swift b/Sources/KSPlayer/MEPlayer/KSMEPlayer.swift index 06da805a6..748730884 100644 --- a/Sources/KSPlayer/MEPlayer/KSMEPlayer.swift +++ b/Sources/KSPlayer/MEPlayer/KSMEPlayer.swift @@ -16,7 +16,7 @@ import AppKit public class KSMEPlayer: NSObject { private var loopCount = 1 private var playerItem: MEPlayerItem - private let audioOutput: AudioOutput + public let audioOutput: AudioOutput private var options: KSOptions private var bufferingCountDownTimer: Timer? public private(set) var videoOutput: (VideoOutput & UIView)? { @@ -100,8 +100,9 @@ public class KSMEPlayer: NSObject { } public required init(url: URL, options: KSOptions) { + KSOptions.setAudioSession() + audioOutput = KSOptions.audioPlayerType.init() playerItem = MEPlayerItem(url: url, options: options) - audioOutput = options.isUseAudioRenderer ? AudioRendererPlayer() : KSOptions.audioPlayerType.init() if options.videoDisable { videoOutput = nil } else { @@ -149,7 +150,7 @@ private extension KSMEPlayer { @objc private func spatialCapabilityChange(notification _: Notification) { KSLog("[audio] spatialCapabilityChange") tracks(mediaType: .audio).forEach { track in - (track as? FFmpegAssetTrack)?.audioDescriptor?.setAudioSession(isUseAudioRenderer: options.isUseAudioRenderer) + (track as? FFmpegAssetTrack)?.audioDescriptor?.updateAudioFormat() } } @@ -164,7 +165,7 @@ private extension KSMEPlayer { return } tracks(mediaType: .audio).forEach { track in - (track as? FFmpegAssetTrack)?.audioDescriptor?.setAudioSession(isUseAudioRenderer: options.isUseAudioRenderer) + (track as? FFmpegAssetTrack)?.audioDescriptor?.updateAudioFormat() } audioOutput.flush() } @@ -181,10 +182,13 @@ extension KSMEPlayer: MEPlayerDelegate { } let audioDescriptor = tracks(mediaType: .audio).first { $0.isEnabled }.flatMap { $0 as? FFmpegAssetTrack - }?.audioDescriptor ?? 
.defaultValue + }?.audioDescriptor runInMainqueue { [weak self] in guard let self else { return } - self.audioOutput.prepare(audioFormat: audioDescriptor.audioFormat) + if let audioDescriptor { + KSLog("[audio] audio type: \(self.audioOutput) prepare audioFormat: \(audioDescriptor.audioFormat)") + self.audioOutput.prepare(audioFormat: audioDescriptor.audioFormat) + } if let controlTimebase = videoOutput?.displayLayer.controlTimebase, self.options.startPlayTime > 1 { CMTimebaseSetTime(controlTimebase, time: CMTimeMake(value: Int64(self.options.startPlayTime), timescale: 1)) } @@ -323,6 +327,10 @@ extension KSMEPlayer: MediaPlayerProtocol { public var seekable: Bool { playerItem.seekable } + public var videoBitrate: Int { playerItem.videoBitrate } + + public var audioBitrate: Int { playerItem.audioBitrate } + public func seek(time: TimeInterval, completion: @escaping ((Bool) -> Void)) { let time = max(time, 0) playbackState = .seeking @@ -548,49 +556,4 @@ public extension KSMEPlayer { var bytesRead: Int64 { playerItem.bytesRead } - - var attackTime: Float { - get { - audioOutput.attackTime - } - set { - audioOutput.attackTime = newValue - } - } - - var releaseTime: Float { - get { - audioOutput.releaseTime - } - set { - audioOutput.releaseTime = newValue - } - } - - var threshold: Float { - get { - audioOutput.threshold - } - set { - audioOutput.threshold = newValue - } - } - - var expansionRatio: Float { - get { - audioOutput.expansionRatio - } - set { - audioOutput.expansionRatio = newValue - } - } - - var overallGain: Float { - get { - audioOutput.overallGain - } - set { - audioOutput.overallGain = newValue - } - } } diff --git a/Sources/KSPlayer/MEPlayer/MEPlayerItem.swift b/Sources/KSPlayer/MEPlayer/MEPlayerItem.swift index a35c20f72..89815bc30 100644 --- a/Sources/KSPlayer/MEPlayer/MEPlayerItem.swift +++ b/Sources/KSPlayer/MEPlayer/MEPlayerItem.swift @@ -328,9 +328,6 @@ extension MEPlayerItem { coreStream.pointee.discard = AVDISCARD_ALL if let assetTrack = FFmpegAssetTrack(stream: coreStream) { assetTrack.startTime = startTime - if let audioDescriptor = assetTrack.audioDescriptor { - audioDescriptor.setAudioSession(isUseAudioRenderer: options.isUseAudioRenderer) - } if !options.subtitleDisable, assetTrack.mediaType == .subtitle { let subtitle = SyncPlayerItemTrack<SubtitleFrame>(assetTrack: assetTrack, options: options) assetTrack.isEnabled = !assetTrack.isImageSubtitle @@ -572,6 +569,14 @@ extension MEPlayerItem { // MARK: MediaPlayback extension MEPlayerItem: MediaPlayback { + var videoBitrate: Int { + Int(8 * (videoTrack?.bitrate ?? 0)) + } + + var audioBitrate: Int { + Int(8 * (audioTrack?.bitrate ?? 0)) + } + var seekable: Bool { guard let formatCtx else { return false } @@ -769,11 +774,7 @@ extension MEPlayerItem: OutputRenderSourceDelegate { case .dropNextPacket: if let videoTrack = videoTrack as? AsyncPlayerItemTrack<VideoVTBFrame> { _ = videoTrack.packetQueue.pop { item, _ -> Bool in - if let corePacket = item.corePacket { - return corePacket.pointee.flags & AV_PKT_FLAG_KEY != AV_PKT_FLAG_KEY - } else { - return false - } + !item.isKeyFrame } } case .dropGOPPacket: @@ -781,11 +782,7 @@ var packet: Packet?
= nil repeat { packet = videoTrack.packetQueue.pop { item, _ -> Bool in - if let corePacket = item.corePacket { - return corePacket.pointee.flags & AV_PKT_FLAG_KEY != AV_PKT_FLAG_KEY - } else { - return false - } + !item.isKeyFrame } } while packet != nil } diff --git a/Sources/KSPlayer/MEPlayer/MEPlayerItemTrack.swift b/Sources/KSPlayer/MEPlayer/MEPlayerItemTrack.swift index 96368488d..dfbe6f03f 100644 --- a/Sources/KSPlayer/MEPlayer/MEPlayerItemTrack.swift +++ b/Sources/KSPlayer/MEPlayer/MEPlayerItemTrack.swift @@ -107,7 +107,24 @@ class SyncPlayerItemTrack<Frame: MEFrame>: PlayerItemTrackProtocol, CustomString outputRenderQueue.shutdown() } + private var lastPacketBytes = Int32(0) + private var lastPacketSeconds = Double(-1) + var bitrate = Double(0) fileprivate func doDecode(packet: Packet) { + if packet.isKeyFrame, packet.assetTrack.mediaType != .subtitle { + let seconds = packet.seconds + let diff = seconds - lastPacketSeconds + if lastPacketSeconds < 0 || diff < 0 { + bitrate = 0 + lastPacketBytes = 0 + lastPacketSeconds = seconds + } else if diff > 0.5 { + bitrate = Double(lastPacketBytes) / diff + lastPacketBytes = 0 + lastPacketSeconds = seconds + } + } + lastPacketBytes += packet.size let decoder = decoderMap.value(for: packet.assetTrack.trackID, default: makeDecode(assetTrack: packet.assetTrack)) decoder.decodeFrame(from: packet) { [weak self] result in guard let self else { diff --git a/Sources/KSPlayer/MEPlayer/Model.swift b/Sources/KSPlayer/MEPlayer/Model.swift index 426daf381..54bad2d21 100644 --- a/Sources/KSPlayer/MEPlayer/Model.swift +++ b/Sources/KSPlayer/MEPlayer/Model.swift @@ -79,8 +79,6 @@ public extension KSOptions { static var stackSize = 65536 static var isClearVideoWhereReplace = true /// true: AVSampleBufferAudioRenderer false: AVAudioEngine -// static var isUseAudioRenderer = KSOptions.isSpatialAudioEnabled - static var isUseAudioRenderer = false static var audioPlayerType: AudioOutput.Type = AudioEnginePlayer.self static var videoPlayerType: (VideoOutput & UIView).Type = MetalPlayView.self static func colorSpace(ycbcrMatrix: CFString?, transferFunction: CFString?) -> CGColorSpace? { @@ -178,6 +176,18 @@ final class Packet: ObjectQueueItem { var size: Int32 = 0 var assetTrack: FFmpegAssetTrack!
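The doDecode bookkeeping above turns packet sizes into a rolling bitrate estimate: bytes are accumulated per track and, once at least half a second separates two keyframes, divided by the elapsed time; a negative gap (a seek) resets the window. MEPlayerItem's videoBitrate/audioBitrate then multiply by 8 to report bits per second. A worked example with illustrative numbers:

```swift
// Illustrative numbers: a 2-second keyframe interval carrying 375_000 bytes.
let lastPacketBytes = 375_000.0
let diff = 2.0                        // seconds between keyframes (> 0.5)
let bitrate = lastPacketBytes / diff  // 187_500 bytes/s, stored on the track
let videoBitrate = Int(8 * bitrate)   // 1_500_000 b/s, what MediaPlayback reports
```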
private(set) var corePacket = av_packet_alloc() + var isKeyFrame: Bool { + if let corePacket { + return corePacket.pointee.flags & AV_PKT_FLAG_KEY == AV_PKT_FLAG_KEY + } else { + return false + } + } + + var seconds: Double { + assetTrack.timebase.cmtime(for: position).seconds + } + func fill() { guard let corePacket else { return diff --git a/Sources/KSPlayer/MEPlayer/Resample.swift b/Sources/KSPlayer/MEPlayer/Resample.swift index f75dc4306..6fb9afb17 100644 --- a/Sources/KSPlayer/MEPlayer/Resample.swift +++ b/Sources/KSPlayer/MEPlayer/Resample.swift @@ -198,9 +198,8 @@ class AudioSwresample: Swresample { } func transfer(avframe: UnsafeMutablePointer<AVFrame>) throws -> MEFrame { - let newDescriptor = AudioDescriptor(frame: avframe.pointee) - if !(descriptor == newDescriptor) || outChannel != descriptor.outChannel { - newDescriptor.setAudioSession(isUseAudioRenderer: descriptor.audioFormat.isInterleaved) + if !(descriptor == avframe.pointee) || outChannel != descriptor.outChannel { + let newDescriptor = AudioDescriptor(frame: avframe.pointee) if setup(descriptor: newDescriptor) { descriptor = newDescriptor } else { @@ -225,7 +224,7 @@ } public class AudioDescriptor: Equatable { - static let defaultValue = AudioDescriptor() +// static let defaultValue = AudioDescriptor() public let sampleRate: Int32 fileprivate let sampleFormat: AVSampleFormat fileprivate var channel: AVChannelLayout @@ -235,46 +234,49 @@ AVAudioChannelCount(channel.nb_channels) } - private init() { - channel = AVChannelLayout.defaultValue - outChannel = channel - sampleRate = 44100 - sampleFormat = AV_SAMPLE_FMT_FLT - audioFormat = AVAudioFormat(standardFormatWithSampleRate: Double(sampleRate), channelLayout: AVAudioChannelLayout(layoutTag: kAudioChannelLayoutTag_Stereo)!) + private convenience init() { + self.init(sampleFormat: AV_SAMPLE_FMT_FLT, sampleRate: 44100, channel: AVChannelLayout.defaultValue) } - init(codecpar: AVCodecParameters) { - channel = codecpar.ch_layout - outChannel = channel - let sampleRate = codecpar.sample_rate - if sampleRate <= 0 { - self.sampleRate = 44100 - } else { - self.sampleRate = sampleRate - } - sampleFormat = AVSampleFormat(rawValue: codecpar.format) - audioFormat = AVAudioFormat(standardFormatWithSampleRate: Double(self.sampleRate), channelLayout: AVAudioChannelLayout(layoutTag: kAudioChannelLayoutTag_Stereo)!) + convenience init(codecpar: AVCodecParameters) { + self.init(sampleFormat: AVSampleFormat(rawValue: codecpar.format), sampleRate: codecpar.sample_rate, channel: codecpar.ch_layout) } - init(frame: AVFrame) { - channel = frame.ch_layout + convenience init(frame: AVFrame) { + self.init(sampleFormat: AVSampleFormat(rawValue: frame.format), sampleRate: frame.sample_rate, channel: frame.ch_layout) + } + + init(sampleFormat: AVSampleFormat, sampleRate: Int32, channel: AVChannelLayout) { + self.channel = channel outChannel = channel - let sampleRate = frame.sample_rate if sampleRate <= 0 { self.sampleRate = 44100 } else { self.sampleRate = sampleRate } - sampleFormat = AVSampleFormat(rawValue: frame.format) - audioFormat = AVAudioFormat(standardFormatWithSampleRate: Double(self.sampleRate), channelLayout: AVAudioChannelLayout(layoutTag: kAudioChannelLayoutTag_Stereo)!)
+ self.sampleFormat = sampleFormat + #if os(macOS) + let channelCount = AVAudioChannelCount(2) + #else + let channelCount = KSOptions.outputNumberOfChannels(channelCount: AVAudioChannelCount(outChannel.nb_channels)) + #endif + audioFormat = AudioDescriptor.audioFormat(sampleFormat: sampleFormat, sampleRate: sampleRate, outChannel: &outChannel, channelCount: channelCount) } public static func == (lhs: AudioDescriptor, rhs: AudioDescriptor) -> Bool { lhs.sampleFormat == rhs.sampleFormat && lhs.sampleRate == rhs.sampleRate && lhs.channel == rhs.channel } - private func audioFormat(channelCount: AVAudioChannelCount, isUseAudioRenderer: Bool) { - if channelCount != self.channelCount { + public static func == (lhs: AudioDescriptor, rhs: AVFrame) -> Bool { + var sampleRate = rhs.sample_rate + if sampleRate <= 0 { + sampleRate = 44100 + } + return lhs.sampleFormat == AVSampleFormat(rawValue: rhs.format) && lhs.sampleRate == sampleRate && lhs.channel == rhs.ch_layout + } + + static func audioFormat(sampleFormat: AVSampleFormat, sampleRate: Int32, outChannel: inout AVChannelLayout, channelCount: AVAudioChannelCount) -> AVAudioFormat { + if channelCount != AVAudioChannelCount(outChannel.nb_channels) { av_channel_layout_default(&outChannel, Int32(channelCount)) } let layoutTag: AudioChannelLayoutTag @@ -321,20 +323,24 @@ public class AudioDescriptor: Equatable { commonFormat = .pcmFormatFloat32 interleaved = false } - interleaved = isUseAudioRenderer + interleaved = KSOptions.audioPlayerType == AudioRendererPlayer.self if !interleaved { commonFormat = .pcmFormatFloat32 } - audioFormat = AVAudioFormat(commonFormat: commonFormat, sampleRate: Double(sampleRate), interleaved: interleaved, channelLayout: AVAudioChannelLayout(layoutTag: layoutTag)!) -// AVAudioChannelLayout(layout: outChannel.layoutTag.channelLayout) + return AVAudioFormat(commonFormat: commonFormat, sampleRate: Double(sampleRate), interleaved: interleaved, channelLayout: AVAudioChannelLayout(layoutTag: layoutTag)!) 
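The static audioFormat(sampleFormat:sampleRate:outChannel:channelCount:) above now centralizes the output-format decision: the layout tag is derived from the (possibly reduced) channel count, and interleaving is tied to the selected output — AVSampleBufferAudioRenderer gets interleaved buffers, everything else non-interleaved Float32. A standalone sketch of that last step; the 48 kHz stereo values are illustrative:

```swift
import AVFAudio

// Interleave only for AVSampleBufferAudioRenderer, as the diff's logic does.
let interleaved = KSOptions.audioPlayerType == AudioRendererPlayer.self
let format = AVAudioFormat(commonFormat: .pcmFormatFloat32,
                           sampleRate: 48_000,
                           interleaved: interleaved,
                           channelLayout: AVAudioChannelLayout(layoutTag: kAudioChannelLayoutTag_Stereo)!)
```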
+ // AVAudioChannelLayout(layout: outChannel.layoutTag.channelLayout) } - public func setAudioSession(isUseAudioRenderer: Bool) { + public func setAudioFormat() -> AVAudioFormat { #if os(macOS) let channelCount = AVAudioChannelCount(2) #else - let channelCount = KSOptions.outputNumberOfChannels(channelCount: channelCount, isUseAudioRenderer: isUseAudioRenderer) + let channelCount = KSOptions.outputNumberOfChannels(channelCount: channelCount) #endif - audioFormat(channelCount: channelCount, isUseAudioRenderer: isUseAudioRenderer) + return AudioDescriptor.audioFormat(sampleFormat: sampleFormat, sampleRate: sampleRate, outChannel: &outChannel, channelCount: channelCount) + } + + public func updateAudioFormat() { + audioFormat = setAudioFormat() + } } diff --git a/Sources/KSPlayer/MEPlayer/VideoToolboxDecode.swift b/Sources/KSPlayer/MEPlayer/VideoToolboxDecode.swift index f673c096e..b8d7b40f5 100644 --- a/Sources/KSPlayer/MEPlayer/VideoToolboxDecode.swift +++ b/Sources/KSPlayer/MEPlayer/VideoToolboxDecode.swift @@ -50,7 +50,7 @@ class VideoToolboxDecode: DecodeProtocol { } guard status == noErr else { if status == kVTInvalidSessionErr || status == kVTVideoDecoderMalfunctionErr || status == kVTVideoDecoderBadDataErr { - if corePacket.flags & AV_PKT_FLAG_KEY == 1 { + if packet.isKeyFrame { completionHandler(.failure(NSError(errorCode: .codecVideoReceiveFrame, avErrorCode: status))) } } @@ -59,7 +59,7 @@ class VideoToolboxDecode: DecodeProtocol { let frame = VideoVTBFrame(fps: session.assetTrack.nominalFrameRate) frame.corePixelBuffer = imageBuffer frame.timebase = session.assetTrack.timebase - if packetFlags & AV_PKT_FLAG_KEY == 1, packetFlags & AV_PKT_FLAG_DISCARD != 0, self.lastPosition > 0 { + if packet.isKeyFrame, packetFlags & AV_PKT_FLAG_DISCARD != 0, self.lastPosition > 0 { self.startTime = self.lastPosition - pts } self.lastPosition = max(self.lastPosition, pts) @@ -72,7 +72,7 @@ class VideoToolboxDecode: DecodeProtocol { if status == noErr { VTDecompressionSessionWaitForAsynchronousFrames(session.decompressionSession) } else if status == kVTInvalidSessionErr || status == kVTVideoDecoderMalfunctionErr || status == kVTVideoDecoderBadDataErr { - if corePacket.flags & AV_PKT_FLAG_KEY == 1 { + if packet.isKeyFrame { throw NSError(errorCode: .codecVideoReceiveFrame, avErrorCode: status) } else { // Fix decode failures when the app returns to the foreground from the background diff --git a/Sources/KSPlayer/SwiftUI/KSVideoPlayerView.swift b/Sources/KSPlayer/SwiftUI/KSVideoPlayerView.swift index 347f1a6c5..8642da4e5 100644 --- a/Sources/KSPlayer/SwiftUI/KSVideoPlayerView.swift +++ b/Sources/KSPlayer/SwiftUI/KSVideoPlayerView.swift @@ -486,8 +486,8 @@ struct VideoSettingView: View { Label("Audio track", systemImage: "waveform") } } - - if let videoTracks = config.playerLayer?.player.tracks(mediaType: .video), !videoTracks.isEmpty { + let videoTracks = config.playerLayer?.player.tracks(mediaType: .video) + if let videoTracks, !videoTracks.isEmpty { Picker(selection: Binding { videoTracks.first { $0.isEnabled }?.trackID } set: { value in @@ -522,8 +522,11 @@ struct VideoSettingView: View { Button("Search Subtitle") { subtitleModel.searchSubtitle(query: subtitleTitle, languages: ["zh-cn"]) } + Text("Stream Type: \((videoTracks?.first { $0.isEnabled }?.fieldOrder ?? .progressive).description)") + Text("Audio bitrate: \(config.playerLayer?.player.audioBitrate ?? 0) b/s") + Text("Video bitrate: \(config.playerLayer?.player.videoBitrate ??
0) b/s") if let fileSize = config.playerLayer?.player.fileSize, fileSize > 0 { - Text("File Size \(String(format: "%.1f", fileSize / 1_000_000))MB") + Text("File Size: \(String(format: "%.1f", fileSize / 1_000_000))MB") } } .padding()