import Cocoa
import ScreenCaptureKit
import AVFoundation

class InairScreenCaptureRecord: NSObject, SCStreamDelegate, SCStreamOutput {
    @objc static let shareManager = InairScreenCaptureRecord()
    var screen: SCDisplay?
    var availableContent: SCShareableContent?
    var filter: SCContentFilter?
    var stream: SCStream!
    var audioSettings: [String: Any]!
    var tag: Int = 0
    var recordingBufferCallBack: ((_ buffer: CMSampleBuffer, _ tag: Int) -> Void)?
    private let videoSampleBufferQueue = DispatchQueue(label: "screenCaptureKit-samplecode.VideoSampleBufferQueue")
    private let audioSampleBufferQueue = DispatchQueue(label: "screenCaptureKit-samplecode.AudioSampleBufferQueue")
    // Check for screen-recording permission.
    var canRecord: Bool {
        get async {
            do {
                // This call throws if the app does not have screen-recording permission.
                _ = try await SCShareableContent.excludingDesktopWindows(false, onScreenWindowsOnly: true)
                return true
            } catch {
                return false
            }
        }
    }
    deinit {
        self.stopRecording()
    }
    override init() {
        super.init()
    }
    @objc func prepRecord(displayID: CGDirectDisplayID, tag: Int) {
        self.tag = tag
        Task {
            if await self.canRecord {
                do {
                    // Retrieve the shareable content that is available for capture.
                    let availableContent = try await SCShareableContent.excludingDesktopWindows(false, onScreenWindowsOnly: true)
                    self.availableContent = availableContent
                    self.updateAudioSettings()
                    // Find the display to record.
                    self.screen = self.availableContent?.displays.first(where: { displayID == $0.displayID })
                    self.filter = SCContentFilter(display: self.screen!, excludingApplications: [], exceptingWindows: [])
                    Task { await self.record(audioOnly: false, filter: self.filter!) }
                } catch {
                    print("Failed to get the shareable content: \(error.localizedDescription)")
                }
            } else {
                // No screen-recording permission; nothing to do.
                print("No screen-recording permission")
            }
        }
    }
    // Determine whether the CPU is Apple silicon (ARM) or Intel.
    @objc open func getCPUTypeIsARM() -> Bool {
        var size: size_t = MemoryLayout<Int32>.size
        var type: Int32 = 0
        sysctlbyname("hw.cputype", &type, &size, nil, 0)
        if type == CPU_TYPE_ARM64 {
            print("CPU type: ARM64")
            return true
        } else {
            print("CPU type: x86_64")
            return false
        }
    }
    func record(audioOnly: Bool, filter: SCContentFilter) async {
        if self.screen == nil {
            return
        }
        let streamConfig = SCStreamConfiguration()
        streamConfig.pixelFormat = OSType(kCVPixelFormatType_32BGRA) // output pixel format
        streamConfig.width = self.screen!.width
        streamConfig.height = self.screen!.height
        streamConfig.minimumFrameInterval = CMTime(value: 1, timescale: 60) // frame rate (at most 60 fps)
        streamConfig.showsCursor = true
        streamConfig.queueDepth = 5
        // To enable audio capture:
        // streamConfig.capturesAudio = true
        // streamConfig.sampleRate = audioSettings["AVSampleRateKey"] as! Int
        // streamConfig.channelCount = audioSettings["AVNumberOfChannelsKey"] as! Int
        self.stream = SCStream(filter: filter, configuration: streamConfig, delegate: self)
        do {
            try self.stream.addStreamOutput(self, type: .screen, sampleHandlerQueue: videoSampleBufferQueue)
            // To receive audio buffers as well:
            // try self.stream.addStreamOutput(self, type: .audio, sampleHandlerQueue: audioSampleBufferQueue)
            try await self.stream.startCapture()
        } catch {
            assertionFailure("capture failed".local)
            return
        }
    }
    @objc func stopRecording() {
        if self.stream != nil {
            self.stream.stopCapture { error in
                if let error = error {
                    print("stopCapture error: \(error.localizedDescription)")
                }
            }
        }
        self.stream = nil
        self.screen = nil
        self.availableContent = nil
    }
    // Configure the audio-capture parameters.
    func updateAudioSettings() {
        self.audioSettings = [AVSampleRateKey: 48000, AVNumberOfChannelsKey: 2] // reset audioSettings
        self.audioSettings[AVFormatIDKey] = kAudioFormatMPEG4AAC
        self.audioSettings[AVEncoderBitRateKey] = 256 * 1000
    }
    func stream(_ stream: SCStream, didOutputSampleBuffer sampleBuffer: CMSampleBuffer, of outputType: SCStreamOutputType) {
        guard sampleBuffer.isValid else { return }
        switch outputType {
        case .screen:
            if self.screen == nil {
                break
            }
            print("=========== video frame ===========")
            self.recordingBufferCallBack?(sampleBuffer, self.tag)
        case .audio:
            print("=========== audio frame (not handled) ===========")
        @unknown default:
            assertionFailure("unknown stream type".local)
        }
    }
    func stream(_ stream: SCStream, didStopWithError error: Error) { // stream error
        print("The stream stopped with an error:\n".local, error,
              "\nThis can happen when a window closes or the user stops capture from the UI".local)
        DispatchQueue.main.async {
            self.stopRecording()
        }
    }
    /// Extract the frame's width, height, and base address from a sample buffer.
    /// - Parameters:
    ///   - tag: identifies which screen recording the frame belongs to
    ///   - sampleBuffer: a single video frame
    ///   - completion: (data: base address, width, height, newTag: screen tag, sampleSize: frame size in bytes, RawSampleBuffer: the original sampleBuffer)
    func convertCMSampleBufferToData(_ tag: Int, sampleBuffer: CMSampleBuffer, completion: @escaping ((_ data: UnsafeMutableRawPointer?, _ width: Int, _ height: Int, _ newTag: Int, _ sampleSize: Int, _ RawSampleBuffer: CMSampleBuffer) -> Void)) {
        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return
        }
        CVPixelBufferLockBaseAddress(imageBuffer, CVPixelBufferLockFlags(rawValue: 0))
        let baseAddress = CVPixelBufferGetBaseAddress(imageBuffer)
        let height = CVPixelBufferGetHeight(imageBuffer)
        var width = CVPixelBufferGetWidth(imageBuffer)
        let bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer)
        print("====== (\(width), \(height)) ===== bytesPerRow = \(bytesPerRow)")
        // Total size of the pixel data in bytes.
        let sampleSize = CVPixelBufferGetDataSize(imageBuffer)
        // Pad the width to account for row-alignment padding (effectively bytesPerRow / 4).
        // This fixes garbled frames at some resolutions on Apple silicon; not needed on iOS.
        width = width + (sampleSize - width * height * 4) / (height * 4)
        CVPixelBufferUnlockBaseAddress(imageBuffer, CVPixelBufferLockFlags(rawValue: 0))
        if baseAddress == nil {
            completion(nil, width, height, tag, sampleSize, sampleBuffer)
            return
        }
        completion(baseAddress!, width, height, tag, sampleSize, sampleBuffer)
    }
}
extension String {
    var local: String { return NSLocalizedString(self, comment: "") }
}
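Tip: when canRecord comes back false, the app has not been granted the permission in System Settings > Privacy & Security > Screen Recording. A minimal sketch of preflighting and requesting that permission with the CoreGraphics calls (this helper is illustrative, not part of the class above):

import CoreGraphics

// Returns true if the app may capture the screen; otherwise triggers the
// one-time system prompt. The request call keeps returning false until the
// user grants access and the app is relaunched.
func ensureScreenCapturePermission() -> Bool {
    if CGPreflightScreenCaptureAccess() {
        return true
    }
    return CGRequestScreenCaptureAccess()
}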
Usage example
var ScreenCaptureRecord: InairScreenCaptureRecord?
var ScreenCaptureRecord1: InairScreenCaptureRecord?
@objc public func start() {
    // Get the screens to record.
    let customScreenArray = self.getDisplayScreen()
    print("-------- start recording ------------")
    for screen in customScreenArray {
        let displayID = screen.deviceDescription[NSDeviceDescriptionKey(rawValue: "NSScreenNumber")] as! CGDirectDisplayID
        let name = screen.localizedName.lowercased()
        if name.contains("screenname1") {
            self.ScreenCaptureRecord1 = InairScreenCaptureRecord()
            self.ScreenCaptureRecord1!.prepRecord(displayID: displayID, tag: 1)
        } else {
            // Record the main (built-in) screen.
            self.ScreenCaptureRecord = InairScreenCaptureRecord()
            self.ScreenCaptureRecord!.prepRecord(displayID: displayID, tag: 0)
        }
    }
    self.recordingBufferReceiveProcessing()
}
@objc public func stop() {
    print("-------- stop recording ---------")
    self.ScreenCaptureRecord?.stopRecording()
    self.ScreenCaptureRecord1?.stopRecording()
    self.ScreenCaptureRecord = nil
    self.ScreenCaptureRecord1 = nil
}
// Handle the received sample buffers.
func recordingBufferReceiveProcessing() {
    self.ScreenCaptureRecord?.recordingBufferCallBack = { (buffer, tag) in
        // Render directly into an NSView (via a layer or Metal).
        self.metalRender(with: buffer)
    }
    self.ScreenCaptureRecord1?.recordingBufferCallBack = { (buffer, tag) in
        self.metalRender(with: buffer)
    }
}
func getDisplayScreen() -> [NSScreen] {
    var customScreenArray: [NSScreen] = []
    for screen in NSScreen.screens {
        let displayId = screen.deviceDescription[NSDeviceDescriptionKey(rawValue: "NSScreenNumber")] as! CGDirectDisplayID
        // Check whether this is the built-in display.
        if CGDisplayIsBuiltin(displayId) != 0 {
            customScreenArray.append(screen)
        }
        let displayName: String = screen.localizedName
        if displayName.contains("screenname1") {
            customScreenArray.append(screen)
        }
    }
    return customScreenArray
}
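If you need the raw BGRA bytes rather than the sample buffer itself (for example, to feed them to an encoder), the callback can go through convertCMSampleBufferToData instead of rendering. A minimal sketch, where sendFrame is a hypothetical consumer of the bytes:

self.ScreenCaptureRecord?.recordingBufferCallBack = { (buffer, tag) in
    InairScreenCaptureRecord.shareManager.convertCMSampleBufferToData(tag, sampleBuffer: buffer) { data, width, height, newTag, sampleSize, rawBuffer in
        guard let data = data else { return }
        // `sendFrame` is hypothetical; the base address belongs to the pixel
        // buffer, so copy the bytes if they must outlive this callback.
        sendFrame(data, width, height, sampleSize, newTag)
    }
}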
Rendering
Layer rendering
var displayLayer: AVSampleBufferDisplayLayer?
func metalRender(with sampleBuffer: CMSampleBuffer) {
    // The callback arrives on the video sample-buffer queue, so hop to main.
    DispatchQueue.main.sync {
        if self.displayLayer == nil {
            self.displayLayer = AVSampleBufferDisplayLayer()
            self.displayLayer?.frame = self.view.bounds // frame of the view to render into
            // self.displayLayer?.videoGravity = .resizeAspectFill
            self.view.wantsLayer = true // make the view layer-backed
            self.view.layer?.addSublayer(self.displayLayer!)
        }
        self.displayLayer?.enqueue(sampleBuffer)
    }
}
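One caveat with AVSampleBufferDisplayLayer: once its status becomes .failed (for example after a display-configuration change), it silently drops everything you enqueue. A minimal recovery sketch, assuming the same displayLayer property, that flushes before enqueueing:

func enqueueWithRecovery(_ sampleBuffer: CMSampleBuffer) {
    guard let layer = self.displayLayer else { return }
    if layer.status == .failed {
        // Discard the queued buffers so the layer can start rendering again.
        layer.flush()
    }
    layer.enqueue(sampleBuffer)
}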
Metal rendering
import MetalKit
import CoreMedia
import MetalPerformanceShaders
var mtkView: MTKView? // presentation view
var processQueue: DispatchQueue? // processing queue
var textureCache: CVMetalTextureCache? // texture cache
var commandQueue: MTLCommandQueue? // command queue
var texture: MTLTexture? // texture
// Call this from your initializer, otherwise the delegate callbacks never fire.
setupMetal()
func setupMetal() {
    // 1. Create the MTKView (sized to whatever view you render into).
    self.mtkView = MTKView(frame: self.view.bounds)
    self.mtkView?.device = MTLCreateSystemDefaultDevice()
    self.view.addSubview(self.mtkView!)
    self.mtkView?.delegate = self
    // 2. Make the MTKView's drawable texture writable (it is read-only by default).
    self.mtkView?.framebufferOnly = false
    // 3. Create the command queue.
    self.commandQueue = self.mtkView?.device?.makeCommandQueue()
    // 4. Create the Core Video Metal texture cache.
    CVMetalTextureCacheCreate(nil, nil, self.mtkView!.device!, nil, &textureCache)
}
func metalRender(with sampleBuffer: CMSampleBuffer) {
    // 1. Get the pixel buffer from the sample buffer.
    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
        return
    }
    // 2. Read the captured frame's width and height.
    let width = CVPixelBufferGetWidth(pixelBuffer)
    let height = CVPixelBufferGetHeight(pixelBuffer)
    // 3. Create a Core Video Metal texture from the pixel buffer.
    var tmpTexture: CVMetalTexture?
    let status = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, self.textureCache!, pixelBuffer, nil, .bgra8Unorm, width, height, 0, &tmpTexture)
    if status == kCVReturnSuccess {
        // 4. Match the drawable size to the frame size.
        self.mtkView?.drawableSize = CGSize(width: CGFloat(width), height: CGFloat(height))
        // 5. Get the Metal texture backing the Core Video texture.
        self.texture = CVMetalTextureGetTexture(tmpTexture!)
    }
}
extension ScreenRecordingViewController: MTKViewDelegate {
    func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {
        print("Called when the drawable size changes")
    }
    func draw(in view: MTKView) {
        // Only draw when a captured frame's texture is available.
        if let texture = self.texture {
            // 1. Create a command buffer.
            let commandBuffer = commandQueue?.makeCommandBuffer()
            // 2. Use the MTKView's drawable as the destination texture.
            let drawingTexture = view.currentDrawable?.texture
            // 3. Create a Gaussian blur filter; the higher the sigma, the blurrier the image.
            let filter = MPSImageGaussianBlur(device: mtkView!.device!, sigma: 1)
            // 4. The filter reads and writes Metal textures.
            //    Input: the captured frame (self.texture)
            //    Output: drawingTexture (i.e. view.currentDrawable.texture)
            filter.encode(commandBuffer: commandBuffer!, sourceTexture: texture, destinationTexture: drawingTexture!)
            // 5. Present the drawable and commit the commands.
            commandBuffer?.present(view.currentDrawable!)
            commandBuffer?.commit()
            // 6. Clear the current texture so the next frame can be read.
            self.texture = nil
        }
    }
}
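The Gaussian blur above is really just a convenient texture-to-texture copy with sigma: 1. If you want the frame presented unmodified, a plain blit works under the same setup (framebufferOnly = false, drawableSize matching the frame size). A minimal sketch:

func drawPassthrough(in view: MTKView) {
    guard let texture = self.texture,
          let drawable = view.currentDrawable,
          let commandBuffer = commandQueue?.makeCommandBuffer(),
          let blit = commandBuffer.makeBlitCommandEncoder() else { return }
    // Copy the captured frame straight into the drawable's texture.
    // Assumes both textures have the same dimensions and pixel format (BGRA).
    blit.copy(from: texture, to: drawable.texture)
    blit.endEncoding()
    commandBuffer.present(drawable)
    commandBuffer.commit()
    self.texture = nil
}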