more stuff for the recorder

apple sample code my goat
This commit is contained in:
neon443
2026-01-01 21:42:15 +00:00
parent 6062a8b88a
commit 018a589c5a
4 changed files with 89 additions and 69 deletions

View File

@@ -22,10 +22,47 @@ struct CapturedFrame {
class CaptureEngine: NSObject {
private var stream: SCStream?
var streamOutput: StreamDelegate?
var streamOutput: StreamHandler?
let videoSampleBufferQueue = DispatchQueue(label: "videoSampleBufferQueue")
let audioSampleBufferQueue = DispatchQueue(label: "audioSampleBufferQueue")
let micSampleBufferQueue = DispatchQueue(label: "micSampleBufferQueue")
private var continuation: AsyncThrowingStream<CapturedFrame, Error>.Continuation?
/// Builds an SCStream from `filter`/`config`, wires up the output handler, and
/// returns an async stream of captured screen frames.
/// - Parameters:
///   - config: Stream configuration (resolution, frame rate, audio flags).
///   - filter: Content filter selecting which display/windows/apps to capture.
/// - Returns: An `AsyncThrowingStream` yielding one `CapturedFrame` per screen
///   sample; it finishes (possibly with an error) when setup fails or capture stops.
func startCapture(config: SCStreamConfiguration, filter: SCContentFilter) -> AsyncThrowingStream<CapturedFrame, Error> {
    AsyncThrowingStream<CapturedFrame, Error> { continuation in
        // Bug fix: store the continuation so stopCapture() can finish the
        // stream. Previously it was never assigned, so the `continuation?.finish()`
        // calls in stopCapture() were no-ops.
        self.continuation = continuation
        let streamOutput = StreamHandler(continuation: continuation)
        self.streamOutput = streamOutput
        streamOutput.frameBufferHandler = { continuation.yield($0) }
        streamOutput.pcmBufferHandler = { print($0) }
        do {
            stream = SCStream(filter: filter, configuration: config, delegate: streamOutput)
            try stream?.addStreamOutput(streamOutput, type: .screen, sampleHandlerQueue: videoSampleBufferQueue)
            try stream?.addStreamOutput(streamOutput, type: .audio, sampleHandlerQueue: audioSampleBufferQueue)
            // Bug fix: microphone samples were delivered on the *video* queue;
            // use the dedicated mic queue declared on this class.
            try stream?.addStreamOutput(streamOutput, type: .microphone, sampleHandlerQueue: micSampleBufferQueue)
            // Bug fix: the stream was configured but never started, so no
            // samples would ever arrive. Matches Apple's sample code.
            stream?.startCapture()
        } catch {
            // Surface the setup failure to the consumer of the stream.
            continuation.finish(throwing: error)
        }
    }
}
/// Stops the running capture session (if any) and finishes the async frame stream.
/// NOTE(review): this relies on `continuation` having been stored when the stream
/// was created — confirm startCapture assigns `self.continuation`, otherwise both
/// finish calls below are no-ops and consumers never see the stream end.
func stopCapture() async {
do {
// Tear down the ScreenCaptureKit stream; throws if it was not running.
try await stream?.stopCapture()
// Signal normal end-of-stream to the AsyncThrowingStream consumer.
continuation?.finish()
} catch {
// Propagate the teardown failure to the stream consumer.
continuation?.finish(throwing: error)
}
}
/// Applies a new configuration and content filter to the live stream without
/// restarting capture.
/// - Parameters:
///   - config: Replacement stream configuration.
///   - filter: Replacement content filter.
func update(config: SCStreamConfiguration, filter: SCContentFilter) async {
do {
try await stream?.updateConfiguration(config)
try await stream?.updateContentFilter(filter)
} catch {
// NOTE(review): failures are only printed, so callers cannot observe a
// failed update — consider rethrowing or surfacing this to the UI.
print(error)
}
}
}
@@ -45,16 +82,16 @@ class StreamHandler: NSObject, SCStreamOutput, SCStreamDelegate {
switch type {
case .screen:
// guard let frame =
guard let frame = createFrame(for: sampleBuffer) else { return }
frameBufferHandler?(frame)
case .audio:
<#code#>
handleAudio(for: sampleBuffer)
case .microphone:
<#code#>
print("idk what to do with mic buffers")
}
}
func createFrame(for sampleBuffer: CMSampleBuffer) -> CapturedFrame? {
guard let attachmentsArr = CMSampleBufferGetSampleAttachmentsArray(
sampleBuffer,
createIfNecessary: false

View File

@@ -14,7 +14,6 @@
A98E8BF92F05B2A5006D4458 /* ScreamUITests.swift in Sources */ = {isa = PBXBuildFile; fileRef = A98E8BF62F05B2A5006D4458 /* ScreamUITests.swift */; };
A98E8BFA2F05B2A5006D4458 /* ScreamUITestsLaunchTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = A98E8BF72F05B2A5006D4458 /* ScreamUITestsLaunchTests.swift */; };
A98E8BFD2F05D28D006D4458 /* ScreenRecorder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A98E8BFC2F05D28D006D4458 /* ScreenRecorder.swift */; };
A98E8C012F06F496006D4458 /* StreamDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = A98E8C002F06F496006D4458 /* StreamDelegate.swift */; };
A9D7225F2F070FE600050BB0 /* CaptureEngine.swift in Sources */ = {isa = PBXBuildFile; fileRef = A9D7225E2F070FE600050BB0 /* CaptureEngine.swift */; };
/* End PBXBuildFile section */
@@ -47,7 +46,6 @@
A98E8BF72F05B2A5006D4458 /* ScreamUITestsLaunchTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ScreamUITestsLaunchTests.swift; sourceTree = "<group>"; };
A98E8BFB2F05C7B6006D4458 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist; path = Info.plist; sourceTree = "<group>"; };
A98E8BFC2F05D28D006D4458 /* ScreenRecorder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ScreenRecorder.swift; sourceTree = "<group>"; };
A98E8C002F06F496006D4458 /* StreamDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamDelegate.swift; sourceTree = "<group>"; };
A9D7225E2F070FE600050BB0 /* CaptureEngine.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CaptureEngine.swift; sourceTree = "<group>"; };
/* End PBXFileReference section */
@@ -105,7 +103,6 @@
A98E8BEC2F05B2A0006D4458 /* Assets.xcassets */,
A98E8BEE2F05B2A0006D4458 /* MainMenu.xib */,
A98E8BFC2F05D28D006D4458 /* ScreenRecorder.swift */,
A98E8C002F06F496006D4458 /* StreamDelegate.swift */,
);
path = Scream;
sourceTree = "<group>";
@@ -266,7 +263,6 @@
files = (
A98E8BF02F05B2A0006D4458 /* AppDelegate.swift in Sources */,
A9D7225F2F070FE600050BB0 /* CaptureEngine.swift in Sources */,
A98E8C012F06F496006D4458 /* StreamDelegate.swift in Sources */,
A98E8BFD2F05D28D006D4458 /* ScreenRecorder.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;

View File

@@ -11,15 +11,37 @@ import ScreenCaptureKit
class ScreenRecorder: NSObject {
var isRunning: Bool = false
var isAppExluded: Bool = false
var isAudioEnabled: Bool = true
var filter: SCContentFilter?
var streamConfig = SCStreamConfiguration()
var stream: SCStream?
var streamDelegate = StreamDelegate()
var streamOutput = StreamOutputDelegate()
var isAudioEnabled: Bool = false
let videoSampleBufferQueue = DispatchQueue(label: "videoSampleBufferQueue")
let audioSampleBufferQueue = DispatchQueue(label: "audioSampleBufferQueue")
var filter: SCContentFilter?
// var filter: SCContentFilter
//
// var excludedApps = [SCRunningApplication]()
// //if users exclude Scream from the screen share
// //exclude by matching bundleid
//// if isAppExluded {
//// excludedApps = availableContent.applications.filter { app in
//// Bundle.main.bundleIdentifier == app.bundleIdentifier
//// }
//// }
// filter = SCContentFilter(display: availableContent.displays.first!, excludingApplications: excludedApps, exceptingWindows: [])
// }
// Builds a fresh SCStreamConfiguration on every access, reflecting the current
// recorder settings (audio toggle, main-screen size, 20 fps cap).
var streamConfig: SCStreamConfiguration {
var streamConfig = SCStreamConfiguration()
//TODO: hdr
streamConfig.capturesAudio = isAudioEnabled
streamConfig.excludesCurrentProcessAudio = false
// streamConfig.captureMicrophone = true
// NOTE(review): NSScreen.frame is in points, not pixels — on Retina displays
// the pixel size is frame * backingScaleFactor. Confirm the intended capture
// resolution; the 100-point fallback when no screen exists also looks arbitrary.
streamConfig.width = Int(NSScreen.main?.frame.width ?? 100)
streamConfig.height = Int(NSScreen.main?.frame.height ?? 100)
// Cap at 20 fps: at most one frame per 1/20 second.
streamConfig.minimumFrameInterval = CMTime(value: 1, timescale: 20)
streamConfig.queueDepth = 5
return streamConfig
}
let captureEngine = CaptureEngine()
var canRecord: Bool {
true
@@ -35,7 +57,6 @@ class ScreenRecorder: NSObject {
print(error.localizedDescription)
return
}
var excludedApps = [SCRunningApplication]()
//if users exclude Scream from the screen share
//exclude by matching bundleid
@@ -46,25 +67,22 @@ class ScreenRecorder: NSObject {
}
filter = SCContentFilter(display: availableContent.displays.first!, excludingApplications: excludedApps, exceptingWindows: [])
//TODO: hdr
do {
isRunning = true
for try await frame in captureEngine.startCapture(config: streamConfig, filter: filter!) {
print(frame)
}
} catch {
isRunning = false
print(error.localizedDescription)
}
//TODO: update the config using stream.updateConfiguration or .updateContentFilter
}
streamConfig.capturesAudio = isAudioEnabled
streamConfig.excludesCurrentProcessAudio = true
// streamConfig.captureMicrophone = true
streamConfig.width = Int(NSScreen.main?.frame.width ?? 100)
streamConfig.height = Int(NSScreen.main?.frame.height ?? 100)
streamConfig.minimumFrameInterval = CMTime(value: 1, timescale: 20)
streamConfig.queueDepth = 5
stream = SCStream(filter: filter!, configuration: streamConfig, delegate: streamDelegate)
try! stream?.addStreamOutput(streamOutput, type: .screen, sampleHandlerQueue: videoSampleBufferQueue)
try! stream?.addStreamOutput(streamOutput, type: .audio, sampleHandlerQueue: audioSampleBufferQueue)
// try! stream?.addStreamOutput(streamOutput, type: .microphone, sampleHandlerQueue: videoSampleBufferQueue)
//update the config using stream.updateConfiguration or .updateContentFilter
/// Stops an active recording; a no-op when no recording is in progress.
func stop() async {
    if isRunning {
        await captureEngine.stopCapture()
        isRunning = false
    }
}
}

View File

@@ -1,31 +0,0 @@
//
// StreamDelegate.swift
// Scream
//
// Created by neon443 on 01/01/2026.
//
import Foundation
import ScreenCaptureKit
/// Logs SCStream lifecycle and presenter-overlay callbacks to the console.
class StreamDelegate: NSObject, SCStreamDelegate {

    // MARK: - Stream lifecycle

    func streamDidBecomeActive(_ stream: SCStream) {
        print("stream became Active")
    }

    func streamDidBecomeInactive(_ stream: SCStream) {
        print("stream became Inactive")
    }

    func stream(_ stream: SCStream, didStopWithError error: any Error) {
        // Log only the human-readable description of the failure.
        print(error.localizedDescription)
    }

    // MARK: - Presenter overlay

    func outputVideoEffectDidStart(for stream: SCStream) {
        print("presenter overlay started")
    }

    func outputVideoEffectDidStop(for stream: SCStream) {
        print("presenter overlay stopped")
    }
}