made a lot of progress on the captureEngine

This commit is contained in:
neon443
2026-01-01 20:52:41 +00:00
parent 91319f7fc3
commit 6062a8b88a
3 changed files with 120 additions and 35 deletions

116
CaptureEngine.swift Normal file
View File

@@ -0,0 +1,116 @@
//
// CaptureEngine.swift
// Scream
//
// Created by neon443 on 01/01/2026.
//
import AVFAudio
import Foundation
import ScreenCaptureKit
/// A single video frame delivered by ScreenCaptureKit, pairing the raw
/// `IOSurface` with the geometry metadata attached to its sample buffer.
struct CapturedFrame {
	/// Backing surface holding the pixel data; `nil` when the frame carries none.
	let surface: IOSurface?
	/// Rectangle of meaningful content within the surface, in points.
	let contentRect: CGRect
	/// Scale the capture pipeline applied to the content.
	let contentScale: CGFloat
	/// Display scale factor (e.g. 2 on a Retina display).
	let scaleFactor: CGFloat

	/// Sentinel value used when no valid frame is available.
	static var invalid: CapturedFrame {
		CapturedFrame(surface: nil, contentRect: .zero, contentScale: 0, scaleFactor: 0)
	}

	/// Convenience accessor for the content's size.
	var size: CGSize { contentRect.size }
}
/// Owns the `SCStream` lifecycle and exposes captured video frames as an
/// `AsyncThrowingStream` of `CapturedFrame`s.
class CaptureEngine: NSObject {
	/// The active ScreenCaptureKit stream, retained for the duration of a capture.
	private var stream: SCStream?
	/// Retains the output/delegate handler; SCStream does not keep it alive itself.
	private var streamHandler: StreamHandler?
	var streamOutput: StreamDelegate?

	/// Starts capturing with the given configuration and content filter.
	///
	/// - Parameters:
	///   - config: Stream configuration (resolution, frame rate, etc.).
	///   - filter: Content filter selecting what to capture.
	/// - Returns: A stream that yields each captured frame and finishes with an
	///   error if the capture fails to start or stops with an error.
	func startCapture(config: SCStreamConfiguration, filter: SCContentFilter) -> AsyncThrowingStream<CapturedFrame, Error> {
		AsyncThrowingStream<CapturedFrame, Error> { continuation in
			// The handler yields frames into the continuation and finishes it
			// if the stream stops with an error (see StreamHandler below).
			let handler = StreamHandler(continuation: continuation)
			self.streamHandler = handler
			do {
				let stream = SCStream(filter: filter, configuration: config, delegate: handler)
				self.stream = stream
				// Receive video sample buffers off the main thread.
				try stream.addStreamOutput(handler, type: .screen, sampleHandlerQueue: .global(qos: .userInteractive))
				stream.startCapture { error in
					if let error {
						continuation.finish(throwing: error)
					}
				}
			} catch {
				// Surface setup failures to the consumer instead of hanging forever.
				continuation.finish(throwing: error)
			}
		}
	}
}
/// Handles `SCStream` output and delegate callbacks, decoding video sample
/// buffers into `CapturedFrame`s and audio sample buffers into PCM buffers.
class StreamHandler: NSObject, SCStreamOutput, SCStreamDelegate {
	/// Invoked with each decoded PCM audio buffer.
	var pcmBufferHandler: ((AVAudioPCMBuffer) -> Void)?
	/// Invoked with each decoded video frame.
	var frameBufferHandler: ((CapturedFrame) -> Void)?
	/// Continuation feeding frames and errors into the engine's async stream.
	private var continuation: AsyncThrowingStream<CapturedFrame, Error>.Continuation?

	init(continuation: AsyncThrowingStream<CapturedFrame, Error>.Continuation?) {
		self.continuation = continuation
	}

	func stream(_ stream: SCStream, didOutputSampleBuffer sampleBuffer: CMSampleBuffer, of type: SCStreamOutputType) {
		guard sampleBuffer.isValid else { return }
		switch type {
		case .screen:
			// Not every buffer carries a complete frame (e.g. idle updates).
			guard let frame = createFrame(for: sampleBuffer) else { return }
			continuation?.yield(frame)
			frameBufferHandler?(frame)
		case .audio:
			handleAudio(for: sampleBuffer)
		case .microphone:
			// Microphone capture is not handled yet.
			break
		@unknown default:
			// Ignore output types added by future OS versions instead of crashing.
			break
		}
	}

	/// Decodes a video sample buffer into a `CapturedFrame`.
	/// - Returns: `nil` when the buffer is incomplete or missing required metadata.
	func createFrame(for sampleBuffer: CMSampleBuffer) -> CapturedFrame? {
		// Per-frame metadata travels in the sample buffer's attachments array.
		guard let attachmentsArr = CMSampleBufferGetSampleAttachmentsArray(
			sampleBuffer,
			createIfNecessary: false
		) as? [[SCStreamFrameInfo: Any]],
			  let attachments = attachmentsArr.first else { return nil }
		// Only .complete frames contain displayable content.
		guard let statusRawValue = attachments[SCStreamFrameInfo.status] as? Int,
			  let status = SCFrameStatus(rawValue: statusRawValue),
			  status == .complete else { return nil }
		guard let pixelBuffer = sampleBuffer.imageBuffer else { return nil }
		guard let surfaceRef = CVPixelBufferGetIOSurface(pixelBuffer)?.takeUnretainedValue() else { return nil }
		// IOSurfaceRef and IOSurface are toll-free bridged; unsafeBitCast is the
		// conventional way to cross from the Core Foundation type.
		let surface = unsafeBitCast(surfaceRef, to: IOSurface.self)
		// Validate the dictionary with a conditional cast instead of force-casting;
		// a malformed attachment then yields nil rather than a crash.
		guard let contentRectDict = attachments[.contentRect] as? NSDictionary,
			  let contentRect = CGRect(dictionaryRepresentation: contentRectDict as CFDictionary),
			  let contentScale = attachments[.contentScale] as? CGFloat,
			  let scaleFactor = attachments[.scaleFactor] as? CGFloat else { return nil }
		return CapturedFrame(
			surface: surface,
			contentRect: contentRect,
			contentScale: contentScale,
			scaleFactor: scaleFactor
		)
	}

	/// Decodes an audio sample buffer into an `AVAudioPCMBuffer` and forwards it
	/// to `pcmBufferHandler`. Buffers that cannot be decoded are dropped.
	private func handleAudio(for buffer: CMSampleBuffer) {
		try? buffer.withAudioBufferList { audioBufferList, _ in
			guard let description = buffer.formatDescription?.audioStreamBasicDescription,
				  let format = AVAudioFormat(standardFormatWithSampleRate: description.mSampleRate, channels: description.mChannelsPerFrame),
				  let samples = AVAudioPCMBuffer(pcmFormat: format, bufferListNoCopy: audioBufferList.unsafePointer)
			else { return }
			pcmBufferHandler?(samples)
		}
	}

	// MARK: - SCStreamDelegate

	func outputVideoEffectDidStart(for stream: SCStream) {
		print("presenter overlay started")
	}

	func outputVideoEffectDidStop(for stream: SCStream) {
		print("presenter overlay stopped")
	}

	func streamDidBecomeActive(_ stream: SCStream) {
		print("stream became Active")
	}

	func streamDidBecomeInactive(_ stream: SCStream) {
		print("stream became Inactive")
	}

	func stream(_ stream: SCStream, didStopWithError error: any Error) {
		print(error.localizedDescription)
		// Propagate the failure to the async stream's consumer.
		continuation?.finish(throwing: error)
	}
}

View File

@@ -14,8 +14,8 @@
A98E8BF92F05B2A5006D4458 /* ScreamUITests.swift in Sources */ = {isa = PBXBuildFile; fileRef = A98E8BF62F05B2A5006D4458 /* ScreamUITests.swift */; };
A98E8BFA2F05B2A5006D4458 /* ScreamUITestsLaunchTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = A98E8BF72F05B2A5006D4458 /* ScreamUITestsLaunchTests.swift */; };
A98E8BFD2F05D28D006D4458 /* ScreenRecorder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A98E8BFC2F05D28D006D4458 /* ScreenRecorder.swift */; };
A98E8BFF2F06F46C006D4458 /* SCStreamOutput.swift in Sources */ = {isa = PBXBuildFile; fileRef = A98E8BFE2F06F46C006D4458 /* SCStreamOutput.swift */; };
A98E8C012F06F496006D4458 /* StreamDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = A98E8C002F06F496006D4458 /* StreamDelegate.swift */; };
A9D7225F2F070FE600050BB0 /* CaptureEngine.swift in Sources */ = {isa = PBXBuildFile; fileRef = A9D7225E2F070FE600050BB0 /* CaptureEngine.swift */; };
/* End PBXBuildFile section */
/* Begin PBXContainerItemProxy section */
@@ -47,8 +47,8 @@
A98E8BF72F05B2A5006D4458 /* ScreamUITestsLaunchTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ScreamUITestsLaunchTests.swift; sourceTree = "<group>"; };
A98E8BFB2F05C7B6006D4458 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist; path = Info.plist; sourceTree = "<group>"; };
A98E8BFC2F05D28D006D4458 /* ScreenRecorder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ScreenRecorder.swift; sourceTree = "<group>"; };
A98E8BFE2F06F46C006D4458 /* SCStreamOutput.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SCStreamOutput.swift; sourceTree = "<group>"; };
A98E8C002F06F496006D4458 /* StreamDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamDelegate.swift; sourceTree = "<group>"; };
A9D7225E2F070FE600050BB0 /* CaptureEngine.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CaptureEngine.swift; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
@@ -79,6 +79,7 @@
A98E8BB72F05B26B006D4458 = {
isa = PBXGroup;
children = (
A9D7225E2F070FE600050BB0 /* CaptureEngine.swift */,
A98E8BEF2F05B2A0006D4458 /* Scream */,
A98E8BF42F05B2A2006D4458 /* ScreamTests */,
A98E8BF82F05B2A5006D4458 /* ScreamUITests */,
@@ -105,7 +106,6 @@
A98E8BEE2F05B2A0006D4458 /* MainMenu.xib */,
A98E8BFC2F05D28D006D4458 /* ScreenRecorder.swift */,
A98E8C002F06F496006D4458 /* StreamDelegate.swift */,
A98E8BFE2F06F46C006D4458 /* SCStreamOutput.swift */,
);
path = Scream;
sourceTree = "<group>";
@@ -265,7 +265,7 @@
buildActionMask = 2147483647;
files = (
A98E8BF02F05B2A0006D4458 /* AppDelegate.swift in Sources */,
A98E8BFF2F06F46C006D4458 /* SCStreamOutput.swift in Sources */,
A9D7225F2F070FE600050BB0 /* CaptureEngine.swift in Sources */,
A98E8C012F06F496006D4458 /* StreamDelegate.swift in Sources */,
A98E8BFD2F05D28D006D4458 /* ScreenRecorder.swift in Sources */,
);

View File

@@ -1,31 +0,0 @@
//
// SCStreamOutput.swift
// Scream
//
// Created by neon443 on 01/01/2026.
//
import Foundation
import ScreenCaptureKit
/// Receives sample buffers from an `SCStream` and logs their type.
/// (Superseded by `StreamHandler` in CaptureEngine.swift.)
class StreamOutputDelegate: NSObject, SCStreamOutput {
	func stream(_ stream: SCStream, didOutputSampleBuffer sampleBuffer: CMSampleBuffer, of type: SCStreamOutputType) {
		guard sampleBuffer.isValid else { return }
		switch type {
		case .screen:
			print("got a screen buffer")
			// Per-frame metadata travels in the buffer's attachments array.
			guard let attachmentsArr = CMSampleBufferGetSampleAttachmentsArray(
				sampleBuffer,
				createIfNecessary: false
			) as? [[SCStreamFrameInfo: Any]],
				  let attachments = attachmentsArr.first else { return }
			// Silence the unused-binding warning until frame decoding is implemented.
			_ = attachments
		case .audio:
			print("got an audio buffer")
		case .microphone:
			print("got a mic buffer")
		@unknown default:
			// Don't crash on output types introduced by future OS versions.
			print("unhandled stream output type")
		}
	}
}