From 6062a8b88a8844cf412af0d1c0295573d7b5105c Mon Sep 17 00:00:00 2001
From: neon443 <69979447+neon443@users.noreply.github.com>
Date: Thu, 1 Jan 2026 20:52:41 +0000
Subject: [PATCH] made a lot of progress on the captureEngine

---
 CaptureEngine.swift              | 116 +++++++++++++++++++++++++++++++
 Scream.xcodeproj/project.pbxproj |   8 +--
 Scream/SCStreamOutput.swift      |  31 ---------
 3 files changed, 120 insertions(+), 35 deletions(-)
 create mode 100644 CaptureEngine.swift
 delete mode 100644 Scream/SCStreamOutput.swift

diff --git a/CaptureEngine.swift b/CaptureEngine.swift
new file mode 100644
index 0000000..bdf3cd3
--- /dev/null
+++ b/CaptureEngine.swift
@@ -0,0 +1,116 @@
+//
+//  CaptureEngine.swift
+//  Scream
+//
+//  Created by neon443 on 01/01/2026.
+//
+
+import Foundation
+import ScreenCaptureKit
+
+struct CapturedFrame {
+    static var invalid: CapturedFrame {
+        CapturedFrame(surface: nil, contentRect: .zero, contentScale: 0, scaleFactor: 0)
+    }
+
+    let surface: IOSurface?
+    let contentRect: CGRect
+    let contentScale: CGFloat
+    let scaleFactor: CGFloat
+    var size: CGSize { contentRect.size }
+}
+
+class CaptureEngine: NSObject {
+    private var stream: SCStream?
+    var streamOutput: StreamDelegate?
+    func startCapture(config: SCStreamConfiguration, filter: SCContentFilter) -> AsyncThrowingStream<CapturedFrame, Error> {
+        AsyncThrowingStream { continuation in
+//            let streamOutput = SCStreamOutput
+        }
+    }
+}
+
+class StreamHandler: NSObject, SCStreamOutput, SCStreamDelegate {
+    var pcmBufferHandler: ((AVAudioPCMBuffer) -> Void)?
+    var frameBufferHandler: ((CapturedFrame) -> Void)?
+
+    private var continuation: AsyncThrowingStream<CapturedFrame, Error>.Continuation?
+
+    init(continuation: AsyncThrowingStream<CapturedFrame, Error>.Continuation?) {
+        self.continuation = continuation
+    }
+
+    func stream(_ stream: SCStream, didOutputSampleBuffer sampleBuffer: CMSampleBuffer, of type: SCStreamOutputType) {
+        guard sampleBuffer.isValid else { return }
+
+        switch type {
+        case .screen:
+//            guard let frame =
+        case .audio:
+            <#code#>
+        case .microphone:
+            <#code#>
+        }
+    }
+
+    func createFrame(for sampleBuffer: CMSampleBuffer) -> CapturedFrame? {
+
+        guard let attachmentsArr = CMSampleBufferGetSampleAttachmentsArray(
+            sampleBuffer,
+            createIfNecessary: false
+        ) as? [[SCStreamFrameInfo: Any]],
+              let attachments = attachmentsArr.first else { return nil }
+
+        guard let statusRawValue = attachments[SCStreamFrameInfo.status] as? Int,
+              let status = SCFrameStatus(rawValue: statusRawValue),
+              status == .complete else { return nil }
+
+        guard let pixelBuffer = sampleBuffer.imageBuffer else { return nil }
+
+        guard let surfaceRef = CVPixelBufferGetIOSurface(pixelBuffer)?.takeUnretainedValue() else { return nil }
+        let surface = unsafeBitCast(surfaceRef, to: IOSurface.self)
+
+        guard let contentRectDict = attachments[.contentRect],
+              let contentRect = CGRect(dictionaryRepresentation: contentRectDict as! CFDictionary),
+              let contentScale = attachments[.contentScale] as? CGFloat,
+              let scaleFactor = attachments[.scaleFactor] as? CGFloat else { return nil }
+
+        var frame = CapturedFrame(surface: surface,
+                                  contentRect: contentRect,
+                                  contentScale: contentScale,
+                                  scaleFactor: scaleFactor)
+        return frame
+    }
+
+    private func handleAudio(for buffer: CMSampleBuffer) -> Void? {
+        try? buffer.withAudioBufferList { audioBufferList, blockBuffer in
+            guard let description = buffer.formatDescription?.audioStreamBasicDescription,
+                  let format = AVAudioFormat(standardFormatWithSampleRate: description.mSampleRate, channels: description.mChannelsPerFrame),
+                  let samples = AVAudioPCMBuffer(pcmFormat: format, bufferListNoCopy: audioBufferList.unsafePointer)
+            else { return }
+            print("got audiobuffer")
+            pcmBufferHandler?(samples)
+        }
+    }
+
+    func outputVideoEffectDidStart(for stream: SCStream) {
+        print("presenter overlay started")
+    }
+
+    func outputVideoEffectDidStop(for stream: SCStream) {
+        print("presenter overlay stopped")
+    }
+
+    func streamDidBecomeActive(_ stream: SCStream) {
+        print("stream became Active")
+    }
+
+    func streamDidBecomeInactive(_ stream: SCStream) {
+        print("stream became Inactive")
+    }
+
+    func stream(_ stream: SCStream, didStopWithError error: any Error) {
+        print(error.localizedDescription)
+        continuation?.finish(throwing: error)
+    }
+}
diff --git a/Scream.xcodeproj/project.pbxproj b/Scream.xcodeproj/project.pbxproj
index 1b33bab..134b9f8 100644
--- a/Scream.xcodeproj/project.pbxproj
+++ b/Scream.xcodeproj/project.pbxproj
@@ -14,8 +14,8 @@
 		A98E8BF92F05B2A5006D4458 /* ScreamUITests.swift in Sources */ = {isa = PBXBuildFile; fileRef = A98E8BF62F05B2A5006D4458 /* ScreamUITests.swift */; };
 		A98E8BFA2F05B2A5006D4458 /* ScreamUITestsLaunchTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = A98E8BF72F05B2A5006D4458 /* ScreamUITestsLaunchTests.swift */; };
 		A98E8BFD2F05D28D006D4458 /* ScreenRecorder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A98E8BFC2F05D28D006D4458 /* ScreenRecorder.swift */; };
-		A98E8BFF2F06F46C006D4458 /* SCStreamOutput.swift in Sources */ = {isa = PBXBuildFile; fileRef = A98E8BFE2F06F46C006D4458 /* SCStreamOutput.swift */; };
 		A98E8C012F06F496006D4458 /* StreamDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = A98E8C002F06F496006D4458 /* StreamDelegate.swift */; };
+		A9D7225F2F070FE600050BB0 /* CaptureEngine.swift in Sources */ = {isa = PBXBuildFile; fileRef = A9D7225E2F070FE600050BB0 /* CaptureEngine.swift */; };
 /* End PBXBuildFile section */
 
 /* Begin PBXContainerItemProxy section */
@@ -47,8 +47,8 @@
 		A98E8BF72F05B2A5006D4458 /* ScreamUITestsLaunchTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ScreamUITestsLaunchTests.swift; sourceTree = "<group>"; };
 		A98E8BFB2F05C7B6006D4458 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist; path = Info.plist; sourceTree = "<group>"; };
 		A98E8BFC2F05D28D006D4458 /* ScreenRecorder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ScreenRecorder.swift; sourceTree = "<group>"; };
-		A98E8BFE2F06F46C006D4458 /* SCStreamOutput.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SCStreamOutput.swift; sourceTree = "<group>"; };
 		A98E8C002F06F496006D4458 /* StreamDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamDelegate.swift; sourceTree = "<group>"; };
+		A9D7225E2F070FE600050BB0 /* CaptureEngine.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CaptureEngine.swift; sourceTree = "<group>"; };
 /* End PBXFileReference section */
 
 /* Begin PBXFrameworksBuildPhase section */
@@ -79,6 +79,7 @@
 		A98E8BB72F05B26B006D4458 = {
 			isa = PBXGroup;
 			children = (
+				A9D7225E2F070FE600050BB0 /* CaptureEngine.swift */,
 				A98E8BEF2F05B2A0006D4458 /* Scream */,
 				A98E8BF42F05B2A2006D4458 /* ScreamTests */,
 				A98E8BF82F05B2A5006D4458 /* ScreamUITests */,
@@ -105,7 +106,6 @@
 				A98E8BEE2F05B2A0006D4458 /* MainMenu.xib */,
 				A98E8BFC2F05D28D006D4458 /* ScreenRecorder.swift */,
 				A98E8C002F06F496006D4458 /* StreamDelegate.swift */,
-				A98E8BFE2F06F46C006D4458 /* SCStreamOutput.swift */,
 			);
 			path = Scream;
 			sourceTree = "<group>";
@@ -265,7 +265,7 @@
 			buildActionMask = 2147483647;
 			files = (
 				A98E8BF02F05B2A0006D4458 /* AppDelegate.swift in Sources */,
-				A98E8BFF2F06F46C006D4458 /* SCStreamOutput.swift in Sources */,
+				A9D7225F2F070FE600050BB0 /* CaptureEngine.swift in Sources */,
 				A98E8C012F06F496006D4458 /* StreamDelegate.swift in Sources */,
 				A98E8BFD2F05D28D006D4458 /* ScreenRecorder.swift in Sources */,
 			);
diff --git a/Scream/SCStreamOutput.swift b/Scream/SCStreamOutput.swift
deleted file mode 100644
index 44afd23..0000000
--- a/Scream/SCStreamOutput.swift
+++ /dev/null
@@ -1,31 +0,0 @@
-//
-//  SCStreamOutput.swift
-//  Scream
-//
-//  Created by neon443 on 01/01/2026.
-//
-
-import Foundation
-import ScreenCaptureKit
-
-class StreamOutputDelegate: NSObject, SCStreamOutput {
-    func stream(_ stream: SCStream, didOutputSampleBuffer sampleBuffer: CMSampleBuffer, of type: SCStreamOutputType) {
-        guard sampleBuffer.isValid else { return }
-
-        switch type {
-        case .screen:
-            print("got a screen buffer")
-            guard let attachmentsArr = CMSampleBufferGetSampleAttachmentsArray(
-                sampleBuffer,
-                createIfNecessary: false
-            ) as? [[SCStreamFrameInfo: Any]],
-                  let attachments = attachmentsArr.first else { return }
-        case .audio:
-            print("got an audio buffer")
-        case .microphone:
-            print("got a mic buffer")
-        @unknown default:
-            fatalError("wtf is ur stream sample type")
-        }
-    }
-}