Added timestamp and CVPixelBufferRef to CapturedFrame.
Added actual encoding of the captured frame.
Added a getEncoderSettings function.
Added a configureVTCompressionSession function.
Added an NSError check helper.
Added the ScreamPacket data schema.
This commit is contained in:
neon443
2026-01-03 20:52:27 +00:00
parent 7b3e260d57
commit 739b19bfb3
3 changed files with 134 additions and 10 deletions

View File

@@ -6,19 +6,22 @@
//
import Foundation
import Cocoa
import ScreenCaptureKit
import VideoToolbox
/// A single frame captured from an `SCStream`, carrying both the raw
/// IOSurface and a pixel buffer suitable for VideoToolbox encoding.
struct CapturedFrame {
    /// Sentinel frame used when a sample buffer could not be converted
    /// into a usable frame.
    /// (Fix: the stale pre-refactor initializer call that was left next to
    /// the new one has been removed — two bare expressions in a computed
    /// property would not compile.)
    static var invalid: CapturedFrame {
        CapturedFrame(surface: nil, pixelBuffer: nil, contentRect: .zero, contentScale: 0, scaleFactor: 0, timestamp: .invalid)
    }
    let surface: IOSurface?
    /// Backing pixel buffer handed to `VTCompressionSessionEncodeFrame`.
    let pixelBuffer: CVPixelBuffer?
    let contentRect: CGRect
    let contentScale: CGFloat
    let scaleFactor: CGFloat
    /// Convenience accessor for the frame's size in points.
    var size: CGSize { contentRect.size }
    /// Presentation timestamp taken from the source sample buffer
    /// (via `CMSampleBufferGetPresentationTimeStamp`).
    var timestamp: CMTime
}
class CaptureEngine: NSObject {
@@ -37,15 +40,16 @@ class CaptureEngine: NSObject {
var compressionSessionOut: VTCompressionSession?
let err = VTCompressionSessionCreate(
allocator: kCFAllocatorDefault,
width: 1600,
height: 900,
width: 3200,
height: 1800,
codecType: kCMVideoCodecType_H264,
encoderSpecification: videoEncoderSpec,
imageBufferAttributes: sourceImageBufferAttrs,
compressedDataAllocator: nil,
outputCallback: { outputCallbackRefCon, sourceFrameRefCon, status, infoFlags, samplebuffer in
print(status)
},
outputCallback: nil,
// outputCallback: { outputCallbackRefCon, sourceFrameRefCon, status, infoFlags, samplebuffer in
// print(status)
// },
refcon: nil,
compressionSessionOut: &compressionSessionOut
)
@@ -57,14 +61,23 @@ class CaptureEngine: NSObject {
return AsyncThrowingStream<CapturedFrame, Error> { continuation in
let streamOutput = StreamHandler(continuation: continuation)
self.streamOutput = streamOutput
streamOutput.frameBufferHandler = { continuation.yield($0) }
streamOutput.pcmBufferHandler = { print($0) }
do {
streamOutput.frameBufferHandler = { frame in
// print("got frame \(frame.size) at \(frame.contentRect)")
VTCompressionSessionEncodeFrame(compressionSession,
imageBuffer: frame.pixelBuffer!,
presentationTimeStamp: frame.timestamp,
duration: .invalid,
frameProperties: nil,
infoFlagsOut: nil
) { status, infoFlags, sampleBuffer in
print()
}
// outputHandler: self.outputHandler)
continuation.yield(frame)
}
streamOutput.pcmBufferHandler = { print($0) }
stream = SCStream(filter: filter, configuration: config, delegate: streamOutput)
try stream?.addStreamOutput(streamOutput, type: .screen, sampleHandlerQueue: videoSampleBufferQueue)
@@ -77,6 +90,81 @@ class CaptureEngine: NSObject {
}
}
/// Queries a compression session for its supported encoder preset
/// dictionaries (a macOS 26.0+ VideoToolbox property).
///
/// - Parameter session: The `VTCompressionSession` to inspect.
/// - Returns: The supported preset dictionaries keyed by preset name, or
///   `nil` when the property is unavailable (pre-macOS 26) or the query
///   fails.
func getEncoderSettings(session: VTCompressionSession) -> [CFString: Any]? {
    // kVTCompressionPropertyKey_SupportedPresetDictionaries only exists on
    // macOS 26+; on older systems there is nothing to query.
    guard #available(macOS 26.0, *) else { return nil }
    var supportedPresetDictionaries: CFDictionary?
    // Fix: the OSStatus was previously discarded with `_ =`, so failures
    // were indistinguishable from an absent property.
    let status = withUnsafeMutablePointer(to: &supportedPresetDictionaries) { valueOut in
        VTSessionCopyProperty(session, key: kVTCompressionPropertyKey_SupportedPresetDictionaries, allocator: kCFAllocatorDefault, valueOut: valueOut)
    }
    guard status == noErr else {
        print("VTSessionCopyProperty(SupportedPresetDictionaries) failed: \(status)")
        return nil
    }
    // Dictionary covariance upcasts [CFString: [CFString: Any]] to the
    // declared [CFString: Any] return type.
    return supportedPresetDictionaries as? [CFString: [CFString: Any]]
}
/// Configures a `VTCompressionSession` for realtime H.264 screen encoding.
///
/// - Parameters:
///   - session: The compression session to configure.
///   - expectedFrameRate: Rate-control hint for the encoder; defaults to 60 fps.
///   - averageBitRate: Target average bit rate in bits per second. Defaults
///     to 10 Mbit/s. (Fix: the previous hard-coded value of `10` — ten bits
///     per second — would starve the encoder into unusable output.)
/// - Throws: An `NSError` in `NSOSStatusErrorDomain` if applying the encoder
///   preset dictionary fails. Individual property failures below are logged
///   but non-fatal, matching the original behavior.
func configureVTCompressionSession(session: VTCompressionSession, expectedFrameRate: Float = 60, averageBitRate: Int = 10_000_000) throws {
    var err: OSStatus = noErr
    if let encoderSettings = getEncoderSettings(session: session) {
        // NOTE(review): this applies the preset-dictionaries *container* as
        // session properties — its keys are preset names, not property
        // keys. Confirm this is the intended use of the dictionary.
        err = VTSessionSetProperties(session, propertyDictionary: encoderSettings as CFDictionary)
        try NSError.check(err, "VTSessionSetProperties failed")
    }
    // Realtime mode: the encoder must keep pace with capture rather than
    // maximize quality.
    err = VTSessionSetProperty(session, key: kVTCompressionPropertyKey_RealTime, value: kCFBooleanTrue)
    if err != noErr { print("failed to set realtime \(err)") }
    err = VTSessionSetProperty(session, key: kVTCompressionPropertyKey_ExpectedFrameRate, value: expectedFrameRate as CFNumber)
    if err != noErr { print("failed to set expected frame rate \(err)") }
    err = VTSessionSetProperty(session, key: kVTCompressionPropertyKey_ProfileLevel, value: kVTProfileLevel_H264_Main_AutoLevel)
    if err != noErr { print("failed to set profile level \(err)") }
    // Fix: this failure previously printed the frame-rate message
    // ("framerte") instead of mentioning the bit rate.
    err = VTSessionSetProperty(session, key: kVTCompressionPropertyKey_AverageBitRate, value: averageBitRate as CFNumber)
    if err != noErr { print("failed to set average bit rate \(err)") }
    // Force a keyframe at least every 60 frames...
    err = VTSessionSetProperty(session, key: kVTCompressionPropertyKey_MaxKeyFrameInterval, value: 60 as CFNumber)
    if err != noErr { print("failed to set keyframe interval \(err)") }
    // ...and at least once per second, whichever comes first.
    err = VTSessionSetProperty(session, key: kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration, value: 1 as CFNumber)
    if err != noErr { print("failed to set keyframe interval duration \(err)") }
}
func stopCapture() async {
do {
try await stream?.stopCapture()
@@ -142,9 +230,11 @@ class StreamHandler: NSObject, SCStreamOutput, SCStreamDelegate {
let scaleFactor = attachments[.scaleFactor] as? CGFloat else { return nil }
var frame = CapturedFrame(surface: surface,
pixelBuffer: pixelBuffer,
contentRect: contentRect,
contentScale: contentScale,
scaleFactor: scaleFactor)
scaleFactor: scaleFactor,
timestamp: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
return frame
}
@@ -180,3 +270,14 @@ class StreamHandler: NSObject, SCStreamOutput, SCStreamDelegate {
continuation?.finish(throwing: error)
}
}
extension NSError {
    /// Converts a nonzero `OSStatus` result code into a thrown `NSError`.
    ///
    /// - Parameters:
    ///   - status: Result code from a C API; `noErr` (0) means success.
    ///   - message: Optional context. When present it is logged and — fix —
    ///     now also attached to the error's localized description, so
    ///     callers that catch the error keep the context instead of it
    ///     only going to stdout.
    /// - Throws: An `NSError` in `NSOSStatusErrorDomain` whose `code`
    ///   equals `status`, whenever `status != noErr`.
    static func check(_ status: OSStatus, _ message: String? = nil) throws {
        guard status != noErr else { return }
        var userInfo: [String: Any] = [:]
        if let message {
            print("\(message), err: \(status)")
            userInfo[NSLocalizedDescriptionKey] = "\(message) (OSStatus \(status))"
        }
        throw NSError(domain: NSOSStatusErrorDomain, code: Int(status), userInfo: userInfo)
    }
}

View File

@@ -7,6 +7,7 @@
objects = {
/* Begin PBXBuildFile section */
A94E29F72F09B569006E583D /* ScreamPacket.swift in Sources */ = {isa = PBXBuildFile; fileRef = A94E29F62F09B569006E583D /* ScreamPacket.swift */; };
A98E8BF02F05B2A0006D4458 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = A98E8BEB2F05B2A0006D4458 /* AppDelegate.swift */; };
A98E8BF12F05B2A0006D4458 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = A98E8BEC2F05B2A0006D4458 /* Assets.xcassets */; };
A98E8BF22F05B2A0006D4458 /* MainMenu.xib in Resources */ = {isa = PBXBuildFile; fileRef = A98E8BEE2F05B2A0006D4458 /* MainMenu.xib */; };
@@ -36,6 +37,7 @@
/* End PBXContainerItemProxy section */
/* Begin PBXFileReference section */
A94E29F62F09B569006E583D /* ScreamPacket.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ScreamPacket.swift; sourceTree = "<group>"; };
A98E8BC02F05B26B006D4458 /* Scream.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Scream.app; sourceTree = BUILT_PRODUCTS_DIR; };
A98E8BCE2F05B26D006D4458 /* ScreamTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = ScreamTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
A98E8BD82F05B26D006D4458 /* ScreamUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = ScreamUITests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
@@ -79,6 +81,7 @@
A98E8BB72F05B26B006D4458 = {
isa = PBXGroup;
children = (
A94E29F62F09B569006E583D /* ScreamPacket.swift */,
A9D722602F07304C00050BB0 /* Config.xcconfig */,
A9D7225E2F070FE600050BB0 /* CaptureEngine.swift */,
A98E8BEF2F05B2A0006D4458 /* Scream */,
@@ -265,6 +268,7 @@
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
A94E29F72F09B569006E583D /* ScreamPacket.swift in Sources */,
A98E8BF02F05B2A0006D4458 /* AppDelegate.swift in Sources */,
A9D7225F2F070FE600050BB0 /* CaptureEngine.swift in Sources */,
A98E8BFD2F05D28D006D4458 /* ScreenRecorder.swift in Sources */,

19
ScreamPacket.swift Normal file
View File

@@ -0,0 +1,19 @@
//
// ScreamPacket.swift
// Scream
//
// Created by neon443 on 03/01/2026.
//
import Foundation
import Cocoa
import VideoToolbox
/// Wire-format packet for streaming one slice of an encoded frame.
/// `Codable` so it can be serialized for transport; the stored properties
/// are the serialized schema.
struct ScreamPacket: Codable, Identifiable {
	// Presentation timestamp in seconds, doubling as the packet's identity.
	// NOTE(review): packets from the same frame/chunk share a timestamp —
	// confirm `id` uniqueness is acceptable for `Identifiable`.
	var id: Double //actually the timestamp seconds :shocked:
	// Rebuilds a CMTime from `id` at nanosecond precision (timescale 1e9).
	// Computed, so it is excluded from the Codable schema.
	var timestamp: CMTime { .init(seconds: self.id, preferredTimescale: 1000000000) }
	// Payload bytes carried by this packet.
	var data: Data
	// Presumably this packet's position within its chunk — TODO confirm
	// against the sender.
	var index: Int
	// Total packet count for the chunk, for reassembly on the receiver.
	var packetsInChunk: Int
	// Presumably marks packets belonging to a keyframe chunk — confirm
	// against the encoder output handler.
	var isKeyframe: Bool
}