@Koshimizu-Takehito
Created November 23, 2025 10:35
Split-screen mosaic shader for SwiftUI
import AVFoundation
import CoreImage
import SwiftUI

/// A sample view that plays a remote video and applies a controllable mosaic shader.
///
/// This view demonstrates how to:
/// - Pull video frames from `AVPlayer` using `AVPlayerItemVideoOutput`
/// - Render them in SwiftUI as a `CGImage`
/// - Apply a custom Metal-based mosaic effect using `ShaderLibrary`
/// - Interactively control both the video playback position and the
///   mosaic split position with SwiftUI gestures and controls
struct ContentView: View {
    /// A Boolean value that toggles the mosaic shader on and off.
    @State private var isOn = true

    /// A Boolean value that controls the visibility of the playback UI.
    @State private var showsControls = false

    /// An observable object that provides decoded video frames and playback state.
    @State private var provider = VideoImageProvider(
        url: URL(string: "https://devstreaming-cdn.apple.com/videos/wwdc/2025/102/2/137f7e3a-caee-4bb1-bdea-adca731aa1ed/downloads/wwdc2025-102_hd.mp4")!
    )

    /// The size of the content area used to normalize the drag position.
    @State private var size: CGSize = .zero

    /// The current horizontal offset of the mosaic split bar, in points.
    ///
    /// This value is relative to the center of the content area.
    @State private var barOffset: CGFloat = 0

    /// The horizontal offset of the mosaic bar at the start of a drag gesture.
    @State private var barStartOffset: CGFloat = 0

    var body: some View {
        screen()
            .overlay(content: mosaicBar)
            .onTapGesture { showsControls.toggle() }
            .overlay(content: control)
            .onGeometryChange(for: CGSize.self, of: \.size) { size = $1 }
            .frame(maxWidth: .infinity, maxHeight: .infinity)
            .onAppear(perform: provider.start)
            .onDisappear(perform: provider.stop)
            .animation(.default, value: showsControls)
            .tint(.blue)
    }

    /// Renders the current video frame or a placeholder while the video is loading.
    ///
    /// When a frame is available, it is drawn as a resizable image and passed
    /// through the ``MosaicEffect`` modifier. Otherwise, a background rectangle
    /// fills the space.
    @ViewBuilder
    private func screen() -> some View {
        if let image = provider.image {
            Image(decorative: image, scale: 1)
                .resizable()
                .scaledToFit()
                .modifier(MosaicEffect(isOn: isOn, offset: barOffset / max(size.width, 1) + 0.5))
        } else {
            Rectangle().foregroundStyle(.background)
        }
    }
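
    // Worked example of the normalization above: with `size.width == 400`
    // and `barOffset == 100`, the shader receives 100 / 400 + 0.5 = 0.75,
    // so the split sits 75% of the way across the view. A `barOffset` of 0
    // maps to 0.5, the horizontal center.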

    /// A draggable vertical bar that controls the mosaic split position.
    ///
    /// The bar is horizontally draggable within the bounds of the content size.
    /// Its position is normalized and passed as the `offset` parameter to the
    /// mosaic shader so that the shader can adjust where the effect is applied.
    @ViewBuilder
    private func mosaicBar() -> some View {
        Color.red
            .opacity(isOn ? 1.0 : 0.0)
            .frame(width: 2)
            .padding(.horizontal)
            .contentShape(.rect)
            .offset(x: barOffset)
            .gesture(
                DragGesture()
                    .onChanged { value in
                        // Add the translation to the offset captured at drag
                        // start, then clamp to the content's horizontal bounds.
                        barOffset = barStartOffset + value.translation.width
                        barOffset = min(max(-size.width / 2, barOffset), size.width / 2)
                    }
                    .onEnded { _ in
                        // Persist the offset as the new baseline for the next drag.
                        barStartOffset = barOffset
                    }
            )
    }
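
    // Design note: `DragGesture`'s `translation` is measured from the start
    // of the current drag, so `barStartOffset` carries the bar's resting
    // position between drags; without it, each new drag would snap the bar
    // back toward the center.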

    /// Renders the playback controls and scrubber overlay.
    ///
    /// The controls are only shown once the first frame is available. Tapping
    /// anywhere on the content toggles the visibility of the controls.
    @ViewBuilder
    private func control() -> some View {
        if provider.image != nil {
            VStack {
                HStack {
                    if showsControls {
                        let buttonName = provider.isPaused ? "play" : "pause"
                        Button(buttonName.capitalized, systemImage: buttonName, action: provider.toggle)
                        Button(action: provider.reset) {
                            Image(systemName: "arrow.trianglehead.counterclockwise")
                        }
                    }
                    Spacer()
                    Toggle("Filter", isOn: $isOn.animation())
                        .fixedSize()
                        .shadow(radius: 1)
                }
                if showsControls {
                    slider()
                }
            }
            .frame(maxWidth: .infinity, maxHeight: .infinity, alignment: .bottomTrailing)
            .padding()
            .fontWeight(.black)
            .buttonStyle(.glass)
        }
    }

    /// A slider that scrubs the video playback position.
    ///
    /// The slider is bound to ``VideoImageProvider/progress``, which maps
    /// the current time and total duration to a 0–1 range. The slider also
    /// displays the current playback time and total duration.
    @ViewBuilder
    private func slider() -> some View {
        Slider(value: $provider.progress, in: 0...1) {
            Text("Position")
        } minimumValueLabel: {
            Text(timeString(from: provider.currentTime))
        } maximumValueLabel: {
            Text(timeString(from: provider.duration))
        }
        .monospacedDigit()
        .shadow(radius: 1)
        .padding(.horizontal)
        .font(.caption2)
        .background {
            RoundedRectangle(cornerRadius: 16)
                .foregroundStyle(.ultraThinMaterial)
        }
    }

    /// Formats a time interval in seconds as a human-readable string.
    ///
    /// The returned string uses the `m:ss` format and falls back to `--:--`
    /// if the value is not finite.
    ///
    /// - Parameter seconds: The time interval in seconds.
    /// - Returns: A formatted string such as `"1:23"` or `"--:--"`.
    private func timeString(from seconds: Double) -> String {
        // `isFinite` is already false for NaN, so no separate NaN check is needed.
        guard seconds.isFinite else { return "--:--" }
        let total = Int(seconds.rounded())
        let m = total / 60
        let s = total % 60
        return String(format: "%d:%02d", m, s)
    }
}

/// A view modifier that applies a mosaic shader to its content.
///
/// The underlying shader is provided by ``ShaderLibrary/mosaic`` and receives
/// the view's bounding rectangle, the mosaic scale, and a normalized split
/// offset. This modifier does not animate over time by itself; instead, it
/// recomputes the shader whenever its `isOn` or `offset` properties change.
struct MosaicEffect: ViewModifier {
    /// A Boolean value that controls whether the mosaic effect is enabled.
    var isOn: Bool

    /// A normalized value in the range `[0, 1]` that controls the horizontal
    /// split position of the mosaic effect.
    var offset: Double

    func body(content: Content) -> some View {
        content.layerEffect(shader(), maxSampleOffset: .zero)
    }

    /// Creates a shader for the current configuration.
    ///
    /// The `scale` parameter determines the coarseness of the mosaic effect
    /// when it is enabled. When the effect is disabled, a value of `1.0` is
    /// used so that the image is rendered without visible pixelation.
    ///
    /// - Returns: A configured `Shader` instance.
    private func shader() -> Shader {
        let scale = isOn ? 10.0 : 1.0
        return ShaderLibrary.mosaic(.boundingRect, .float(scale), .float(offset))
    }
}
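
// A minimal usage sketch (hypothetical, not part of the gist): the modifier
// works on any view, not just video frames. Here it pixelates the left half
// of an SF Symbol.
private struct MosaicDemo: View {
    var body: some View {
        Image(systemName: "globe")
            .resizable()
            .scaledToFit()
            // `offset: 0.5` splits at the horizontal midpoint; fragments to
            // the left of the split are pixelated.
            .modifier(MosaicEffect(isOn: true, offset: 0.5))
    }
}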

/// An observable video provider that pulls frames from `AVPlayer` for use in SwiftUI.
///
/// `VideoImageProvider` is responsible for:
/// - Loading a remote video asset
/// - Driving playback via `AVPlayer`
/// - Extracting frames using `AVPlayerItemVideoOutput`
/// - Providing the latest frame as a `CGImage`
/// - Tracking duration and current playback time
/// - Exposing a normalized progress value suitable for scrubbing with a slider
@MainActor
@Observable
final class VideoImageProvider {
    /// The most recent video frame as a `CGImage`.
    ///
    /// This value is updated on every display refresh when a new pixel buffer
    /// is available from `AVPlayerItemVideoOutput`.
    private(set) var image: CGImage?

    /// A Boolean value that indicates whether playback is currently paused.
    ///
    /// Updating this property also pauses or resumes the associated
    /// display link so that frame extraction is aligned with playback.
    private(set) var isPaused: Bool = false {
        didSet { displayLink?.isPaused = isPaused }
    }

    /// The total duration of the loaded video, in seconds.
    private(set) var duration: Double = 0

    /// The current playback time, in seconds.
    private(set) var currentTime: Double = 0

    /// The Core Image context used to convert pixel buffers into `CGImage` values.
    private let context = CIContext()

    /// The remote video URL used to create the `AVPlayerItem`.
    private let url: URL

    /// The underlying `AVPlayer` instance that manages playback.
    @ObservationIgnored private var player: AVPlayer?

    /// The video output used to pull decoded frames from the player item.
    @ObservationIgnored private var output: AVPlayerItemVideoOutput!

    /// A key-value observation used to detect when the player item becomes ready to play.
    @ObservationIgnored private var observer: NSKeyValueObservation?

    /// A display link that synchronizes frame extraction with the screen refresh rate.
    ///
    /// The display link is created lazily when the player item becomes ready
    /// and is invalidated when ``stop()`` is called or when the provider
    /// is deallocated.
    @ObservationIgnored private var displayLink: CADisplayLink?

    /// The audio volume applied to the underlying player.
    ///
    /// Valid values are in the range `0.0` (muted) to `1.0` (full volume).
    var volume: Float = 1.0 {
        didSet { player?.volume = volume }
    }

    /// A normalized progress value for the current playback position.
    ///
    /// The value is in the range `0.0...1.0` and is derived from
    /// ``currentTime`` and ``duration``. Assigning a new value
    /// seeks the player to the corresponding position.
    var progress: Double {
        get {
            guard duration > 0 else { return 0 }
            return currentTime / duration
        }
        set {
            guard duration > 0 else { return }
            let clamped = max(0, min(1, newValue))
            let seconds = clamped * duration
            seek(to: seconds)
        }
    }
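
    // For example, with a 120-second video, assigning `progress = 0.25` to
    // the property above seeks to the 30-second mark via `seek(to:)`;
    // out-of-range values are clamped to `0...1` first.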

    /// Creates a new video provider for the specified URL.
    ///
    /// The actual loading and playback setup is performed by calling ``start()``.
    ///
    /// - Parameter url: The remote video URL to load and play.
    init(url: URL) {
        self.url = url
    }

    /// Cleans up playback resources when the provider is deallocated.
    ///
    /// The display link is invalidated, the player is paused, and any
    /// KVO observers are invalidated.
    isolated deinit {
        displayLink?.invalidate()
        player?.pause()
        observer?.invalidate()
    }

    /// Pauses video playback.
    ///
    /// This method does nothing if the player has not been created yet.
    func pause() {
        player?.pause()
        isPaused = true
    }

    /// Stops playback and releases underlying playback resources.
    ///
    /// This method invalidates the display link, removes the KVO observer,
    /// pauses the player, and releases the player instance. After calling
    /// this method, you can call ``start()`` again to recreate the pipeline.
    func stop() {
        displayLink?.invalidate()
        displayLink = nil
        observer?.invalidate()
        observer = nil
        player?.pause()
        player = nil
    }

    /// Starts or resumes video playback.
    ///
    /// If the player has not been created yet, this method initializes it
    /// and starts playback once the asset is ready. Otherwise, it simply
    /// resumes playback from the current position.
    func resume() {
        guard let player else {
            return start()
        }
        player.play()
        isPaused = false
    }

    /// Toggles between playing and pausing the video.
    func toggle() {
        isPaused ? resume() : pause()
    }

    /// Resets playback to the beginning of the video.
    ///
    /// If the player has not been created yet, this method does nothing.
    /// Use ``resume()`` afterwards to immediately start playback from the start.
    func reset() {
        // If the player does not exist, there is nothing to reset.
        guard let player else { return }
        // Seek to the beginning (0 seconds).
        player.seek(to: .zero)
        currentTime = .zero
    }

    /// Seeks the player to the specified time.
    ///
    /// - Parameter seconds: The new playback time in seconds.
    func seek(to seconds: Double) {
        guard let player else { return }
        // A timescale of 600 is a common choice for video because it divides
        // evenly by typical frame rates such as 24, 25, 30, and 60.
        let time = CMTime(seconds: seconds, preferredTimescale: 600)
        player.seek(to: time, toleranceBefore: .zero, toleranceAfter: .zero)
        currentTime = seconds
    }

    /// Creates and configures the underlying `AVPlayer` and its video output.
    ///
    /// This method is typically called once when the view appears. It sets up
    /// the player item, video output, and KVO observer used to detect when
    /// the asset becomes ready for playback. Once ready, a display link is
    /// created and playback begins.
    func start() {
        guard player == nil else {
            return resume()
        }
        let item = AVPlayerItem(url: url)
        player = AVPlayer(playerItem: item)
        player?.volume = volume
        // Request wide-color, 16-bit half-float frames (P3 primaries, linear
        // transfer) from the video output.
        let output = AVPlayerItemVideoOutput(outputSettings: [
            AVVideoAllowWideColorKey: true,
            AVVideoColorPropertiesKey: [
                AVVideoColorPrimariesKey: AVVideoColorPrimaries_P3_D65,
                AVVideoTransferFunctionKey: AVVideoTransferFunction_Linear,
                AVVideoYCbCrMatrixKey: AVVideoYCbCrMatrix_ITU_R_2020,
            ],
            kCVPixelBufferPixelFormatTypeKey as String: NSNumber(
                value: kCVPixelFormatType_64RGBAHalf
            ),
        ])
        self.output = output
        observer = item.observe(\.status, options: [.new, .old]) { item, _ in
            guard item.status == .readyToPlay else {
                return
            }
            item.add(output)
            MainActor.assumeIsolated {
                Task {
                    self.duration = try await item.asset.load(.duration).seconds
                }
                self.currentTime = 0
                self.makeDisplayLinkIfNeeded()
                self.resume()
            }
        }
    }

    /// Lazily creates and registers the display link used for frame extraction.
    ///
    /// The display link is added to the main run loop with the `.common` mode
    /// and configured to call ``copyPixelBuffers(link:)`` on every screen refresh.
    ///
    /// Note that `CADisplayLink` retains its target, so ``stop()`` must
    /// invalidate the link before this provider can be deallocated.
    private func makeDisplayLinkIfNeeded() {
        guard displayLink == nil else { return }
        let link = CADisplayLink(
            target: self,
            selector: #selector(copyPixelBuffers(link:))
        )
        link.add(to: .main, forMode: .common)
        displayLink = link
    }

    /// Copies the latest video frame from the player into ``image``.
    ///
    /// This method is invoked on every screen refresh by the display link.
    /// When a new pixel buffer is available, it is converted into a `CGImage`
    /// via Core Image and published to SwiftUI. The current playback time is
    /// also updated from the underlying player.
    ///
    /// - Parameter link: The display link that triggered this callback.
    @objc private func copyPixelBuffers(link: CADisplayLink) {
        let time = output.itemTime(forHostTime: link.timestamp)
        let hasBuffer = output.hasNewPixelBuffer(forItemTime: time)
        if hasBuffer, let buffer = output.copyPixelBuffer(forItemTime: time, itemTimeForDisplay: nil) {
            let image = CIImage(cvPixelBuffer: buffer)
            self.image = context.createCGImage(image, from: image.extent)
            if let player {
                self.currentTime = player.currentTime().seconds
            }
        }
    }
}
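
// Usage sketch, assuming a valid `videoURL` (hypothetical; the gist drives
// the provider from `ContentView` instead):
//
//     let provider = VideoImageProvider(url: videoURL)
//     provider.start()          // build the player and begin playback
//     provider.progress = 0.5   // scrub to the midpoint once `duration` loads
//     provider.stop()           // tear down the display link and player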

#Preview {
    ContentView()
        .colorScheme(.dark)
}
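
MosaicShader.metal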

#include <metal_stdlib>
#include <SwiftUI/SwiftUI.h>

using namespace metal;

/**
 * @file MosaicShader.metal
 * @brief Provides a simple, split-screen mosaic shader for SwiftUI.
 *
 * This file defines a `stitchable` Metal function that can be attached to a
 * SwiftUI `ShaderLibrary`. The shader applies a pixelated (mosaic) effect
 * to a configurable portion of the rendered layer while leaving the rest
 * of the image untouched.
 */

/**
 * @brief Applies a pixelated mosaic effect to part of a SwiftUI layer.
 *
 * The shader renders the original image on the right-hand side of the view
 * and a pixelated version on the left-hand side. The split position is
 * controlled by the @p ratio parameter in normalized coordinates.
 *
 * Typical usage from SwiftUI is through `ShaderLibrary.mosaic`, passing in
 * the layer's bounding rectangle as @p bounds, the desired mosaic tile size
 * as @p scale, and a split value between `0.0` and `1.0` as @p ratio.
 *
 * @param position
 *     The fragment position in the layer's coordinate space.
 *
 * @param layer
 *     The SwiftUI layer being sampled. This is provided by SwiftUI and
 *     represents the rendered contents to which the effect is applied.
 *
 * @param bounds
 *     A rectangle describing the layer's bounds. The `xy` components
 *     represent the origin, while the `zw` components represent the width
 *     and height of the layer in points.
 *
 * @param scale
 *     The size of each mosaic tile, in points. Larger values produce a
 *     more heavily pixelated appearance.
 *
 * @param ratio
 *     The normalized horizontal split position in the range `[0.0, 1.0]`.
 *     Pixels with `position.x / bounds.z > ratio` are rendered without
 *     the mosaic effect; the remaining pixels are snapped to a grid to
 *     create the pixelated region.
 *
 * @return
 *     The resulting color after optionally applying the mosaic effect.
 */
[[ stitchable ]]
half4 mosaic(float2 position, SwiftUI::Layer layer, float4 bounds, float scale, float ratio) {
    // The right-hand side of the view (beyond the split) is drawn as-is.
    if (position.x / bounds.z > ratio) {
        return layer.sample(position);
    }
    // Shift the position so that the layer is centered around the origin.
    position = position - bounds.zw / 2;
    // Snap the position to a regular grid of size `scale` to form tiles.
    position = floor(position / scale) * scale;
    // Sample the color at the snapped location, shifted back into layer space.
    // The extra `scale / 2` offsets sampling towards the center of each tile.
    return layer.sample(position + bounds.zw / 2 + scale / 2);
}
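
A worked example of the tile math, assuming `scale = 10` and a 400-point-wide
layer: a fragment at `x = 237` is shifted into centered coordinates as
`237 - 200 = 37`, snapped to `floor(37 / 10) * 10 = 30`, and sampled at
`30 + 200 + 5 = 235`, the center of its 10-point tile. Every fragment whose
x falls in `[230, 240)` resolves to that same sample, which is what produces
the uniform mosaic tiles.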