๐ŸŽฌ AVFoundation

์˜ค๋””์˜ค/๋น„๋””์˜ค ์บก์ฒ˜, ์žฌ์ƒ, ํŽธ์ง‘์„ ์œ„ํ•œ ์ข…ํ•ฉ ํ”„๋ ˆ์ž„์›Œํฌ

iOS 4+ํ”„๋กœ๊ธ‰ ๋ฏธ๋””์–ด

โœจ AVFoundation์ด๋ž€?

AVFoundation์€ ์‹œ๊ฐ„ ๊ธฐ๋ฐ˜ ๋ฏธ๋””์–ด๋ฅผ ์ฒ˜๋ฆฌํ•˜๋Š” Apple์˜ ๊ฐ•๋ ฅํ•œ ํ”„๋ ˆ์ž„์›Œํฌ์ž…๋‹ˆ๋‹ค. ์˜ค๋””์˜ค/๋น„๋””์˜ค ์บก์ฒ˜, ์žฌ์ƒ, ํŽธ์ง‘, ๋‚ด๋ณด๋‚ด๊ธฐ ๋“ฑ ๋ฏธ๋””์–ด ์›Œํฌํ”Œ๋กœ์šฐ์˜ ๋ชจ๋“  ๋‹จ๊ณ„๋ฅผ ์ง€์›ํ•ฉ๋‹ˆ๋‹ค. ์นด๋ฉ”๋ผ ์•ฑ, ์˜์ƒ ํŽธ์ง‘๊ธฐ, ์˜ค๋””์˜ค ๋ ˆ์ฝ”๋”, ์ŠคํŠธ๋ฆฌ๋ฐ ์„œ๋น„์Šค ๋“ฑ์„ ๊ตฌํ˜„ํ•  ๋•Œ ํ•ต์‹ฌ์ ์œผ๋กœ ์‚ฌ์šฉ๋ฉ๋‹ˆ๋‹ค.

๐Ÿ’ก ํ•ต์‹ฌ ๊ธฐ๋Šฅ: ๋น„๋””์˜ค/์˜ค๋””์˜ค ์บก์ฒ˜ ยท ๋ฏธ๋””์–ด ์žฌ์ƒ ยท ์‹ค์‹œ๊ฐ„ ์ฒ˜๋ฆฌ ยท ๋น„๋””์˜ค ํŽธ์ง‘ ยท ๋‚ด๋ณด๋‚ด๊ธฐ ยท ๋ฉ”ํƒ€๋ฐ์ดํ„ฐ ยท ์ŠคํŠธ๋ฆฌ๋ฐ ยท ์„ธ์…˜ ๊ด€๋ฆฌ

๐Ÿ“น 1. ๋น„๋””์˜ค ์บก์ฒ˜

AVCaptureSession์„ ์‚ฌ์šฉํ•˜์—ฌ ์นด๋ฉ”๋ผ๋กœ๋ถ€ํ„ฐ ๋น„๋””์˜ค๋ฅผ ์บก์ฒ˜ํ•ฉ๋‹ˆ๋‹ค.

CameraView.swift — Video capture
import SwiftUI
import AVFoundation

@Observable
class CameraManager: NSObject {
    let captureSession = AVCaptureSession()
    var videoOutput = AVCaptureMovieFileOutput()
    var previewLayer: AVCaptureVideoPreviewLayer?
    var isRecording = false

    // Session start/stop must happen off the main thread; a dedicated
    // serial queue keeps those calls ordered
    private let sessionQueue = DispatchQueue(label: "camera.sessionQueue")

    func setupCamera() {
        // ์„ธ์…˜ ์„ค์ •
        captureSession.beginConfiguration()

        // ๋น„๋””์˜ค ์ž…๋ ฅ ์ถ”๊ฐ€
        guard let videoDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back),
              let videoInput = try? AVCaptureDeviceInput(device: videoDevice),
              captureSession.canAddInput(videoInput)
        else { return }

        captureSession.addInput(videoInput)

        // ์˜ค๋””์˜ค ์ž…๋ ฅ ์ถ”๊ฐ€
        if let audioDevice = AVCaptureDevice.default(for: .audio),
           let audioInput = try? AVCaptureDeviceInput(device: audioDevice),
           captureSession.canAddInput(audioInput) {
            captureSession.addInput(audioInput)
        }

        // ๋น„๋””์˜ค ์ถœ๋ ฅ ์ถ”๊ฐ€
        if captureSession.canAddOutput(videoOutput) {
            captureSession.addOutput(videoOutput)
        }

        // ์„ธ์…˜ ํ’ˆ์งˆ ์„ค์ •
        captureSession.sessionPreset = .high

        captureSession.commitConfiguration()
    }

    func startSession() {
        // startRunning() blocks until the session starts, so keep it off the main thread
        sessionQueue.async { [captureSession] in
            captureSession.startRunning()
        }
    }

    func stopSession() {
        sessionQueue.async { [captureSession] in
            captureSession.stopRunning()
        }
    }

    func startRecording(to url: URL) {
        // startRecording(to:) fails if a file already exists at the destination
        try? FileManager.default.removeItem(at: url)
        videoOutput.startRecording(to: url, recordingDelegate: self)
        isRecording = true
    }

    func stopRecording() {
        videoOutput.stopRecording()
        isRecording = false
    }
}

// ๋…นํ™” ๋ธ๋ฆฌ๊ฒŒ์ดํŠธ
extension CameraManager: AVCaptureFileOutputRecordingDelegate {
    func fileOutput(
        _ output: AVCaptureFileOutput,
        didFinishRecordingTo outputFileURL: URL,
        from connections: [AVCaptureConnection],
        error: Error?
    ) {
        if let error = error {
            print("๋…นํ™” ์˜ค๋ฅ˜: \(error.localizedDescription)")
        } else {
            print("๋…นํ™” ์™„๋ฃŒ: \(outputFileURL)")
        }
    }
}

struct CameraView: View {
    @State private var cameraManager = CameraManager()

    var body: some View {
        ZStack {
            CameraPreview(session: cameraManager.captureSession)
                .ignoresSafeArea()

            VStack {
                Spacer()

                Button {
                    if cameraManager.isRecording {
                        cameraManager.stopRecording()
                    } else {
                        let url = FileManager.default.temporaryDirectory.appendingPathComponent("video.mov")
                        cameraManager.startRecording(to: url)
                    }
                } label: {
                    Circle()
                        .fill(cameraManager.isRecording ? Color.red : Color.white)
                        .frame(width: 70, height: 70)
                        .overlay(
                            Circle()
                                .stroke(Color.white, lineWidth: 4)
                        )
                }
                .padding(.bottom, 50)
            }
        }
        .onAppear {
            cameraManager.setupCamera()
            cameraManager.startSession()
        }
        .onDisappear {
            cameraManager.stopSession()
        }
    }
}

struct CameraPreview: UIViewRepresentable {
    let session: AVCaptureSession

    // Backing the view with AVCaptureVideoPreviewLayer keeps the layer sized
    // to the view automatically, including on rotation
    final class PreviewView: UIView {
        override class var layerClass: AnyClass { AVCaptureVideoPreviewLayer.self }
        var previewLayer: AVCaptureVideoPreviewLayer { layer as! AVCaptureVideoPreviewLayer }
    }

    func makeUIView(context: Context) -> PreviewView {
        let view = PreviewView()
        view.previewLayer.session = session
        view.previewLayer.videoGravity = .resizeAspectFill
        return view
    }

    func updateUIView(_ uiView: PreviewView, context: Context) {}
}
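
Capture only works once the user has granted camera access. Below is a minimal pre-flight sketch using the standard authorization APIs; requestCameraAccess is a hypothetical helper name, and the same pattern applies with .audio for the microphone:

Permissions.swift — Camera permission check (sketch)
import AVFoundation

func requestCameraAccess() async -> Bool {
    switch AVCaptureDevice.authorizationStatus(for: .video) {
    case .authorized:
        return true
    case .notDetermined:
        // Prompts the user; the dialog text comes from NSCameraUsageDescription
        return await AVCaptureDevice.requestAccess(for: .video)
    default:
        // .denied or .restricted — direct the user to Settings
        return false
    }
}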

๐ŸŽต 2. ์˜ค๋””์˜ค ๋…น์Œ

AVAudioRecorder๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ์˜ค๋””์˜ค๋ฅผ ๋…น์Œํ•ฉ๋‹ˆ๋‹ค.

AudioRecorderView.swift — Audio recording
import SwiftUI
import AVFoundation

@Observable
class AudioRecorderManager: NSObject {
    var audioRecorder: AVAudioRecorder?
    var isRecording = false
    var recordingTime: TimeInterval = 0

    func setupRecorder() {
        // ์˜ค๋””์˜ค ์„ธ์…˜ ์„ค์ •
        let audioSession = AVAudioSession.sharedInstance()
        try? audioSession.setCategory(.playAndRecord, mode: .default)
        try? audioSession.setActive(true)

        // ๋…น์Œ ์„ค์ •
        let settings: [String: Any] = [
            AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
            AVSampleRateKey: 44100.0,
            AVNumberOfChannelsKey: 2,
            AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
        ]

        let url = FileManager.default.temporaryDirectory.appendingPathComponent("recording.m4a")

        do {
            audioRecorder = try AVAudioRecorder(url: url, settings: settings)
            audioRecorder?.delegate = self
            audioRecorder?.prepareToRecord()
        } catch {
            print("๋…น์Œ ์„ค์ • ์˜ค๋ฅ˜: \(error)")
        }
    }

    func startRecording() {
        audioRecorder?.record()
        isRecording = true
    }

    func stopRecording() {
        audioRecorder?.stop()
        isRecording = false
    }

    func pauseRecording() {
        audioRecorder?.pause()
        isRecording = false
    }
}

extension AudioRecorderManager: AVAudioRecorderDelegate {
    func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
        if flag {
            print("๋…น์Œ ์™„๋ฃŒ: \(recorder.url)")
        }
    }
}

struct AudioRecorderView: View {
    @State private var recorderManager = AudioRecorderManager()

    var body: some View {
        VStack(spacing: 30) {
            Text(recorderManager.isRecording ? "Recording..." : "Ready")
                .font(.title)
                .foregroundStyle(recorderManager.isRecording ? .red : .primary)

            HStack(spacing: 40) {
                Button {
                    if recorderManager.isRecording {
                        recorderManager.stopRecording()
                    } else {
                        recorderManager.startRecording()
                    }
                } label: {
                    Image(systemName: recorderManager.isRecording ? "stop.circle.fill" : "record.circle")
                        .font(.system(size: 64))
                        .foregroundStyle(recorderManager.isRecording ? .red : .blue)
                }

                if recorderManager.isRecording {
                    Button {
                        recorderManager.pauseRecording()
                    } label: {
                        Image(systemName: "pause.circle.fill")
                            .font(.system(size: 64))
                    }
                }
            }
        }
        .onAppear {
            recorderManager.setupRecorder()
        }
    }
}
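
The recordingTime property above is declared but never updated. One way to drive it is sketched below with a quarter-second Timer (an arbitrary interval) that also reads input levels through the recorder's metering API; the caller is responsible for invalidating the returned timer when recording stops:

AudioRecorderManager+Metering.swift — Driving recordingTime (sketch)
import AVFoundation

extension AudioRecorderManager {
    // Call after startRecording(); invalidate the returned timer on stop
    func startMeteringTimer() -> Timer {
        // Enable metering (ideally before recording starts)
        audioRecorder?.isMeteringEnabled = true
        return Timer.scheduledTimer(withTimeInterval: 0.25, repeats: true) { [weak self] _ in
            guard let self, let recorder = self.audioRecorder, recorder.isRecording else { return }
            recorder.updateMeters()
            self.recordingTime = recorder.currentTime
            // averagePower(forChannel:) is in dBFS: 0 is full scale, more negative is quieter
            let level = recorder.averagePower(forChannel: 0)
            print("Input level: \(level) dBFS")
        }
    }
}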

โ–ถ๏ธ 3. ๋ฏธ๋””์–ด ์žฌ์ƒ

AVPlayer๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ๋น„๋””์˜ค/์˜ค๋””์˜ค๋ฅผ ์žฌ์ƒํ•ฉ๋‹ˆ๋‹ค.

VideoPlayerView.swift — Media playback
import SwiftUI
import AVFoundation

@Observable
class VideoPlayerManager {
    var player: AVPlayer?
    var isPlaying = false
    var currentTime: TimeInterval = 0
    var duration: TimeInterval = 0
    private var timeObserver: Any?

    func loadVideo(from url: URL) {
        let asset = AVURLAsset(url: url)  // AVURLAsset is preferred over the deprecated AVAsset(url:)
        let playerItem = AVPlayerItem(asset: asset)
        player = AVPlayer(playerItem: playerItem)

        // ์žฌ์ƒ ์‹œ๊ฐ„ ๊ด€์ฐฐ
        player?.addPeriodicTimeObserver(
            forInterval: CMTime(seconds: 0.5, preferredTimescale: 600),
            queue: .main
        ) { [weak self] time in
            self?.currentTime = time.seconds
        }

        // ์˜์ƒ ๊ธธ์ด ๊ฐ€์ ธ์˜ค๊ธฐ
        Task {
            if let duration = try? await asset.load(.duration) {
                self.duration = duration.seconds
            }
        }
    }

    func play() {
        player?.play()
        isPlaying = true
    }

    func pause() {
        player?.pause()
        isPlaying = false
    }

    func seek(to time: TimeInterval) {
        let cmTime = CMTime(seconds: time, preferredTimescale: 600)
        player?.seek(to: cmTime)
    }

    deinit {
        // Periodic time observers must be removed explicitly
        if let timeObserver {
            player?.removeTimeObserver(timeObserver)
        }
    }
}

struct VideoPlayerView: View {
    let url: URL
    @State private var playerManager = VideoPlayerManager()

    var body: some View {
        VStack {
            // ๋น„๋””์˜ค ๋ทฐ
            VideoPlayerLayer(player: playerManager.player)
                .frame(height: 300)
                .background(Color.black)

            // ์žฌ์ƒ ์ปจํŠธ๋กค
            VStack(spacing: 16) {
                // ์ง„ํ–‰ ๋ฐ”
                Slider(
                    value: Binding(
                        get: { playerManager.currentTime },
                        set: { playerManager.seek(to: $0) }
                    ),
                    in: 0...max(playerManager.duration, 1)
                )

                // ์‹œ๊ฐ„ ํ‘œ์‹œ
                HStack {
                    Text(formatTime(playerManager.currentTime))
                    Spacer()
                    Text(formatTime(playerManager.duration))
                }
                .font(.caption)
                .foregroundStyle(.secondary)

                // ์žฌ์ƒ ๋ฒ„ํŠผ
                Button {
                    if playerManager.isPlaying {
                        playerManager.pause()
                    } else {
                        playerManager.play()
                    }
                } label: {
                    Image(systemName: playerManager.isPlaying ? "pause.circle.fill" : "play.circle.fill")
                        .font(.system(size: 64))
                }
            }
            .padding()
        }
        .onAppear {
            playerManager.loadVideo(from: url)
        }
    }

    func formatTime(_ time: TimeInterval) -> String {
        let minutes = Int(time) / 60
        let seconds = Int(time) % 60
        return String(format: "%d:%02d", minutes, seconds)
    }
}

struct VideoPlayerLayer: UIViewRepresentable {
    let player: AVPlayer?

    func makeUIView(context: Context) -> UIView {
        let view = UIView(frame: .zero)
        let playerLayer = AVPlayerLayer(player: player)
        playerLayer.videoGravity = .resizeAspect
        view.layer.addSublayer(playerLayer)

        DispatchQueue.main.async {
            playerLayer.frame = view.bounds
        }

        return view
    }

    func updateUIView(_ uiView: UIView, context: Context) {
        if let layer = uiView.layer.sublayers?.first as? AVPlayerLayer {
            // Reattach the player here: it is still nil when makeUIView runs,
            // because loadVideo(from:) is only called in onAppear
            layer.player = player
            layer.frame = uiView.bounds
        }
    }
}
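
When custom controls aren't needed, AVKit's built-in VideoPlayer view (iOS 14+) replaces both the manager and the UIViewRepresentable above. A minimal sketch; SimplePlayerView is a hypothetical name:

SimplePlayerView.swift — AVKit alternative (sketch)
import SwiftUI
import AVKit

struct SimplePlayerView: View {
    let url: URL

    var body: some View {
        // VideoPlayer ships with standard playback controls
        VideoPlayer(player: AVPlayer(url: url))
            .frame(height: 300)
    }
}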

โœ‚๏ธ 4. ๋น„๋””์˜ค ํŽธ์ง‘

AVMutableComposition์„ ์‚ฌ์šฉํ•˜์—ฌ ๋น„๋””์˜ค๋ฅผ ํŽธ์ง‘ํ•ฉ๋‹ˆ๋‹ค.

VideoEditor.swift — Video editing
import AVFoundation
import UIKit

class VideoEditor {
    // ๋น„๋””์˜ค ์ž๋ฅด๊ธฐ
    func trimVideo(
        asset: AVAsset,
        startTime: CMTime,
        endTime: CMTime,
        outputURL: URL
    ) async throws {
        // Composition ์ƒ์„ฑ
        let composition = AVMutableComposition()

        // ๋น„๋””์˜ค ํŠธ๋ž™ ์ถ”๊ฐ€
        guard let videoTrack = composition.addMutableTrack(
            withMediaType: .video,
            preferredTrackID: kCMPersistentTrackID_Invalid
        ) else { return }

        // ์˜ค๋””์˜ค ํŠธ๋ž™ ์ถ”๊ฐ€
        guard let audioTrack = composition.addMutableTrack(
            withMediaType: .audio,
            preferredTrackID: kCMPersistentTrackID_Invalid
        ) else { return }

        // ์›๋ณธ ํŠธ๋ž™ ๊ฐ€์ ธ์˜ค๊ธฐ
        let assetVideoTrack = try await asset.loadTracks(withMediaType: .video).first
        let assetAudioTrack = try await asset.loadTracks(withMediaType: .audio).first

        // ์‹œ๊ฐ„ ๋ฒ”์œ„
        let timeRange = CMTimeRange(start: startTime, end: endTime)

        // ํŠธ๋ž™ ์‚ฝ์ž…
        if let assetVideoTrack {
            try videoTrack.insertTimeRange(timeRange, of: assetVideoTrack, at: .zero)
        }

        if let assetAudioTrack {
            try audioTrack.insertTimeRange(timeRange, of: assetAudioTrack, at: .zero)
        }

        // ๋‚ด๋ณด๋‚ด๊ธฐ
        try await exportVideo(composition: composition, to: outputURL)
    }

    // ์—ฌ๋Ÿฌ ๋น„๋””์˜ค ํ•ฉ์น˜๊ธฐ
    func mergeVideos(
        assets: [AVAsset],
        outputURL: URL
    ) async throws {
        let composition = AVMutableComposition()

        guard let videoTrack = composition.addMutableTrack(
            withMediaType: .video,
            preferredTrackID: kCMPersistentTrackID_Invalid
        ) else { return }

        guard let audioTrack = composition.addMutableTrack(
            withMediaType: .audio,
            preferredTrackID: kCMPersistentTrackID_Invalid
        ) else { return }

        var currentTime = CMTime.zero

        for asset in assets {
            let duration = try await asset.load(.duration)
            let timeRange = CMTimeRange(start: .zero, duration: duration)

            if let assetVideoTrack = try await asset.loadTracks(withMediaType: .video).first {
                try videoTrack.insertTimeRange(timeRange, of: assetVideoTrack, at: currentTime)
            }

            if let assetAudioTrack = try await asset.loadTracks(withMediaType: .audio).first {
                try audioTrack.insertTimeRange(timeRange, of: assetAudioTrack, at: currentTime)
            }

            currentTime = CMTimeAdd(currentTime, duration)
        }

        try await exportVideo(composition: composition, to: outputURL)
    }

    // ๋น„๋””์˜ค ๋‚ด๋ณด๋‚ด๊ธฐ
    private func exportVideo(
        composition: AVMutableComposition,
        to url: URL
    ) async throws {
        guard let exportSession = AVAssetExportSession(
            asset: composition,
            presetName: AVAssetExportPresetHighestQuality
        ) else { return }

        exportSession.outputURL = url
        exportSession.outputFileType = .mov

        await exportSession.export()

        if exportSession.status == .failed {
            throw exportSession.error ?? NSError(domain: "", code: -1)
        }
    }

    // ๋น„๋””์˜ค์— ์›Œํ„ฐ๋งˆํฌ ์ถ”๊ฐ€
    func addWatermark(
        to asset: AVAsset,
        watermarkImage: UIImage,
        outputURL: URL
    ) async throws {
        let composition = AVMutableComposition()
        let videoComposition = AVMutableVideoComposition()

        // ๋น„๋””์˜ค ํŠธ๋ž™ ์ถ”๊ฐ€
        guard let videoTrack = composition.addMutableTrack(
            withMediaType: .video,
            preferredTrackID: kCMPersistentTrackID_Invalid
        ) else { return }

        let assetVideoTrack = try await asset.loadTracks(withMediaType: .video).first
        let duration = try await asset.load(.duration)

        if let assetVideoTrack {
            try videoTrack.insertTimeRange(
                CMTimeRange(start: .zero, duration: duration),
                of: assetVideoTrack,
                at: .zero
            )
        }

        // ์›Œํ„ฐ๋งˆํฌ ๋ ˆ์ด์–ด ์ถ”๊ฐ€
        let watermarkLayer = CALayer()
        watermarkLayer.contents = watermarkImage.cgImage
        watermarkLayer.frame = CGRect(x: 20, y: 20, width: 100, height: 100)
        watermarkLayer.opacity = 0.7

        let videoLayer = CALayer()
        videoLayer.frame = CGRect(origin: .zero, size: CGSize(width: 1920, height: 1080))

        let outputLayer = CALayer()
        outputLayer.frame = videoLayer.frame
        outputLayer.addSublayer(videoLayer)
        outputLayer.addSublayer(watermarkLayer)

        videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(
            postProcessingAsVideoLayer: videoLayer,
            in: outputLayer
        )

        try await exportVideo(composition: composition, to: outputURL)
    }
}
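
A short usage sketch for the editor; sourceURL stands in for a real video file, and TrimExample is a hypothetical name:

TrimExample.swift — Using VideoEditor (sketch)
import AVFoundation

// Usage sketch — sourceURL is a placeholder for an actual recording
let editor = VideoEditor()
let sourceURL = FileManager.default.temporaryDirectory.appendingPathComponent("video.mov")
let trimmedURL = FileManager.default.temporaryDirectory.appendingPathComponent("trimmed.mov")

Task {
    let asset = AVURLAsset(url: sourceURL)

    // Keep only the first five seconds
    try await editor.trimVideo(
        asset: asset,
        startTime: .zero,
        endTime: CMTime(seconds: 5, preferredTimescale: 600),
        outputURL: trimmedURL
    )
    print("Trimmed video saved to \(trimmedURL)")
}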

๐Ÿ“ฑ ์ข…ํ•ฉ ์˜ˆ์ œ

MediaApp.swift — Complete media app
import SwiftUI
import AVFoundation

struct MediaApp: View {
    @State private var selectedTab = 0

    var body: some View {
        TabView(selection: $selectedTab) {
            CameraView()
                .tabItem {
                    Label("์นด๋ฉ”๋ผ", systemImage: "camera")
                }
                .tag(0)

            AudioRecorderView()
                .tabItem {
                    Label("๋…น์Œ", systemImage: "mic")
                }
                .tag(1)

            MediaLibraryView()
                .tabItem {
                    Label("๋ผ์ด๋ธŒ๋Ÿฌ๋ฆฌ", systemImage: "photo.stack")
                }
                .tag(2)
        }
    }
}

struct MediaLibraryView: View {
    var body: some View {
        NavigationStack {
            List {
                Section("๋น„๋””์˜ค") {
                    Text("์ตœ๊ทผ ์ดฌ์˜")
                }

                Section("์˜ค๋””์˜ค") {
                    Text("์ตœ๊ทผ ๋…น์Œ")
                }
            }
            .navigationTitle("Media")
        }
    }
}

๐Ÿ’ก HIG ๊ฐ€์ด๋“œ๋ผ์ธ

๐ŸŽฏ ์‹ค์ „ ํ™œ์šฉ

๐Ÿ“š ๋” ์•Œ์•„๋ณด๊ธฐ

โšก๏ธ ์„ฑ๋Šฅ ํŒ: ์นด๋ฉ”๋ผ ์„ธ์…˜์€ ๋ฐฑ๊ทธ๋ผ์šด๋“œ ์Šค๋ ˆ๋“œ์—์„œ ์‹œ์ž‘/์ค‘์ง€ํ•˜์„ธ์š”. sessionPreset์œผ๋กœ ํ•ด์ƒ๋„๋ฅผ ์กฐ์ ˆํ•˜์—ฌ ์„ฑ๋Šฅ๊ณผ ํ’ˆ์งˆ์˜ ๊ท ํ˜•์„ ๋งž์ถ”๊ณ , ๋ถˆํ•„์š”ํ•œ ์ฒ˜๋ฆฌ๋Š” videoOutput ๋ธ๋ฆฌ๊ฒŒ์ดํŠธ์—์„œ ์ƒ˜ํ”Œ ๋ฒ„ํผ๋ฅผ ๊ฑด๋„ˆ๋›ฐ์„ธ์š”.