🎬 AVFoundation
A comprehensive framework for audio/video capture, playback, and editing
iOS 4+ · Media
✨ What is AVFoundation?
AVFoundation is Apple's powerful framework for working with time-based media. It covers every stage of the media workflow, including audio/video capture, playback, editing, and export, and it sits at the core of camera apps, video editors, audio recorders, streaming services, and similar products.
💡 Key features: video/audio capture · media playback · real-time processing · video editing · export · metadata · streaming · session management
📹 1. Video Capture
Use AVCaptureSession to capture video from the camera.
CameraView.swift — Video capture
```swift
import SwiftUI
import AVFoundation

@Observable
class CameraManager: NSObject {
    let captureSession = AVCaptureSession()
    var videoOutput = AVCaptureMovieFileOutput()
    var previewLayer: AVCaptureVideoPreviewLayer?
    var isRecording = false

    func setupCamera() {
        // Configure the session atomically
        captureSession.beginConfiguration()

        // Add the video input
        guard let videoDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back),
              let videoInput = try? AVCaptureDeviceInput(device: videoDevice),
              captureSession.canAddInput(videoInput) else { return }
        captureSession.addInput(videoInput)

        // Add the audio input
        if let audioDevice = AVCaptureDevice.default(for: .audio),
           let audioInput = try? AVCaptureDeviceInput(device: audioDevice),
           captureSession.canAddInput(audioInput) {
            captureSession.addInput(audioInput)
        }

        // Add the movie file output
        if captureSession.canAddOutput(videoOutput) {
            captureSession.addOutput(videoOutput)
        }

        // Session quality preset
        captureSession.sessionPreset = .high
        captureSession.commitConfiguration()
    }

    func startSession() {
        // startRunning() blocks, so keep it off the main thread
        Task(priority: .background) {
            captureSession.startRunning()
        }
    }

    func stopSession() {
        Task(priority: .background) {
            captureSession.stopRunning()
        }
    }

    func startRecording(to url: URL) {
        videoOutput.startRecording(to: url, recordingDelegate: self)
        isRecording = true
    }

    func stopRecording() {
        videoOutput.stopRecording()
        isRecording = false
    }
}

// Recording delegate
extension CameraManager: AVCaptureFileOutputRecordingDelegate {
    func fileOutput(
        _ output: AVCaptureFileOutput,
        didFinishRecordingTo outputFileURL: URL,
        from connections: [AVCaptureConnection],
        error: Error?
    ) {
        if let error = error {
            print("Recording error: \(error.localizedDescription)")
        } else {
            print("Recording finished: \(outputFileURL)")
        }
    }
}

struct CameraView: View {
    @State private var cameraManager = CameraManager()

    var body: some View {
        ZStack {
            CameraPreview(session: cameraManager.captureSession)
                .ignoresSafeArea()

            VStack {
                Spacer()
                Button {
                    if cameraManager.isRecording {
                        cameraManager.stopRecording()
                    } else {
                        let url = FileManager.default.temporaryDirectory
                            .appendingPathComponent("video.mov")
                        cameraManager.startRecording(to: url)
                    }
                } label: {
                    Circle()
                        .fill(cameraManager.isRecording ? Color.red : Color.white)
                        .frame(width: 70, height: 70)
                        .overlay(
                            Circle()
                                .stroke(Color.white, lineWidth: 4)
                        )
                }
                .padding(.bottom, 50)
            }
        }
        .onAppear {
            cameraManager.setupCamera()
            cameraManager.startSession()
        }
        .onDisappear {
            cameraManager.stopSession()
        }
    }
}

struct CameraPreview: UIViewRepresentable {
    let session: AVCaptureSession

    func makeUIView(context: Context) -> UIView {
        let view = UIView(frame: .zero)
        let previewLayer = AVCaptureVideoPreviewLayer(session: session)
        previewLayer.videoGravity = .resizeAspectFill
        view.layer.addSublayer(previewLayer)
        DispatchQueue.main.async {
            previewLayer.frame = view.bounds
        }
        return view
    }

    func updateUIView(_ uiView: UIView, context: Context) {
        if let layer = uiView.layer.sublayers?.first as? AVCaptureVideoPreviewLayer {
            layer.frame = uiView.bounds
        }
    }
}
```
🎵 2. Audio Recording
Use AVAudioRecorder to record audio.
AudioRecorderView.swift — Audio recording
```swift
import SwiftUI
import AVFoundation

@Observable
class AudioRecorderManager: NSObject {
    var audioRecorder: AVAudioRecorder?
    var isRecording = false
    var recordingTime: TimeInterval = 0

    func setupRecorder() {
        // Configure the audio session for recording and playback
        let audioSession = AVAudioSession.sharedInstance()
        try? audioSession.setCategory(.playAndRecord, mode: .default)
        try? audioSession.setActive(true)

        // Recording settings: AAC, 44.1 kHz, stereo, high quality
        let settings: [String: Any] = [
            AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
            AVSampleRateKey: 44100.0,
            AVNumberOfChannelsKey: 2,
            AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
        ]

        let url = FileManager.default.temporaryDirectory
            .appendingPathComponent("recording.m4a")

        do {
            audioRecorder = try AVAudioRecorder(url: url, settings: settings)
            audioRecorder?.delegate = self
            audioRecorder?.prepareToRecord()
        } catch {
            print("Recorder setup error: \(error)")
        }
    }

    func startRecording() {
        audioRecorder?.record()
        isRecording = true
    }

    func stopRecording() {
        audioRecorder?.stop()
        isRecording = false
    }

    func pauseRecording() {
        audioRecorder?.pause()
        isRecording = false
    }
}

extension AudioRecorderManager: AVAudioRecorderDelegate {
    func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
        if flag {
            print("Recording finished: \(recorder.url)")
        }
    }
}

struct AudioRecorderView: View {
    @State private var recorderManager = AudioRecorderManager()

    var body: some View {
        VStack(spacing: 30) {
            Text(recorderManager.isRecording ? "Recording..." : "Ready")
                .font(.title)
                .foregroundStyle(recorderManager.isRecording ? .red : .primary)

            HStack(spacing: 40) {
                Button {
                    if recorderManager.isRecording {
                        recorderManager.stopRecording()
                    } else {
                        recorderManager.startRecording()
                    }
                } label: {
                    Image(systemName: recorderManager.isRecording ? "stop.circle.fill" : "record.circle")
                        .font(.system(size: 64))
                        .foregroundStyle(recorderManager.isRecording ? .red : .blue)
                }

                if recorderManager.isRecording {
                    Button {
                        recorderManager.pauseRecording()
                    } label: {
                        Image(systemName: "pause.circle.fill")
                            .font(.system(size: 64))
                    }
                }
            }
        }
        .onAppear {
            recorderManager.setupRecorder()
        }
    }
}
```
▶️ 3. Media Playback
Use AVPlayer to play video and audio.
VideoPlayerView.swift — Media playback
```swift
import SwiftUI
import AVFoundation

@Observable
class VideoPlayerManager {
    var player: AVPlayer?
    var isPlaying = false
    var currentTime: TimeInterval = 0
    var duration: TimeInterval = 0
    private var timeObserver: Any?

    func loadVideo(from url: URL) {
        let asset = AVAsset(url: url)
        let playerItem = AVPlayerItem(asset: asset)
        player = AVPlayer(playerItem: playerItem)

        // Observe playback time; retain the token so the observer stays registered
        timeObserver = player?.addPeriodicTimeObserver(
            forInterval: CMTime(seconds: 0.5, preferredTimescale: 600),
            queue: .main
        ) { [weak self] time in
            self?.currentTime = time.seconds
        }

        // Load the asset's duration asynchronously
        Task {
            if let duration = try? await asset.load(.duration) {
                self.duration = duration.seconds
            }
        }
    }

    func play() {
        player?.play()
        isPlaying = true
    }

    func pause() {
        player?.pause()
        isPlaying = false
    }

    func seek(to time: TimeInterval) {
        let cmTime = CMTime(seconds: time, preferredTimescale: 600)
        player?.seek(to: cmTime)
    }

    deinit {
        // Unregister the periodic time observer
        if let timeObserver {
            player?.removeTimeObserver(timeObserver)
        }
    }
}

struct VideoPlayerView: View {
    let url: URL
    @State private var playerManager = VideoPlayerManager()

    var body: some View {
        VStack {
            // Video view
            VideoPlayerLayer(player: playerManager.player)
                .frame(height: 300)
                .background(Color.black)

            // Playback controls
            VStack(spacing: 16) {
                // Progress bar
                Slider(
                    value: Binding(
                        get: { playerManager.currentTime },
                        set: { playerManager.seek(to: $0) }
                    ),
                    in: 0...max(playerManager.duration, 1)
                )

                // Time labels
                HStack {
                    Text(formatTime(playerManager.currentTime))
                    Spacer()
                    Text(formatTime(playerManager.duration))
                }
                .font(.caption)
                .foregroundStyle(.secondary)

                // Play/pause button
                Button {
                    if playerManager.isPlaying {
                        playerManager.pause()
                    } else {
                        playerManager.play()
                    }
                } label: {
                    Image(systemName: playerManager.isPlaying ? "pause.circle.fill" : "play.circle.fill")
                        .font(.system(size: 64))
                }
            }
            .padding()
        }
        .onAppear {
            playerManager.loadVideo(from: url)
        }
    }

    func formatTime(_ time: TimeInterval) -> String {
        let minutes = Int(time) / 60
        let seconds = Int(time) % 60
        return String(format: "%d:%02d", minutes, seconds)
    }
}

struct VideoPlayerLayer: UIViewRepresentable {
    let player: AVPlayer?

    func makeUIView(context: Context) -> UIView {
        let view = UIView(frame: .zero)
        let playerLayer = AVPlayerLayer(player: player)
        playerLayer.videoGravity = .resizeAspect
        view.layer.addSublayer(playerLayer)
        DispatchQueue.main.async {
            playerLayer.frame = view.bounds
        }
        return view
    }

    func updateUIView(_ uiView: UIView, context: Context) {
        if let layer = uiView.layer.sublayers?.first as? AVPlayerLayer {
            // Reattach the player, since it is created after the view appears
            layer.player = player
            layer.frame = uiView.bounds
        }
    }
}
```
✂️ 4. Video Editing
Use AVMutableComposition to edit video.
VideoEditor.swift — Video editing
```swift
import AVFoundation
import UIKit

class VideoEditor {
    // Trim a video to [startTime, endTime]
    func trimVideo(
        asset: AVAsset,
        startTime: CMTime,
        endTime: CMTime,
        outputURL: URL
    ) async throws {
        // Create the composition
        let composition = AVMutableComposition()

        // Add a video track
        guard let videoTrack = composition.addMutableTrack(
            withMediaType: .video,
            preferredTrackID: kCMPersistentTrackID_Invalid
        ) else { return }

        // Add an audio track
        guard let audioTrack = composition.addMutableTrack(
            withMediaType: .audio,
            preferredTrackID: kCMPersistentTrackID_Invalid
        ) else { return }

        // Load the source tracks
        let assetVideoTrack = try await asset.loadTracks(withMediaType: .video).first
        let assetAudioTrack = try await asset.loadTracks(withMediaType: .audio).first

        // Time range to keep
        let timeRange = CMTimeRange(start: startTime, end: endTime)

        // Insert the source tracks into the composition
        if let assetVideoTrack {
            try videoTrack.insertTimeRange(timeRange, of: assetVideoTrack, at: .zero)
        }
        if let assetAudioTrack {
            try audioTrack.insertTimeRange(timeRange, of: assetAudioTrack, at: .zero)
        }

        // Export
        try await exportVideo(composition: composition, to: outputURL)
    }

    // Merge multiple videos back to back
    func mergeVideos(
        assets: [AVAsset],
        outputURL: URL
    ) async throws {
        let composition = AVMutableComposition()

        guard let videoTrack = composition.addMutableTrack(
            withMediaType: .video,
            preferredTrackID: kCMPersistentTrackID_Invalid
        ) else { return }

        guard let audioTrack = composition.addMutableTrack(
            withMediaType: .audio,
            preferredTrackID: kCMPersistentTrackID_Invalid
        ) else { return }

        var currentTime = CMTime.zero

        for asset in assets {
            let duration = try await asset.load(.duration)
            let timeRange = CMTimeRange(start: .zero, duration: duration)

            if let assetVideoTrack = try await asset.loadTracks(withMediaType: .video).first {
                try videoTrack.insertTimeRange(timeRange, of: assetVideoTrack, at: currentTime)
            }
            if let assetAudioTrack = try await asset.loadTracks(withMediaType: .audio).first {
                try audioTrack.insertTimeRange(timeRange, of: assetAudioTrack, at: currentTime)
            }

            currentTime = CMTimeAdd(currentTime, duration)
        }

        try await exportVideo(composition: composition, to: outputURL)
    }

    // Export a composition (optionally with a video composition for overlays)
    private func exportVideo(
        composition: AVMutableComposition,
        videoComposition: AVVideoComposition? = nil,
        to url: URL
    ) async throws {
        guard let exportSession = AVAssetExportSession(
            asset: composition,
            presetName: AVAssetExportPresetHighestQuality
        ) else { return }

        exportSession.outputURL = url
        exportSession.outputFileType = .mov
        exportSession.videoComposition = videoComposition

        await exportSession.export()

        if exportSession.status == .failed {
            throw exportSession.error ?? NSError(domain: "", code: -1)
        }
    }

    // Overlay a watermark image on a video
    func addWatermark(
        to asset: AVAsset,
        watermarkImage: UIImage,
        outputURL: URL
    ) async throws {
        let composition = AVMutableComposition()
        let videoComposition = AVMutableVideoComposition()

        // Add the video track
        guard let videoTrack = composition.addMutableTrack(
            withMediaType: .video,
            preferredTrackID: kCMPersistentTrackID_Invalid
        ) else { return }

        let assetVideoTrack = try await asset.loadTracks(withMediaType: .video).first
        let duration = try await asset.load(.duration)

        if let assetVideoTrack {
            try videoTrack.insertTimeRange(
                CMTimeRange(start: .zero, duration: duration),
                of: assetVideoTrack,
                at: .zero
            )
        }

        // Watermark layer
        let watermarkLayer = CALayer()
        watermarkLayer.contents = watermarkImage.cgImage
        watermarkLayer.frame = CGRect(x: 20, y: 20, width: 100, height: 100)
        watermarkLayer.opacity = 0.7

        let videoLayer = CALayer()
        videoLayer.frame = CGRect(origin: .zero, size: CGSize(width: 1920, height: 1080))

        let outputLayer = CALayer()
        outputLayer.frame = videoLayer.frame
        outputLayer.addSublayer(videoLayer)
        outputLayer.addSublayer(watermarkLayer)

        videoComposition.renderSize = outputLayer.frame.size
        videoComposition.frameDuration = CMTime(value: 1, timescale: 30)
        videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(
            postProcessingAsVideoLayer: videoLayer,
            in: outputLayer
        )

        // A video composition needs at least one instruction spanning the timeline
        let instruction = AVMutableVideoCompositionInstruction()
        instruction.timeRange = CMTimeRange(start: .zero, duration: duration)
        instruction.layerInstructions = [
            AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
        ]
        videoComposition.instructions = [instruction]

        // Pass the video composition so the watermark is actually rendered
        try await exportVideo(composition: composition, videoComposition: videoComposition, to: outputURL)
    }
}
```
📱 Complete Example
MediaApp.swift — Complete media app
```swift
import SwiftUI
import AVFoundation

struct MediaApp: View {
    @State private var selectedTab = 0

    var body: some View {
        TabView(selection: $selectedTab) {
            CameraView()
                .tabItem { Label("Camera", systemImage: "camera") }
                .tag(0)

            AudioRecorderView()
                .tabItem { Label("Record", systemImage: "mic") }
                .tag(1)

            MediaLibraryView()
                .tabItem { Label("Library", systemImage: "photo.stack") }
                .tag(2)
        }
    }
}

struct MediaLibraryView: View {
    var body: some View {
        NavigationStack {
            List {
                Section("Videos") {
                    Text("Recent captures")
                }
                Section("Audio") {
                    Text("Recent recordings")
                }
            }
            .navigationTitle("Media")
        }
    }
}
```
💡 HIG Guidelines
- Permissions: add camera/microphone usage descriptions to Info.plist (NSCameraUsageDescription, NSMicrophoneUsageDescription) — see the sketch after this list
- Audio session: choose an appropriate audio session category
- Background: enable the audio background capability for background playback
- Performance: balance quality and performance with session presets
- Interruptions: handle session interruptions from phone calls and similar events (also covered in the sketch below)
- User experience: give clear visual feedback while recording
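The permission and interruption bullets are concrete enough to sketch. Below is a minimal example, assuming the two Info.plist keys above are already present; requestCaptureAccess and observeInterruptions are illustrative helper names, not AVFoundation API.

```swift
import AVFoundation

// Hypothetical helper: prompts for camera and microphone access.
// Assumes NSCameraUsageDescription and NSMicrophoneUsageDescription
// are already in Info.plist.
func requestCaptureAccess() async -> Bool {
    // Prompts on first call; afterwards returns the stored decision.
    let cameraGranted = await AVCaptureDevice.requestAccess(for: .video)
    let micGranted = await AVCaptureDevice.requestAccess(for: .audio)
    return cameraGranted && micGranted
}

// Hypothetical helper: observes audio-session interruptions (phone calls,
// Siri, alarms). Keep the returned token alive for as long as you want
// the observer registered.
func observeInterruptions(pause: @escaping () -> Void,
                          resume: @escaping () -> Void) -> NSObjectProtocol {
    NotificationCenter.default.addObserver(
        forName: AVAudioSession.interruptionNotification,
        object: AVAudioSession.sharedInstance(),
        queue: .main
    ) { note in
        guard let rawType = note.userInfo?[AVAudioSessionInterruptionTypeKey] as? UInt,
              let type = AVAudioSession.InterruptionType(rawValue: rawType) else { return }

        switch type {
        case .began:
            // The system has taken the audio hardware: stop recording/capture.
            pause()
        case .ended:
            // Resume only when the system indicates it is appropriate.
            if let rawOptions = note.userInfo?[AVAudioSessionInterruptionOptionKey] as? UInt,
               AVAudioSession.InterruptionOptions(rawValue: rawOptions).contains(.shouldResume) {
                resume()
            }
        @unknown default:
            break
        }
    }
}
```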
🎯 Practical Uses
- Camera apps: photo/video capture
- Video editors: trimming, merging, effects
- Voice recorders: lecture recordings, voice memos
- Social media: short-form video capture and editing
- Streaming: live broadcast apps
📚 Further Reading
⚡️ Performance tip: start and stop the camera session on a background thread. Use sessionPreset to trade resolution for performance, and skip sample buffers in the video output delegate to avoid unnecessary per-frame work (a minimal sketch follows).
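A sketch of the buffer-skipping idea, under stated assumptions: the recording example above uses AVCaptureMovieFileOutput, which has no per-frame callback, so this assumes a separate AVCaptureVideoDataOutput processing pipeline; FrameProcessor and the queue label are illustrative names.

```swift
import AVFoundation

// Per-frame delegate that processes only a fraction of the incoming frames.
final class FrameProcessor: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
    private var frameCount = 0

    func captureOutput(_ output: AVCaptureOutput,
                       didOutput sampleBuffer: CMSampleBuffer,
                       from connection: AVCaptureConnection) {
        frameCount += 1
        // Process only every third frame; skipping the rest saves CPU.
        guard frameCount % 3 == 0 else { return }
        // ... run expensive per-frame analysis on sampleBuffer here ...
    }
}

// Configure the output so frames that arrive while the delegate is busy
// are dropped instead of queued up.
func makeVideoDataOutput(delegate: AVCaptureVideoDataOutputSampleBufferDelegate) -> AVCaptureVideoDataOutput {
    let output = AVCaptureVideoDataOutput()
    output.alwaysDiscardsLateVideoFrames = true
    output.setSampleBufferDelegate(delegate, queue: DispatchQueue(label: "camera.frame.processing"))
    return output
}
```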