🎨 Core Image
Powerful image filtering and processing
iOS 5+ · Metal-accelerated
✨ What is Core Image?
Core Image is Apple's high-performance image-processing framework. It applies GPU-accelerated filters to still images and video in real time, ships with more than 200 built-in filters, and supports creating custom filters.
💡 Key features: 200+ built-in filters · GPU acceleration · real-time processing · filter chaining · face detection · custom filters · Core ML integration · Metal optimization
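Before picking individual filters, it can help to see what actually ships with the framework. The sketch below assumes nothing beyond Core Image itself: it enumerates the built-in filter names and dumps the parameter metadata of one filter (exact counts and attributes vary by OS version).

    import CoreImage

    // Enumerate every built-in filter name (the exact count varies by OS version)
    let builtInFilters = CIFilter.filterNames(inCategory: kCICategoryBuiltIn)
    print("Built-in filters: \(builtInFilters.count)")

    // Inspect one filter's parameters and their documented defaults/ranges
    if let sepia = CIFilter(name: "CISepiaTone") {
        for (key, value) in sepia.attributes {
            print(key, value)
        }
    }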
🎯 1. Applying Basic Filters
The basic pattern for applying a filter to an image.
ImageFilterManager.swift – basic filters
import CoreImage
import Observation
import UIKit

@Observable
class ImageFilterManager {
    // Reused for every render; internal (not private) so the extensions in the
    // other files below can access it
    let context = CIContext()

    // 1. Sepia filter
    func applySepia(to image: UIImage, intensity: Double = 0.8) -> UIImage? {
        guard let ciImage = CIImage(image: image) else { return nil }

        // Create the filter
        guard let filter = CIFilter(name: "CISepiaTone") else { return nil }

        // Set the input image
        filter.setValue(ciImage, forKey: kCIInputImageKey)

        // Set the filter parameter
        filter.setValue(intensity, forKey: kCIInputIntensityKey)

        // Get the output image
        guard let outputImage = filter.outputImage else { return nil }

        // CIImage → CGImage → UIImage
        guard let cgImage = context.createCGImage(outputImage, from: outputImage.extent) else { return nil }
        return UIImage(cgImage: cgImage)
    }

    // 2. Blur filter
    func applyBlur(to image: UIImage, radius: Double = 10.0) -> UIImage? {
        guard let ciImage = CIImage(image: image),
              let filter = CIFilter(name: "CIGaussianBlur") else { return nil }

        filter.setValue(ciImage, forKey: kCIInputImageKey)
        filter.setValue(radius, forKey: kCIInputRadiusKey)

        // Render using the original extent: Gaussian blur expands the image's extent
        guard let outputImage = filter.outputImage,
              let cgImage = context.createCGImage(outputImage, from: ciImage.extent) else { return nil }
        return UIImage(cgImage: cgImage)
    }

    // 3. Brightness/contrast adjustment
    func adjustBrightnessContrast(
        image: UIImage,
        brightness: Double = 0.0,
        contrast: Double = 1.0
    ) -> UIImage? {
        guard let ciImage = CIImage(image: image),
              let filter = CIFilter(name: "CIColorControls") else { return nil }

        filter.setValue(ciImage, forKey: kCIInputImageKey)
        filter.setValue(brightness, forKey: kCIInputBrightnessKey)
        filter.setValue(contrast, forKey: kCIInputContrastKey)

        guard let outputImage = filter.outputImage,
              let cgImage = context.createCGImage(outputImage, from: ciImage.extent) else { return nil }
        return UIImage(cgImage: cgImage)
    }

    // 4. Vignette effect
    func applyVignette(to image: UIImage, intensity: Double = 1.0) -> UIImage? {
        guard let ciImage = CIImage(image: image),
              let filter = CIFilter(name: "CIVignetteEffect") else { return nil }

        filter.setValue(ciImage, forKey: kCIInputImageKey)
        filter.setValue(intensity, forKey: kCIInputIntensityKey)
        filter.setValue(2.0, forKey: "inputRadius")
        // Center the vignette on the image (the filter's default center is a fixed point)
        filter.setValue(CIVector(x: ciImage.extent.midX, y: ciImage.extent.midY), forKey: kCIInputCenterKey)

        guard let outputImage = filter.outputImage,
              let cgImage = context.createCGImage(outputImage, from: ciImage.extent) else { return nil }
        return UIImage(cgImage: cgImage)
    }
}
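On iOS 13 and later, the same filters are also exposed through the strongly typed CIFilterBuiltins API, which replaces string keys with properties. A minimal sketch of the sepia example rewritten that way (applySepiaTyped is an illustrative name, not part of ImageFilterManager above):

    import CoreImage
    import CoreImage.CIFilterBuiltins
    import UIKit

    // Sepia via the typed API (iOS 13+): no string keys, so typos become compile errors
    func applySepiaTyped(to image: UIImage, intensity: Float = 0.8, context: CIContext) -> UIImage? {
        guard let ciImage = CIImage(image: image) else { return nil }

        let filter = CIFilter.sepiaTone()
        filter.inputImage = ciImage
        filter.intensity = intensity

        guard let output = filter.outputImage,
              let cgImage = context.createCGImage(output, from: output.extent) else { return nil }
        return UIImage(cgImage: cgImage)
    }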
🎨 2. Popular Filters
A collection of commonly used filters.
PopularFilters.swift – popular filters
import CoreImage
import UIKit

extension ImageFilterManager {
    // 1. Black-and-white filter
    func applyMonochrome(to image: UIImage) -> UIImage? {
        guard let ciImage = CIImage(image: image),
              let filter = CIFilter(name: "CIPhotoEffectNoir") else { return nil }

        filter.setValue(ciImage, forKey: kCIInputImageKey)

        guard let outputImage = filter.outputImage,
              let cgImage = context.createCGImage(outputImage, from: ciImage.extent) else { return nil }
        return UIImage(cgImage: cgImage)
    }

    // 2. Vivid filter (boosted saturation)
    func applyVivid(to image: UIImage) -> UIImage? {
        guard let ciImage = CIImage(image: image),
              let filter = CIFilter(name: "CIColorControls") else { return nil }

        filter.setValue(ciImage, forKey: kCIInputImageKey)
        filter.setValue(1.5, forKey: kCIInputSaturationKey)

        guard let outputImage = filter.outputImage,
              let cgImage = context.createCGImage(outputImage, from: ciImage.extent) else { return nil }
        return UIImage(cgImage: cgImage)
    }

    // 3. Pixelation
    func applyPixelate(to image: UIImage, scale: Double = 8.0) -> UIImage? {
        guard let ciImage = CIImage(image: image),
              let filter = CIFilter(name: "CIPixellate") else { return nil }

        filter.setValue(ciImage, forKey: kCIInputImageKey)
        filter.setValue(scale, forKey: kCIInputScaleKey)

        guard let outputImage = filter.outputImage,
              let cgImage = context.createCGImage(outputImage, from: ciImage.extent) else { return nil }
        return UIImage(cgImage: cgImage)
    }

    // 4. Edge detection
    func applyEdgeDetection(to image: UIImage) -> UIImage? {
        guard let ciImage = CIImage(image: image),
              let filter = CIFilter(name: "CIEdges") else { return nil }

        filter.setValue(ciImage, forKey: kCIInputImageKey)
        filter.setValue(1.0, forKey: kCIInputIntensityKey)

        guard let outputImage = filter.outputImage,
              let cgImage = context.createCGImage(outputImage, from: ciImage.extent) else { return nil }
        return UIImage(cgImage: cgImage)
    }

    // 5. Comic effect
    func applyComicEffect(to image: UIImage) -> UIImage? {
        guard let ciImage = CIImage(image: image),
              let filter = CIFilter(name: "CIComicEffect") else { return nil }

        filter.setValue(ciImage, forKey: kCIInputImageKey)

        guard let outputImage = filter.outputImage,
              let cgImage = context.createCGImage(outputImage, from: ciImage.extent) else { return nil }
        return UIImage(cgImage: cgImage)
    }
}
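Since the CIPhotoEffect* presets (Noir, Chrome, Instant, Fade, Transfer, and so on) take nothing but an input image, they can share one generic helper. A possible sketch, where applyPhotoEffect(named:to:) is a hypothetical addition to the manager:

    import CoreImage
    import UIKit

    extension ImageFilterManager {
        // Hypothetical helper: apply any parameterless CIPhotoEffect* preset by name
        // (e.g. "CIPhotoEffectChrome", "CIPhotoEffectInstant", "CIPhotoEffectFade")
        func applyPhotoEffect(named filterName: String, to image: UIImage) -> UIImage? {
            guard let ciImage = CIImage(image: image),
                  let filter = CIFilter(name: filterName) else { return nil }

            filter.setValue(ciImage, forKey: kCIInputImageKey)

            guard let outputImage = filter.outputImage,
                  let cgImage = context.createCGImage(outputImage, from: ciImage.extent) else { return nil }
            return UIImage(cgImage: cgImage)
        }
    }

Usage would then look like filterManager.applyPhotoEffect(named: "CIPhotoEffectChrome", to: photo).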
🔗 3. Filter Chaining
Chain several filters together to build more complex effects.
FilterChaining.swift – filter chaining
import CoreImage
import UIKit

extension ImageFilterManager {
    // Apply several filters in sequence
    func applyFilterChain(to image: UIImage) -> UIImage? {
        guard var ciImage = CIImage(image: image) else { return nil }

        // 1. Increase brightness
        if let brightnessFilter = CIFilter(name: "CIColorControls") {
            brightnessFilter.setValue(ciImage, forKey: kCIInputImageKey)
            brightnessFilter.setValue(0.2, forKey: kCIInputBrightnessKey)
            if let output = brightnessFilter.outputImage {
                ciImage = output
            }
        }

        // 2. Increase saturation
        if let saturationFilter = CIFilter(name: "CIColorControls") {
            saturationFilter.setValue(ciImage, forKey: kCIInputImageKey)
            saturationFilter.setValue(1.3, forKey: kCIInputSaturationKey)
            if let output = saturationFilter.outputImage {
                ciImage = output
            }
        }

        // 3. Add a vignette
        if let vignetteFilter = CIFilter(name: "CIVignetteEffect") {
            vignetteFilter.setValue(ciImage, forKey: kCIInputImageKey)
            vignetteFilter.setValue(1.5, forKey: kCIInputIntensityKey)
            if let output = vignetteFilter.outputImage {
                ciImage = output
            }
        }

        // Render the final image
        guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent) else { return nil }
        return UIImage(cgImage: cgImage)
    }

    // Instagram-style filter
    func applyInstagramStyle(to image: UIImage) -> UIImage? {
        guard var ciImage = CIImage(image: image) else { return nil }

        // Brightness/contrast/saturation adjustment
        if let filter = CIFilter(name: "CIColorControls") {
            filter.setValue(ciImage, forKey: kCIInputImageKey)
            filter.setValue(0.1, forKey: kCIInputBrightnessKey)
            filter.setValue(1.1, forKey: kCIInputContrastKey)
            filter.setValue(1.2, forKey: kCIInputSaturationKey)
            if let output = filter.outputImage {
                ciImage = output
            }
        }

        // Temperature adjustment (warmer tone)
        if let filter = CIFilter(name: "CITemperatureAndTint") {
            filter.setValue(ciImage, forKey: kCIInputImageKey)
            filter.setValue(CIVector(x: 6500, y: 0), forKey: "inputNeutral")
            filter.setValue(CIVector(x: 6800, y: 100), forKey: "inputTargetNeutral")
            if let output = filter.outputImage {
                ciImage = output
            }
        }

        guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent) else { return nil }
        return UIImage(cgImage: cgImage)
    }
}
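The same idea can be written more compactly with CIImage's applyingFilter(_:parameters:), which returns a new CIImage at each step and makes the lazy, deferred nature of Core Image explicit: nothing is rendered until createCGImage runs. A rough equivalent of applyFilterChain above, with the two color adjustments merged into a single CIColorControls pass:

    import CoreImage
    import UIKit

    // Chaining with CIImage.applyingFilter; rendering happens only at createCGImage
    func applyFilterChainCompact(to image: UIImage, context: CIContext) -> UIImage? {
        guard let input = CIImage(image: image) else { return nil }

        let output = input
            .applyingFilter("CIColorControls", parameters: [
                kCIInputBrightnessKey: 0.2,
                kCIInputSaturationKey: 1.3
            ])
            .applyingFilter("CIVignetteEffect", parameters: [
                kCIInputIntensityKey: 1.5
            ])

        guard let cgImage = context.createCGImage(output, from: input.extent) else { return nil }
        return UIImage(cgImage: cgImage)
    }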
👤 4. Face Detection
Detect faces in an image with Core Image.
FaceDetection.swift – face detection
import CoreImage
import UIKit

extension ImageFilterManager {
    // Detect faces
    func detectFaces(in image: UIImage) -> [CIFeature] {
        guard let ciImage = CIImage(image: image) else { return [] }

        // Create the face detector (accuracy is a detector-creation option)
        let detector = CIDetector(
            ofType: CIDetectorTypeFace,
            context: context,
            options: [CIDetectorAccuracy: CIDetectorAccuracyHigh]
        )

        // Detect faces; smile and eye-blink detection are per-call feature options
        let features = detector?.features(
            in: ciImage,
            options: [CIDetectorSmile: true, CIDetectorEyeBlink: true]
        ) ?? []

        // Log the detected face information
        for feature in features {
            if let faceFeature = feature as? CIFaceFeature {
                print("Face bounds: \(faceFeature.bounds)")
                if faceFeature.hasSmile {
                    print("Smiling")
                }
                if faceFeature.leftEyeClosed {
                    print("Left eye closed")
                }
                if faceFeature.rightEyeClosed {
                    print("Right eye closed")
                }
            }
        }

        return features
    }

    // Blur detected faces
    func blurFaces(in image: UIImage) -> UIImage? {
        guard let ciImage = CIImage(image: image) else { return nil }

        let faces = detectFaces(in: image)
        var outputImage = ciImage

        for feature in faces {
            guard let faceFeature = feature as? CIFaceFeature else { continue }

            // Extract the face region
            let faceBounds = faceFeature.bounds
            let faceImage = ciImage.cropped(to: faceBounds)

            // Apply the blur
            if let blurFilter = CIFilter(name: "CIGaussianBlur") {
                blurFilter.setValue(faceImage, forKey: kCIInputImageKey)
                blurFilter.setValue(20.0, forKey: kCIInputRadiusKey)

                if let blurredFace = blurFilter.outputImage {
                    // Crop back to the face rect (blur grows the extent),
                    // then composite the blurred face over the running result
                    outputImage = blurredFace
                        .cropped(to: faceBounds)
                        .composited(over: outputImage)
                }
            }
        }

        guard let cgImage = context.createCGImage(outputImage, from: ciImage.extent) else { return nil }
        return UIImage(cgImage: cgImage)
    }
}
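A mosaic version of blurFaces is a straightforward swap of CIGaussianBlur for CIPixellate. The sketch below reuses the same detection flow; pixellateFaces is a hypothetical name, and the default scale of 16 is an assumption:

    import CoreImage
    import UIKit

    extension ImageFilterManager {
        // Hypothetical mosaic variant: pixellate each detected face region
        func pixellateFaces(in image: UIImage, scale: Double = 16.0) -> UIImage? {
            guard let ciImage = CIImage(image: image) else { return nil }
            var outputImage = ciImage

            for case let face as CIFaceFeature in detectFaces(in: image) {
                let faceRegion = ciImage.cropped(to: face.bounds)
                let pixellated = faceRegion.applyingFilter("CIPixellate", parameters: [
                    kCIInputScaleKey: scale,
                    kCIInputCenterKey: CIVector(x: face.bounds.midX, y: face.bounds.midY)
                ])
                // Crop back to the face rect, then composite over the running result
                outputImage = pixellated.cropped(to: face.bounds).composited(over: outputImage)
            }

            guard let cgImage = context.createCGImage(outputImage, from: ciImage.extent) else { return nil }
            return UIImage(cgImage: cgImage)
        }
    }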
📱 SwiftUI Integration
FilterDemoView.swift – SwiftUI integration
import SwiftUI
import PhotosUI

struct FilterDemoView: View {
    @State private var selectedItem: PhotosPickerItem?
    @State private var selectedImage: UIImage?
    @State private var filteredImage: UIImage?
    @State private var selectedFilter: FilterType = .original
    @State private var isProcessing = false

    let filterManager = ImageFilterManager()

    enum FilterType: String, CaseIterable {
        case original = "Original"
        case sepia = "Sepia"
        case monochrome = "Monochrome"
        case blur = "Blur"
        case vivid = "Vivid"
        case vignette = "Vignette"
    }

    var body: some View {
        VStack {
            // Image display
            if let displayImage = filteredImage ?? selectedImage {
                Image(uiImage: displayImage)
                    .resizable()
                    .scaledToFit()
                    .frame(maxHeight: 400)
            } else {
                ContentUnavailableView(
                    "Select an image",
                    systemImage: "photo"
                )
            }

            // Filter selection
            ScrollView(.horizontal) {
                HStack(spacing: 15) {
                    ForEach(FilterType.allCases, id: \.self) { filter in
                        Button {
                            selectedFilter = filter
                            applyFilter(filter)
                        } label: {
                            Text(filter.rawValue)
                                .padding(8)
                                .background(
                                    selectedFilter == filter
                                        ? Color.blue
                                        : Color.gray.opacity(0.2)
                                )
                                .foregroundStyle(selectedFilter == filter ? .white : .primary)
                                .cornerRadius(8)
                        }
                    }
                }
                .padding()
            }

            // Photo selection
            PhotosPicker(selection: $selectedItem, matching: .images) {
                Label("Select Photo", systemImage: "photo.on.rectangle")
            }
            .buttonStyle(.borderedProminent)
            .padding()

            if isProcessing {
                ProgressView("Applying filter...")
            }
        }
        // Load the picked item into a UIImage
        .onChange(of: selectedItem) {
            guard let selectedItem else { return }
            Task {
                if let data = try? await selectedItem.loadTransferable(type: Data.self),
                   let uiImage = UIImage(data: data) {
                    selectedImage = uiImage
                    filteredImage = nil
                    selectedFilter = .original
                }
            }
        }
    }

    func applyFilter(_ filter: FilterType) {
        guard let image = selectedImage else { return }
        isProcessing = true

        Task {
            let result: UIImage?
            switch filter {
            case .original: result = image
            case .sepia: result = filterManager.applySepia(to: image)
            case .monochrome: result = filterManager.applyMonochrome(to: image)
            case .blur: result = filterManager.applyBlur(to: image)
            case .vivid: result = filterManager.applyVivid(to: image)
            case .vignette: result = filterManager.applyVignette(to: image)
            }

            await MainActor.run {
                filteredImage = result
                isProcessing = false
            }
        }
    }
}
💡 HIG Guidelines
- Performance: run filter processing on a background thread
- Context: reuse a single CIContext to improve performance
- Preview: provide real-time previews at a reduced resolution (see the sketch below)
- Memory: downsample large images before filtering
- Metal: prefer a Metal-backed CIContext
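For the preview and memory points above, a common pattern is to downsample once with CILanczosScaleTransform, run the interactive filter chain on the small image, and render the full-resolution version only on export. A minimal sketch, assuming a target preview width of 1024 pixels:

    import CoreGraphics
    import CoreImage

    // Downsample a CIImage for fast interactive previews
    // (the 1024 px target width is an assumption; tune it for your UI)
    func previewImage(from source: CIImage, targetWidth: CGFloat = 1024) -> CIImage {
        let scale = min(1.0, targetWidth / source.extent.width)
        guard scale < 1.0 else { return source }  // already small enough

        return source.applyingFilter("CILanczosScaleTransform", parameters: [
            kCIInputScaleKey: scale,
            kCIInputAspectRatioKey: 1.0
        ])
    }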
🎯 Practical Uses
- Photo editing apps: filters, adjustments, effects
- Camera apps: real-time filter previews
- Social media: Instagram-style filters
- Face detection: automatic face blurring/mosaicing
- AR filters: real-time video filtering (see the per-frame sketch below)
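For the camera and AR cases above, the same filters can be applied per frame: wrap each CVPixelBuffer from the capture pipeline in a CIImage, filter it, and render the result with a reused CIContext. A rough sketch of only the per-frame step (capture setup, delegate wiring, and buffer pooling omitted; CIPhotoEffectChrome is an arbitrary choice):

    import CoreImage
    import CoreVideo

    // Per-frame filtering step for a camera/AR pipeline.
    // `context` should be created once (ideally Metal-backed) and reused for every frame.
    func filterFrame(_ pixelBuffer: CVPixelBuffer, using context: CIContext) {
        let frame = CIImage(cvPixelBuffer: pixelBuffer)
        let filtered = frame.applyingFilter("CIPhotoEffectChrome", parameters: [:])

        // Render the result back into the same pixel buffer (in place for this sketch;
        // a real pipeline would usually render into a separate buffer from a pool)
        context.render(filtered, to: pixelBuffer)
    }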
📚 Learn More
⚡️ Performance tip: a Metal-backed CIContext uses GPU acceleration and can speed up filter processing significantly. Create it with CIContext(mtlDevice: MTLCreateSystemDefaultDevice()!).
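In practice the force unwrap is better avoided. A small sketch of a reusable, Metal-backed context that falls back to the default CIContext when no Metal device is available:

    import CoreImage
    import Metal

    // Create one Metal-backed CIContext and reuse it for all rendering.
    // Falls back to the default context if Metal is unavailable.
    let sharedCIContext: CIContext = {
        if let device = MTLCreateSystemDefaultDevice() {
            return CIContext(mtlDevice: device)
        }
        return CIContext()
    }()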