🎨 Core Image

강력한 이미지 필터와 처리

iOS 5+ · Metal 가속

✨ Core Image란?

Core Image는 Apple의 고성능 이미지 처리 프레임워크로, GPU 가속 필터를 통해 실시간 이미지/비디오 처리를 제공합니다. 200개 이상의 빌트인 필터와 커스텀 필터 생성을 지원합니다.

💡 핵심 기능: 200+ 빌트인 필터 · GPU 가속 · 실시간 처리 · 필터 체이닝 · 얼굴 감지 · 커스텀 필터 · Core ML 통합 · Metal 최적화

🎯 1. 기본 필터 적용

이미지에 필터를 적용하는 기본 방법입니다.

ImageFilterManager.swift — 기본 필터
import CoreImage
import UIKit

@Observable
class ImageFilterManager {
    // Intentionally `internal` (not `private`): the filter extensions in
    // PopularFilters.swift / FilterChaining.swift / FaceDetection.swift live
    // in separate files and need access. A CIContext is expensive to create,
    // so one shared instance is reused for every render.
    let context = CIContext()

    // MARK: - Rendering

    /// Renders a filtered `CIImage` back into a `UIImage`.
    ///
    /// - Parameters:
    ///   - ciImage: The filter output to rasterize.
    ///   - extent: The region to render (normally the input image's extent).
    ///   - original: Source image whose scale/orientation must be preserved.
    /// - Returns: The rendered image, or `nil` if rendering fails.
    /// - Important: `UIImage(cgImage:)` alone resets scale to 1 and
    ///   orientation to `.up`, which rotates photos shot in portrait — so
    ///   both are copied from the original.
    private func render(_ ciImage: CIImage, over extent: CGRect, like original: UIImage) -> UIImage? {
        guard let cgImage = context.createCGImage(ciImage, from: extent) else { return nil }
        return UIImage(cgImage: cgImage, scale: original.scale, orientation: original.imageOrientation)
    }

    // MARK: - 1. Sepia

    /// Applies `CISepiaTone`.
    /// - Parameter intensity: 0.0 (no effect) ... 1.0 (full sepia). Default 0.8.
    func applySepia(to image: UIImage, intensity: Double = 0.8) -> UIImage? {
        guard let ciImage = CIImage(image: image),
              let filter = CIFilter(name: "CISepiaTone") else { return nil }

        filter.setValue(ciImage, forKey: kCIInputImageKey)
        filter.setValue(intensity, forKey: kCIInputIntensityKey)

        guard let outputImage = filter.outputImage else { return nil }
        // Sepia keeps the extent unchanged; cropping to the input extent
        // keeps every filter in this class consistent.
        return render(outputImage, over: ciImage.extent, like: image)
    }

    // MARK: - 2. Gaussian blur

    /// Applies `CIGaussianBlur`.
    /// - Parameter radius: Blur radius in pixels. Default 10.
    func applyBlur(to image: UIImage, radius: Double = 10.0) -> UIImage? {
        guard let ciImage = CIImage(image: image),
              let filter = CIFilter(name: "CIGaussianBlur") else { return nil }

        // Clamp first so the blur doesn't sample transparent pixels outside
        // the image, which would darken/fade the borders.
        filter.setValue(ciImage.clampedToExtent(), forKey: kCIInputImageKey)
        filter.setValue(radius, forKey: kCIInputRadiusKey)

        guard let outputImage = filter.outputImage else { return nil }
        // Blur expands the extent — crop back to the original bounds.
        return render(outputImage, over: ciImage.extent, like: image)
    }

    // MARK: - 3. Brightness / contrast

    /// Adjusts brightness and contrast via `CIColorControls`.
    /// - Parameters:
    ///   - brightness: Additive offset; 0.0 leaves the image unchanged.
    ///   - contrast: Multiplier; 1.0 leaves the image unchanged.
    func adjustBrightnessContrast(
        image: UIImage,
        brightness: Double = 0.0,
        contrast: Double = 1.0
    ) -> UIImage? {
        guard let ciImage = CIImage(image: image),
              let filter = CIFilter(name: "CIColorControls") else { return nil }

        filter.setValue(ciImage, forKey: kCIInputImageKey)
        filter.setValue(brightness, forKey: kCIInputBrightnessKey)
        filter.setValue(contrast, forKey: kCIInputContrastKey)

        guard let outputImage = filter.outputImage else { return nil }
        return render(outputImage, over: ciImage.extent, like: image)
    }

    // MARK: - 4. Vignette

    /// Applies `CIVignetteEffect`, darkening toward the image edges.
    /// - Parameter intensity: Effect strength. Default 1.0.
    func applyVignette(to image: UIImage, intensity: Double = 1.0) -> UIImage? {
        guard let ciImage = CIImage(image: image),
              let filter = CIFilter(name: "CIVignetteEffect") else { return nil }

        filter.setValue(ciImage, forKey: kCIInputImageKey)
        filter.setValue(intensity, forKey: kCIInputIntensityKey)
        // The filter's defaults (center [150,150], small radius) only suit
        // tiny images — center the effect and scale the radius to this image.
        let extent = ciImage.extent
        filter.setValue(CIVector(x: extent.midX, y: extent.midY), forKey: kCIInputCenterKey)
        filter.setValue(min(extent.width, extent.height) / 2, forKey: kCIInputRadiusKey)

        guard let outputImage = filter.outputImage else { return nil }
        return render(outputImage, over: ciImage.extent, like: image)
    }
}

🎨 2. 인기 필터 모음

자주 사용되는 필터들을 모았습니다.

PopularFilters.swift — 인기 필터
import CoreImage

extension ImageFilterManager {
    // MARK: - Shared rendering

    /// Renders `ciImage` cropped to `extent` into a `UIImage`, copying the
    /// source image's scale and orientation (plain `UIImage(cgImage:)` would
    /// reset both and rotate portrait photos).
    private func makeUIImage(from ciImage: CIImage, cropTo extent: CGRect, like original: UIImage) -> UIImage? {
        guard let cgImage = context.createCGImage(ciImage, from: extent) else { return nil }
        return UIImage(cgImage: cgImage, scale: original.scale, orientation: original.imageOrientation)
    }

    // MARK: - 1. Monochrome

    /// Black & white look via `CIPhotoEffectNoir` (no tunable parameters).
    func applyMonochrome(to image: UIImage) -> UIImage? {
        guard let ciImage = CIImage(image: image),
              let filter = CIFilter(name: "CIPhotoEffectNoir") else { return nil }

        filter.setValue(ciImage, forKey: kCIInputImageKey)

        guard let outputImage = filter.outputImage else { return nil }
        return makeUIImage(from: outputImage, cropTo: ciImage.extent, like: image)
    }

    // MARK: - 2. Vivid (saturation boost)

    /// Boosts saturation to 1.5x via `CIColorControls`.
    func applyVivid(to image: UIImage) -> UIImage? {
        guard let ciImage = CIImage(image: image),
              let filter = CIFilter(name: "CIColorControls") else { return nil }

        filter.setValue(ciImage, forKey: kCIInputImageKey)
        filter.setValue(1.5, forKey: kCIInputSaturationKey)

        guard let outputImage = filter.outputImage else { return nil }
        return makeUIImage(from: outputImage, cropTo: ciImage.extent, like: image)
    }

    // MARK: - 3. Pixelate

    /// Pixelates the image with `CIPixellate`.
    /// - Parameter scale: Size of each pixel block, in pixels. Default 8.
    func applyPixelate(to image: UIImage, scale: Double = 8.0) -> UIImage? {
        guard let ciImage = CIImage(image: image),
              let filter = CIFilter(name: "CIPixellate") else { return nil }

        filter.setValue(ciImage, forKey: kCIInputImageKey)
        filter.setValue(scale, forKey: kCIInputScaleKey)

        guard let outputImage = filter.outputImage else { return nil }
        // Crop to the input extent: pixellation can round the output extent
        // up to whole blocks.
        return makeUIImage(from: outputImage, cropTo: ciImage.extent, like: image)
    }

    // MARK: - 4. Edge detection

    /// Highlights edges via `CIEdges`.
    func applyEdgeDetection(to image: UIImage) -> UIImage? {
        guard let ciImage = CIImage(image: image),
              let filter = CIFilter(name: "CIEdges") else { return nil }

        filter.setValue(ciImage, forKey: kCIInputImageKey)
        filter.setValue(1.0, forKey: kCIInputIntensityKey)

        guard let outputImage = filter.outputImage else { return nil }
        return makeUIImage(from: outputImage, cropTo: ciImage.extent, like: image)
    }

    // MARK: - 5. Comic effect

    /// Cartoon / halftone look via `CIComicEffect` (no tunable parameters).
    func applyComicEffect(to image: UIImage) -> UIImage? {
        guard let ciImage = CIImage(image: image),
              let filter = CIFilter(name: "CIComicEffect") else { return nil }

        filter.setValue(ciImage, forKey: kCIInputImageKey)

        guard let outputImage = filter.outputImage else { return nil }
        return makeUIImage(from: outputImage, cropTo: ciImage.extent, like: image)
    }
}

🔗 3. 필터 체이닝

여러 필터를 연결하여 복잡한 효과를 만듭니다.

FilterChaining.swift — 필터 체이닝
import CoreImage

extension ImageFilterManager {
    /// Applies brightness → saturation → vignette in sequence.
    ///
    /// Each step feeds its output into the next. A `CIImage` is only a recipe,
    /// so nothing is actually rendered until `createCGImage` at the end.
    func applyFilterChain(to image: UIImage) -> UIImage? {
        guard var ciImage = CIImage(image: image) else { return nil }

        // 1. Raise brightness slightly.
        if let brightnessFilter = CIFilter(name: "CIColorControls") {
            brightnessFilter.setValue(ciImage, forKey: kCIInputImageKey)
            brightnessFilter.setValue(0.2, forKey: kCIInputBrightnessKey)
            if let output = brightnessFilter.outputImage {
                ciImage = output
            }
        }

        // 2. Boost saturation.
        if let saturationFilter = CIFilter(name: "CIColorControls") {
            saturationFilter.setValue(ciImage, forKey: kCIInputImageKey)
            saturationFilter.setValue(1.3, forKey: kCIInputSaturationKey)
            if let output = saturationFilter.outputImage {
                ciImage = output
            }
        }

        // 3. Add a vignette.
        if let vignetteFilter = CIFilter(name: "CIVignetteEffect") {
            vignetteFilter.setValue(ciImage, forKey: kCIInputImageKey)
            vignetteFilter.setValue(1.5, forKey: kCIInputIntensityKey)
            if let output = vignetteFilter.outputImage {
                ciImage = output
            }
        }

        // Render once, preserving the source photo's scale and orientation —
        // `UIImage(cgImage:)` alone would reset both and rotate portrait shots.
        guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent) else {
            return nil
        }
        return UIImage(cgImage: cgImage, scale: image.scale, orientation: image.imageOrientation)
    }

    /// Warm, saturated "Instagram-style" look: color controls + temperature shift.
    func applyInstagramStyle(to image: UIImage) -> UIImage? {
        guard var ciImage = CIImage(image: image) else { return nil }

        // Brightness / contrast / saturation in one CIColorControls pass.
        if let filter = CIFilter(name: "CIColorControls") {
            filter.setValue(ciImage, forKey: kCIInputImageKey)
            filter.setValue(0.1, forKey: kCIInputBrightnessKey)
            filter.setValue(1.1, forKey: kCIInputContrastKey)
            filter.setValue(1.2, forKey: kCIInputSaturationKey)
            if let output = filter.outputImage {
                ciImage = output
            }
        }

        // Temperature shift: remap the 6500K neutral toward (6800K, tint 100).
        // NOTE(review): confirm on-device that this pair yields the intended
        // warm tone — adjust neutral/target if the shift reads cool.
        if let filter = CIFilter(name: "CITemperatureAndTint") {
            filter.setValue(ciImage, forKey: kCIInputImageKey)
            filter.setValue(CIVector(x: 6500, y: 0), forKey: "inputNeutral")
            filter.setValue(CIVector(x: 6800, y: 100), forKey: "inputTargetNeutral")
            if let output = filter.outputImage {
                ciImage = output
            }
        }

        guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent) else {
            return nil
        }
        // Keep the original's scale and orientation.
        return UIImage(cgImage: cgImage, scale: image.scale, orientation: image.imageOrientation)
    }
}

😊 4. 얼굴 감지

Core Image로 이미지 속 얼굴을 감지합니다.

FaceDetection.swift — 얼굴 감지
import CoreImage

extension ImageFilterManager {
    /// Detects faces and logs smile / eye-blink state.
    ///
    /// - Returns: The detected `CIFeature`s (empty when none, or on failure).
    /// - Note: Feature bounds are in Core Image coordinates (origin at the
    ///   bottom-left), not UIKit coordinates.
    func detectFaces(in image: UIImage) -> [CIFeature] {
        guard let ciImage = CIImage(image: image) else { return [] }

        // Only accuracy-style keys belong in the detector-creation options.
        let detector = CIDetector(
            ofType: CIDetectorTypeFace,
            context: context,
            options: [CIDetectorAccuracy: CIDetectorAccuracyHigh]
        )

        // Smile / eye-blink classification must be requested per call via
        // `features(in:options:)` — passing those keys at creation time
        // (as the previous version did) is silently ignored.
        let features = detector?.features(
            in: ciImage,
            options: [CIDetectorSmile: true, CIDetectorEyeBlink: true]
        ) ?? []

        for feature in features {
            if let faceFeature = feature as? CIFaceFeature {
                print("얼굴 위치: \(faceFeature.bounds)")

                if faceFeature.hasSmile {
                    print("웃고 있음")
                }

                if faceFeature.leftEyeClosed {
                    print("왼쪽 눈 감음")
                }

                if faceFeature.rightEyeClosed {
                    print("오른쪽 눈 감음")
                }
            }
        }

        return features
    }

    /// Blurs every detected face region, leaving the rest of the image sharp.
    func blurFaces(in image: UIImage) -> UIImage? {
        guard let ciImage = CIImage(image: image) else { return nil }

        let faces = detectFaces(in: image)
        var outputImage = ciImage

        for feature in faces {
            guard let faceFeature = feature as? CIFaceFeature else { continue }

            // Face rect in Core Image (bottom-left origin) coordinates.
            let faceBounds = faceFeature.bounds
            // Clamp so the blur at the crop border doesn't pull in transparency.
            let faceImage = ciImage.cropped(to: faceBounds).clampedToExtent()

            if let blurFilter = CIFilter(name: "CIGaussianBlur") {
                blurFilter.setValue(faceImage, forKey: kCIInputImageKey)
                blurFilter.setValue(20.0, forKey: kCIInputRadiusKey)

                if let blurredFace = blurFilter.outputImage {
                    // Crop back to the face rect before compositing — the blur
                    // expands the extent, and compositing the uncropped result
                    // (as before) would cover the entire image, not just the face.
                    outputImage = blurredFace
                        .cropped(to: faceBounds)
                        .composited(over: outputImage)
                }
            }
        }

        guard let cgImage = context.createCGImage(outputImage, from: ciImage.extent) else {
            return nil
        }
        // Preserve the source photo's scale and orientation.
        return UIImage(cgImage: cgImage, scale: image.scale, orientation: image.imageOrientation)
    }
}

📱 SwiftUI 통합

FilterDemoView.swift — SwiftUI 통합
import SwiftUI
import PhotosUI

struct FilterDemoView: View {
    // MARK: - State

    @State private var selectedItem: PhotosPickerItem?
    @State private var selectedImage: UIImage?
    @State private var filteredImage: UIImage?
    @State private var selectedFilter: FilterType = .original
    @State private var isProcessing = false

    let filterManager = ImageFilterManager()

    /// Filters offered by the horizontal picker; raw values are display names.
    enum FilterType: String, CaseIterable {
        case original = "원본"
        case sepia = "세피아"
        case monochrome = "흑백"
        case blur = "블러"
        case vivid = "비비드"
        case vignette = "비네팅"
    }

    var body: some View {
        VStack {
            // The filtered result wins over the untouched selection.
            if let displayImage = filteredImage ?? selectedImage {
                Image(uiImage: displayImage)
                    .resizable()
                    .scaledToFit()
                    .frame(maxHeight: 400)
            } else {
                ContentUnavailableView(
                    "이미지를 선택하세요",
                    systemImage: "photo"
                )
            }

            // Horizontal filter chooser.
            ScrollView(.horizontal) {
                HStack(spacing: 15) {
                    ForEach(FilterType.allCases, id: \.self) { filter in
                        Button {
                            selectedFilter = filter
                            applyFilter(filter)
                        } label: {
                            Text(filter.rawValue)
                                .padding(8)
                                .background(
                                    selectedFilter == filter ? Color.blue : Color.gray.opacity(0.2)
                                )
                                .foregroundStyle(selectedFilter == filter ? .white : .primary)
                                .cornerRadius(8)
                        }
                    }
                }
                .padding()
            }

            // Photo selection. The previous `.constant(nil)` binding discarded
            // the user's pick entirely — bind real state and load it on change.
            PhotosPicker(selection: $selectedItem, matching: .images) {
                Label("사진 선택", systemImage: "photo.on.rectangle")
            }
            .buttonStyle(.borderedProminent)
            .padding()

            if isProcessing {
                ProgressView("필터 적용 중...")
            }
        }
        .onChange(of: selectedItem) { _, newItem in
            Task {
                // Load the picked item's data and reset the filter state.
                guard let data = try? await newItem?.loadTransferable(type: Data.self),
                      let image = UIImage(data: data) else { return }
                selectedImage = image
                filteredImage = nil
                selectedFilter = .original
            }
        }
    }

    /// Applies `filter` to the currently selected image and publishes the result.
    func applyFilter(_ filter: FilterType) {
        guard let image = selectedImage else { return }

        isProcessing = true

        // NOTE(review): this Task inherits the view's MainActor context, so the
        // filter work still runs on the main thread; move it to a background
        // executor if large images cause UI hitches.
        Task {
            let result: UIImage?

            switch filter {
            case .original:
                result = image
            case .sepia:
                result = filterManager.applySepia(to: image)
            case .monochrome:
                result = filterManager.applyMonochrome(to: image)
            case .blur:
                result = filterManager.applyBlur(to: image)
            case .vivid:
                result = filterManager.applyVivid(to: image)
            case .vignette:
                result = filterManager.applyVignette(to: image)
            }

            await MainActor.run {
                filteredImage = result
                isProcessing = false
            }
        }
    }
}

💡 HIG 가이드라인

🎯 실전 활용

📚 더 알아보기

⚡️ 성능 팁: Metal 기반 CIContext를 사용하면 GPU 가속으로 필터 처리 속도가 크게 향상됩니다. CIContext(mtlDevice: MTLCreateSystemDefaultDevice()!)로 생성하세요.