๐ŸŒ KO

🥽 ARKit

⭐ Difficulty: ⭐⭐⭐⭐ ⏱️ Est. Time: 3-4h 📂 Graphics & Media

Blending the real world and digital content with augmented reality

iOS 11+ · visionOS Supported

✨ What is ARKit?

ARKit is Apple's augmented reality framework. It uses the device's cameras and motion sensors to place virtual objects in the real world and let users interact with them, and it powers AR experiences with plane detection, face tracking, image recognition, and spatial audio.

💡 Key Features: World Tracking · Plane Detection · Face Tracking · Image/Object Recognition · LiDAR · Spatial Audio · Multi-User AR · RealityKit Integration
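
Feature support varies by device, so it is worth checking capabilities at runtime before launching an AR experience. Below is a minimal sketch using ARKit's built-in support checks; the reportCapabilities() helper name is just for illustration.

import ARKit

// Minimal sketch: query which ARKit capabilities this device supports.
// The helper name is illustrative; the support checks themselves are ARKit API.
func reportCapabilities() {
    // World tracking with the rear camera
    print("World tracking:", ARWorldTrackingConfiguration.isSupported)

    // Face tracking (requires a supported front camera)
    print("Face tracking:", ARFaceTrackingConfiguration.isSupported)

    // LiDAR-based scene reconstruction (iPhone 12 Pro and later)
    print("Scene reconstruction:", ARWorldTrackingConfiguration.supportsSceneReconstruction(.mesh))

    // People occlusion using depth information
    print("People occlusion:", ARWorldTrackingConfiguration.supportsFrameSemantics(.personSegmentationWithDepth))
}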

🎯 1. AR Session Setup

Set up a basic configuration and run an ARSession.

ARViewController.swift – Start AR Session
import ARKit
import RealityKit

class ARViewController: UIViewController {
    var arView: ARView!

    override func viewDidLoad() {
        super.viewDidLoad()

        // Create the ARView
        arView = ARView(frame: view.bounds)
        view.addSubview(arView)

        // Set up the AR session
        setupARSession()
    }

    func setupARSession() {
        // World Tracking Configuration
        let configuration = ARWorldTrackingConfiguration()

        // Plane detection (horizontal and vertical)
        configuration.planeDetection = [.horizontal, .vertical]

        // Environment texturing (realistic reflections)
        configuration.environmentTexturing = .automatic

        // Use LiDAR scene reconstruction (iPhone 12 Pro and later)
        if ARWorldTrackingConfiguration.supportsSceneReconstruction(.mesh) {
            configuration.sceneReconstruction = .mesh
        }

        // Run the session
        arView.session.run(configuration)
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        arView.session.pause()
    }
}
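
A production app should also react when the session is interrupted (a phone call, backgrounding) or fails outright. The sketch below is one way to do that, assuming arView.session.delegate is set to this view controller (setupPlaneDetection() in section 3 does exactly that); these callbacks come from ARSessionObserver.

extension ARViewController {
    // Called when the session cannot continue (e.g. camera access revoked)
    func session(_ session: ARSession, didFailWithError error: Error) {
        print("AR session failed: \(error.localizedDescription)")
    }

    // The camera feed is unavailable (e.g. the app moved to the background)
    func sessionWasInterrupted(_ session: ARSession) {
        print("AR session interrupted")
    }

    // Resume with a fresh tracking state once the interruption ends
    func sessionInterruptionEnded(_ session: ARSession) {
        let configuration = ARWorldTrackingConfiguration()
        configuration.planeDetection = [.horizontal, .vertical]
        arView.session.run(configuration, options: [.resetTracking, .removeExistingAnchors])
    }
}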

🪑 2. Placing 3D Objects

Place a 3D model in AR space.

ARObjectPlacement.swift – Object Placement
import RealityKit

extension ARViewController {
    // Place an object at the tapped location
    func placeObject(at location: CGPoint) {
        // Find the real-world position with a raycast
        guard let raycastResult = arView.raycast(
            from: location,
            allowing: .estimatedPlane,
            alignment: .horizontal
        ).first else { return }

        // Load the 3D model
        Task {
            do {
                let entity = try await ModelEntity(named: "chair.usdz")

                // Create an anchor and attach the model
                let anchor = AnchorEntity(world: raycastResult.worldTransform)
                anchor.addChild(entity)

                // Add to the scene
                arView.scene.addAnchor(anchor)

                // Enable collision shapes (required for gestures)
                entity.generateCollisionShapes(recursive: true)
                arView.installGestures(for: entity)
            } catch {
                print("๋ชจ๋ธ ๋กœ๋“œ ์‹คํŒจ: \(error)")
            }
        }
    }

    // Create a custom box
    func createBox(at position: SIMD3<Float>) {
        // Generate the mesh
        let mesh = MeshResource.generateBox(size: 0.1)

        // Set up the material
        var material = SimpleMaterial()
        material.color = .init(tint: .blue, texture: nil)

        // Create the entity
        let entity = ModelEntity(mesh: mesh, materials: [material])

        // Attach to an anchor
        let anchor = AnchorEntity(world: position)
        anchor.addChild(entity)
        arView.scene.addAnchor(anchor)
    }
}
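
placeObject(at:) still needs a trigger. Below is a minimal sketch that wires it to a tap gesture; setupTapGesture() is an illustrative helper you would call from viewDidLoad().

extension ARViewController {
    func setupTapGesture() {
        let tap = UITapGestureRecognizer(target: self, action: #selector(handleSceneTap(_:)))
        arView.addGestureRecognizer(tap)
    }

    @objc func handleSceneTap(_ sender: UITapGestureRecognizer) {
        // Convert the tap into a view coordinate and try to place the model there
        let location = sender.location(in: arView)
        placeObject(at: location)
    }
}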

📐 3. Plane Detection

Automatically detect flat surfaces such as floors and walls.

ARPlaneDetection.swift – Plane Detection
import ARKit

extension ARViewController: ARSessionDelegate {
    func setupPlaneDetection() {
        arView.session.delegate = self
    }

    // New anchors added (plane detected)
    func session(_ session: ARSession, didAdd anchors: [ARAnchor]) {
        for anchor in anchors {
            if let planeAnchor = anchor as? ARPlaneAnchor {
                handlePlaneDetection(planeAnchor)
            }
        }
    }

    // Anchors updated (plane extended)
    func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
        for anchor in anchors {
            if let planeAnchor = anchor as? ARPlaneAnchor {
                updatePlaneVisualization(planeAnchor)
            }
        }
    }

    func handlePlaneDetection(_ planeAnchor: ARPlaneAnchor) {
        // Plane size
        let width = planeAnchor.planeExtent.width
        let height = planeAnchor.planeExtent.height

        // Plane alignment
        let alignment = planeAnchor.alignment
        print("ํ‰๋ฉด ๊ฐ์ง€: \(alignment == .horizontal ? "์ˆ˜ํ‰" : "์ˆ˜์ง") \(width)x\(height)m")

        // Visualize the plane (optional)
        let planeMesh = MeshResource.generatePlane(width: width, depth: height)
        var material = SimpleMaterial()
        material.color = .init(tint: .white.withAlphaComponent(0.3), texture: nil)

        let planeEntity = ModelEntity(mesh: planeMesh, materials: [material])
        let anchorEntity = AnchorEntity(anchor: planeAnchor)
        anchorEntity.addChild(planeEntity)
        arView.scene.addAnchor(anchorEntity)
    }

    func updatePlaneVisualization(_ planeAnchor: ARPlaneAnchor) {
        // Handle plane growth/updates (see the sketch after this listing)
    }
}
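
The updatePlaneVisualization(_:) stub above could be filled in along the following lines. This sketch assumes ARViewController itself declares a planeEntities dictionary (extensions cannot add stored properties) and that handlePlaneDetection registers each plane entity under its anchor's identifier.

// Assumes: var planeEntities: [UUID: ModelEntity] = [:] declared on ARViewController,
// populated in handlePlaneDetection via planeEntities[planeAnchor.identifier] = planeEntity.
func updatePlaneVisualization(_ planeAnchor: ARPlaneAnchor) {
    guard let planeEntity = planeEntities[planeAnchor.identifier] else { return }

    // Regenerate the mesh to match the plane's new extent
    planeEntity.model?.mesh = MeshResource.generatePlane(
        width: planeAnchor.planeExtent.width,
        depth: planeAnchor.planeExtent.height
    )

    // Keep the visualization centered on the plane
    planeEntity.position = planeAnchor.center
}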

😊 4. Face Tracking

Track facial expressions and movement with the TrueDepth camera.

ARFaceTracking.swift – Face Tracking
import ARKit

class FaceTrackingViewController: UIViewController {
    var arView: ARView!

    override func viewDidLoad() {
        super.viewDidLoad()

        arView = ARView(frame: view.bounds)
        view.addSubview(arView)

        // Configure face tracking
        guard ARFaceTrackingConfiguration.isSupported else {
            print("Face Tracking ๋ฏธ์ง€์›")
            return
        }

        let configuration = ARFaceTrackingConfiguration()
        configuration.maximumNumberOfTrackedFaces = 1
        arView.session.run(configuration)

        arView.session.delegate = self
    }
}

extension FaceTrackingViewController: ARSessionDelegate {
    func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
        for anchor in anchors {
            guard let faceAnchor = anchor as? ARFaceAnchor else { continue }

            // Facial blend shapes (expressions)
            let blendShapes = faceAnchor.blendShapes

            // Eye blink
            if let eyeBlinkLeft = blendShapes[.eyeBlinkLeft]?.floatValue,
               let eyeBlinkRight = blendShapes[.eyeBlinkRight]?.floatValue {
                print("๋ˆˆ ๊นœ๋นก์ž„: L=\(eyeBlinkLeft) R=\(eyeBlinkRight)")
            }

            // Smile
            if let smile = blendShapes[.mouthSmileLeft]?.floatValue {
                print("๋ฏธ์†Œ: \(smile)")
            }

            // Jaw open
            if let jawOpen = blendShapes[.jawOpen]?.floatValue {
                print("์ž… ๋ฒŒ๋ฆผ: \(jawOpen)")
            }

            // Face position and rotation
            let transform = faceAnchor.transform
            print("์–ผ๊ตด ์œ„์น˜: \(transform.columns.3)")
        }
    }
}
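
Blend shape values range from 0.0 (neutral) to 1.0 (fully expressed), so simple expression detection is just a matter of thresholding. A minimal sketch; the 0.5 threshold and the helper name are illustrative choices.

// Derive simple expression flags from blend shape values (0.0...1.0).
func detectExpressions(in faceAnchor: ARFaceAnchor) {
    let shapes = faceAnchor.blendShapes

    let isSmiling = (shapes[.mouthSmileLeft]?.floatValue ?? 0) > 0.5 &&
                    (shapes[.mouthSmileRight]?.floatValue ?? 0) > 0.5

    // Left eye closed while the right eye stays open
    let isWinking = (shapes[.eyeBlinkLeft]?.floatValue ?? 0) > 0.5 &&
                    (shapes[.eyeBlinkRight]?.floatValue ?? 0) < 0.1

    if isSmiling { print("User is smiling") }
    if isWinking { print("User is winking") }
}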

🖼️ 5. Image Tracking

Recognize specific images and overlay AR content.

ARImageTracking.swift – Image Tracking
import ARKit

extension ARViewController {
    func setupImageTracking() {
        // Load the AR reference images
        guard let referenceImages = ARReferenceImage.referenceImages(
            inGroupNamed: "AR Resources",
            bundle: nil
        ) else {
            print("์ด๋ฏธ์ง€ ์—์…‹ ์—†์Œ")
            return
        }

        let configuration = ARWorldTrackingConfiguration()
        configuration.detectionImages = referenceImages
        configuration.maximumNumberOfTrackedImages = 2

        arView.session.run(configuration)
    }

    // Handle newly detected images.
    // Note: session(_:didAdd:) is already implemented in the plane-detection
    // extension, so call this helper from there for ARImageAnchor instances
    // (redeclaring the same delegate method would not compile).
    func handleDetectedImages(_ anchors: [ARAnchor]) {
        for anchor in anchors {
            guard let imageAnchor = anchor as? ARImageAnchor else { continue }

            // Information about the detected image
            let referenceImage = imageAnchor.referenceImage
            let imageName = referenceImage.name ?? "unknown"

            print("์ด๋ฏธ์ง€ ๊ฐ์ง€: \(imageName)")

            // Place 3D content on top of the image
            placeContentOnImage(imageAnchor)
        }
    }

    func placeContentOnImage(_ imageAnchor: ARImageAnchor) {
        // Physical size of the image
        let imageSize = imageAnchor.referenceImage.physicalSize

        // Create a plane over the image
        let plane = MeshResource.generatePlane(
            width: Float(imageSize.width),
            depth: Float(imageSize.height)
        )

        var material = SimpleMaterial()
        material.color = .init(tint: .green.withAlphaComponent(0.5), texture: nil)

        let planeEntity = ModelEntity(mesh: plane, materials: [material])

        let anchorEntity = AnchorEntity(anchor: imageAnchor)
        anchorEntity.addChild(planeEntity)
        arView.scene.addAnchor(anchorEntity)
    }
}
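
If the experience only needs to follow the images themselves, ARImageTrackingConfiguration is a lighter-weight alternative to world tracking with detection images: it skips world tracking entirely and usually follows moving images more smoothly. A minimal sketch of that variant, which could live in the same extension:

// Image-only tracking as an alternative to detectionImages on world tracking.
func setupImageOnlyTracking() {
    guard let referenceImages = ARReferenceImage.referenceImages(
        inGroupNamed: "AR Resources",
        bundle: nil
    ) else { return }

    let configuration = ARImageTrackingConfiguration()
    configuration.trackingImages = referenceImages
    configuration.maximumNumberOfTrackedImages = 2

    arView.session.run(configuration)
}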

📱 SwiftUI Integration

ARDemoView.swift – SwiftUI Wrapper
import SwiftUI
import ARKit
import RealityKit

struct ARViewContainer: UIViewRepresentable {
    @Binding var placementEnabled: Bool

    func makeUIView(context: Context) -> ARView {
        let arView = ARView(frame: .zero)

        // Set up the AR session
        let config = ARWorldTrackingConfiguration()
        config.planeDetection = [.horizontal]
        arView.session.run(config)

        // Add a tap gesture
        let tapGesture = UITapGestureRecognizer(
            target: context.coordinator,
            action: #selector(Coordinator.handleTap)
        )
        arView.addGestureRecognizer(tapGesture)

        context.coordinator.arView = arView

        return arView
    }

    func updateUIView(_ uiView: ARView, context: Context) {
        // The coordinator reads placementEnabled through its binding,
        // so writing it back here (a state write during a view update) is unnecessary.
    }

    func makeCoordinator() -> Coordinator {
        Coordinator(placementEnabled: $placementEnabled)
    }

    class Coordinator: NSObject {
        var arView: ARView?
        @Binding var placementEnabled: Bool

        init(placementEnabled: Binding<Bool>) {
            _placementEnabled = placementEnabled
        }

        @objc func handleTap(_ sender: UITapGestureRecognizer) {
            guard placementEnabled, let arView else { return }

            let location = sender.location(in: arView)

            // Raycast
            if let result = arView.raycast(from: location, allowing: .estimatedPlane, alignment: .horizontal).first {
                // Create a box
                let mesh = MeshResource.generateBox(size: 0.1)
                let material = SimpleMaterial(color: .blue, isMetallic: false)
                let entity = ModelEntity(mesh: mesh, materials: [material])

                let anchor = AnchorEntity(world: result.worldTransform)
                anchor.addChild(entity)
                arView.scene.addAnchor(anchor)
            }
        }
    }
}

struct ARDemoView: View {
    @State private var placementEnabled = false

    var body: some View {
        ZStack {
            ARViewContainer(placementEnabled: $placementEnabled)
                .ignoresSafeArea()

            VStack {
                Spacer()

                Button {
                    placementEnabled.toggle()
                } label: {
                    Label(
                        placementEnabled ? "Stop Placing" : "Start Placing",
                        systemImage: placementEnabled ? "stop.circle" : "play.circle"
                    )
                }
                .buttonStyle(.borderedProminent)
                .padding()
            }
        }
    }
}
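
Apple's Human Interface Guidelines recommend guiding users while the session is still initializing. ARCoachingOverlayView provides the system's standard onboarding UI for this; the sketch below is one way to attach it to the ARView created in makeUIView(context:) above (addCoachingOverlay(to:) is an illustrative helper name).

import ARKit
import RealityKit

// Minimal sketch: show the system coaching overlay until a horizontal plane is found.
func addCoachingOverlay(to arView: ARView) {
    let coachingOverlay = ARCoachingOverlayView()
    coachingOverlay.session = arView.session
    coachingOverlay.goal = .horizontalPlane
    coachingOverlay.activatesAutomatically = true

    coachingOverlay.frame = arView.bounds
    coachingOverlay.autoresizingMask = [.flexibleWidth, .flexibleHeight]
    arView.addSubview(coachingOverlay)
}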

⚡️ Using LiDAR: The LiDAR sensor on iPhone 12 Pro and later enables more accurate depth sensing and faster plane detection. Setting sceneReconstruction = .mesh generates a real-time 3D mesh of the surroundings.
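
On LiDAR devices the reconstructed mesh can also drive occlusion and physics through RealityKit's scene understanding options. A minimal sketch, assuming the world-tracking setup from section 1 with sceneReconstruction = .mesh:

// Use the LiDAR mesh for occlusion, physics, and debug visualization.
func enableSceneUnderstanding(on arView: ARView) {
    // Hide virtual content behind real-world geometry
    arView.environment.sceneUnderstanding.options.insert(.occlusion)

    // Let virtual objects collide with the reconstructed mesh
    arView.environment.sceneUnderstanding.options.insert(.physics)

    // Optional: visualize the reconstructed mesh while debugging
    arView.debugOptions.insert(.showSceneUnderstanding)
}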

📎 Apple Official Resources

📘 Documentation 💻 Sample Code 🎬 WWDC Sessions