🥽 ARKit
⭐ Difficulty: ⭐⭐⭐⭐
⏱️ Est. Time: 3-4h
🎨 Graphics & Media
Merging reality and digital with augmented reality
iOS 11+ · visionOS Supported
✨ What is ARKit?
ARKit is Apple's augmented reality framework that uses cameras and sensors to place and interact with virtual objects in the real world. It provides powerful AR experiences including plane detection, face tracking, image recognition, and spatial audio.
💡 Key Features: World Tracking · Plane Detection · Face Tracking · Image/Object Recognition · LiDAR · Spatial Audio · Multi-User AR · RealityKit Integration
🎯 1. AR Session Setup
Start an ARSession and configure the basic setup.
ARViewController.swift — Start AR Session
import ARKit
import RealityKit

/// Hosts an `ARView` and drives the AR session lifecycle for world tracking.
class ARViewController: UIViewController {
    var arView: ARView!

    override func viewDidLoad() {
        super.viewDidLoad()

        // Create the RealityKit view that renders the camera feed and virtual content.
        arView = ARView(frame: view.bounds)
        view.addSubview(arView)

        setupARSession()
    }

    /// Configures and starts world tracking with plane detection.
    func setupARSession() {
        let config = ARWorldTrackingConfiguration()

        // Detect both horizontal and vertical surfaces.
        config.planeDetection = [.horizontal, .vertical]

        // Sample the environment so virtual materials pick up realistic reflections.
        config.environmentTexturing = .automatic

        // Opt into LiDAR-based scene meshes on hardware that supports it
        // (iPhone 12 Pro and later).
        if ARWorldTrackingConfiguration.supportsSceneReconstruction(.mesh) {
            config.sceneReconstruction = .mesh
        }

        arView.session.run(config)
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        // Suspend the camera and tracking while the view is off-screen.
        arView.session.pause()
    }
}
🪑 2. Placing 3D Objects
Place 3D models in AR space.
ARObjectPlacement.swift — Object Placement
import RealityKit

extension ARViewController {
    /// Places a 3D model at the tapped screen location.
    ///
    /// Raycasts from `location` against estimated horizontal planes, then
    /// asynchronously loads "chair.usdz" and anchors it at the hit transform.
    /// Does nothing if the raycast finds no surface.
    func placeObject(at location: CGPoint) {
        // Find the real-world position behind the tap.
        guard let raycastResult = arView.raycast(
            from: location,
            allowing: .estimatedPlane,
            alignment: .horizontal
        ).first else { return }

        // Model loading is asynchronous; keep it off the render path.
        Task {
            do {
                let entity = try await ModelEntity(named: "chair.usdz")

                // Anchor the model at the raycast hit in world space.
                let anchor = AnchorEntity(world: raycastResult.worldTransform)
                anchor.addChild(entity)
                arView.scene.addAnchor(anchor)

                // Collision shapes are required for the built-in
                // translate/rotate/scale gestures to hit-test the entity.
                entity.generateCollisionShapes(recursive: true)
                arView.installGestures(for: entity)
            } catch {
                // was a mojibake-corrupted message; restored to readable text
                print("Failed to load model: \(error)")
            }
        }
    }

    /// Creates a simple 10 cm blue box at a world-space position.
    /// - Parameter position: World coordinates, in meters.
    func createBox(at position: SIMD3<Float>) {
        // Generate the mesh.
        let mesh = MeshResource.generateBox(size: 0.1)

        // Configure the material.
        var material = SimpleMaterial()
        material.color = .init(tint: .blue, texture: nil)

        // Build the entity and anchor it in the scene.
        let entity = ModelEntity(mesh: mesh, materials: [material])
        let anchor = AnchorEntity(world: position)
        anchor.addChild(entity)
        arView.scene.addAnchor(anchor)
    }
}
📐 3. Plane Detection
Automatically detect surfaces such as floors and walls.
ARPlaneDetection.swift — Plane Detection
import ARKit
import RealityKit  // required: MeshResource/SimpleMaterial/ModelEntity used below

extension ARViewController: ARSessionDelegate {
    /// Registers this controller as the session delegate so it receives anchor callbacks.
    func setupPlaneDetection() {
        arView.session.delegate = self
    }

    // New anchors were added (a plane was detected).
    func session(_ session: ARSession, didAdd anchors: [ARAnchor]) {
        for anchor in anchors {
            if let planeAnchor = anchor as? ARPlaneAnchor {
                handlePlaneDetection(planeAnchor)
            }
        }
    }

    // Existing anchors changed (a detected plane grew or merged).
    func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
        for anchor in anchors {
            if let planeAnchor = anchor as? ARPlaneAnchor {
                updatePlaneVisualization(planeAnchor)
            }
        }
    }

    /// Logs the detected plane and overlays a translucent visualization on it.
    func handlePlaneDetection(_ planeAnchor: ARPlaneAnchor) {
        // Current extent of the detected surface, in meters.
        let width = planeAnchor.planeExtent.width
        let height = planeAnchor.planeExtent.height

        // Plane orientation.
        let alignment = planeAnchor.alignment

        // was a mojibake-corrupted message; restored to readable text
        print("Plane detected: \(alignment == .horizontal ? "horizontal" : "vertical") \(width)x\(height)m")

        // Visualize the plane with a semi-transparent white overlay (optional).
        let planeMesh = MeshResource.generatePlane(width: width, depth: height)
        var material = SimpleMaterial()
        material.color = .init(tint: .white.withAlphaComponent(0.3), texture: nil)
        let planeEntity = ModelEntity(mesh: planeMesh, materials: [material])

        let anchorEntity = AnchorEntity(anchor: planeAnchor)
        anchorEntity.addChild(planeEntity)
        arView.scene.addAnchor(anchorEntity)
    }

    /// Called as the plane expands; update any plane visualization here.
    func updatePlaneVisualization(_ planeAnchor: ARPlaneAnchor) {
        // Plane extension/update handling (intentionally left as an exercise).
    }
}
😀 4. Face Tracking
Track facial expressions and movement with the TrueDepth camera.
ARFaceTracking.swift — Face Tracking
import ARKit
import RealityKit  // required: ARView is declared in RealityKit

/// Tracks the user's face with the TrueDepth camera and logs expression data.
class FaceTrackingViewController: UIViewController {
    var arView: ARView!

    override func viewDidLoad() {
        super.viewDidLoad()
        arView = ARView(frame: view.bounds)
        view.addSubview(arView)

        // Face tracking requires a TrueDepth camera; bail out gracefully otherwise.
        guard ARFaceTrackingConfiguration.isSupported else {
            // was a mojibake-corrupted message; restored to readable text
            print("Face tracking is not supported on this device")
            return
        }

        let configuration = ARFaceTrackingConfiguration()
        configuration.maximumNumberOfTrackedFaces = 1
        arView.session.run(configuration)
        arView.session.delegate = self
    }
}

// MARK: - ARSessionDelegate
extension FaceTrackingViewController: ARSessionDelegate {
    func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
        for anchor in anchors {
            guard let faceAnchor = anchor as? ARFaceAnchor else { continue }

            // Blend shapes: normalized (0...1) facial-expression coefficients.
            let blendShapes = faceAnchor.blendShapes

            // Eye blink
            if let eyeBlinkLeft = blendShapes[.eyeBlinkLeft]?.floatValue,
               let eyeBlinkRight = blendShapes[.eyeBlinkRight]?.floatValue {
                print("Blink: L=\(eyeBlinkLeft) R=\(eyeBlinkRight)")
            }

            // Smile
            if let smile = blendShapes[.mouthSmileLeft]?.floatValue {
                print("Smile: \(smile)")
            }

            // Jaw open
            if let jawOpen = blendShapes[.jawOpen]?.floatValue {
                print("Jaw open: \(jawOpen)")
            }

            // Face position and rotation; column 3 of the transform is translation.
            let transform = faceAnchor.transform
            print("Face position: \(transform.columns.3)")
        }
    }
}
🖼️ 5. Image Tracking
Recognize specific images and overlay AR content.
ARImageTracking.swift — Image Tracking
import ARKit
import RealityKit  // required: MeshResource/SimpleMaterial/ModelEntity used below

extension ARViewController {
    /// Switches the session to a configuration that also detects reference images.
    func setupImageTracking() {
        // Load the reference images from the asset catalog's "AR Resources" group.
        guard let referenceImages = ARReferenceImage.referenceImages(
            inGroupNamed: "AR Resources",
            bundle: nil
        ) else {
            // was a mojibake-corrupted message; restored to readable text
            print("No image resources found")
            return
        }

        let configuration = ARWorldTrackingConfiguration()
        configuration.detectionImages = referenceImages
        configuration.maximumNumberOfTrackedImages = 2
        arView.session.run(configuration)
    }

    /// Handles newly detected image anchors.
    ///
    /// NOTE(review): this was previously a second `session(_:didAdd:)`
    /// declaration, which collides with the one in the plane-detection
    /// extension of the same class (invalid redeclaration — it would not
    /// compile). Invoke this helper from the single ARSessionDelegate
    /// `session(_:didAdd:)` implementation instead.
    func handleImageAnchors(_ anchors: [ARAnchor]) {
        for anchor in anchors {
            guard let imageAnchor = anchor as? ARImageAnchor else { continue }

            // Information about the matched reference image.
            let referenceImage = imageAnchor.referenceImage
            let imageName = referenceImage.name ?? "unknown"
            print("Image detected: \(imageName)")

            // Place 3D content on top of the detected image.
            placeContentOnImage(imageAnchor)
        }
    }

    /// Overlays a translucent green plane matching the image's physical size.
    func placeContentOnImage(_ imageAnchor: ARImageAnchor) {
        // Physical size of the reference image, in meters.
        let imageSize = imageAnchor.referenceImage.physicalSize

        // Generate a plane covering the image.
        let plane = MeshResource.generatePlane(
            width: Float(imageSize.width),
            depth: Float(imageSize.height)
        )
        var material = SimpleMaterial()
        material.color = .init(tint: .green.withAlphaComponent(0.5), texture: nil)

        let planeEntity = ModelEntity(mesh: plane, materials: [material])
        let anchorEntity = AnchorEntity(anchor: imageAnchor)
        anchorEntity.addChild(planeEntity)
        arView.scene.addAnchor(anchorEntity)
    }
}
📱 SwiftUI Integration
ARDemoView.swift — SwiftUI Wrapper
import SwiftUI
import ARKit
import RealityKit

/// UIKit bridge that hosts an `ARView` inside SwiftUI.
struct ARViewContainer: UIViewRepresentable {
    @Binding var placementEnabled: Bool

    func makeUIView(context: Context) -> ARView {
        let arView = ARView(frame: .zero)

        // Basic world tracking with horizontal plane detection.
        let config = ARWorldTrackingConfiguration()
        config.planeDetection = [.horizontal]
        arView.session.run(config)

        // Route taps to the coordinator.
        let tapGesture = UITapGestureRecognizer(
            target: context.coordinator,
            action: #selector(Coordinator.handleTap)
        )
        arView.addGestureRecognizer(tapGesture)

        context.coordinator.arView = arView
        return arView
    }

    func updateUIView(_ uiView: ARView, context: Context) {
        // Keep the coordinator's flag in sync with SwiftUI state.
        context.coordinator.placementEnabled = placementEnabled
    }

    func makeCoordinator() -> Coordinator {
        Coordinator(placementEnabled: $placementEnabled)
    }

    class Coordinator: NSObject {
        var arView: ARView?
        @Binding var placementEnabled: Bool

        init(placementEnabled: Binding<Bool>) {
            _placementEnabled = placementEnabled
        }

        /// Places a 10 cm blue box where the user taps, when placement is enabled.
        @objc func handleTap(_ sender: UITapGestureRecognizer) {
            guard placementEnabled, let arView else { return }

            let location = sender.location(in: arView)

            // Raycast against estimated horizontal planes.
            if let result = arView.raycast(from: location,
                                           allowing: .estimatedPlane,
                                           alignment: .horizontal).first {
                // Build the box entity.
                let mesh = MeshResource.generateBox(size: 0.1)
                let material = SimpleMaterial(color: .blue, isMetallic: false)
                let entity = ModelEntity(mesh: mesh, materials: [material])

                let anchor = AnchorEntity(world: result.worldTransform)
                anchor.addChild(entity)
                arView.scene.addAnchor(anchor)
            }
        }
    }
}

/// Demo screen: AR view plus a toggle button controlling placement mode.
struct ARDemoView: View {
    @State private var placementEnabled = false

    var body: some View {
        ZStack {
            ARViewContainer(placementEnabled: $placementEnabled)
                .ignoresSafeArea()

            VStack {
                Spacer()
                Button {
                    placementEnabled.toggle()
                } label: {
                    // Labels were mojibake-corrupted; restored to readable text.
                    Label(
                        placementEnabled ? "Stop Placing" : "Start Placing",
                        systemImage: placementEnabled ? "stop.circle" : "play.circle"
                    )
                }
                .buttonStyle(.borderedProminent)
                .padding()
            }
        }
    }
}
💡 HIG Guidelines
- Safety: Warn users to stay aware of their surroundings while using AR
- Initialization: Clearly guide the user while plane detection is in progress
- Feedback: Visually indicate the areas where objects can be placed
- Lighting: Let users know that adequate lighting is required
- Permissions: Add NSCameraUsageDescription to Info.plist
- Performance: Optimize complex 3D models before using them
🎯 Practical Usage
- Furniture placement apps: Preview furniture in your real space before buying
- Education apps: Visualize learning content with 3D models
- Shopping apps: Try out products in AR before purchase
- Games: Use real-world space as the playing field
- Navigation: Real-time AR route guidance
📚 Learn More
- ARKit official documentation
- WWDC: Meet ARKit for spatial computing
- Apple AR developer page
- HIG: Augmented Reality
➡️ LiDAR tip: The LiDAR sensor on iPhone 12 Pro and later enables more accurate depth sensing and faster plane detection. Setting sceneReconstruction = .mesh generates a real-time 3D mesh of the environment.