diff --git a/mirroringBooth/mirroringBooth.xcodeproj/project.pbxproj b/mirroringBooth/mirroringBooth.xcodeproj/project.pbxproj index 980a8ac..c8a26b1 100644 --- a/mirroringBooth/mirroringBooth.xcodeproj/project.pbxproj +++ b/mirroringBooth/mirroringBooth.xcodeproj/project.pbxproj @@ -10,9 +10,22 @@ 8C796EAA2EF52FB200280FED /* mirroringBooth.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = mirroringBooth.app; sourceTree = BUILT_PRODUCTS_DIR; }; /* End PBXFileReference section */ +/* Begin PBXFileSystemSynchronizedBuildFileExceptionSet section */ + 8CC632842EFB1909006EA0E1 /* Exceptions for "mirroringBooth" folder in "mirroringBooth" target */ = { + isa = PBXFileSystemSynchronizedBuildFileExceptionSet; + membershipExceptions = ( + Info.plist, + ); + target = 8C796EA92EF52FB200280FED /* mirroringBooth */; + }; +/* End PBXFileSystemSynchronizedBuildFileExceptionSet section */ + /* Begin PBXFileSystemSynchronizedRootGroup section */ 8C796EAC2EF52FB200280FED /* mirroringBooth */ = { isa = PBXFileSystemSynchronizedRootGroup; + exceptions = ( + 8CC632842EFB1909006EA0E1 /* Exceptions for "mirroringBooth" folder in "mirroringBooth" target */, + ); path = mirroringBooth; sourceTree = ""; }; @@ -254,7 +267,9 @@ ENABLE_PREVIEWS = YES; ENABLE_USER_SELECTED_FILES = readonly; GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_FILE = mirroringBooth/Info.plist; INFOPLIST_KEY_CFBundleDisplayName = "Mirroring Booth"; + INFOPLIST_KEY_NSLocalNetworkUsageDescription = "이 앱은 같은 네트워크의 기기와 연결하기 위해 로컬 네트워크 접근이 필요합니다."; "INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphoneos*]" = YES; "INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphonesimulator*]" = YES; "INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphoneos*]" = YES; @@ -302,7 +317,9 @@ ENABLE_PREVIEWS = YES; ENABLE_USER_SELECTED_FILES = readonly; GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_FILE = mirroringBooth/Info.plist; INFOPLIST_KEY_CFBundleDisplayName = 
"Mirroring Booth"; + INFOPLIST_KEY_NSLocalNetworkUsageDescription = "이 앱은 같은 네트워크의 기기와 연결하기 위해 로컬 네트워크 접근이 필요합니다."; "INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphoneos*]" = YES; "INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphonesimulator*]" = YES; "INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphoneos*]" = YES; diff --git a/mirroringBooth/mirroringBooth/CameraDevice/Connection/BrowserView.swift b/mirroringBooth/mirroringBooth/CameraDevice/Connection/BrowserView.swift new file mode 100644 index 0000000..be7af85 --- /dev/null +++ b/mirroringBooth/mirroringBooth/CameraDevice/Connection/BrowserView.swift @@ -0,0 +1,79 @@ +// +// BrowserView.swift +// mirroringBooth +// +// Created by 이상유 on 2025-12-24. +// + +import SwiftUI + +struct BrowserView: View { + + private var router: Router + private var sender: StreamSender + @State private var isConnecting = false + + init(_ router: Router, _ sender: StreamSender) { + self.router = router + self.sender = sender + } + + var body: some View { + VStack { + Button { + router.pop() + } label: { + Text("뒤로가기") + .font(.headline) + .padding(5) + } + + if isConnecting { + ProgressView() + .padding() + Text("연결 중...") + .font(.subheadline) + } + + Text(sender.connectionState.values.joined(separator: "\n")) + .font(.subheadline) + + ForEach(sender.peers, id: \.self) { peer in + deviceRow(peer) + } + } + .onAppear { + sender.startBrowsing() + } + .onDisappear { + sender.stopBrowsing() + } + .onChange(of: sender.connectionState) { _, newValue in + // 연결이 완료되면 카메라 화면으로 이동 + if newValue.values.contains(where: { $0.contains("연결 완료") }) && isConnecting { + isConnecting = false + router.push(to: .camera) + } + } + } + + @ViewBuilder + func deviceRow(_ peer: String) -> some View { + Button { + sender.invite(to: peer) + isConnecting = true + } label: { + Text(peer) + .padding(5) + .overlay { + RoundedRectangle(cornerRadius: 12) + .stroke(style: StrokeStyle(lineWidth: 1)) + } + } + 
.disabled(isConnecting)
+    }
+}
+
+#Preview {
+    BrowserView(Router(), StreamSender())
+}
diff --git a/mirroringBooth/mirroringBooth/CameraDevice/Connection/StreamSender.swift b/mirroringBooth/mirroringBooth/CameraDevice/Connection/StreamSender.swift
new file mode 100644
index 0000000..8e20e28
--- /dev/null
+++ b/mirroringBooth/mirroringBooth/CameraDevice/Connection/StreamSender.swift
@@ -0,0 +1,153 @@
+//
+//  StreamSender.swift
+//  mirroringBooth
+//
+//  Created by 이상유 on 2025-12-28.
+//
+
+import Foundation
+import MultipeerConnectivity
+
+/// Stream sender side (iPhone).
+/// Discovers nearby receivers, connects, and transmits stream data (video/photo packets).
+@Observable
+final class StreamSender: NSObject {
+
+    /// Human-readable connection status per peer, keyed by display name.
+    /// NOTE: BrowserView string-matches "연결 완료" against these values, so the
+    /// literal status strings written below are part of the de-facto contract.
+    var connectionState: [String: String] = [:]
+    /// Display names of peers discovered while browsing.
+    var peers: [String] = []
+
+    /// Invoked when a capture-request packet arrives from the receiver.
+    var onCaptureRequest: (() -> Void)?
+
+    private let serviceType: String
+    /// Identity of this device in the Multipeer session.
+    private let identifier: MCPeerID
+    /// Multipeer session shared with connected receivers.
+    private let session: MCSession
+    /// Service browser (the sender discovers advertised receivers).
+    private let browser: MCNearbyServiceBrowser
+    /// Maps a display name back to its MCPeerID for invitations.
+    private var discoveredPeers: [String: MCPeerID] = [:]
+
+    init(serviceType: String = "mirroringbooth") {
+        self.serviceType = serviceType
+        self.identifier = MCPeerID(displayName: UIDevice.current.name)
+        // NOTE(review): encryption is disabled, presumably for video throughput —
+        // confirm this is acceptable; use .required if sensitive data is ever sent.
+        self.session = MCSession(
+            peer: identifier,
+            securityIdentity: nil,
+            encryptionPreference: .none
+        )
+        self.browser = MCNearbyServiceBrowser(peer: identifier, serviceType: serviceType)
+
+        super.init()
+        setup()
+    }
+
+    private func setup() {
+        session.delegate = self
+        browser.delegate = self
+    }
+
+    /// Starts discovering nearby receivers; clears stale discovery state first.
+    func startBrowsing() {
+        peers.removeAll()
+        discoveredPeers.removeAll()
+        browser.startBrowsingForPeers()
+    }
+
+    /// Stops discovery (does not tear down an established session).
+    func stopBrowsing() {
+        browser.stopBrowsingForPeers()
+    }
+
+    /// Invites a previously discovered peer (by display name) into the session.
+    func invite(to id: String) {
+        guard let peerID = discoveredPeers[id] else { return }
+        browser.invitePeer(peerID, to: session, withContext: nil, timeout: 30)
+    }
+
+    /// Sends one serialized MediaPacket to every connected peer.
+    /// - Parameter data: Output of `MediaPacket.serialize()`; the first byte is the packet type.
+    func sendPacket(_ data: Data) {
+        // Nothing to do when no peer is connected.
+        guard !session.connectedPeers.isEmpty else {
+            return
+        }
+
+        // Pick the transport mode from the packet type:
+        // SPS/PPS and photos must arrive, so they go reliable;
+        // frame data favors latency, so it goes unreliable.
+        let sendMode: MCSessionSendDataMode = {
+            // `first` is slice-safe (absolute `data[0]` would trap on a Data slice
+            // whose startIndex != 0) and also covers the empty-buffer case.
+            guard let packetType = data.first else { return .unreliable }
+
+            // SPS(0x01), PPS(0x02), Photo(0x05) -> reliable.
+            if packetType == 0x01 || packetType == 0x02 || packetType == 0x05 {
+                return .reliable
+            }
+            return .unreliable
+        }()
+
+        do {
+            try session.send(data, toPeers: session.connectedPeers, with: sendMode)
+        } catch {
+            print("Failed to send packet data: \(error)")
+        }
+    }
+
+}
+
+// MARK: - Session Delegate
+extension StreamSender: MCSessionDelegate {
+
+    func session(_ session: MCSession, peer peerID: MCPeerID, didChange state: MCSessionState) {
+        // MCSession delegate callbacks arrive on a private background queue;
+        // @Observable state drives SwiftUI, so mutate it on the main thread.
+        DispatchQueue.main.async { [weak self] in
+            guard let self else { return }
+            switch state {
+            case .connected:
+                self.connectionState[peerID.displayName] = "✅ \(peerID.displayName)와 연결 완료"
+            case .connecting:
+                self.connectionState[peerID.displayName] = "⏳ \(peerID.displayName)와 연결 중"
+            case .notConnected:
+                self.connectionState.removeValue(forKey: peerID.displayName)
+            @unknown default:
+                break
+            }
+        }
+    }
+
+    func session(_ session: MCSession, didReceive data: Data, fromPeer peerID: MCPeerID) {
+        // Only capture-request packets are expected on the sender side.
+        guard let packet = MediaPacket.deserialize(data),
+              packet.type == .captureRequest else {
+            return
+        }
+
+        // Forward the capture request to whoever registered the callback.
+        onCaptureRequest?()
+    }
+
+    func session(_ session: MCSession, didReceive stream: InputStream, withName streamName: String, fromPeer peerID: MCPeerID) { }
+
+    func session(_ session: MCSession, didStartReceivingResourceWithName resourceName: String, fromPeer peerID: MCPeerID, with progress: Progress) { }
+
+    func session(_ session: MCSession, didFinishReceivingResourceWithName resourceName: String, fromPeer peerID: MCPeerID, at localURL: URL?, withError error: (any Error)?) { }
+
+}
+
+// MARK: - Browser Delegate
+extension StreamSender: MCNearbyServiceBrowserDelegate {
+
+    func browser(
+        _ browser: MCNearbyServiceBrowser,
+        foundPeer peerID: MCPeerID,
+        withDiscoveryInfo info: [String : String]?
+    ) {
+        // Browser callbacks also arrive off the main thread.
+        DispatchQueue.main.async { [weak self] in
+            guard let self else { return }
+            let displayName = peerID.displayName
+            self.discoveredPeers[displayName] = peerID
+            guard !self.peers.contains(displayName) else { return }
+            self.peers.append(displayName)
+        }
+    }
+
+    func browser(
+        _ browser: MCNearbyServiceBrowser,
+        lostPeer peerID: MCPeerID
+    ) {
+        // Drop lost peers so the UI cannot invite a stale entry
+        // (previously lost peers were never removed from the lists).
+        DispatchQueue.main.async { [weak self] in
+            guard let self else { return }
+            let displayName = peerID.displayName
+            self.discoveredPeers.removeValue(forKey: displayName)
+            self.peers.removeAll { $0 == displayName }
+        }
+    }
+
+}
diff --git a/mirroringBooth/mirroringBooth/CameraDevice/Home/HomeView.swift b/mirroringBooth/mirroringBooth/CameraDevice/Home/HomeView.swift
new file mode 100644
index 0000000..29c1558
--- /dev/null
+++ b/mirroringBooth/mirroringBooth/CameraDevice/Home/HomeView.swift
@@ -0,0 +1,39 @@
+//
+//  HomeView.swift
+//  mirroringBooth
+//
+//  Created by 이상유 on 2025-12-24.
+//
+
+import SwiftUI
+
+/// Entry screen of the camera (sender) device.
+/// Hosts the navigation stack: connection picker -> live camera streaming.
+struct HomeView: View {
+
+    @State private var router: Router = .init()
+    private let sender = StreamSender()
+
+    var body: some View {
+        NavigationStack(path: $router.path) {
+            Button {
+                router.push(to: .connection)
+            } label: {
+                Text("촬영하기")
+                    .font(.headline)
+                    .padding(5)
+            }
+            .navigationDestination(for: Route.self) { viewType in
+                switch viewType {
+                case .connection:
+                    BrowserView(router, sender)
+                case .camera:
+                    StreamingView(sender)
+                }
+            }
+        }
+    }
+
+}
+
+#Preview {
+    HomeView()
+}
diff --git a/mirroringBooth/mirroringBooth/CameraDevice/Stream/CameraEncoder.swift b/mirroringBooth/mirroringBooth/CameraDevice/Stream/CameraEncoder.swift
new file mode 100644
index 0000000..50de062
--- /dev/null
+++ b/mirroringBooth/mirroringBooth/CameraDevice/Stream/CameraEncoder.swift
@@ -0,0 +1,334 @@
+//
+//  CameraEncoder.swift
+//  mirroringBooth
+//
+//  Created by 이상유 on 2025-12-27.
+// + +import Foundation +import AVFoundation +import VideoToolbox + +/// 카메라 캡처 및 H.264 인코딩 클래스 +/// AVCaptureSession으로 카메라 영상을 캡처하고 VideoToolbox로 H.264 인코딩 +final class CameraEncoder: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVCapturePhotoCaptureDelegate { + + /// 인코딩된 프레임 데이터를 전달하는 콜백 + var onEncodedFrame: ((Data) -> Void)? + + /// 촬영된 고화질 사진 데이터를 전달하는 콜백 + var onPhotoCaptured: ((Data) -> Void)? + + /// 카메라 캡처 세션 + private let session = AVCaptureSession() + /// 카메라 프레임 처리용 큐 + private let cameraQueue = DispatchQueue(label: "cameraQueue") + /// 비디오 압축 세션(인코더) + private var compressionSession: VTCompressionSession? + /// SPS/PPS 전송 여부 플래그 + private var didSendParameterSets = false + /// 사진 촬영용 출력 + private let photoOutput = AVCapturePhotoOutput() + + /// 인코딩 결과 콜백 + private let compressionOutputCallback: VTCompressionOutputCallback = { + outputCallbackRefCon, + sourceFrameRefCon, + status, + infoFlags, + sampleBuffer in + + guard status == noErr, + let sampleBuffer, + CMSampleBufferDataIsReady(sampleBuffer) + else { return } + + let manager = Unmanaged + .fromOpaque(outputCallbackRefCon!) 
+ .takeUnretainedValue() + + manager.processEncodedFrame(sampleBuffer) + } + + // 카메라 캡처 세션 설정 및 시작 + func startSession() throws { + session.sessionPreset = .hd1280x720 + + guard let device = AVCaptureDevice.default( + .builtInWideAngleCamera, + for: .video, + position: .back + ) else { return } + + let input = try AVCaptureDeviceInput(device: device) + + if session.canAddInput(input) { + session.addInput(input) + } + + let output = AVCaptureVideoDataOutput() + output.setSampleBufferDelegate(self, queue: cameraQueue) + output.alwaysDiscardsLateVideoFrames = true + + if session.canAddOutput(output) { + session.addOutput(output) + } + + // 사진 촬영용 출력 추가 + if session.canAddOutput(photoOutput) { + session.addOutput(photoOutput) + } + + // 카메라 세션은 백그라운드 스레드에서 시작 + // 메인 스레드에서 시작하면 UI 응답성 저하 가능 + cameraQueue.async { [weak self] in + self?.session.startRunning() + } + } + + // 카메라 캡처 세션 중지 + func stopSession() { + // 카메라 세션 중지도 백그라운드 스레드에서 수행 + cameraQueue.async { [weak self] in + guard let self else { return } + self.session.stopRunning() + + if let encoder = self.compressionSession { + VTCompressionSessionCompleteFrames(encoder, untilPresentationTimeStamp: .invalid) + VTCompressionSessionInvalidate(encoder) + self.compressionSession = nil + } + + self.didSendParameterSets = false + } + } + + // 고화질 사진 촬영 + func capturePhoto() { + let settings = AVCapturePhotoSettings() + settings.flashMode = .off + photoOutput.capturePhoto(with: settings, delegate: self) + } + +} + +// MARK: - AVCapturePhotoCaptureDelegate + +extension CameraEncoder { + + func photoOutput( + _ output: AVCapturePhotoOutput, + didFinishProcessingPhoto photo: AVCapturePhoto, + error: Error? + ) { + guard error == nil, + let imageData = photo.fileDataRepresentation() + else { + print("Failed to capture photo: \(error?.localizedDescription ?? 
"Unknown error")") + return + } + + // JPEG 데이터를 MediaPacket으로 감싸서 전송 + let packet = MediaPacket(type: .photo, data: imageData) + onPhotoCaptured?(packet.serialize()) + } + +} + +// MARK: - encode + +extension CameraEncoder { + + // 프레임 처리 -> 인코딩 시작 + func captureOutput( + _ output: AVCaptureOutput, + didOutput sampleBuffer: CMSampleBuffer, + from connection: AVCaptureConnection + ) { + if compressionSession == nil { + if let format = CMSampleBufferGetFormatDescription(sampleBuffer) { + let dimensions = CMVideoFormatDescriptionGetDimensions(format) + setupVideoEncoder(width: dimensions.width, height: dimensions.height) + } + } + + guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer), + let session = compressionSession + else { return } + + let pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) + + VTCompressionSessionEncodeFrame( + session, + imageBuffer: pixelBuffer, + presentationTimeStamp: pts, + duration: .invalid, + frameProperties: nil, + sourceFrameRefcon: nil, + infoFlagsOut: nil + ) + } + + // 비디오 압축 세션(인코더) 설정 + private func setupVideoEncoder(width: Int32, height: Int32) { + VTCompressionSessionCreate( + allocator: nil, + width: width, + height: height, + codecType: kCMVideoCodecType_H264, + encoderSpecification: nil, + imageBufferAttributes: nil, + compressedDataAllocator: nil, + outputCallback: compressionOutputCallback, + refcon: UnsafeMutableRawPointer(Unmanaged.passUnretained(self).toOpaque()), + compressionSessionOut: &compressionSession + ) + + guard let session = compressionSession else { return } + + // 실시간 인코딩 모드 활성화 + // 낮은 레이턴시를 위해 프레임 버퍼링을 최소화하고 즉시 인코딩 + VTSessionSetProperty( + session, + key: kVTCompressionPropertyKey_RealTime, + value: kCFBooleanTrue + ) + + // H.264 Baseline Profile 사용 + // 가장 넓은 호환성을 제공하며, 대부분의 디바이스에서 디코딩 가능 + // AutoLevel: 해상도와 비트레이트에 따라 자동으로 레벨 선택 + VTSessionSetProperty( + session, + key: kVTCompressionPropertyKey_ProfileLevel, + value: kVTProfileLevel_H264_Baseline_AutoLevel + ) + + // KeyFrame 간격 
설정 (30프레임마다 KeyFrame 생성) + // KeyFrame은 독립적으로 디코딩 가능하여 스트림 중간 진입점 제공 + // 값이 작을수록 빠른 복구 가능, 크면 압축률 향상 + VTSessionSetProperty( + session, + key: kVTCompressionPropertyKey_MaxKeyFrameInterval, + value: 30 as CFTypeRef + ) + + // 비트레이트 설정 (2 Mbps) + VTSessionSetProperty( + session, + key: kVTCompressionPropertyKey_AverageBitRate, + value: 2_000_000 as CFTypeRef + ) + + // 데이터 레이트 제한 설정 + VTSessionSetProperty( + session, + key: kVTCompressionPropertyKey_DataRateLimits, + value: [2_500_000, 1] as CFArray + ) + + VTCompressionSessionPrepareToEncodeFrames(session) + } + + // 인코딩 된 데이터 처리 + private func processEncodedFrame(_ sampleBuffer: CMSampleBuffer) { + guard let dataBuffer = CMSampleBufferGetDataBuffer(sampleBuffer) else { return } + + // 1. KeyFrame 여부 확인 + let isKeyFrame = checkIfKeyFrame(sampleBuffer) + + // 2. KeyFrame이고 아직 SPS/PPS를 전송하지 않았다면 전송 + if isKeyFrame, !didSendParameterSets { + /// formatDescription: 비디오 포맷 정보 + if let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer) { + sendParameterSets(formatDescription) + didSendParameterSets = true + } + } + + // 3. 프레임 데이터 추출 + var length: Int = 0 + var dataPointer: UnsafeMutablePointer? + + CMBlockBufferGetDataPointer( + dataBuffer, + atOffset: 0, + lengthAtOffsetOut: nil, + totalLengthOut: &length, + dataPointerOut: &dataPointer + ) + + guard let pointer = dataPointer else { return } + let frameData = Data(bytes: pointer, count: length) + + // 4. 프레임 타입에 따라 패킷 생성 및 전송 + let packetType: MediaPacketType = isKeyFrame ? .idrFrame : .pFrame + let packet = MediaPacket(type: packetType, data: frameData) + + onEncodedFrame?(packet.serialize()) + } + + // KeyFrame(IDR Frame) 여부 확인 + private func checkIfKeyFrame(_ sampleBuffer: CMSampleBuffer) -> Bool { + guard let attachments = CMSampleBufferGetSampleAttachmentsArray( + sampleBuffer, + createIfNecessary: false + ) as? 
[[CFString: Any]], + let attachment = attachments.first + else { return false } + + // NotSync가 false이면 KeyFrame(Sync Frame) + let notSync = attachment[kCMSampleAttachmentKey_NotSync] as? Bool ?? false + return !notSync + } + + // SPS/PPS 파라미터 셋 추출 및 전송 + private func sendParameterSets(_ formatDescription: CMFormatDescription) { + var spsSize: Int = 0 + var spsCount: Int = 0 + var ppsSize: Int = 0 + var ppsCount: Int = 0 + var sps: UnsafePointer? + var pps: UnsafePointer? + + // SPS 추출 (index 0) + let spsStatus = CMVideoFormatDescriptionGetH264ParameterSetAtIndex( + formatDescription, + parameterSetIndex: 0, + parameterSetPointerOut: &sps, + parameterSetSizeOut: &spsSize, + parameterSetCountOut: &spsCount, + nalUnitHeaderLengthOut: nil + ) + + // PPS 추출 (index 1) + let ppsStatus = CMVideoFormatDescriptionGetH264ParameterSetAtIndex( + formatDescription, + parameterSetIndex: 1, + parameterSetPointerOut: &pps, + parameterSetSizeOut: &ppsSize, + parameterSetCountOut: &ppsCount, + nalUnitHeaderLengthOut: nil + ) + + // SPS/PPS 전송 + guard spsStatus == noErr, + ppsStatus == noErr, + let spsPointer = sps, + let ppsPointer = pps + else { + print("Failed to extract SPS/PPS") + return + } + + // SPS 패킷 생성 및 전송 + let spsData = Data(bytes: spsPointer, count: spsSize) + let spsPacket = MediaPacket(type: .sps, data: spsData) + onEncodedFrame?(spsPacket.serialize()) + + // PPS 패킷 생성 및 전송 + let ppsData = Data(bytes: ppsPointer, count: ppsSize) + let ppsPacket = MediaPacket(type: .pps, data: ppsData) + onEncodedFrame?(ppsPacket.serialize()) + } + +} diff --git a/mirroringBooth/mirroringBooth/CameraDevice/Stream/StreamingView.swift b/mirroringBooth/mirroringBooth/CameraDevice/Stream/StreamingView.swift new file mode 100644 index 0000000..cab2dfc --- /dev/null +++ b/mirroringBooth/mirroringBooth/CameraDevice/Stream/StreamingView.swift @@ -0,0 +1,54 @@ +// +// StreamingView.swift +// mirroringBooth +// +// Created by 이상유 on 2025-12-24. 
+// + +import SwiftUI + +/// 비디오 송신 측 화면 (iPhone) +/// 카메라로부터 영상을 캡처하여 H.264로 인코딩 후 다른 기기로 전송 +struct StreamingView: View { + + /// 카메라 캡처 및 인코딩 담당 + private var camera = CameraEncoder() + private let sender: StreamSender + + init(_ sender: StreamSender) { + self.sender = sender + // 인코딩된 프레임을 네트워크로 전송 + camera.onEncodedFrame = { data in + sender.sendPacket(data) + } + // 촬영된 고화질 사진을 네트워크로 전송 + camera.onPhotoCaptured = { data in + sender.sendPacket(data) + } + // 촬영 요청 수신 시 사진 촬영 + sender.onCaptureRequest = { [camera] in + camera.capturePhoto() + } + } + + var body: some View { + Text("미러링 기기에 촬영 화면이 표시됩니다.") + .padding() + .onAppear { + // 화면이 나타날 때 카메라 세션 시작 + do { + try camera.startSession() + } catch { + print("Failed to start camera session: \(error)") + } + } + .onDisappear { + // 화면이 사라질 때 카메라 세션 중지 + camera.stopSession() + } + } +} + +#Preview { + StreamingView(StreamSender()) +} diff --git a/mirroringBooth/mirroringBooth/Common/ContentView.swift b/mirroringBooth/mirroringBooth/Common/ContentView.swift new file mode 100644 index 0000000..6d41c6e --- /dev/null +++ b/mirroringBooth/mirroringBooth/Common/ContentView.swift @@ -0,0 +1,29 @@ +// +// ContentView.swift +// mirroringBooth +// +// Created by 이상유 on 2025-12-19. +// + +import SwiftUI + +struct ContentView: View { + + var body: some View { + defaultView + } + + @ViewBuilder + var defaultView: some View { + switch UIDevice.current.userInterfaceIdiom { + case .phone: + HomeView() + default: + ReceiverView() + } + } +} + +#Preview { + ContentView() +} diff --git a/mirroringBooth/mirroringBooth/Common/MediaPacket.swift b/mirroringBooth/mirroringBooth/Common/MediaPacket.swift new file mode 100644 index 0000000..c4afe48 --- /dev/null +++ b/mirroringBooth/mirroringBooth/Common/MediaPacket.swift @@ -0,0 +1,58 @@ +// +// MediaPacket.swift +// mirroringBooth +// +// Created by 이상유 on 2025-12-27. 
+//
+
+import Foundation
+
+/// Packet types exchanged over the video-streaming session.
+enum MediaPacketType: UInt8 {
+    case sps = 0x01            // Sequence Parameter Set
+    case pps = 0x02            // Picture Parameter Set
+    case idrFrame = 0x03       // KeyFrame (IDR Frame)
+    case pFrame = 0x04         // P-Frame (Predicted Frame)
+    case photo = 0x05          // High Quality Photo (JPEG)
+    case captureRequest = 0x06 // Capture Photo Request (iPad → iPhone)
+}
+
+/// Wire format of a media packet:
+/// [1 byte: type] + [4 bytes: payload length, big endian] + [N bytes: payload]
+struct MediaPacket {
+    /// Header size: 1 type byte + 4 length bytes.
+    private static let headerSize = 5
+
+    let type: MediaPacketType
+    let data: Data
+
+    /// Serializes the packet into transportable bytes.
+    func serialize() -> Data {
+        var packet = Data(capacity: Self.headerSize + data.count)
+        packet.append(type.rawValue)
+
+        // Append the payload length as 4 bytes (big endian).
+        var length = UInt32(data.count).bigEndian
+        withUnsafeBytes(of: &length) { packet.append(contentsOf: $0) }
+
+        // Append the payload itself.
+        packet.append(data)
+
+        return packet
+    }
+
+    /// Parses a packet from received bytes.
+    /// Returns nil on a short buffer, an unknown type byte, or a truncated payload.
+    /// - Note: indices are startIndex-relative so this is safe even when `data`
+    ///   is a Data slice whose indices do not start at 0 (the previous absolute
+    ///   `data[0]` / `subdata(in: 1..<5)` would read the wrong bytes or trap).
+    static func deserialize(_ data: Data) -> MediaPacket? {
+        guard data.count >= headerSize else { return nil }
+
+        let start = data.startIndex
+
+        // Type byte.
+        guard let type = MediaPacketType(rawValue: data[start]) else { return nil }
+
+        // Length (big endian), assembled byte-by-byte to avoid the alignment
+        // requirement of `load(as:)` on an arbitrary buffer offset.
+        let length = data[(start + 1)..<(start + 5)]
+            .reduce(UInt32(0)) { ($0 << 8) | UInt32($1) }
+
+        // Payload (copied out so the result does not retain the parent buffer).
+        guard data.count >= headerSize + Int(length) else { return nil }
+        let payload = Data(data[(start + 5)..<(start + 5 + Int(length))])
+
+        return MediaPacket(type: type, data: payload)
+    }
+}
diff --git a/mirroringBooth/mirroringBooth/Common/Router.swift b/mirroringBooth/mirroringBooth/Common/Router.swift
new file mode 100644
index 0000000..d4c1837
--- /dev/null
+++ b/mirroringBooth/mirroringBooth/Common/Router.swift
@@ -0,0 +1,30 @@
+//
+//  Router.swift
+//  mirroringBooth
+//
+//  Created by 이상유 on 2025-12-24.
+//
+
+import Foundation
+
+/// Navigation state shared by the sender-side screens.
+@Observable
+final class Router {
+    /// Backing storage for NavigationStack's path binding.
+    var path: [Route] = []
+
+    /// Pushes a new destination.
+    func push(to route: Route) {
+        path.append(route)
+    }
+
+    /// Pops the top destination; safe on an empty stack
+    /// (a bare `removeLast()` would crash when the path is empty).
+    func pop() {
+        guard !path.isEmpty else { return }
+        path.removeLast()
+    }
+
+    /// Pops back to the root.
+    func reset() {
+        path.removeLast(path.count)
+    }
+}
+
+/// Destinations reachable from HomeView.
+enum Route {
+    case connection
+    case camera
+}
diff --git a/mirroringBooth/mirroringBooth/mirroringBoothApp.swift b/mirroringBooth/mirroringBooth/Common/mirroringBoothApp.swift
similarity index 100%
rename from mirroringBooth/mirroringBooth/mirroringBoothApp.swift
rename to mirroringBooth/mirroringBooth/Common/mirroringBoothApp.swift
diff --git a/mirroringBooth/mirroringBooth/ContentView.swift b/mirroringBooth/mirroringBooth/ContentView.swift
deleted file mode 100644
index aecdf64..0000000
--- a/mirroringBooth/mirroringBooth/ContentView.swift
+++ /dev/null
@@ -1,24 +0,0 @@
-//
-//  ContentView.swift
-//  mirroringBooth
-//
-//  Created by 이상유 on 2025-12-19.
-//
-
-import SwiftUI
-
-struct ContentView: View {
-    var body: some View {
-        VStack {
-            Image(systemName: "globe")
-                .imageScale(.large)
-                .foregroundStyle(.tint)
-            Text("Hello, world!")
-        }
-        .padding()
-    }
-}
-
-#Preview {
-    ContentView()
-}
diff --git a/mirroringBooth/mirroringBooth/Info.plist b/mirroringBooth/mirroringBooth/Info.plist
new file mode 100644
index 0000000..93073de
--- /dev/null
+++ b/mirroringBooth/mirroringBooth/Info.plist
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+	<key>NSBonjourServices</key>
+	<array>
+		<string>_mirroringbooth._tcp</string>
+		<string>_mirroringbooth._udp</string>
+	</array>
+</dict>
+</plist>
diff --git a/mirroringBooth/mirroringBooth/MirroringDevice/Connection/PacketHandler.swift b/mirroringBooth/mirroringBooth/MirroringDevice/Connection/PacketHandler.swift
new file mode 100644
index 0000000..2e952cd
--- /dev/null
+++ b/mirroringBooth/mirroringBooth/MirroringDevice/Connection/PacketHandler.swift
@@ -0,0 +1,60 @@
+//
+//  PacketHandler.swift
+//  mirroringBooth
+//
+//  Created by 이상유 on 2025-12-29.
+//
+
+import Foundation
+import Combine
+
+/// Routes received packets to the appropriate handler based on packet type.
+final class PacketHandler {
+
+    private let videoDecoder: VideoDecoder
+    private let renderer: MediaFrameRenderer
+
+    /// Invoked with the JPEG payload when a high-quality photo packet arrives.
+    var onPhotoReceived: ((Data) -> Void)?
+
+    init(videoDecoder: VideoDecoder, renderer: MediaFrameRenderer) {
+        self.videoDecoder = videoDecoder
+        self.renderer = renderer
+
+        setupCallbacks()
+    }
+
+    private func setupCallbacks() {
+        // Forward decoded frames from the decoder to the renderer.
+        // NOTE(review): MediaFrameRenderer is @MainActor but this closure runs on
+        // the decoder's callback thread; renderDecodedFrame hops to main internally,
+        // but strict-concurrency builds will flag this call — confirm isolation.
+        videoDecoder.onDecodedFrame = { [weak renderer] pixelBuffer in
+            renderer?.renderDecodedFrame(pixelBuffer)
+        }
+    }
+
+    /// Dispatches one received packet by its type.
+    func handlePacket(_ data: Data) {
+        guard let packet = MediaPacket.deserialize(data) else {
+            print("Failed to deserialize packet")
+            return
+        }
+
+        switch packet.type {
+        case .photo:
+            // High-quality photo -> hand the payload straight to the callback.
+            onPhotoReceived?(packet.data)
+
+        case .sps, .pps, .idrFrame, .pFrame:
+            // Video stream packet -> forward to the decoder.
+            // NOTE(review): handleReceivedPacket deserializes `data` again, so
+            // every video packet is parsed twice — consider passing `packet`.
+            videoDecoder.handleReceivedPacket(data)
+
+        case .captureRequest:
+            // Capture requests are only meaningful on the sender side; ignore.
+            break
+        }
+    }
+
+    /// Tears down the decoder session.
+    func cleanup() {
+        videoDecoder.cleanup()
+    }
+
+}
diff --git a/mirroringBooth/mirroringBooth/MirroringDevice/Connection/ReceiverView.swift b/mirroringBooth/mirroringBooth/MirroringDevice/Connection/ReceiverView.swift
new file mode 100644
index 0000000..81d62a5
--- /dev/null
+++ b/mirroringBooth/mirroringBooth/MirroringDevice/Connection/ReceiverView.swift
@@ -0,0 +1,76 @@
+//
+//  ReceiverView.swift
+//  mirroringBooth
+//
+//  Created by 이상유 on 2025-12-24.
+//
+
+import SwiftUI
+
+/// Stream receiver screen (iPad/Mac).
+/// Receives stream data (video/photos) from the sender device and displays it.
+struct ReceiverView: View {
+
+    @State private var receiver = StreamReceiver()
+    @StateObject private var renderer = MediaFrameRenderer()
+    @State private var packetHandler: PacketHandler?
+    /// When set, navigates to the full-screen photo view.
+    @State private var photoData: Data?
+
+    var body: some View {
+        NavigationStack {
+            VStack {
+                if !receiver.isConnected {
+                    Text("연결을 시도해주세요...")
+                        .font(.caption)
+                        .foregroundColor(.white)
+                        .padding(8)
+                        .background(Color.black.opacity(0.6))
+                        .cornerRadius(8)
+                        .padding(.top, 8)
+                } else {
+                    ZStack {
+                        // Live stream display (video + capture button).
+                        StreamDisplayView(renderer: renderer) {
+                            // The shutter button sends a capture-request packet.
+                            receiver.requestCapture()
+                        }
+                        .edgesIgnoringSafeArea(.all)
+                    }
+                }
+            }
+            .navigationDestination(item: $photoData) { data in
+                PhotoView(photoData: data)
+            }
+        }
+        .onAppear {
+            setupPacketHandler()
+            receiver.startAdvertising()
+        }
+        .onDisappear {
+            receiver.stopAdvertising()
+            packetHandler?.cleanup()
+        }
+    }
+
+    /// Wires the receiver's data stream into a PacketHandler exactly once.
+    private func setupPacketHandler() {
+        guard packetHandler == nil else { return }
+
+        let videoDecoder = VideoDecoder()
+        let handler = PacketHandler(videoDecoder: videoDecoder, renderer: renderer)
+
+        handler.onPhotoReceived = { [self] data in
+            // This fires on the session's delegate queue; @State must be
+            // mutated on the main thread.
+            DispatchQueue.main.async {
+                photoData = data
+            }
+        }
+
+        receiver.onDataReceived = { data in
+            handler.handlePacket(data)
+        }
+
+        packetHandler = handler
+    }
+}
+
+
+#Preview {
+    ReceiverView()
+}
diff --git a/mirroringBooth/mirroringBooth/MirroringDevice/Connection/StreamReceiver.swift b/mirroringBooth/mirroringBooth/MirroringDevice/Connection/StreamReceiver.swift
new file mode 100644
index 0000000..8e5c2a9
--- /dev/null
+++ b/mirroringBooth/mirroringBooth/MirroringDevice/Connection/StreamReceiver.swift
@@ -0,0 +1,115 @@
+//
+//  StreamReceiver.swift
+//  mirroringBooth
+//
+//  Created by 이상유 on 2025-12-28.
+//
+
+import Foundation
+import MultipeerConnectivity
+
+/// Stream receiver side (iPad/Mac).
+/// Advertises the service, accepts incoming invitations, and receives
+/// stream data (video/photo packets).
+@Observable
+final class StreamReceiver: NSObject {
+
+    /// Whether at least one sender is currently connected.
+    var isConnected: Bool = false
+
+    /// Invoked for every raw data blob received over the session.
+    var onDataReceived: ((Data) -> Void)?
+
+    private let serviceType: String
+    /// Identity of this device in the Multipeer session.
+    private let identifier: MCPeerID
+    /// Multipeer session shared with the sender.
+    private let session: MCSession
+    /// Service advertiser (the receiver advertises, the sender browses).
+    private let advertiser: MCNearbyServiceAdvertiser
+
+    init(serviceType: String = "mirroringbooth") {
+        self.serviceType = serviceType
+        self.identifier = MCPeerID(displayName: UIDevice.current.name)
+        self.session = MCSession(
+            peer: identifier,
+            securityIdentity: nil,
+            encryptionPreference: .none
+        )
+        self.advertiser = MCNearbyServiceAdvertiser(peer: identifier, discoveryInfo: nil, serviceType: serviceType)
+
+        super.init()
+        setup()
+    }
+
+    private func setup() {
+        session.delegate = self
+        advertiser.delegate = self
+    }
+
+    /// Makes this device discoverable to senders.
+    func startAdvertising() {
+        advertiser.startAdvertisingPeer()
+    }
+
+    /// Stops being discoverable (does not tear down an established session).
+    func stopAdvertising() {
+        advertiser.stopAdvertisingPeer()
+    }
+
+    /// Sends a capture request to the sender (iPad → iPhone).
+    func requestCapture() {
+        guard !session.connectedPeers.isEmpty else {
+            print("No connected peers to send capture request")
+            return
+        }
+
+        // A capture request carries no payload.
+        let packet = MediaPacket(type: .captureRequest, data: Data())
+
+        do {
+            try session.send(packet.serialize(), toPeers: session.connectedPeers, with: .reliable)
+        } catch {
+            print("Failed to send capture request: \(error)")
+        }
+    }
+
+}
+
+// MARK: - Session Delegate
+extension StreamReceiver: MCSessionDelegate {
+
+    func session(_ session: MCSession, peer peerID: MCPeerID, didChange state: MCSessionState) {
+        // MCSession delegate callbacks arrive on a private background queue;
+        // @Observable state drives SwiftUI, so mutate it on the main thread.
+        DispatchQueue.main.async { [weak self] in
+            switch state {
+            case .connected:
+                self?.isConnected = true
+            case .notConnected:
+                self?.isConnected = false
+            case .connecting:
+                break
+            @unknown default:
+                break
+            }
+        }
+    }
+
+    func session(_ session: MCSession, didReceive data: Data, fromPeer peerID: MCPeerID) {
+        // Hand the received stream data to the registered handler.
+        onDataReceived?(data)
+    }
+
+    func session(_ session: MCSession, didReceive stream: InputStream, withName streamName: String, fromPeer peerID: MCPeerID) { }
+
+    func session(_ session: MCSession, didStartReceivingResourceWithName resourceName: String, fromPeer peerID: MCPeerID, with progress: Progress) { }
+
+    func session(_ session: MCSession, didFinishReceivingResourceWithName resourceName: String, fromPeer peerID: MCPeerID, at localURL: URL?, withError error: (any Error)?) { }
+
+}
+
+// MARK: - Advertiser Delegate
+extension StreamReceiver: MCNearbyServiceAdvertiserDelegate {
+
+    func advertiser(
+        _ advertiser: MCNearbyServiceAdvertiser,
+        didReceiveInvitationFromPeer peerID: MCPeerID,
+        withContext context: Data?,
+        invitationHandler: @escaping (Bool, MCSession?) -> Void
+    ) {
+        // Accepts every invitation unconditionally.
+        // NOTE(review): fine for a demo; production should confirm with the
+        // user or validate `context` before accepting.
+        invitationHandler(true, session)
+    }
+
+}
diff --git a/mirroringBooth/mirroringBooth/MirroringDevice/Photo/PhotoView.swift b/mirroringBooth/mirroringBooth/MirroringDevice/Photo/PhotoView.swift
new file mode 100644
index 0000000..43295ee
--- /dev/null
+++ b/mirroringBooth/mirroringBooth/MirroringDevice/Photo/PhotoView.swift
@@ -0,0 +1,54 @@
+//
+//  PhotoView.swift
+//  mirroringBooth
+//
+//  Created by 이상유 on 2025-12-29.
+//
+
+import SwiftUI
+
+/// Full-screen display of a captured high-quality photo.
+struct PhotoView: View {
+
+    /// JPEG/PNG bytes of the received photo.
+    let photoData: Data
+    @Environment(\.dismiss) private var dismiss
+
+    var body: some View {
+        ZStack {
+            Color.black.edgesIgnoringSafeArea(.all)
+
+            VStack {
+                Spacer()
+
+                // The photo itself (silently omitted if the bytes fail to decode).
+                if let uiImage = UIImage(data: photoData) {
+                    Image(uiImage: uiImage)
+                        .resizable()
+                        .aspectRatio(contentMode: .fit)
+                        .frame(maxWidth: .infinity, maxHeight: .infinity)
+                }
+
+                Spacer()
+
+                // Close button.
+                Button {
+                    dismiss()
+                } label: {
+                    Text("닫기")
+                        .font(.headline)
+                        .foregroundColor(.white)
+                        .padding()
+                        .background(Color.gray.opacity(0.8))
+                        .cornerRadius(10)
+                }
+                .padding(.bottom, 30)
+            }
+        }
+    }
+}
+
+#Preview {
+    if let data = UIImage(systemName: "photo")?.pngData() {
+        PhotoView(photoData: data)
+    }
+}
diff --git a/mirroringBooth/mirroringBooth/MirroringDevice/Stream/MediaFrameRenderer.swift b/mirroringBooth/mirroringBooth/MirroringDevice/Stream/MediaFrameRenderer.swift
new file mode 100644
index 0000000..8c9fe69
--- /dev/null
+++
b/mirroringBooth/mirroringBooth/MirroringDevice/Stream/MediaFrameRenderer.swift @@ -0,0 +1,38 @@ +// +// MediaFrameRenderer.swift +// mirroringBooth +// +// Created by 이상유 on 2025-12-27. +// + +import Foundation +import SwiftUI +import CoreImage +import Combine + +/// 디코딩된 비디오 프레임을 UI에 렌더링 가능한 형태로 변환 +@MainActor +final class MediaFrameRenderer: ObservableObject { + + /// 현재 화면에 표시할 프레임 이미지 + @Published var currentFrame: CGImage? + + /// CoreImage 컨텍스트 - CVPixelBuffer를 CGImage로 변환 + private let ciContext = CIContext() + + /// CVPixelBuffer를 CGImage로 변환하여 UI 업데이트 + /// - Parameter pixelBuffer: 디코딩된 프레임 (YUV420 포맷) + func renderDecodedFrame(_ pixelBuffer: CVPixelBuffer) { + let ciImage = CIImage(cvPixelBuffer: pixelBuffer) + + guard let cgImage = ciContext.createCGImage(ciImage, from: ciImage.extent) else { + return + } + + // UI 업데이트는 메인 스레드에서 수행 + DispatchQueue.main.async { [weak self] in + self?.currentFrame = cgImage + } + } + +} diff --git a/mirroringBooth/mirroringBooth/MirroringDevice/Stream/StreamDisplayView.swift b/mirroringBooth/mirroringBooth/MirroringDevice/Stream/StreamDisplayView.swift new file mode 100644 index 0000000..520b943 --- /dev/null +++ b/mirroringBooth/mirroringBooth/MirroringDevice/Stream/StreamDisplayView.swift @@ -0,0 +1,53 @@ +// +// StreamDisplayView.swift +// mirroringBooth +// +// Created by 이상유 on 2025-12-27. +// + +import SwiftUI + +/// 디코딩된 비디오 프레임을 화면에 렌더링하는 뷰 +struct StreamDisplayView: View { + + @ObservedObject private var renderer: MediaFrameRenderer + var onCaptureRequest: (() -> Void)? + + init(renderer: MediaFrameRenderer, onCaptureRequest: (() -> Void)? 
= nil) { + self.renderer = renderer + self.onCaptureRequest = onCaptureRequest + } + + var body: some View { + ZStack { + GeometryReader { geometry in + if let image = renderer.currentFrame { + Image(decorative: image, scale: 1.0) + .resizable() + .aspectRatio(contentMode: .fit) + .frame(width: geometry.size.width, height: geometry.size.height) + } + } + .background(Color.black) + + // 촬영 버튼 + VStack { + Spacer() + + Button { + onCaptureRequest?() + } label: { + Circle() + .fill(Color.white) + .frame(width: 70, height: 70) + .overlay( + Circle() + .stroke(Color.gray, lineWidth: 3) + .frame(width: 80, height: 80) + ) + } + .padding(.bottom, 50) + } + } + } +} diff --git a/mirroringBooth/mirroringBooth/MirroringDevice/Stream/VideoDecoder.swift b/mirroringBooth/mirroringBooth/MirroringDevice/Stream/VideoDecoder.swift new file mode 100644 index 0000000..98f878d --- /dev/null +++ b/mirroringBooth/mirroringBooth/MirroringDevice/Stream/VideoDecoder.swift @@ -0,0 +1,253 @@ +// +// VideoDecoder.swift +// mirroringBooth +// +// Created by 이상유 on 2025-12-27. +// + +import Foundation +import AVFoundation +import VideoToolbox + +/// H.264 비디오 스트림 디코더 +/// 수신된 패킷을 디코딩하여 화면에 표시 가능한 프레임으로 변환 +final class VideoDecoder { + + /// 디코딩된 프레임을 전달하는 콜백 + var onDecodedFrame: ((CVPixelBuffer) -> Void)? + + /// 비디오 압축 해제 세션 (디코더) + private var decompressionSession: VTDecompressionSession? + + /// SPS (Sequence Parameter Set) - 비디오 시퀀스 설정 정보 + private var spsData: Data? + + /// PPS (Picture Parameter Set) - 픽처 파라미터 정보 + private var ppsData: Data? + + /// 포맷 정보 (SPS/PPS로부터 생성) + private var formatDescription: CMFormatDescription? 
+ + /// 디코딩 결과 콜백 + private let decompressionOutputCallback: VTDecompressionOutputCallback = { + decompressionOutputRefCon, + sourceFrameRefCon, + status, + infoFlags, + imageBuffer, + presentationTimeStamp, + presentationDuration in + + guard status == noErr, + let imageBuffer + else { + print("Decoding failed with status: \(status)") + return + } + + let decoder = Unmanaged + .fromOpaque(decompressionOutputRefCon!) + .takeUnretainedValue() + + decoder.onDecodedFrame?(imageBuffer) + } + + // 수신된 비디오 패킷 처리 + func handleReceivedPacket(_ packetData: Data) { + guard let packet = MediaPacket.deserialize(packetData) else { + return + } + + switch packet.type { + case .sps: + handleSPS(packet.data) + case .pps: + handlePPS(packet.data) + case .idrFrame, .pFrame: + handleFrame(packet.data, isKeyFrame: packet.type == .idrFrame) + default: + break + } + } + + // SPS 데이터 처리 + private func handleSPS(_ data: Data) { + spsData = data + + // SPS와 PPS가 모두 있으면 디코더 초기화 + if ppsData != nil { + setupDecoder() + } + } + + // PPS 데이터 처리 + private func handlePPS(_ data: Data) { + ppsData = data + + // SPS와 PPS가 모두 있으면 디코더 초기화 + if spsData != nil { + setupDecoder() + } + } + + // SPS/PPS로부터 CMFormatDescription 생성 및 디코더 설정 + private func setupDecoder() { + guard let sps = spsData, + let pps = ppsData + else { return } + + // SPS와 PPS로부터 포맷 정보 생성 + // withUnsafeBytes 내에서 모든 작업 수행 (포인터 생명주기 보장) + let status = sps.withUnsafeBytes { spsBuffer -> OSStatus in + pps.withUnsafeBytes { ppsBuffer -> OSStatus in + guard let spsPtr = spsBuffer.baseAddress?.assumingMemoryBound(to: UInt8.self), + let ppsPtr = ppsBuffer.baseAddress?.assumingMemoryBound(to: UInt8.self) else { + return -1 + } + + let parameterSetPointers: [UnsafePointer] = [spsPtr, ppsPtr] + let parameterSetSizes: [Int] = [sps.count, pps.count] + + return CMVideoFormatDescriptionCreateFromH264ParameterSets( + allocator: kCFAllocatorDefault, + parameterSetCount: 2, + parameterSetPointers: parameterSetPointers, + parameterSetSizes: 
parameterSetSizes, + nalUnitHeaderLength: 4, + formatDescriptionOut: &formatDescription + ) + } + } + + guard status == noErr, + let formatDesc = formatDescription + else { return } + + // 기존 세션이 있으면 무효화 + if let session = decompressionSession { + VTDecompressionSessionInvalidate(session) + decompressionSession = nil + } + + // 디코더 세션 생성 + var videoDecoderSpecification: [CFString: Any] = [:] + #if targetEnvironment(simulator) + // 시뮬레이터에서는 소프트웨어 디코더 사용 + videoDecoderSpecification[kVTVideoDecoderSpecification_EnableHardwareAcceleratedVideoDecoder] = false + #endif + + let destinationImageBufferAttributes: [CFString: Any] = [ + kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, + kCVPixelBufferMetalCompatibilityKey: true + ] + + var callbackRecord = VTDecompressionOutputCallbackRecord( + decompressionOutputCallback: decompressionOutputCallback, + decompressionOutputRefCon: UnsafeMutableRawPointer(Unmanaged.passUnretained(self).toOpaque()) + ) + + var session: VTDecompressionSession? + let createStatus = withUnsafePointer(to: &callbackRecord) { callbackPtr in + VTDecompressionSessionCreate( + allocator: kCFAllocatorDefault, + formatDescription: formatDesc, + decoderSpecification: videoDecoderSpecification as CFDictionary, + imageBufferAttributes: destinationImageBufferAttributes as CFDictionary, + outputCallback: callbackPtr, + decompressionSessionOut: &session + ) + } + + guard createStatus == noErr, + let decoderSession = session + else { return } + + decompressionSession = decoderSession + } + + // 프레임 데이터 디코딩 + private func handleFrame(_ data: Data, isKeyFrame: Bool) { + guard let session = decompressionSession else { return } + + // CMBlockBuffer 생성 + var blockBuffer: CMBlockBuffer? 
+ let dataPointer = (data as NSData).bytes.bindMemory(to: UInt8.self, capacity: data.count) + + let blockBufferStatus = CMBlockBufferCreateWithMemoryBlock( + allocator: kCFAllocatorDefault, + memoryBlock: nil, + blockLength: data.count, + blockAllocator: kCFAllocatorDefault, + customBlockSource: nil, + offsetToData: 0, + dataLength: data.count, + flags: 0, + blockBufferOut: &blockBuffer + ) + + guard blockBufferStatus == noErr, + let buffer = blockBuffer + else { + print("Failed to create block buffer: \(blockBufferStatus)") + return + } + + // 데이터 복사 + CMBlockBufferReplaceDataBytes( + with: dataPointer, + blockBuffer: buffer, + offsetIntoDestination: 0, + dataLength: data.count + ) + + // CMSampleBuffer 생성 + var sampleBuffer: CMSampleBuffer? + var sampleSizeArray = [data.count] + + let sampleBufferStatus = CMSampleBufferCreateReady( + allocator: kCFAllocatorDefault, + dataBuffer: buffer, + formatDescription: formatDescription, + sampleCount: 1, + sampleTimingEntryCount: 0, + sampleTimingArray: nil, + sampleSizeEntryCount: 1, + sampleSizeArray: &sampleSizeArray, + sampleBufferOut: &sampleBuffer + ) + + guard sampleBufferStatus == noErr, + let sample = sampleBuffer + else { + print("Failed to create sample buffer: \(sampleBufferStatus)") + return + } + + // 프레임 디코딩 + var infoFlags = VTDecodeInfoFlags() + VTDecompressionSessionDecodeFrame( + session, + sampleBuffer: sample, + flags: [._EnableAsynchronousDecompression], + frameRefcon: nil, + infoFlagsOut: &infoFlags + ) + } + + // 디코더 세션 정리 + func cleanup() { + if let session = decompressionSession { + VTDecompressionSessionInvalidate(session) + decompressionSession = nil + } + + spsData = nil + ppsData = nil + formatDescription = nil + } + + deinit { + cleanup() + } + +} diff --git a/mirroringBooth/mirroringBooth/Assets.xcassets/AccentColor.colorset/Contents.json b/mirroringBooth/mirroringBooth/Resource/Assets.xcassets/AccentColor.colorset/Contents.json similarity index 100% rename from 
mirroringBooth/mirroringBooth/Assets.xcassets/AccentColor.colorset/Contents.json rename to mirroringBooth/mirroringBooth/Resource/Assets.xcassets/AccentColor.colorset/Contents.json diff --git a/mirroringBooth/mirroringBooth/Assets.xcassets/AppIcon.appiconset/Contents.json b/mirroringBooth/mirroringBooth/Resource/Assets.xcassets/AppIcon.appiconset/Contents.json similarity index 100% rename from mirroringBooth/mirroringBooth/Assets.xcassets/AppIcon.appiconset/Contents.json rename to mirroringBooth/mirroringBooth/Resource/Assets.xcassets/AppIcon.appiconset/Contents.json diff --git a/mirroringBooth/mirroringBooth/Assets.xcassets/Contents.json b/mirroringBooth/mirroringBooth/Resource/Assets.xcassets/Contents.json similarity index 100% rename from mirroringBooth/mirroringBooth/Assets.xcassets/Contents.json rename to mirroringBooth/mirroringBooth/Resource/Assets.xcassets/Contents.json