Recording access trouble

๋งˆ์ดํฌ ์‚ฌ์šฉ ๊ถŒํ•œ ๊ด€๋ จ ํŠธ๋Ÿฌ๋ธ” ์ŠˆํŒ…

๋ฌธ์ œ

์šฐ์„  ์•ฑ์—์„œ ๋งˆ์ดํฌ, ์นด๋ฉ”๋ผ, ๋ธ”๋ฃจํˆฌ์Šค ๋“ฑ๋“ฑ ๊ธฐ๊ธฐ๋ฅผ ์‚ฌ์šฉํ•˜๋ ค๋ฉด ์‚ฌ์šฉ ๊ถŒํ•œ์— ๋™์˜๋ฅผ ํ•ด์•ผํ•œ๋‹ค. ๊ทธ๋ ‡๊ฒŒ ๋•Œ๋ฌธ์— Info.plist ์—๋‹ค๊ฐ€ Privacy - Microphone Usage Description์„ ์ถ”๊ฐ€ํ•ด ์–ด๋–ค ์šฉ๋„์—์„œ ์‚ฌ์šฉ๊ถŒํ•œ์„ ์š”์ฒญํ•˜๋Š”์ง€ ํŒ์—… ์•Œ๋žŒ์ด ๋œจ๊ณ  ๊ทธ๊ฒƒ์„ ์‚ฌ์šฉ์ž๊ฐ€ ๋™์˜ํ• ์ง€ ๋™์˜ํ•˜์ง€ ์•Š์„์ง€๋ฅผ ๊ฒฐ์ •ํ•˜๋Š” ๊ตฌ์กฐ์ธ๋ฐ ํŒ์—… ์•Œ๋žŒ์ด ๋œจ์ง€ ์•Š์•„์„œ ๊ทธ ์›์ธ์„ ์ฐพ๊ธฐ์œ„ํ•ด ๋งŽ์ด ํ—ค๋งธ๋‹ค.

ํ•ด๊ฒฐ ๋ฐฉ๋ฒ•

์šฐ์„  ์ฝ”๋“œ์˜ ๋กœ์ง์„ ์‚ดํŽด๋ณด์•˜๋‹ค.

    /// ๋…น์Œ ์‹œ์ž‘
    func startRecording(_ loop: Bool) {
        if let audioProcessor = whisperKit?.audioProcessor {
            Task(priority: .userInitiated) {
                guard await AudioProcessor.requestRecordPermission() else {
                    print("Microphone access was not granted.")
                    return
                }
                var deviceId: DeviceID?
                #if os(macOS)
                    if settings.selectedAudioInput != "No Audio Input",
                       let devices = audioState.audioDevices,
                       let device = devices
                       .first(where: { $0.name == settings.selectedAudioInput }) {
                        deviceId = device.id
                    }
                    if deviceId == nil {
                        throw WhisperError.microphoneUnavailable()
                    }
                #endif
                try? audioProcessor.startRecordingLive(inputDeviceID: deviceId) { _ in
                    DispatchQueue.main.async {
                        // ์ „์‚ฌ ์ƒํƒœ ์—…๋ฐ์ดํŠธ
                        self.transcriptionState.bufferEnergy = self.whisperKit?.audioProcessor
                            .relativeEnergy ?? []
                        self.transcriptionState
                            .bufferSeconds = Double(self.whisperKit?.audioProcessor.audioSamples
                                .count ?? 0) / Double(WhisperKit.sampleRate)
                    }
                }
                audioState.isRecording = true
                audioState.isTranscribing = true
                if loop { realtimeLoop() }
            }
        }
    }

์œ„์™€๊ฐ™์€ ๋กœ์ง์ธ๋ฐ WhisperKit์˜ AudioProcessor๋ฅผ ํ†ตํ•ด์„œ requestRecordPermission()๊ถŒํ•œ์„ ํ™•์ธํ•˜๊ณ  ๋…น์Œ์„ ์‹œ์ž‘ํ•˜๋Š” ๊ตฌ์กฐ์ธ๋ฐ ์‚ฌ์‹ค ๊ตฌ์กฐ์˜ ๋ฌธ์ œ๋Š” ์—†์—ˆ์ง€๋งŒ ๋‹จ๊ณ„๋ณ„ ๋””๋ฒ„๊น…์„ ์ˆ˜ํ–‰ํ•˜๊ธฐ ์œ„ํ•ด ContentView์˜ .onAppear ๋ฉ”์†Œ๋“œ์— ๋‹ค์Œ๊ณผ ๊ฐ™์€ ์ฝ”๋“œ๋ฅผ ๋„ฃ์€ ํ›„ ์‹คํ–‰ํ•ด ๋ณด์•˜๋‹ค.

    if #available(macOS 14, *) {
        Task {
            let granted = await AVAudioApplication.requestRecordPermission()
            print("Microphone permission \(granted ? "granted" : "denied")")
        }
    } else {
        let microphoneStatus = AVCaptureDevice.authorizationStatus(for: .audio)
        switch microphoneStatus {
        case .notDetermined:
            AVCaptureDevice.requestAccess(for: .audio) { granted in
                DispatchQueue.main.async {
                    print("Microphone permission \(granted ? "granted" : "denied")")
                }
            }
        case .authorized:
            print("Microphone permission already granted")
        default:
            print("Microphone permission denied or restricted")
        }
    }

ํ•˜์ง€๋งŒ ์—ฌ์ „ํžˆ ํŒ์—…์ฐฝ์€ ๋œจ์ง€ ์•Š๊ณ  ๊ถŒํ•œ์ด ๊ฑฐ๋ถ€๋œ๋‹ค๋Š” ๋ฉ”์„ธ์ง€๋งŒ print๋˜์–ด์„œ ํ•œ์ฐธ์„ ๊ณ ์ƒํ•˜๋˜์ค‘ ์ด์œ ๋ฅผ ๋ฐœ๊ฒฌํ•˜์˜€๋‹ค.

I discovered that the Audio Input option under Targets -> Signing & Capabilities had not been checked.

๊ทธ๋ž˜์„œ ๋‹ค์Œ๊ณผ ๊ฐ™์ด ์ฒดํฌํ•œ ํ›„ ์‹คํ–‰ํ•˜๋‹ˆ ์ •์ƒ์ ์œผ๋กœ ์ž‘๋™ ๋˜์—ˆ๋‹ค.