// LiveKit PTT protocol:
//   connect(url, token) → join LiveKit room (muted by default)
//   startSpeaking()     → enable microphone → LiveKit publishes audio track
//   stopSpeaking()      → disable microphone → LiveKit unpublishes track
// Remote audio is played automatically by the LiveKit SDK.
// ActiveSpeakersChangedEvent fires whenever a participant starts/stops speaking.
import 'dart:async';

import 'package:livekit_client/livekit_client.dart';

import '../config/app_config.dart';

/// Lifecycle states of the push-to-talk channel.
enum PttState { idle, connected, speaking, receiving }

/// Push-to-talk service built on a LiveKit room.
///
/// The local microphone stays muted except between [startSpeaking] and
/// [stopSpeaking], so the room behaves like a half-duplex radio channel.
/// When `AppConfig.debug` is true all network calls are simulated.
class PttService {
  Room? _room;
  EventsListener<RoomEvent>? _listener;

  // Broadcast controllers so multiple UI widgets can observe the same service.
  final _stateCtrl = StreamController<PttState>.broadcast();
  final _speakerCtrl = StreamController<String?>.broadcast();
  final _errorCtrl = StreamController<String>.broadcast();

  PttState _state = PttState.idle;
  String? _speakerName;

  /// Emits every state transition; see [currentState] for the latest value.
  Stream<PttState> get stateStream => _stateCtrl.stream;

  /// Display name of the remote speaker, or null when nobody is talking.
  Stream<String?> get speakerStream => _speakerCtrl.stream;

  /// Localized, human-readable error messages.
  Stream<String> get errorStream => _errorCtrl.stream;

  PttState get currentState => _state;
  String? get currentSpeaker => _speakerName;

  void _setState(PttState s) {
    _state = s;
    _stateCtrl.add(s);
  }

  // ── Connect ──────────────────────────────────────────────────────────────

  /// Joins the LiveKit room at [url] using [token], starting muted.
  ///
  /// Returns true on success; on failure tears down the partially-built
  /// room, emits a localized message on [errorStream], and returns false.
  Future<bool> connect(String url, String token) async {
    if (AppConfig.debug) {
      // Simulated connection latency for UI development without a server.
      await Future.delayed(const Duration(milliseconds: 400));
      _setState(PttState.connected);
      return true;
    }
    try {
      _room = Room(
        roomOptions: const RoomOptions(
          adaptiveStream: false,
          dynacast: false,
        ),
      );
      _listener = _room!.createListener();
      _listener!
        ..on<ActiveSpeakersChangedEvent>(
          (e) => _onSpeakersChanged(e.speakers),
        )
        ..on<RoomDisconnectedEvent>((_) {
          _setState(PttState.idle);
        });
      await _room!.connect(url, token);
      // Start muted — the mic is only enabled while PTT is pressed.
      await _room!.localParticipant?.setMicrophoneEnabled(false);
      _setState(PttState.connected);
      return true;
    } catch (e) {
      // Clean up the half-built room/listener so a retry starts fresh.
      await _listener?.dispose();
      await _room?.disconnect();
      _listener = null;
      _room = null;
      _errorCtrl.add('اتصال به سرور برقرار نشد');
      return false;
    }
  }

  // ── Speakers detection ────────────────────────────────────────────────────

  /// Handles LiveKit active-speaker updates and mirrors them into
  /// [speakerStream] / [stateStream].
  void _onSpeakersChanged(List<Participant> speakers) {
    // Ignore while I'm the one speaking — local PTT wins over incoming audio.
    if (_state == PttState.speaking) return;
    final remoteSpeakers = speakers.whereType<RemoteParticipant>().toList();
    if (remoteSpeakers.isNotEmpty) {
      final p = remoteSpeakers.first;
      // Prefer the display name; fall back to the LiveKit identity.
      _speakerName = p.name.isNotEmpty ? p.name : p.identity;
      _speakerCtrl.add(_speakerName);
      _setState(PttState.receiving);
    } else if (_state == PttState.receiving) {
      _speakerName = null;
      _speakerCtrl.add(null);
      _setState(PttState.connected);
    }
  }

  // ── PTT ───────────────────────────────────────────────────────────────────

  /// PTT pressed: enables the microphone.
  ///
  /// No-op unless currently [PttState.connected] (you cannot talk over an
  /// incoming transmission or before joining).
  Future<void> startSpeaking() async {
    if (_state != PttState.connected) return;
    if (AppConfig.debug) {
      _setState(PttState.speaking);
      return;
    }
    try {
      await _room?.localParticipant?.setMicrophoneEnabled(true);
      _setState(PttState.speaking);
    } catch (_) {
      _errorCtrl.add('خطا در فعال‌سازی میکروفون');
    }
  }

  /// PTT released: mutes the microphone. No-op unless currently speaking.
  Future<void> stopSpeaking() async {
    if (_state != PttState.speaking) return;
    if (AppConfig.debug) {
      _setState(PttState.connected);
      // Deliberately fire-and-forget: the fake reply arrives "later".
      unawaited(_debugSimulateIncoming());
      return;
    }
    try {
      await _room?.localParticipant?.setMicrophoneEnabled(false);
      _setState(PttState.connected);
    } catch (_) {
      // Even if muting failed, reflect "not transmitting" in the UI.
      _setState(PttState.connected);
    }
  }

  // ── Debug helper ──────────────────────────────────────────────────────────

  /// Debug-only: simulates an incoming 2-second transmission after a short
  /// delay, bailing out if the real state moved on in the meantime.
  Future<void> _debugSimulateIncoming() async {
    await Future.delayed(const Duration(milliseconds: 800));
    if (_state != PttState.connected) return;
    _speakerName = 'کاربر تست';
    _speakerCtrl.add(_speakerName);
    _setState(PttState.receiving);
    await Future.delayed(const Duration(seconds: 2));
    if (_state != PttState.receiving) return;
    _speakerName = null;
    _speakerCtrl.add(null);
    _setState(PttState.connected);
  }

  // ── Disconnect / Dispose ──────────────────────────────────────────────────

  /// Leaves the room and resets to [PttState.idle]. Safe to call repeatedly.
  Future<void> disconnect() async {
    if (AppConfig.debug) {
      _setState(PttState.idle);
      return;
    }
    try {
      // Best effort: make sure we are muted before tearing down.
      await _room?.localParticipant?.setMicrophoneEnabled(false);
    } catch (_) {}
    await _listener?.dispose();
    await _room?.disconnect();
    _room = null;
    _listener = null;
    _setState(PttState.idle);
  }

  /// Disconnects and closes all stream controllers.
  ///
  /// Call once; the service is unusable afterwards.
  Future<void> dispose() async {
    await disconnect();
    await _stateCtrl.close();
    await _speakerCtrl.close();
    await _errorCtrl.close();
  }
}