feat: enhance audio input handling and add message polling functionality

This commit is contained in:
Nedifinita
2025-12-05 18:12:20 +08:00
parent 99bc081583
commit 2b856c1a4a
7 changed files with 467 additions and 22 deletions

View File

@@ -250,7 +250,18 @@ void processBasebandSample(double sample)
} }
double sample_val = filt - dc_offset; double sample_val = filt - dc_offset;
double threshold = 0.05;
static double peak_pos = 0.01;
static double peak_neg = -0.01;
if (sample_val > peak_pos) peak_pos = sample_val;
else peak_pos *= 0.9999;
if (sample_val < peak_neg) peak_neg = sample_val;
else peak_neg *= 0.9999;
double threshold = (peak_pos - peak_neg) * 0.15;
if (threshold < 0.005) threshold = 0.005;
if (sample_val > threshold) if (sample_val > threshold)
{ {

View File

@@ -98,9 +98,6 @@ Java_org_noxylva_lbjconsole_flutter_AudioInputHandler_nativePushAudio(
for (int i = 0; i < size; i++) { for (int i = 0; i < size; i++) {
double sample = (double)samples[i] / 32768.0; double sample = (double)samples[i] / 32768.0;
sample *= 5.0;
processBasebandSample(sample); processBasebandSample(sample);
} }
@@ -125,6 +122,35 @@ Java_org_noxylva_lbjconsole_flutter_RtlTcpChannelHandler_getSignalStrength(JNIEn
{ {
return (jdouble)magsqRaw; return (jdouble)magsqRaw;
} }
// Resets all shared demodulator message state so a new listening session
// starts from a clean slate. Called from the Kotlin side before "start"
// and after "stop".
extern "C" JNIEXPORT void JNICALL
Java_org_noxylva_lbjconsole_flutter_AudioInputHandler_clearMessageBuffer(JNIEnv *, jobject)
{
// Acquire both locks, and in the same order as pollMessages(), to avoid
// deadlock against the audio/demodulator threads.
std::lock_guard<std::mutex> demodLock(demodDataMutex);
std::lock_guard<std::mutex> msgLock(msgMutex);
messageBuffer.clear();
// Also drop any partially decoded message held by the demodulator.
is_message_ready = false;
numeric_msg.clear();
alpha_msg.clear();
}
// Drains the accumulated decoded messages and returns them to Kotlin as one
// newline-terminated string per message ("" when nothing is pending). The
// buffer is cleared on read, so each message is delivered at most once.
extern "C" JNIEXPORT jstring JNICALL
Java_org_noxylva_lbjconsole_flutter_AudioInputHandler_pollMessages(JNIEnv *env, jobject)
{
// Same lock order as clearMessageBuffer(): demod lock first, then msg lock.
std::lock_guard<std::mutex> demodLock(demodDataMutex);
std::lock_guard<std::mutex> msgLock(msgMutex);
if (messageBuffer.empty())
{
return env->NewStringUTF("");
}
std::ostringstream ss;
for (auto &msg : messageBuffer)
ss << msg << "\n";
messageBuffer.clear();
// NOTE(review): NewStringUTF requires modified UTF-8. The Kotlin consumer
// round-trips this string through ISO-8859-1 and decodes GBK, which implies
// raw non-ASCII bytes may appear here — that is undefined behavior for
// NewStringUTF and can abort under CheckJNI. Confirm message content is
// ASCII-safe, or return a jbyteArray instead.
return env->NewStringUTF(ss.str().c_str());
}
extern "C" JNIEXPORT jboolean JNICALL extern "C" JNIEXPORT jboolean JNICALL
Java_org_noxylva_lbjconsole_flutter_RtlTcpChannelHandler_isConnected(JNIEnv *, jobject) Java_org_noxylva_lbjconsole_flutter_RtlTcpChannelHandler_isConnected(JNIEnv *, jobject)
{ {

View File

@@ -6,14 +6,18 @@ import android.content.pm.PackageManager
import android.media.AudioFormat import android.media.AudioFormat
import android.media.AudioRecord import android.media.AudioRecord
import android.media.MediaRecorder import android.media.MediaRecorder
import android.os.Handler
import android.os.Looper
import android.util.Log import android.util.Log
import androidx.core.content.ContextCompat import androidx.core.content.ContextCompat
import io.flutter.embedding.engine.FlutterEngine import io.flutter.embedding.engine.FlutterEngine
import io.flutter.plugin.common.MethodCall import io.flutter.plugin.common.MethodCall
import io.flutter.plugin.common.MethodChannel import io.flutter.plugin.common.MethodChannel
import io.flutter.plugin.common.EventChannel
import java.util.concurrent.atomic.AtomicBoolean import java.util.concurrent.atomic.AtomicBoolean
import java.nio.charset.Charset
class AudioInputHandler(private val context: Context) : MethodChannel.MethodCallHandler { class AudioInputHandler(private val context: Context) : MethodChannel.MethodCallHandler, EventChannel.StreamHandler {
private var audioRecord: AudioRecord? = null private var audioRecord: AudioRecord? = null
private val isRecording = AtomicBoolean(false) private val isRecording = AtomicBoolean(false)
private var recordingThread: Thread? = null private var recordingThread: Thread? = null
@@ -25,21 +29,35 @@ class AudioInputHandler(private val context: Context) : MethodChannel.MethodCall
AudioFormat.ENCODING_PCM_16BIT AudioFormat.ENCODING_PCM_16BIT
) * 2 ) * 2
private val handler = Handler(Looper.getMainLooper())
private var eventSink: EventChannel.EventSink? = null
companion object { companion object {
private const val CHANNEL = "org.noxylva.lbjconsole/audio_input" private const val METHOD_CHANNEL = "org.noxylva.lbjconsole/audio_input"
private const val EVENT_CHANNEL = "org.noxylva.lbjconsole/audio_input_event"
private const val TAG = "AudioInputHandler" private const val TAG = "AudioInputHandler"
init {
System.loadLibrary("railwaypagerdemod")
}
fun registerWith(flutterEngine: FlutterEngine, context: Context) { fun registerWith(flutterEngine: FlutterEngine, context: Context) {
val channel = MethodChannel(flutterEngine.dartExecutor.binaryMessenger, CHANNEL) val handler = AudioInputHandler(context)
channel.setMethodCallHandler(AudioInputHandler(context)) val methodChannel = MethodChannel(flutterEngine.dartExecutor.binaryMessenger, METHOD_CHANNEL)
methodChannel.setMethodCallHandler(handler)
val eventChannel = EventChannel(flutterEngine.dartExecutor.binaryMessenger, EVENT_CHANNEL)
eventChannel.setStreamHandler(handler)
} }
} }
private external fun nativePushAudio(data: ShortArray, size: Int) private external fun nativePushAudio(data: ShortArray, size: Int)
private external fun pollMessages(): String
private external fun clearMessageBuffer()
override fun onMethodCall(call: MethodCall, result: MethodChannel.Result) { override fun onMethodCall(call: MethodCall, result: MethodChannel.Result) {
when (call.method) { when (call.method) {
"start" -> { "start" -> {
clearMessageBuffer()
if (startRecording()) { if (startRecording()) {
result.success(null) result.success(null)
} else { } else {
@@ -48,12 +66,63 @@ class AudioInputHandler(private val context: Context) : MethodChannel.MethodCall
} }
"stop" -> { "stop" -> {
stopRecording() stopRecording()
clearMessageBuffer()
result.success(null) result.success(null)
} }
else -> result.notImplemented() else -> result.notImplemented()
} }
} }
// EventChannel.StreamHandler: Flutter has subscribed. Keep the sink and
// start the main-looper polling loop that forwards native messages to Dart.
override fun onListen(arguments: Any?, events: EventChannel.EventSink?) {
Log.d(TAG, "EventChannel onListen")
this.eventSink = events
startPolling()
}
// EventChannel.StreamHandler: Flutter unsubscribed. Remove any pending poll
// callbacks and null the sink so a poll tick already in flight exits early.
override fun onCancel(arguments: Any?) {
Log.d(TAG, "EventChannel onCancel")
handler.removeCallbacksAndMessages(null)
this.eventSink = null
}
/**
 * Polls the native demodulator every 200 ms on the main looper, emitting a
 * status event each tick plus one data event per parsed "[MSG]" line.
 *
 * Runs until [eventSink] becomes null (see onCancel). Events are delivered
 * on the main thread, as EventChannel requires.
 */
private fun startPolling() {
// Hoisted out of run(): compiling the pattern and resolving the charset on
// every 200 ms tick is loop-invariant work.
// Native log line shape: "[MSG] <address>|<func>|<payload>"
val msgRegex = "\\[MSG\\]\\s*(\\d+)\\|(-?\\d+)\\|(.*)".toRegex()
val gbkCharset = Charset.forName("GBK")
handler.post(object : Runnable {
override fun run() {
// A null sink means onCancel() ran; stop without rescheduling.
if (eventSink == null) {
return
}
val recording = isRecording.get()
val logs = pollMessages()
// Always emit a status event so the Dart side can track listening state.
val statusMap = mutableMapOf<String, Any?>()
statusMap["listening"] = recording
eventSink?.success(statusMap)
if (logs.isNotEmpty()) {
msgRegex.findAll(logs).forEach { match ->
try {
val dataMap = mutableMapOf<String, Any?>()
dataMap["address"] = match.groupValues[1]
dataMap["func"] = match.groupValues[2]
// The payload crossed JNI as a string of raw byte values; round-trip
// through ISO-8859-1 to recover the bytes, then decode them as GBK.
val payloadBytes = match.groupValues[3].toByteArray(Charsets.ISO_8859_1)
val decodedPayload = String(payloadBytes, gbkCharset)
dataMap["numeric"] = decodedPayload
eventSink?.success(dataMap)
} catch (e: Exception) {
// A malformed message must not kill the polling loop.
Log.e(TAG, "decode_fail", e)
}
}
}
handler.postDelayed(this, 200)
}
})
}
private fun startRecording(): Boolean { private fun startRecording(): Boolean {
if (isRecording.get()) return true if (isRecording.get()) return true

View File

@@ -21,6 +21,7 @@ class _ConnectionStatusWidget extends StatefulWidget {
final RtlTcpService rtlTcpService; final RtlTcpService rtlTcpService;
final DateTime? lastReceivedTime; final DateTime? lastReceivedTime;
final DateTime? rtlTcpLastReceivedTime; final DateTime? rtlTcpLastReceivedTime;
final DateTime? audioLastReceivedTime;
final InputSource inputSource; final InputSource inputSource;
final bool rtlTcpConnected; final bool rtlTcpConnected;
@@ -29,6 +30,7 @@ class _ConnectionStatusWidget extends StatefulWidget {
required this.rtlTcpService, required this.rtlTcpService,
required this.lastReceivedTime, required this.lastReceivedTime,
required this.rtlTcpLastReceivedTime, required this.rtlTcpLastReceivedTime,
required this.audioLastReceivedTime,
required this.inputSource, required this.inputSource,
required this.rtlTcpConnected, required this.rtlTcpConnected,
}); });
@@ -92,7 +94,7 @@ class _ConnectionStatusWidgetState extends State<_ConnectionStatusWidget> {
isConnected = AudioInputService().isListening; isConnected = AudioInputService().isListening;
statusColor = isConnected ? Colors.green : Colors.red; statusColor = isConnected ? Colors.green : Colors.red;
statusText = isConnected ? '监听中' : '已停止'; statusText = isConnected ? '监听中' : '已停止';
displayTime = widget.rtlTcpLastReceivedTime ?? widget.lastReceivedTime; displayTime = widget.audioLastReceivedTime;
break; break;
case InputSource.bluetooth: case InputSource.bluetooth:
isConnected = _isConnected; isConnected = _isConnected;
@@ -229,13 +231,17 @@ class _MainScreenState extends State<MainScreen> with WidgetsBindingObserver {
StreamSubscription? _connectionSubscription; StreamSubscription? _connectionSubscription;
StreamSubscription? _rtlTcpConnectionSubscription; StreamSubscription? _rtlTcpConnectionSubscription;
StreamSubscription? _audioConnectionSubscription;
StreamSubscription? _dataSubscription; StreamSubscription? _dataSubscription;
StreamSubscription? _rtlTcpDataSubscription; StreamSubscription? _rtlTcpDataSubscription;
StreamSubscription? _audioDataSubscription;
StreamSubscription? _lastReceivedTimeSubscription; StreamSubscription? _lastReceivedTimeSubscription;
StreamSubscription? _rtlTcpLastReceivedTimeSubscription; StreamSubscription? _rtlTcpLastReceivedTimeSubscription;
StreamSubscription? _audioLastReceivedTimeSubscription;
StreamSubscription? _settingsSubscription; StreamSubscription? _settingsSubscription;
DateTime? _lastReceivedTime; DateTime? _lastReceivedTime;
DateTime? _rtlTcpLastReceivedTime; DateTime? _rtlTcpLastReceivedTime;
DateTime? _audioLastReceivedTime;
bool _isHistoryEditMode = false; bool _isHistoryEditMode = false;
InputSource _inputSource = InputSource.bluetooth; InputSource _inputSource = InputSource.bluetooth;
@@ -277,11 +283,13 @@ class _MainScreenState extends State<MainScreen> with WidgetsBindingObserver {
final sourceStr = settings?['inputSource'] as String? ?? 'bluetooth'; final sourceStr = settings?['inputSource'] as String? ?? 'bluetooth';
if (mounted) { if (mounted) {
final newSource = InputSource.values.firstWhere(
(e) => e.name == sourceStr,
orElse: () => InputSource.bluetooth,
);
setState(() { setState(() {
_inputSource = InputSource.values.firstWhere( _inputSource = newSource;
(e) => e.name == sourceStr,
orElse: () => InputSource.bluetooth,
);
_rtlTcpConnected = _rtlTcpService.isConnected; _rtlTcpConnected = _rtlTcpService.isConnected;
}); });
@@ -324,6 +332,15 @@ class _MainScreenState extends State<MainScreen> with WidgetsBindingObserver {
}); });
} }
}); });
_audioLastReceivedTimeSubscription =
AudioInputService().lastReceivedTimeStream.listen((time) {
if (mounted) {
setState(() {
_audioLastReceivedTime = time;
});
}
});
} }
void _setupSettingsListener() { void _setupSettingsListener() {
@@ -331,13 +348,10 @@ class _MainScreenState extends State<MainScreen> with WidgetsBindingObserver {
DatabaseService.instance.onSettingsChanged((settings) { DatabaseService.instance.onSettingsChanged((settings) {
if (mounted) { if (mounted) {
final sourceStr = settings['inputSource'] as String? ?? 'bluetooth'; final sourceStr = settings['inputSource'] as String? ?? 'bluetooth';
print('[MainScreen] Settings changed: inputSource=$sourceStr');
final newInputSource = InputSource.values.firstWhere( final newInputSource = InputSource.values.firstWhere(
(e) => e.name == sourceStr, (e) => e.name == sourceStr,
orElse: () => InputSource.bluetooth, orElse: () => InputSource.bluetooth,
); );
print('[MainScreen] Current: $_inputSource, New: $newInputSource');
setState(() { setState(() {
_inputSource = newInputSource; _inputSource = newInputSource;
@@ -348,7 +362,6 @@ class _MainScreenState extends State<MainScreen> with WidgetsBindingObserver {
setState(() { setState(() {
_rtlTcpConnected = _rtlTcpService.isConnected; _rtlTcpConnected = _rtlTcpService.isConnected;
}); });
print('[MainScreen] RTL-TCP mode, connected: $_rtlTcpConnected');
break; break;
case InputSource.audioInput: case InputSource.audioInput:
setState(() {}); setState(() {});
@@ -385,6 +398,12 @@ class _MainScreenState extends State<MainScreen> with WidgetsBindingObserver {
}); });
} }
}); });
_audioConnectionSubscription = AudioInputService().connectionStream.listen((listening) {
if (mounted) {
setState(() {});
}
});
} }
Future<void> _connectToRtlTcp(String host, String port) async { Future<void> _connectToRtlTcp(String host, String port) async {
@@ -399,10 +418,13 @@ class _MainScreenState extends State<MainScreen> with WidgetsBindingObserver {
void dispose() { void dispose() {
_connectionSubscription?.cancel(); _connectionSubscription?.cancel();
_rtlTcpConnectionSubscription?.cancel(); _rtlTcpConnectionSubscription?.cancel();
_audioConnectionSubscription?.cancel();
_dataSubscription?.cancel(); _dataSubscription?.cancel();
_rtlTcpDataSubscription?.cancel(); _rtlTcpDataSubscription?.cancel();
_audioDataSubscription?.cancel();
_lastReceivedTimeSubscription?.cancel(); _lastReceivedTimeSubscription?.cancel();
_rtlTcpLastReceivedTimeSubscription?.cancel(); _rtlTcpLastReceivedTimeSubscription?.cancel();
_audioLastReceivedTimeSubscription?.cancel();
_settingsSubscription?.cancel(); _settingsSubscription?.cancel();
WidgetsBinding.instance.removeObserver(this); WidgetsBinding.instance.removeObserver(this);
super.dispose(); super.dispose();
@@ -426,7 +448,13 @@ class _MainScreenState extends State<MainScreen> with WidgetsBindingObserver {
}); });
_rtlTcpDataSubscription = _rtlTcpService.dataStream.listen((record) { _rtlTcpDataSubscription = _rtlTcpService.dataStream.listen((record) {
if (_inputSource != InputSource.bluetooth) { if (_inputSource == InputSource.rtlTcp) {
_processRecord(record);
}
});
_audioDataSubscription = AudioInputService().dataStream.listen((record) {
if (_inputSource == InputSource.audioInput) {
_processRecord(record); _processRecord(record);
} }
}); });
@@ -503,6 +531,7 @@ class _MainScreenState extends State<MainScreen> with WidgetsBindingObserver {
rtlTcpService: _rtlTcpService, rtlTcpService: _rtlTcpService,
lastReceivedTime: _lastReceivedTime, lastReceivedTime: _lastReceivedTime,
rtlTcpLastReceivedTime: _rtlTcpLastReceivedTime, rtlTcpLastReceivedTime: _rtlTcpLastReceivedTime,
audioLastReceivedTime: _audioLastReceivedTime,
inputSource: _inputSource, inputSource: _inputSource,
rtlTcpConnected: _rtlTcpConnected, rtlTcpConnected: _rtlTcpConnected,
), ),

View File

@@ -29,6 +29,8 @@ class _SettingsScreenState extends State<SettingsScreen> {
late TextEditingController _rtlTcpHostController; late TextEditingController _rtlTcpHostController;
late TextEditingController _rtlTcpPortController; late TextEditingController _rtlTcpPortController;
bool _settingsLoaded = false;
String _deviceName = ''; String _deviceName = '';
bool _backgroundServiceEnabled = false; bool _backgroundServiceEnabled = false;
bool _notificationsEnabled = true; bool _notificationsEnabled = true;
@@ -83,11 +85,15 @@ class _SettingsScreenState extends State<SettingsScreen> {
(e) => e.name == sourceStr, (e) => e.name == sourceStr,
orElse: () => InputSource.bluetooth, orElse: () => InputSource.bluetooth,
); );
_settingsLoaded = true;
}); });
} }
} }
Future<void> _saveSettings() async { Future<void> _saveSettings() async {
if (!_settingsLoaded) return;
await _databaseService.updateSettings({ await _databaseService.updateSettings({
'deviceName': _deviceName, 'deviceName': _deviceName,
'backgroundServiceEnabled': _backgroundServiceEnabled ? 1 : 0, 'backgroundServiceEnabled': _backgroundServiceEnabled ? 1 : 0,
@@ -667,7 +673,7 @@ class _SettingsScreenState extends State<SettingsScreen> {
_buildActionButton( _buildActionButton(
icon: Icons.share, icon: Icons.share,
title: '分享数据', title: '分享数据',
subtitle: '将记录分享为JSON文件', subtitle: '将记录分享为 JSON 文件',
onTap: _shareData, onTap: _shareData,
), ),
const SizedBox(height: 12), const SizedBox(height: 12),

View File

@@ -1,6 +1,194 @@
import 'dart:async';
import 'dart:math';
import 'package:flutter/services.dart'; import 'package:flutter/services.dart';
import 'package:permission_handler/permission_handler.dart'; import 'package:permission_handler/permission_handler.dart';
import 'dart:developer' as developer; import 'dart:developer' as developer;
import 'package:gbk_codec/gbk_codec.dart';
import 'package:lbjconsole/models/train_record.dart';
import 'package:lbjconsole/services/database_service.dart';
const String _lbjInfoAddr = "1234000";
const String _lbjInfo2Addr = "1234002";
const String _lbjSyncAddr = "1234008";
const int _functionDown = 1;
const int _functionUp = 3;
// Accumulates the three LBJ POCSAG message types (INFO, INFO2, SYNC — see the
// _lbjXxxAddr constants) into one logical train state, from which a complete
// TrainRecord JSON can be produced. All parsing is positional; the offsets
// below follow the LBJ numeric-message layout — TODO confirm against the
// protocol specification, it cannot be verified from this file alone.
class _LbJState {
// Each field holds a sentinel ("<NUL>", "NUL", "NA", "********") until the
// corresponding message fragment arrives; toTrainRecordJson() strips them.
String train = "<NUL>";
// 1 (= _functionDown) or 3 (= _functionUp); -1 when not yet known.
int direction = -1;
String speed = "NUL";
String positionKm = " <NUL>";
String time = "<NUL>";
String lbjClass = "NA";
String loco = "<NUL>";
String route = "********";
String posLonDeg = "";
String posLonMin = "";
String posLatDeg = "";
String posLatMin = "";
// INFO2 payload after BCD-symbol recoding, kept for positional hex decoding.
String _info2Hex = "";
// Restores every field to its "no data" sentinel (new session / stop).
void reset() {
train = "<NUL>";
direction = -1;
speed = "NUL";
positionKm = " <NUL>";
time = "<NUL>";
lbjClass = "NA";
loco = "<NUL>";
route = "********";
posLonDeg = "";
posLonMin = "";
posLatDeg = "";
posLatMin = "";
_info2Hex = "";
}
// Maps the POCSAG numeric alphabet's non-digit symbols onto hex digits so
// the payload can be consumed as a hexadecimal string.
String _recodeBCD(String numericStr) {
return numericStr
.replaceAll('.', 'A')
.replaceAll('U', 'B')
.replaceAll(' ', 'C')
.replaceAll('-', 'D')
.replaceAll(')', 'E')
.replaceAll('(', 'F');
}
// Joins two hex digit characters into a single byte value (0 on bad input).
int _hexToChar(String hex1, String hex2) {
final String hex = "$hex1$hex2";
return int.tryParse(hex, radix: 16) ?? 0;
}
// Decodes GBK bytes to a Dart String, dropping NUL padding; returns "" on
// any decode failure rather than propagating the error.
String _gbkToUtf8(List<int> gbkBytes) {
try {
final validBytes = gbkBytes.where((b) => b != 0).toList();
return gbk.decode(validBytes);
} catch (e) {
return "";
}
}
// Feeds one raw message (pager address, function bits, numeric payload)
// into the state machine, updating whichever fields it carries.
void updateFromRaw(String addr, int func, String numeric) {
// The function bits encode travel direction regardless of message type.
if (func == _functionDown || func == _functionUp) {
direction = func;
}
switch (addr) {
case _lbjInfoAddr:
// INFO: three whitespace-separated tokens — train, speed, position.
final RegExp infoRegex = RegExp(r'^\s*(\S+)\s+(\S+)\s+(\S+)');
final match = infoRegex.firstMatch(numeric);
if (match != null) {
train = match.group(1) ?? "<NUL>";
speed = match.group(2) ?? "NUL";
String pos = match.group(3)?.trim() ?? "";
if (pos.isEmpty) {
positionKm = " <NUL>";
} else if (pos.length > 1) {
// Last digit is the fractional km part: "1234" -> "123.4".
positionKm =
"${pos.substring(0, pos.length - 1)}.${pos.substring(pos.length - 1)}";
} else {
positionKm = "0.$pos";
}
}
break;
case _lbjInfo2Addr:
String buffer = numeric;
// INFO2 is a fixed-length record; ignore short/partial frames entirely.
if (buffer.length < 50) return;
_info2Hex = _recodeBCD(buffer);
if (_info2Hex.length >= 4) {
try {
// Hex digits 0-3 hold two ASCII bytes of the train-class code.
List<int> classBytes = [
_hexToChar(_info2Hex[0], _info2Hex[1]),
_hexToChar(_info2Hex[2], _info2Hex[3]),
];
// Keep printable ASCII only; '"' and ',' are excluded (presumably to
// stay CSV/JSON-safe downstream — TODO confirm).
lbjClass = String.fromCharCodes(classBytes
.where((b) => b > 0x1F && b < 0x7F && b != 0x22 && b != 0x2C));
} catch (e) {}
}
// Locomotive number is read from the raw (pre-recode) digits 4-11.
if (buffer.length >= 12) loco = buffer.substring(4, 12);
// Route name: up to 8 GBK bytes gathered from hex digits 14-29; the
// buffer is NUL-padded so _gbkToUtf8 can drop unused slots.
List<int> routeBytes = List<int>.filled(17, 0);
if (_info2Hex.length >= 18) {
try {
routeBytes[0] = _hexToChar(_info2Hex[14], _info2Hex[15]);
routeBytes[1] = _hexToChar(_info2Hex[16], _info2Hex[17]);
} catch (e) {}
}
if (_info2Hex.length >= 22) {
try {
routeBytes[2] = _hexToChar(_info2Hex[18], _info2Hex[19]);
routeBytes[3] = _hexToChar(_info2Hex[20], _info2Hex[21]);
} catch (e) {}
}
if (_info2Hex.length >= 30) {
try {
routeBytes[4] = _hexToChar(_info2Hex[22], _info2Hex[23]);
routeBytes[5] = _hexToChar(_info2Hex[24], _info2Hex[25]);
routeBytes[6] = _hexToChar(_info2Hex[26], _info2Hex[27]);
routeBytes[7] = _hexToChar(_info2Hex[28], _info2Hex[29]);
} catch (e) {}
}
route = _gbkToUtf8(routeBytes);
// GPS longitude: degrees at 30-32, minutes as "mm.mmmm" from 33-38.
if (buffer.length >= 39) {
posLonDeg = buffer.substring(30, 33);
posLonMin = "${buffer.substring(33, 35)}.${buffer.substring(35, 39)}";
}
// GPS latitude: degrees at 39-40, minutes as "mm.mmmm" from 41-46.
if (buffer.length >= 47) {
posLatDeg = buffer.substring(39, 41);
posLatMin = "${buffer.substring(41, 43)}.${buffer.substring(43, 47)}";
}
break;
case _lbjSyncAddr:
// SYNC carries wall-clock HHMM in digits 1-4 (digit 0 is skipped).
if (numeric.length >= 5) {
time = "${numeric.substring(1, 3)}:${numeric.substring(3, 5)}";
}
break;
}
}
// Snapshots the current state as the JSON map TrainRecord.fromJson expects,
// stripping the "no data" sentinels to empty strings.
Map<String, dynamic> toTrainRecordJson() {
final now = DateTime.now();
// Assemble "lat° lon°" from whichever coordinate halves are present.
String gpsPosition = "";
if (posLatDeg.isNotEmpty && posLatMin.isNotEmpty) {
gpsPosition = "$posLatDeg°$posLatMin";
}
if (posLonDeg.isNotEmpty && posLonMin.isNotEmpty) {
gpsPosition +=
"${gpsPosition.isEmpty ? "" : " "}$posLonDeg°$posLonMin";
}
String kmPosition = positionKm.replaceAll(' <NUL>', '');
final jsonData = {
// Timestamp + random suffix keeps ids unique across rapid messages.
'uniqueId': '${now.millisecondsSinceEpoch}_${Random().nextInt(9999)}',
'receivedTimestamp': now.millisecondsSinceEpoch,
'timestamp': now.millisecondsSinceEpoch,
// No RSSI is available on the audio-input path.
'rssi': 0.0,
'train': train.replaceAll('<NUL>', ''),
'loco': loco.replaceAll('<NUL>', ''),
'speed': speed.replaceAll('NUL', ''),
'position': kmPosition,
'positionInfo': gpsPosition,
'route': route.replaceAll('********', ''),
'lbjClass': lbjClass.replaceAll('NA', ''),
'time': time.replaceAll('<NUL>', ''),
// Only the two known function codes count as a direction; else 0.
'direction': (direction == 1 || direction == 3) ? direction : 0,
'locoType': "",
};
return jsonData;
}
}
class AudioInputService { class AudioInputService {
static final AudioInputService _instance = AudioInputService._internal(); static final AudioInputService _instance = AudioInputService._internal();
@@ -8,9 +196,107 @@ class AudioInputService {
AudioInputService._internal(); AudioInputService._internal();
static const _methodChannel = MethodChannel('org.noxylva.lbjconsole/audio_input'); static const _methodChannel = MethodChannel('org.noxylva.lbjconsole/audio_input');
static const _eventChannel = EventChannel('org.noxylva.lbjconsole/audio_input_event');
final StreamController<String> _statusController =
StreamController<String>.broadcast();
final StreamController<TrainRecord> _dataController =
StreamController<TrainRecord>.broadcast();
final StreamController<bool> _connectionController =
StreamController<bool>.broadcast();
final StreamController<DateTime?> _lastReceivedTimeController =
StreamController<DateTime?>.broadcast();
Stream<String> get statusStream => _statusController.stream;
Stream<TrainRecord> get dataStream => _dataController.stream;
Stream<bool> get connectionStream => _connectionController.stream;
Stream<DateTime?> get lastReceivedTimeStream => _lastReceivedTimeController.stream;
bool _isListening = false; bool _isListening = false;
DateTime? _lastReceivedTime;
StreamSubscription? _eventChannelSubscription;
final _LbJState _state = _LbJState();
String _lastRawMessage = "";
bool get isListening => _isListening; bool get isListening => _isListening;
DateTime? get lastReceivedTime => _lastReceivedTime;
// Transitions the listening flag and fans the change out to the status,
// connection, and last-received-time streams. No-op when the state is
// unchanged, so repeated status events from the platform side are cheap.
void _updateListeningState(bool listening, String status) {
if (_isListening == listening) return;
_isListening = listening;
if (!listening) {
// Stopping invalidates the session: clear the timestamp and the
// partially assembled message state.
_lastReceivedTime = null;
_state.reset();
}
_statusController.add(status);
_connectionController.add(listening);
_lastReceivedTimeController.add(_lastReceivedTime);
}
// (Re)subscribes to the platform EventChannel. Two event shapes arrive:
// status maps carrying 'listening', and data maps carrying
// 'address'/'func'/'numeric' for one decoded POCSAG message. Data events are
// fed into _state and, for INFO/INFO2 addresses, emitted as a TrainRecord
// and persisted.
void _listenToEventChannel() {
// Drop any previous subscription so events are not delivered twice.
if (_eventChannelSubscription != null) {
_eventChannelSubscription?.cancel();
}
_eventChannelSubscription = _eventChannel.receiveBroadcastStream().listen(
(dynamic event) {
try {
final map = event as Map;
// Status event: mirror the platform-side listening flag.
if (map.containsKey('listening')) {
final listening = map['listening'] as bool? ?? false;
if (_isListening != listening) {
_updateListeningState(listening, listening ? "监听中" : "已停止");
}
}
// Data event: one raw message from the demodulator.
if (map.containsKey('address')) {
final addr = map['address'] as String;
// Only the three known LBJ addresses are of interest.
if (addr != _lbjInfoAddr &&
addr != _lbjInfo2Addr &&
addr != _lbjSyncAddr) {
return;
}
final func = int.tryParse(map['func'] as String? ?? '-1') ?? -1;
final numeric = map['numeric'] as String;
final String currentRawMessage = "$addr|$func|$numeric";
// Suppress exact repeats of the previous message.
// NOTE(review): this also drops a legitimately re-sent identical
// message — confirm that is acceptable for this protocol.
if (currentRawMessage == _lastRawMessage) {
return;
}
_lastRawMessage = currentRawMessage;
developer.log('Audio-RAW: $currentRawMessage', name: 'AudioInput');
// Receiving data implies we are listening, even if no status
// event said so yet.
if (!_isListening) {
_updateListeningState(true, "监听中");
}
_lastReceivedTime = DateTime.now();
_lastReceivedTimeController.add(_lastReceivedTime);
_state.updateFromRaw(addr, func, numeric);
// SYNC only updates the clock; INFO/INFO2 produce a record.
if (addr == _lbjInfoAddr || addr == _lbjInfo2Addr) {
final jsonData = _state.toTrainRecordJson();
final trainRecord = TrainRecord.fromJson(jsonData);
_dataController.add(trainRecord);
DatabaseService.instance.insertRecord(trainRecord);
}
}
} catch (e, s) {
// A malformed event must not tear down the subscription.
developer.log('Audio StateMachine Error: $e',
name: 'AudioInput', error: e, stackTrace: s);
}
},
onError: (dynamic error) {
// Channel-level failure: report disconnection and stop listening.
_updateListeningState(false, "数据通道错误");
_eventChannelSubscription?.cancel();
},
);
}
Future<bool> startListening() async { Future<bool> startListening() async {
if (_isListening) return true; if (_isListening) return true;
@@ -25,8 +311,11 @@ class AudioInputService {
} }
try { try {
_listenToEventChannel();
await _methodChannel.invokeMethod('start'); await _methodChannel.invokeMethod('start');
_isListening = true; _isListening = true;
_statusController.add("监听中");
_connectionController.add(true);
developer.log('Audio input started', name: 'AudioInput'); developer.log('Audio input started', name: 'AudioInput');
return true; return true;
} on PlatformException catch (e) { } on PlatformException catch (e) {
@@ -41,9 +330,22 @@ class AudioInputService {
try { try {
await _methodChannel.invokeMethod('stop'); await _methodChannel.invokeMethod('stop');
_isListening = false; _isListening = false;
_lastReceivedTime = null;
_state.reset();
_statusController.add("已停止");
_connectionController.add(false);
_lastReceivedTimeController.add(null);
developer.log('Audio input stopped', name: 'AudioInput'); developer.log('Audio input stopped', name: 'AudioInput');
} catch (e) { } catch (e) {
developer.log('Error stopping audio input: $e', name: 'AudioInput'); developer.log('Error stopping audio input: $e', name: 'AudioInput');
} }
} }
// Permanently tears down the service. Since this is a singleton with
// broadcast controllers, the streams cannot be listened to again afterwards.
// NOTE(review): _eventChannelSubscription is cancelled but not nulled, so a
// later _listenToEventChannel() would cancel it a second time (harmless but
// worth tidying).
void dispose() {
_eventChannelSubscription?.cancel();
_statusController.close();
_dataController.close();
_connectionController.close();
_lastReceivedTimeController.close();
}
} }

View File

@@ -341,7 +341,10 @@ class DatabaseService {
where: 'id = 1', where: 'id = 1',
); );
if (result > 0) { if (result > 0) {
_notifySettingsChanged(settings); final currentSettings = await getAllSettings();
if (currentSettings != null) {
_notifySettingsChanged(currentSettings);
}
} }
return result; return result;
} }
@@ -447,7 +450,6 @@ class DatabaseService {
} }
void _notifySettingsChanged(Map<String, dynamic> settings) { void _notifySettingsChanged(Map<String, dynamic> settings) {
print('[Database] Notifying ${_settingsListeners.length} settings listeners');
for (final listener in _settingsListeners) { for (final listener in _settingsListeners) {
listener(settings); listener(settings);
} }