10 Commits

22 changed files with 2042 additions and 571 deletions

1
.gitignore vendored
View File

@@ -165,3 +165,4 @@ fabric.properties
macos/Flutter/ephemeral/flutter_export_environment.sh macos/Flutter/ephemeral/flutter_export_environment.sh
macos/Flutter/ephemeral/Flutter-Generated.xcconfig macos/Flutter/ephemeral/Flutter-Generated.xcconfig
*.py *.py
PDW

View File

@@ -1,12 +1,8 @@
# LBJ_Console # LBJ_Console
LBJ Console 是一个应用程序,用于通过 BLE 从 [SX1276_Receive_LBJ](https://github.com/undef-i/SX1276_Receive_LBJ) 设备接收并显示列车预警消息,功能包括: LBJ Console 是一个应用程序,用于接收并显示列车预警消息
- 接收列车预警消息,支持可选的手机推送通知 应用程序支持通过 BLE 从 [SX1276_Receive_LBJ](https://github.com/undef-i/SX1276_Receive_LBJ) 接收预警消息,或直接连接 RTL-TCP 服务器从 RTL-SDR 接收预警消息。在可视化方面,软件能够在地图上标注预警消息的 GPS 位置,并支持绘制指定列车的运行轨迹。此外,程序内置了机车数据文件,可根据数据内容匹配并显示机车配属、机车类型以及车次类型
- 监控指定列车的轨迹,在地图上显示。
- 在地图上显示预警消息的 GPS 信息。
- 基于内置数据文件显示机车配属,机车类型和车次类型。
- 连接 RTL-TCP 服务器获取预警消息。
[android](https://github.com/undef-i/LBJ_Console/tree/android) 分支包含项目早期基于 Android 平台的实现代码,已实现基本功能,现已停止开发。 [android](https://github.com/undef-i/LBJ_Console/tree/android) 分支包含项目早期基于 Android 平台的实现代码,已实现基本功能,现已停止开发。
@@ -26,6 +22,7 @@ LBJ Console 依赖以下数据文件,位于 `assets` 目录,用于支持机
- 集成 ESP-Touch 协议,实现设备 WiFi 凭证的配置。 - 集成 ESP-Touch 协议,实现设备 WiFi 凭证的配置。
- 从设备端拉取历史数据记录。 - 从设备端拉取历史数据记录。
- [WIP]从音频流解析预警消息。
# 致谢 # 致谢
@@ -36,5 +33,11 @@ LBJ Console 依赖以下数据文件,位于 `assets` 目录,用于支持机
# 许可证 # 许可证
该项目采用 GNU 通用公共许可证 v3.0(GPLv3)授权。 该项目采用 GNU 通用公共许可证 v3.0 授权。
本软件按现状提供,不附带任何明示或默示担保。本软件接收、解析、显示的一切信息不具备官方授权、不具有任何权威性或证明效力,不保证其实时性、连续性、准确性、完整性或可靠性。上述信息不得作为任何决策依据或参考。
本软件作者及全体贡献者从未生产、销售、推广、授权或以其他方式参与任何搭载、捆绑或集成使用本软件的硬件设备。任何以本软件相关名义进行的销售、定制、组装、代工或其他商业行为,均与作者及贡献者无关。因使用本软件导致的任何人身损害、财产损失、行政责任、刑事责任、民事赔偿或其他任何损失,软件作者及贡献者不对任何直接、间接、附带、特殊、惩罚性或后果性损害承担任何责任。
使用本软件即表示使用者已知悉、理解并接受以上全部内容。

View File

@@ -1,5 +1,8 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android" <manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"> xmlns:tools="http://schemas.android.com/tools">
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
<uses-permission android:name="android.permission.BLUETOOTH"/> <uses-permission android:name="android.permission.BLUETOOTH"/>
<uses-permission android:name="android.permission.BLUETOOTH_ADMIN"/> <uses-permission android:name="android.permission.BLUETOOTH_ADMIN"/>
<uses-permission android:name="android.permission.BLUETOOTH_CONNECT" android:usesPermissionFlags="neverForLocation"/> <uses-permission android:name="android.permission.BLUETOOTH_CONNECT" android:usesPermissionFlags="neverForLocation"/>
@@ -31,10 +34,6 @@
android:configChanges="orientation|keyboardHidden|keyboard|screenSize|smallestScreenSize|locale|layoutDirection|fontScale|screenLayout|density|uiMode" android:configChanges="orientation|keyboardHidden|keyboard|screenSize|smallestScreenSize|locale|layoutDirection|fontScale|screenLayout|density|uiMode"
android:hardwareAccelerated="true" android:hardwareAccelerated="true"
android:windowSoftInputMode="adjustResize"> android:windowSoftInputMode="adjustResize">
<!-- Specifies an Android theme to apply to this Activity as soon as
the Android process has started. This theme is visible to the user
while the Flutter UI initializes. After that, this theme continues
to determine the Window background behind the Flutter UI. -->
<meta-data <meta-data
android:name="io.flutter.embedding.android.NormalTheme" android:name="io.flutter.embedding.android.NormalTheme"
android:resource="@style/NormalTheme" android:resource="@style/NormalTheme"
@@ -44,13 +43,10 @@
<category android:name="android.intent.category.LAUNCHER"/> <category android:name="android.intent.category.LAUNCHER"/>
</intent-filter> </intent-filter>
</activity> </activity>
<!-- Don't delete the meta-data below.
This is used by the Flutter tool to generate GeneratedPluginRegistrant.java -->
<meta-data <meta-data
android:name="flutterEmbedding" android:name="flutterEmbedding"
android:value="2" /> android:value="2" />
<!-- 前台服务配置 -->
<service <service
android:name="id.flutter.flutter_background_service.BackgroundService" android:name="id.flutter.flutter_background_service.BackgroundService"
android:foregroundServiceType="connectedDevice|dataSync" android:foregroundServiceType="connectedDevice|dataSync"
@@ -59,11 +55,6 @@
android:enabled="true" android:enabled="true"
tools:replace="android:exported"/> tools:replace="android:exported"/>
</application> </application>
<!-- Required to query activities that can process text, see:
https://developer.android.com/training/package-visibility and
https://developer.android.com/reference/android/content/Intent#ACTION_PROCESS_TEXT.
In particular, this is used by the Flutter engine in io.flutter.plugin.text.ProcessTextPlugin. -->
<queries> <queries>
<intent> <intent>
<action android:name="android.intent.action.PROCESS_TEXT"/> <action android:name="android.intent.action.PROCESS_TEXT"/>

View File

@@ -6,6 +6,10 @@ project("railwaypagerdemod")
add_library(${CMAKE_PROJECT_NAME} SHARED add_library(${CMAKE_PROJECT_NAME} SHARED
demod.cpp demod.cpp
demod.h demod.h
audio_demod.cpp
audio_demod.h
audio_fft.cpp
audio_fft.h
native-lib.cpp native-lib.cpp
${CMAKE_CURRENT_SOURCE_DIR}/dsp/firfilter.cpp ${CMAKE_CURRENT_SOURCE_DIR}/dsp/firfilter.cpp
) )

View File

@@ -0,0 +1,106 @@
#include "audio_demod.h"
#include "demod.h"
#include <android/log.h>
#include <cmath>
#include <cstring>
#define LOG_TAG "AudioDemod"
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)
#define FINE_CLKT_HI 1.90
#define FINE_CLKT_LO 0.32
#define SAMPLE_RATE 48000
#define BAUD_RATE 1200
static double watch_ctr = 0.0;
static double atb_ctr = 0.0;
static double clkt = (double)SAMPLE_RATE / BAUD_RATE;
static int last_value = 0;
static int preamble_count = 0;
static int pocbit = 0;
static bool preamble_detected = false;
static int crossing_count = 0;
static int nSamples = 0;
static const int AUDIO_THRESHOLD = 128;
// Restore the audio demodulator to its power-on state so a new capture
// session starts with no leftover preamble/bit/clock state.
void resetAudioDemod() {
    preamble_detected = false;
    preamble_count = 0;
    pocbit = 0;
    crossing_count = 0;
    nSamples = 0;
    last_value = 0;
    watch_ctr = 0.0;
    atb_ctr = 0.0;
    // Nominal bit period in samples (48000 / 1200 = 40).
    clkt = (double)SAMPLE_RATE / BAUD_RATE;
    LOGD("Audio demodulator reset");
}
// Feed a block of 16-bit PCM audio samples into the POCSAG slicer.
//
// Per sample:
//  1. Rescale the int16 sample to 0..255 and hard-slice against
//     AUDIO_THRESHOLD (128) into a binary level.
//  2. On every level transition, look at the run length (nSamples).
//     Runs of 29..43 samples bracket the nominal 40-sample bit period
//     (48 kHz / 1200 baud); after more than 50 such in-window
//     transitions the alternating POCSAG preamble is declared found.
//     NOTE(review): the 28..44 window and the 50-count look empirically
//     tuned — confirm against real captures.
//  3. A software baud clock (watch_ctr counts samples, atb_ctr advances
//     by clkt per emitted bit) decides when to pass one bit to the
//     decoder via processBasebandSample(). clkt is nudged by +/-0.01,
//     clamped to +/-5% of nominal, to track the transmitter's clock.
//
// State lives in file-scope statics, so this function is not reentrant;
// call resetAudioDemod() between capture sessions.
void processAudioSamples(int16_t *samples, int size) {
    for (int i = 0; i < size; i++) {
        // Map [-32768, 32767] -> [0, 255]; the clamps below are
        // defensive (the arithmetic already stays in range).
        int audio_value = (samples[i] + 32768) / 256;
        if (audio_value < 0) audio_value = 0;
        if (audio_value > 255) audio_value = 255;
        // Hard threshold at mid-scale: the sliced binary level.
        int current_bit = (audio_value > AUDIO_THRESHOLD) ? 1 : 0;
        if (current_bit != last_value) {
            crossing_count++;
            // Transition spacing near one bit period -> preamble candidate.
            if ((nSamples > 28) && (nSamples < 44)) {
                preamble_count++;
                if (preamble_count > 50 && !preamble_detected) {
                    preamble_detected = true;
                    pocbit = 0;
                    LOGD("Preamble detected! crossings=%d samples=%d", preamble_count, nSamples);
                }
            }
            nSamples = 0;
        }
        nSamples++;
        last_value = current_bit;
        watch_ctr += 1.0;
        // Baud clock fires when the sample counter catches up to the
        // next scheduled bit instant (within one sample).
        if (watch_ctr - atb_ctr < 1.0) {
            int bit = current_bit;
            if (preamble_detected) {
                // Hand one sliced bit to the POCSAG codeword decoder.
                processBasebandSample(bit);
                pocbit++;
                // Give up if no sync word appears within 1250 bits.
                if (pocbit > 1250) {
                    LOGD("POCSAG timeout - no sync after 1250 bits");
                    preamble_detected = false;
                    preamble_count = 0;
                    pocbit = 0;
                }
            }
            // Clock recovery: if a transition occurred this bit period,
            // steer clkt so the sampling instant stays centered.
            if (crossing_count > 0) {
                double offset = watch_ctr - atb_ctr;
                if (offset > FINE_CLKT_HI) {
                    clkt -= 0.01;
                    // Clamp to -5% of the nominal bit period.
                    if (clkt < (SAMPLE_RATE / BAUD_RATE) * 0.95) {
                        clkt = (SAMPLE_RATE / BAUD_RATE) * 0.95;
                    }
                } else if (offset < FINE_CLKT_LO) {
                    clkt += 0.01;
                    // Clamp to +5% of the nominal bit period.
                    if (clkt > (SAMPLE_RATE / BAUD_RATE) * 1.05) {
                        clkt = (SAMPLE_RATE / BAUD_RATE) * 1.05;
                    }
                }
                crossing_count = 0;
            }
            // Schedule the next bit instant.
            atb_ctr += clkt;
        }
    }
}

View File

@@ -0,0 +1,10 @@
#ifndef AUDIO_DEMOD_H
#define AUDIO_DEMOD_H
// POCSAG demodulation from raw 16-bit PCM audio (48 kHz assumed by the
// implementation). State is file-static and not thread-safe.
#include <cstdint>
// Slice `size` PCM samples into bits and feed them to the POCSAG decoder.
void processAudioSamples(int16_t *samples, int size);
// Clear all demodulator state; call between capture sessions.
void resetAudioDemod();
#endif

View File

@@ -0,0 +1,117 @@
#include "audio_fft.h"
#include <cmath>
#include <algorithm>
#include <android/log.h>
#define LOG_TAG "AudioFFT"
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)
#ifndef M_PI
#define M_PI 3.14159265358979323846
#endif
// Construct an FFT processor for blocks of `fftSize` samples. All
// scratch vectors are allocated up front and a Hamming window is
// precomputed, so per-block processing performs no allocations.
AudioFFT::AudioFFT(int fftSize)
    : fftSize_(fftSize)
    , inputBuffer_(fftSize, 0.0f)
    , windowBuffer_(fftSize, 0.0f)
    , realPart_(fftSize, 0.0f)
    , imagPart_(fftSize, 0.0f)
    , magnitude_(fftSize, 0.0f)
    , bufferPos_(0) {
    int idx = 0;
    while (idx < fftSize_) {
        // Hamming window coefficient for position `idx`.
        windowBuffer_[idx] = 0.54f - 0.46f * std::cos(2.0f * M_PI * idx / (fftSize_ - 1));
        ++idx;
    }
    LOGD("AudioFFT initialized with size %d", fftSize_);
}
// Vectors clean themselves up; nothing else is owned.
AudioFFT::~AudioFFT() = default;
// Accumulate PCM samples (normalized to [-1, 1)) into the input buffer;
// every time the buffer fills to fftSize_ samples, run one FFT over it.
void AudioFFT::processSamples(const int16_t* samples, int size) {
    for (int n = 0; n < size; ++n) {
        inputBuffer_[bufferPos_++] = samples[n] / 32768.0f;
        if (bufferPos_ >= fftSize_) {
            bufferPos_ = 0;
            computeFFT();
        }
    }
}
// Load the windowed input into the complex working buffers:
// real = window * sample, imaginary = 0.
void AudioFFT::applyWindow() {
    for (int n = 0; n < fftSize_; ++n) {
        imagPart_[n] = 0.0f;
        realPart_[n] = windowBuffer_[n] * inputBuffer_[n];
    }
}
// In-place iterative radix-2 FFT over the current input block, followed
// by a magnitude pass. Assumes fftSize_ is a power of two (the
// bit-reversal and stage-doubling loops below rely on it) — the
// constructor does not enforce this; callers must.
void AudioFFT::computeFFT() {
    applyWindow();
    int n = fftSize_;
    // Bit-reversal permutation: reorder the complex input so the
    // in-place butterfly stages produce an in-order spectrum.
    int j = 0;
    for (int i = 0; i < n - 1; i++) {
        if (i < j) {
            std::swap(realPart_[i], realPart_[j]);
            std::swap(imagPart_[i], imagPart_[j]);
        }
        // Increment j as a bit-reversed counter.
        int k = n / 2;
        while (k <= j) {
            j -= k;
            k /= 2;
        }
        j += k;
    }
    // Cooley-Tukey butterflies: stage sizes 2, 4, ..., n.
    for (int len = 2; len <= n; len *= 2) {
        // Twiddle-factor increment for this stage: e^(-2*pi*i/len).
        float angle = -2.0f * M_PI / len;
        float wlenReal = std::cos(angle);
        float wlenImag = std::sin(angle);
        for (int i = 0; i < n; i += len) {
            float wReal = 1.0f;
            float wImag = 0.0f;
            for (int k = 0; k < len / 2; k++) {
                int idx1 = i + k;
                int idx2 = i + k + len / 2;
                // Butterfly: t = w * x[idx2]; x[idx2] = x[idx1] - t;
                // x[idx1] = x[idx1] + t.
                float tReal = wReal * realPart_[idx2] - wImag * imagPart_[idx2];
                float tImag = wReal * imagPart_[idx2] + wImag * realPart_[idx2];
                realPart_[idx2] = realPart_[idx1] - tReal;
                imagPart_[idx2] = imagPart_[idx1] - tImag;
                realPart_[idx1] += tReal;
                imagPart_[idx1] += tImag;
                // Advance the twiddle factor: w *= wlen.
                float wTempReal = wReal * wlenReal - wImag * wlenImag;
                wImag = wReal * wlenImag + wImag * wlenReal;
                wReal = wTempReal;
            }
        }
    }
    // Magnitude of each complex bin (unnormalized).
    for (int i = 0; i < fftSize_; i++) {
        float real = realPart_[i];
        float imag = imagPart_[i];
        magnitude_[i] = std::sqrt(real * real + imag * imag);
    }
}
// Copy the magnitude spectrum into `output` as normalized dB values.
// Each bin is converted to dB (floored at -200 dB via the 1e-10 clamp
// to avoid log10(0)) and mapped linearly from [-80 dB, 0 dB] onto
// [0, 1], clamped. Only the first fftSize_/2 bins (the non-mirrored
// half) are meaningful.
//
// Fix: if the caller requests more bins than fftSize_/2, the trailing
// entries were previously left uninitialized; they are now zeroed.
void AudioFFT::getSpectrum(float* output, int outputSize) {
    int copySize = std::min(outputSize, fftSize_ / 2);
    for (int i = 0; i < copySize; i++) {
        float mag = magnitude_[i];
        if (mag < 1e-10f) mag = 1e-10f;  // avoid log10(0)
        float db = 20.0f * std::log10(mag);
        float normalized = (db + 80.0f) / 80.0f;
        output[i] = std::max(0.0f, std::min(1.0f, normalized));
    }
    // Zero any bins beyond the available spectrum so the caller never
    // reads indeterminate values.
    for (int i = copySize; i < outputSize; i++) {
        output[i] = 0.0f;
    }
}

View File

@@ -0,0 +1,29 @@
#ifndef AUDIO_FFT_H
#define AUDIO_FFT_H
#include <cstdint>
#include <vector>
// Fixed-size block FFT for driving an audio spectrum display.
// Accumulates PCM samples and computes a Hamming-windowed radix-2 FFT
// each time a full block is collected. Not thread-safe; callers must
// synchronize externally.
class AudioFFT {
public:
    // fftSize must be a power of two (required by the radix-2 FFT).
    AudioFFT(int fftSize = 256);
    ~AudioFFT();
    // Append `size` samples; runs one FFT per completed block.
    void processSamples(const int16_t* samples, int size);
    // Write up to fftSize/2 normalized [0,1] dB magnitudes into output.
    void getSpectrum(float* output, int outputSize);
    int getFFTSize() const { return fftSize_; }
private:
    void computeFFT();     // windowed in-place FFT + magnitude pass
    void applyWindow();    // fill real/imag work buffers from input
    int fftSize_;                       // block length (power of two)
    std::vector<float> inputBuffer_;    // pending samples, normalized
    std::vector<float> windowBuffer_;   // precomputed Hamming window
    std::vector<float> realPart_;       // FFT work buffer (real)
    std::vector<float> imagPart_;       // FFT work buffer (imaginary)
    std::vector<float> magnitude_;      // |X[k]| of the last block
    int bufferPos_;                     // fill position in inputBuffer_
};
#endif

View File

@@ -1,4 +1,5 @@
#include "demod.h" #include "demod.h"
#include <android/log.h>
bool is_message_ready = false; bool is_message_ready = false;
@@ -14,17 +15,29 @@ uint32_t bits;
uint32_t code_words[PAGERDEMOD_BATCH_WORDS]; uint32_t code_words[PAGERDEMOD_BATCH_WORDS];
bool code_words_bch_error[PAGERDEMOD_BATCH_WORDS]; bool code_words_bch_error[PAGERDEMOD_BATCH_WORDS];
static bool hysteresis_state = false;
static bool dsp_initialized = false;
std::string numeric_msg, alpha_msg; std::string numeric_msg, alpha_msg;
int function_bits; int function_bits;
uint32_t address; uint32_t address;
uint32_t alpha_bit_buffer; // Bit buffer to 7-bit chars spread across codewords uint32_t alpha_bit_buffer;
int alpha_bit_buffer_bits; // Count of bits in alpha_bit_buffer int alpha_bit_buffer_bits;
int parity_errors; // Count of parity errors in current message int parity_errors;
int bch_errors; // Count of BCH errors in current message int bch_errors;
int batch_num; // Count of batches in current transmission int batch_num;
double magsqRaw; double magsqRaw;
void ensureDSPInitialized() {
if (dsp_initialized) return;
lowpassBaud.create(301, SAMPLE_RATE, BAUD_RATE * 5.0f);
phaseDiscri.setFMScaling(SAMPLE_RATE / (2.0f * DEVIATION));
dsp_initialized = true;
}
int pop_cnt(uint32_t cw) int pop_cnt(uint32_t cw)
{ {
int cnt = 0; int cnt = 0;
@@ -39,10 +52,9 @@ int pop_cnt(uint32_t cw)
uint32_t bchEncode(const uint32_t cw) uint32_t bchEncode(const uint32_t cw)
{ {
uint32_t bit = 0; uint32_t bit = 0;
uint32_t localCW = cw & 0xFFFFF800; // Mask off BCH parity and even parity bits uint32_t localCW = cw & 0xFFFFF800;
uint32_t cwE = localCW; uint32_t cwE = localCW;
// Calculate BCH bits
for (bit = 1; bit <= 21; bit++) for (bit = 1; bit <= 21; bit++)
{ {
if (cwE & 0x80000000) if (cwE & 0x80000000)
@@ -56,38 +68,28 @@ uint32_t bchEncode(const uint32_t cw)
return localCW; return localCW;
} }
// Use BCH decoding to try to fix any bit errors
// Returns true if able to be decode/repair successful
// See: https://www.eevblog.com/forum/microcontrollers/practical-guides-to-bch-fec/
bool bchDecode(const uint32_t cw, uint32_t &correctedCW) bool bchDecode(const uint32_t cw, uint32_t &correctedCW)
{ {
// Calculate syndrome
// We do this by recalculating the BCH parity bits and XORing them against the received ones
uint32_t syndrome = ((bchEncode(cw) ^ cw) >> 1) & 0x3FF; uint32_t syndrome = ((bchEncode(cw) ^ cw) >> 1) & 0x3FF;
if (syndrome == 0) if (syndrome == 0)
{ {
// Syndrome of zero indicates no repair required
correctedCW = cw; correctedCW = cw;
return true; return true;
} }
// Meggitt decoder
uint32_t result = 0; uint32_t result = 0;
uint32_t damagedCW = cw; uint32_t damagedCW = cw;
// Calculate BCH bits
for (uint32_t xbit = 0; xbit < 31; xbit++) for (uint32_t xbit = 0; xbit < 31; xbit++)
{ {
// Produce the next corrected bit in the high bit of the result
result <<= 1; result <<= 1;
if ((syndrome == 0x3B4) || // 0x3B4: Syndrome when a single error is detected in the MSB if ((syndrome == 0x3B4) ||
(syndrome == 0x26E) || // 0x26E: Two adjacent errors (syndrome == 0x26E) ||
(syndrome == 0x359) || // 0x359: Two errors, one OK bit between (syndrome == 0x359) ||
(syndrome == 0x076) || // 0x076: Two errors, two OK bits between (syndrome == 0x076) ||
(syndrome == 0x255) || // 0x255: Two errors, three OK bits between (syndrome == 0x255) ||
(syndrome == 0x0F0) || // 0x0F0: Two errors, four OK bits between (syndrome == 0x0F0) ||
(syndrome == 0x216) || (syndrome == 0x216) ||
(syndrome == 0x365) || (syndrome == 0x365) ||
(syndrome == 0x068) || (syndrome == 0x068) ||
@@ -114,36 +116,29 @@ bool bchDecode(const uint32_t cw, uint32_t &correctedCW)
(syndrome == 0x3B6) || (syndrome == 0x3B6) ||
(syndrome == 0x3B5)) (syndrome == 0x3B5))
{ {
// Syndrome matches an error in the MSB
// Correct that error and adjust the syndrome to account for it
syndrome ^= 0x3B4; syndrome ^= 0x3B4;
result |= (~damagedCW & 0x80000000) >> 30; result |= (~damagedCW & 0x80000000) >> 30;
} }
else else
{ {
// No error
result |= (damagedCW & 0x80000000) >> 30; result |= (damagedCW & 0x80000000) >> 30;
} }
damagedCW <<= 1; damagedCW <<= 1;
// Handle syndrome shift register feedback
if (syndrome & 0x200) if (syndrome & 0x200)
{ {
syndrome <<= 1; syndrome <<= 1;
syndrome ^= 0x769; // 0x769 = POCSAG generator polynomial -- x^10 + x^9 + x^8 + x^6 + x^5 + x^3 + 1 syndrome ^= 0x769;
} }
else else
{ {
syndrome <<= 1; syndrome <<= 1;
} }
// Mask off bits which fall off the end of the syndrome shift register
syndrome &= 0x3FF; syndrome &= 0x3FF;
} }
// Check if error correction was successful
if (syndrome != 0) if (syndrome != 0)
{ {
// Syndrome nonzero at end indicates uncorrectable errors
correctedCW = cw; correctedCW = cw;
return false; return false;
} }
@@ -162,13 +157,11 @@ int xorBits(uint32_t word, int firstBit, int lastBit)
return x; return x;
} }
// Check for even parity
bool evenParity(uint32_t word, int firstBit, int lastBit, int parityBit) bool evenParity(uint32_t word, int firstBit, int lastBit, int parityBit)
{ {
return xorBits(word, firstBit, lastBit) == parityBit; return xorBits(word, firstBit, lastBit) == parityBit;
} }
// Reverse order of bits
uint32_t reverse(uint32_t x) uint32_t reverse(uint32_t x)
{ {
x = (((x & 0xaaaaaaaa) >> 1) | ((x & 0x55555555) << 1)); x = (((x & 0xaaaaaaaa) >> 1) | ((x & 0x55555555) << 1));
@@ -178,10 +171,6 @@ uint32_t reverse(uint32_t x)
return ((x >> 16) | (x << 16)); return ((x >> 16) | (x << 16));
} }
// Decode a batch of codewords to addresses and messages
// Messages may be spreadout over multiple batches
// https://www.itu.int/dms_pubrec/itu-r/rec/m/R-REC-M.584-1-198607-S!!PDF-E.pdf
// https://www.itu.int/dms_pubrec/itu-r/rec/m/R-REC-M.584-2-199711-I!!PDF-E.pdf
void decodeBatch() void decodeBatch()
{ {
int i = 1; int i = 1;
@@ -190,20 +179,20 @@ void decodeBatch()
for (int word = 0; word < PAGERDEMOD_CODEWORDS_PER_FRAME; word++) for (int word = 0; word < PAGERDEMOD_CODEWORDS_PER_FRAME; word++)
{ {
bool addressCodeWord = ((code_words[i] >> 31) & 1) == 0; bool addressCodeWord = ((code_words[i] >> 31) & 1) == 0;
// Check parity bit
bool parityError = !evenParity(code_words[i], 1, 31, code_words[i] & 0x1); bool parityError = !evenParity(code_words[i], 1, 31, code_words[i] & 0x1);
if (code_words[i] == PAGERDEMOD_POCSAG_IDLECODE) if (code_words[i] == PAGERDEMOD_POCSAG_IDLECODE)
{ {
// Idle
} }
else if (addressCodeWord) else if (addressCodeWord)
{ {
// Address
function_bits = (code_words[i] >> 11) & 0x3; function_bits = (code_words[i] >> 11) & 0x3;
int addressBits = (code_words[i] >> 13) & 0x3ffff; int addressBits = (code_words[i] >> 13) & 0x3ffff;
address = (addressBits << 3) | frame; address = (addressBits << 3) | frame;
__android_log_print(ANDROID_LOG_DEBUG, "DEMOD", "addr_cw: raw=0x%08X addr=%u func=%d frame=%d bch_err=%d parity_err=%d",
code_words[i], address, function_bits, frame, code_words_bch_error[i] ? 1 : 0, parityError ? 1 : 0);
numeric_msg = ""; numeric_msg = "";
alpha_msg = ""; alpha_msg = "";
alpha_bit_buffer_bits = 0; alpha_bit_buffer_bits = 0;
@@ -213,45 +202,34 @@ void decodeBatch()
} }
else else
{ {
// Message - decode as both numeric and ASCII - not all operators use functionBits to indidcate encoding
int messageBits = (code_words[i] >> 11) & 0xfffff; int messageBits = (code_words[i] >> 11) & 0xfffff;
if (parityError) if (parityError) parity_errors++;
{ if (code_words_bch_error[i]) bch_errors++;
parity_errors++;
} __android_log_print(ANDROID_LOG_DEBUG, "DEMOD", "msg_cw: raw=0x%08X msgbits=0x%05X bch_err=%d parity_err=%d",
if (code_words_bch_error[i]) code_words[i], messageBits, code_words_bch_error[i] ? 1 : 0, parityError ? 1 : 0);
{
bch_errors++;
}
// Numeric format
for (int j = 16; j >= 0; j -= 4) for (int j = 16; j >= 0; j -= 4)
{ {
uint32_t numericBits = (messageBits >> j) & 0xf; uint32_t numericBits = (messageBits >> j) & 0xf;
numericBits = reverse(numericBits) >> (32 - 4); numericBits = reverse(numericBits) >> (32 - 4);
// Spec has 0xa as 'spare', but other decoders treat is as .
const char numericChars[] = { const char numericChars[] = {
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '.', 'U', ' ', '-', ')', '('}; '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '.', 'U', ' ', '-', ')', '('};
char numericChar = numericChars[numericBits]; char numericChar = numericChars[numericBits];
numeric_msg.push_back(numericChar); numeric_msg.push_back(numericChar);
} }
// 7-bit ASCII alpnanumeric format
alpha_bit_buffer = (alpha_bit_buffer << 20) | messageBits; alpha_bit_buffer = (alpha_bit_buffer << 20) | messageBits;
alpha_bit_buffer_bits += 20; alpha_bit_buffer_bits += 20;
while (alpha_bit_buffer_bits >= 7) while (alpha_bit_buffer_bits >= 8)
{ {
// Extract next 7-bit character from bit buffer unsigned char c = (alpha_bit_buffer >> (alpha_bit_buffer_bits - 8)) & 0xff;
char c = (alpha_bit_buffer >> (alpha_bit_buffer_bits - 7)) & 0x7f; c = reverse(c) >> (32 - 8);
// Reverse bit ordering
c = reverse(c) >> (32 - 7);
// Add to received message string (excluding, null, end of text, end ot transmission)
if (c != 0 && c != 0x3 && c != 0x4) if (c != 0 && c != 0x3 && c != 0x4)
{ {
alpha_msg.push_back(c); alpha_msg.push_back(c);
} }
// Remove from bit buffer alpha_bit_buffer_bits -= 8;
alpha_bit_buffer_bits -= 7;
if (alpha_bit_buffer_bits == 0) if (alpha_bit_buffer_bits == 0)
{ {
alpha_bit_buffer = 0; alpha_bit_buffer = 0;
@@ -262,25 +240,16 @@ void decodeBatch()
} }
} }
} }
// Move to next codeword
i++; i++;
} }
} }
} }
void processOneSample(int8_t i, int8_t q) void processBasebandSample(double sample)
{ {
float fi = ((float)i) / 128.0f; ensureDSPInitialized();
float fq = ((float)q) / 128.0f;
std::complex<float> iq(fi, fq); double filt = lowpassBaud.filter(sample);
float deviation;
double fmDemod = phaseDiscri.phaseDiscriminatorDelta(iq, magsqRaw, deviation);
// printf("fmDemod: %.3f\n", fmDemod);
double filt = lowpassBaud.filter(fmDemod);
if (!got_SC) if (!got_SC)
{ {
@@ -288,54 +257,60 @@ void processOneSample(int8_t i, int8_t q)
dc_offset = preambleMovingAverage.asDouble(); dc_offset = preambleMovingAverage.asDouble();
} }
bool data = (filt - dc_offset) >= 0.0; double sample_val = filt - dc_offset;
// printf("filt - dc: %.3f\n", filt - dc_offset);
static double peak_pos = 0.01;
static double peak_neg = -0.01;
if (sample_val > peak_pos) peak_pos = sample_val;
else peak_pos *= 0.9999;
if (sample_val < peak_neg) peak_neg = sample_val;
else peak_neg *= 0.9999;
double threshold = (peak_pos - peak_neg) * 0.15;
if (threshold < 0.005) threshold = 0.005;
if (sample_val > threshold)
{
hysteresis_state = true;
}
else if (sample_val < -threshold)
{
hysteresis_state = false;
}
bool data = hysteresis_state;
if (data != prev_data) if (data != prev_data)
{ {
sync_cnt = SAMPLES_PER_SYMBOL / 2; // reset sync_cnt = SAMPLES_PER_SYMBOL / 2;
} }
else else
{ {
sync_cnt--; // wait until next bit's midpoint sync_cnt--;
if (sync_cnt <= 0) if (sync_cnt <= 0)
{ {
if (bit_inverted) if (bit_inverted) data_bit = data;
{ else data_bit = !data;
data_bit = data;
}
else
{
data_bit = !data;
}
// printf("%d", data_bit);
bits = (bits << 1) | data_bit; bits = (bits << 1) | data_bit;
bit_cnt++; bit_cnt++;
if (bit_cnt > 32) if (bit_cnt > 32) bit_cnt = 32;
{
bit_cnt = 32;
}
if (bit_cnt == 32 && !got_SC) if (bit_cnt == 32 && !got_SC)
{ {
// printf("pop count: %d\n", pop_cnt(bits ^ POCSAG_SYNCCODE));
// printf("pop count inv: %d\n", pop_cnt(bits ^ POCSAG_SYNCCODE_INV));
if (bits == POCSAG_SYNCCODE) if (bits == POCSAG_SYNCCODE)
{ {
got_SC = true; got_SC = true;
bit_inverted = false; bit_inverted = false;
printf("\nSync code found\n");
} }
else if (bits == POCSAG_SYNCCODE_INV) else if (bits == POCSAG_SYNCCODE_INV)
{ {
got_SC = true; got_SC = true;
bit_inverted = true; bit_inverted = true;
printf("\nSync code found\n");
} }
else if (pop_cnt(bits ^ POCSAG_SYNCCODE) <= 3) else if (pop_cnt(bits ^ POCSAG_SYNCCODE) <= 3)
{ {
@@ -344,9 +319,7 @@ void processOneSample(int8_t i, int8_t q)
{ {
got_SC = true; got_SC = true;
bit_inverted = false; bit_inverted = false;
printf("\nSync code found\n");
} }
// else printf("\nSync code not found\n");
} }
else if (pop_cnt(bits ^ POCSAG_SYNCCODE_INV) <= 3) else if (pop_cnt(bits ^ POCSAG_SYNCCODE_INV) <= 3)
{ {
@@ -355,13 +328,12 @@ void processOneSample(int8_t i, int8_t q)
{ {
got_SC = true; got_SC = true;
bit_inverted = true; bit_inverted = true;
printf("\nSync code found\n");
} }
// else printf("\nSync code not found\n");
} }
if (got_SC) if (got_SC)
{ {
__android_log_print(ANDROID_LOG_DEBUG, "DEMOD", "sync_found: inverted=%d bits=0x%08X", bit_inverted ? 1 : 0, bits);
bits = 0; bits = 0;
bit_cnt = 0; bit_cnt = 0;
code_words[0] = POCSAG_SYNCCODE; code_words[0] = POCSAG_SYNCCODE;
@@ -394,7 +366,6 @@ void processOneSample(int8_t i, int8_t q)
if (address > 0 && !numeric_msg.empty()) if (address > 0 && !numeric_msg.empty())
{ {
is_message_ready = true; is_message_ready = true;
printf("Addr: %d | Numeric: %s | Alpha: %s\n", address, numeric_msg.c_str(), alpha_msg.c_str());
} }
else else
{ {
@@ -408,3 +379,101 @@ void processOneSample(int8_t i, int8_t q)
prev_data = data; prev_data = data;
} }
// Process one 8-bit I/Q sample from the RTL-TCP stream: FM-discriminate,
// low-pass filter, slice against a DC estimate, recover bit timing, and
// shift bits into a 32-bit register searching for the POCSAG sync word
// (or its inversion). Once synced, completed codewords are BCH-checked
// and collected; a full batch is handed to decodeBatch().
// Operates entirely on the file-scope demodulator globals (got_SC,
// bits, bit_cnt, word_cnt, code_words, ...), so it is not reentrant.
void processOneSample(int8_t i, int8_t q)
{
    // Normalize the signed 8-bit I/Q pair to roughly [-1, 1).
    float fi = ((float)i) / 128.0f;
    float fq = ((float)q) / 128.0f;
    std::complex<float> iq(fi, fq);
    float deviation;
    // FM demodulation via phase discriminator; magsqRaw is updated as a
    // side effect and exposed as the signal-strength reading.
    double fmDemod = phaseDiscri.phaseDiscriminatorDelta(iq, magsqRaw, deviation);
    double filt = lowpassBaud.filter(fmDemod);
    if (!got_SC) {
        // Before sync, track the DC offset with a moving average over
        // the preamble so the slicer threshold adapts.
        preambleMovingAverage(filt);
        dc_offset = preambleMovingAverage.asDouble();
    }
    // Hard slice around the DC estimate.
    bool data = (filt - dc_offset) >= 0.0;
    if (data != prev_data) {
        // Transition: re-center the bit clock on the edge so sampling
        // happens mid-symbol.
        sync_cnt = SAMPLES_PER_SYMBOL / 2;
    } else {
        sync_cnt--;
        if (sync_cnt <= 0) {
            // Mid-symbol sampling instant: emit one bit, honoring the
            // detected signal polarity.
            if (bit_inverted) {
                data_bit = data;
            } else {
                data_bit = !data;
            }
            bits = (bits << 1) | data_bit;
            bit_cnt++;
            if (bit_cnt > 32) {
                bit_cnt = 32;  // saturate: `bits` acts as a sliding window
            }
            if (bit_cnt == 32 && !got_SC) {
                // Hunt for the sync codeword, exact match first, then
                // within 3 bit errors via BCH repair (also for the
                // inverted-polarity stream).
                if (bits == POCSAG_SYNCCODE) {
                    got_SC = true;
                    bit_inverted = false;
                } else if (bits == POCSAG_SYNCCODE_INV) {
                    got_SC = true;
                    bit_inverted = true;
                } else if (pop_cnt(bits ^ POCSAG_SYNCCODE) <= 3) {
                    uint32_t corrected_cw;
                    if (bchDecode(bits, corrected_cw) && corrected_cw == POCSAG_SYNCCODE) {
                        got_SC = true;
                        bit_inverted = false;
                    }
                } else if (pop_cnt(bits ^ POCSAG_SYNCCODE_INV) <= 3) {
                    uint32_t corrected_cw;
                    if (bchDecode(~bits, corrected_cw) && corrected_cw == POCSAG_SYNCCODE) {
                        got_SC = true;
                        bit_inverted = true;
                    }
                }
                if (got_SC) {
                    __android_log_print(ANDROID_LOG_DEBUG, "DEMOD", "sync_found: inverted=%d bits=0x%08X", bit_inverted ? 1 : 0, bits);
                    // Start a batch: slot 0 holds the sync word.
                    bits = 0;
                    bit_cnt = 0;
                    code_words[0] = POCSAG_SYNCCODE;
                    word_cnt = 1;
                }
            } else if (bit_cnt == 32 && got_SC) {
                // Synced: every 32 bits is one codeword; BCH-repair it
                // and record whether repair failed.
                uint32_t corrected_cw;
                code_words_bch_error[word_cnt] = !bchDecode(bits, corrected_cw);
                code_words[word_cnt] = corrected_cw;
                word_cnt++;
                // word_cnt==1 here means slot 0 was just written (after a
                // decoded batch): it must be the next sync word, else the
                // transmission ended and we drop back to hunting.
                if (word_cnt == 1 && corrected_cw != POCSAG_SYNCCODE) {
                    got_SC = false;
                    bit_inverted = false;
                }
                if (word_cnt == PAGERDEMOD_BATCH_WORDS) {
                    decodeBatch();
                    batch_num++;
                    word_cnt = 0;
                }
                bits = 0;
                bit_cnt = 0;
                // Flag a completed message for the polling layer.
                if (address > 0 && !numeric_msg.empty()) {
                    is_message_ready = true;
                }
            }
            // Schedule the next sampling instant one symbol away.
            sync_cnt = SAMPLES_PER_SYMBOL;
        }
    }
    prev_data = data;
}

View File

@@ -34,6 +34,8 @@ extern Lowpass<double> lowpassBaud;
extern MovingAverageUtil<double, double, 2048> preambleMovingAverage; extern MovingAverageUtil<double, double, 2048> preambleMovingAverage;
extern double magsqRaw; extern double magsqRaw;
void ensureDSPInitialized();
void processOneSample(int8_t i, int8_t q); void processOneSample(int8_t i, int8_t q);
void processBasebandSample(double sample);
#endif #endif

View File

@@ -5,13 +5,18 @@
#include <mutex> #include <mutex>
#include <vector> #include <vector>
#include <sstream> #include <sstream>
#include <iomanip>
#include <chrono> #include <chrono>
#include <unistd.h> #include <unistd.h>
#include <arpa/inet.h> #include <arpa/inet.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <fcntl.h> #include <fcntl.h>
#include <android/log.h> #include <android/log.h>
#include <errno.h> #include <errno.h>
#include "demod.h" #include "demod.h"
#include "audio_demod.h"
#include "audio_fft.h"
#define BUF_SIZE 8192 #define BUF_SIZE 8192
@@ -23,6 +28,8 @@ static std::mutex msgMutex;
static std::vector<std::string> messageBuffer; static std::vector<std::string> messageBuffer;
static std::mutex demodDataMutex; static std::mutex demodDataMutex;
static std::mutex fftMutex;
static AudioFFT* audioFFT = nullptr;
static JavaVM *g_vm = nullptr; static JavaVM *g_vm = nullptr;
static jobject g_obj = nullptr; static jobject g_obj = nullptr;
@@ -84,18 +91,126 @@ Java_org_noxylva_lbjconsole_flutter_RtlTcpChannelHandler_startClientAsync(
env->ReleaseStringUTFChars(port_, portStr); env->ReleaseStringUTFChars(port_, portStr);
} }
// JNI entry point: push a block of 16-bit PCM microphone samples into
// the native pipeline. Samples are (a) accumulated into the spectrum
// FFT for the UI and (b) run through the POCSAG audio demodulator; a
// completed message is appended to the shared buffer in the
// "[MSG]address|function|content" format polled by pollMessages().
extern "C" JNIEXPORT void JNICALL
Java_org_noxylva_lbjconsole_flutter_AudioInputHandler_nativePushAudio(
    JNIEnv *env, jobject thiz, jshortArray audioData, jint size) {
  ensureDSPInitialized();
  jshort *samples = env->GetShortArrayElements(audioData, NULL);
  if (samples == NULL) {
    // Fix: GetShortArrayElements can fail (returns NULL with a pending
    // exception); previously this pointer was dereferenced unchecked.
    return;
  }
  {
    std::lock_guard<std::mutex> fftLock(fftMutex);
    if (!audioFFT) {
      // Lazily created; intentionally kept alive for process lifetime.
      audioFFT = new AudioFFT(4096);
    }
    audioFFT->processSamples(samples, size);
  }
  std::lock_guard<std::mutex> demodLock(demodDataMutex);
  processAudioSamples(samples, size);
  if (is_message_ready) {
    std::ostringstream ss;
    std::lock_guard<std::mutex> msgLock(msgMutex);
    // Function bits 3 conventionally indicate alphanumeric content;
    // fall back to whichever decode produced data.
    std::string message_content;
    if (function_bits == 3) {
      message_content = alpha_msg;
    } else {
      message_content = numeric_msg;
    }
    if (message_content.empty()) {
      message_content = alpha_msg.empty() ? numeric_msg : alpha_msg;
    }
    __android_log_print(ANDROID_LOG_DEBUG, "AUDIO",
        "msg_ready: addr=%u func=%d alpha_len=%zu numeric_len=%zu",
        address, function_bits, alpha_msg.length(), numeric_msg.length());
    ss << "[MSG]" << address << "|" << function_bits << "|" << message_content;
    messageBuffer.push_back(ss.str());
    is_message_ready = false;
    numeric_msg.clear();
    alpha_msg.clear();
  }
  // Fix: the samples were only read, so release with JNI_ABORT to skip
  // the needless copy-back that mode 0 performs.
  env->ReleaseShortArrayElements(audioData, samples, JNI_ABORT);
}
extern "C" JNIEXPORT jdouble JNICALL extern "C" JNIEXPORT jdouble JNICALL
Java_org_noxylva_lbjconsole_flutter_RtlTcpChannelHandler_getSignalStrength(JNIEnv *, jobject) Java_org_noxylva_lbjconsole_flutter_RtlTcpChannelHandler_getSignalStrength(JNIEnv *, jobject)
{ {
return (jdouble)magsqRaw; return (jdouble)magsqRaw;
} }
// JNI entry point: discard any queued messages and reset the pending
// message state in the demodulator. Takes both locks in the same order
// as nativePushAudio (demod, then msg) to avoid deadlock.
extern "C" JNIEXPORT void JNICALL
Java_org_noxylva_lbjconsole_flutter_AudioInputHandler_clearMessageBuffer(JNIEnv *, jobject)
{
    std::lock_guard<std::mutex> demodLock(demodDataMutex);
    std::lock_guard<std::mutex> msgLock(msgMutex);
    is_message_ready = false;
    numeric_msg.clear();
    alpha_msg.clear();
    messageBuffer.clear();
}
// JNI entry point: return the latest audio spectrum downsampled to 500
// bins (normalized [0,1] dB values). Returns an empty array until the
// first audio block has been pushed.
extern "C" JNIEXPORT jfloatArray JNICALL
Java_org_noxylva_lbjconsole_flutter_AudioInputHandler_getAudioSpectrum(JNIEnv *env, jobject)
{
    std::lock_guard<std::mutex> fftLock(fftMutex);
    if (audioFFT == nullptr) {
        return env->NewFloatArray(0);
    }
    const int spectrumSize = audioFFT->getFFTSize() / 2;
    std::vector<float> spectrum(spectrumSize);
    audioFFT->getSpectrum(spectrum.data(), spectrumSize);
    // Decimate by picking one source bin per output bin.
    const int outputBins = 500;
    std::vector<float> downsampled(outputBins);
    for (int bin = 0; bin < outputBins; ++bin) {
        downsampled[bin] = spectrum[(bin * spectrumSize) / outputBins];
    }
    jfloatArray result = env->NewFloatArray(outputBins);
    env->SetFloatArrayRegion(result, 0, outputBins, downsampled.data());
    return result;
}
// JNI entry point: drain the queued messages as one UTF-8 byte array,
// one message per line. Returns an empty array when nothing is queued.
// Bytes (not a jstring) are returned so the Dart side controls decoding.
extern "C" JNIEXPORT jbyteArray JNICALL
Java_org_noxylva_lbjconsole_flutter_AudioInputHandler_pollMessages(JNIEnv *env, jobject)
{
    std::lock_guard<std::mutex> demodLock(demodDataMutex);
    std::lock_guard<std::mutex> msgLock(msgMutex);
    if (messageBuffer.empty())
    {
        return env->NewByteArray(0);
    }
    std::string joined;
    for (const auto &msg : messageBuffer)
    {
        joined += msg;
        joined += "\n";
    }
    messageBuffer.clear();
    jbyteArray byteArray = env->NewByteArray(joined.size());
    env->SetByteArrayRegion(byteArray, 0, joined.size(), (const jbyte*)joined.c_str());
    return byteArray;
}
extern "C" JNIEXPORT jboolean JNICALL extern "C" JNIEXPORT jboolean JNICALL
Java_org_noxylva_lbjconsole_flutter_RtlTcpChannelHandler_isConnected(JNIEnv *, jobject) Java_org_noxylva_lbjconsole_flutter_RtlTcpChannelHandler_isConnected(JNIEnv *, jobject)
{ {
return (running && sockfd_atomic.load() >= 0) ? JNI_TRUE : JNI_FALSE; return (running && sockfd_atomic.load() >= 0) ? JNI_TRUE : JNI_FALSE;
} }
extern "C" JNIEXPORT jstring JNICALL extern "C" JNIEXPORT jbyteArray JNICALL
Java_org_noxylva_lbjconsole_flutter_RtlTcpChannelHandler_pollMessages(JNIEnv *env, jobject /*this*/) Java_org_noxylva_lbjconsole_flutter_RtlTcpChannelHandler_pollMessages(JNIEnv *env, jobject /*this*/)
{ {
std::lock_guard<std::mutex> demodLock(demodDataMutex); std::lock_guard<std::mutex> demodLock(demodDataMutex);
@@ -104,13 +219,17 @@ Java_org_noxylva_lbjconsole_flutter_RtlTcpChannelHandler_pollMessages(JNIEnv *en
if (messageBuffer.empty()) if (messageBuffer.empty())
{ {
return env->NewStringUTF(""); return env->NewByteArray(0);
} }
std::ostringstream ss; std::ostringstream ss;
for (auto &msg : messageBuffer) for (auto &msg : messageBuffer)
ss << msg << "\n"; ss << msg << "\n";
messageBuffer.clear(); messageBuffer.clear();
return env->NewStringUTF(ss.str().c_str());
std::string result = ss.str();
jbyteArray byteArray = env->NewByteArray(result.size());
env->SetByteArrayRegion(byteArray, 0, result.size(), (const jbyte*)result.c_str());
return byteArray;
} }
void clientThread(std::string host, int port) void clientThread(std::string host, int port)
@@ -171,8 +290,8 @@ void clientThread(std::string host, int port)
goto cleanup; goto cleanup;
} }
lowpassBaud.create(301, SAMPLE_RATE, BAUD_RATE * 5.0f); ensureDSPInitialized();
phaseDiscri.setFMScaling(SAMPLE_RATE / (2.0f * DEVIATION));
sockfd_atomic.store(localSockfd); sockfd_atomic.store(localSockfd);
{ {
std::lock_guard<std::mutex> lock(msgMutex); std::lock_guard<std::mutex> lock(msgMutex);
@@ -193,27 +312,40 @@ void clientThread(std::string host, int port)
std::lock_guard<std::mutex> demodLock(demodDataMutex); std::lock_guard<std::mutex> demodLock(demodDataMutex);
processOneSample(i_ds, q_ds); processOneSample(i_ds, q_ds);
if (is_message_ready)
{
std::ostringstream ss;
std::lock_guard<std::mutex> msgLock(msgMutex);
std::ostringstream alpha_hex;
for (unsigned char ch : alpha_msg) {
alpha_hex << std::hex << std::uppercase << (int)ch << ",";
}
__android_log_print(ANDROID_LOG_DEBUG, "RTL-TCP", "alpha_msg_bytes: %s", alpha_hex.str().c_str());
__android_log_print(ANDROID_LOG_DEBUG, "RTL-TCP", "numeric_msg: %s func=%d", numeric_msg.c_str(), function_bits);
std::string message_content;
if (function_bits == 3) {
message_content = alpha_msg;
} else {
message_content = numeric_msg;
}
if (message_content.empty()) {
message_content = alpha_msg.empty() ? numeric_msg : alpha_msg;
}
ss << "[MSG]" << address << "|" << function_bits << "|" << message_content;
messageBuffer.push_back(ss.str());
is_message_ready = false;
numeric_msg.clear();
alpha_msg.clear();
}
acc_i = acc_q = 0; acc_i = acc_q = 0;
decim_counter = 0; decim_counter = 0;
} }
} }
if (is_message_ready)
{
std::ostringstream ss;
std::lock_guard<std::mutex> demodLock(demodDataMutex);
std::lock_guard<std::mutex> msgLock(msgMutex);
std::string message_content = alpha_msg.empty() ? numeric_msg : alpha_msg;
ss << "[MSG]" << address << "|" << function_bits << "|" << message_content;
messageBuffer.push_back(ss.str());
is_message_ready = false;
numeric_msg.clear();
alpha_msg.clear();
}
} }
if (n < 0 && running) if (n < 0 && running)

View File

@@ -0,0 +1,202 @@
package org.noxylva.lbjconsole.flutter
import android.Manifest
import android.content.Context
import android.content.pm.PackageManager
import android.media.AudioFormat
import android.media.AudioRecord
import android.media.MediaRecorder
import android.os.Handler
import android.os.Looper
import android.util.Log
import androidx.core.content.ContextCompat
import io.flutter.embedding.engine.FlutterEngine
import io.flutter.plugin.common.MethodCall
import io.flutter.plugin.common.MethodChannel
import io.flutter.plugin.common.EventChannel
import java.util.concurrent.atomic.AtomicBoolean
import java.nio.charset.Charset
/**
 * Bridges Android microphone capture to the native railway-pager demodulator
 * and forwards decoded messages to Flutter.
 *
 * Method channel: "start" / "stop" control capture, "getSpectrum" returns the
 * native FFT magnitudes. Event channel: every poll tick emits a status map
 * ({"listening": Boolean}) plus one map per decoded "[MSG]addr|func|body"
 * line drained from the native pollMessages() buffer.
 */
class AudioInputHandler(private val context: Context) : MethodChannel.MethodCallHandler, EventChannel.StreamHandler {

    private var audioRecord: AudioRecord? = null
    private val isRecording = AtomicBoolean(false)
    private var recordingThread: Thread? = null
    private val sampleRate = 48000

    // AudioRecord.getMinBufferSize() reports a size in BYTES and returns a
    // negative error code (ERROR / ERROR_BAD_VALUE) on failure. Keep 2x
    // headroom against overruns; fall back to one second of 16-bit mono audio
    // if the query fails, so the AudioRecord constructor never sees a
    // negative size. (The original multiplied an unchecked result by 2.)
    private val bufferSize: Int = AudioRecord.getMinBufferSize(
        sampleRate,
        AudioFormat.CHANNEL_IN_MONO,
        AudioFormat.ENCODING_PCM_16BIT
    ).let { min -> if (min > 0) min * 2 else sampleRate * 2 }

    private val handler = Handler(Looper.getMainLooper())
    private var eventSink: EventChannel.EventSink? = null

    companion object {
        private const val METHOD_CHANNEL = "org.noxylva.lbjconsole/audio_input"
        private const val EVENT_CHANNEL = "org.noxylva.lbjconsole/audio_input_event"
        private const val TAG = "AudioInputHandler"
        // Drain the native message buffer at 5 Hz.
        private const val POLL_INTERVAL_MS = 200L

        init {
            System.loadLibrary("railwaypagerdemod")
        }

        /** Wires this handler's method and event channels into the engine. */
        fun registerWith(flutterEngine: FlutterEngine, context: Context) {
            val handler = AudioInputHandler(context)
            MethodChannel(flutterEngine.dartExecutor.binaryMessenger, METHOD_CHANNEL)
                .setMethodCallHandler(handler)
            EventChannel(flutterEngine.dartExecutor.binaryMessenger, EVENT_CHANNEL)
                .setStreamHandler(handler)
        }
    }

    private external fun nativePushAudio(data: ShortArray, size: Int)
    private external fun pollMessages(): ByteArray
    private external fun clearMessageBuffer()
    private external fun getAudioSpectrum(): FloatArray

    override fun onMethodCall(call: MethodCall, result: MethodChannel.Result) {
        when (call.method) {
            "start" -> {
                // Discard stale native messages before a fresh session.
                clearMessageBuffer()
                if (startRecording()) {
                    result.success(null)
                } else {
                    result.error("AUDIO_ERROR", "Failed to start audio recording", null)
                }
            }
            "stop" -> {
                stopRecording()
                clearMessageBuffer()
                result.success(null)
            }
            "getSpectrum" -> {
                try {
                    result.success(getAudioSpectrum().toList())
                } catch (e: Exception) {
                    result.error("FFT_ERROR", "Failed to get spectrum", e.message)
                }
            }
            else -> result.notImplemented()
        }
    }

    override fun onListen(arguments: Any?, events: EventChannel.EventSink?) {
        Log.d(TAG, "EventChannel onListen")
        this.eventSink = events
        startPolling()
    }

    override fun onCancel(arguments: Any?) {
        Log.d(TAG, "EventChannel onCancel")
        // Cancel pending poll ticks; the runnable also self-terminates when
        // it sees a null sink.
        handler.removeCallbacksAndMessages(null)
        this.eventSink = null
    }

    /**
     * Repeatedly drains native messages on the main looper and forwards a
     * status map plus one data map per decoded message to the event sink.
     * Stops itself when the sink is cleared by onCancel().
     */
    private fun startPolling() {
        handler.post(object : Runnable {
            override fun run() {
                if (eventSink == null) {
                    return
                }
                val recording = isRecording.get()
                // Native side returns raw bytes; decode as ISO-8859-1 so
                // every byte value survives the round trip unchanged.
                val logsBytes = pollMessages()
                val logs = if (logsBytes.isNotEmpty()) String(logsBytes, Charsets.ISO_8859_1) else ""
                val regex = "\\[MSG\\]\\s*(\\d+)\\|(-?\\d+)\\|(.*)".toRegex()
                val statusMap = mutableMapOf<String, Any?>()
                statusMap["listening"] = recording
                eventSink?.success(statusMap)
                if (logs.isNotEmpty()) {
                    regex.findAll(logs).forEach { match ->
                        try {
                            val dataMap = mutableMapOf<String, Any?>()
                            dataMap["address"] = match.groupValues[1]
                            dataMap["func"] = match.groupValues[2]
                            dataMap["numeric"] = match.groupValues[3]
                            eventSink?.success(dataMap)
                        } catch (e: Exception) {
                            Log.e(TAG, "decode_fail", e)
                        }
                    }
                }
                handler.postDelayed(this, POLL_INTERVAL_MS)
            }
        })
    }

    /**
     * Starts microphone capture and the thread that pushes PCM into native
     * code. Returns false when RECORD_AUDIO is not granted or AudioRecord
     * fails to initialize; idempotent while already recording.
     */
    private fun startRecording(): Boolean {
        if (isRecording.get()) return true
        if (ContextCompat.checkSelfPermission(
                context,
                Manifest.permission.RECORD_AUDIO
            ) != PackageManager.PERMISSION_GRANTED
        ) {
            Log.e(TAG, "Permission not granted")
            return false
        }
        try {
            val audioSource = MediaRecorder.AudioSource.UNPROCESSED
            audioRecord = AudioRecord(
                audioSource,
                sampleRate,
                AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT,
                bufferSize
            )
            if (audioRecord?.state != AudioRecord.STATE_INITIALIZED) {
                Log.e(TAG, "AudioRecord init failed")
                return false
            }
            audioRecord?.startRecording()
            isRecording.set(true)
            recordingThread = Thread {
                // bufferSize is in bytes; PCM-16 samples are 2 bytes each.
                // (The original allocated ShortArray(bufferSize), twice the
                // needed sample count.)
                val buffer = ShortArray(bufferSize / 2)
                while (isRecording.get()) {
                    val readSize = audioRecord?.read(buffer, 0, buffer.size) ?: 0
                    if (readSize > 0) {
                        nativePushAudio(buffer, readSize)
                    }
                }
            }
            recordingThread?.priority = Thread.MAX_PRIORITY
            recordingThread?.start()
            return true
        } catch (e: Exception) {
            Log.e(TAG, "Start recording exception", e)
            stopRecording()
            return false
        }
    }

    /**
     * Stops capture: flips the flag so the capture loop exits, waits (bounded)
     * for the thread, then stops and releases the AudioRecord. Safe to call
     * when not recording.
     */
    private fun stopRecording() {
        isRecording.set(false)
        try {
            recordingThread?.join(1000)
        } catch (e: InterruptedException) {
            e.printStackTrace()
        }
        try {
            if (audioRecord?.recordingState == AudioRecord.RECORDSTATE_RECORDING) {
                audioRecord?.stop()
            }
            audioRecord?.release()
        } catch (e: Exception) {
            Log.e(TAG, "Stop recording exception", e)
        }
        audioRecord = null
        recordingThread = null
    }
}

View File

@@ -7,5 +7,6 @@ class MainActivity: FlutterActivity() {
override fun configureFlutterEngine(flutterEngine: FlutterEngine) { override fun configureFlutterEngine(flutterEngine: FlutterEngine) {
super.configureFlutterEngine(flutterEngine) super.configureFlutterEngine(flutterEngine)
RtlTcpChannelHandler.registerWith(flutterEngine) RtlTcpChannelHandler.registerWith(flutterEngine)
AudioInputHandler.registerWith(flutterEngine, applicationContext)
} }
} }

View File

@@ -10,7 +10,7 @@ import java.nio.charset.Charset
class RtlTcpChannelHandler : EventChannel.StreamHandler { class RtlTcpChannelHandler : EventChannel.StreamHandler {
private external fun startClientAsync(host: String, port: String) private external fun startClientAsync(host: String, port: String)
private external fun pollMessages(): String private external fun pollMessages(): ByteArray
private external fun nativeStopClient() private external fun nativeStopClient()
private external fun getSignalStrength(): Double private external fun getSignalStrength(): Double
private external fun isConnected(): Boolean private external fun isConnected(): Boolean
@@ -61,6 +61,7 @@ class RtlTcpChannelHandler : EventChannel.StreamHandler {
override fun onListen(arguments: Any?, events: EventChannel.EventSink?) { override fun onListen(arguments: Any?, events: EventChannel.EventSink?) {
android.util.Log.d("RTL-TCP", "evt_listen") android.util.Log.d("RTL-TCP", "evt_listen")
this.eventSink = events this.eventSink = events
lastConnectedState = true
startPolling() startPolling()
} }
@@ -77,34 +78,66 @@ class RtlTcpChannelHandler : EventChannel.StreamHandler {
android.util.Log.w("RTL-TCP", "evt_null"); android.util.Log.w("RTL-TCP", "evt_null");
return; return;
} }
val connected = try {
isConnected()
} catch (e: Exception) {
android.util.Log.e("RTL-TCP", "isConnected() failed", e)
false
}
val connected = isConnected() val strength = try {
val strength = getSignalStrength() getSignalStrength()
val logs = pollMessages() } catch (e: Exception) {
val regex = "\\[MSG\\]\\s*(\\d+)\\|(-?\\d+)\\|(.*)".toRegex() android.util.Log.e("RTL-TCP", "getSignalStrength() failed", e)
0.0
}
if (connected != lastConnectedState || connected) { val logsBytes = try {
pollMessages()
} catch (e: Exception) {
android.util.Log.e("RTL-TCP", "pollMessages() failed", e)
ByteArray(0)
}
val regex = "\\[MSG\\]\\s*(\\d+)\\|(-?\\d+)\\|([^\\n]*)".toRegex()
if (logsBytes.isNotEmpty()) {
val preview = if (logsBytes.size > 200) "${logsBytes.size} bytes" else logsBytes.contentToString()
android.util.Log.d("RTL-TCP", "pollBytes: $preview")
}
val logs = if (logsBytes.isNotEmpty()) String(logsBytes, Charsets.ISO_8859_1) else ""
if (connected != lastConnectedState) {
val statusMap = mutableMapOf<String, Any?>() val statusMap = mutableMapOf<String, Any?>()
statusMap["connected"] = connected statusMap["connected"] = connected
statusMap["magsqRaw"] = strength statusMap["magsqRaw"] = strength
eventSink?.success(statusMap) try {
eventSink?.success(statusMap)
} catch (e: Exception) {
android.util.Log.e("RTL-TCP", "eventSink status send failed", e)
}
lastConnectedState = connected lastConnectedState = connected
} }
if (logs.isNotEmpty()) { if (logs.isNotEmpty()) {
regex.findAll(logs).forEach { match -> regex.findAll(logs).forEach { match ->
try { try {
val addr = match.groupValues[1]
val func = match.groupValues[2]
val content = match.groupValues[3]
val dataMap = mutableMapOf<String, Any?>() val dataMap = mutableMapOf<String, Any?>()
dataMap["address"] = match.groupValues[1] dataMap["address"] = addr
dataMap["func"] = match.groupValues[2] dataMap["func"] = func
dataMap["numeric"] = content
val gbkBytes = match.groupValues[3].toByteArray(Charsets.ISO_8859_1)
val utf8String = String(gbkBytes, Charset.forName("GBK"))
dataMap["numeric"] = utf8String
dataMap["magsqRaw"] = strength dataMap["magsqRaw"] = strength
eventSink?.success(dataMap) try {
eventSink?.success(dataMap)
} catch (e: Exception) {
android.util.Log.e("RTL-TCP", "eventSink data send failed", e)
}
} catch (e: Exception) { } catch (e: Exception) {
android.util.Log.e("RTL-TCP", "decode_fail", e) android.util.Log.e("RTL-TCP", "decode_fail", e)
} }

View File

@@ -7,13 +7,27 @@ allprojects {
} }
rootProject.buildDir = "../build" rootProject.buildDir = "../build"
subprojects { subprojects {
project.buildDir = "${rootProject.buildDir}/${project.name}" project.buildDir = "${rootProject.buildDir}/${project.name}"
} }
subprojects { subprojects {
project.evaluationDependsOn(":app") project.evaluationDependsOn(":app")
} }
subprojects {
if (project.name != "app") {
project.afterEvaluate {
if (project.hasProperty("android")) {
project.android {
compileSdk 36
}
}
}
}
}
tasks.register("clean", Delete) { tasks.register("clean", Delete) {
delete rootProject.buildDir delete rootProject.buildDir
} }

View File

@@ -13,15 +13,17 @@ import 'package:lbjconsole/services/database_service.dart';
import 'package:lbjconsole/services/notification_service.dart'; import 'package:lbjconsole/services/notification_service.dart';
import 'package:lbjconsole/services/background_service.dart'; import 'package:lbjconsole/services/background_service.dart';
import 'package:lbjconsole/services/rtl_tcp_service.dart'; import 'package:lbjconsole/services/rtl_tcp_service.dart';
import 'package:lbjconsole/services/audio_input_service.dart';
import 'package:lbjconsole/themes/app_theme.dart'; import 'package:lbjconsole/themes/app_theme.dart';
import 'dart:convert'; import 'package:lbjconsole/widgets/audio_waterfall_widget.dart';
class _ConnectionStatusWidget extends StatefulWidget { class _ConnectionStatusWidget extends StatefulWidget {
final BLEService bleService; final BLEService bleService;
final RtlTcpService rtlTcpService; final RtlTcpService rtlTcpService;
final DateTime? lastReceivedTime; final DateTime? lastReceivedTime;
final DateTime? rtlTcpLastReceivedTime; final DateTime? rtlTcpLastReceivedTime;
final bool rtlTcpEnabled; final DateTime? audioLastReceivedTime;
final InputSource inputSource;
final bool rtlTcpConnected; final bool rtlTcpConnected;
const _ConnectionStatusWidget({ const _ConnectionStatusWidget({
@@ -29,7 +31,8 @@ class _ConnectionStatusWidget extends StatefulWidget {
required this.rtlTcpService, required this.rtlTcpService,
required this.lastReceivedTime, required this.lastReceivedTime,
required this.rtlTcpLastReceivedTime, required this.rtlTcpLastReceivedTime,
required this.rtlTcpEnabled, required this.audioLastReceivedTime,
required this.inputSource,
required this.rtlTcpConnected, required this.rtlTcpConnected,
}); });
@@ -59,6 +62,15 @@ class _ConnectionStatusWidgetState extends State<_ConnectionStatusWidget> {
_deviceStatus = widget.bleService.deviceStatus; _deviceStatus = widget.bleService.deviceStatus;
} }
@override
void didUpdateWidget(covariant _ConnectionStatusWidget oldWidget) {
super.didUpdateWidget(oldWidget);
if (oldWidget.inputSource != widget.inputSource ||
oldWidget.rtlTcpConnected != widget.rtlTcpConnected) {
setState(() {});
}
}
@override @override
void dispose() { void dispose() {
_connectionSubscription?.cancel(); _connectionSubscription?.cancel();
@@ -67,18 +79,31 @@ class _ConnectionStatusWidgetState extends State<_ConnectionStatusWidget> {
@override @override
Widget build(BuildContext context) { Widget build(BuildContext context) {
final isRtlTcpMode = widget.rtlTcpEnabled; bool isConnected;
final rtlTcpConnected = widget.rtlTcpConnected; Color statusColor;
String statusText;
DateTime? displayTime;
final isConnected = isRtlTcpMode ? rtlTcpConnected : _isConnected; switch (widget.inputSource) {
final statusColor = isRtlTcpMode case InputSource.rtlTcp:
? (rtlTcpConnected ? Colors.green : Colors.red) isConnected = widget.rtlTcpConnected;
: (_isConnected ? Colors.green : Colors.red); statusColor = isConnected ? Colors.green : Colors.red;
final statusText = isRtlTcpMode statusText = isConnected ? '已连接' : '未连接';
? (rtlTcpConnected ? '已连接' : '未连接') displayTime = widget.rtlTcpLastReceivedTime;
: _deviceStatus; break;
case InputSource.audioInput:
final lastReceivedTime = isRtlTcpMode ? widget.rtlTcpLastReceivedTime : widget.lastReceivedTime; isConnected = AudioInputService().isListening;
statusColor = isConnected ? Colors.green : Colors.red;
statusText = isConnected ? '监听中' : '已停止';
displayTime = widget.audioLastReceivedTime;
break;
case InputSource.bluetooth:
isConnected = _isConnected;
statusColor = isConnected ? Colors.green : Colors.red;
statusText = _deviceStatus;
displayTime = widget.lastReceivedTime;
break;
}
return Row( return Row(
children: [ children: [
@@ -86,12 +111,12 @@ class _ConnectionStatusWidgetState extends State<_ConnectionStatusWidget> {
mainAxisAlignment: MainAxisAlignment.center, mainAxisAlignment: MainAxisAlignment.center,
crossAxisAlignment: CrossAxisAlignment.start, crossAxisAlignment: CrossAxisAlignment.start,
children: [ children: [
if (lastReceivedTime == null || !isConnected) ...[ if (displayTime == null || !isConnected) ...[
Text(statusText, Text(statusText,
style: const TextStyle(color: Colors.white70, fontSize: 12)), style: const TextStyle(color: Colors.white70, fontSize: 12)),
], ],
_LastReceivedTimeWidget( _LastReceivedTimeWidget(
lastReceivedTime: lastReceivedTime, lastReceivedTime: displayTime,
isConnected: isConnected, isConnected: isConnected,
), ),
], ],
@@ -207,15 +232,21 @@ class _MainScreenState extends State<MainScreen> with WidgetsBindingObserver {
StreamSubscription? _connectionSubscription; StreamSubscription? _connectionSubscription;
StreamSubscription? _rtlTcpConnectionSubscription; StreamSubscription? _rtlTcpConnectionSubscription;
StreamSubscription? _audioConnectionSubscription;
StreamSubscription? _dataSubscription; StreamSubscription? _dataSubscription;
StreamSubscription? _rtlTcpDataSubscription; StreamSubscription? _rtlTcpDataSubscription;
StreamSubscription? _audioDataSubscription;
StreamSubscription? _lastReceivedTimeSubscription; StreamSubscription? _lastReceivedTimeSubscription;
StreamSubscription? _rtlTcpLastReceivedTimeSubscription; StreamSubscription? _rtlTcpLastReceivedTimeSubscription;
StreamSubscription? _audioLastReceivedTimeSubscription;
StreamSubscription? _settingsSubscription; StreamSubscription? _settingsSubscription;
DateTime? _lastReceivedTime; DateTime? _lastReceivedTime;
DateTime? _rtlTcpLastReceivedTime; DateTime? _rtlTcpLastReceivedTime;
DateTime? _audioLastReceivedTime;
bool _isHistoryEditMode = false; bool _isHistoryEditMode = false;
bool _rtlTcpEnabled = false;
InputSource _inputSource = InputSource.bluetooth;
bool _rtlTcpConnected = false; bool _rtlTcpConnected = false;
bool _isConnected = false; bool _isConnected = false;
final GlobalKey<HistoryScreenState> _historyScreenKey = final GlobalKey<HistoryScreenState> _historyScreenKey =
@@ -230,7 +261,7 @@ class _MainScreenState extends State<MainScreen> with WidgetsBindingObserver {
_bleService = BLEService(); _bleService = BLEService();
_rtlTcpService = RtlTcpService(); _rtlTcpService = RtlTcpService();
_bleService.initialize(); _bleService.initialize();
_loadRtlTcpSettings(); _loadInputSettings();
_initializeServices(); _initializeServices();
_checkAndStartBackgroundService(); _checkAndStartBackgroundService();
_setupConnectionListener(); _setupConnectionListener();
@@ -248,23 +279,28 @@ class _MainScreenState extends State<MainScreen> with WidgetsBindingObserver {
} }
} }
void _loadRtlTcpSettings() async { void _loadInputSettings() async {
developer.log('rtl_tcp: load_settings');
final settings = await _databaseService.getAllSettings(); final settings = await _databaseService.getAllSettings();
developer.log('rtl_tcp: settings_loaded: enabled=${(settings?['rtlTcpEnabled'] ?? 0) == 1}, host=${settings?['rtlTcpHost']?.toString() ?? '127.0.0.1'}, port=${settings?['rtlTcpPort']?.toString() ?? '14423'}'); final sourceStr = settings?['inputSource'] as String? ?? 'bluetooth';
if (mounted) { if (mounted) {
final newSource = InputSource.values.firstWhere(
(e) => e.name == sourceStr,
orElse: () => InputSource.bluetooth,
);
setState(() { setState(() {
_rtlTcpEnabled = (settings?['rtlTcpEnabled'] ?? 0) == 1; _inputSource = newSource;
_rtlTcpConnected = _rtlTcpService.isConnected; _rtlTcpConnected = _rtlTcpService.isConnected;
}); });
if (_rtlTcpEnabled && !_rtlTcpConnected) { if (_inputSource == InputSource.rtlTcp && !_rtlTcpConnected) {
final host = settings?['rtlTcpHost']?.toString() ?? '127.0.0.1'; final host = settings?['rtlTcpHost']?.toString() ?? '127.0.0.1';
final port = settings?['rtlTcpPort']?.toString() ?? '14423'; final port = settings?['rtlTcpPort']?.toString() ?? '14423';
developer.log('rtl_tcp: auto_connect');
_connectToRtlTcp(host, port); _connectToRtlTcp(host, port);
} else { } else if (_inputSource == InputSource.audioInput) {
developer.log('rtl_tcp: skip_connect: enabled=$_rtlTcpEnabled, connected=$_rtlTcpConnected'); await AudioInputService().startListening();
setState(() {});
} }
} }
} }
@@ -292,41 +328,52 @@ class _MainScreenState extends State<MainScreen> with WidgetsBindingObserver {
_rtlTcpLastReceivedTimeSubscription = _rtlTcpLastReceivedTimeSubscription =
_rtlTcpService.lastReceivedTimeStream.listen((time) { _rtlTcpService.lastReceivedTimeStream.listen((time) {
if (mounted) { if (mounted) {
if (_rtlTcpEnabled) { setState(() {
setState(() { _rtlTcpLastReceivedTime = time;
_rtlTcpLastReceivedTime = time; });
}); }
} });
_audioLastReceivedTimeSubscription =
AudioInputService().lastReceivedTimeStream.listen((time) {
if (mounted) {
setState(() {
_audioLastReceivedTime = time;
});
} }
}); });
} }
void _setupSettingsListener() { void _setupSettingsListener() {
developer.log('rtl_tcp: setup_listener');
_settingsSubscription = _settingsSubscription =
DatabaseService.instance.onSettingsChanged((settings) { DatabaseService.instance.onSettingsChanged((settings) {
developer.log('rtl_tcp: settings_changed: enabled=${(settings['rtlTcpEnabled'] ?? 0) == 1}, host=${settings['rtlTcpHost']?.toString() ?? '127.0.0.1'}, port=${settings['rtlTcpPort']?.toString() ?? '14423'}');
if (mounted) { if (mounted) {
final rtlTcpEnabled = (settings['rtlTcpEnabled'] ?? 0) == 1; final sourceStr = settings['inputSource'] as String? ?? 'bluetooth';
if (rtlTcpEnabled != _rtlTcpEnabled) { final newInputSource = InputSource.values.firstWhere(
setState(() { (e) => e.name == sourceStr,
_rtlTcpEnabled = rtlTcpEnabled; orElse: () => InputSource.bluetooth,
}); );
if (rtlTcpEnabled) { setState(() {
final host = settings['rtlTcpHost']?.toString() ?? '127.0.0.1'; _inputSource = newInputSource;
final port = settings['rtlTcpPort']?.toString() ?? '14423'; });
_connectToRtlTcp(host, port);
} else { switch (newInputSource) {
_rtlTcpConnectionSubscription?.cancel(); case InputSource.rtlTcp:
_rtlTcpDataSubscription?.cancel(); setState(() {
_rtlTcpLastReceivedTimeSubscription?.cancel(); _rtlTcpConnected = _rtlTcpService.isConnected;
_rtlTcpService.disconnect(); });
setState(() { break;
_rtlTcpConnected = false; case InputSource.audioInput:
_rtlTcpLastReceivedTime = null; setState(() {});
}); break;
} case InputSource.bluetooth:
_rtlTcpService.disconnect();
setState(() {
_rtlTcpConnected = false;
_rtlTcpLastReceivedTime = null;
});
break;
} }
if (_currentIndex == 1) { if (_currentIndex == 1) {
@@ -347,20 +394,22 @@ class _MainScreenState extends State<MainScreen> with WidgetsBindingObserver {
_rtlTcpConnectionSubscription = _rtlTcpService.connectionStream.listen((connected) { _rtlTcpConnectionSubscription = _rtlTcpService.connectionStream.listen((connected) {
if (mounted) { if (mounted) {
if (_rtlTcpEnabled) { setState(() {
setState(() { _rtlTcpConnected = connected;
_rtlTcpConnected = connected; });
}); }
} });
_audioConnectionSubscription = AudioInputService().connectionStream.listen((listening) {
if (mounted) {
setState(() {});
} }
}); });
} }
Future<void> _connectToRtlTcp(String host, String port) async { Future<void> _connectToRtlTcp(String host, String port) async {
developer.log('rtl_tcp: connect: $host:$port');
try { try {
await _rtlTcpService.connect(host: host, port: port); await _rtlTcpService.connect(host: host, port: port);
developer.log('rtl_tcp: connect_req_sent');
} catch (e) { } catch (e) {
developer.log('rtl_tcp: connect_fail: $e'); developer.log('rtl_tcp: connect_fail: $e');
} }
@@ -370,10 +419,13 @@ class _MainScreenState extends State<MainScreen> with WidgetsBindingObserver {
void dispose() { void dispose() {
_connectionSubscription?.cancel(); _connectionSubscription?.cancel();
_rtlTcpConnectionSubscription?.cancel(); _rtlTcpConnectionSubscription?.cancel();
_audioConnectionSubscription?.cancel();
_dataSubscription?.cancel(); _dataSubscription?.cancel();
_rtlTcpDataSubscription?.cancel(); _rtlTcpDataSubscription?.cancel();
_audioDataSubscription?.cancel();
_lastReceivedTimeSubscription?.cancel(); _lastReceivedTimeSubscription?.cancel();
_rtlTcpLastReceivedTimeSubscription?.cancel(); _rtlTcpLastReceivedTimeSubscription?.cancel();
_audioLastReceivedTimeSubscription?.cancel();
_settingsSubscription?.cancel(); _settingsSubscription?.cancel();
WidgetsBinding.instance.removeObserver(this); WidgetsBinding.instance.removeObserver(this);
super.dispose(); super.dispose();
@@ -391,26 +443,28 @@ class _MainScreenState extends State<MainScreen> with WidgetsBindingObserver {
await _notificationService.initialize(); await _notificationService.initialize();
_dataSubscription = _bleService.dataStream.listen((record) { _dataSubscription = _bleService.dataStream.listen((record) {
_notificationService.showTrainNotification(record); if (_inputSource == InputSource.bluetooth) {
if (_historyScreenKey.currentState != null) { _processRecord(record);
_historyScreenKey.currentState!.addNewRecord(record);
}
if (_realtimeScreenKey.currentState != null) {
_realtimeScreenKey.currentState!.addNewRecord(record);
} }
}); });
_rtlTcpDataSubscription = _rtlTcpService.dataStream.listen((record) { _rtlTcpDataSubscription = _rtlTcpService.dataStream.listen((record) {
developer.log('rtl_tcp: recv_data: train=${record.train}'); if (_inputSource == InputSource.rtlTcp) {
developer.log('rtl_tcp: recv_json: ${jsonEncode(record.toJson())}'); _processRecord(record);
_notificationService.showTrainNotification(record);
if (_historyScreenKey.currentState != null) {
_historyScreenKey.currentState!.addNewRecord(record);
}
if (_realtimeScreenKey.currentState != null) {
_realtimeScreenKey.currentState!.addNewRecord(record);
} }
}); });
_audioDataSubscription = AudioInputService().dataStream.listen((record) {
if (_inputSource == InputSource.audioInput) {
_processRecord(record);
}
});
}
void _processRecord(record) {
_notificationService.showTrainNotification(record);
_historyScreenKey.currentState?.addNewRecord(record);
_realtimeScreenKey.currentState?.addNewRecord(record);
} }
void _showConnectionDialog() { void _showConnectionDialog() {
@@ -419,10 +473,13 @@ class _MainScreenState extends State<MainScreen> with WidgetsBindingObserver {
context: context, context: context,
barrierDismissible: true, barrierDismissible: true,
builder: (context) => builder: (context) =>
_PixelPerfectBluetoothDialog(bleService: _bleService, rtlTcpEnabled: _rtlTcpEnabled), _PixelPerfectBluetoothDialog(
bleService: _bleService,
inputSource: _inputSource
),
).then((_) { ).then((_) {
_bleService.setAutoConnectBlocked(false); _bleService.setAutoConnectBlocked(false);
if (!_bleService.isManualDisconnect) { if (_inputSource == InputSource.bluetooth && !_bleService.isManualDisconnect) {
_bleService.ensureConnection(); _bleService.ensureConnection();
} }
}); });
@@ -452,6 +509,12 @@ class _MainScreenState extends State<MainScreen> with WidgetsBindingObserver {
); );
} }
final IconData statusIcon = switch (_inputSource) {
InputSource.rtlTcp => Icons.wifi,
InputSource.audioInput => Icons.mic,
InputSource.bluetooth => Icons.bluetooth,
};
return AppBar( return AppBar(
backgroundColor: AppTheme.primaryBlack, backgroundColor: AppTheme.primaryBlack,
elevation: 0, elevation: 0,
@@ -469,12 +532,13 @@ class _MainScreenState extends State<MainScreen> with WidgetsBindingObserver {
rtlTcpService: _rtlTcpService, rtlTcpService: _rtlTcpService,
lastReceivedTime: _lastReceivedTime, lastReceivedTime: _lastReceivedTime,
rtlTcpLastReceivedTime: _rtlTcpLastReceivedTime, rtlTcpLastReceivedTime: _rtlTcpLastReceivedTime,
rtlTcpEnabled: _rtlTcpEnabled, audioLastReceivedTime: _audioLastReceivedTime,
inputSource: _inputSource,
rtlTcpConnected: _rtlTcpConnected, rtlTcpConnected: _rtlTcpConnected,
), ),
IconButton( IconButton(
icon: Icon( icon: Icon(
_rtlTcpEnabled ? Icons.wifi : Icons.bluetooth, statusIcon,
color: Colors.white, color: Colors.white,
), ),
onPressed: _showConnectionDialog, onPressed: _showConnectionDialog,
@@ -562,7 +626,6 @@ class _MainScreenState extends State<MainScreen> with WidgetsBindingObserver {
SettingsScreen( SettingsScreen(
onSettingsChanged: () { onSettingsChanged: () {
_loadMapType(); _loadMapType();
_loadRtlTcpSettings();
}, },
), ),
]; ];
@@ -609,8 +672,8 @@ enum _ScanState { initial, scanning, finished }
class _PixelPerfectBluetoothDialog extends StatefulWidget { class _PixelPerfectBluetoothDialog extends StatefulWidget {
final BLEService bleService; final BLEService bleService;
final bool rtlTcpEnabled; final InputSource inputSource;
const _PixelPerfectBluetoothDialog({required this.bleService, required this.rtlTcpEnabled}); const _PixelPerfectBluetoothDialog({required this.bleService, required this.inputSource});
@override @override
State<_PixelPerfectBluetoothDialog> createState() => State<_PixelPerfectBluetoothDialog> createState() =>
_PixelPerfectBluetoothDialogState(); _PixelPerfectBluetoothDialogState();
@@ -625,6 +688,7 @@ class _PixelPerfectBluetoothDialogState
DateTime? _lastReceivedTime; DateTime? _lastReceivedTime;
StreamSubscription? _rtlTcpConnectionSubscription; StreamSubscription? _rtlTcpConnectionSubscription;
bool _rtlTcpConnected = false; bool _rtlTcpConnected = false;
@override @override
void initState() { void initState() {
super.initState(); super.initState();
@@ -640,11 +704,11 @@ class _PixelPerfectBluetoothDialogState
} }
}); });
if (widget.rtlTcpEnabled && widget.bleService.rtlTcpService != null) { if (widget.inputSource == InputSource.rtlTcp && widget.bleService.rtlTcpService != null) {
_rtlTcpConnected = widget.bleService.rtlTcpService!.isConnected; _rtlTcpConnected = widget.bleService.rtlTcpService!.isConnected;
} }
if (!widget.bleService.isConnected && !widget.rtlTcpEnabled) { if (!widget.bleService.isConnected && widget.inputSource == InputSource.bluetooth) {
_startScan(); _startScan();
} }
} }
@@ -684,31 +748,24 @@ class _PixelPerfectBluetoothDialogState
await widget.bleService.disconnect(); await widget.bleService.disconnect();
} }
void _setupLastReceivedTimeListener() {
_lastReceivedTimeSubscription =
widget.bleService.lastReceivedTimeStream.listen((timestamp) {
if (mounted) {
setState(() {
_lastReceivedTime = timestamp;
});
}
});
}
@override @override
Widget build(BuildContext context) { Widget build(BuildContext context) {
final isConnected = widget.bleService.isConnected; final (String title, Widget content) = switch (widget.inputSource) {
InputSource.rtlTcp => ('RTL-TCP 服务器', _buildRtlTcpView(context)),
InputSource.audioInput => ('音频输入', _buildAudioInputView(context)),
InputSource.bluetooth => (
'蓝牙设备',
widget.bleService.isConnected
? _buildConnectedView(context, widget.bleService.connectedDevice)
: _buildDisconnectedView(context)
),
};
return AlertDialog( return AlertDialog(
title: Text(widget.rtlTcpEnabled ? 'RTL-TCP 服务器' : '蓝牙设备'), title: Text(title),
content: SizedBox( content: SizedBox(
width: double.maxFinite, width: double.maxFinite,
child: SingleChildScrollView( child: SingleChildScrollView(child: content),
child: widget.rtlTcpEnabled
? _buildRtlTcpView(context)
: (isConnected
? _buildConnectedView(context, widget.bleService.connectedDevice)
: _buildDisconnectedView(context)),
),
), ),
actions: [ actions: [
TextButton( TextButton(
@@ -733,13 +790,6 @@ class _PixelPerfectBluetoothDialogState
Text(device?.remoteId.str ?? '', Text(device?.remoteId.str ?? '',
style: Theme.of(context).textTheme.bodySmall, style: Theme.of(context).textTheme.bodySmall,
textAlign: TextAlign.center), textAlign: TextAlign.center),
if (_lastReceivedTime != null) ...[
const SizedBox(height: 8),
_LastReceivedTimeWidget(
lastReceivedTime: _lastReceivedTime,
isConnected: widget.bleService.isConnected,
),
],
const SizedBox(height: 16), const SizedBox(height: 16),
ElevatedButton.icon( ElevatedButton.icon(
onPressed: _disconnect, onPressed: _disconnect,
@@ -785,13 +835,13 @@ class _PixelPerfectBluetoothDialogState
const SizedBox(height: 8), const SizedBox(height: 8),
Text(currentAddress, Text(currentAddress,
style: TextStyle(color: isConnected ? Colors.green : Colors.grey)), style: TextStyle(color: isConnected ? Colors.green : Colors.grey)),
const SizedBox(height: 16), ]);
if (_lastReceivedTime != null && isConnected) ...[ }
_LastReceivedTimeWidget(
lastReceivedTime: _lastReceivedTime, Widget _buildAudioInputView(BuildContext context) {
isConnected: isConnected, return Column(mainAxisSize: MainAxisSize.min, children: [
), const SizedBox(height: 8),
], const AudioWaterfallWidget(),
]); ]);
} }

View File

@@ -5,6 +5,8 @@ import 'dart:io';
import 'package:lbjconsole/models/merged_record.dart'; import 'package:lbjconsole/models/merged_record.dart';
import 'package:lbjconsole/services/database_service.dart'; import 'package:lbjconsole/services/database_service.dart';
import 'package:lbjconsole/services/background_service.dart'; import 'package:lbjconsole/services/background_service.dart';
import 'package:lbjconsole/services/audio_input_service.dart';
import 'package:lbjconsole/services/rtl_tcp_service.dart';
import 'package:lbjconsole/themes/app_theme.dart'; import 'package:lbjconsole/themes/app_theme.dart';
import 'package:url_launcher/url_launcher.dart'; import 'package:url_launcher/url_launcher.dart';
@@ -27,6 +29,8 @@ class _SettingsScreenState extends State<SettingsScreen> {
late TextEditingController _rtlTcpHostController; late TextEditingController _rtlTcpHostController;
late TextEditingController _rtlTcpPortController; late TextEditingController _rtlTcpPortController;
bool _settingsLoaded = false;
String _deviceName = ''; String _deviceName = '';
bool _backgroundServiceEnabled = false; bool _backgroundServiceEnabled = false;
bool _notificationsEnabled = true; bool _notificationsEnabled = true;
@@ -37,7 +41,9 @@ class _SettingsScreenState extends State<SettingsScreen> {
GroupBy _groupBy = GroupBy.trainAndLoco; GroupBy _groupBy = GroupBy.trainAndLoco;
TimeWindow _timeWindow = TimeWindow.unlimited; TimeWindow _timeWindow = TimeWindow.unlimited;
String _mapType = 'map'; String _mapType = 'map';
bool _rtlTcpEnabled = false;
InputSource _inputSource = InputSource.bluetooth;
String _rtlTcpHost = '127.0.0.1'; String _rtlTcpHost = '127.0.0.1';
String _rtlTcpPort = '14423'; String _rtlTcpPort = '14423';
@@ -52,131 +58,6 @@ class _SettingsScreenState extends State<SettingsScreen> {
_loadRecordCount(); _loadRecordCount();
} }
Widget _buildRtlTcpSettings() {
return Card(
color: AppTheme.tertiaryBlack,
elevation: 0,
shape: RoundedRectangleBorder(
borderRadius: BorderRadius.circular(16.0),
),
child: Padding(
padding: const EdgeInsets.all(20.0),
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Row(
children: [
Icon(Icons.wifi,
color: Theme.of(context).colorScheme.primary),
const SizedBox(width: 12),
const Text('RTL-TCP 源', style: AppTheme.titleMedium),
],
),
const SizedBox(height: 16),
Row(
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: [
const Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Text('启用 RTL-TCP 源', style: AppTheme.bodyLarge),
],
),
Switch(
value: _rtlTcpEnabled,
onChanged: (value) {
setState(() {
_rtlTcpEnabled = value;
});
_saveSettings();
},
activeThumbColor: Theme.of(context).colorScheme.primary,
),
],
),
Visibility(
visible: _rtlTcpEnabled,
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
const SizedBox(height: 16),
TextField(
decoration: InputDecoration(
labelText: '服务器地址',
hintText: '输入RTL-TCP服务器地址',
labelStyle: const TextStyle(color: Colors.white70),
hintStyle: const TextStyle(color: Colors.white54),
border: OutlineInputBorder(
borderSide: const BorderSide(color: Colors.white54),
borderRadius: BorderRadius.circular(12.0),
),
enabledBorder: OutlineInputBorder(
borderSide: const BorderSide(color: Colors.white54),
borderRadius: BorderRadius.circular(12.0),
),
focusedBorder: OutlineInputBorder(
borderSide:
BorderSide(color: Theme.of(context).colorScheme.primary),
borderRadius: BorderRadius.circular(12.0),
),
),
style: const TextStyle(color: Colors.white),
controller: _rtlTcpHostController,
onChanged: (value) {
setState(() {
_rtlTcpHost = value;
});
_saveSettings();
},
),
const SizedBox(height: 16),
TextField(
decoration: InputDecoration(
labelText: '服务器端口',
hintText: '输入RTL-TCP服务器端口',
labelStyle: const TextStyle(color: Colors.white70),
hintStyle: const TextStyle(color: Colors.white54),
border: OutlineInputBorder(
borderSide: const BorderSide(color: Colors.white54),
borderRadius: BorderRadius.circular(12.0),
),
enabledBorder: OutlineInputBorder(
borderSide: const BorderSide(color: Colors.white54),
borderRadius: BorderRadius.circular(12.0),
),
focusedBorder: OutlineInputBorder(
borderSide:
BorderSide(color: Theme.of(context).colorScheme.primary),
borderRadius: BorderRadius.circular(12.0),
),
),
style: const TextStyle(color: Colors.white),
controller: _rtlTcpPortController,
keyboardType: TextInputType.number,
onChanged: (value) {
setState(() {
_rtlTcpPort = value;
});
_saveSettings();
},
),
],
),
),
],
),
),
);
}
@override
void dispose() {
_deviceNameController.dispose();
_rtlTcpHostController.dispose();
_rtlTcpPortController.dispose();
super.dispose();
}
Future<void> _loadSettings() async { Future<void> _loadSettings() async {
final settingsMap = await _databaseService.getAllSettings() ?? {}; final settingsMap = await _databaseService.getAllSettings() ?? {};
final settings = MergeSettings.fromMap(settingsMap); final settings = MergeSettings.fromMap(settingsMap);
@@ -193,25 +74,26 @@ class _SettingsScreenState extends State<SettingsScreen> {
_groupBy = settings.groupBy; _groupBy = settings.groupBy;
_timeWindow = settings.timeWindow; _timeWindow = settings.timeWindow;
_mapType = settingsMap['mapType']?.toString() ?? 'webview'; _mapType = settingsMap['mapType']?.toString() ?? 'webview';
_rtlTcpEnabled = (settingsMap['rtlTcpEnabled'] ?? 0) == 1;
_rtlTcpHost = settingsMap['rtlTcpHost']?.toString() ?? '127.0.0.1'; _rtlTcpHost = settingsMap['rtlTcpHost']?.toString() ?? '127.0.0.1';
_rtlTcpPort = settingsMap['rtlTcpPort']?.toString() ?? '14423'; _rtlTcpPort = settingsMap['rtlTcpPort']?.toString() ?? '14423';
_rtlTcpHostController.text = _rtlTcpHost; _rtlTcpHostController.text = _rtlTcpHost;
_rtlTcpPortController.text = _rtlTcpPort; _rtlTcpPortController.text = _rtlTcpPort;
});
}
}
Future<void> _loadRecordCount() async { final sourceStr = settingsMap['inputSource'] as String? ?? 'bluetooth';
final count = await _databaseService.getRecordCount(); _inputSource = InputSource.values.firstWhere(
if (mounted) { (e) => e.name == sourceStr,
setState(() { orElse: () => InputSource.bluetooth,
_recordCount = count; );
_settingsLoaded = true;
}); });
} }
} }
Future<void> _saveSettings() async { Future<void> _saveSettings() async {
if (!_settingsLoaded) return;
await _databaseService.updateSettings({ await _databaseService.updateSettings({
'deviceName': _deviceName, 'deviceName': _deviceName,
'backgroundServiceEnabled': _backgroundServiceEnabled ? 1 : 0, 'backgroundServiceEnabled': _backgroundServiceEnabled ? 1 : 0,
@@ -222,37 +104,43 @@ class _SettingsScreenState extends State<SettingsScreen> {
'groupBy': _groupBy.name, 'groupBy': _groupBy.name,
'timeWindow': _timeWindow.name, 'timeWindow': _timeWindow.name,
'mapType': _mapType, 'mapType': _mapType,
'rtlTcpEnabled': _rtlTcpEnabled ? 1 : 0, 'inputSource': _inputSource.name,
'rtlTcpHost': _rtlTcpHost, 'rtlTcpHost': _rtlTcpHost,
'rtlTcpPort': _rtlTcpPort, 'rtlTcpPort': _rtlTcpPort,
}); });
widget.onSettingsChanged?.call(); widget.onSettingsChanged?.call();
} }
@override Future<void> _switchInputSource(InputSource newSource) async {
Widget build(BuildContext context) { await AudioInputService().stopListening();
return SingleChildScrollView( await RtlTcpService().disconnect();
padding: const EdgeInsets.all(20.0), setState(() {
child: Column( _inputSource = newSource;
crossAxisAlignment: CrossAxisAlignment.start, });
children: [
_buildBluetoothSettings(), switch (newSource) {
const SizedBox(height: 20), case InputSource.audioInput:
_buildAppSettings(), await AudioInputService().startListening();
const SizedBox(height: 20), break;
_buildRtlTcpSettings(), case InputSource.rtlTcp:
const SizedBox(height: 20), RtlTcpService().connect(host: _rtlTcpHost, port: _rtlTcpPort);
_buildMergeSettings(), break;
const SizedBox(height: 20), case InputSource.bluetooth:
_buildDataManagement(), break;
const SizedBox(height: 20), }
_buildAboutSection(),
], _saveSettings();
),
);
} }
Widget _buildBluetoothSettings() { @override
void dispose() {
_deviceNameController.dispose();
_rtlTcpHostController.dispose();
_rtlTcpPortController.dispose();
super.dispose();
}
Widget _buildInputSourceSettings() {
return Card( return Card(
color: AppTheme.tertiaryBlack, color: AppTheme.tertiaryBlack,
elevation: 0, elevation: 0,
@@ -266,48 +154,213 @@ class _SettingsScreenState extends State<SettingsScreen> {
children: [ children: [
Row( Row(
children: [ children: [
Icon(Icons.bluetooth, Icon(Icons.input, color: Theme.of(context).colorScheme.primary),
color: Theme.of(context).colorScheme.primary),
const SizedBox(width: 12), const SizedBox(width: 12),
const Text('蓝牙设备', style: AppTheme.titleMedium), const Text('信号源设置', style: AppTheme.titleMedium),
], ],
), ),
const SizedBox(height: 16), const SizedBox(height: 16),
TextField(
controller: _deviceNameController,
decoration: InputDecoration( Row(
labelText: '设备名称', mainAxisAlignment: MainAxisAlignment.spaceBetween,
hintText: '输入设备名称', children: [
labelStyle: const TextStyle(color: Colors.white70), const Text('信号源', style: AppTheme.bodyLarge),
hintStyle: const TextStyle(color: Colors.white54), DropdownButton<InputSource>(
border: OutlineInputBorder( value: _inputSource,
borderSide: const BorderSide(color: Colors.white54), items: const [
borderRadius: BorderRadius.circular(12.0), DropdownMenuItem(
value: InputSource.bluetooth,
child: Text('蓝牙设备', style: AppTheme.bodyMedium),
),
DropdownMenuItem(
value: InputSource.rtlTcp,
child: Text('RTL-TCP', style: AppTheme.bodyMedium),
),
DropdownMenuItem(
value: InputSource.audioInput,
child: Text('音频输入', style: AppTheme.bodyMedium),
),
],
onChanged: (value) {
if (value != null) {
_switchInputSource(value);
}
},
dropdownColor: AppTheme.secondaryBlack,
style: AppTheme.bodyMedium,
underline: Container(height: 0),
), ),
enabledBorder: OutlineInputBorder( ],
borderSide: const BorderSide(color: Colors.white54), ),
borderRadius: BorderRadius.circular(12.0),
if (_inputSource == InputSource.bluetooth) ...[
const SizedBox(height: 16),
TextField(
controller: _deviceNameController,
decoration: InputDecoration(
labelText: '蓝牙设备名称',
hintText: '输入设备名称',
labelStyle: const TextStyle(color: Colors.white70),
hintStyle: const TextStyle(color: Colors.white54),
border: OutlineInputBorder(
borderSide: const BorderSide(color: Colors.white54),
borderRadius: BorderRadius.circular(12.0),
),
enabledBorder: OutlineInputBorder(
borderSide: const BorderSide(color: Colors.white54),
borderRadius: BorderRadius.circular(12.0),
),
focusedBorder: OutlineInputBorder(
borderSide: BorderSide(
color: Theme.of(context).colorScheme.primary),
borderRadius: BorderRadius.circular(12.0),
),
), ),
focusedBorder: OutlineInputBorder( style: const TextStyle(color: Colors.white),
borderSide: onChanged: (value) {
BorderSide(color: Theme.of(context).colorScheme.primary), setState(() {
borderRadius: BorderRadius.circular(12.0), _deviceName = value;
});
_saveSettings();
},
),
],
if (_inputSource == InputSource.rtlTcp) ...[
const SizedBox(height: 16),
TextField(
decoration: InputDecoration(
labelText: '服务器地址',
hintText: '127.0.0.1',
labelStyle: const TextStyle(color: Colors.white70),
hintStyle: const TextStyle(color: Colors.white54),
border: OutlineInputBorder(
borderSide: const BorderSide(color: Colors.white54),
borderRadius: BorderRadius.circular(12.0),
),
enabledBorder: OutlineInputBorder(
borderSide: const BorderSide(color: Colors.white54),
borderRadius: BorderRadius.circular(12.0),
),
focusedBorder: OutlineInputBorder(
borderSide: BorderSide(
color: Theme.of(context).colorScheme.primary),
borderRadius: BorderRadius.circular(12.0),
),
),
style: const TextStyle(color: Colors.white),
controller: _rtlTcpHostController,
onChanged: (value) {
setState(() {
_rtlTcpHost = value;
});
_saveSettings();
},
),
const SizedBox(height: 16),
TextField(
decoration: InputDecoration(
labelText: '服务器端口',
hintText: '14423',
labelStyle: const TextStyle(color: Colors.white70),
hintStyle: const TextStyle(color: Colors.white54),
border: OutlineInputBorder(
borderSide: const BorderSide(color: Colors.white54),
borderRadius: BorderRadius.circular(12.0),
),
enabledBorder: OutlineInputBorder(
borderSide: const BorderSide(color: Colors.white54),
borderRadius: BorderRadius.circular(12.0),
),
focusedBorder: OutlineInputBorder(
borderSide: BorderSide(
color: Theme.of(context).colorScheme.primary),
borderRadius: BorderRadius.circular(12.0),
),
),
style: const TextStyle(color: Colors.white),
controller: _rtlTcpPortController,
keyboardType: TextInputType.number,
onChanged: (value) {
setState(() {
_rtlTcpPort = value;
});
_saveSettings();
},
),
const SizedBox(height: 16),
SizedBox(
width: double.infinity,
child: ElevatedButton.icon(
onPressed: () {
RtlTcpService()
.connect(host: _rtlTcpHost, port: _rtlTcpPort);
},
icon: const Icon(Icons.refresh),
label: const Text("重新连接 RTL-TCP"),
style: ElevatedButton.styleFrom(
backgroundColor: AppTheme.secondaryBlack,
foregroundColor: Colors.white,
padding: const EdgeInsets.symmetric(vertical: 12),
shape: RoundedRectangleBorder(
borderRadius: BorderRadius.circular(12),
),
),
),
)
],
if (_inputSource == InputSource.audioInput) ...[
const SizedBox(height: 16),
Container(
padding: const EdgeInsets.all(12),
decoration: BoxDecoration(
color: Colors.blue.withOpacity(0.1),
borderRadius: BorderRadius.circular(8),
border: Border.all(color: Colors.blue.withOpacity(0.3)),
),
child: const Row(
children: [
Icon(Icons.mic, color: Colors.blue),
SizedBox(width: 12),
Expanded(
child: Text(
'音频解调已启用。请通过音频线 (Line-in) 或麦克风输入信号。',
style: TextStyle(color: Colors.white70, fontSize: 13),
),
),
],
), ),
), ),
style: const TextStyle(color: Colors.white), ],
onChanged: (value) {
setState(() {
_deviceName = value;
});
_saveSettings();
},
),
], ],
), ),
), ),
); );
} }
@override
Widget build(BuildContext context) {
return SingleChildScrollView(
padding: const EdgeInsets.all(20.0),
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
_buildInputSourceSettings(),
const SizedBox(height: 20),
_buildAppSettings(),
const SizedBox(height: 20),
_buildMergeSettings(),
const SizedBox(height: 20),
_buildDataManagement(),
const SizedBox(height: 20),
_buildAboutSection(),
],
),
);
}
Widget _buildAppSettings() { Widget _buildAppSettings() {
return Card( return Card(
color: AppTheme.tertiaryBlack, color: AppTheme.tertiaryBlack,
@@ -490,86 +543,80 @@ class _SettingsScreenState extends State<SettingsScreen> {
crossAxisAlignment: CrossAxisAlignment.start, crossAxisAlignment: CrossAxisAlignment.start,
children: [ children: [
const SizedBox(height: 16), const SizedBox(height: 16),
const Text('分组方式', style: AppTheme.bodyLarge), Row(
const SizedBox(height: 8), mainAxisAlignment: MainAxisAlignment.spaceBetween,
DropdownButtonFormField<GroupBy>( children: [
initialValue: _groupBy, const Text('分组方式', style: AppTheme.bodyLarge),
items: const [ DropdownButton<GroupBy>(
DropdownMenuItem( value: _groupBy,
value: GroupBy.trainOnly, items: const [
child: Text('仅车次号', style: AppTheme.bodyMedium)), DropdownMenuItem(
DropdownMenuItem( value: GroupBy.trainOnly,
value: GroupBy.locoOnly, child: Text('仅车次号', style: AppTheme.bodyMedium)),
child: Text('仅机车号', style: AppTheme.bodyMedium)), DropdownMenuItem(
DropdownMenuItem( value: GroupBy.locoOnly,
value: GroupBy.trainOrLoco, child: Text('仅机车号', style: AppTheme.bodyMedium)),
child: Text('车次号或机车号', style: AppTheme.bodyMedium)), DropdownMenuItem(
DropdownMenuItem( value: GroupBy.trainOrLoco,
value: GroupBy.trainAndLoco, child: Text('车次号或机车号', style: AppTheme.bodyMedium)),
child: Text('车次号与机车号', style: AppTheme.bodyMedium)), DropdownMenuItem(
], value: GroupBy.trainAndLoco,
onChanged: (value) { child: Text('车次号与机车号', style: AppTheme.bodyMedium)),
if (value != null) { ],
setState(() { onChanged: (value) {
_groupBy = value; if (value != null) {
}); setState(() {
_saveSettings(); _groupBy = value;
} });
}, _saveSettings();
decoration: InputDecoration( }
filled: true, },
fillColor: AppTheme.secondaryBlack, dropdownColor: AppTheme.secondaryBlack,
border: OutlineInputBorder( style: AppTheme.bodyMedium,
borderRadius: BorderRadius.circular(12.0), underline: Container(height: 0),
borderSide: BorderSide.none,
), ),
), ],
dropdownColor: AppTheme.secondaryBlack,
style: AppTheme.bodyMedium,
), ),
const SizedBox(height: 16), const SizedBox(height: 16),
const Text('时间窗口', style: AppTheme.bodyLarge), Row(
const SizedBox(height: 8), mainAxisAlignment: MainAxisAlignment.spaceBetween,
DropdownButtonFormField<TimeWindow>( children: [
initialValue: _timeWindow, const Text('时间窗口', style: AppTheme.bodyLarge),
items: const [ DropdownButton<TimeWindow>(
DropdownMenuItem( value: _timeWindow,
value: TimeWindow.oneHour, items: const [
child: Text('1小时内', style: AppTheme.bodyMedium)), DropdownMenuItem(
DropdownMenuItem( value: TimeWindow.oneHour,
value: TimeWindow.twoHours, child: Text('1小时内', style: AppTheme.bodyMedium)),
child: Text('2小时内', style: AppTheme.bodyMedium)), DropdownMenuItem(
DropdownMenuItem( value: TimeWindow.twoHours,
value: TimeWindow.sixHours, child: Text('2小时内', style: AppTheme.bodyMedium)),
child: Text('6小时内', style: AppTheme.bodyMedium)), DropdownMenuItem(
DropdownMenuItem( value: TimeWindow.sixHours,
value: TimeWindow.twelveHours, child: Text('6小时内', style: AppTheme.bodyMedium)),
child: Text('12小时内', style: AppTheme.bodyMedium)), DropdownMenuItem(
DropdownMenuItem( value: TimeWindow.twelveHours,
value: TimeWindow.oneDay, child: Text('12小时内', style: AppTheme.bodyMedium)),
child: Text('24小时内', style: AppTheme.bodyMedium)), DropdownMenuItem(
DropdownMenuItem( value: TimeWindow.oneDay,
value: TimeWindow.unlimited, child: Text('24小时内', style: AppTheme.bodyMedium)),
child: Text('不限时间', style: AppTheme.bodyMedium)), DropdownMenuItem(
], value: TimeWindow.unlimited,
onChanged: (value) { child: Text('不限时间', style: AppTheme.bodyMedium)),
if (value != null) { ],
setState(() { onChanged: (value) {
_timeWindow = value; if (value != null) {
}); setState(() {
_saveSettings(); _timeWindow = value;
} });
}, _saveSettings();
decoration: InputDecoration( }
filled: true, },
fillColor: AppTheme.secondaryBlack, dropdownColor: AppTheme.secondaryBlack,
border: OutlineInputBorder( style: AppTheme.bodyMedium,
borderRadius: BorderRadius.circular(12.0), underline: Container(height: 0),
borderSide: BorderSide.none,
), ),
), ],
dropdownColor: AppTheme.secondaryBlack,
style: AppTheme.bodyMedium,
), ),
const SizedBox(height: 16), const SizedBox(height: 16),
Row( Row(
@@ -626,7 +673,7 @@ class _SettingsScreenState extends State<SettingsScreen> {
_buildActionButton( _buildActionButton(
icon: Icons.share, icon: Icons.share,
title: '分享数据', title: '分享数据',
subtitle: '将记录分享为JSON文件', subtitle: '将记录分享为 JSON 文件',
onTap: _shareData, onTap: _shareData,
), ),
const SizedBox(height: 12), const SizedBox(height: 12),
@@ -712,15 +759,13 @@ class _SettingsScreenState extends State<SettingsScreen> {
); );
} }
String _formatFileSize(int bytes) { Future<void> _loadRecordCount() async {
if (bytes < 1024) return '$bytes B'; final count = await _databaseService.getRecordCount();
if (bytes < 1024 * 1024) return '${(bytes / 1024).toStringAsFixed(1)} KB'; if (mounted) {
return '${(bytes / 1024 / 1024).toStringAsFixed(1)} MB'; setState(() {
} _recordCount = count;
});
String _formatDateTime(DateTime dateTime) { }
return '${dateTime.year}-${dateTime.month.toString().padLeft(2, '0')}-${dateTime.day.toString().padLeft(2, '0')} '
'${dateTime.hour.toString().padLeft(2, '0')}:${dateTime.minute.toString().padLeft(2, '0')}';
} }
Future<String> _getAppVersion() async { Future<String> _getAppVersion() async {
@@ -910,6 +955,7 @@ class _SettingsScreenState extends State<SettingsScreen> {
'mergeRecordsEnabled': 0, 'mergeRecordsEnabled': 0,
'groupBy': 'trainAndLoco', 'groupBy': 'trainAndLoco',
'timeWindow': 'unlimited', 'timeWindow': 'unlimited',
'inputSource': 'bluetooth',
}); });
Navigator.pop(context); Navigator.pop(context);

View File

@@ -0,0 +1,351 @@
import 'dart:async';
import 'dart:math';
import 'package:flutter/services.dart';
import 'package:permission_handler/permission_handler.dart';
import 'dart:developer' as developer;
import 'package:gbk_codec/gbk_codec.dart';
import 'package:lbjconsole/models/train_record.dart';
import 'package:lbjconsole/services/database_service.dart';
// POCSAG addresses whose frames carry the parts of one LBJ warning
// message; presumably the fixed addresses used by the train approach
// warning system — TODO confirm against the native audio decoder.
const String _lbjInfoAddr = "1234000"; // train number / speed / km position
const String _lbjInfo2Addr = "1234002"; // class, loco, route, GPS position
const String _lbjSyncAddr = "1234008"; // time synchronization frame

// POCSAG function codes interpreted as travel direction; fed into
// _LbJState.direction by updateFromRaw and emitted in toTrainRecordJson.
const int _functionDown = 1;
const int _functionUp = 3;
/// Mutable accumulator for one LBJ warning message that arrives split
/// across several POCSAG frames (info / info2 / sync addresses).
///
/// Field defaults double as "not yet received" sentinels; the sentinels
/// are stripped again in [toTrainRecordJson] before the record is emitted.
class _LbJState {
  String train = "<NUL>";
  // 1 = down, 3 = up (see _functionDown / _functionUp); -1 = unknown.
  int direction = -1;
  String speed = "NUL";
  String positionKm = " <NUL>";
  String time = "<NUL>";
  String lbjClass = "NA";
  String loco = "<NUL>";
  String route = "********";
  String posLonDeg = "";
  String posLonMin = "";
  String posLatDeg = "";
  String posLatMin = "";
  // info2 payload re-coded to hex digits via _recodeBCD.
  String _info2Hex = "";

  /// Restores every field to its "not received" sentinel value.
  void reset() {
    train = "<NUL>";
    direction = -1;
    speed = "NUL";
    positionKm = " <NUL>";
    time = "<NUL>";
    lbjClass = "NA";
    loco = "<NUL>";
    route = "********";
    posLonDeg = "";
    posLonMin = "";
    posLatDeg = "";
    posLatMin = "";
    _info2Hex = "";
  }

  /// Maps the decoder's BCD placeholder characters back to the hex digits
  /// A-F so the payload string can be parsed as hexadecimal.
  String _recodeBCD(String numericStr) {
    return numericStr
        .replaceAll('.', 'A')
        .replaceAll('U', 'B')
        .replaceAll(' ', 'C')
        .replaceAll('-', 'D')
        .replaceAll(')', 'E')
        .replaceAll('(', 'F');
  }

  /// Parses two hex digit characters as one byte value; returns 0 when the
  /// pair is not valid hexadecimal.
  int _hexToChar(String hex1, String hex2) {
    final String hex = "$hex1$hex2";
    return int.tryParse(hex, radix: 16) ?? 0;
  }

  /// Decodes GBK-encoded bytes to a Dart string, dropping NUL padding.
  /// Returns "" when the byte sequence cannot be decoded.
  String _gbkToUtf8(List<int> gbkBytes) {
    try {
      final validBytes = gbkBytes.where((b) => b != 0).toList();
      return gbk.decode(validBytes);
    } catch (e) {
      return "";
    }
  }

  /// Folds one decoded POCSAG frame into the accumulated state.
  ///
  /// [addr] selects which message part the frame carries, [func] is the
  /// POCSAG function code (direction), and [numeric] is the raw numeric
  /// payload string produced by the decoder.
  void updateFromRaw(String addr, int func, String numeric) {
    if (func == _functionDown || func == _functionUp) {
      direction = func;
    }
    switch (addr) {
      case _lbjInfoAddr:
        // Payload layout: "<train> <speed> <position>", whitespace-separated.
        final RegExp infoRegex = RegExp(r'^\s*(\S+)\s+(\S+)\s+(\S+)');
        final match = infoRegex.firstMatch(numeric);
        if (match != null) {
          train = match.group(1) ?? "<NUL>";
          speed = match.group(2) ?? "NUL";
          String pos = match.group(3)?.trim() ?? "";
          if (pos.isEmpty) {
            positionKm = " <NUL>";
          } else if (pos.length > 1) {
            // Last digit is the decimal (tenths of a km).
            positionKm =
                "${pos.substring(0, pos.length - 1)}.${pos.substring(pos.length - 1)}";
          } else {
            positionKm = "0.$pos";
          }
        }
        break;
      case _lbjInfo2Addr:
        String buffer = numeric;
        // Incomplete info2 payloads are ignored entirely.
        if (buffer.length < 50) return;
        _info2Hex = _recodeBCD(buffer);
        // Bytes 0-1 of the hex payload: train class, printable ASCII only
        // (quotes and commas excluded as well).
        if (_info2Hex.length >= 4) {
          try {
            List<int> classBytes = [
              _hexToChar(_info2Hex[0], _info2Hex[1]),
              _hexToChar(_info2Hex[2], _info2Hex[3]),
            ];
            lbjClass = String.fromCharCodes(classBytes
                .where((b) => b > 0x1F && b < 0x7F && b != 0x22 && b != 0x2C));
          } catch (e) {}
        }
        // Digits 4-11 of the raw (non-recoded) payload: locomotive number.
        if (buffer.length >= 12) loco = buffer.substring(4, 12);
        // Route name: up to 8 GBK bytes gathered from three hex spans;
        // missing spans stay zero and are filtered out by _gbkToUtf8.
        List<int> routeBytes = List<int>.filled(17, 0);
        if (_info2Hex.length >= 18) {
          try {
            routeBytes[0] = _hexToChar(_info2Hex[14], _info2Hex[15]);
            routeBytes[1] = _hexToChar(_info2Hex[16], _info2Hex[17]);
          } catch (e) {}
        }
        if (_info2Hex.length >= 22) {
          try {
            routeBytes[2] = _hexToChar(_info2Hex[18], _info2Hex[19]);
            routeBytes[3] = _hexToChar(_info2Hex[20], _info2Hex[21]);
          } catch (e) {}
        }
        if (_info2Hex.length >= 30) {
          try {
            routeBytes[4] = _hexToChar(_info2Hex[22], _info2Hex[23]);
            routeBytes[5] = _hexToChar(_info2Hex[24], _info2Hex[25]);
            routeBytes[6] = _hexToChar(_info2Hex[26], _info2Hex[27]);
            routeBytes[7] = _hexToChar(_info2Hex[28], _info2Hex[29]);
          } catch (e) {}
        }
        route = _gbkToUtf8(routeBytes);
        // Digits 30-46 of the raw payload: GPS position as
        // degrees + minutes (minutes carry 4 decimal places).
        if (buffer.length >= 39) {
          posLonDeg = buffer.substring(30, 33);
          posLonMin = "${buffer.substring(33, 35)}.${buffer.substring(35, 39)}";
        }
        if (buffer.length >= 47) {
          posLatDeg = buffer.substring(39, 41);
          posLatMin = "${buffer.substring(41, 43)}.${buffer.substring(43, 47)}";
        }
        break;
      case _lbjSyncAddr:
        // Sync frame digits 1-4 carry HHMM.
        if (numeric.length >= 5) {
          time = "${numeric.substring(1, 3)}:${numeric.substring(3, 5)}";
        }
        break;
    }
  }

  /// Serializes the accumulated state to the JSON map consumed by
  /// TrainRecord.fromJson, stripping all "not received" sentinels.
  Map<String, dynamic> toTrainRecordJson() {
    final now = DateTime.now();
    // Assemble "lat° lon°" display string from whichever halves arrived.
    String gpsPosition = "";
    if (posLatDeg.isNotEmpty && posLatMin.isNotEmpty) {
      gpsPosition = "$posLatDeg°$posLatMin";
    }
    if (posLonDeg.isNotEmpty && posLonMin.isNotEmpty) {
      gpsPosition +=
          "${gpsPosition.isEmpty ? "" : " "}$posLonDeg°$posLonMin";
    }
    String kmPosition = positionKm.replaceAll(' <NUL>', '');
    final jsonData = {
      // Millisecond timestamp plus a random suffix keeps ids unique even
      // for records created within the same millisecond.
      'uniqueId': '${now.millisecondsSinceEpoch}_${Random().nextInt(9999)}',
      'receivedTimestamp': now.millisecondsSinceEpoch,
      'timestamp': now.millisecondsSinceEpoch,
      'rssi': 0.0, // audio path has no signal-strength reading
      'train': train.replaceAll('<NUL>', ''),
      'loco': loco.replaceAll('<NUL>', ''),
      'speed': speed.replaceAll('NUL', ''),
      'position': kmPosition,
      'positionInfo': gpsPosition,
      'route': route.replaceAll('********', ''),
      'lbjClass': lbjClass.replaceAll('NA', ''),
      'time': time.replaceAll('<NUL>', ''),
      'direction': (direction == 1 || direction == 3) ? direction : 0,
      'locoType': "",
    };
    return jsonData;
  }
}
/// Singleton service that receives LBJ messages demodulated from audio
/// input by the native side.
///
/// Raw POCSAG frames arrive over a platform [EventChannel]; this class
/// deduplicates them, folds them into a [_LbJState], and publishes the
/// resulting [TrainRecord]s plus listening/connection status on broadcast
/// streams. Start/stop of the native capture goes over a [MethodChannel].
class AudioInputService {
  static final AudioInputService _instance = AudioInputService._internal();
  factory AudioInputService() => _instance;
  AudioInputService._internal();

  static const _methodChannel = MethodChannel('org.noxylva.lbjconsole/audio_input');
  static const _eventChannel = EventChannel('org.noxylva.lbjconsole/audio_input_event');

  final StreamController<String> _statusController =
      StreamController<String>.broadcast();
  final StreamController<TrainRecord> _dataController =
      StreamController<TrainRecord>.broadcast();
  final StreamController<bool> _connectionController =
      StreamController<bool>.broadcast();
  final StreamController<DateTime?> _lastReceivedTimeController =
      StreamController<DateTime?>.broadcast();

  /// Human-readable listening status (e.g. "监听中", "已停止").
  Stream<String> get statusStream => _statusController.stream;

  /// Parsed warning records, one per info/info2 frame.
  Stream<TrainRecord> get dataStream => _dataController.stream;

  /// True while audio demodulation is active.
  Stream<bool> get connectionStream => _connectionController.stream;

  /// Timestamp of the most recent frame, or null when idle/stopped.
  Stream<DateTime?> get lastReceivedTimeStream => _lastReceivedTimeController.stream;

  bool _isListening = false;
  DateTime? _lastReceivedTime;
  StreamSubscription? _eventChannelSubscription;
  final _LbJState _state = _LbJState();
  // Dedup key "addr|func|numeric" of the previous frame; repeated
  // identical frames are dropped.
  String _lastRawMessage = "";

  bool get isListening => _isListening;
  DateTime? get lastReceivedTime => _lastReceivedTime;

  /// Single place where the listening state flips; publishes the new state
  /// on all status streams. No-op when the state is unchanged.
  void _updateListeningState(bool listening, String status) {
    if (_isListening == listening) return;
    _isListening = listening;
    if (!listening) {
      _lastReceivedTime = null;
      _state.reset();
      // Forget the dedup key, otherwise a message identical to the last
      // one before stop would be wrongly dropped after a restart.
      _lastRawMessage = "";
    }
    _statusController.add(status);
    _connectionController.add(listening);
    _lastReceivedTimeController.add(_lastReceivedTime);
  }

  /// (Re)subscribes to the native event channel and wires incoming frames
  /// into the state machine.
  void _listenToEventChannel() {
    if (_eventChannelSubscription != null) {
      _eventChannelSubscription?.cancel();
    }
    _eventChannelSubscription = _eventChannel.receiveBroadcastStream().listen(
      (dynamic event) {
        try {
          final map = event as Map;
          // Native side reports its own listening state transitions.
          if (map.containsKey('listening')) {
            final listening = map['listening'] as bool? ?? false;
            if (_isListening != listening) {
              _updateListeningState(listening, listening ? "监听中" : "已停止");
            }
          }
          if (map.containsKey('address')) {
            final addr = map['address'] as String;
            // Only the three LBJ addresses are of interest.
            if (addr != _lbjInfoAddr &&
                addr != _lbjInfo2Addr &&
                addr != _lbjSyncAddr) {
              return;
            }
            final func = int.tryParse(map['func'] as String? ?? '-1') ?? -1;
            final numeric = map['numeric'] as String;
            final String currentRawMessage = "$addr|$func|$numeric";
            // Drop exact repeats of the previous frame.
            if (currentRawMessage == _lastRawMessage) {
              return;
            }
            _lastRawMessage = currentRawMessage;
            developer.log('Audio-RAW: $currentRawMessage', name: 'AudioInput');
            // Receiving data implies the native side is listening.
            if (!_isListening) {
              _updateListeningState(true, "监听中");
            }
            _lastReceivedTime = DateTime.now();
            _lastReceivedTimeController.add(_lastReceivedTime);
            _state.updateFromRaw(addr, func, numeric);
            // Sync frames only update the clock; records are emitted for
            // info/info2 frames.
            if (addr == _lbjInfoAddr || addr == _lbjInfo2Addr) {
              final jsonData = _state.toTrainRecordJson();
              final trainRecord = TrainRecord.fromJson(jsonData);
              _dataController.add(trainRecord);
              DatabaseService.instance.insertRecord(trainRecord);
            }
          }
        } catch (e, s) {
          developer.log('Audio StateMachine Error: $e',
              name: 'AudioInput', error: e, stackTrace: s);
        }
      },
      onError: (dynamic error) {
        _updateListeningState(false, "数据通道错误");
        _eventChannelSubscription?.cancel();
        _eventChannelSubscription = null;
      },
    );
  }

  /// Requests microphone permission and starts the native audio capture.
  /// Returns true on success; false when permission is denied or the
  /// native start fails.
  Future<bool> startListening() async {
    if (_isListening) return true;
    var status = await Permission.microphone.status;
    if (!status.isGranted) {
      status = await Permission.microphone.request();
      if (!status.isGranted) {
        developer.log('Microphone permission denied', name: 'AudioInput');
        return false;
      }
    }
    try {
      _listenToEventChannel();
      await _methodChannel.invokeMethod('start');
      _updateListeningState(true, "监听中");
      developer.log('Audio input started', name: 'AudioInput');
      return true;
    } on PlatformException catch (e) {
      // The native start failed: tear down the event-channel subscription
      // opened above, otherwise it leaks and may flip state later.
      await _eventChannelSubscription?.cancel();
      _eventChannelSubscription = null;
      developer.log('Failed to start audio input: ${e.message}', name: 'AudioInput');
      return false;
    }
  }

  /// Stops the native audio capture and resets the accumulated state.
  /// On a native-side failure the state is left unchanged and the error
  /// is only logged (best effort, matching previous behavior).
  Future<void> stopListening() async {
    if (!_isListening) return;
    try {
      await _methodChannel.invokeMethod('stop');
      _updateListeningState(false, "已停止");
      developer.log('Audio input stopped', name: 'AudioInput');
    } catch (e) {
      developer.log('Error stopping audio input: $e', name: 'AudioInput');
    }
  }

  /// Releases the channel subscription and closes all broadcast streams.
  /// The singleton is unusable afterwards.
  void dispose() {
    _eventChannelSubscription?.cancel();
    _statusController.close();
    _dataController.close();
    _connectionController.close();
    _lastReceivedTimeController.close();
  }
}

View File

@@ -4,16 +4,23 @@ import 'package:path/path.dart';
import 'package:path_provider/path_provider.dart'; import 'package:path_provider/path_provider.dart';
import 'dart:io'; import 'dart:io';
import 'dart:convert'; import 'dart:convert';
import 'dart:developer' as developer;
import 'package:lbjconsole/models/train_record.dart'; import 'package:lbjconsole/models/train_record.dart';
/// Where warning messages are received from: a BLE device, an RTL-TCP
/// server, or the on-device audio demodulator. Persisted in app settings
/// by name (see the `inputSource` settings column).
enum InputSource {
  bluetooth,
  rtlTcp,
  audioInput
}
class DatabaseService { class DatabaseService {
static final DatabaseService instance = DatabaseService._internal(); static final DatabaseService instance = DatabaseService._internal();
factory DatabaseService() => instance; factory DatabaseService() => instance;
DatabaseService._internal(); DatabaseService._internal();
static const String _databaseName = 'train_database'; static const String _databaseName = 'train_database';
static const _databaseVersion = 8; static const _databaseVersion = 9;
static const String trainRecordsTable = 'train_records'; static const String trainRecordsTable = 'train_records';
static const String appSettingsTable = 'app_settings'; static const String appSettingsTable = 'app_settings';
@@ -63,6 +70,8 @@ class DatabaseService {
} }
Future<void> _onUpgrade(Database db, int oldVersion, int newVersion) async { Future<void> _onUpgrade(Database db, int oldVersion, int newVersion) async {
developer.log('Database upgrading from $oldVersion to $newVersion', name: 'Database');
if (oldVersion < 2) { if (oldVersion < 2) {
await db.execute( await db.execute(
'ALTER TABLE $appSettingsTable ADD COLUMN hideTimeOnlyRecords INTEGER NOT NULL DEFAULT 0'); 'ALTER TABLE $appSettingsTable ADD COLUMN hideTimeOnlyRecords INTEGER NOT NULL DEFAULT 0');
@@ -97,6 +106,27 @@ class DatabaseService {
await db.execute( await db.execute(
'ALTER TABLE $appSettingsTable ADD COLUMN rtlTcpPort TEXT NOT NULL DEFAULT "14423"'); 'ALTER TABLE $appSettingsTable ADD COLUMN rtlTcpPort TEXT NOT NULL DEFAULT "14423"');
} }
if (oldVersion < 9) {
await db.execute(
'ALTER TABLE $appSettingsTable ADD COLUMN inputSource TEXT NOT NULL DEFAULT "bluetooth"');
try {
final List<Map<String, dynamic>> results = await db.query(appSettingsTable, columns: ['rtlTcpEnabled'], where: 'id = 1');
if (results.isNotEmpty) {
final int rtlTcpEnabled = results.first['rtlTcpEnabled'] as int? ?? 0;
if (rtlTcpEnabled == 1) {
await db.update(
appSettingsTable,
{'inputSource': 'rtlTcp'},
where: 'id = 1'
);
developer.log('Migrated V8 settings: inputSource set to rtlTcp', name: 'Database');
}
}
} catch (e) {
developer.log('Migration V8->V9 data update failed: $e', name: 'Database');
}
}
} }
Future<void> _onCreate(Database db, int version) async { Future<void> _onCreate(Database db, int version) async {
@@ -150,7 +180,8 @@ class DatabaseService {
mapSettingsTimestamp INTEGER, mapSettingsTimestamp INTEGER,
rtlTcpEnabled INTEGER NOT NULL DEFAULT 0, rtlTcpEnabled INTEGER NOT NULL DEFAULT 0,
rtlTcpHost TEXT NOT NULL DEFAULT '127.0.0.1', rtlTcpHost TEXT NOT NULL DEFAULT '127.0.0.1',
rtlTcpPort TEXT NOT NULL DEFAULT '14423' rtlTcpPort TEXT NOT NULL DEFAULT '14423',
inputSource TEXT NOT NULL DEFAULT 'bluetooth'
) )
'''); ''');
@@ -177,11 +208,12 @@ class DatabaseService {
'groupBy': 'trainAndLoco', 'groupBy': 'trainAndLoco',
'timeWindow': 'unlimited', 'timeWindow': 'unlimited',
'mapTimeFilter': 'unlimited', 'mapTimeFilter': 'unlimited',
'hideUngroupableRecords': 0, 'hideUngroupableRecords': 0,
'mapSettingsTimestamp': null, 'mapSettingsTimestamp': null,
'rtlTcpEnabled': 0, 'rtlTcpEnabled': 0,
'rtlTcpHost': '127.0.0.1', 'rtlTcpHost': '127.0.0.1',
'rtlTcpPort': '14423', 'rtlTcpPort': '14423',
'inputSource': 'bluetooth',
}); });
} }
@@ -309,7 +341,10 @@ class DatabaseService {
where: 'id = 1', where: 'id = 1',
); );
if (result > 0) { if (result > 0) {
_notifySettingsChanged(settings); final currentSettings = await getAllSettings();
if (currentSettings != null) {
_notifySettingsChanged(currentSettings);
}
} }
return result; return result;
} }
@@ -409,11 +444,9 @@ class DatabaseService {
StreamSubscription<void> onSettingsChanged( StreamSubscription<void> onSettingsChanged(
Function(Map<String, dynamic>) listener) { Function(Map<String, dynamic>) listener) {
_settingsListeners.add(listener); _settingsListeners.add(listener);
return Stream.value(null).listen((_) {}) return _SettingsListenerSubscription(() {
..onData((_) {}) _settingsListeners.remove(listener);
..onDone(() { });
_settingsListeners.remove(listener);
});
} }
void _notifySettingsChanged(Map<String, dynamic> settings) { void _notifySettingsChanged(Map<String, dynamic> settings) {
@@ -499,3 +532,39 @@ class DatabaseService {
} }
} }
} }
/// Minimal [StreamSubscription] stand-in whose only job is to run a cleanup
/// callback (listener removal) exactly once when [cancel] is called.
///
/// The settings-change mechanism is a plain callback list rather than a real
/// stream, so every pause/resume/handler hook is a deliberate no-op.
class _SettingsListenerSubscription implements StreamSubscription<void> {
  /// Invoked at most once, on the first [cancel]; unregisters the listener.
  final void Function() _onCancel;

  /// Guards against double-unregistration on repeated [cancel] calls.
  bool _isCanceled = false;

  _SettingsListenerSubscription(this._onCancel);

  @override
  Future<void> cancel() async {
    if (_isCanceled) return;
    _isCanceled = true;
    _onCancel();
  }

  // A callback-list "subscription" is never paused.
  @override
  bool get isPaused => false;

  // Handler setters are ignored: no events ever flow through this object.
  @override
  void onData(void Function(void data)? handleData) {}

  @override
  void onDone(void Function()? handleDone) {}

  @override
  void onError(Function? handleError) {}

  @override
  void pause([Future<void>? resumeSignal]) {}

  @override
  void resume() {}

  @override
  Future<E> asFuture<E>([E? futureValue]) => Future.value(futureValue);
}

View File

@@ -64,8 +64,13 @@ class _LbJState {
String _gbkToUtf8(List<int> gbkBytes) { String _gbkToUtf8(List<int> gbkBytes) {
try { try {
final validBytes = gbkBytes.where((b) => b != 0).toList(); final validBytes = gbkBytes.where((b) => b != 0).toList();
return gbk.decode(validBytes); print('gbkBytes: ${validBytes.map((b) => b.toRadixString(16).padLeft(2, '0')).join(',')}');
final result = gbk_bytes.decode(validBytes);
print('gbk decoded: $result');
return result;
} catch (e) { } catch (e) {
print('gbk decode error: $e');
return ""; return "";
} }
} }
@@ -99,6 +104,8 @@ class _LbJState {
String buffer = numeric; String buffer = numeric;
if (buffer.length < 50) return; if (buffer.length < 50) return;
_info2Hex = _recodeBCD(buffer); _info2Hex = _recodeBCD(buffer);
print('info2 raw: $buffer');
print('info2 hex: $_info2Hex');
if (_info2Hex.length >= 4) { if (_info2Hex.length >= 4) {
try { try {

View File

@@ -0,0 +1,234 @@
import 'dart:async';
import 'dart:typed_data';
import 'dart:ui' as ui;
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
/// Process-wide cache holding the waterfall pixel buffer and the
/// intensity-to-ARGB lookup table, shared across widget rebuilds so the
/// scrolling history survives navigation.
class _WaterfallCache {
  _WaterfallCache._();
  static final _WaterfallCache instance = _WaterfallCache._();

  static const int waterfallWidth = 500;
  static const int waterfallHeight = 500;

  /// Packed ARGB rows (row 0 is the newest). Null until [initialize] runs.
  Uint32List? pixels;

  /// 256-entry intensity -> packed-ARGB table built by [initialize].
  late List<int> colorLUT;

  /// Allocates the pixel buffer and builds the color table on first use;
  /// subsequent calls are no-ops.
  void initialize() {
    if (pixels != null) return;
    final buffer = Uint32List(waterfallWidth * waterfallHeight);
    // Start fully painted with the darkest gradient color (deep blue).
    for (var i = 0; i < buffer.length; i++) {
      buffer[i] = 0xFF000033;
    }
    pixels = buffer;
    _buildColorLUT();
  }

  /// Precomputes one packed-ARGB value per 8-bit intensity level.
  void _buildColorLUT() {
    colorLUT = List<int>.generate(256, (i) {
      final c = _intensityToColor(i / 255.0);
      return (c.alpha << 24) | (c.red << 16) | (c.green << 8) | c.blue;
    }, growable: false);
  }

  /// Maps a normalized intensity onto a five-segment gradient:
  /// navy -> blue -> cyan -> green -> yellow -> red, one segment per 0.2.
  Color _intensityToColor(double intensity) {
    const stops = [
      Color(0xFF000033),
      Color(0xFF0000FF),
      Color(0xFF00FFFF),
      Color(0xFF00FF00),
      Color(0xFFFFFF00),
      Color(0xFFFF0000),
    ];
    const thresholds = [0.2, 0.4, 0.6, 0.8];
    const lows = [0.0, 0.2, 0.4, 0.6, 0.8];
    var segment = 4;
    for (var k = 0; k < thresholds.length; k++) {
      if (intensity < thresholds[k]) {
        segment = k;
        break;
      }
    }
    final t = (intensity - lows[segment]) / 0.2;
    return Color.lerp(stops[segment], stops[segment + 1], t)!;
  }
}
/// Realtime audio view: a live spectrum trace stacked above a scrolling
/// waterfall, fed by the `org.noxylva.lbjconsole/audio_input` channel.
class AudioWaterfallWidget extends StatefulWidget {
  const AudioWaterfallWidget({super.key});

  @override
  State<AudioWaterfallWidget> createState() {
    return _AudioWaterfallWidgetState();
  }
}
class _AudioWaterfallWidgetState extends State<AudioWaterfallWidget> {
  static const platform = MethodChannel('org.noxylva.lbjconsole/audio_input');

  final _cache = _WaterfallCache.instance;
  ui.Image? _waterfallImage;
  List<double> _currentSpectrum = [];
  Timer? _updateTimer;
  // Set when new pixel data arrives while an image decode is still in
  // flight; triggers exactly one follow-up rebuild when the decode ends.
  bool _imageNeedsUpdate = false;
  // Prevents overlapping decodeImageFromPixels calls: the poll timer fires
  // every 20 ms, which can be faster than a decode completes.
  bool _isRebuildingImage = false;

  @override
  void initState() {
    super.initState();
    _cache.initialize();
    _startUpdating();
  }

  @override
  void dispose() {
    _updateTimer?.cancel();
    _waterfallImage?.dispose();
    super.dispose();
  }

  /// Polls the native side for FFT data every 20 ms, scrolls the waterfall
  /// down one row, and writes the newest spectrum into row 0 via the LUT.
  void _startUpdating() {
    _updateTimer =
        Timer.periodic(const Duration(milliseconds: 20), (timer) async {
      try {
        final result = await platform.invokeMethod('getSpectrum');
        if (result != null && result is List && mounted) {
          final fftData = result.cast<double>();
          final pixels = _cache.pixels!;
          // Shift every row down by one; List.setRange is well defined for
          // overlapping ranges within the same list.
          pixels.setRange(
              _WaterfallCache.waterfallWidth, pixels.length, pixels, 0);
          for (int i = 0;
              i < _WaterfallCache.waterfallWidth && i < fftData.length;
              i++) {
            final intensity = (fftData[i].clamp(0.0, 1.0) * 255).toInt();
            pixels[i] = _cache.colorLUT[intensity];
          }
          _currentSpectrum = fftData;
          _rebuildImage();
        }
      } catch (e) {
        // Best effort: the platform channel may not be ready yet — skip
        // this frame and try again on the next tick.
      }
    });
  }

  /// Converts the shared pixel buffer into a [ui.Image].
  ///
  /// Coalesces bursts: if a decode is already running, the buffer is only
  /// marked dirty and the running call schedules a single follow-up
  /// rebuild, so decodes never pile up behind the 20 ms timer.
  Future<void> _rebuildImage() async {
    if (_isRebuildingImage) {
      _imageNeedsUpdate = true;
      return;
    }
    _isRebuildingImage = true;
    try {
      final completer = Completer<ui.Image>();
      ui.decodeImageFromPixels(
        // Uint32 ARGB on a little-endian host lays out bytes as B,G,R,A.
        _cache.pixels!.buffer.asUint8List(),
        _WaterfallCache.waterfallWidth,
        _WaterfallCache.waterfallHeight,
        ui.PixelFormat.bgra8888,
        (image) => completer.complete(image),
      );
      final newImage = await completer.future;
      if (mounted) {
        setState(() {
          _waterfallImage?.dispose();
          _waterfallImage = newImage;
        });
      } else {
        newImage.dispose();
      }
    } finally {
      _isRebuildingImage = false;
    }
    // A frame arrived mid-decode: rebuild once more with the latest pixels.
    if (_imageNeedsUpdate && mounted) {
      _imageNeedsUpdate = false;
      _rebuildImage();
    }
  }

  @override
  Widget build(BuildContext context) {
    return Column(
      mainAxisSize: MainAxisSize.min,
      children: [
        Container(
          height: 80,
          width: double.infinity,
          decoration: BoxDecoration(
            color: Colors.black,
            border: Border(
              left: BorderSide(color: Colors.cyan.withOpacity(0.3), width: 2),
              right: BorderSide(color: Colors.cyan.withOpacity(0.3), width: 2),
              top: BorderSide(color: Colors.cyan.withOpacity(0.3), width: 2),
            ),
          ),
          child: _currentSpectrum.isEmpty
              ? const Center(
                  child: CircularProgressIndicator(
                      color: Colors.cyan, strokeWidth: 2))
              : CustomPaint(painter: _SpectrumPainter(_currentSpectrum)),
        ),
        Container(
          height: 100,
          width: double.infinity,
          decoration: BoxDecoration(
            color: Colors.black,
            border: Border(
              left: BorderSide(color: Colors.cyan.withOpacity(0.3), width: 2),
              right: BorderSide(color: Colors.cyan.withOpacity(0.3), width: 2),
              bottom: BorderSide(color: Colors.cyan.withOpacity(0.3), width: 2),
            ),
          ),
          child: _waterfallImage == null
              ? const Center(
                  child: CircularProgressIndicator(color: Colors.cyan))
              : CustomPaint(painter: _WaterfallImagePainter(_waterfallImage!)),
        ),
      ],
    );
  }
}
/// Draws the latest FFT frame as a thin cyan polyline; higher intensity
/// means a point closer to the top of the canvas.
class _SpectrumPainter extends CustomPainter {
  final List<double> spectrum;

  _SpectrumPainter(this.spectrum);

  @override
  void paint(Canvas canvas, Size size) {
    if (spectrum.isEmpty) return;
    final stroke = Paint()
      ..color = Colors.cyan
      ..strokeWidth = 0.5
      ..style = PaintingStyle.stroke
      ..isAntiAlias = true
      ..strokeCap = StrokeCap.round
      ..strokeJoin = StrokeJoin.round;
    final step = size.width / spectrum.length;
    final trace = Path()
      ..moveTo(0, size.height - spectrum[0].clamp(0.0, 1.0) * size.height);
    for (var i = 1; i < spectrum.length; i++) {
      trace.lineTo(
          i * step, size.height - spectrum[i].clamp(0.0, 1.0) * size.height);
    }
    canvas.drawPath(trace, stroke);
  }

  // The spectrum changes on every poll tick, so always repaint.
  @override
  bool shouldRepaint(_SpectrumPainter old) => true;
}
/// Blits the cached waterfall image stretched to fill the widget, using
/// nearest-neighbour sampling so individual spectral rows stay crisp.
class _WaterfallImagePainter extends CustomPainter {
  final ui.Image image;

  _WaterfallImagePainter(this.image);

  @override
  void paint(Canvas canvas, Size size) {
    final src =
        Rect.fromLTWH(0, 0, image.width.toDouble(), image.height.toDouble());
    final dst = Rect.fromLTWH(0, 0, size.width, size.height);
    final blit = Paint()..filterQuality = FilterQuality.none;
    canvas.drawImageRect(image, src, dst, blit);
  }

  // Repaint only when a new decoded frame is handed in.
  @override
  bool shouldRepaint(_WaterfallImagePainter old) => old.image != image;
}

View File

@@ -16,7 +16,7 @@ publish_to: "none" # Remove this line if you wish to publish to pub.dev
# https://developer.apple.com/library/archive/documentation/General/Reference/InfoPlistKeyReference/Articles/CoreFoundationKeys.html # https://developer.apple.com/library/archive/documentation/General/Reference/InfoPlistKeyReference/Articles/CoreFoundationKeys.html
# In Windows, build-name is used as the major, minor, and patch parts # In Windows, build-name is used as the major, minor, and patch parts
# of the product and file versions while build-number is used as the build suffix. # of the product and file versions while build-number is used as the build suffix.
version: 0.8.1-flutter+81 version: 0.9.0-flutter+90
environment: environment:
sdk: ^3.5.4 sdk: ^3.5.4