feat: add audio demodulation and FFT processing, enhance audio input handling and visualization

This commit is contained in:
Nedifinita
2025-12-12 00:47:09 +08:00
parent 8507029499
commit ec9d9a389e
9 changed files with 566 additions and 16 deletions

View File

@@ -6,6 +6,10 @@ project("railwaypagerdemod")
# Native demodulation library loaded by the Android app via JNI.
add_library(${CMAKE_PROJECT_NAME} SHARED
demod.cpp
demod.h
# Audio-line input: PCM slicing / bit recovery for POCSAG.
audio_demod.cpp
audio_demod.h
# Streaming FFT used for the spectrum/waterfall display.
audio_fft.cpp
audio_fft.h
native-lib.cpp
${CMAKE_CURRENT_SOURCE_DIR}/dsp/firfilter.cpp
)

View File

@@ -0,0 +1,106 @@
#include "audio_demod.h"
#include "demod.h"
#include <android/log.h>
#include <cmath>
#include <cstring>
#define LOG_TAG "AudioDemod"
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)
// Thresholds on the baud-clock drift (watch_ctr - atb_ctr) used to nudge
// the recovered clock period.  NOTE(review): see processAudioSamples -
// the HI branch appears unreachable inside the `< 1.0` guard; confirm.
#define FINE_CLKT_HI 1.90
#define FINE_CLKT_LO 0.32
#define SAMPLE_RATE 48000
#define BAUD_RATE 1200
// Free-running per-sample counter: the time base of the software baud clock.
static double watch_ctr = 0.0;
// Next bit-sampling instant; advanced by clkt each time a bit is emitted.
static double atb_ctr = 0.0;
// Recovered baud-clock period in samples (nominal 40 = 48000/1200).
static double clkt = (double)SAMPLE_RATE / BAUD_RATE;
// Previous sliced bit, for level-crossing (edge) detection.
static int last_value = 0;
// Count of bit-length edges observed; evidence of a POCSAG preamble.
static int preamble_count = 0;
// Bits emitted downstream since preamble detection (drives the timeout).
static int pocbit = 0;
static bool preamble_detected = false;
// Edges seen since the last emitted bit; nonzero enables clock tuning.
static int crossing_count = 0;
// Samples elapsed since the previous edge (current run length).
static int nSamples = 0;
// Slicing threshold on the 0..255 rescaled audio (midpoint of the range).
static const int AUDIO_THRESHOLD = 128;
// Restore every piece of demodulator state to its power-on default so a
// new capture session starts from a clean slate.
void resetAudioDemod() {
    preamble_detected = false;
    preamble_count = 0;
    crossing_count = 0;
    pocbit = 0;
    last_value = 0;
    nSamples = 0;
    watch_ctr = 0.0;
    atb_ctr = 0.0;
    clkt = (double)SAMPLE_RATE / BAUD_RATE;
    LOGD("Audio demodulator reset");
}
// Feed a buffer of signed 16-bit PCM samples through the slicer / POCSAG
// bit-recovery chain.  Operates on the file-level static state above, so
// it is stateful across calls and not thread-safe.
//
// Per sample: rescale to 0..255, slice against AUDIO_THRESHOLD, detect
// level crossings, and run a software baud clock that emits one bit
// downstream roughly every clkt samples (~40 at 48 kHz / 1200 baud).
void processAudioSamples(int16_t *samples, int size) {
for (int i = 0; i < size; i++) {
// Map [-32768, 32767] to roughly [0, 255], then clamp defensively.
int audio_value = (samples[i] + 32768) / 256;
if (audio_value < 0) audio_value = 0;
if (audio_value > 255) audio_value = 255;
int current_bit = (audio_value > AUDIO_THRESHOLD) ? 1 : 0;
if (current_bit != last_value) {
crossing_count++;
// Runs of 29..43 samples bracket one nominal bit (40 samples), so
// such edges look like the 1010... POCSAG preamble pattern.
if ((nSamples > 28) && (nSamples < 44)) {
preamble_count++;
// Declare the preamble after >50 bit-length edges and start
// counting emitted bits toward the sync timeout.
if (preamble_count > 50 && !preamble_detected) {
preamble_detected = true;
pocbit = 0;
LOGD("Preamble detected! crossings=%d samples=%d", preamble_count, nSamples);
}
}
nSamples = 0;
}
nSamples++;
last_value = current_bit;
watch_ctr += 1.0;
// Baud clock: sample one bit when the free-running counter catches up
// to the next scheduled sampling instant (atb_ctr).
if (watch_ctr - atb_ctr < 1.0) {
int bit = current_bit;
if (preamble_detected) {
// NOTE(review): a 0/1 int is passed here, while the JNI path feeds
// processBasebandSample a double scaled to [-1, 1] - confirm the
// expected input range of processBasebandSample.
processBasebandSample(bit);
pocbit++;
// Give up if no message completed within 1250 bits of the preamble.
if (pocbit > 1250) {
LOGD("POCSAG timeout - no sync after 1250 bits");
preamble_detected = false;
preamble_count = 0;
pocbit = 0;
}
}
// Clock recovery: nudge clkt by 0.01 samples/bit, clamped to +/-5%.
// NOTE(review): offset is computed inside the `< 1.0` branch, so
// `offset > FINE_CLKT_HI` (1.90) looks unreachable - verify intent.
if (crossing_count > 0) {
double offset = watch_ctr - atb_ctr;
if (offset > FINE_CLKT_HI) {
clkt -= 0.01;
if (clkt < (SAMPLE_RATE / BAUD_RATE) * 0.95) {
clkt = (SAMPLE_RATE / BAUD_RATE) * 0.95;
}
} else if (offset < FINE_CLKT_LO) {
clkt += 0.01;
if (clkt > (SAMPLE_RATE / BAUD_RATE) * 1.05) {
clkt = (SAMPLE_RATE / BAUD_RATE) * 1.05;
}
}
crossing_count = 0;
}
// Schedule the next bit-sampling instant.
atb_ctr += clkt;
}
}
}

View File

@@ -0,0 +1,10 @@
#ifndef AUDIO_DEMOD_H
#define AUDIO_DEMOD_H
#include <cstdint>
/// Feed signed 16-bit PCM samples into the POCSAG audio demodulator.
/// Uses file-level static state in audio_demod.cpp; not thread-safe.
void processAudioSamples(int16_t *samples, int size);
/// Reset all demodulator state to power-on defaults.
void resetAudioDemod();
#endif

View File

@@ -0,0 +1,117 @@
#include "audio_fft.h"
#include <cmath>
#include <algorithm>
#include <android/log.h>
#define LOG_TAG "AudioFFT"
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)
#ifndef M_PI
#define M_PI 3.14159265358979323846
#endif
// Construct a processor for blocks of `fftSize` samples, allocating all
// working buffers up front and precomputing the Hamming window once.
AudioFFT::AudioFFT(int fftSize)
    : fftSize_(fftSize),
      inputBuffer_(fftSize, 0.0f),
      windowBuffer_(fftSize, 0.0f),
      realPart_(fftSize, 0.0f),
      imagPart_(fftSize, 0.0f),
      magnitude_(fftSize, 0.0f),
      bufferPos_(0) {
    // Hamming window: w[n] = 0.54 - 0.46 * cos(2*pi*n / (N - 1)).
    for (int n = 0; n < fftSize_; n++) {
        windowBuffer_[n] = 0.54f - 0.46f * std::cos(2.0f * M_PI * n / (fftSize_ - 1));
    }
    LOGD("AudioFFT initialized with size %d", fftSize_);
}
// Nothing to release explicitly: all buffers are std::vector members.
AudioFFT::~AudioFFT() {
}
// Accumulate samples (normalized to [-1, 1)) into the internal buffer and
// run one FFT each time it fills, then start refilling from the front.
void AudioFFT::processSamples(const int16_t* samples, int size) {
    for (int n = 0; n < size; n++) {
        inputBuffer_[bufferPos_++] = samples[n] / 32768.0f;
        if (bufferPos_ >= fftSize_) {
            computeFFT();
            bufferPos_ = 0;
        }
    }
}
// Load the FFT working buffers: windowed input into the real part, and a
// cleared imaginary part.
void AudioFFT::applyWindow() {
    std::fill(imagPart_.begin(), imagPart_.end(), 0.0f);
    for (int n = 0; n < fftSize_; n++) {
        realPart_[n] = inputBuffer_[n] * windowBuffer_[n];
    }
}
// In-place iterative radix-2 Cooley-Tukey FFT over realPart_/imagPart_,
// followed by a magnitude pass into magnitude_.
// NOTE(review): assumes fftSize_ is a power of two; neither this method
// nor the constructor validates it - confirm all call sites.
void AudioFFT::computeFFT() {
applyWindow();
int n = fftSize_;
// Bit-reversal permutation so the butterfly stages can run in place.
int j = 0;
for (int i = 0; i < n - 1; i++) {
if (i < j) {
std::swap(realPart_[i], realPart_[j]);
std::swap(imagPart_[i], imagPart_[j]);
}
int k = n / 2;
while (k <= j) {
j -= k;
k /= 2;
}
j += k;
}
// Butterfly stages: sub-transform length doubles each pass; `wlen` is the
// per-stage twiddle-factor increment.
for (int len = 2; len <= n; len *= 2) {
float angle = -2.0f * M_PI / len;
float wlenReal = std::cos(angle);
float wlenImag = std::sin(angle);
for (int i = 0; i < n; i += len) {
float wReal = 1.0f;
float wImag = 0.0f;
for (int k = 0; k < len / 2; k++) {
int idx1 = i + k;
int idx2 = i + k + len / 2;
// t = w * x[idx2] (complex multiply).
float tReal = wReal * realPart_[idx2] - wImag * imagPart_[idx2];
float tImag = wReal * imagPart_[idx2] + wImag * realPart_[idx2];
realPart_[idx2] = realPart_[idx1] - tReal;
imagPart_[idx2] = imagPart_[idx1] - tImag;
realPart_[idx1] += tReal;
imagPart_[idx1] += tImag;
// w *= wlen (advance the twiddle factor).
float wTempReal = wReal * wlenReal - wImag * wlenImag;
wImag = wReal * wlenImag + wImag * wlenReal;
wReal = wTempReal;
}
}
}
// Magnitude of the full (two-sided) spectrum.
for (int i = 0; i < fftSize_; i++) {
float real = realPart_[i];
float imag = imagPart_[i];
magnitude_[i] = std::sqrt(real * real + imag * imag);
}
}
// Export the lower (first) half of the magnitude spectrum as values in
// [0, 1]: each bin is converted to dB and mapped linearly from
// [-80 dB, 0 dB] onto [0, 1], clamped at both ends.
//
// @param output      destination buffer holding at least outputSize floats
// @param outputSize  number of bins requested; only min(outputSize,
//                    fftSize_/2) meaningful bins exist
void AudioFFT::getSpectrum(float* output, int outputSize) {
    const int copySize = std::min(outputSize, fftSize_ / 2);
    for (int i = 0; i < copySize; i++) {
        float mag = magnitude_[i];
        if (mag < 1e-10f) mag = 1e-10f;  // floor avoids log10(0) -> -inf
        const float db = 20.0f * std::log10(mag);
        const float normalized = (db + 80.0f) / 80.0f;
        output[i] = std::max(0.0f, std::min(1.0f, normalized));
    }
    // BUG FIX: if the caller requests more bins than fftSize_/2, the tail
    // of `output` was previously left uninitialized; zero it explicitly.
    std::fill(output + copySize, output + outputSize, 0.0f);
}

View File

@@ -0,0 +1,29 @@
#ifndef AUDIO_FFT_H
#define AUDIO_FFT_H
#include <cstdint>
#include <vector>
/// Streaming FFT helper: accumulates 16-bit PCM samples into an internal
/// buffer and, each time fftSize samples are collected, computes a
/// Hamming-windowed radix-2 FFT whose magnitudes can be read back as a
/// normalized spectrum.  Not thread-safe; callers provide their own lock.
class AudioFFT {
public:
/// @param fftSize transform length; assumed to be a power of two
///        (not validated - TODO confirm at call sites).
AudioFFT(int fftSize = 256);
~AudioFFT();
/// Append samples; triggers one FFT per filled buffer.
void processSamples(const int16_t* samples, int size);
/// Copy up to fftSize/2 magnitude bins, mapped to [0, 1], into output.
void getSpectrum(float* output, int outputSize);
int getFFTSize() const { return fftSize_; }
private:
void computeFFT();
void applyWindow();
int fftSize_;                      // transform length
std::vector<float> inputBuffer_;   // raw samples normalized to [-1, 1)
std::vector<float> windowBuffer_;  // precomputed Hamming window
std::vector<float> realPart_;      // FFT working buffer (real)
std::vector<float> imagPart_;      // FFT working buffer (imaginary)
std::vector<float> magnitude_;     // |X[k]| from the last transform
int bufferPos_;                    // fill position within inputBuffer_
};
#endif

View File

@@ -15,6 +15,8 @@
#include <android/log.h>
#include <errno.h>
#include "demod.h"
#include "audio_demod.h"
#include "audio_fft.h"
#define BUF_SIZE 8192
@@ -26,6 +28,8 @@ static std::mutex msgMutex;
static std::vector<std::string> messageBuffer;
static std::mutex demodDataMutex;
static std::mutex fftMutex;
static AudioFFT* audioFFT = nullptr;
static JavaVM *g_vm = nullptr;
static jobject g_obj = nullptr;
@@ -95,20 +99,36 @@ Java_org_noxylva_lbjconsole_flutter_AudioInputHandler_nativePushAudio(
jshort *samples = env->GetShortArrayElements(audioData, NULL);
{
std::lock_guard<std::mutex> fftLock(fftMutex);
if (!audioFFT) {
audioFFT = new AudioFFT(4096);
}
audioFFT->processSamples(samples, size);
}
std::lock_guard<std::mutex> demodLock(demodDataMutex);
for (int i = 0; i < size; i++) {
double sample = (double)samples[i] / 32768.0;
processBasebandSample(sample);
}
env->ReleaseShortArrayElements(audioData, samples, 0);
processAudioSamples(samples, size);
if (is_message_ready) {
std::ostringstream ss;
std::lock_guard<std::mutex> msgLock(msgMutex);
std::string message_content = alpha_msg.empty() ? numeric_msg : alpha_msg;
std::string message_content;
if (function_bits == 3) {
message_content = alpha_msg;
} else {
message_content = numeric_msg;
}
if (message_content.empty()) {
message_content = alpha_msg.empty() ? numeric_msg : alpha_msg;
}
__android_log_print(ANDROID_LOG_DEBUG, "AUDIO",
"msg_ready: addr=%u func=%d alpha_len=%zu numeric_len=%zu",
address, function_bits, alpha_msg.length(), numeric_msg.length());
ss << "[MSG]" << address << "|" << function_bits << "|" << message_content;
messageBuffer.push_back(ss.str());
@@ -116,6 +136,8 @@ Java_org_noxylva_lbjconsole_flutter_AudioInputHandler_nativePushAudio(
numeric_msg.clear();
alpha_msg.clear();
}
env->ReleaseShortArrayElements(audioData, samples, 0);
}
extern "C" JNIEXPORT jdouble JNICALL
@@ -135,6 +157,32 @@ Java_org_noxylva_lbjconsole_flutter_AudioInputHandler_clearMessageBuffer(JNIEnv
alpha_msg.clear();
}
// JNI entry point: snapshot the latest FFT magnitudes as a fixed 500-bin
// float array normalized to [0, 1] for the Flutter waterfall widget.
// Returns an empty array until the first audio buffer has been pushed.
extern "C" JNIEXPORT jfloatArray JNICALL
Java_org_noxylva_lbjconsole_flutter_AudioInputHandler_getAudioSpectrum(JNIEnv *env, jobject)
{
    std::lock_guard<std::mutex> fftLock(fftMutex);
    if (!audioFFT) {
        return env->NewFloatArray(0);
    }
    int spectrumSize = audioFFT->getFFTSize() / 2;
    std::vector<float> spectrum(spectrumSize);
    audioFFT->getSpectrum(spectrum.data(), spectrumSize);
    // Decimate to a fixed bin count for the UI.
    // NOTE(review): plain decimation can skip narrow peaks; a per-bin max
    // would be more faithful - confirm before changing the visuals.
    const int outputBins = 500;
    std::vector<float> downsampled(outputBins);
    for (int i = 0; i < outputBins; i++) {
        int srcIdx = (i * spectrumSize) / outputBins;
        downsampled[i] = spectrum[srcIdx];
    }
    jfloatArray result = env->NewFloatArray(outputBins);
    // BUG FIX: NewFloatArray returns nullptr (with a pending
    // OutOfMemoryError) on allocation failure; calling SetFloatArrayRegion
    // on a null array would crash the process.
    if (result != nullptr) {
        env->SetFloatArrayRegion(result, 0, outputBins, downsampled.data());
    }
    return result;
}
extern "C" JNIEXPORT jbyteArray JNICALL
Java_org_noxylva_lbjconsole_flutter_AudioInputHandler_pollMessages(JNIEnv *env, jobject)
{

View File

@@ -53,6 +53,7 @@ class AudioInputHandler(private val context: Context) : MethodChannel.MethodCall
private external fun nativePushAudio(data: ShortArray, size: Int)
private external fun pollMessages(): ByteArray
private external fun clearMessageBuffer()
private external fun getAudioSpectrum(): FloatArray
override fun onMethodCall(call: MethodCall, result: MethodChannel.Result) {
when (call.method) {
@@ -69,6 +70,14 @@ class AudioInputHandler(private val context: Context) : MethodChannel.MethodCall
clearMessageBuffer()
result.success(null)
}
"getSpectrum" -> {
try {
val spectrum = getAudioSpectrum()
result.success(spectrum.toList())
} catch (e: Exception) {
result.error("FFT_ERROR", "Failed to get spectrum", e.message)
}
}
else -> result.notImplemented()
}
}

View File

@@ -15,6 +15,7 @@ import 'package:lbjconsole/services/background_service.dart';
import 'package:lbjconsole/services/rtl_tcp_service.dart';
import 'package:lbjconsole/services/audio_input_service.dart';
import 'package:lbjconsole/themes/app_theme.dart';
import 'package:lbjconsole/widgets/audio_waterfall_widget.dart';
class _ConnectionStatusWidget extends StatefulWidget {
final BLEService bleService;
@@ -839,16 +840,8 @@ class _PixelPerfectBluetoothDialogState
Widget _buildAudioInputView(BuildContext context) {
return Column(mainAxisSize: MainAxisSize.min, children: [
const Icon(Icons.mic, size: 48, color: Colors.blue),
const SizedBox(height: 16),
Text('监听中',
style: Theme.of(context)
.textTheme
.titleMedium
?.copyWith(fontWeight: FontWeight.bold)),
const SizedBox(height: 8),
const Text("请使用音频线连接设备",
style: TextStyle(color: Colors.grey)),
const AudioWaterfallWidget(),
]);
}

View File

@@ -0,0 +1,234 @@
import 'dart:async';
import 'dart:typed_data';
import 'dart:ui' as ui;
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
/// Process-wide pixel buffer and color lookup table for the waterfall.
/// Kept as a singleton so the scroll history survives widget rebuilds.
class _WaterfallCache {
  static final _WaterfallCache instance = _WaterfallCache._();
  _WaterfallCache._();

  static const int waterfallWidth = 500;
  static const int waterfallHeight = 500;

  /// ARGB pixels, row 0 first; null until [initialize] runs.
  Uint32List? pixels;

  /// Maps an intensity byte (0..255) to a packed ARGB color.
  late List<int> colorLUT;

  /// Allocates the pixel buffer (dark-blue background) and builds the
  /// color LUT exactly once; later calls are no-ops.
  void initialize() {
    if (pixels != null) return;
    final buf = Uint32List(waterfallWidth * waterfallHeight);
    buf.fillRange(0, buf.length, 0xFF000033);
    pixels = buf;
    _buildColorLUT();
  }

  void _buildColorLUT() {
    colorLUT = List<int>.generate(256, (i) {
      final c = _intensityToColor(i / 255.0);
      return (c.alpha << 24) | (c.red << 16) | (c.green << 8) | c.blue;
    });
  }

  /// Classic "jet"-style gradient: dark blue -> blue -> cyan -> green ->
  /// yellow -> red across five equal intensity segments.
  Color _intensityToColor(double intensity) {
    if (intensity < 0.2) {
      return Color.lerp(
          const Color(0xFF000033), const Color(0xFF0000FF), intensity / 0.2)!;
    }
    if (intensity < 0.4) {
      return Color.lerp(const Color(0xFF0000FF), const Color(0xFF00FFFF),
          (intensity - 0.2) / 0.2)!;
    }
    if (intensity < 0.6) {
      return Color.lerp(const Color(0xFF00FFFF), const Color(0xFF00FF00),
          (intensity - 0.4) / 0.2)!;
    }
    if (intensity < 0.8) {
      return Color.lerp(const Color(0xFF00FF00), const Color(0xFFFFFF00),
          (intensity - 0.6) / 0.2)!;
    }
    return Color.lerp(const Color(0xFFFFFF00), const Color(0xFFFF0000),
        (intensity - 0.8) / 0.2)!;
  }
}
/// Real-time audio spectrum trace plus scrolling waterfall, fed by the
/// native FFT over the `org.noxylva.lbjconsole/audio_input` channel.
class AudioWaterfallWidget extends StatefulWidget {
const AudioWaterfallWidget({super.key});
@override
State<AudioWaterfallWidget> createState() => _AudioWaterfallWidgetState();
}
class _AudioWaterfallWidgetState extends State<AudioWaterfallWidget> {
  static const platform = MethodChannel('org.noxylva.lbjconsole/audio_input');

  final _cache = _WaterfallCache.instance;
  ui.Image? _waterfallImage;
  List<double> _currentSpectrum = [];
  Timer? _updateTimer;
  // True while an async image rebuild is in flight.  Prevents concurrent
  // decodeImageFromPixels calls from piling up when the 20 ms poll ticks
  // faster than the decoder can finish.
  bool _rebuildInProgress = false;

  @override
  void initState() {
    super.initState();
    _cache.initialize();
    _startUpdating();
  }

  @override
  void dispose() {
    _updateTimer?.cancel();
    _waterfallImage?.dispose();
    super.dispose();
  }

  /// Polls the native side for a new spectrum every 20 ms, scrolls the
  /// waterfall down one row, and writes the newest row at the top.
  void _startUpdating() {
    _updateTimer =
        Timer.periodic(const Duration(milliseconds: 20), (timer) async {
      try {
        final result = await platform.invokeMethod('getSpectrum');
        if (result != null && result is List && mounted) {
          final fftData = result.cast<double>();
          final pixels = _cache.pixels!;
          // Shift every row down by one (row 0 is the newest).
          pixels.setRange(
              _WaterfallCache.waterfallWidth, pixels.length, pixels, 0);
          for (int i = 0;
              i < _WaterfallCache.waterfallWidth && i < fftData.length;
              i++) {
            final intensity = (fftData[i].clamp(0.0, 1.0) * 255).toInt();
            pixels[i] = _cache.colorLUT[intensity];
          }
          _currentSpectrum = fftData;
          // BUG FIX: the original set `_imageNeedsUpdate = true` and then
          // immediately tested it (always true), launching an unawaited
          // rebuild on every tick.  Skip this tick instead if the previous
          // rebuild is still running.
          if (!_rebuildInProgress) {
            _rebuildImage();
          }
        }
      } catch (e) {
        // Best-effort polling: the native side may not be ready yet.
      }
    });
  }

  /// Converts the shared pixel buffer into a ui.Image and swaps it into
  /// the widget via setState.  Guarded by [_rebuildInProgress].
  Future<void> _rebuildImage() async {
    _rebuildInProgress = true;
    try {
      final completer = Completer<ui.Image>();
      ui.decodeImageFromPixels(
        _cache.pixels!.buffer.asUint8List(),
        _WaterfallCache.waterfallWidth,
        _WaterfallCache.waterfallHeight,
        ui.PixelFormat.bgra8888,
        (image) => completer.complete(image),
      );
      final newImage = await completer.future;
      if (mounted) {
        setState(() {
          _waterfallImage?.dispose();
          _waterfallImage = newImage;
        });
      } else {
        newImage.dispose();
      }
    } finally {
      _rebuildInProgress = false;
    }
  }

  @override
  Widget build(BuildContext context) {
    return Column(
      mainAxisSize: MainAxisSize.min,
      children: [
        // Live spectrum trace on top.
        Container(
          height: 80,
          width: double.infinity,
          decoration: BoxDecoration(
            color: Colors.black,
            border: Border(
              left: BorderSide(color: Colors.cyan.withOpacity(0.3), width: 2),
              right: BorderSide(color: Colors.cyan.withOpacity(0.3), width: 2),
              top: BorderSide(color: Colors.cyan.withOpacity(0.3), width: 2),
            ),
          ),
          child: _currentSpectrum.isEmpty
              ? const Center(
                  child: CircularProgressIndicator(
                      color: Colors.cyan, strokeWidth: 2))
              : CustomPaint(painter: _SpectrumPainter(_currentSpectrum)),
        ),
        // Scrolling waterfall history below.
        Container(
          height: 100,
          width: double.infinity,
          decoration: BoxDecoration(
            color: Colors.black,
            border: Border(
              left: BorderSide(color: Colors.cyan.withOpacity(0.3), width: 2),
              right: BorderSide(color: Colors.cyan.withOpacity(0.3), width: 2),
              bottom: BorderSide(color: Colors.cyan.withOpacity(0.3), width: 2),
            ),
          ),
          child: _waterfallImage == null
              ? const Center(
                  child: CircularProgressIndicator(color: Colors.cyan))
              : CustomPaint(painter: _WaterfallImagePainter(_waterfallImage!)),
        ),
      ],
    );
  }
}
/// Draws the instantaneous spectrum as a thin cyan polyline, one point per
/// bin, with 0 intensity at the bottom and 1.0 at the top.
class _SpectrumPainter extends CustomPainter {
  final List<double> spectrum;
  _SpectrumPainter(this.spectrum);

  @override
  void paint(Canvas canvas, Size size) {
    if (spectrum.isEmpty) return;
    final stroke = Paint()
      ..color = Colors.cyan
      ..strokeWidth = 0.5
      ..style = PaintingStyle.stroke
      ..isAntiAlias = true
      ..strokeCap = StrokeCap.round
      ..strokeJoin = StrokeJoin.round;
    final step = size.width / spectrum.length;
    double heightFor(double v) => size.height - (v.clamp(0.0, 1.0) * size.height);
    final trace = Path()..moveTo(0, heightFor(spectrum[0]));
    for (int i = 1; i < spectrum.length; i++) {
      trace.lineTo(i * step, heightFor(spectrum[i]));
    }
    canvas.drawPath(trace, stroke);
  }

  // The spectrum list changes every frame, so always repaint.
  @override
  bool shouldRepaint(_SpectrumPainter old) => true;
}
/// Stretches the cached waterfall bitmap to fill the available area.
class _WaterfallImagePainter extends CustomPainter {
  final ui.Image image;
  _WaterfallImagePainter(this.image);

  @override
  void paint(Canvas canvas, Size size) {
    final src =
        Rect.fromLTWH(0, 0, image.width.toDouble(), image.height.toDouble());
    final dst = Rect.fromLTWH(0, 0, size.width, size.height);
    // FilterQuality.none keeps hard pixel edges when scaling the bitmap.
    final flags = Paint()..filterQuality = FilterQuality.none;
    canvas.drawImageRect(image, src, dst, flags);
  }

  @override
  bool shouldRepaint(_WaterfallImagePainter old) => old.image != image;
}