feat: refactor input source handling and add audio input service

This commit is contained in:
Nedifinita
2025-12-05 17:01:42 +08:00
parent 7772112658
commit 99bc081583
13 changed files with 838 additions and 522 deletions

View File

@@ -1,5 +1,8 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools">
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
<uses-permission android:name="android.permission.BLUETOOTH"/>
<uses-permission android:name="android.permission.BLUETOOTH_ADMIN"/>
<uses-permission android:name="android.permission.BLUETOOTH_CONNECT" android:usesPermissionFlags="neverForLocation"/>
@@ -31,10 +34,6 @@
android:configChanges="orientation|keyboardHidden|keyboard|screenSize|smallestScreenSize|locale|layoutDirection|fontScale|screenLayout|density|uiMode"
android:hardwareAccelerated="true"
android:windowSoftInputMode="adjustResize">
<!-- Specifies an Android theme to apply to this Activity as soon as
the Android process has started. This theme is visible to the user
while the Flutter UI initializes. After that, this theme continues
to determine the Window background behind the Flutter UI. -->
<meta-data
android:name="io.flutter.embedding.android.NormalTheme"
android:resource="@style/NormalTheme"
@@ -44,13 +43,10 @@
<category android:name="android.intent.category.LAUNCHER"/>
</intent-filter>
</activity>
<!-- Don't delete the meta-data below.
This is used by the Flutter tool to generate GeneratedPluginRegistrant.java -->
<meta-data
android:name="flutterEmbedding"
android:value="2" />
<!-- Foreground service configuration -->
<service
android:name="id.flutter.flutter_background_service.BackgroundService"
android:foregroundServiceType="connectedDevice|dataSync"
@@ -59,15 +55,10 @@
android:enabled="true"
tools:replace="android:exported"/>
</application>
<!-- Required to query activities that can process text, see:
https://developer.android.com/training/package-visibility and
https://developer.android.com/reference/android/content/Intent#ACTION_PROCESS_TEXT.
In particular, this is used by the Flutter engine in io.flutter.plugin.text.ProcessTextPlugin. -->
<queries>
<intent>
<action android:name="android.intent.action.PROCESS_TEXT"/>
<data android:mimeType="text/plain"/>
</intent>
</queries>
</manifest>
</manifest>

View File

@@ -14,17 +14,29 @@ uint32_t bits;
uint32_t code_words[PAGERDEMOD_BATCH_WORDS];
bool code_words_bch_error[PAGERDEMOD_BATCH_WORDS];
static bool hysteresis_state = false;
static bool dsp_initialized = false;
std::string numeric_msg, alpha_msg;
int function_bits;
uint32_t address;
uint32_t alpha_bit_buffer; // Bit buffer to 7-bit chars spread across codewords
int alpha_bit_buffer_bits; // Count of bits in alpha_bit_buffer
int parity_errors; // Count of parity errors in current message
int bch_errors; // Count of BCH errors in current message
int batch_num; // Count of batches in current transmission
uint32_t alpha_bit_buffer;
int alpha_bit_buffer_bits;
int parity_errors;
int bch_errors;
int batch_num;
double magsqRaw;
// Lazily performs the one-time DSP setup shared by the RTL-TCP and audio
// input paths: the baseband low-pass filter and the FM discriminator
// scaling. Safe to call repeatedly; only the first call does any work.
void ensureDSPInitialized() {
    if (!dsp_initialized) {
        lowpassBaud.create(301, SAMPLE_RATE, BAUD_RATE * 5.0f);
        phaseDiscri.setFMScaling(SAMPLE_RATE / (2.0f * DEVIATION));
        dsp_initialized = true;
    }
}
int pop_cnt(uint32_t cw)
{
int cnt = 0;
@@ -39,10 +51,9 @@ int pop_cnt(uint32_t cw)
uint32_t bchEncode(const uint32_t cw)
{
uint32_t bit = 0;
uint32_t localCW = cw & 0xFFFFF800; // Mask off BCH parity and even parity bits
uint32_t localCW = cw & 0xFFFFF800;
uint32_t cwE = localCW;
// Calculate BCH bits
for (bit = 1; bit <= 21; bit++)
{
if (cwE & 0x80000000)
@@ -56,38 +67,28 @@ uint32_t bchEncode(const uint32_t cw)
return localCW;
}
// Use BCH decoding to try to fix any bit errors
// Returns true if decoding/repair was successful
// See: https://www.eevblog.com/forum/microcontrollers/practical-guides-to-bch-fec/
bool bchDecode(const uint32_t cw, uint32_t &correctedCW)
{
// Calculate syndrome
// We do this by recalculating the BCH parity bits and XORing them against the received ones
uint32_t syndrome = ((bchEncode(cw) ^ cw) >> 1) & 0x3FF;
if (syndrome == 0)
{
// Syndrome of zero indicates no repair required
correctedCW = cw;
return true;
}
// Meggitt decoder
uint32_t result = 0;
uint32_t damagedCW = cw;
// Calculate BCH bits
for (uint32_t xbit = 0; xbit < 31; xbit++)
{
// Produce the next corrected bit in the high bit of the result
result <<= 1;
if ((syndrome == 0x3B4) || // 0x3B4: Syndrome when a single error is detected in the MSB
(syndrome == 0x26E) || // 0x26E: Two adjacent errors
(syndrome == 0x359) || // 0x359: Two errors, one OK bit between
(syndrome == 0x076) || // 0x076: Two errors, two OK bits between
(syndrome == 0x255) || // 0x255: Two errors, three OK bits between
(syndrome == 0x0F0) || // 0x0F0: Two errors, four OK bits between
if ((syndrome == 0x3B4) ||
(syndrome == 0x26E) ||
(syndrome == 0x359) ||
(syndrome == 0x076) ||
(syndrome == 0x255) ||
(syndrome == 0x0F0) ||
(syndrome == 0x216) ||
(syndrome == 0x365) ||
(syndrome == 0x068) ||
@@ -114,36 +115,29 @@ bool bchDecode(const uint32_t cw, uint32_t &correctedCW)
(syndrome == 0x3B6) ||
(syndrome == 0x3B5))
{
// Syndrome matches an error in the MSB
// Correct that error and adjust the syndrome to account for it
syndrome ^= 0x3B4;
result |= (~damagedCW & 0x80000000) >> 30;
}
else
{
// No error
result |= (damagedCW & 0x80000000) >> 30;
}
damagedCW <<= 1;
// Handle syndrome shift register feedback
if (syndrome & 0x200)
{
syndrome <<= 1;
syndrome ^= 0x769; // 0x769 = POCSAG generator polynomial -- x^10 + x^9 + x^8 + x^6 + x^5 + x^3 + 1
syndrome ^= 0x769;
}
else
{
syndrome <<= 1;
}
// Mask off bits which fall off the end of the syndrome shift register
syndrome &= 0x3FF;
}
// Check if error correction was successful
if (syndrome != 0)
{
// Syndrome nonzero at end indicates uncorrectable errors
correctedCW = cw;
return false;
}
@@ -162,13 +156,11 @@ int xorBits(uint32_t word, int firstBit, int lastBit)
return x;
}
// Even-parity check: the XOR of bits [firstBit, lastBit] must reproduce the
// received parity bit for the codeword to be considered intact.
bool evenParity(uint32_t word, int firstBit, int lastBit, int parityBit)
{
    const int computedParity = xorBits(word, firstBit, lastBit);
    return computedParity == parityBit;
}
// Reverse order of bits
uint32_t reverse(uint32_t x)
{
x = (((x & 0xaaaaaaaa) >> 1) | ((x & 0x55555555) << 1));
@@ -178,10 +170,6 @@ uint32_t reverse(uint32_t x)
return ((x >> 16) | (x << 16));
}
// Decode a batch of codewords to addresses and messages
// Messages may be spreadout over multiple batches
// https://www.itu.int/dms_pubrec/itu-r/rec/m/R-REC-M.584-1-198607-S!!PDF-E.pdf
// https://www.itu.int/dms_pubrec/itu-r/rec/m/R-REC-M.584-2-199711-I!!PDF-E.pdf
void decodeBatch()
{
int i = 1;
@@ -190,17 +178,13 @@ void decodeBatch()
for (int word = 0; word < PAGERDEMOD_CODEWORDS_PER_FRAME; word++)
{
bool addressCodeWord = ((code_words[i] >> 31) & 1) == 0;
// Check parity bit
bool parityError = !evenParity(code_words[i], 1, 31, code_words[i] & 0x1);
if (code_words[i] == PAGERDEMOD_POCSAG_IDLECODE)
{
// Idle
}
else if (addressCodeWord)
{
// Address
function_bits = (code_words[i] >> 11) & 0x3;
int addressBits = (code_words[i] >> 13) & 0x3ffff;
address = (addressBits << 3) | frame;
@@ -213,44 +197,30 @@ void decodeBatch()
}
else
{
// Message - decode as both numeric and ASCII - not all operators use functionBits to indicate encoding
int messageBits = (code_words[i] >> 11) & 0xfffff;
if (parityError)
{
parity_errors++;
}
if (code_words_bch_error[i])
{
bch_errors++;
}
if (parityError) parity_errors++;
if (code_words_bch_error[i]) bch_errors++;
// Numeric format
for (int j = 16; j >= 0; j -= 4)
{
uint32_t numericBits = (messageBits >> j) & 0xf;
numericBits = reverse(numericBits) >> (32 - 4);
// Spec has 0xa as 'spare', but other decoders treat it as '.'
const char numericChars[] = {
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '.', 'U', ' ', '-', ')', '('};
char numericChar = numericChars[numericBits];
numeric_msg.push_back(numericChar);
}
// 7-bit ASCII alphanumeric format
alpha_bit_buffer = (alpha_bit_buffer << 20) | messageBits;
alpha_bit_buffer_bits += 20;
while (alpha_bit_buffer_bits >= 7)
{
// Extract next 7-bit character from bit buffer
char c = (alpha_bit_buffer >> (alpha_bit_buffer_bits - 7)) & 0x7f;
// Reverse bit ordering
c = reverse(c) >> (32 - 7);
// Add to received message string (excluding null, end of text, end of transmission)
if (c != 0 && c != 0x3 && c != 0x4)
{
alpha_msg.push_back(c);
}
// Remove from bit buffer
alpha_bit_buffer_bits -= 7;
if (alpha_bit_buffer_bits == 0)
{
@@ -262,25 +232,16 @@ void decodeBatch()
}
}
}
// Move to next codeword
i++;
}
}
}
void processOneSample(int8_t i, int8_t q)
void processBasebandSample(double sample)
{
float fi = ((float)i) / 128.0f;
float fq = ((float)q) / 128.0f;
ensureDSPInitialized();
std::complex<float> iq(fi, fq);
float deviation;
double fmDemod = phaseDiscri.phaseDiscriminatorDelta(iq, magsqRaw, deviation);
// printf("fmDemod: %.3f\n", fmDemod);
double filt = lowpassBaud.filter(fmDemod);
double filt = lowpassBaud.filter(sample);
if (!got_SC)
{
@@ -288,54 +249,49 @@ void processOneSample(int8_t i, int8_t q)
dc_offset = preambleMovingAverage.asDouble();
}
bool data = (filt - dc_offset) >= 0.0;
// printf("filt - dc: %.3f\n", filt - dc_offset);
double sample_val = filt - dc_offset;
double threshold = 0.05;
if (sample_val > threshold)
{
hysteresis_state = true;
}
else if (sample_val < -threshold)
{
hysteresis_state = false;
}
bool data = hysteresis_state;
if (data != prev_data)
{
sync_cnt = SAMPLES_PER_SYMBOL / 2; // reset
sync_cnt = SAMPLES_PER_SYMBOL / 2;
}
else
{
sync_cnt--; // wait until next bit's midpoint
sync_cnt--;
if (sync_cnt <= 0)
{
if (bit_inverted)
{
data_bit = data;
}
else
{
data_bit = !data;
}
// printf("%d", data_bit);
if (bit_inverted) data_bit = data;
else data_bit = !data;
bits = (bits << 1) | data_bit;
bit_cnt++;
if (bit_cnt > 32)
{
bit_cnt = 32;
}
if (bit_cnt > 32) bit_cnt = 32;
if (bit_cnt == 32 && !got_SC)
{
// printf("pop count: %d\n", pop_cnt(bits ^ POCSAG_SYNCCODE));
// printf("pop count inv: %d\n", pop_cnt(bits ^ POCSAG_SYNCCODE_INV));
if (bits == POCSAG_SYNCCODE)
{
got_SC = true;
bit_inverted = false;
printf("\nSync code found\n");
}
else if (bits == POCSAG_SYNCCODE_INV)
{
got_SC = true;
bit_inverted = true;
printf("\nSync code found\n");
}
else if (pop_cnt(bits ^ POCSAG_SYNCCODE) <= 3)
{
@@ -344,9 +300,7 @@ void processOneSample(int8_t i, int8_t q)
{
got_SC = true;
bit_inverted = false;
printf("\nSync code found\n");
}
// else printf("\nSync code not found\n");
}
else if (pop_cnt(bits ^ POCSAG_SYNCCODE_INV) <= 3)
{
@@ -355,9 +309,7 @@ void processOneSample(int8_t i, int8_t q)
{
got_SC = true;
bit_inverted = true;
printf("\nSync code found\n");
}
// else printf("\nSync code not found\n");
}
if (got_SC)
@@ -394,7 +346,6 @@ void processOneSample(int8_t i, int8_t q)
if (address > 0 && !numeric_msg.empty())
{
is_message_ready = true;
printf("Addr: %d | Numeric: %s | Alpha: %s\n", address, numeric_msg.c_str(), alpha_msg.c_str());
}
else
{
@@ -408,3 +359,16 @@ void processOneSample(int8_t i, int8_t q)
prev_data = data;
}
// Legacy IQ entry point kept for the RTL-TCP path: normalizes one 8-bit IQ
// pair to [-1, 1), FM-demodulates it with the phase discriminator, and hands
// the resulting baseband sample to the shared decoding pipeline.
void processOneSample(int8_t i, int8_t q)
{
    const float scale = 1.0f / 128.0f;
    std::complex<float> iq(static_cast<float>(i) * scale,
                           static_cast<float>(q) * scale);
    float deviation;
    const double fmDemod = phaseDiscri.phaseDiscriminatorDelta(iq, magsqRaw, deviation);
    processBasebandSample(fmDemod);
}

View File

@@ -34,6 +34,8 @@ extern Lowpass<double> lowpassBaud;
extern MovingAverageUtil<double, double, 2048> preambleMovingAverage;
extern double magsqRaw;
void ensureDSPInitialized();
void processOneSample(int8_t i, int8_t q);
void processBasebandSample(double sample);
#endif

View File

@@ -8,6 +8,8 @@
#include <chrono>
#include <unistd.h>
#include <arpa/inet.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <fcntl.h>
#include <android/log.h>
#include <errno.h>
@@ -84,6 +86,40 @@ Java_org_noxylva_lbjconsole_flutter_RtlTcpChannelHandler_startClientAsync(
env->ReleaseStringUTFChars(port_, portStr);
}
// JNI entry point for microphone PCM pushed from AudioInputHandler.kt.
// Converts 16-bit mono samples to normalized doubles, applies a fixed input
// gain, and runs them through the shared POCSAG baseband demodulator under
// demodDataMutex. Any completed message is serialized as
// "[MSG]address|functionBits|text" into messageBuffer for the Dart side.
extern "C" JNIEXPORT void JNICALL
Java_org_noxylva_lbjconsole_flutter_AudioInputHandler_nativePushAudio(
    JNIEnv *env, jobject thiz, jshortArray audioData, jint size) {
    ensureDSPInitialized();
    jshort *samples = env->GetShortArrayElements(audioData, NULL);
    if (samples == NULL) {
        // JVM could not pin/copy the array (OOM); drop this chunk rather
        // than dereference NULL.
        return;
    }
    // Never trust the caller-supplied size past the real array length.
    jsize available = env->GetArrayLength(audioData);
    if (size > available) size = available;
    // Fixed input gain applied after int16 -> [-1, 1) normalization.
    static const double kInputGain = 5.0;
    std::lock_guard<std::mutex> demodLock(demodDataMutex);
    for (int i = 0; i < size; i++) {
        double sample = (double)samples[i] / 32768.0;
        processBasebandSample(kInputGain * sample);
    }
    // JNI_ABORT: the buffer was only read, so skip the copy-back to Java.
    env->ReleaseShortArrayElements(audioData, samples, JNI_ABORT);
    if (is_message_ready) {
        std::ostringstream ss;
        std::lock_guard<std::mutex> msgLock(msgMutex);
        // Prefer the alphanumeric decode; fall back to numeric when empty.
        std::string message_content = alpha_msg.empty() ? numeric_msg : alpha_msg;
        ss << "[MSG]" << address << "|" << function_bits << "|" << message_content;
        messageBuffer.push_back(ss.str());
        is_message_ready = false;
        numeric_msg.clear();
        alpha_msg.clear();
    }
}
extern "C" JNIEXPORT jdouble JNICALL
Java_org_noxylva_lbjconsole_flutter_RtlTcpChannelHandler_getSignalStrength(JNIEnv *, jobject)
{
@@ -171,8 +207,8 @@ void clientThread(std::string host, int port)
goto cleanup;
}
lowpassBaud.create(301, SAMPLE_RATE, BAUD_RATE * 5.0f);
phaseDiscri.setFMScaling(SAMPLE_RATE / (2.0f * DEVIATION));
ensureDSPInitialized();
sockfd_atomic.store(localSockfd);
{
std::lock_guard<std::mutex> lock(msgMutex);

View File

@@ -0,0 +1,126 @@
package org.noxylva.lbjconsole.flutter
import android.Manifest
import android.content.Context
import android.content.pm.PackageManager
import android.media.AudioFormat
import android.media.AudioRecord
import android.media.MediaRecorder
import android.util.Log
import androidx.core.content.ContextCompat
import io.flutter.embedding.engine.FlutterEngine
import io.flutter.plugin.common.MethodCall
import io.flutter.plugin.common.MethodChannel
import java.util.concurrent.atomic.AtomicBoolean
/**
 * Streams raw microphone PCM into the native POCSAG demodulator.
 *
 * Exposed to Dart over the "org.noxylva.lbjconsole/audio_input" MethodChannel
 * with two methods: "start" (begin capture) and "stop" (end capture).
 * Capture runs on a dedicated max-priority thread; each chunk of 48 kHz mono
 * 16-bit samples is handed to the native layer via [nativePushAudio].
 */
class AudioInputHandler(private val context: Context) : MethodChannel.MethodCallHandler {
    private var audioRecord: AudioRecord? = null
    private val isRecording = AtomicBoolean(false)
    private var recordingThread: Thread? = null

    // The native demodulator expects 48 kHz mono 16-bit PCM.
    private val sampleRate = 48000

    // getMinBufferSize() returns a byte count, or a NEGATIVE error code
    // (ERROR / ERROR_BAD_VALUE) when the configuration is unsupported — never
    // hand a negative size to the AudioRecord constructor.
    private val bufferSize: Int = AudioRecord.getMinBufferSize(
        sampleRate,
        AudioFormat.CHANNEL_IN_MONO,
        AudioFormat.ENCODING_PCM_16BIT,
    ).let { minSize -> if (minSize > 0) minSize * 2 else DEFAULT_BUFFER_SIZE }

    /** Pushes [size] 16-bit samples from [data] into the native demodulator. */
    private external fun nativePushAudio(data: ShortArray, size: Int)

    override fun onMethodCall(call: MethodCall, result: MethodChannel.Result) {
        when (call.method) {
            "start" -> {
                if (startRecording()) {
                    result.success(null)
                } else {
                    result.error("AUDIO_ERROR", "Failed to start audio recording", null)
                }
            }
            "stop" -> {
                stopRecording()
                result.success(null)
            }
            else -> result.notImplemented()
        }
    }

    /**
     * Starts microphone capture. Returns true on success (or when already
     * recording); false when RECORD_AUDIO is not granted or the recorder
     * fails to initialize.
     */
    private fun startRecording(): Boolean {
        if (isRecording.get()) return true
        if (ContextCompat.checkSelfPermission(
                context,
                Manifest.permission.RECORD_AUDIO,
            ) != PackageManager.PERMISSION_GRANTED
        ) {
            Log.e(TAG, "Permission not granted")
            return false
        }
        return try {
            // UNPROCESSED avoids AGC / noise suppression that would distort
            // the FM baseband waveform.
            val record = AudioRecord(
                MediaRecorder.AudioSource.UNPROCESSED,
                sampleRate,
                AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT,
                bufferSize,
            )
            if (record.state != AudioRecord.STATE_INITIALIZED) {
                Log.e(TAG, "AudioRecord init failed")
                // Release immediately instead of leaking the native recorder.
                record.release()
                return false
            }
            audioRecord = record
            record.startRecording()
            isRecording.set(true)
            recordingThread = Thread { captureLoop() }.apply {
                priority = Thread.MAX_PRIORITY
                start()
            }
            true
        } catch (e: Exception) {
            Log.e(TAG, "Start recording exception", e)
            stopRecording()
            false
        }
    }

    /** Reads PCM chunks until [isRecording] clears, forwarding them natively. */
    private fun captureLoop() {
        // bufferSize is in BYTES; 16-bit samples need bufferSize / 2 shorts.
        // The oversized buffer is harmless and gives extra underrun headroom.
        val buffer = ShortArray(bufferSize)
        while (isRecording.get()) {
            val readSize = audioRecord?.read(buffer, 0, buffer.size) ?: break
            if (readSize > 0) {
                nativePushAudio(buffer, readSize)
            } else if (readSize < 0) {
                // ERROR_INVALID_OPERATION etc. — stop instead of spinning on
                // a dead recorder.
                Log.e(TAG, "AudioRecord.read error: $readSize")
                break
            }
        }
    }

    /** Stops capture, joins the reader thread, then releases the recorder. */
    private fun stopRecording() {
        isRecording.set(false)
        try {
            // Join BEFORE release so the reader thread never touches a
            // released AudioRecord.
            recordingThread?.join(1000)
        } catch (e: InterruptedException) {
            // Preserve the interrupt status for callers higher up the stack.
            Thread.currentThread().interrupt()
        }
        try {
            audioRecord?.let { record ->
                if (record.recordingState == AudioRecord.RECORDSTATE_RECORDING) {
                    record.stop()
                }
                record.release()
            }
        } catch (e: Exception) {
            Log.e(TAG, "Stop recording exception", e)
        }
        audioRecord = null
        recordingThread = null
    }

    companion object {
        private const val CHANNEL = "org.noxylva.lbjconsole/audio_input"
        private const val TAG = "AudioInputHandler"

        // Fallback capture buffer size in bytes, used only when
        // getMinBufferSize() reports an error for this configuration.
        private const val DEFAULT_BUFFER_SIZE = 8192

        /** Wires this handler to [flutterEngine]'s audio-input method channel. */
        fun registerWith(flutterEngine: FlutterEngine, context: Context) {
            val channel = MethodChannel(flutterEngine.dartExecutor.binaryMessenger, CHANNEL)
            channel.setMethodCallHandler(AudioInputHandler(context))
        }
    }
}

View File

@@ -7,5 +7,6 @@ class MainActivity: FlutterActivity() {
// Registers the app's platform channels with the Flutter engine so Dart code
// can drive both the RTL-TCP client and the microphone audio input source.
override fun configureFlutterEngine(flutterEngine: FlutterEngine) {
super.configureFlutterEngine(flutterEngine)
RtlTcpChannelHandler.registerWith(flutterEngine)
AudioInputHandler.registerWith(flutterEngine, applicationContext)
}
}
}

View File

@@ -7,13 +7,27 @@ allprojects {
}
rootProject.buildDir = "../build"
subprojects {
project.buildDir = "${rootProject.buildDir}/${project.name}"
}
subprojects {
project.evaluationDependsOn(":app")
}
subprojects {
if (project.name != "app") {
project.afterEvaluate {
if (project.hasProperty("android")) {
project.android {
compileSdk 36
}
}
}
}
}
tasks.register("clean", Delete) {
delete rootProject.buildDir
}
}