chore: rename project quicnprotochat -> quicproquo (binaries: qpq)

Rename the entire workspace:
- Crate packages: quicnprotochat-{core,proto,server,client,gui,p2p,mobile} -> quicproquo-*
- Binary names: quicnprotochat -> qpq, quicnprotochat-server -> qpq-server,
  quicnprotochat-gui -> qpq-gui
- Default files: *-state.bin -> qpq-state.bin, *-server.toml -> qpq-server.toml,
  *.db -> qpq.db
- Environment variable prefix: QUICNPROTOCHAT_* -> QPQ_*
- App identifier: chat.quicnproto.gui -> chat.quicproquo.gui
- Proto package: quicnprotochat.bench -> quicproquo.bench
- All documentation, Docker, CI, and script references updated

HKDF domain-separation strings and P2P ALPN remain unchanged for
backward compatibility with existing encrypted state and wire protocol.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-01 20:11:51 +01:00
parent 553de3a2b7
commit 853ca4fec0
152 changed files with 4070 additions and 788 deletions

View File

@@ -0,0 +1,86 @@
//! Backend service running on a dedicated thread with a tokio LocalSet.
//!
//! All server-facing work (capnp-rpc, node_service::Client) is !Send and must run on this
//! single thread. The UI thread sends commands over a channel; this thread runs
//! `LocalSet::run_until` for each request and sends the result back.
use std::path::PathBuf;
use std::sync::mpsc;
use std::thread;
use tokio::runtime::Builder;
use tokio::task::LocalSet;
use quicproquo_client::{cmd_health_json, whoami_json};
/// Commands the UI can send to the backend thread.
pub enum BackendCommand {
    /// Read local identity info from the state file (handled synchronously,
    /// no networking — see `run_command`).
    Whoami {
        /// Path to the state file on disk.
        state_path: String,
        /// Optional password; passed through to `whoami_json`
        /// (presumably to decrypt the state file — confirm in quicproquo_client).
        password: Option<String>,
    },
    /// Query the server's health endpoint (async; driven on the LocalSet).
    Health {
        /// Server address to connect to.
        server: String,
        /// Path to the CA certificate used for TLS verification.
        ca_cert: PathBuf,
        /// TLS server name expected in the server's certificate.
        server_name: String,
    },
}
/// Response sent back to the UI.
///
/// `Ok` carries the command's string output (a JSON document, per the
/// `*_json` helpers); `Err` carries a rendered error message.
pub type BackendResponse = Result<String, String>;
/// Spawn the backend thread and return a sender to post commands and a join handle.
///
/// The backend owns a current-thread tokio runtime plus a `LocalSet` and
/// processes one command at a time: for each received command it calls
/// `run_command` (which uses `LocalSet::run_until` for async commands) and
/// sends the result on the per-request reply channel.
pub fn spawn_backend() -> (mpsc::Sender<(BackendCommand, mpsc::Sender<BackendResponse>)>, thread::JoinHandle<()>) {
    let (cmd_tx, cmd_rx) = mpsc::channel::<(BackendCommand, mpsc::Sender<BackendResponse>)>();
    let worker = thread::spawn(move || {
        // Current-thread runtime: all server-facing work is !Send and must
        // stay on this one thread.
        let runtime = Builder::new_current_thread()
            .enable_all()
            .build()
            .expect("backend tokio runtime");
        let local_set = LocalSet::new();
        // The loop ends once every command sender has been dropped.
        loop {
            let (command, reply) = match cmd_rx.recv() {
                Ok(pair) => pair,
                Err(_) => break,
            };
            let outcome = run_command(&local_set, &runtime, command);
            // The UI may have given up waiting; a dead reply channel is fine.
            let _ = reply.send(outcome);
        }
    });
    (cmd_tx, worker)
}
/// Execute a single backend command on this thread and produce its response.
fn run_command(
    local: &LocalSet,
    rt: &tokio::runtime::Runtime,
    cmd: BackendCommand,
) -> BackendResponse {
    match cmd {
        // Synchronous: reads the local state file, no networking involved.
        BackendCommand::Whoami { state_path, password } => {
            whoami_json(&PathBuf::from(&state_path), password.as_deref())
                .map_err(|e| e.to_string())
        }
        // Request-response: drive the future with LocalSet::run_until so
        // capnp-rpc and connect_node stay on this thread (!Send).
        BackendCommand::Health { server, ca_cert, server_name } => {
            let request = cmd_health_json(&server, &ca_cert, &server_name);
            rt.block_on(local.run_until(request))
                .map_err(|e| e.to_string())
        }
    }
}
/// Default CA cert path (relative to cwd or absolute); same default as CLI.
pub fn default_ca_cert() -> PathBuf {
    "data/server-cert.der".into()
}
/// Default TLS server name.
pub fn default_server_name() -> String {
    String::from("localhost")
}

View File

@@ -0,0 +1,76 @@
//! quicproquo native GUI (Tauri 2).
//!
//! The backend runs on a dedicated thread with a tokio LocalSet; all server-facing
//! work (capnp-rpc, node_service::Client) is dispatched there. Tauri commands
//! block on the request-response channel until the backend returns.
mod backend;
use std::path::PathBuf;
use std::sync::mpsc;
use backend::{spawn_backend, BackendCommand};
/// Shared state: sender to the backend thread.
///
/// Registered with Tauri via `.manage(...)` and injected into commands
/// through `tauri::State`.
struct BackendState {
    // Each posted command carries its own reply sender; the backend answers
    // each request on that per-request channel.
    tx: mpsc::Sender<(BackendCommand, mpsc::Sender<backend::BackendResponse>)>,
}
/// Runs whoami on the backend thread and returns JSON string (identity_key, fingerprint, etc.).
#[tauri::command]
fn whoami(
    state: tauri::State<BackendState>,
    state_path: String,
    password: Option<String>,
) -> Result<String, String> {
    let (reply_tx, reply_rx) = mpsc::channel();
    let cmd = BackendCommand::Whoami { state_path, password };
    state.tx.send((cmd, reply_tx)).map_err(|e| e.to_string())?;
    // Block until the backend replies. The outer `?` covers a dead channel;
    // the returned value is the command's own Result.
    reply_rx.recv().map_err(|e| e.to_string())?
}
/// Runs health check on the backend thread (LocalSet::run_until) and returns status JSON.
/// Runs health check on the backend thread (LocalSet::run_until) and returns status JSON.
#[tauri::command]
fn health(
    state: tauri::State<BackendState>,
    server: String,
    ca_cert: Option<String>,
    server_name: Option<String>,
) -> Result<String, String> {
    // Fall back to the same defaults the CLI uses for anything unspecified.
    let cmd = BackendCommand::Health {
        server,
        ca_cert: match ca_cert {
            Some(path) => PathBuf::from(path),
            None => backend::default_ca_cert(),
        },
        server_name: server_name.unwrap_or_else(backend::default_server_name),
    };
    let (reply_tx, reply_rx) = mpsc::channel();
    state.tx.send((cmd, reply_tx)).map_err(|e| e.to_string())?;
    // Block until the backend replies; the returned value is the command's
    // own Result.
    reply_rx.recv().map_err(|e| e.to_string())?
}
/// GUI entry point: spawn the backend thread, then hand control to Tauri.
///
/// Also serves as the mobile entry point via `tauri::mobile_entry_point`.
#[cfg_attr(mobile, tauri::mobile_entry_point)]
pub fn run() {
    // The join handle is intentionally dropped: the backend loop exits on its
    // own once the command sender (owned by BackendState) is dropped.
    let (backend_tx, _backend_handle) = spawn_backend();
    tauri::Builder::default()
        .manage(BackendState { tx: backend_tx })
        .invoke_handler(tauri::generate_handler![whoami, health])
        .run(tauri::generate_context!())
        .expect("error while running tauri application");
}

View File

@@ -0,0 +1,5 @@
//! Desktop entry point for quicproquo-gui.

/// Delegates straight to the shared library entry point so desktop and
/// mobile builds run the same setup (`quicproquo_gui::run`).
fn main() {
    quicproquo_gui::run()
}