chore: rename quicproquo → quicprochat in Rust workspace
Rename all crate directories, package names, binary names, proto package/module paths, ALPN strings, env var prefixes, config filenames, mDNS service names, and plugin ABI symbols from quicproquo/qpq to quicprochat/qpc.
This commit is contained in:
38
sdks/python/quicprochat/__init__.py
Normal file
38
sdks/python/quicprochat/__init__.py
Normal file
@@ -0,0 +1,38 @@
|
||||
"""quicproquo -- Python SDK for the quicproquo E2E encrypted messenger.
|
||||
|
||||
Two transport backends are available:
|
||||
|
||||
1. **FFI** (``QpqClient.connect_ffi``): wraps the Rust ``libquicproquo_ffi``
|
||||
shared library via CFFI. This gives you the full Rust crypto stack
|
||||
(MLS, hybrid KEM, OPAQUE) at native speed.
|
||||
|
||||
2. **QUIC** (``QpqClient.connect``): pure-Python QUIC transport via *aioquic*
|
||||
with protobuf (v2 wire format). No Rust dependency required; crypto
|
||||
operations must be supplied externally.
|
||||
"""
|
||||
|
||||
from quicproquo.client import QpqClient
|
||||
from quicproquo.types import (
|
||||
ConnectOptions,
|
||||
Envelope,
|
||||
ChannelResult,
|
||||
HealthInfo,
|
||||
QpqError,
|
||||
AuthError,
|
||||
TimeoutError as QpqTimeoutError,
|
||||
ConnectionError as QpqConnectionError,
|
||||
)
|
||||
|
||||
__version__ = "0.1.0"
|
||||
|
||||
__all__ = [
|
||||
"QpqClient",
|
||||
"ConnectOptions",
|
||||
"Envelope",
|
||||
"ChannelResult",
|
||||
"HealthInfo",
|
||||
"QpqError",
|
||||
"AuthError",
|
||||
"QpqTimeoutError",
|
||||
"QpqConnectionError",
|
||||
]
|
||||
291
sdks/python/quicprochat/client.py
Normal file
291
sdks/python/quicprochat/client.py
Normal file
@@ -0,0 +1,291 @@
|
||||
"""High-level quicproquo client.
|
||||
|
||||
Provides both async (QUIC transport) and sync (FFI transport) APIs for
|
||||
interacting with a quicproquo server.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from quicproquo.types import (
|
||||
ConnectOptions,
|
||||
Envelope,
|
||||
ChannelResult,
|
||||
HealthInfo,
|
||||
ConnectionError,
|
||||
)
|
||||
from quicproquo.transport import QuicTransport
|
||||
from quicproquo.ffi import FfiTransport
|
||||
from quicproquo import proto, wire
|
||||
|
||||
|
||||
class QpqClient:
    """High-level quicprochat client.

    Use ``QpqClient.connect()`` for the async QUIC transport, or
    ``QpqClient.connect_ffi()`` for the synchronous Rust FFI backend.
    A single instance holds at most one transport of each kind; the async
    RPC methods require the QUIC transport, the ``ffi_*`` methods require
    the FFI transport, and each raises ``ConnectionError`` otherwise.

    Example (async)::

        client = await QpqClient.connect(ConnectOptions(addr="127.0.0.1:5001"))
        health = await client.health()
        response = await client.login_start("alice", opaque_request)
        await client.close()

    Example (FFI)::

        client = QpqClient.connect_ffi(ConnectOptions(addr="127.0.0.1:5001"))
        client.ffi_login("alice", "password123")
        client.ffi_send("bob", b"hello")
        client.close_sync()
    """

    def __init__(
        self,
        quic: Optional[QuicTransport] = None,
        ffi: Optional[FfiTransport] = None,
    ) -> None:
        # Exactly one of the two transports is normally set, depending on
        # which constructor (``connect`` / ``connect_ffi``) was used.
        self._quic = quic
        self._ffi = ffi
        # Opaque server-issued session token; populated by login_finish()
        # or set_session_token().
        self._session_token: bytes = b""
        # Device identifier for multi-device scoping; set via set_device_id().
        self._device_id: bytes = b""

    # ------------------------------------------------------------------
    # Constructors
    # ------------------------------------------------------------------

    @staticmethod
    async def connect(opts: ConnectOptions) -> "QpqClient":
        """Connect to a server using the async QUIC transport."""
        transport = await QuicTransport.connect(
            opts.addr,
            ca_cert_path=opts.ca_cert_path,
            server_name=opts.server_name,
            insecure_skip_verify=opts.insecure_skip_verify,
            connect_timeout_ms=opts.connect_timeout_ms,
            request_timeout_ms=opts.request_timeout_ms,
        )
        return QpqClient(quic=transport)

    @staticmethod
    def connect_ffi(opts: ConnectOptions) -> "QpqClient":
        """Connect using the synchronous Rust FFI backend.

        Note: the FFI backend only consumes ``addr``, ``ca_cert_path`` and
        ``server_name`` from *opts*; the timeout/verify options are
        QUIC-transport-only.
        """
        transport = FfiTransport.connect(
            opts.addr,
            ca_cert_path=opts.ca_cert_path,
            server_name=opts.server_name,
        )
        return QpqClient(ffi=transport)

    # ------------------------------------------------------------------
    # Session management
    # ------------------------------------------------------------------

    def set_session_token(self, token: bytes) -> None:
        """Set an externally-obtained session token for authenticated RPCs."""
        self._session_token = token

    def set_device_id(self, device_id: bytes) -> None:
        """Set the device ID for multi-device scoping."""
        self._device_id = device_id

    # ------------------------------------------------------------------
    # Async RPC methods (QUIC transport)
    # ------------------------------------------------------------------

    async def health(self) -> HealthInfo:
        """Check server health (async).

        Returns a :class:`HealthInfo` with status, node id, version,
        uptime and storage backend as reported by the server.
        """
        data = await self._rpc(wire.HEALTH, proto.encode_health())
        info = proto.decode_health_response(data)
        return HealthInfo(
            status=str(info["status"]),
            node_id=str(info["node_id"]),
            version=str(info["version"]),
            uptime_secs=int(info["uptime_secs"]),
            storage_backend=str(info["storage_backend"]),
        )

    async def register_start(self, username: str, request: bytes) -> bytes:
        """Start OPAQUE registration. Returns server response bytes."""
        payload = proto.encode_opaque_register_start(username, request)
        data = await self._rpc(wire.OPAQUE_REGISTER_START, payload)
        return proto.decode_opaque_register_start_response(data)

    async def register_finish(
        self, username: str, upload: bytes, identity_key: bytes
    ) -> bool:
        """Complete OPAQUE registration. Returns the server's success flag."""
        payload = proto.encode_opaque_register_finish(username, upload, identity_key)
        data = await self._rpc(wire.OPAQUE_REGISTER_FINISH, payload)
        return proto.decode_opaque_register_finish_response(data)

    async def login_start(self, username: str, request: bytes) -> bytes:
        """Start OPAQUE login. Returns server response bytes."""
        payload = proto.encode_opaque_login_start(username, request)
        data = await self._rpc(wire.OPAQUE_LOGIN_START, payload)
        return proto.decode_opaque_login_start_response(data)

    async def login_finish(
        self, username: str, finalization: bytes, identity_key: bytes
    ) -> bytes:
        """Complete OPAQUE login. Returns and stores session token.

        The token is retained on the client (``self._session_token``) for
        subsequent authenticated RPCs, and also returned to the caller.
        """
        payload = proto.encode_opaque_login_finish(username, finalization, identity_key)
        data = await self._rpc(wire.OPAQUE_LOGIN_FINISH, payload)
        token = proto.decode_opaque_login_finish_response(data)
        self._session_token = token
        return token

    async def resolve_user(self, username: str) -> tuple[bytes, bytes]:
        """Resolve username to (identity_key, inclusion_proof)."""
        payload = proto.encode_resolve_user(username)
        data = await self._rpc(wire.RESOLVE_USER, payload)
        return proto.decode_resolve_user_response(data)

    async def resolve_identity(self, identity_key: bytes) -> str:
        """Resolve identity key to username."""
        payload = proto.encode_resolve_identity(identity_key)
        data = await self._rpc(wire.RESOLVE_IDENTITY, payload)
        return proto.decode_resolve_identity_response(data)

    async def create_channel(self, peer_key: bytes) -> ChannelResult:
        """Create a 1:1 DM channel with a peer.

        ``ChannelResult.was_new`` is False when the channel already existed.
        """
        payload = proto.encode_create_channel(peer_key)
        data = await self._rpc(wire.CREATE_CHANNEL, payload)
        channel_id, was_new = proto.decode_create_channel_response(data)
        return ChannelResult(channel_id=channel_id, was_new=was_new)

    async def send(
        self,
        recipient_key: bytes,
        payload: bytes,
        *,
        channel_id: bytes = b"",
        ttl_secs: int = 0,
        message_id: bytes = b"",
    ) -> tuple[int, bytes]:
        """Enqueue a message. Returns (seq, delivery_proof).

        The server's ``duplicate`` flag in the response is discarded here;
        callers that need dedup information should use the proto layer
        directly.
        """
        req = proto.encode_enqueue(recipient_key, payload, channel_id, ttl_secs, message_id)
        data = await self._rpc(wire.ENQUEUE, req)
        seq, proof, _ = proto.decode_enqueue_response(data)
        return seq, proof

    async def receive(
        self,
        recipient_key: bytes,
        *,
        channel_id: bytes = b"",
        limit: int = 0,
        device_id: bytes = b"",
    ) -> list[Envelope]:
        """Fetch queued messages (non-blocking; returns what is available)."""
        req = proto.encode_fetch(recipient_key, channel_id, limit, device_id)
        data = await self._rpc(wire.FETCH, req)
        return [Envelope(seq=s, data=d) for s, d in proto.decode_fetch_response(data)]

    async def receive_wait(
        self,
        recipient_key: bytes,
        *,
        timeout_ms: int = 5000,
        channel_id: bytes = b"",
        limit: int = 0,
        device_id: bytes = b"",
    ) -> list[Envelope]:
        """Long-poll for messages with a timeout (server-side wait)."""
        req = proto.encode_fetch_wait(recipient_key, channel_id, timeout_ms, limit, device_id)
        data = await self._rpc(wire.FETCH_WAIT, req)
        return [Envelope(seq=s, data=d) for s, d in proto.decode_fetch_wait_response(data)]

    async def ack(
        self,
        recipient_key: bytes,
        seq_up_to: int,
        *,
        channel_id: bytes = b"",
        device_id: bytes = b"",
    ) -> None:
        """Acknowledge messages up to a sequence number (inclusive cursor)."""
        req = proto.encode_ack(recipient_key, seq_up_to, channel_id, device_id)
        await self._rpc(wire.ACK, req)

    async def upload_key_package(self, identity_key: bytes, package: bytes) -> bytes:
        """Upload an MLS key package. Returns fingerprint."""
        req = proto.encode_upload_key_package(identity_key, package)
        data = await self._rpc(wire.UPLOAD_KEY_PACKAGE, req)
        return proto.decode_upload_key_package_response(data)

    async def fetch_key_package(self, identity_key: bytes) -> bytes:
        """Fetch an MLS key package."""
        req = proto.encode_fetch_key_package(identity_key)
        data = await self._rpc(wire.FETCH_KEY_PACKAGE, req)
        return proto.decode_fetch_key_package_response(data)

    async def upload_hybrid_key(self, identity_key: bytes, hybrid_public_key: bytes) -> None:
        """Upload a hybrid (X25519 + ML-KEM-768) public key."""
        req = proto.encode_upload_hybrid_key(identity_key, hybrid_public_key)
        await self._rpc(wire.UPLOAD_HYBRID_KEY, req)

    async def fetch_hybrid_key(self, identity_key: bytes) -> bytes:
        """Fetch a hybrid public key."""
        req = proto.encode_fetch_hybrid_key(identity_key)
        data = await self._rpc(wire.FETCH_HYBRID_KEY, req)
        return proto.decode_fetch_hybrid_key_response(data)

    async def delete_account(self) -> bool:
        """Permanently delete the authenticated account."""
        data = await self._rpc(wire.DELETE_ACCOUNT, proto.encode_delete_account())
        return proto.decode_delete_account_response(data)

    # ------------------------------------------------------------------
    # FFI (synchronous) methods
    # ------------------------------------------------------------------

    def ffi_login(self, username: str, password: str) -> None:
        """Authenticate via OPAQUE using the FFI backend (synchronous).

        Raises ConnectionError if this client was not built with connect_ffi().
        """
        if not self._ffi:
            raise ConnectionError("no FFI transport; use QpqClient.connect_ffi()")
        self._ffi.login(username, password)

    def ffi_send(self, recipient: str, message: bytes) -> None:
        """Send a message via the FFI backend (synchronous).

        Raises ConnectionError if this client was not built with connect_ffi().
        """
        if not self._ffi:
            raise ConnectionError("no FFI transport; use QpqClient.connect_ffi()")
        self._ffi.send(recipient, message)

    def ffi_receive(self, timeout_ms: int = 5000) -> list[str]:
        """Receive messages via the FFI backend (synchronous).

        Raises ConnectionError if this client was not built with connect_ffi().
        """
        if not self._ffi:
            raise ConnectionError("no FFI transport; use QpqClient.connect_ffi()")
        return self._ffi.receive(timeout_ms)

    # ------------------------------------------------------------------
    # Lifecycle
    # ------------------------------------------------------------------

    async def close(self) -> None:
        """Close all transports.

        Declared async for API symmetry with connect(); the body currently
        performs no awaits (both transport close() calls are synchronous).
        Closing is idempotent — transports are dropped after the first call.
        """
        if self._quic:
            self._quic.close()
            self._quic = None
        if self._ffi:
            self._ffi.close()
            self._ffi = None

    def close_sync(self) -> None:
        """Close all transports (synchronous variant, same semantics as close())."""
        if self._quic:
            self._quic.close()
            self._quic = None
        if self._ffi:
            self._ffi.close()
            self._ffi = None

    # ------------------------------------------------------------------
    # Internal
    # ------------------------------------------------------------------

    async def _rpc(self, method_id: int, payload: bytes) -> bytes:
        # Single funnel for all async RPCs: requires the QUIC transport.
        if not self._quic:
            raise ConnectionError("no QUIC transport; use QpqClient.connect()")
        return await self._quic.rpc(method_id, payload)
|
||||
192
sdks/python/quicprochat/ffi.py
Normal file
192
sdks/python/quicprochat/ffi.py
Normal file
@@ -0,0 +1,192 @@
|
||||
"""CFFI bindings to ``libquicproquo_ffi`` (the Rust C FFI layer).
|
||||
|
||||
This module loads the shared library and exposes a synchronous Python API
|
||||
that mirrors the C functions in ``crates/quicproquo-ffi/src/lib.rs``.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
import cffi
|
||||
|
||||
from quicproquo.types import (
|
||||
QpqError,
|
||||
AuthError,
|
||||
TimeoutError,
|
||||
ConnectionError,
|
||||
)
|
||||
|
||||
# Status codes (must match crates/quicproquo-ffi/src/lib.rs).
|
||||
QPQ_OK = 0
|
||||
QPQ_ERROR = 1
|
||||
QPQ_AUTH_FAILED = 2
|
||||
QPQ_TIMEOUT = 3
|
||||
QPQ_NOT_CONNECTED = 4
|
||||
|
||||
_CDEFS = """
|
||||
typedef struct QpqHandle QpqHandle;
|
||||
|
||||
QpqHandle* qpq_connect(const char* server, const char* ca_cert, const char* server_name);
|
||||
int qpq_login(QpqHandle* handle, const char* username, const char* password);
|
||||
int qpq_send(QpqHandle* handle, const char* recipient, const uint8_t* message, size_t message_len);
|
||||
int qpq_receive(QpqHandle* handle, uint32_t timeout_ms, char** out_json);
|
||||
void qpq_disconnect(QpqHandle* handle);
|
||||
const char* qpq_last_error(const QpqHandle* handle);
|
||||
void qpq_free_string(char* ptr);
|
||||
"""
|
||||
|
||||
_ffi = cffi.FFI()
|
||||
_ffi.cdef(_CDEFS)
|
||||
_lib: Optional[object] = None
|
||||
|
||||
|
||||
def _load_lib() -> object:
    """Locate, dlopen and cache the FFI shared library.

    Search order: the ``QPQ_LIB_PATH`` environment variable, then the cargo
    ``target/{release,debug}`` outputs relative to the repository root
    (``.so`` variants before ``.dylib``), and finally the system loader
    path. The handle is cached in the module-level ``_lib`` so the library
    is opened at most once.

    Raises:
        OSError: if no candidate can be loaded.
    """
    global _lib
    if _lib is not None:
        return _lib

    # sdks/python/quicprochat/ffi.py -> three parents up is the repo root.
    repo_root = Path(__file__).resolve().parents[3]

    candidates: list[str] = [os.environ.get("QPQ_LIB_PATH", "")]
    for ext in (".so", ".dylib"):
        for profile in ("release", "debug"):
            candidates.append(str(repo_root / "target" / profile / f"libquicproquo_ffi{ext}"))
    candidates.append("libquicproquo_ffi.so")  # fall back to the system loader

    for candidate in candidates:
        if not candidate:
            continue
        try:
            _lib = _ffi.dlopen(candidate)
        except OSError:
            continue
        return _lib

    raise OSError(
        "Could not find libquicproquo_ffi. Set QPQ_LIB_PATH or build with "
        "`cargo build --release -p quicproquo-ffi`."
    )
|
||||
|
||||
|
||||
def _check_error(handle: object, code: int) -> None:
    """Translate a non-OK FFI status code into the matching Python exception.

    Pulls the last error message from the native side via ``qpq_last_error``
    and raises AuthError, TimeoutError, ConnectionError or QpqError
    according to *code*. Returns normally when *code* is ``QPQ_OK``.
    """
    if code == QPQ_OK:
        return

    lib = _load_lib()
    err_ptr = lib.qpq_last_error(handle)  # type: ignore[union-attr]
    if err_ptr != _ffi.NULL:
        msg = _ffi.string(err_ptr).decode("utf-8")
    else:
        msg = "unknown error"

    # Dispatch on status code; anything unrecognized becomes the base QpqError.
    exc_by_code = {
        QPQ_AUTH_FAILED: AuthError,
        QPQ_TIMEOUT: TimeoutError,
        QPQ_NOT_CONNECTED: ConnectionError,
    }
    raise exc_by_code.get(code, QpqError)(msg)
|
||||
|
||||
|
||||
class FfiTransport:
    """Synchronous transport wrapping ``libquicproquo_ffi``.

    Provides the same logical operations as ``QuicTransport`` but backed
    by the Rust client library through C FFI.

    Usage::

        ffi = FfiTransport.connect("127.0.0.1:5001", ca_cert_path="/path/to/ca.pem")
        ffi.login("alice", "password123")
        ffi.send("bob", b"hello")
        messages = ffi.receive(timeout_ms=5000)
        ffi.close()

    Also usable as a context manager; ``__exit__`` calls :meth:`close`.
    """

    def __init__(self, handle: object) -> None:
        # Opaque QpqHandle* returned by qpq_connect; None after close().
        self._handle = handle
        self._lib = _load_lib()

    @staticmethod
    def connect(
        addr: str,
        *,
        ca_cert_path: str = "",
        server_name: str = "",
    ) -> "FfiTransport":
        """Connect to a qpq server via the Rust FFI layer.

        When *server_name* is empty, the host part of *addr* (everything
        before the first ``:``) is used for TLS server-name verification.
        Raises ConnectionError if the native connect returns NULL.
        """
        lib = _load_lib()

        server_c = _ffi.new("char[]", addr.encode("utf-8"))
        ca_c = _ffi.new("char[]", ca_cert_path.encode("utf-8"))

        if not server_name:
            host = addr.split(":")[0]
            server_name = host
        sn_c = _ffi.new("char[]", server_name.encode("utf-8"))

        handle = lib.qpq_connect(server_c, ca_c, sn_c)  # type: ignore[union-attr]
        if handle == _ffi.NULL:
            # NULL handle means we cannot call qpq_last_error for details.
            raise ConnectionError(f"qpq_connect failed for {addr}")

        return FfiTransport(handle)

    def login(self, username: str, password: str) -> None:
        """Authenticate with OPAQUE credentials; raises on non-OK status."""
        u = _ffi.new("char[]", username.encode("utf-8"))
        p = _ffi.new("char[]", password.encode("utf-8"))
        code = self._lib.qpq_login(self._handle, u, p)  # type: ignore[union-attr]
        _check_error(self._handle, code)

    def send(self, recipient: str, message: bytes) -> None:
        """Send a message to a recipient (by username); raises on non-OK status."""
        r = _ffi.new("char[]", recipient.encode("utf-8"))
        m = _ffi.new("uint8_t[]", message)
        code = self._lib.qpq_send(self._handle, r, m, len(message))  # type: ignore[union-attr]
        _check_error(self._handle, code)

    def receive(self, timeout_ms: int = 5000) -> list[str]:
        """Receive pending messages, blocking up to *timeout_ms*.

        Returns a list of message strings (UTF-8) decoded from the JSON
        array written by the native side; the native string is freed with
        ``qpq_free_string`` after decoding.
        """
        out = _ffi.new("char**")
        code = self._lib.qpq_receive(self._handle, timeout_ms, out)  # type: ignore[union-attr]
        _check_error(self._handle, code)

        if out[0] == _ffi.NULL:
            return []

        json_str = _ffi.string(out[0]).decode("utf-8")
        self._lib.qpq_free_string(out[0])  # type: ignore[union-attr]
        return json.loads(json_str)  # type: ignore[no-any-return]

    def close(self) -> None:
        """Disconnect and free the handle. Idempotent."""
        if self._handle is not None:
            self._lib.qpq_disconnect(self._handle)  # type: ignore[union-attr]
            self._handle = None

    def __enter__(self) -> "FfiTransport":
        return self

    def __exit__(self, *args: object) -> None:
        self.close()
|
||||
303
sdks/python/quicprochat/proto.py
Normal file
303
sdks/python/quicprochat/proto.py
Normal file
@@ -0,0 +1,303 @@
|
||||
"""Minimal protobuf encode/decode for qpq v1 messages.
|
||||
|
||||
Uses the ``protobuf`` library's descriptor-less encoding for simplicity.
|
||||
Each message is represented as a plain dict and encoded/decoded via
|
||||
google.protobuf helpers.
|
||||
|
||||
This avoids requiring protoc at build time while still producing
|
||||
wire-compatible protobuf bytes.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from google.protobuf import descriptor_pb2 as _ # noqa: F401 – ensure protobuf is importable
|
||||
from google.protobuf.internal.encoder import _VarintBytes # type: ignore[attr-defined]
|
||||
from google.protobuf.internal.decoder import _DecodeVarint # type: ignore[attr-defined]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
# Low-level protobuf helpers (wire types 0=varint, 2=length-delimited)
# ---------------------------------------------------------------------------


def _encode_varint_field(field_number: int, value: int) -> bytes:
    """Encode a varint (wire type 0) field.

    Zero values are elided entirely (proto3 default-value semantics).
    """
    if value == 0:
        return b""
    tag = (field_number << 3) | 0
    return _VarintBytes(tag) + _VarintBytes(value)


def _encode_bytes_field(field_number: int, value: bytes) -> bytes:
    """Encode a length-delimited (wire type 2) field.

    Empty values are elided entirely (proto3 default-value semantics).
    """
    if not value:
        return b""
    tag = (field_number << 3) | 2
    return _VarintBytes(tag) + _VarintBytes(len(value)) + value


def _encode_string_field(field_number: int, value: str) -> bytes:
    """Encode a string (wire type 2) field as UTF-8 bytes."""
    return _encode_bytes_field(field_number, value.encode("utf-8"))


def _decode_fields(data: bytes) -> dict[int, list[tuple[int, bytes | int]]]:
    """Decode a protobuf message into {field_number: [(wire_type, value), ...]}.

    Supports wire types 0 (varint), 2 (length-delimited), 5 (32-bit) and
    1 (64-bit); any other wire type raises ValueError.
    NOTE(review): truncated length-delimited/fixed payloads are silently
    clipped by the slice rather than rejected — confirm whether malformed
    input should raise instead.
    """
    fields: dict[int, list[tuple[int, bytes | int]]] = {}
    pos = 0
    while pos < len(data):
        tag, pos = _DecodeVarint(data, pos)
        field_number = tag >> 3
        wire_type = tag & 0x07
        if wire_type == 0:  # varint
            value, pos = _DecodeVarint(data, pos)
            fields.setdefault(field_number, []).append((wire_type, value))
        elif wire_type == 2:  # length-delimited
            length, pos = _DecodeVarint(data, pos)
            fields.setdefault(field_number, []).append((wire_type, data[pos : pos + length]))
            pos += length
        elif wire_type == 5:  # 32-bit fixed
            fields.setdefault(field_number, []).append((wire_type, data[pos : pos + 4]))
            pos += 4
        elif wire_type == 1:  # 64-bit fixed
            fields.setdefault(field_number, []).append((wire_type, data[pos : pos + 8]))
            pos += 8
        else:
            raise ValueError(f"unsupported wire type {wire_type}")
    return fields


def _get_bytes(fields: dict[int, list[tuple[int, bytes | int]]], fn: int) -> bytes:
    """Return the first bytes value of field *fn*, or b"" if absent/non-bytes."""
    entries = fields.get(fn, [])
    if not entries:
        return b""
    _, val = entries[0]
    return val if isinstance(val, bytes) else b""


def _get_string(fields: dict[int, list[tuple[int, bytes | int]]], fn: int) -> str:
    """Return field *fn* decoded as UTF-8 (invalid bytes replaced), or ""."""
    return _get_bytes(fields, fn).decode("utf-8", errors="replace")


def _get_varint(fields: dict[int, list[tuple[int, bytes | int]]], fn: int) -> int:
    """Return the first varint value of field *fn*, or 0 if absent/non-int."""
    entries = fields.get(fn, [])
    if not entries:
        return 0
    _, val = entries[0]
    return val if isinstance(val, int) else 0


def _get_bool(fields: dict[int, list[tuple[int, bytes | int]]], fn: int) -> bool:
    """Return field *fn* interpreted as a bool (any non-zero varint is True)."""
    return _get_varint(fields, fn) != 0


def _get_repeated_bytes(fields: dict[int, list[tuple[int, bytes | int]]], fn: int) -> list[bytes]:
    """Return all bytes values of repeated field *fn* in wire order."""
    result: list[bytes] = []
    for _, val in fields.get(fn, []):
        if isinstance(val, bytes):
            result.append(val)
    return result
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
# Auth
# ---------------------------------------------------------------------------


def encode_opaque_register_start(username: str, request: bytes) -> bytes:
    """Encode OpaqueRegisterStart: username=1, opaque request=2."""
    return _encode_string_field(1, username) + _encode_bytes_field(2, request)


def decode_opaque_register_start_response(data: bytes) -> bytes:
    """Return the server's OPAQUE registration response (field 1)."""
    return _get_bytes(_decode_fields(data), 1)


def encode_opaque_register_finish(username: str, upload: bytes, identity_key: bytes) -> bytes:
    """Encode OpaqueRegisterFinish: username=1, upload=2, identity_key=3."""
    return (
        _encode_string_field(1, username)
        + _encode_bytes_field(2, upload)
        + _encode_bytes_field(3, identity_key)
    )


def decode_opaque_register_finish_response(data: bytes) -> bool:
    """Return the registration success flag (field 1)."""
    return _get_bool(_decode_fields(data), 1)


def encode_opaque_login_start(username: str, request: bytes) -> bytes:
    """Encode OpaqueLoginStart: username=1, opaque request=2."""
    return _encode_string_field(1, username) + _encode_bytes_field(2, request)


def decode_opaque_login_start_response(data: bytes) -> bytes:
    """Return the server's OPAQUE login response (field 1)."""
    return _get_bytes(_decode_fields(data), 1)


def encode_opaque_login_finish(username: str, finalization: bytes, identity_key: bytes) -> bytes:
    """Encode OpaqueLoginFinish: username=1, finalization=2, identity_key=3."""
    return (
        _encode_string_field(1, username)
        + _encode_bytes_field(2, finalization)
        + _encode_bytes_field(3, identity_key)
    )


def decode_opaque_login_finish_response(data: bytes) -> bytes:
    """Returns session_token (field 1)."""
    return _get_bytes(_decode_fields(data), 1)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
# Delivery
# ---------------------------------------------------------------------------


def encode_enqueue(
    recipient_key: bytes,
    payload: bytes,
    channel_id: bytes = b"",
    ttl_secs: int = 0,
    message_id: bytes = b"",
) -> bytes:
    """Encode Enqueue: recipient=1, payload=2, channel=3, ttl=4, msg_id=5."""
    return (
        _encode_bytes_field(1, recipient_key)
        + _encode_bytes_field(2, payload)
        + _encode_bytes_field(3, channel_id)
        + _encode_varint_field(4, ttl_secs)
        + _encode_bytes_field(5, message_id)
    )


def decode_enqueue_response(data: bytes) -> tuple[int, bytes, bool]:
    """Returns (seq, delivery_proof, duplicate) from fields 1/2/3."""
    fields = _decode_fields(data)
    return _get_varint(fields, 1), _get_bytes(fields, 2), _get_bool(fields, 3)


def encode_fetch(
    recipient_key: bytes,
    channel_id: bytes = b"",
    limit: int = 0,
    device_id: bytes = b"",
) -> bytes:
    """Encode Fetch: recipient=1, channel=2, limit=3, device=4."""
    return (
        _encode_bytes_field(1, recipient_key)
        + _encode_bytes_field(2, channel_id)
        + _encode_varint_field(3, limit)
        + _encode_bytes_field(4, device_id)
    )


def decode_fetch_response(data: bytes) -> list[tuple[int, bytes]]:
    """Returns list of (seq, data) envelopes.

    Field 1 is a repeated embedded Envelope message with seq=1, data=2.
    """
    fields = _decode_fields(data)
    envelopes: list[tuple[int, bytes]] = []
    for _, val in fields.get(1, []):
        if isinstance(val, bytes):
            env_fields = _decode_fields(val)
            envelopes.append((_get_varint(env_fields, 1), _get_bytes(env_fields, 2)))
    return envelopes


def encode_fetch_wait(
    recipient_key: bytes,
    channel_id: bytes = b"",
    timeout_ms: int = 5000,
    limit: int = 0,
    device_id: bytes = b"",
) -> bytes:
    """Encode FetchWait: recipient=1, channel=2, timeout=3, limit=4, device=5."""
    return (
        _encode_bytes_field(1, recipient_key)
        + _encode_bytes_field(2, channel_id)
        + _encode_varint_field(3, timeout_ms)
        + _encode_varint_field(4, limit)
        + _encode_bytes_field(5, device_id)
    )


# decode_fetch_wait_response = decode_fetch_response (same message shape)
decode_fetch_wait_response = decode_fetch_response


def encode_ack(
    recipient_key: bytes,
    seq_up_to: int,
    *,
    channel_id: bytes = b"",
    device_id: bytes = b"",
) -> bytes:
    """Encode Ack: recipient=1, channel=2, seq_up_to=3, device=4."""
    return (
        _encode_bytes_field(1, recipient_key)
        + _encode_bytes_field(2, channel_id)
        + _encode_varint_field(3, seq_up_to)
        + _encode_bytes_field(4, device_id)
    )
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
# Channel
# ---------------------------------------------------------------------------


def encode_create_channel(peer_key: bytes) -> bytes:
    """Encode CreateChannel: peer identity key in field 1."""
    return _encode_bytes_field(1, peer_key)


def decode_create_channel_response(data: bytes) -> tuple[bytes, bool]:
    """Returns (channel_id, was_new) from fields 1/2."""
    fields = _decode_fields(data)
    return _get_bytes(fields, 1), _get_bool(fields, 2)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
# User
# ---------------------------------------------------------------------------


def encode_resolve_user(username: str) -> bytes:
    """Encode ResolveUser: username in field 1."""
    return _encode_string_field(1, username)


def decode_resolve_user_response(data: bytes) -> tuple[bytes, bytes]:
    """Returns (identity_key, inclusion_proof) from fields 1/2."""
    fields = _decode_fields(data)
    return _get_bytes(fields, 1), _get_bytes(fields, 2)


def encode_resolve_identity(identity_key: bytes) -> bytes:
    """Encode ResolveIdentity: identity key in field 1."""
    return _encode_bytes_field(1, identity_key)


def decode_resolve_identity_response(data: bytes) -> str:
    """Returns username (field 1, UTF-8)."""
    return _get_string(_decode_fields(data), 1)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
# Keys
# ---------------------------------------------------------------------------


def encode_upload_key_package(identity_key: bytes, package: bytes) -> bytes:
    """Encode UploadKeyPackage: identity_key=1, MLS key package=2."""
    return _encode_bytes_field(1, identity_key) + _encode_bytes_field(2, package)


def decode_upload_key_package_response(data: bytes) -> bytes:
    """Returns the key-package fingerprint (field 1)."""
    return _get_bytes(_decode_fields(data), 1)


def encode_fetch_key_package(identity_key: bytes) -> bytes:
    """Encode FetchKeyPackage: identity key in field 1."""
    return _encode_bytes_field(1, identity_key)


def decode_fetch_key_package_response(data: bytes) -> bytes:
    """Returns the MLS key package (field 1)."""
    return _get_bytes(_decode_fields(data), 1)


def encode_upload_hybrid_key(identity_key: bytes, hybrid_public_key: bytes) -> bytes:
    """Encode UploadHybridKey: identity_key=1, hybrid public key=2.

    No decode counterpart — callers ignore the response body.
    """
    return _encode_bytes_field(1, identity_key) + _encode_bytes_field(2, hybrid_public_key)


def encode_fetch_hybrid_key(identity_key: bytes) -> bytes:
    """Encode FetchHybridKey: identity key in field 1."""
    return _encode_bytes_field(1, identity_key)


def decode_fetch_hybrid_key_response(data: bytes) -> bytes:
    """Returns the hybrid public key (field 1)."""
    return _get_bytes(_decode_fields(data), 1)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Health
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def encode_health() -> bytes:
    """Encode a health-check request; the request message has no fields."""
    return bytes()
|
||||
|
||||
def decode_health_response(data: bytes) -> dict[str, str | int]:
    """Decode a health response into a plain dict.

    Fields: status=1, node_id=2, version=3, uptime_secs=4,
    storage_backend=5. Missing fields decode to "" / 0.
    """
    fields = _decode_fields(data)
    return {
        "status": _get_string(fields, 1),
        "node_id": _get_string(fields, 2),
        "version": _get_string(fields, 3),
        "uptime_secs": _get_varint(fields, 4),
        "storage_backend": _get_string(fields, 5),
    }
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Delete Account
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def encode_delete_account() -> bytes:
    """Encode a delete-account request; the request carries no payload."""
    empty_request = b""
    return empty_request
|
||||
|
||||
def decode_delete_account_response(data: bytes) -> bool:
    """Return the deletion success flag (field 1; absent decodes to False)."""
    return _get_bool(_decode_fields(data), 1)
|
||||
181
sdks/python/quicprochat/transport.py
Normal file
181
sdks/python/quicprochat/transport.py
Normal file
@@ -0,0 +1,181 @@
|
||||
"""QUIC transport using aioquic for the v2 wire format.

Opens a QUIC connection to the qpc server and provides ``rpc()`` to send
protobuf-encoded requests over individual QUIC streams, reading back the
framed response on the same stream.

aioquic is imported lazily so that the module can be loaded even when
aioquic is not installed (e.g. for tests that only exercise wire/proto).
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import ssl
|
||||
from typing import Any
|
||||
|
||||
from quicproquo.types import ConnectionError, TimeoutError
|
||||
from quicproquo.wire import HEADER_SIZE, encode_frame, decode_header
|
||||
|
||||
|
||||
def _make_protocol_class() -> type:
    """Build the protocol class at call time so aioquic is imported lazily."""
    from aioquic.asyncio.protocol import QuicConnectionProtocol
    from aioquic.quic.events import StreamDataReceived, QuicEvent

    class _QpqQuicProtocol(QuicConnectionProtocol):
        """QUIC protocol handler that dispatches stream data to waiting futures.

        Each RPC uses its own stream; incoming bytes are buffered per stream
        until one complete frame (header + payload) is available, then the
        frame is handed to the future registered via ``wait_for_stream``.
        """

        def __init__(self, *args: Any, **kwargs: Any) -> None:
            super().__init__(*args, **kwargs)
            # Per-stream accumulation buffers for partially received frames.
            self._stream_buffers: dict[int, bytearray] = {}
            # Per-stream futures awaiting the first complete frame.
            self._stream_waiters: dict[int, asyncio.Future[bytes]] = {}

        @staticmethod
        def _pop_complete_frame(buf: bytearray) -> bytes | None:
            """Remove and return one complete frame from *buf*, or None.

            Shared by ``quic_event_received`` and ``wait_for_stream`` so the
            header-parsing logic lives in exactly one place.
            """
            if len(buf) < HEADER_SIZE:
                return None
            _, _, payload_len = decode_header(bytes(buf[:HEADER_SIZE]))
            total = HEADER_SIZE + payload_len
            if len(buf) < total:
                return None
            frame = bytes(buf[:total])
            del buf[:total]
            return frame

        def quic_event_received(self, event: QuicEvent) -> None:
            if isinstance(event, StreamDataReceived):
                sid = event.stream_id
                buf = self._stream_buffers.setdefault(sid, bytearray())
                buf.extend(event.data)

                frame = self._pop_complete_frame(buf)
                if frame is not None:
                    waiter = self._stream_waiters.pop(sid, None)
                    if waiter and not waiter.done():
                        waiter.set_result(frame)

        def wait_for_stream(self, stream_id: int) -> asyncio.Future[bytes]:
            """Return a future resolved with the next complete frame on *stream_id*."""
            # get_running_loop() rather than the deprecated get_event_loop():
            # this method is only invoked from within the running event loop
            # (by QuicTransport.rpc).
            fut: asyncio.Future[bytes] = asyncio.get_running_loop().create_future()
            self._stream_waiters[stream_id] = fut

            # A complete frame may already be buffered if stream data raced
            # ahead of the waiter registration; resolve immediately if so.
            buf = self._stream_buffers.get(stream_id)
            if buf is not None:
                frame = self._pop_complete_frame(buf)
                if frame is not None and not fut.done():
                    fut.set_result(frame)

            return fut

    return _QpqQuicProtocol
|
||||
|
||||
|
||||
class QuicTransport:
    """Async QUIC transport for the qpc v2 wire format.

    Usage::

        transport = await QuicTransport.connect("127.0.0.1:5001")
        response_bytes = await transport.rpc(method_id, request_payload)
        transport.close()
    """

    def __init__(
        self,
        protocol: Any,
        connection: Any,
        request_timeout_ms: int,
    ) -> None:
        # `protocol` is the aioquic QuicConnectionProtocol built by
        # _make_protocol_class(); `connection` is kept alive only so the
        # underlying context manager is not garbage-collected mid-session.
        self._protocol = protocol
        self._connection = connection
        self._req_id = 0
        self._request_timeout = request_timeout_ms / 1000.0
        self._closed = False

    @staticmethod
    async def connect(
        addr: str,
        *,
        ca_cert_path: str = "",
        server_name: str = "",
        insecure_skip_verify: bool = False,
        connect_timeout_ms: int = 5_000,
        request_timeout_ms: int = 10_000,
    ) -> "QuicTransport":
        """Open a QUIC connection to the server.

        Parameters mirror ConnectOptions; raises ConnectionError on failure
        or connect timeout.
        """
        from aioquic.asyncio import connect as quic_connect
        from aioquic.quic.configuration import QuicConfiguration

        # Split "host:port"; a bare host defaults to port 5001.
        host, _, port_str = addr.rpartition(":")
        if not host:
            host = addr
            port_str = "5001"
        port = int(port_str)

        configuration = QuicConfiguration(
            is_client=True,
            # ALPN renamed from "qpq" to "qpc" with the quicprochat rename;
            # must match the server's advertised protocol.
            alpn_protocols=["qpc"],
        )

        if insecure_skip_verify:
            configuration.verify_mode = ssl.CERT_NONE
        elif ca_cert_path:
            configuration.load_verify_locations(ca_cert_path)

        if not server_name:
            server_name = host

        proto_cls = _make_protocol_class()

        # aioquic's ``connect`` is an @asynccontextmanager, not a coroutine:
        # awaiting it directly raises TypeError.  Enter it explicitly and
        # keep the manager object alive for the lifetime of the transport.
        ctx = quic_connect(
            host,
            port,
            configuration=configuration,
            create_protocol=proto_cls,
            server_name=server_name,
        )
        try:
            async with asyncio.timeout(connect_timeout_ms / 1000.0):
                protocol = await ctx.__aenter__()
        except (OSError, asyncio.TimeoutError) as exc:
            raise ConnectionError(f"failed to connect to {addr}: {exc}") from exc

        return QuicTransport(protocol, ctx, request_timeout_ms)

    async def rpc(self, method_id: int, payload: bytes) -> bytes:
        """Send an RPC request and return the response payload (protobuf bytes).

        Opens a new QUIC stream for each request.

        Raises
        ------
        ConnectionError
            If the transport has been closed.
        TimeoutError
            If no response arrives within the configured request timeout.
        """
        if self._closed:
            raise ConnectionError("transport is closed")

        self._req_id += 1
        req_id = self._req_id

        frame = encode_frame(method_id, req_id, payload)

        stream_id = self._protocol._quic.get_next_available_stream_id()
        # Register the waiter BEFORE sending so a fast response cannot race
        # past the registration.
        waiter = self._protocol.wait_for_stream(stream_id)

        self._protocol._quic.send_stream_data(stream_id, frame, end_stream=True)
        self._protocol.transmit()

        try:
            async with asyncio.timeout(self._request_timeout):
                response_frame = await waiter
        except asyncio.TimeoutError as exc:
            # Drop the stale waiter so a late frame cannot resolve a future
            # nobody is awaiting any more (the protocol would otherwise keep
            # it registered forever).
            self._protocol._stream_waiters.pop(stream_id, None)
            raise TimeoutError(
                f"RPC timeout for method {method_id} (req_id={req_id})"
            ) from exc

        # Strip the header; return only the protobuf payload bytes.
        _, _, resp_len = decode_header(response_frame)
        return response_frame[HEADER_SIZE : HEADER_SIZE + resp_len]

    @property
    def closed(self) -> bool:
        """True once close() has been called."""
        return self._closed

    def close(self) -> None:
        """Close the QUIC connection.  Idempotent."""
        if not self._closed:
            self._closed = True
            self._protocol._quic.close()
            # Flush the CONNECTION_CLOSE frame to the network.
            self._protocol.transmit()
|
||||
92
sdks/python/quicprochat/types.py
Normal file
92
sdks/python/quicprochat/types.py
Normal file
@@ -0,0 +1,92 @@
|
||||
"""Data types and exceptions for the quicprochat Python SDK."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Optional
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Exceptions
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class QpqError(Exception):
    """Base exception for quicprochat SDK errors.

    All SDK-specific exceptions (AuthError, TimeoutError, ConnectionError)
    derive from this class, so callers can catch it to handle any SDK failure.
    """
|
||||
|
||||
|
||||
class AuthError(QpqError):
    """OPAQUE authentication failed (bad credentials)."""
|
||||
|
||||
|
||||
class TimeoutError(QpqError):
    """An operation timed out waiting for a response.

    NOTE: shadows the builtin ``TimeoutError`` within this module; the
    package re-exports it under an aliased name for callers.
    """
|
||||
|
||||
|
||||
class ConnectionError(QpqError):
    """Could not connect to or communicate with the server.

    NOTE: shadows the builtin ``ConnectionError`` within this module; the
    package re-exports it under an aliased name for callers.
    """
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Connection options
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class ConnectOptions:
    """Options for connecting to a quicprochat server.

    Parameters
    ----------
    addr:
        Server address as ``host:port``.
    ca_cert_path:
        Path to PEM-encoded CA certificate. Required for production.
    server_name:
        TLS SNI server name (defaults to the host part of *addr*).
    insecure_skip_verify:
        Disable TLS certificate verification (development only).
    connect_timeout_ms:
        Connection timeout in milliseconds (default 5000).
    request_timeout_ms:
        Per-request timeout in milliseconds (default 10000).
    """

    addr: str
    ca_cert_path: str = ""
    server_name: str = ""
    insecure_skip_verify: bool = False
    connect_timeout_ms: int = 5_000
    request_timeout_ms: int = 10_000
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Response types
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class Envelope:
    """A received message envelope."""

    # Message sequence number (presumably used by the fetch/ack delivery
    # flow -- confirm against the server's delivery semantics).
    seq: int
    # Opaque message payload; the SDK does not interpret these bytes.
    data: bytes
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class ChannelResult:
    """Result of creating or joining a channel."""

    # Server-side identifier of the channel.
    channel_id: bytes
    # True when the channel was newly created, False when it already existed.
    was_new: bool
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class HealthInfo:
    """Server health check response.

    Field meanings mirror the Health RPC response (proto fields 1-5).
    """

    # Overall health status string reported by the server.
    status: str
    # Identifier of the responding server node.
    node_id: str = ""
    # Server software version string.
    version: str = ""
    # Seconds the server process has been running.
    uptime_secs: int = 0
    # Name of the storage backend in use.
    storage_backend: str = ""
|
||||
73
sdks/python/quicprochat/wire.py
Normal file
73
sdks/python/quicprochat/wire.py
Normal file
@@ -0,0 +1,73 @@
|
||||
"""v2 wire format: ``[method_id:u16][req_id:u32][len:u32][protobuf]``.
|
||||
|
||||
Each RPC is sent over its own QUIC stream. The response uses the same
|
||||
framing on the same stream.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import struct
|
||||
|
||||
# Header: method_id (u16) + req_id (u32) + length (u32) = 10 bytes.
HEADER_FMT = "!HII"  # network byte-order: u16 + u32 + u32
HEADER_SIZE = struct.calcsize(HEADER_FMT)

# Method IDs (mirrors quicprochat-proto/src/lib.rs::method_ids).
# These must stay in sync with the Rust server's dispatch table.
# Auth (100-103)
OPAQUE_REGISTER_START = 100
OPAQUE_REGISTER_FINISH = 101
OPAQUE_LOGIN_START = 102
OPAQUE_LOGIN_FINISH = 103

# Delivery (200-205)
ENQUEUE = 200
FETCH = 201
FETCH_WAIT = 202
PEEK = 203
ACK = 204
BATCH_ENQUEUE = 205

# Keys (300-304)
UPLOAD_KEY_PACKAGE = 300
FETCH_KEY_PACKAGE = 301
UPLOAD_HYBRID_KEY = 302
FETCH_HYBRID_KEY = 303
FETCH_HYBRID_KEYS = 304

# Channel (400)
CREATE_CHANNEL = 400

# User (500-501)
RESOLVE_USER = 500
RESOLVE_IDENTITY = 501

# Blob (600-601)
UPLOAD_BLOB = 600
DOWNLOAD_BLOB = 601

# Device (700-702)
REGISTER_DEVICE = 700
LIST_DEVICES = 701
REVOKE_DEVICE = 702

# P2P (800-802)
PUBLISH_ENDPOINT = 800
RESOLVE_ENDPOINT = 801
HEALTH = 802

# Delete account (950)
DELETE_ACCOUNT = 950
|
||||
|
||||
|
||||
def encode_frame(method_id: int, req_id: int, payload: bytes) -> bytes:
    """Build a wire frame: 10-byte header followed by the protobuf payload."""
    return struct.pack(HEADER_FMT, method_id, req_id, len(payload)) + payload
|
||||
|
||||
|
||||
def decode_header(data: bytes) -> tuple[int, int, int]:
    """Decode a wire frame header, returning (method_id, req_id, payload_len).

    Raises ``ValueError`` when *data* is shorter than ``HEADER_SIZE``.
    """
    if len(data) < HEADER_SIZE:
        raise ValueError(f"header too short: {len(data)} < {HEADER_SIZE}")
    # struct.unpack already yields a 3-tuple matching "!HII".
    return struct.unpack(HEADER_FMT, data[:HEADER_SIZE])
|
||||
Reference in New Issue
Block a user