forked from lavina/lavina
1
0
Fork 0
This commit is contained in:
Nikita Vilunov 2024-05-03 00:57:07 +02:00
parent 9a09ff717e
commit 8a14a4d687
10 changed files with 275 additions and 35 deletions

52
Cargo.lock generated
View File

@ -204,7 +204,7 @@ dependencies = [
"http-body 0.4.6",
"hyper 0.14.28",
"itoa",
"matchit",
"matchit 0.7.3",
"memchr",
"mime",
"percent-encoding",
@ -709,8 +709,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c"
dependencies = [
"cfg-if",
"js-sys",
"libc",
"wasi",
"wasm-bindgen",
]
[[package]]
@ -1097,8 +1099,13 @@ dependencies = [
"anyhow",
"argon2",
"chrono",
"mgmt-api",
"opentelemetry",
"prometheus",
"rand_core",
"reqwest",
"reqwest-middleware",
"reqwest-tracing",
"serde",
"sqlx",
"tokio",
@ -1165,6 +1172,12 @@ version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94"
[[package]]
name = "matchit"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "540f1c43aed89909c0cc0cc604e3bb2f7e7a341a3728a9e6cfe760e733cd11ed"
[[package]]
name = "md-5"
version = "0.10.6"
@ -1764,9 +1777,9 @@ checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56"
[[package]]
name = "reqwest"
version = "0.12.3"
version = "0.12.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3e6cc1e89e689536eb5aeede61520e874df5a4707df811cd5da4aa5fbb2aae19"
checksum = "566cafdd92868e0939d3fb961bd0dc25fcfaaed179291093b3d43e6b3150ea10"
dependencies = [
"base64 0.22.0",
"bytes",
@ -1797,6 +1810,39 @@ dependencies = [
"winreg",
]
[[package]]
name = "reqwest-middleware"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0209efb52486ad88136190094ee214759ef7507068b27992256ed6610eb71a01"
dependencies = [
"anyhow",
"async-trait",
"http 1.1.0",
"reqwest",
"serde",
"thiserror",
"tower-service",
]
[[package]]
name = "reqwest-tracing"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b253954a1979e02eabccd7e9c3d61d8f86576108baa160775e7f160bb4e800a3"
dependencies = [
"anyhow",
"async-trait",
"getrandom",
"http 1.1.0",
"matchit 0.8.2",
"opentelemetry",
"reqwest",
"reqwest-middleware",
"tracing",
"tracing-opentelemetry",
]
[[package]]
name = "ring"
version = "0.17.8"

View File

@ -32,6 +32,7 @@ lavina-core = { path = "crates/lavina-core" }
tracing-subscriber = "0.3.16"
sasl = { path = "crates/sasl" }
chrono = "0.4.37"
reqwest = { version = "0.12.0", default-features = false, features = ["json"] }
[package]
name = "lavina"
@ -69,4 +70,4 @@ chrono.workspace = true
[dev-dependencies]
assert_matches.workspace = true
regex = "1.7.1"
reqwest = { version = "0.12.0", default-features = false }
reqwest.workspace = true

View File

@ -1,15 +0,0 @@
[telemetry]
listen_on = "127.0.0.1:8080"
[irc]
listen_on = "127.0.0.1:6667"
server_name = "irc.localhost"
[xmpp]
listen_on = "127.0.0.1:5222"
cert = "./certs/xmpp.pem"
key = "./certs/xmpp.key"
hostname = "localhost"
[storage]
db_path = "db.sqlite"

View File

@ -13,3 +13,8 @@ prometheus.workspace = true
chrono.workspace = true
argon2 = { version = "0.5.3" }
rand_core = { version = "0.6.4", features = ["getrandom"] }
reqwest.workspace = true
reqwest-middleware = { version = "0.3", features = ["json"] }
opentelemetry = "0.22.0"
mgmt-api = { path = "../mgmt-api" }
reqwest-tracing = { version = "0.5", features = ["opentelemetry_0_22"] }

View File

@ -0,0 +1,69 @@
use anyhow::{anyhow, Result};
use reqwest::Client;
use reqwest_middleware::{ClientBuilder, ClientWithMiddleware};
use reqwest_tracing::{DefaultSpanBackend, TracingMiddleware};
use serde::{Deserialize, Serialize};
use std::net::SocketAddr;
use std::sync::Arc;
/// Addresses of all cluster nodes; a node's id is its index in this list
/// (see `LavinaClient::send_room_message`).
type Addresses = Vec<SocketAddr>;
/// Clustering configuration, deserialized from the server config file.
#[derive(Deserialize, Debug, Clone)]
pub struct ClusterConfig {
/// Static ownership assignments for rooms and players.
pub metadata: ClusterMetadata,
/// Socket addresses of all cluster nodes, indexed by node id.
pub addresses: Addresses,
}
/// Static description of which node owns which rooms and players.
#[derive(Deserialize, Debug, Clone)]
pub struct ClusterMetadata {
/// Id of the current node; compared against the `*_owner` fields to decide
/// whether an entity is handled locally or on a remote node.
pub node_id: u32,
/// Owns all rooms and players in the cluster.
pub main_owner: u32,
/// Owns the room `test`.
pub test_owner: u32,
/// Owns the room `test2`.
pub test2_owner: u32,
}
/// HTTP client for intra-cluster requests to other nodes.
///
/// Cheap to clone: the address list is shared behind an `Arc` and the
/// middleware-wrapped client is itself clonable.
#[derive(Clone)]
pub struct LavinaClient {
/// Cluster node addresses, indexed by node id.
addresses: Arc<Addresses>,
/// reqwest client wrapped with tracing middleware.
client: ClientWithMiddleware,
}
/// Payload of the `/cluster/rooms/add_message` intra-cluster endpoint.
///
/// Borrowed fields keep serialization allocation-free on the sending side.
#[derive(Serialize, Deserialize, Debug)]
pub struct SendMessageReq<'a> {
/// Room to deliver the message to.
pub room_id: &'a str,
/// Id of the sending player.
pub player_id: &'a str,
/// Message body.
pub message: &'a str,
/// Creation timestamp in RFC 3339 format (parsed with
/// `chrono::DateTime::parse_from_rfc3339` on the receiving side).
pub created_at: &'a str,
}
impl LavinaClient {
pub fn new(addresses: Addresses) -> Self {
let client = ClientBuilder::new(Client::new()).with(TracingMiddleware::<DefaultSpanBackend>::new()).build();
Self {
addresses: Arc::new(addresses),
client,
}
}
#[tracing::instrument(skip(self, req), name = "LavinaClient::send_room_message")]
pub async fn send_room_message(&self, node_id: u32, req: SendMessageReq<'_>) -> Result<()> {
tracing::info!("Sending a message to a room on a remote node");
let Some(address) = self.addresses.get(node_id as usize) else {
tracing::error!("Failed");
return Err(anyhow!("Unknown node"));
};
match self.client.post(format!("http://{}/cluster/rooms/add_message", address)).json(&req).send().await {
Ok(_) => {
tracing::info!("Message sent");
Ok(())
}
Err(e) => {
tracing::error!("Failed to send message: {e:?}");
Err(e.into())
}
}
}
}

View File

@ -1,6 +1,8 @@
//! Domain definitions and implementation of common chat logic.
use crate::clustering::{ClusterConfig, LavinaClient};
use anyhow::Result;
use prometheus::Registry as MetricsRegistry;
use std::sync::Arc;
use crate::dialog::DialogRegistry;
use crate::player::PlayerRegistry;
@ -8,6 +10,7 @@ use crate::repo::Storage;
use crate::room::RoomRegistry;
pub mod auth;
pub mod clustering;
pub mod dialog;
pub mod player;
pub mod prelude;
@ -25,11 +28,23 @@ pub struct LavinaCore {
}
impl LavinaCore {
pub async fn new(mut metrics: MetricsRegistry, storage: Storage) -> Result<LavinaCore> {
pub async fn new(
mut metrics: MetricsRegistry,
cluster_config: ClusterConfig,
storage: Storage,
) -> Result<LavinaCore> {
// TODO shutdown all services in reverse order on error
let client = LavinaClient::new(cluster_config.addresses.clone());
let rooms = RoomRegistry::new(&mut metrics, storage.clone())?;
let dialogs = DialogRegistry::new(storage.clone());
let players = PlayerRegistry::empty(rooms.clone(), dialogs.clone(), storage.clone(), &mut metrics)?;
let players = PlayerRegistry::empty(
rooms.clone(),
dialogs.clone(),
storage.clone(),
&mut metrics,
Arc::new(cluster_config.metadata),
client,
)?;
dialogs.set_players(players.clone()).await;
Ok(LavinaCore {
players,

View File

@ -17,6 +17,7 @@ use tokio::sync::mpsc::{channel, Receiver, Sender};
use tokio::sync::RwLock;
use tracing::{Instrument, Span};
use crate::clustering::{ClusterMetadata, LavinaClient, SendMessageReq};
use crate::dialog::DialogRegistry;
use crate::prelude::*;
use crate::repo::Storage;
@ -253,6 +254,8 @@ impl PlayerRegistry {
dialogs: DialogRegistry,
storage: Storage,
metrics: &mut MetricsRegistry,
cluster_metadata: Arc<ClusterMetadata>,
cluster_client: LavinaClient,
) -> Result<PlayerRegistry> {
let metric_active_players = IntGauge::new("chat_players_active", "Number of alive player actors")?;
metrics.register(Box::new(metric_active_players.clone()))?;
@ -260,6 +263,8 @@ impl PlayerRegistry {
room_registry,
dialogs,
storage,
cluster_metadata,
cluster_client,
players: HashMap::new(),
metric_active_players,
};
@ -311,6 +316,8 @@ impl PlayerRegistry {
id.clone(),
inner.room_registry.clone(),
inner.dialogs.clone(),
inner.cluster_metadata.clone(),
inner.cluster_client.clone(),
inner.storage.clone(),
)
.await;
@ -345,29 +352,40 @@ struct PlayerRegistryInner {
room_registry: RoomRegistry,
dialogs: DialogRegistry,
storage: Storage,
cluster_metadata: Arc<ClusterMetadata>,
cluster_client: LavinaClient,
/// Active player actors.
players: HashMap<PlayerId, (PlayerHandle, JoinHandle<Player>)>,
metric_active_players: IntGauge,
}
enum RoomRef {
Local(RoomHandle),
Remote { node_id: u32 },
}
/// Player actor inner state representation.
struct Player {
player_id: PlayerId,
storage_id: u32,
connections: AnonTable<Sender<ConnectionMessage>>,
my_rooms: HashMap<RoomId, RoomHandle>,
my_rooms: HashMap<RoomId, RoomRef>,
banned_from: HashSet<RoomId>,
rx: Receiver<(ActorCommand, Span)>,
handle: PlayerHandle,
rooms: RoomRegistry,
dialogs: DialogRegistry,
storage: Storage,
cluster_metadata: Arc<ClusterMetadata>,
cluster_client: LavinaClient,
}
impl Player {
async fn launch(
player_id: PlayerId,
rooms: RoomRegistry,
dialogs: DialogRegistry,
cluster_metadata: Arc<ClusterMetadata>,
cluster_client: LavinaClient,
storage: Storage,
) -> (PlayerHandle, JoinHandle<Player>) {
let (tx, rx) = channel(32);
@ -388,6 +406,8 @@ impl Player {
rooms,
dialogs,
storage,
cluster_metadata,
cluster_client,
};
let fiber = tokio::task::spawn(player.main_loop());
(handle_clone, fiber)
@ -396,11 +416,20 @@ impl Player {
async fn main_loop(mut self) -> Self {
let rooms = self.storage.get_rooms_of_a_user(self.storage_id).await.unwrap();
for room_id in rooms {
let room = self.rooms.get_room(&room_id).await;
if let Some(room) = room {
self.my_rooms.insert(room_id, room);
let node = match &**room_id.as_inner() {
"aaaaa" => self.cluster_metadata.test_owner,
"test" => self.cluster_metadata.test2_owner,
_ => self.cluster_metadata.main_owner,
};
if node == self.cluster_metadata.node_id {
let room = self.rooms.get_room(&room_id).await;
if let Some(room) = room {
self.my_rooms.insert(room_id, RoomRef::Local(room));
} else {
tracing::error!("Room #{room_id:?} not found");
}
} else {
tracing::error!("Room #{room_id:?} not found");
self.my_rooms.insert(room_id, RoomRef::Remote { node_id: node });
}
}
while let Some(cmd) = self.rx.recv().await {
@ -521,7 +550,8 @@ impl Player {
};
room.add_member(&self.player_id, self.storage_id).await;
room.subscribe(&self.player_id, self.handle.clone()).await;
self.my_rooms.insert(room_id.clone(), room.clone());
// self.my_rooms.insert(room_id.clone(), room.clone());
panic!();
let room_info = room.get_room_info().await;
let update = Updates::RoomJoined {
room_id,
@ -535,8 +565,9 @@ impl Player {
async fn leave_room(&mut self, connection_id: ConnectionId, room_id: RoomId) {
let room = self.my_rooms.remove(&room_id);
if let Some(room) = room {
room.unsubscribe(&self.player_id).await;
room.remove_member(&self.player_id, self.storage_id).await;
panic!();
// room.unsubscribe(&self.player_id).await;
// room.remove_member(&self.player_id, self.storage_id).await;
}
let update = Updates::RoomLeft {
room_id,
@ -552,7 +583,20 @@ impl Player {
return SendMessageResult::NoSuchRoom;
};
let created_at = chrono::Utc::now();
room.send_message(&self.player_id, body.clone(), created_at.clone()).await;
match room {
RoomRef::Local(room) => {
room.send_message(&self.player_id, body.clone(), created_at.clone()).await;
}
RoomRef::Remote { node_id } => {
let req = SendMessageReq {
room_id: room_id.as_inner(),
player_id: self.player_id.as_inner(),
message: &*body,
created_at: &*created_at.to_rfc3339(),
};
self.cluster_client.send_room_message(*node_id, req).await.unwrap();
}
}
let update = Updates::NewMessage {
room_id,
author_id: self.player_id.clone(),
@ -569,7 +613,8 @@ impl Player {
tracing::info!("no room found");
return;
};
room.set_topic(&self.player_id, new_topic.clone()).await;
// room.set_topic(&self.player_id, new_topic.clone()).await;
todo!();
let update = Updates::RoomTopicChanged { room_id, new_topic };
self.broadcast_update(update, connection_id).await;
}
@ -577,8 +622,18 @@ impl Player {
#[tracing::instrument(skip(self), name = "Player::get_rooms")]
async fn get_rooms(&self) -> Vec<RoomInfo> {
let mut response = vec![];
for (_, handle) in &self.my_rooms {
response.push(handle.get_room_info().await);
for (room_id, handle) in &self.my_rooms {
if let RoomRef::Local(handle) = handle {
response.push(handle.get_room_info().await);
} else {
let room_info = RoomInfo {
id: room_id.clone(),
topic: "unknown".into(),
members: vec![],
};
response.push(room_info);
// TODO
}
}
response
}

View File

@ -35,6 +35,10 @@ struct TestScope<'a> {
buffer: Vec<u8>,
}
/// Returns the local (namespace-stripped) element name of an XML start tag
/// as a `&str`.
///
/// Panics if the name is not valid UTF-8 — acceptable here since this is a
/// test helper operating on fixture data.
fn element_name<'a>(event: &quick_xml::events::BytesStart<'a>) -> &'a str {
std::str::from_utf8(event.local_name().into_inner()).unwrap()
}
impl<'a> TestScope<'a> {
fn new(stream: &mut TcpStream) -> TestScope<'_> {
let (reader, writer) = stream.split();
@ -56,7 +60,7 @@ impl<'a> TestScope<'a> {
}
async fn expect_starttls_required(&mut self) -> Result<()> {
assert_matches!(self.next_xml_event().await?, Event::Start(b) => assert_eq!(b.local_name().into_inner(), b"features"));
assert_matches!(self.next_xml_event().await?, Event::Start(b) => assert_eq!(element_name(&b), "features"));
assert_matches!(self.next_xml_event().await?, Event::Start(b) => assert_eq!(b.local_name().into_inner(), b"starttls"));
assert_matches!(self.next_xml_event().await?, Event::Empty(b) => assert_eq!(b.local_name().into_inner(), b"required"));
assert_matches!(self.next_xml_event().await?, Event::End(b) => assert_eq!(b.local_name().into_inner(), b"starttls"));

View File

@ -1,3 +1,4 @@
use chrono::Utc;
use std::convert::Infallible;
use std::net::SocketAddr;
@ -8,11 +9,16 @@ use hyper::server::conn::http1;
use hyper::service::service_fn;
use hyper::{Method, Request, Response, StatusCode};
use hyper_util::rt::TokioIo;
use opentelemetry::propagation::Extractor;
use prometheus::{Encoder, Registry as MetricsRegistry, TextEncoder};
use serde::{Deserialize, Serialize};
use tokio::net::TcpListener;
use tracing::Span;
use tracing_opentelemetry::OpenTelemetrySpanExt;
use lavina_core::auth::UpdatePasswordResult::PasswordUpdated;
use lavina_core::auth::{Authenticator, UpdatePasswordResult};
use lavina_core::clustering::SendMessageReq;
use lavina_core::player::{PlayerId, PlayerRegistry, SendMessageResult};
use lavina_core::prelude::*;
use lavina_core::repo::Storage;
@ -76,12 +82,29 @@ async fn main_loop(
Ok(())
}
#[tracing::instrument(skip_all, name = "route")]
async fn route(
registry: MetricsRegistry,
core: LavinaCore,
storage: Storage,
request: Request<hyper::body::Incoming>,
) -> HttpResult<Response<Full<Bytes>>> {
// Adapts an incoming HTTP request to OpenTelemetry's `Extractor` trait so the
// global text-map propagator can read distributed-tracing context from its
// headers (used below to parent this request's span to the caller's trace).
struct HttpReqExtractor<'a, T> {
// Request whose headers are consulted during context extraction.
req: &'a Request<T>,
}
impl<'a, T> Extractor for HttpReqExtractor<'a, T> {
// Returns the value of header `key`; non-UTF-8 values are treated as absent.
fn get(&self, key: &str) -> Option<&str> {
self.req.headers().get(key).and_then(|v| v.to_str().ok())
}
// Lists all header names present on the request.
fn keys(&self) -> Vec<&str> {
self.req.headers().keys().map(|k| k.as_str()).collect()
}
}
let ctx = opentelemetry::global::get_text_map_propagator(|pp| pp.extract(&HttpReqExtractor { req: &request }));
Span::current().set_parent(ctx);
let res = match (request.method(), request.uri().path()) {
(&Method::GET, "/metrics") => endpoint_metrics(registry),
(&Method::GET, "/rooms") => endpoint_rooms(core.rooms).await,
@ -90,6 +113,7 @@ async fn route(
(&Method::POST, paths::SET_PASSWORD) => endpoint_set_password(request, storage).await.or5xx(),
(&Method::POST, rooms::paths::SEND_MESSAGE) => endpoint_send_room_message(request, core).await.or5xx(),
(&Method::POST, rooms::paths::SET_TOPIC) => endpoint_set_room_topic(request, core).await.or5xx(),
(&Method::POST, "/cluster/rooms/add_message") => endpoint_cluster_add_message(request, core).await.or5xx(),
_ => endpoint_not_found(),
};
Ok(res)
@ -163,6 +187,7 @@ async fn endpoint_set_password(
Ok(empty_204_request())
}
#[tracing::instrument(skip_all, name = "LavinaClient::endpoint_send_room_message")]
async fn endpoint_send_room_message(
request: Request<hyper::body::Incoming>,
mut core: LavinaCore,
@ -204,6 +229,36 @@ async fn endpoint_set_room_topic(
Ok(empty_204_request())
}
/// Intra-cluster endpoint: a remote node asks this node to add a message to a
/// room it owns. Counterpart of `LavinaClient::send_room_message`.
///
/// Responds 400 on malformed JSON or timestamp, 404-style errors for unknown
/// room/player ids, and 204 on success.
#[tracing::instrument(skip_all, name = "endpoint_cluster_add_message")]
async fn endpoint_cluster_add_message(
    request: Request<hyper::body::Incoming>,
    core: LavinaCore,
) -> Result<Response<Full<Bytes>>> {
    let body = request.collect().await?.to_bytes();
    let Ok(req) = serde_json::from_slice::<SendMessageReq>(&body[..]) else {
        return Ok(malformed_request());
    };
    tracing::info!("Incoming request: {:?}", &req);
    // Timestamps travel as RFC 3339 strings; reject anything else up front.
    let Ok(created_at) = chrono::DateTime::parse_from_rfc3339(req.created_at) else {
        tracing::warn!("Malformed created_at timestamp: {:?}", req.created_at);
        return Ok(malformed_request());
    };
    let Ok(room_id) = RoomId::from(req.room_id) else {
        tracing::warn!("Invalid room id: {:?}", req.room_id);
        return Ok(room_not_found());
    };
    let Ok(player_id) = PlayerId::from(req.player_id) else {
        tracing::warn!("Invalid player id: {:?}", req.player_id);
        return Ok(player_not_found());
    };
    let Some(room_handle) = core.rooms.get_room(&room_id).await else {
        tracing::warn!("Room not found: {:?}", room_id);
        return Ok(room_not_found());
    };
    room_handle.send_message(&player_id, req.message.into(), created_at.to_utc()).await;
    Ok(empty_204_request())
}
fn endpoint_not_found() -> Response<Full<Bytes>> {
let payload = ErrorResponse {
code: errors::INVALID_PATH,

View File

@ -6,8 +6,10 @@ use std::path::Path;
use clap::Parser;
use figment::providers::Format;
use figment::{providers::Toml, Figment};
use opentelemetry::global::set_text_map_propagator;
use opentelemetry::KeyValue;
use opentelemetry_otlp::WithExportConfig;
use opentelemetry_sdk::propagation::TraceContextPropagator;
use opentelemetry_sdk::trace::{BatchConfig, RandomIdGenerator, Sampler};
use opentelemetry_sdk::{runtime, Resource};
use opentelemetry_semantic_conventions::resource::SERVICE_NAME;
@ -28,6 +30,7 @@ struct ServerConfig {
irc: projection_irc::ServerConfig,
xmpp: projection_xmpp::ServerConfig,
storage: lavina_core::repo::StorageConfig,
cluster: lavina_core::clustering::ClusterConfig,
tracing: Option<TracingConfig>,
}
@ -63,11 +66,12 @@ async fn main() -> Result<()> {
irc: irc_config,
xmpp: xmpp_config,
storage: storage_config,
cluster: cluster_config,
tracing: _,
} = config;
let metrics = MetricsRegistry::new();
let storage = Storage::open(storage_config).await?;
let core = LavinaCore::new(metrics.clone(), storage.clone()).await?;
let core = LavinaCore::new(metrics.clone(), cluster_config, storage.clone()).await?;
let telemetry_terminator = http::launch(telemetry_config, metrics.clone(), core.clone(), storage.clone()).await?;
let irc = projection_irc::launch(irc_config, core.clone(), metrics.clone(), storage.clone()).await?;
let xmpp = projection_xmpp::launch(xmpp_config, core.clone(), metrics.clone(), storage.clone()).await?;
@ -139,6 +143,7 @@ fn set_up_logging(tracing_config: &Option<TracingConfig>) -> Result<()> {
.with_exporter(trace_exporter)
.install_batch(runtime::Tokio)?;
let subscriber = subscriber.with(OpenTelemetryLayer::new(tracer));
set_text_map_propagator(TraceContextPropagator::new());
targets.with_subscriber(subscriber).try_init()?;
} else {
targets.with_subscriber(subscriber).try_init()?;