scalability: initial support for remote rooms (#61)

Reviewed-on: lavina/lavina#61
Nikita Vilunov committed 2024-05-10 20:44:24 +00:00
parent 3b454ad7cd
commit 6749103726
21 changed files with 666 additions and 54 deletions

.gitignore (vendored, 2 lines changed)

@@ -1,4 +1,4 @@
 /target
-/db.sqlite
+*.sqlite
 .idea/
 .DS_Store

Cargo.lock (generated, 52 lines changed)

@@ -204,7 +204,7 @@ dependencies = [
  "http-body 0.4.6",
  "hyper 0.14.28",
  "itoa",
- "matchit",
+ "matchit 0.7.3",
  "memchr",
  "mime",
  "percent-encoding",
@@ -709,8 +709,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c"
 dependencies = [
  "cfg-if",
+ "js-sys",
  "libc",
  "wasi",
+ "wasm-bindgen",
 ]

 [[package]]
@@ -1097,8 +1099,13 @@ dependencies = [
  "anyhow",
  "argon2",
  "chrono",
+ "mgmt-api",
+ "opentelemetry",
  "prometheus",
  "rand_core",
+ "reqwest",
+ "reqwest-middleware",
+ "reqwest-tracing",
  "serde",
  "sqlx",
  "tokio",
@@ -1165,6 +1172,12 @@ version = "0.7.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94"

+[[package]]
+name = "matchit"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "540f1c43aed89909c0cc0cc604e3bb2f7e7a341a3728a9e6cfe760e733cd11ed"
+
 [[package]]
 name = "md-5"
 version = "0.10.6"
@@ -1764,9 +1777,9 @@ checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56"
 [[package]]
 name = "reqwest"
-version = "0.12.3"
+version = "0.12.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3e6cc1e89e689536eb5aeede61520e874df5a4707df811cd5da4aa5fbb2aae19"
+checksum = "566cafdd92868e0939d3fb961bd0dc25fcfaaed179291093b3d43e6b3150ea10"
 dependencies = [
  "base64 0.22.0",
  "bytes",
@@ -1797,6 +1810,39 @@ dependencies = [
  "winreg",
 ]

+[[package]]
+name = "reqwest-middleware"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0209efb52486ad88136190094ee214759ef7507068b27992256ed6610eb71a01"
+dependencies = [
+ "anyhow",
+ "async-trait",
+ "http 1.1.0",
+ "reqwest",
+ "serde",
+ "thiserror",
+ "tower-service",
+]
+
+[[package]]
+name = "reqwest-tracing"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b253954a1979e02eabccd7e9c3d61d8f86576108baa160775e7f160bb4e800a3"
+dependencies = [
+ "anyhow",
+ "async-trait",
+ "getrandom",
+ "http 1.1.0",
+ "matchit 0.8.2",
+ "opentelemetry",
+ "reqwest",
+ "reqwest-middleware",
+ "tracing",
+ "tracing-opentelemetry",
+]
+
 [[package]]
 name = "ring"
 version = "0.17.8"


@@ -32,6 +32,7 @@ lavina-core = { path = "crates/lavina-core" }
 tracing-subscriber = "0.3.16"
 sasl = { path = "crates/sasl" }
 chrono = "0.4.37"
+reqwest = { version = "0.12.0", default-features = false, features = ["json"] }

 [package]
 name = "lavina"
@@ -69,4 +70,4 @@ chrono.workspace = true
 [dev-dependencies]
 assert_matches.workspace = true
 regex = "1.7.1"
-reqwest = { version = "0.12.0", default-features = false }
+reqwest.workspace = true

config.0.toml (new file, 30 lines)

@@ -0,0 +1,30 @@
[telemetry]
listen_on = "127.0.0.1:8080"

[irc]
listen_on = "127.0.0.1:6667"
server_name = "irc.localhost"

[xmpp]
listen_on = "127.0.0.1:5222"
cert = "./certs/xmpp.pem"
key = "./certs/xmpp.key"
hostname = "localhost"

[storage]
db_path = "db.0.sqlite"

[cluster]
addresses = [
"127.0.0.1:8080",
"127.0.0.1:8081",
]

[cluster.metadata]
node_id = 0
main_owner = 0
rooms = { aaaaa = 1, test = 0 }

[tracing]
endpoint = "http://localhost:4317"
service_name = "lavina-0"

config.1.toml (new file, 30 lines)

@@ -0,0 +1,30 @@
[telemetry]
listen_on = "127.0.0.1:8081"

[irc]
listen_on = "127.0.0.1:6668"
server_name = "irc.localhost"

[xmpp]
listen_on = "127.0.0.1:5223"
cert = "./certs/xmpp.pem"
key = "./certs/xmpp.key"
hostname = "localhost"

[storage]
db_path = "db.1.sqlite"

[cluster]
addresses = [
"127.0.0.1:8080",
"127.0.0.1:8081",
]

[cluster.metadata]
node_id = 1
main_owner = 0
rooms = { aaaaa = 1, test = 0 }

[tracing]
endpoint = "http://localhost:4317"
service_name = "lavina-1"
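
Taken together, the two configs above shard room ownership: `rooms` pins specific rooms to nodes, and `main_owner` takes everything unlisted. A sketch of the resolution rule (the same logic appears as `Player::room_location` in player.rs further down; this standalone version is for illustration only):

    // Illustrative only, not part of the commit.
    // With node_id = 0: "aaaaa" -> node 1 (remote), "test" -> node 0 (local),
    // and any room absent from the map -> main_owner = 0 (local).
    fn owner(meta: &ClusterMetadata, room_name: &str) -> u32 {
        meta.rooms.get(room_name).copied().unwrap_or(meta.main_owner)
    }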


@@ -13,3 +13,11 @@ hostname = "localhost"
 [storage]
 db_path = "db.sqlite"
+
+[cluster]
+addresses = []
+
+[cluster.metadata]
+node_id = 0
+main_owner = 0
+rooms = {}


@@ -13,3 +13,8 @@ prometheus.workspace = true
 chrono.workspace = true
 argon2 = { version = "0.5.3" }
 rand_core = { version = "0.6.4", features = ["getrandom"] }
+reqwest.workspace = true
+reqwest-middleware = { version = "0.3", features = ["json"] }
+opentelemetry = "0.22.0"
+mgmt-api = { path = "../mgmt-api" }
+reqwest-tracing = { version = "0.5", features = ["opentelemetry_0_22"] }


@@ -0,0 +1,60 @@
use anyhow::{anyhow, Result};
use reqwest::Client;
use reqwest_middleware::{ClientBuilder, ClientWithMiddleware};
use reqwest_tracing::{DefaultSpanBackend, TracingMiddleware};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::net::SocketAddr;
use std::sync::Arc;

pub mod broadcast;
pub mod room;

type Addresses = Vec<SocketAddr>;

#[derive(Deserialize, Debug, Clone)]
pub struct ClusterConfig {
    pub metadata: ClusterMetadata,
    pub addresses: Addresses,
}

#[derive(Deserialize, Debug, Clone)]
pub struct ClusterMetadata {
    /// The node id of the current node.
    pub node_id: u32,
    /// Owns all rooms in the cluster except the ones specified in `rooms`.
    pub main_owner: u32,
    pub rooms: HashMap<String, u32>,
}

#[derive(Clone)]
pub struct LavinaClient {
    addresses: Arc<Addresses>,
    client: ClientWithMiddleware,
}

impl LavinaClient {
    pub fn new(addresses: Addresses) -> Self {
        let client = ClientBuilder::new(Client::new()).with(TracingMiddleware::<DefaultSpanBackend>::new()).build();
        Self {
            addresses: Arc::new(addresses),
            client,
        }
    }

    async fn send_request(&self, node_id: u32, path: &str, req: impl Serialize) -> Result<()> {
        let Some(address) = self.addresses.get(node_id as usize) else {
            return Err(anyhow!("Unknown node"));
        };
        match self.client.post(format!("http://{}{}", address, path)).json(&req).send().await {
            Ok(res) => {
                if res.status().is_server_error() || res.status().is_client_error() {
                    tracing::error!("Cluster request failed: {:?}", res);
                    return Err(anyhow!("Server error"));
                }
                Ok(())
            }
            Err(e) => Err(e.into()),
        }
    }
}
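
Wiring is straightforward: the node's address list is indexed by node id, so `send_request(node_id, ...)` resolves a peer by position in `addresses`. A hedged usage sketch, where `cluster_config` stands in for a parsed `[cluster]` section (this is exactly what `LavinaCore::new` does further down):

    let client = LavinaClient::new(cluster_config.addresses.clone());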


@@ -0,0 +1,62 @@
use std::collections::{HashMap, HashSet};
use std::sync::Arc;

use chrono::{DateTime, Utc};
use tokio::sync::Mutex;

use crate::player::{PlayerId, PlayerRegistry, Updates};
use crate::prelude::Str;
use crate::room::RoomId;

/// Receives updates from other nodes and broadcasts them to local player actors.
struct BroadcastingInner {
    subscriptions: HashMap<RoomId, HashSet<PlayerId>>,
}

impl Broadcasting {}

#[derive(Clone)]
pub struct Broadcasting(Arc<Mutex<BroadcastingInner>>);

impl Broadcasting {
    pub fn new() -> Self {
        let inner = BroadcastingInner {
            subscriptions: HashMap::new(),
        };
        Self(Arc::new(Mutex::new(inner)))
    }

    /// Broadcasts the given update to subscribed player actors on the local node.
    #[tracing::instrument(skip(self, players, message, created_at), name = "Broadcasting::broadcast")]
    pub async fn broadcast(
        &self,
        players: &PlayerRegistry,
        room_id: RoomId,
        author_id: PlayerId,
        message: Str,
        created_at: DateTime<Utc>,
    ) {
        let inner = self.0.lock().await;
        let Some(subscribers) = inner.subscriptions.get(&room_id) else {
            return;
        };
        let update = Updates::NewMessage {
            room_id: room_id.clone(),
            author_id: author_id.clone(),
            body: message.clone(),
            created_at: created_at.clone(),
        };
        for i in subscribers {
            if i == &author_id {
                continue;
            }
            let Some(player) = players.get_player(i).await else {
                continue;
            };
            player.update(update.clone()).await;
        }
    }

    pub async fn subscribe(&self, subscriber: PlayerId, room_id: RoomId) {
        self.0.lock().await.subscriptions.entry(room_id).or_insert_with(HashSet::new).insert(subscriber);
    }
}
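
The intended flow, as far as this commit goes: a player that joins a remotely-owned room registers itself via `subscribe`, and whichever path learns of a new message fans it out with `broadcast`. A sketch (assumes `players`, `player_id`, `room_id`, and `author_id` are in scope, and that `Str` converts from a string literal via `into()`):

    broadcasting.subscribe(player_id.clone(), room_id.clone()).await;
    broadcasting
        .broadcast(&players, room_id, author_id, "hello".into(), chrono::Utc::now())
        .await;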


@@ -0,0 +1,59 @@
use serde::{Deserialize, Serialize};

use crate::clustering::LavinaClient;

pub mod paths {
    pub const JOIN: &'static str = "/cluster/rooms/join";
    pub const LEAVE: &'static str = "/cluster/rooms/leave";
    pub const ADD_MESSAGE: &'static str = "/cluster/rooms/add_message";
    pub const SET_TOPIC: &'static str = "/cluster/rooms/set_topic";
}

#[derive(Serialize, Deserialize, Debug)]
pub struct JoinRoomReq<'a> {
    pub room_id: &'a str,
    pub player_id: &'a str,
}

#[derive(Serialize, Deserialize, Debug)]
pub struct LeaveRoomReq<'a> {
    pub room_id: &'a str,
    pub player_id: &'a str,
}

#[derive(Serialize, Deserialize, Debug)]
pub struct SendMessageReq<'a> {
    pub room_id: &'a str,
    pub player_id: &'a str,
    pub message: &'a str,
    pub created_at: &'a str,
}

#[derive(Serialize, Deserialize, Debug)]
pub struct SetRoomTopicReq<'a> {
    pub room_id: &'a str,
    pub player_id: &'a str,
    pub topic: &'a str,
}

impl LavinaClient {
    #[tracing::instrument(skip(self, req), name = "LavinaClient::join_room")]
    pub async fn join_room(&self, node_id: u32, req: JoinRoomReq<'_>) -> anyhow::Result<()> {
        self.send_request(node_id, paths::JOIN, req).await
    }

    #[tracing::instrument(skip(self, req), name = "LavinaClient::leave_room")]
    pub async fn leave_room(&self, node_id: u32, req: LeaveRoomReq<'_>) -> anyhow::Result<()> {
        self.send_request(node_id, paths::LEAVE, req).await
    }

    #[tracing::instrument(skip(self, req), name = "LavinaClient::send_room_message")]
    pub async fn send_room_message(&self, node_id: u32, req: SendMessageReq<'_>) -> anyhow::Result<()> {
        self.send_request(node_id, paths::ADD_MESSAGE, req).await
    }

    #[tracing::instrument(skip(self, req), name = "LavinaClient::set_room_topic")]
    pub async fn set_room_topic(&self, node_id: u32, req: SetRoomTopicReq<'_>) -> anyhow::Result<()> {
        self.send_request(node_id, paths::SET_TOPIC, req).await
    }
}
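
Calling a peer then looks like this (sketch; node 1 matches the `aaaaa` assignment in the sample configs, and the borrowed request structs keep these calls allocation-light):

    let req = JoinRoomReq { room_id: "aaaaa", player_id: "alice" };
    client.join_room(1, req).await?;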


@@ -1,14 +1,19 @@
 //! Domain definitions and implementation of common chat logic.

+use std::sync::Arc;
+
 use anyhow::Result;
 use prometheus::Registry as MetricsRegistry;

 use crate::auth::Authenticator;
+use crate::clustering::broadcast::Broadcasting;
+use crate::clustering::{ClusterConfig, LavinaClient};
 use crate::dialog::DialogRegistry;
 use crate::player::PlayerRegistry;
 use crate::repo::Storage;
 use crate::room::RoomRegistry;

 pub mod auth;
+pub mod clustering;
 pub mod dialog;
 pub mod player;
 pub mod prelude;
@@ -23,21 +28,37 @@ pub struct LavinaCore {
     pub players: PlayerRegistry,
     pub rooms: RoomRegistry,
     pub dialogs: DialogRegistry,
+    pub broadcasting: Broadcasting,
     pub authenticator: Authenticator,
 }

 impl LavinaCore {
-    pub async fn new(mut metrics: MetricsRegistry, storage: Storage) -> Result<LavinaCore> {
+    pub async fn new(
+        mut metrics: MetricsRegistry,
+        cluster_config: ClusterConfig,
+        storage: Storage,
+    ) -> Result<LavinaCore> {
         // TODO shutdown all services in reverse order on error
+        let broadcasting = Broadcasting::new();
+        let client = LavinaClient::new(cluster_config.addresses.clone());
         let rooms = RoomRegistry::new(&mut metrics, storage.clone())?;
         let dialogs = DialogRegistry::new(storage.clone());
-        let players = PlayerRegistry::empty(rooms.clone(), dialogs.clone(), storage.clone(), &mut metrics)?;
+        let players = PlayerRegistry::empty(
+            rooms.clone(),
+            dialogs.clone(),
+            storage.clone(),
+            &mut metrics,
+            Arc::new(cluster_config.metadata),
+            client,
+            broadcasting.clone(),
+        )?;
         dialogs.set_players(players.clone()).await;
         let authenticator = Authenticator::new(storage.clone());
         Ok(LavinaCore {
             players,
             rooms,
             dialogs,
+            broadcasting,
             authenticator,
         })
     }


@@ -18,6 +18,9 @@ use tokio::sync::mpsc::{channel, Receiver, Sender};
 use tokio::sync::RwLock;
 use tracing::{Instrument, Span};

+use crate::clustering::broadcast::Broadcasting;
+use crate::clustering::room::*;
+use crate::clustering::{ClusterMetadata, LavinaClient};
 use crate::dialog::DialogRegistry;
 use crate::prelude::*;
 use crate::repo::Storage;
@@ -60,7 +63,7 @@ pub struct PlayerConnection {
     player_handle: PlayerHandle,
 }
 impl PlayerConnection {
-    /// Handled in [Player::send_message].
+    /// Handled in [Player::send_room_message].
     #[tracing::instrument(skip(self, body), name = "PlayerConnection::send_message")]
     pub async fn send_message(&mut self, room_id: RoomId, body: Str) -> Result<SendMessageResult> {
         let (promise, deferred) = oneshot();
@@ -78,7 +81,7 @@ impl PlayerConnection {
         Ok(deferred.await?)
     }

-    /// Handled in [Player::change_topic].
+    /// Handled in [Player::change_room_topic].
     #[tracing::instrument(skip(self, new_topic), name = "PlayerConnection::change_topic")]
     pub async fn change_topic(&mut self, room_id: RoomId, new_topic: Str) -> Result<()> {
         let (promise, deferred) = oneshot();
@@ -272,6 +275,9 @@ impl PlayerRegistry {
         dialogs: DialogRegistry,
         storage: Storage,
         metrics: &mut MetricsRegistry,
+        cluster_metadata: Arc<ClusterMetadata>,
+        cluster_client: LavinaClient,
+        broadcasting: Broadcasting,
     ) -> Result<PlayerRegistry> {
         let metric_active_players = IntGauge::new("chat_players_active", "Number of alive player actors")?;
         metrics.register(Box::new(metric_active_players.clone()))?;
@@ -279,6 +285,9 @@ impl PlayerRegistry {
             room_registry,
             dialogs,
             storage,
+            cluster_metadata,
+            cluster_client,
+            broadcasting,
             players: HashMap::new(),
             metric_active_players,
         };
@@ -328,8 +337,12 @@ impl PlayerRegistry {
         } else {
             let (handle, fiber) = Player::launch(
                 id.clone(),
+                self.clone(),
                 inner.room_registry.clone(),
                 inner.dialogs.clone(),
+                inner.cluster_metadata.clone(),
+                inner.cluster_client.clone(),
+                inner.broadcasting.clone(),
                 inner.storage.clone(),
             )
             .await;
@@ -364,29 +377,45 @@ struct PlayerRegistryInner {
     room_registry: RoomRegistry,
     dialogs: DialogRegistry,
     storage: Storage,
+    cluster_metadata: Arc<ClusterMetadata>,
+    cluster_client: LavinaClient,
+    broadcasting: Broadcasting,
     /// Active player actors.
     players: HashMap<PlayerId, (PlayerHandle, JoinHandle<Player>)>,
     metric_active_players: IntGauge,
 }

+enum RoomRef {
+    Local(RoomHandle),
+    Remote { node_id: u32 },
+}
+
 /// Player actor inner state representation.
 struct Player {
     player_id: PlayerId,
     storage_id: u32,
     connections: AnonTable<Sender<ConnectionMessage>>,
-    my_rooms: HashMap<RoomId, RoomHandle>,
+    my_rooms: HashMap<RoomId, RoomRef>,
     banned_from: HashSet<RoomId>,
     rx: Receiver<(ActorCommand, Span)>,
     handle: PlayerHandle,
+    players: PlayerRegistry,
     rooms: RoomRegistry,
     dialogs: DialogRegistry,
     storage: Storage,
+    cluster_metadata: Arc<ClusterMetadata>,
+    cluster_client: LavinaClient,
+    broadcasting: Broadcasting,
 }

 impl Player {
     async fn launch(
         player_id: PlayerId,
+        players: PlayerRegistry,
         rooms: RoomRegistry,
         dialogs: DialogRegistry,
+        cluster_metadata: Arc<ClusterMetadata>,
+        cluster_client: LavinaClient,
+        broadcasting: Broadcasting,
         storage: Storage,
     ) -> (PlayerHandle, JoinHandle<Player>) {
         let (tx, rx) = channel(32);
@@ -404,24 +433,43 @@ impl Player {
             banned_from: HashSet::new(),
             rx,
             handle,
+            players,
             rooms,
             dialogs,
             storage,
+            cluster_metadata,
+            cluster_client,
+            broadcasting,
         };
         let fiber = tokio::task::spawn(player.main_loop());
         (handle_clone, fiber)
     }

+    fn room_location(&self, room_id: &RoomId) -> Option<u32> {
+        let res = self.cluster_metadata.rooms.get(room_id.as_inner().as_ref()).copied();
+        let node = res.unwrap_or(self.cluster_metadata.main_owner);
+        if node == self.cluster_metadata.node_id {
+            None
+        } else {
+            Some(node)
+        }
+    }
+
     async fn main_loop(mut self) -> Self {
         let rooms = self.storage.get_rooms_of_a_user(self.storage_id).await.unwrap();
         for room_id in rooms {
+            if let Some(remote_node) = self.room_location(&room_id) {
+                self.my_rooms.insert(room_id.clone(), RoomRef::Remote { node_id: remote_node });
+                self.broadcasting.subscribe(self.player_id.clone(), room_id).await;
+            } else {
                 let room = self.rooms.get_room(&room_id).await;
                 if let Some(room) = room {
-                    self.my_rooms.insert(room_id, room);
+                    self.my_rooms.insert(room_id, RoomRef::Local(room));
                 } else {
                     tracing::error!("Room #{room_id:?} not found");
                 }
+            }
         }
         while let Some(cmd) = self.rx.recv().await {
             let (cmd, span) = cmd;
             let should_stop = async {
@@ -496,7 +544,7 @@ impl Player {
                     let _ = promise.send(());
                 }
                 ClientCommand::SendMessage { room_id, body, promise } => {
-                    let result = self.send_message(connection_id, room_id, body).await;
+                    let result = self.send_room_message(connection_id, room_id, body).await;
                     let _ = promise.send(result);
                 }
                 ClientCommand::ChangeTopic {
@@ -504,7 +552,7 @@ impl Player {
                     new_topic,
                     promise,
                 } => {
-                    self.change_topic(connection_id, room_id, new_topic).await;
+                    self.change_room_topic(connection_id, room_id, new_topic).await;
                     let _ = promise.send(());
                 }
                 ClientCommand::GetRooms { promise } => {
@@ -535,6 +583,21 @@ impl Player {
             return JoinResult::AlreadyJoined;
         }
+        if let Some(remote_node) = self.room_location(&room_id) {
+            let req = JoinRoomReq {
+                room_id: room_id.as_inner(),
+                player_id: self.player_id.as_inner(),
+            };
+            self.cluster_client.join_room(remote_node, req).await.unwrap();
+            let room_storage_id = self.storage.create_or_retrieve_room_id_by_name(room_id.as_inner()).await.unwrap();
+            self.storage.add_room_member(room_storage_id, self.storage_id).await.unwrap();
+            self.my_rooms.insert(room_id.clone(), RoomRef::Remote { node_id: remote_node });
+            JoinResult::Success(RoomInfo {
+                id: room_id,
+                topic: "unknown".into(),
+                members: vec![],
+            })
+        } else {
             let room = match self.rooms.get_or_create_room(room_id.clone()).await {
                 Ok(room) => room,
                 Err(e) => {
@@ -544,7 +607,7 @@ impl Player {
             };
             room.add_member(&self.player_id, self.storage_id).await;
             room.subscribe(&self.player_id, self.handle.clone()).await;
-            self.my_rooms.insert(room_id.clone(), room.clone());
+            self.my_rooms.insert(room_id.clone(), RoomRef::Local(room.clone()));
             let room_info = room.get_room_info().await;
             let update = Updates::RoomJoined {
                 room_id,
@@ -553,14 +616,29 @@ impl Player {
             self.broadcast_update(update, connection_id).await;
             JoinResult::Success(room_info)
         }
+    }

     #[tracing::instrument(skip(self), name = "Player::leave_room")]
     async fn leave_room(&mut self, connection_id: ConnectionId, room_id: RoomId) {
         let room = self.my_rooms.remove(&room_id);
         if let Some(room) = room {
+            match room {
+                RoomRef::Local(room) => {
                     room.unsubscribe(&self.player_id).await;
                     room.remove_member(&self.player_id, self.storage_id).await;
                 }
+                RoomRef::Remote { node_id } => {
+                    let req = LeaveRoomReq {
+                        room_id: room_id.as_inner(),
+                        player_id: self.player_id.as_inner(),
+                    };
+                    self.cluster_client.leave_room(node_id, req).await.unwrap();
+                    let room_storage_id =
+                        self.storage.create_or_retrieve_room_id_by_name(room_id.as_inner()).await.unwrap();
+                    self.storage.remove_room_member(room_storage_id, self.storage_id).await.unwrap();
+                }
+            }
         }
         let update = Updates::RoomLeft {
             room_id,
             former_member_id: self.player_id.clone(),
@@ -568,14 +646,41 @@ impl Player {
         self.broadcast_update(update, connection_id).await;
     }
-    #[tracing::instrument(skip(self, body), name = "Player::send_message")]
-    async fn send_message(&mut self, connection_id: ConnectionId, room_id: RoomId, body: Str) -> SendMessageResult {
+    #[tracing::instrument(skip(self, body), name = "Player::send_room_message")]
+    async fn send_room_message(
+        &mut self,
+        connection_id: ConnectionId,
+        room_id: RoomId,
+        body: Str,
+    ) -> SendMessageResult {
         let Some(room) = self.my_rooms.get(&room_id) else {
             tracing::info!("no room found");
             return SendMessageResult::NoSuchRoom;
         };
         let created_at = chrono::Utc::now();
+        match room {
+            RoomRef::Local(room) => {
                 room.send_message(&self.player_id, body.clone(), created_at.clone()).await;
+            }
+            RoomRef::Remote { node_id } => {
+                let req = SendMessageReq {
+                    room_id: room_id.as_inner(),
+                    player_id: self.player_id.as_inner(),
+                    message: &*body,
+                    created_at: &*created_at.to_rfc3339(),
+                };
+                self.cluster_client.send_room_message(*node_id, req).await.unwrap();
+                self.broadcasting
+                    .broadcast(
+                        &self.players,
+                        room_id.clone(),
+                        self.player_id.clone(),
+                        body.clone(),
+                        created_at.clone(),
+                    )
+                    .await;
+            }
+        }
         let update = Updates::NewMessage {
             room_id,
             author_id: self.player_id.clone(),
@@ -586,13 +691,25 @@ impl Player {
         SendMessageResult::Success(created_at)
     }

-    #[tracing::instrument(skip(self, new_topic), name = "Player::change_topic")]
-    async fn change_topic(&mut self, connection_id: ConnectionId, room_id: RoomId, new_topic: Str) {
+    #[tracing::instrument(skip(self, new_topic), name = "Player::change_room_topic")]
+    async fn change_room_topic(&mut self, connection_id: ConnectionId, room_id: RoomId, new_topic: Str) {
         let Some(room) = self.my_rooms.get(&room_id) else {
             tracing::info!("no room found");
             return;
         };
+        match room {
+            RoomRef::Local(room) => {
                 room.set_topic(&self.player_id, new_topic.clone()).await;
+            }
+            RoomRef::Remote { node_id } => {
+                let req = SetRoomTopicReq {
+                    room_id: room_id.as_inner(),
+                    player_id: self.player_id.as_inner(),
+                    topic: &*new_topic,
+                };
+                self.cluster_client.set_room_topic(*node_id, req).await.unwrap();
+            }
+        }
         let update = Updates::RoomTopicChanged { room_id, new_topic };
         self.broadcast_update(update, connection_id).await;
     }
@@ -600,9 +717,21 @@ impl Player {
     #[tracing::instrument(skip(self), name = "Player::get_rooms")]
     async fn get_rooms(&self) -> Vec<RoomInfo> {
         let mut response = vec![];
-        for (_, handle) in &self.my_rooms {
+        for (room_id, handle) in &self.my_rooms {
+            match handle {
+                RoomRef::Local(handle) => {
                     response.push(handle.get_room_info().await);
                 }
+                RoomRef::Remote { .. } => {
+                    let room_info = RoomInfo {
+                        id: room_id.clone(),
+                        topic: "unknown".into(),
+                        members: vec![],
+                    };
+                    response.push(room_info);
+                }
+            }
+        }
         response
     }
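
Net effect: projections are untouched. `PlayerConnection::send_message` keeps its signature, and the actor decides per room whether to go through the local `RoomHandle` or the cluster client. A sketch, with `conn` a hypothetical `PlayerConnection`:

    // The remote hop, if any, happens inside the player actor.
    let result = conn.send_message(RoomId::from("aaaaa")?, "hi".into()).await?;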


@@ -69,4 +69,20 @@ impl Storage {
         Ok(())
     }

+    pub async fn create_or_retrieve_room_id_by_name(&self, name: &str) -> Result<u32> {
+        // TODO we don't need any info except the name on non-owning nodes, should remove stubs here
+        let mut executor = self.conn.lock().await;
+        let res: (u32,) = sqlx::query_as(
+            "insert into rooms(name, topic)
+            values (?, '')
+            on conflict(name) do update set name = excluded.name
+            returning id;",
+        )
+        .bind(name)
+        .fetch_one(&mut *executor)
+        .await?;
+        Ok(res.0)
+    }
 }
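
The `on conflict(name) do update set name = excluded.name` is the usual SQLite idiom for insert-or-fetch in one statement: the update is a no-op, but a plain `do nothing` would make `returning id` yield no row when the name already exists. Usage is idempotent either way:

    // Same id whether the row existed or was just created.
    let room_storage_id = storage.create_or_retrieve_room_id_by_name("aaaaa").await?;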


@@ -14,6 +14,21 @@ impl Storage {
         Ok(res.map(|(id,)| id))
     }

+    pub async fn create_or_retrieve_user_id_by_name(&self, name: &str) -> Result<u32> {
+        let mut executor = self.conn.lock().await;
+        let res: (u32,) = sqlx::query_as(
+            "insert into users(name)
+            values (?)
+            on conflict(name) do update set name = excluded.name
+            returning id;",
+        )
+        .bind(name)
+        .fetch_one(&mut *executor)
+        .await?;
+        Ok(res.0)
+    }
+
     pub async fn get_rooms_of_a_user(&self, user_id: u32) -> Result<Vec<RoomId>> {
         let mut executor = self.conn.lock().await;
         let res: Vec<(String,)> = sqlx::query_as(


@@ -62,7 +62,7 @@ impl RoomRegistry {
     }

     #[tracing::instrument(skip(self), name = "RoomRegistry::get_or_create_room")]
-    pub async fn get_or_create_room(&mut self, room_id: RoomId) -> Result<RoomHandle> {
+    pub async fn get_or_create_room(&self, room_id: RoomId) -> Result<RoomHandle> {
         let mut inner = self.0.write().await;
         if let Some(room_handle) = inner.get_or_load_room(&room_id).await? {
             Ok(room_handle.clone())


@@ -9,6 +9,7 @@ use tokio::net::tcp::{ReadHalf, WriteHalf};
 use tokio::net::TcpStream;

 use lavina_core::auth::Authenticator;
+use lavina_core::clustering::{ClusterConfig, ClusterMetadata};
 use lavina_core::player::{JoinResult, PlayerId, SendMessageResult};
 use lavina_core::repo::{Storage, StorageConfig};
 use lavina_core::room::RoomId;
@@ -118,7 +119,15 @@ impl TestServer {
             db_path: ":memory:".into(),
         })
         .await?;
-        let core = LavinaCore::new(metrics.clone(), storage.clone()).await?;
+        let cluster_config = ClusterConfig {
+            addresses: vec![],
+            metadata: ClusterMetadata {
+                node_id: 0,
+                main_owner: 0,
+                rooms: Default::default(),
+            },
+        };
+        let core = LavinaCore::new(metrics.clone(), cluster_config, storage.clone()).await?;
         let server = launch(config, core.clone(), metrics.clone()).await.unwrap();
         Ok(TestServer {
             metrics,
@@ -133,6 +142,14 @@ impl TestServer {
             listen_on: "127.0.0.1:0".parse().unwrap(),
             server_name: "testserver".into(),
         };
+        let cluster_config = ClusterConfig {
+            addresses: vec![],
+            metadata: ClusterMetadata {
+                node_id: 0,
+                main_owner: 0,
+                rooms: Default::default(),
+            },
+        };
         let TestServer {
             metrics: _,
             storage,
@@ -142,7 +159,7 @@ impl TestServer {
         server.terminate().await?;
         core.shutdown().await?;
         let metrics = MetricsRegistry::new();
-        let core = LavinaCore::new(metrics.clone(), storage.clone()).await?;
+        let core = LavinaCore::new(metrics.clone(), cluster_config, storage.clone()).await?;
         let server = launch(config, core.clone(), metrics.clone()).await.unwrap();
         Ok(TestServer {
             metrics,


@@ -1,8 +1,10 @@
-use prometheus::Registry as MetricsRegistry;
-
 use crate::{Authenticated, XmppConnection};
-use lavina_core::player::{PlayerConnection, PlayerId};
+use lavina_core::clustering::{ClusterConfig, ClusterMetadata};
+use lavina_core::player::PlayerConnection;
 use lavina_core::repo::{Storage, StorageConfig};
 use lavina_core::LavinaCore;
+use prometheus::Registry as MetricsRegistry;
 use proto_xmpp::bind::{BindRequest, BindResponse, Jid, Name, Resource, Server};

 pub(crate) struct TestServer {
@@ -19,7 +21,15 @@ impl TestServer {
             db_path: ":memory:".into(),
         })
         .await?;
-        let core = LavinaCore::new(metrics.clone(), storage.clone()).await?;
+        let cluster_config = ClusterConfig {
+            metadata: ClusterMetadata {
+                node_id: 0,
+                main_owner: 0,
+                rooms: Default::default(),
+            },
+            addresses: vec![],
+        };
+        let core = LavinaCore::new(metrics.clone(), cluster_config, storage.clone()).await?;
         Ok(TestServer { metrics, storage, core })
     }
@@ -27,7 +37,15 @@ impl TestServer {
         self.core.shutdown().await?;
         let metrics = MetricsRegistry::new();
-        let core = LavinaCore::new(metrics.clone(), self.storage.clone()).await?;
+        let cluster_config = ClusterConfig {
+            metadata: ClusterMetadata {
+                node_id: 0,
+                main_owner: 0,
+                rooms: Default::default(),
+            },
+            addresses: vec![],
+        };
+        let core = LavinaCore::new(metrics.clone(), cluster_config, self.storage.clone()).await?;
         Ok(TestServer {
             metrics,


@@ -19,6 +19,7 @@ use tokio_rustls::rustls::{ClientConfig, ServerName};
 use tokio_rustls::TlsConnector;

 use lavina_core::auth::Authenticator;
+use lavina_core::clustering::{ClusterConfig, ClusterMetadata};
 use lavina_core::repo::{Storage, StorageConfig};
 use lavina_core::LavinaCore;
 use projection_xmpp::{launch, RunningServer, ServerConfig};
@@ -161,7 +162,15 @@ impl TestServer {
             db_path: ":memory:".into(),
         })
         .await?;
-        let core = LavinaCore::new(metrics.clone(), storage.clone()).await?;
+        let cluster_config = ClusterConfig {
+            addresses: vec![],
+            metadata: ClusterMetadata {
+                node_id: 0,
+                main_owner: 0,
+                rooms: Default::default(),
+            },
+        };
+        let core = LavinaCore::new(metrics.clone(), cluster_config, storage.clone()).await?;
         let server = launch(config, core.clone(), metrics.clone()).await.unwrap();
         Ok(TestServer {
             metrics,


@@ -19,9 +19,10 @@ use lavina_core::repo::Storage;
 use lavina_core::room::{RoomId, RoomRegistry};
 use lavina_core::terminator::Terminator;
 use lavina_core::LavinaCore;
 use mgmt_api::*;

+mod clustering;

 type HttpResult<T> = std::result::Result<T, Infallible>;

 #[derive(Deserialize, Debug)]
@@ -90,7 +91,7 @@ async fn route(
         (&Method::POST, paths::SET_PASSWORD) => endpoint_set_password(request, core).await.or5xx(),
         (&Method::POST, rooms::paths::SEND_MESSAGE) => endpoint_send_room_message(request, core).await.or5xx(),
         (&Method::POST, rooms::paths::SET_TOPIC) => endpoint_set_room_topic(request, core).await.or5xx(),
-        _ => endpoint_not_found(),
+        _ => clustering::route(core, storage, request).await.unwrap_or_else(endpoint_not_found),
     };
     Ok(res)
 }

src/http/clustering.rs (new file, 80 lines)

@@ -0,0 +1,80 @@
use http_body_util::{BodyExt, Full};
use hyper::body::Bytes;
use hyper::{Method, Request, Response};

use super::Or5xx;
use crate::http::{empty_204_request, malformed_request, player_not_found, room_not_found};
use lavina_core::clustering::room::{paths, JoinRoomReq, SendMessageReq};
use lavina_core::player::PlayerId;
use lavina_core::repo::Storage;
use lavina_core::room::RoomId;
use lavina_core::LavinaCore;

// TODO move this into core
pub async fn route(
    core: &LavinaCore,
    storage: &Storage,
    request: Request<hyper::body::Incoming>,
) -> Option<Response<Full<Bytes>>> {
    match (request.method(), request.uri().path()) {
        (&Method::POST, paths::JOIN) => Some(endpoint_cluster_join_room(request, core, storage).await.or5xx()),
        (&Method::POST, paths::ADD_MESSAGE) => Some(endpoint_cluster_add_message(request, core).await.or5xx()),
        _ => None,
    }
}

#[tracing::instrument(skip_all, name = "endpoint_cluster_join_room")]
async fn endpoint_cluster_join_room(
    request: Request<hyper::body::Incoming>,
    core: &LavinaCore,
    storage: &Storage,
) -> lavina_core::prelude::Result<Response<Full<Bytes>>> {
    let str = request.collect().await?.to_bytes();
    let Ok(req) = serde_json::from_slice::<JoinRoomReq>(&str[..]) else {
        return Ok(malformed_request());
    };
    tracing::info!("Incoming request: {:?}", &req);
    let Ok(room_id) = RoomId::from(req.room_id) else {
        dbg!(&req.room_id);
        return Ok(room_not_found());
    };
    let Ok(player_id) = PlayerId::from(req.player_id) else {
        dbg!(&req.player_id);
        return Ok(player_not_found());
    };
    let room_handle = core.rooms.get_or_create_room(room_id).await.unwrap();
    let storage_id = storage.create_or_retrieve_user_id_by_name(req.player_id).await?;
    room_handle.add_member(&player_id, storage_id).await;
    Ok(empty_204_request())
}

#[tracing::instrument(skip_all, name = "endpoint_cluster_add_message")]
async fn endpoint_cluster_add_message(
    request: Request<hyper::body::Incoming>,
    core: &LavinaCore,
) -> lavina_core::prelude::Result<Response<Full<Bytes>>> {
    let str = request.collect().await?.to_bytes();
    let Ok(req) = serde_json::from_slice::<SendMessageReq>(&str[..]) else {
        return Ok(malformed_request());
    };
    tracing::info!("Incoming request: {:?}", &req);
    let Ok(created_at) = chrono::DateTime::parse_from_rfc3339(req.created_at) else {
        dbg!(&req.created_at);
        return Ok(malformed_request());
    };
    let Ok(room_id) = RoomId::from(req.room_id) else {
        dbg!(&req.room_id);
        return Ok(room_not_found());
    };
    let Ok(player_id) = PlayerId::from(req.player_id) else {
        dbg!(&req.player_id);
        return Ok(player_not_found());
    };
    let Some(room_handle) = core.rooms.get_room(&room_id).await else {
        dbg!(&room_id);
        return Ok(room_not_found());
    };
    room_handle.send_message(&player_id, req.message.into(), created_at.to_utc()).await;
    Ok(empty_204_request())
}
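
The wire format of these internal endpoints falls out of the serde derives on the request structs in lavina-core. For example, the body a peer posts to /cluster/rooms/join (sketch):

    let req = JoinRoomReq { room_id: "aaaaa", player_id: "alice" };
    assert_eq!(
        serde_json::to_string(&req)?,
        r#"{"room_id":"aaaaa","player_id":"alice"}"#
    );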


@@ -6,8 +6,10 @@ use std::path::Path;
 use clap::Parser;
 use figment::providers::Format;
 use figment::{providers::Toml, Figment};
+use opentelemetry::global::set_text_map_propagator;
 use opentelemetry::KeyValue;
 use opentelemetry_otlp::WithExportConfig;
+use opentelemetry_sdk::propagation::TraceContextPropagator;
 use opentelemetry_sdk::trace::{BatchConfig, RandomIdGenerator, Sampler};
 use opentelemetry_sdk::{runtime, Resource};
 use opentelemetry_semantic_conventions::resource::SERVICE_NAME;
@@ -28,6 +30,7 @@ struct ServerConfig {
     irc: projection_irc::ServerConfig,
     xmpp: projection_xmpp::ServerConfig,
     storage: lavina_core::repo::StorageConfig,
+    cluster: lavina_core::clustering::ClusterConfig,
     tracing: Option<TracingConfig>,
 }
@@ -63,11 +66,12 @@ async fn main() -> Result<()> {
         irc: irc_config,
         xmpp: xmpp_config,
         storage: storage_config,
+        cluster: cluster_config,
         tracing: _,
     } = config;
     let metrics = MetricsRegistry::new();
     let storage = Storage::open(storage_config).await?;
-    let core = LavinaCore::new(metrics.clone(), storage.clone()).await?;
+    let core = LavinaCore::new(metrics.clone(), cluster_config, storage.clone()).await?;
     let telemetry_terminator = http::launch(telemetry_config, metrics.clone(), core.clone(), storage.clone()).await?;
     let irc = projection_irc::launch(irc_config, core.clone(), metrics.clone()).await?;
     let xmpp = projection_xmpp::launch(xmpp_config, core.clone(), metrics.clone()).await?;
@@ -139,6 +143,7 @@ fn set_up_logging(tracing_config: &Option<TracingConfig>) -> Result<()> {
             .with_exporter(trace_exporter)
             .install_batch(runtime::Tokio)?;
         let subscriber = subscriber.with(OpenTelemetryLayer::new(tracer));
+        set_text_map_propagator(TraceContextPropagator::new());
         targets.with_subscriber(subscriber).try_init()?;
     } else {
         targets.with_subscriber(subscriber).try_init()?;
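
Presumably the globally installed propagator is what the reqwest-tracing middleware on LavinaClient consults to inject W3C traceparent headers into outgoing cluster requests, tying the two nodes' spans into a single trace; it just has to be installed before any cluster calls are made. A minimal sketch of the order:

    // Assumption: TracingMiddleware reads the globally-installed propagator.
    set_text_map_propagator(TraceContextPropagator::new());
    let client = LavinaClient::new(addresses); // addresses: Vec<SocketAddr>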