forked from lavina/lavina

Compare commits


No commits in common. "a8a4b1c4902fdb753bd1ef7f5bd71dd3951255ce" and "adf1d8c14c39b0bdcdbab12d30f9bd4317e5f262" have entirely different histories.

40 changed files with 623 additions and 1793 deletions

.gitignore (vendored, 2 changed lines)

@ -1,4 +1,4 @@
/target
*.sqlite
/db.sqlite
.idea/
.DS_Store

Cargo.lock (generated, 52 changed lines)

@ -204,7 +204,7 @@ dependencies = [
"http-body 0.4.6",
"hyper 0.14.28",
"itoa",
"matchit 0.7.3",
"matchit",
"memchr",
"mime",
"percent-encoding",
@ -709,10 +709,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c"
dependencies = [
"cfg-if",
"js-sys",
"libc",
"wasi",
"wasm-bindgen",
]
[[package]]
@ -1099,13 +1097,8 @@ dependencies = [
"anyhow",
"argon2",
"chrono",
"mgmt-api",
"opentelemetry",
"prometheus",
"rand_core",
"reqwest",
"reqwest-middleware",
"reqwest-tracing",
"serde",
"sqlx",
"tokio",
@ -1172,12 +1165,6 @@ version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94"
[[package]]
name = "matchit"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "540f1c43aed89909c0cc0cc604e3bb2f7e7a341a3728a9e6cfe760e733cd11ed"
[[package]]
name = "md-5"
version = "0.10.6"
@ -1777,9 +1764,9 @@ checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56"
[[package]]
name = "reqwest"
version = "0.12.4"
version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "566cafdd92868e0939d3fb961bd0dc25fcfaaed179291093b3d43e6b3150ea10"
checksum = "3e6cc1e89e689536eb5aeede61520e874df5a4707df811cd5da4aa5fbb2aae19"
dependencies = [
"base64 0.22.0",
"bytes",
@ -1810,39 +1797,6 @@ dependencies = [
"winreg",
]
[[package]]
name = "reqwest-middleware"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0209efb52486ad88136190094ee214759ef7507068b27992256ed6610eb71a01"
dependencies = [
"anyhow",
"async-trait",
"http 1.1.0",
"reqwest",
"serde",
"thiserror",
"tower-service",
]
[[package]]
name = "reqwest-tracing"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b253954a1979e02eabccd7e9c3d61d8f86576108baa160775e7f160bb4e800a3"
dependencies = [
"anyhow",
"async-trait",
"getrandom",
"http 1.1.0",
"matchit 0.8.2",
"opentelemetry",
"reqwest",
"reqwest-middleware",
"tracing",
"tracing-opentelemetry",
]
[[package]]
name = "ring"
version = "0.17.8"


@ -32,7 +32,6 @@ lavina-core = { path = "crates/lavina-core" }
tracing-subscriber = "0.3.16"
sasl = { path = "crates/sasl" }
chrono = "0.4.37"
reqwest = { version = "0.12.0", default-features = false, features = ["json"] }
[package]
name = "lavina"
@ -70,4 +69,4 @@ chrono.workspace = true
[dev-dependencies]
assert_matches.workspace = true
regex = "1.7.1"
reqwest.workspace = true
reqwest = { version = "0.12.0", default-features = false }


@ -1,30 +0,0 @@
[telemetry]
listen_on = "127.0.0.1:8080"
[irc]
listen_on = "127.0.0.1:6667"
server_name = "irc.localhost"
[xmpp]
listen_on = "127.0.0.1:5222"
cert = "./certs/xmpp.pem"
key = "./certs/xmpp.key"
hostname = "localhost"
[storage]
db_path = "db.0.sqlite"
[cluster]
addresses = [
"127.0.0.1:8080",
"127.0.0.1:8081",
]
[cluster.metadata]
node_id = 0
main_owner = 0
rooms = { aaaaa = 1, test = 0 }
[tracing]
endpoint = "http://localhost:4317"
service_name = "lavina-0"


@ -1,30 +0,0 @@
[telemetry]
listen_on = "127.0.0.1:8081"
[irc]
listen_on = "127.0.0.1:6668"
server_name = "irc.localhost"
[xmpp]
listen_on = "127.0.0.1:5223"
cert = "./certs/xmpp.pem"
key = "./certs/xmpp.key"
hostname = "localhost"
[storage]
db_path = "db.1.sqlite"
[cluster]
addresses = [
"127.0.0.1:8080",
"127.0.0.1:8081",
]
[cluster.metadata]
node_id = 1
main_owner = 0
rooms = { aaaaa = 1, test = 0 }
[tracing]
endpoint = "http://localhost:4317"
service_name = "lavina-1"


@ -13,11 +13,3 @@ hostname = "localhost"
[storage]
db_path = "db.sqlite"
[cluster]
addresses = []
[cluster.metadata]
node_id = 0
main_owner = 0
rooms = {}


@ -13,8 +13,3 @@ prometheus.workspace = true
chrono.workspace = true
argon2 = { version = "0.5.3" }
rand_core = { version = "0.6.4", features = ["getrandom"] }
reqwest.workspace = true
reqwest-middleware = { version = "0.3", features = ["json"] }
opentelemetry = "0.22.0"
mgmt-api = { path = "../mgmt-api" }
reqwest-tracing = { version = "0.5", features = ["opentelemetry_0_22"] }


@ -3,7 +3,8 @@ use argon2::password_hash::{PasswordHash, PasswordHasher, PasswordVerifier, Salt
use argon2::Argon2;
use rand_core::OsRng;
use crate::LavinaCore;
use crate::prelude::log;
use crate::repo::Storage;
pub enum Verdict {
Authenticated,
@ -16,10 +17,17 @@ pub enum UpdatePasswordResult {
UserNotFound,
}
impl LavinaCore {
#[tracing::instrument(skip(self, provided_password), name = "Services::authenticate")]
pub struct Authenticator<'a> {
storage: &'a Storage,
}
impl<'a> Authenticator<'a> {
pub fn new(storage: &'a Storage) -> Self {
Self { storage }
}
#[tracing::instrument(skip(self, provided_password), name = "Authenticator::authenticate")]
pub async fn authenticate(&self, login: &str, provided_password: &str) -> Result<Verdict> {
let Some(stored_user) = self.services.storage.retrieve_user_by_name(login).await? else {
let Some(stored_user) = self.storage.retrieve_user_by_name(login).await? else {
return Ok(Verdict::UserNotFound);
};
if let Some(argon2_hash) = stored_user.argon2_hash {
@ -39,9 +47,9 @@ impl LavinaCore {
Ok(Verdict::InvalidPassword)
}
#[tracing::instrument(skip(self, provided_password), name = "Services::set_password")]
#[tracing::instrument(skip(self, provided_password), name = "Authenticator::set_password")]
pub async fn set_password(&self, login: &str, provided_password: &str) -> Result<UpdatePasswordResult> {
let Some(u) = self.services.storage.retrieve_user_by_name(login).await? else {
let Some(u) = self.storage.retrieve_user_by_name(login).await? else {
return Ok(UpdatePasswordResult::UserNotFound);
};
@ -51,8 +59,8 @@ impl LavinaCore {
.hash_password(provided_password.as_bytes(), &salt)
.map_err(|e| anyhow!("Failed to hash password: {e:?}"))?;
self.services.storage.set_argon2_challenge(u.id, password_hash.to_string().as_str()).await?;
tracing::info!("Password changed for player {login}");
self.storage.set_argon2_challenge(u.id, password_hash.to_string().as_str()).await?;
log::info!("Password changed for player {login}");
Ok(UpdatePasswordResult::PasswordUpdated)
}
}
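
This hunk moves authentication off LavinaCore onto a standalone Authenticator that borrows Storage. A minimal usage sketch under the signatures shown above; the surrounding storage handle and the login/password values are placeholders, not part of this diff:

use lavina_core::auth::{Authenticator, Verdict};
use lavina_core::repo::Storage;

// Sketch: per-request authentication against a borrowed storage handle.
async fn login_flow(storage: &Storage, login: &str, password: &str) -> anyhow::Result<bool> {
    let auth = Authenticator::new(storage);
    match auth.authenticate(login, password).await? {
        Verdict::Authenticated => Ok(true),
        // UserNotFound and InvalidPassword both count as a failed login here.
        _ => Ok(false),
    }
}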


@ -1,56 +0,0 @@
use std::collections::HashMap;
use std::net::SocketAddr;
use anyhow::{anyhow, Result};
use reqwest::Client;
use reqwest_middleware::{ClientBuilder, ClientWithMiddleware};
use reqwest_tracing::{DefaultSpanBackend, TracingMiddleware};
use serde::{Deserialize, Serialize};
pub mod broadcast;
pub mod room;
type Addresses = Vec<SocketAddr>;
#[derive(Deserialize, Debug, Clone)]
pub struct ClusterConfig {
pub metadata: ClusterMetadata,
pub addresses: Addresses,
}
#[derive(Deserialize, Debug, Clone)]
pub struct ClusterMetadata {
/// The node id of the current node.
pub node_id: u32,
/// Owns all rooms in the cluster except the ones specified in `rooms`.
pub main_owner: u32,
pub rooms: HashMap<String, u32>,
}
pub struct LavinaClient {
addresses: Addresses,
client: ClientWithMiddleware,
}
impl LavinaClient {
pub fn new(addresses: Addresses) -> Self {
let client = ClientBuilder::new(Client::new()).with(TracingMiddleware::<DefaultSpanBackend>::new()).build();
Self { addresses, client }
}
async fn send_request(&self, node_id: u32, path: &str, req: impl Serialize) -> Result<()> {
let Some(address) = self.addresses.get(node_id as usize) else {
return Err(anyhow!("Unknown node"));
};
match self.client.post(format!("http://{}{}", address, path)).json(&req).send().await {
Ok(res) => {
if res.status().is_server_error() || res.status().is_client_error() {
tracing::error!("Cluster request failed: {:?}", res);
return Err(anyhow!("Server error"));
}
Ok(())
}
Err(e) => Err(e.into()),
}
}
}


@ -1,58 +0,0 @@
use std::collections::{HashMap, HashSet};
use chrono::{DateTime, Utc};
use tokio::sync::Mutex;
use crate::player::{PlayerId, PlayerRegistry, Updates};
use crate::prelude::Str;
use crate::room::RoomId;
/// Receives updates from other nodes and broadcasts them to local player actors.
struct BroadcastingInner {
subscriptions: HashMap<RoomId, HashSet<PlayerId>>,
}
pub struct Broadcasting(Mutex<BroadcastingInner>);
impl Broadcasting {
pub fn new() -> Self {
let inner = BroadcastingInner {
subscriptions: HashMap::new(),
};
Self(Mutex::new(inner))
}
/// Broadcasts the given update to subscribed player actors on local node.
#[tracing::instrument(skip(self, players, message, created_at), name = "Broadcasting::broadcast")]
pub async fn broadcast(
&self,
players: &PlayerRegistry,
room_id: RoomId,
author_id: PlayerId,
message: Str,
created_at: DateTime<Utc>,
) {
let inner = self.0.lock().await;
let Some(subscribers) = inner.subscriptions.get(&room_id) else {
return;
};
let update = Updates::NewMessage {
room_id: room_id.clone(),
author_id: author_id.clone(),
body: message.clone(),
created_at: created_at.clone(),
};
for i in subscribers {
if i == &author_id {
continue;
}
let Some(player) = players.get_player(i).await else {
continue;
};
player.update(update.clone()).await;
}
}
pub async fn subscribe(&self, subscriber: PlayerId, room_id: RoomId) {
self.0.lock().await.subscriptions.entry(room_id).or_insert_with(HashSet::new).insert(subscriber);
}
}


@ -1,88 +0,0 @@
use anyhow::Result;
use serde::{Deserialize, Serialize};
use crate::clustering::LavinaClient;
use crate::player::PlayerId;
use crate::prelude::Str;
use crate::room::RoomId;
use crate::LavinaCore;
pub mod paths {
pub const JOIN: &'static str = "/cluster/rooms/join";
pub const LEAVE: &'static str = "/cluster/rooms/leave";
pub const ADD_MESSAGE: &'static str = "/cluster/rooms/add_message";
pub const SET_TOPIC: &'static str = "/cluster/rooms/set_topic";
}
#[derive(Serialize, Deserialize, Debug)]
pub struct JoinRoomReq<'a> {
pub room_id: &'a str,
pub player_id: &'a str,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct LeaveRoomReq<'a> {
pub room_id: &'a str,
pub player_id: &'a str,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct SendMessageReq<'a> {
pub room_id: &'a str,
pub player_id: &'a str,
pub message: &'a str,
pub created_at: &'a str,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct SetRoomTopicReq<'a> {
pub room_id: &'a str,
pub player_id: &'a str,
pub topic: &'a str,
}
impl LavinaClient {
#[tracing::instrument(skip(self, req), name = "LavinaClient::join_room")]
pub async fn join_room(&self, node_id: u32, req: JoinRoomReq<'_>) -> anyhow::Result<()> {
self.send_request(node_id, paths::JOIN, req).await
}
#[tracing::instrument(skip(self, req), name = "LavinaClient::leave_room")]
pub async fn leave_room(&self, node_id: u32, req: LeaveRoomReq<'_>) -> anyhow::Result<()> {
self.send_request(node_id, paths::LEAVE, req).await
}
#[tracing::instrument(skip(self, req), name = "LavinaClient::send_room_message")]
pub async fn send_room_message(&self, node_id: u32, req: SendMessageReq<'_>) -> anyhow::Result<()> {
self.send_request(node_id, paths::ADD_MESSAGE, req).await
}
#[tracing::instrument(skip(self, req), name = "LavinaClient::set_room_topic")]
pub async fn set_room_topic(&self, node_id: u32, req: SetRoomTopicReq<'_>) -> anyhow::Result<()> {
self.send_request(node_id, paths::SET_TOPIC, req).await
}
}
impl LavinaCore {
pub async fn cluster_join_room(&self, room_id: RoomId, player_id: &PlayerId) -> Result<()> {
let room_handle = self.services.rooms.get_or_create_room(&self.services, room_id).await?;
let storage_id =
self.services.storage.create_or_retrieve_user_id_by_name(player_id.as_inner().as_ref()).await?;
room_handle.add_member(&self.services, &player_id, storage_id).await;
Ok(())
}
pub async fn cluster_send_room_message(
&self,
room_id: RoomId,
player_id: &PlayerId,
message: Str,
created_at: chrono::DateTime<chrono::Utc>,
) -> Result<Option<()>> {
let Some(room_handle) = self.services.rooms.get_room(&self.services, &room_id).await else {
return Ok(None);
};
room_handle.send_message(&self.services, &player_id, message, created_at).await;
Ok(Some(()))
}
}


@ -4,13 +4,14 @@
//! There are no admins or other roles in dialogs, both participants have equal rights.
use std::collections::HashMap;
use std::sync::Arc;
use chrono::{DateTime, Utc};
use tokio::sync::RwLock as AsyncRwLock;
use crate::player::{PlayerId, Updates};
use crate::player::{PlayerId, PlayerRegistry, Updates};
use crate::prelude::*;
use crate::Services;
use crate::repo::Storage;
/// Id of a conversation between two players.
///
@ -44,25 +45,27 @@ struct Dialog {
struct DialogRegistryInner {
dialogs: HashMap<DialogId, AsyncRwLock<Dialog>>,
players: Option<PlayerRegistry>,
storage: Storage,
}
pub(crate) struct DialogRegistry(AsyncRwLock<DialogRegistryInner>);
#[derive(Clone)]
pub struct DialogRegistry(Arc<AsyncRwLock<DialogRegistryInner>>);
impl DialogRegistry {
pub async fn send_message(
&self,
services: &Services,
from: PlayerId,
to: PlayerId,
body: Str,
created_at: &DateTime<Utc>,
) -> Result<()> {
let guard = self.0.read().await;
let mut guard = self.0.read().await;
let id = DialogId::new(from.clone(), to.clone());
let dialog = guard.dialogs.get(&id);
if let Some(d) = dialog {
let mut d = d.write().await;
services
guard
.storage
.insert_dialog_message(d.storage_id, d.message_count, from.as_inner(), &body, created_at)
.await?;
@ -73,7 +76,7 @@ impl DialogRegistry {
// double check in case concurrent access has loaded this dialog
if let Some(d) = guard2.dialogs.get(&id) {
let mut d = d.write().await;
services
guard2
.storage
.insert_dialog_message(d.storage_id, d.message_count, from.as_inner(), &body, created_at)
.await?;
@ -81,15 +84,15 @@ impl DialogRegistry {
} else {
let (p1, p2) = id.as_inner();
tracing::info!("Dialog {id:?} not found locally, trying to load from storage");
let stored_dialog = match services.storage.retrieve_dialog(p1.as_inner(), p2.as_inner()).await? {
let stored_dialog = match guard2.storage.retrieve_dialog(p1.as_inner(), p2.as_inner()).await? {
Some(t) => t,
None => {
tracing::info!("Dialog {id:?} does not exist, creating a new one in storage");
services.storage.initialize_dialog(p1.as_inner(), p2.as_inner(), created_at).await?
guard2.storage.initialize_dialog(p1.as_inner(), p2.as_inner(), created_at).await?
}
};
tracing::info!("Dialog {id:?} loaded");
services
guard2
.storage
.insert_dialog_message(
stored_dialog.id,
@ -107,10 +110,14 @@ impl DialogRegistry {
};
guard2.dialogs.insert(id.clone(), AsyncRwLock::new(dialog));
}
drop(guard2);
guard = guard2.downgrade();
}
// TODO send message to the other player and persist it
let Some(player) = services.players.get_player(&to).await else {
let Some(players) = &guard.players else {
tracing::error!("No player registry present");
return Ok(());
};
let Some(player) = players.get_player(&to).await else {
tracing::debug!("Player {to:?} not active, not sending message");
return Ok(());
};
@ -126,13 +133,33 @@ impl DialogRegistry {
}
impl DialogRegistry {
pub fn new() -> DialogRegistry {
DialogRegistry(AsyncRwLock::new(DialogRegistryInner {
pub fn new(storage: Storage) -> DialogRegistry {
DialogRegistry(Arc::new(AsyncRwLock::new(DialogRegistryInner {
dialogs: HashMap::new(),
}))
players: None,
storage,
})))
}
pub fn shutdown(self) {}
pub async fn set_players(&self, players: PlayerRegistry) {
let mut guard = self.0.write().await;
guard.players = Some(players);
}
pub async fn unset_players(&self) {
let mut guard = self.0.write().await;
guard.players = None;
}
pub fn shutdown(self) -> Result<()> {
let res = match Arc::try_unwrap(self.0) {
Ok(e) => e,
Err(_) => return Err(fail("failed to acquire dialogs ownership on shutdown")),
};
let res = res.into_inner();
drop(res);
Ok(())
}
}
#[cfg(test)]

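The dialog registry now owns a Storage handle and keeps an optional PlayerRegistry: set_players closes the players/dialogs cycle once both registries exist, and unset_players must break it again before shutdown, otherwise the Arc::try_unwrap above fails. A teardown sketch in that order, assuming players and dialogs were wired as in the LavinaCore hunk further down:

use lavina_core::dialog::DialogRegistry;
use lavina_core::player::PlayerRegistry;

// Sketch: teardown order matters, the cross-reference is dropped before unwrapping the Arcs.
async fn teardown(mut players: PlayerRegistry, dialogs: DialogRegistry) -> anyhow::Result<()> {
    players.shutdown_all().await?;   // stop the player actors first
    dialogs.unset_players().await;   // break the players/dialogs cycle
    players.shutdown()?;             // each registry must be the sole Arc owner from here on
    dialogs.shutdown()?;
    Ok(())
}
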

@ -1,19 +1,13 @@
//! Domain definitions and implementation of common chat logic.
use std::ops::Deref;
use std::sync::Arc;
use anyhow::Result;
use prometheus::Registry as MetricsRegistry;
use crate::clustering::broadcast::Broadcasting;
use crate::clustering::{ClusterConfig, ClusterMetadata, LavinaClient};
use crate::dialog::DialogRegistry;
use crate::player::{PlayerConnection, PlayerId, PlayerRegistry};
use crate::player::PlayerRegistry;
use crate::repo::Storage;
use crate::room::{RoomHandle, RoomId, RoomInfo, RoomRegistry};
use crate::room::RoomRegistry;
pub mod auth;
pub mod clustering;
pub mod dialog;
pub mod player;
pub mod prelude;
@ -25,88 +19,31 @@ mod table;
#[derive(Clone)]
pub struct LavinaCore {
services: Arc<Services>,
}
impl Deref for LavinaCore {
type Target = Services;
fn deref(&self) -> &Self::Target {
&self.services
}
pub players: PlayerRegistry,
pub rooms: RoomRegistry,
pub dialogs: DialogRegistry,
}
impl LavinaCore {
pub async fn connect_to_player(&self, player_id: &PlayerId) -> PlayerConnection {
self.services.players.connect_to_player(&self, player_id).await
}
pub async fn get_room(&self, room_id: &RoomId) -> Option<RoomHandle> {
self.services.rooms.get_room(&self.services, room_id).await
}
pub async fn create_player(&self, player_id: &PlayerId) -> Result<()> {
self.services.storage.create_user(player_id.as_inner()).await
}
pub async fn get_all_rooms(&self) -> Vec<RoomInfo> {
self.services.rooms.get_all_rooms().await
}
pub async fn stop_player(&self, player_id: &PlayerId) -> Result<Option<()>> {
self.services.players.stop_player(player_id).await
}
}
pub struct Services {
pub(crate) players: PlayerRegistry,
pub(crate) rooms: RoomRegistry,
pub(crate) dialogs: DialogRegistry,
pub(crate) broadcasting: Broadcasting,
pub(crate) client: LavinaClient,
pub(crate) storage: Storage,
pub(crate) cluster_metadata: Arc<ClusterMetadata>,
}
impl LavinaCore {
pub async fn new(
metrics: &mut MetricsRegistry,
cluster_config: ClusterConfig,
storage: Storage,
) -> Result<LavinaCore> {
pub async fn new(mut metrics: MetricsRegistry, storage: Storage) -> Result<LavinaCore> {
// TODO shutdown all services in reverse order on error
let broadcasting = Broadcasting::new();
let client = LavinaClient::new(cluster_config.addresses.clone());
let rooms = RoomRegistry::new(metrics)?;
let dialogs = DialogRegistry::new();
let players = PlayerRegistry::empty(metrics)?;
let services = Services {
let rooms = RoomRegistry::new(&mut metrics, storage.clone())?;
let dialogs = DialogRegistry::new(storage.clone());
let players = PlayerRegistry::empty(rooms.clone(), dialogs.clone(), storage.clone(), &mut metrics)?;
dialogs.set_players(players.clone()).await;
Ok(LavinaCore {
players,
rooms,
dialogs,
broadcasting,
client,
storage,
cluster_metadata: Arc::new(cluster_config.metadata),
};
Ok(LavinaCore {
services: Arc::new(services),
})
}
pub async fn shutdown(self) -> Storage {
let _ = self.players.shutdown_all().await;
let services = match Arc::try_unwrap(self.services) {
Ok(e) => e,
Err(_) => {
panic!("failed to acquire services ownership on shutdown");
}
};
let _ = services.players.shutdown();
let _ = services.dialogs.shutdown();
let _ = services.rooms.shutdown();
services.storage
pub async fn shutdown(mut self) -> Result<()> {
self.players.shutdown_all().await?;
self.dialogs.unset_players().await;
self.players.shutdown()?;
self.dialogs.shutdown()?;
self.rooms.shutdown()?;
Ok(())
}
}
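
With the cluster client removed, LavinaCore::new now takes only the metrics registry and an opened Storage, and shutdown returns Result<()> instead of handing the Storage back. A start-up sketch under those signatures; the database path is a placeholder and StorageConfig is assumed to live next to Storage in lavina_core::repo:

use lavina_core::repo::{Storage, StorageConfig};
use lavina_core::LavinaCore;
use prometheus::Registry as MetricsRegistry;

// Sketch: open storage, build the core, run projections, shut down in order.
async fn run() -> anyhow::Result<()> {
    let storage = Storage::open(StorageConfig { db_path: "db.sqlite".into() }).await?;
    let core = LavinaCore::new(MetricsRegistry::new(), storage).await?;
    // ... serve the IRC/XMPP projections via core.players, core.rooms and core.dialogs ...
    core.shutdown().await?;
    Ok(())
}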


@ -8,6 +8,7 @@
//! A player actor is a serial handler of commands from a single player. It is preferable to run all per-player validations in the player actor,
//! so that they don't overload the room actor.
use std::collections::{HashMap, HashSet};
use std::sync::Arc;
use chrono::{DateTime, Utc};
use prometheus::{IntGauge, Registry as MetricsRegistry};
@ -16,11 +17,11 @@ use tokio::sync::mpsc::{channel, Receiver, Sender};
use tokio::sync::RwLock;
use tracing::{Instrument, Span};
use crate::clustering::room::*;
use crate::dialog::DialogRegistry;
use crate::prelude::*;
use crate::room::{RoomHandle, RoomId, RoomInfo};
use crate::repo::Storage;
use crate::room::{RoomHandle, RoomId, RoomInfo, RoomRegistry};
use crate::table::{AnonTable, Key as AnonKey};
use crate::LavinaCore;
/// Opaque player identifier. Cannot contain spaces, must be shorter than 32.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize)]
@ -58,7 +59,7 @@ pub struct PlayerConnection {
player_handle: PlayerHandle,
}
impl PlayerConnection {
/// Handled in [Player::send_room_message].
/// Handled in [Player::send_message].
#[tracing::instrument(skip(self, body), name = "PlayerConnection::send_message")]
pub async fn send_message(&mut self, room_id: RoomId, body: Str) -> Result<SendMessageResult> {
let (promise, deferred) = oneshot();
@ -76,7 +77,7 @@ impl PlayerConnection {
Ok(deferred.await?)
}
/// Handled in [Player::change_room_topic].
/// Handled in [Player::change_topic].
#[tracing::instrument(skip(self, new_topic), name = "PlayerConnection::change_topic")]
pub async fn change_topic(&mut self, room_id: RoomId, new_topic: Str) -> Result<()> {
let (promise, deferred) = oneshot();
@ -123,15 +124,6 @@ impl PlayerConnection {
self.player_handle.send(ActorCommand::ClientCommand(cmd, self.connection_id.clone())).await;
Ok(deferred.await?)
}
/// Handler in [Player::check_user_existence].
#[tracing::instrument(skip(self), name = "PlayerConnection::check_user_existence")]
pub async fn check_user_existence(&self, recipient: PlayerId) -> Result<GetInfoResult> {
let (promise, deferred) = oneshot();
let cmd = ClientCommand::GetInfo { recipient, promise };
self.player_handle.send(ActorCommand::ClientCommand(cmd, self.connection_id.clone())).await;
Ok(deferred.await?)
}
}
/// Handle to a player actor.
@ -208,15 +200,6 @@ pub enum ClientCommand {
body: Str,
promise: Promise<()>,
},
GetInfo {
recipient: PlayerId,
promise: Promise<GetInfoResult>,
},
}
pub enum GetInfoResult {
UserExists,
UserDoesntExist,
}
pub enum JoinResult {
@ -262,21 +245,35 @@ pub enum Updates {
}
/// Handle to a player registry — a shared data structure containing information about players.
pub(crate) struct PlayerRegistry(RwLock<PlayerRegistryInner>);
#[derive(Clone)]
pub struct PlayerRegistry(Arc<RwLock<PlayerRegistryInner>>);
impl PlayerRegistry {
pub fn empty(metrics: &mut MetricsRegistry) -> Result<PlayerRegistry> {
pub fn empty(
room_registry: RoomRegistry,
dialogs: DialogRegistry,
storage: Storage,
metrics: &mut MetricsRegistry,
) -> Result<PlayerRegistry> {
let metric_active_players = IntGauge::new("chat_players_active", "Number of alive player actors")?;
metrics.register(Box::new(metric_active_players.clone()))?;
let inner = PlayerRegistryInner {
room_registry,
dialogs,
storage,
players: HashMap::new(),
metric_active_players,
};
Ok(PlayerRegistry(RwLock::new(inner)))
Ok(PlayerRegistry(Arc::new(RwLock::new(inner))))
}
pub fn shutdown(self) {
let res = self.0.into_inner();
pub fn shutdown(self) -> Result<()> {
let res = match Arc::try_unwrap(self.0) {
Ok(e) => e,
Err(_) => return Err(fail("failed to acquire players ownership on shutdown")),
};
let res = res.into_inner();
drop(res);
Ok(())
}
#[tracing::instrument(skip(self), name = "PlayerRegistry::get_player")]
@ -299,8 +296,8 @@ impl PlayerRegistry {
}
}
#[tracing::instrument(skip(self, core), name = "PlayerRegistry::get_or_launch_player")]
pub async fn get_or_launch_player(&self, core: &LavinaCore, id: &PlayerId) -> PlayerHandle {
#[tracing::instrument(skip(self), name = "PlayerRegistry::get_or_launch_player")]
pub async fn get_or_launch_player(&self, id: &PlayerId) -> PlayerHandle {
let inner = self.0.read().await;
if let Some((handle, _)) = inner.players.get(id) {
handle.clone()
@ -310,7 +307,13 @@ impl PlayerRegistry {
if let Some((handle, _)) = inner.players.get(id) {
handle.clone()
} else {
let (handle, fiber) = Player::launch(id.clone(), core.clone()).await;
let (handle, fiber) = Player::launch(
id.clone(),
inner.room_registry.clone(),
inner.dialogs.clone(),
inner.storage.clone(),
)
.await;
inner.players.insert(id.clone(), (handle.clone(), fiber));
inner.metric_active_players.inc();
handle
@ -318,13 +321,13 @@ impl PlayerRegistry {
}
}
#[tracing::instrument(skip(self, core), name = "PlayerRegistry::connect_to_player")]
pub async fn connect_to_player(&self, core: &LavinaCore, id: &PlayerId) -> PlayerConnection {
let player_handle = self.get_or_launch_player(core, id).await;
#[tracing::instrument(skip(self), name = "PlayerRegistry::connect_to_player")]
pub async fn connect_to_player(&self, id: &PlayerId) -> PlayerConnection {
let player_handle = self.get_or_launch_player(id).await;
player_handle.subscribe().await
}
pub async fn shutdown_all(&self) -> Result<()> {
pub async fn shutdown_all(&mut self) -> Result<()> {
let mut inner = self.0.write().await;
for (i, (k, j)) in inner.players.drain() {
k.send(ActorCommand::Stop).await;
@ -339,33 +342,38 @@ impl PlayerRegistry {
/// The player registry state representation.
struct PlayerRegistryInner {
room_registry: RoomRegistry,
dialogs: DialogRegistry,
storage: Storage,
/// Active player actors.
players: HashMap<PlayerId, (PlayerHandle, JoinHandle<Player>)>,
metric_active_players: IntGauge,
}
enum RoomRef {
Local(RoomHandle),
Remote { node_id: u32 },
}
/// Player actor inner state representation.
struct Player {
player_id: PlayerId,
storage_id: u32,
connections: AnonTable<Sender<ConnectionMessage>>,
my_rooms: HashMap<RoomId, RoomRef>,
my_rooms: HashMap<RoomId, RoomHandle>,
banned_from: HashSet<RoomId>,
rx: Receiver<(ActorCommand, Span)>,
handle: PlayerHandle,
services: LavinaCore,
rooms: RoomRegistry,
dialogs: DialogRegistry,
storage: Storage,
}
impl Player {
async fn launch(player_id: PlayerId, core: LavinaCore) -> (PlayerHandle, JoinHandle<Player>) {
async fn launch(
player_id: PlayerId,
rooms: RoomRegistry,
dialogs: DialogRegistry,
storage: Storage,
) -> (PlayerHandle, JoinHandle<Player>) {
let (tx, rx) = channel(32);
let handle = PlayerHandle { tx };
let handle_clone = handle.clone();
let storage_id = core.services.storage.retrieve_user_id_by_name(player_id.as_inner()).await.unwrap().unwrap();
let storage_id = storage.retrieve_user_id_by_name(player_id.as_inner()).await.unwrap().unwrap();
let player = Player {
player_id,
storage_id,
@ -377,35 +385,22 @@ impl Player {
banned_from: HashSet::new(),
rx,
handle,
services: core,
rooms,
dialogs,
storage,
};
let fiber = tokio::task::spawn(player.main_loop());
(handle_clone, fiber)
}
fn room_location(&self, room_id: &RoomId) -> Option<u32> {
let res = self.services.cluster_metadata.rooms.get(room_id.as_inner().as_ref()).copied();
let node = res.unwrap_or(self.services.cluster_metadata.main_owner);
if node == self.services.cluster_metadata.node_id {
None
} else {
Some(node)
}
}
async fn main_loop(mut self) -> Self {
let rooms = self.services.storage.get_rooms_of_a_user(self.storage_id).await.unwrap();
let rooms = self.storage.get_rooms_of_a_user(self.storage_id).await.unwrap();
for room_id in rooms {
if let Some(remote_node) = self.room_location(&room_id) {
self.my_rooms.insert(room_id.clone(), RoomRef::Remote { node_id: remote_node });
self.services.broadcasting.subscribe(self.player_id.clone(), room_id).await;
let room = self.rooms.get_room(&room_id).await;
if let Some(room) = room {
self.my_rooms.insert(room_id, room);
} else {
let room = self.services.rooms.get_room(&self.services, &room_id).await;
if let Some(room) = room {
self.my_rooms.insert(room_id, RoomRef::Local(room));
} else {
tracing::error!("Room #{room_id:?} not found");
}
tracing::error!("Room #{room_id:?} not found");
}
}
while let Some(cmd) = self.rx.recv().await {
@ -482,7 +477,7 @@ impl Player {
let _ = promise.send(());
}
ClientCommand::SendMessage { room_id, body, promise } => {
let result = self.send_room_message(connection_id, room_id, body).await;
let result = self.send_message(connection_id, room_id, body).await;
let _ = promise.send(result);
}
ClientCommand::ChangeTopic {
@ -490,7 +485,7 @@ impl Player {
new_topic,
promise,
} => {
self.change_room_topic(connection_id, room_id, new_topic).await;
self.change_topic(connection_id, room_id, new_topic).await;
let _ = promise.send(());
}
ClientCommand::GetRooms { promise } => {
@ -505,10 +500,6 @@ impl Player {
self.send_dialog_message(connection_id, recipient, body).await;
let _ = promise.send(());
}
ClientCommand::GetInfo { recipient, promise } => {
let result = self.check_user_existence(recipient).await;
let _ = promise.send(result);
}
}
}
@ -521,62 +512,31 @@ impl Player {
return JoinResult::AlreadyJoined;
}
if let Some(remote_node) = self.room_location(&room_id) {
let req = JoinRoomReq {
room_id: room_id.as_inner(),
player_id: self.player_id.as_inner(),
};
self.services.client.join_room(remote_node, req).await.unwrap();
let room_storage_id =
self.services.storage.create_or_retrieve_room_id_by_name(room_id.as_inner()).await.unwrap();
self.services.storage.add_room_member(room_storage_id, self.storage_id).await.unwrap();
self.my_rooms.insert(room_id.clone(), RoomRef::Remote { node_id: remote_node });
JoinResult::Success(RoomInfo {
id: room_id,
topic: "unknown".into(),
members: vec![],
})
} else {
let room = match self.services.rooms.get_or_create_room(&self.services, room_id.clone()).await {
Ok(room) => room,
Err(e) => {
log::error!("Failed to get or create room: {e}");
todo!();
}
};
room.add_member(&self.services, &self.player_id, self.storage_id).await;
room.subscribe(&self.player_id, self.handle.clone()).await;
self.my_rooms.insert(room_id.clone(), RoomRef::Local(room.clone()));
let room_info = room.get_room_info().await;
let update = Updates::RoomJoined {
room_id,
new_member_id: self.player_id.clone(),
};
self.broadcast_update(update, connection_id).await;
JoinResult::Success(room_info)
}
let room = match self.rooms.get_or_create_room(room_id.clone()).await {
Ok(room) => room,
Err(e) => {
log::error!("Failed to get or create room: {e}");
todo!();
}
};
room.add_member(&self.player_id, self.storage_id).await;
room.subscribe(&self.player_id, self.handle.clone()).await;
self.my_rooms.insert(room_id.clone(), room.clone());
let room_info = room.get_room_info().await;
let update = Updates::RoomJoined {
room_id,
new_member_id: self.player_id.clone(),
};
self.broadcast_update(update, connection_id).await;
JoinResult::Success(room_info)
}
#[tracing::instrument(skip(self), name = "Player::leave_room")]
async fn leave_room(&mut self, connection_id: ConnectionId, room_id: RoomId) {
let room = self.my_rooms.remove(&room_id);
if let Some(room) = room {
match room {
RoomRef::Local(room) => {
room.unsubscribe(&self.player_id).await;
room.remove_member(&self.services, &self.player_id, self.storage_id).await;
}
RoomRef::Remote { node_id } => {
let req = LeaveRoomReq {
room_id: room_id.as_inner(),
player_id: self.player_id.as_inner(),
};
self.services.client.leave_room(node_id, req).await.unwrap();
let room_storage_id =
self.services.storage.create_or_retrieve_room_id_by_name(room_id.as_inner()).await.unwrap();
self.services.storage.remove_room_member(room_storage_id, self.storage_id).await.unwrap();
}
}
room.unsubscribe(&self.player_id).await;
room.remove_member(&self.player_id, self.storage_id).await;
}
let update = Updates::RoomLeft {
room_id,
@ -585,42 +545,14 @@ impl Player {
self.broadcast_update(update, connection_id).await;
}
#[tracing::instrument(skip(self, body), name = "Player::send_room_message")]
async fn send_room_message(
&mut self,
connection_id: ConnectionId,
room_id: RoomId,
body: Str,
) -> SendMessageResult {
#[tracing::instrument(skip(self, body), name = "Player::send_message")]
async fn send_message(&mut self, connection_id: ConnectionId, room_id: RoomId, body: Str) -> SendMessageResult {
let Some(room) = self.my_rooms.get(&room_id) else {
tracing::info!("no room found");
return SendMessageResult::NoSuchRoom;
};
let created_at = chrono::Utc::now();
match room {
RoomRef::Local(room) => {
room.send_message(&self.services, &self.player_id, body.clone(), created_at.clone()).await;
}
RoomRef::Remote { node_id } => {
let req = SendMessageReq {
room_id: room_id.as_inner(),
player_id: self.player_id.as_inner(),
message: &*body,
created_at: &*created_at.to_rfc3339(),
};
self.services.client.send_room_message(*node_id, req).await.unwrap();
self.services
.broadcasting
.broadcast(
&self.services.players,
room_id.clone(),
self.player_id.clone(),
body.clone(),
created_at.clone(),
)
.await;
}
}
room.send_message(&self.player_id, body.clone(), created_at.clone()).await;
let update = Updates::NewMessage {
room_id,
author_id: self.player_id.clone(),
@ -631,25 +563,13 @@ impl Player {
SendMessageResult::Success(created_at)
}
#[tracing::instrument(skip(self, new_topic), name = "Player::change_room_topic")]
async fn change_room_topic(&mut self, connection_id: ConnectionId, room_id: RoomId, new_topic: Str) {
#[tracing::instrument(skip(self, new_topic), name = "Player::change_topic")]
async fn change_topic(&mut self, connection_id: ConnectionId, room_id: RoomId, new_topic: Str) {
let Some(room) = self.my_rooms.get(&room_id) else {
tracing::info!("no room found");
return;
};
match room {
RoomRef::Local(room) => {
room.set_topic(&self.services, &self.player_id, new_topic.clone()).await;
}
RoomRef::Remote { node_id } => {
let req = SetRoomTopicReq {
room_id: room_id.as_inner(),
player_id: self.player_id.as_inner(),
topic: &*new_topic,
};
self.services.client.set_room_topic(*node_id, req).await.unwrap();
}
}
room.set_topic(&self.player_id, new_topic.clone()).await;
let update = Updates::RoomTopicChanged { room_id, new_topic };
self.broadcast_update(update, connection_id).await;
}
@ -657,20 +577,8 @@ impl Player {
#[tracing::instrument(skip(self), name = "Player::get_rooms")]
async fn get_rooms(&self) -> Vec<RoomInfo> {
let mut response = vec![];
for (room_id, handle) in &self.my_rooms {
match handle {
RoomRef::Local(handle) => {
response.push(handle.get_room_info().await);
}
RoomRef::Remote { .. } => {
let room_info = RoomInfo {
id: room_id.clone(),
topic: "unknown".into(),
members: vec![],
};
response.push(room_info);
}
}
for (_, handle) in &self.my_rooms {
response.push(handle.get_room_info().await);
}
response
}
@ -678,17 +586,7 @@ impl Player {
#[tracing::instrument(skip(self, body), name = "Player::send_dialog_message")]
async fn send_dialog_message(&self, connection_id: ConnectionId, recipient: PlayerId, body: Str) {
let created_at = chrono::Utc::now();
self.services
.dialogs
.send_message(
&self.services,
self.player_id.clone(),
recipient.clone(),
body.clone(),
&created_at,
)
.await
.unwrap();
self.dialogs.send_message(self.player_id.clone(), recipient.clone(), body.clone(), &created_at).await.unwrap();
let update = Updates::NewDialogMessage {
sender: self.player_id.clone(),
receiver: recipient.clone(),
@ -698,15 +596,6 @@ impl Player {
self.broadcast_update(update, connection_id).await;
}
#[tracing::instrument(skip(self), name = "Player::check_user_existence")]
async fn check_user_existence(&self, recipient: PlayerId) -> GetInfoResult {
if self.services.storage.check_user_existence(recipient.as_inner().as_ref()).await.unwrap() {
GetInfoResult::UserExists
} else {
GetInfoResult::UserDoesntExist
}
}
/// Broadcasts an update to all connections except the one with the given id.
///
/// This is called after handling a client command.

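Since PlayerRegistry::connect_to_player no longer needs a LavinaCore, a projection can reach a player actor through the registry alone. A sketch of the calling side; the PlayerId::from constructor and the &str-to-Str conversions are assumed to mirror RoomId::from and are not shown in this diff:

use lavina_core::player::{PlayerId, PlayerRegistry};
use lavina_core::room::RoomId;

// Sketch: connect to a player actor and post into a room it has already joined.
async fn say_hello(players: &PlayerRegistry) -> anyhow::Result<()> {
    let alice = PlayerId::from("alice")?;                  // assumed constructor
    let mut conn = players.connect_to_player(&alice).await;
    // send_message answers with NoSuchRoom if the player is not in the room.
    let _result = conn.send_message(RoomId::from("general")?, "hello".into()).await?;
    Ok(())
}
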

@ -1,10 +1,13 @@
//! Storage and persistence logic.
use std::str::FromStr;
use std::sync::Arc;
use anyhow::anyhow;
use chrono::{DateTime, Utc};
use serde::Deserialize;
use sqlx::sqlite::SqliteConnectOptions;
use sqlx::{ConnectOptions, Connection, SqliteConnection};
use sqlx::{ConnectOptions, Connection, FromRow, Sqlite, SqliteConnection, Transaction};
use tokio::sync::Mutex;
use crate::prelude::*;
@ -19,8 +22,9 @@ pub struct StorageConfig {
pub db_path: String,
}
#[derive(Clone)]
pub struct Storage {
conn: Mutex<SqliteConnection>,
conn: Arc<Mutex<SqliteConnection>>,
}
impl Storage {
pub async fn open(config: StorageConfig) -> Result<Storage> {
@ -32,17 +36,160 @@ impl Storage {
migrator.run(&mut conn).await?;
log::info!("Migrations passed");
let conn = Mutex::new(conn);
let conn = Arc::new(Mutex::new(conn));
Ok(Storage { conn })
}
pub async fn close(self) {
let res = self.conn.into_inner();
match res.close().await {
Ok(_) => {}
#[tracing::instrument(skip(self), name = "Storage::retrieve_user_by_name")]
pub async fn retrieve_user_by_name(&self, name: &str) -> Result<Option<StoredUser>> {
let mut executor = self.conn.lock().await;
let res = sqlx::query_as(
"select u.id, u.name, c.password, a.hash as argon2_hash
from users u left join challenges_plain_password c on u.id = c.user_id
left join challenges_argon2_password a on u.id = a.user_id
where u.name = ?;",
)
.bind(name)
.fetch_optional(&mut *executor)
.await?;
Ok(res)
}
#[tracing::instrument(skip(self), name = "Storage::retrieve_room_by_name")]
pub async fn retrieve_room_by_name(&self, name: &str) -> Result<Option<StoredRoom>> {
let mut executor = self.conn.lock().await;
let res = sqlx::query_as(
"select id, name, topic, message_count
from rooms
where name = ?;",
)
.bind(name)
.fetch_optional(&mut *executor)
.await?;
Ok(res)
}
#[tracing::instrument(skip(self, topic), name = "Storage::create_new_room")]
pub async fn create_new_room(&mut self, name: &str, topic: &str) -> Result<u32> {
let mut executor = self.conn.lock().await;
let (id,): (u32,) = sqlx::query_as(
"insert into rooms(name, topic)
values (?, ?)
returning id;",
)
.bind(name)
.bind(topic)
.fetch_one(&mut *executor)
.await?;
Ok(id)
}
#[tracing::instrument(skip(self, content, created_at), name = "Storage::insert_message")]
pub async fn insert_message(
&mut self,
room_id: u32,
id: u32,
content: &str,
author_id: &str,
created_at: &DateTime<Utc>,
) -> Result<()> {
let mut executor = self.conn.lock().await;
let res: Option<(u32,)> = sqlx::query_as("select id from users where name = ?;")
.bind(author_id)
.fetch_optional(&mut *executor)
.await?;
let Some((author_id,)) = res else {
return Err(anyhow!("No such user"));
};
sqlx::query(
"insert into messages(room_id, id, content, author_id, created_at)
values (?, ?, ?, ?, ?);
update rooms set message_count = message_count + 1 where id = ?;",
)
.bind(room_id)
.bind(id)
.bind(content)
.bind(author_id)
.bind(created_at.to_string())
.bind(room_id)
.execute(&mut *executor)
.await?;
Ok(())
}
pub async fn close(self) -> Result<()> {
let res = match Arc::try_unwrap(self.conn) {
Ok(e) => e,
Err(_) => return Err(fail("failed to acquire DB ownership on shutdown")),
};
let res = res.into_inner();
res.close().await?;
Ok(())
}
#[tracing::instrument(skip(self), name = "Storage::create_user")]
pub async fn create_user(&mut self, name: &str) -> Result<()> {
let query = sqlx::query(
"insert into users(name)
values (?);",
)
.bind(name);
let mut executor = self.conn.lock().await;
query.execute(&mut *executor).await?;
Ok(())
}
#[tracing::instrument(skip(self, pwd), name = "Storage::set_password")]
pub async fn set_password<'a>(&'a self, name: &'a str, pwd: &'a str) -> Result<Option<()>> {
async fn inner(txn: &mut Transaction<'_, Sqlite>, name: &str, pwd: &str) -> Result<Option<()>> {
let id: Option<(u32,)> = sqlx::query_as("select * from users where name = ? limit 1;")
.bind(name)
.fetch_optional(&mut **txn)
.await?;
let Some((id,)) = id else {
return Ok(None);
};
sqlx::query("insert or replace into challenges_plain_password(user_id, password) values (?, ?);")
.bind(id)
.bind(pwd)
.execute(&mut **txn)
.await?;
Ok(Some(()))
}
let mut executor = self.conn.lock().await;
let mut tx = executor.begin().await?;
let res = inner(&mut tx, name, pwd).await;
match res {
Ok(e) => {
tx.commit().await?;
Ok(e)
}
Err(e) => {
tracing::error!("Failed to close the DB connection: {e:?}");
tx.rollback().await?;
Err(e)
}
}
}
}
#[derive(FromRow)]
pub struct StoredUser {
pub id: u32,
pub name: String,
pub password: Option<String>,
pub argon2_hash: Option<Box<str>>,
}
#[derive(FromRow)]
pub struct StoredRoom {
pub id: u32,
pub name: String,
pub topic: String,
pub message_count: u32,
}
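
Storage is now Clone (the connection sits behind an Arc<Mutex<...>>) and the user and room queries live directly on it. A small sketch of the account-provisioning path using the methods above; the user name and password are placeholders:

use lavina_core::repo::Storage;

// Sketch: create a user, attach a plain-password challenge, then read the row back.
async fn provision_user(mut storage: Storage) -> anyhow::Result<()> {
    storage.create_user("alice").await?;
    // set_password returns Ok(None) when no such user exists.
    if storage.set_password("alice", "secret").await?.is_none() {
        anyhow::bail!("user not found after create_user");
    }
    let user = storage.retrieve_user_by_name("alice").await?;
    assert!(user.is_some());
    Ok(())
}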


@ -1,104 +1,8 @@
use anyhow::{anyhow, Result};
use chrono::{DateTime, Utc};
use sqlx::FromRow;
use anyhow::Result;
use crate::repo::Storage;
use crate::room::RoomId;
#[derive(FromRow)]
pub struct StoredRoom {
pub id: u32,
pub name: String,
pub topic: String,
pub message_count: u32,
}
impl Storage {
#[tracing::instrument(skip(self), name = "Storage::retrieve_room_by_name")]
pub async fn retrieve_room_by_name(&self, name: &str) -> Result<Option<StoredRoom>> {
let mut executor = self.conn.lock().await;
let res = sqlx::query_as(
"select id, name, topic, message_count
from rooms
where name = ?;",
)
.bind(name)
.fetch_optional(&mut *executor)
.await?;
Ok(res)
}
#[tracing::instrument(skip(self, topic), name = "Storage::create_new_room")]
pub async fn create_new_room(&self, name: &str, topic: &str) -> Result<u32> {
let mut executor = self.conn.lock().await;
let (id,): (u32,) = sqlx::query_as(
"insert into rooms(name, topic)
values (?, ?)
returning id;",
)
.bind(name)
.bind(topic)
.fetch_one(&mut *executor)
.await?;
Ok(id)
}
#[tracing::instrument(skip(self, content, created_at), name = "Storage::insert_room_message")]
pub async fn insert_room_message(
&self,
room_id: u32,
id: u32,
content: &str,
author_id: &str,
created_at: &DateTime<Utc>,
) -> Result<()> {
let mut executor = self.conn.lock().await;
let res: Option<(u32,)> = sqlx::query_as("select id from users where name = ?;")
.bind(author_id)
.fetch_optional(&mut *executor)
.await?;
let Some((author_id,)) = res else {
return Err(anyhow!("No such user"));
};
sqlx::query(
"insert into messages(room_id, id, content, author_id, created_at)
values (?, ?, ?, ?, ?);
update rooms set message_count = message_count + 1 where id = ?;",
)
.bind(room_id)
.bind(id)
.bind(content)
.bind(author_id)
.bind(created_at.to_string())
.bind(room_id)
.execute(&mut *executor)
.await?;
Ok(())
}
#[tracing::instrument(skip(self), name = "Storage::is_room_member")]
pub async fn is_room_member(&self, room_id: u32, player_id: u32) -> Result<bool> {
let mut executor = self.conn.lock().await;
let res: (u32,) = sqlx::query_as(
"
select
count(*)
from
memberships
where
user_id = ? and room_id = ?;
",
)
.bind(player_id)
.bind(room_id)
.fetch_one(&mut *executor)
.await?;
Ok(res.0 > 0)
}
#[tracing::instrument(skip(self), name = "Storage::add_room_member")]
pub async fn add_room_member(&self, room_id: u32, player_id: u32) -> Result<()> {
let mut executor = self.conn.lock().await;
@ -130,7 +34,7 @@ impl Storage {
}
#[tracing::instrument(skip(self, topic), name = "Storage::set_room_topic")]
pub async fn set_room_topic(&self, id: u32, topic: &str) -> Result<()> {
pub async fn set_room_topic(&mut self, id: u32, topic: &str) -> Result<()> {
let mut executor = self.conn.lock().await;
sqlx::query(
"update rooms
@ -144,36 +48,4 @@ impl Storage {
Ok(())
}
#[tracing::instrument(skip(self), name = "Storage::create_or_retrieve_room_id_by_name")]
pub async fn create_or_retrieve_room_id_by_name(&self, name: &str) -> Result<u32> {
// TODO we don't need any info except the name on non-owning nodes, should remove stubs here
let mut executor = self.conn.lock().await;
let res: (u32,) = sqlx::query_as(
"insert into rooms(name, topic)
values (?, '')
on conflict(name) do update set name = excluded.name
returning id;",
)
.bind(name)
.fetch_one(&mut *executor)
.await?;
Ok(res.0)
}
#[tracing::instrument(skip(self), name = "Storage::get_rooms_of_a_user")]
pub async fn get_rooms_of_a_user(&self, user_id: u32) -> Result<Vec<RoomId>> {
let mut executor = self.conn.lock().await;
let res: Vec<(String,)> = sqlx::query_as(
"select r.name
from memberships m inner join rooms r on m.room_id = r.id
where m.user_id = ?;",
)
.bind(user_id)
.fetch_all(&mut *executor)
.await?;
res.into_iter().map(|(room_id,)| RoomId::from(room_id)).collect()
}
}


@ -1,91 +1,9 @@
use anyhow::Result;
use sqlx::{Connection, FromRow, Sqlite, Transaction};
use crate::repo::Storage;
#[derive(FromRow)]
pub struct StoredUser {
pub id: u32,
pub name: String,
pub password: Option<String>,
pub argon2_hash: Option<Box<str>>,
}
use crate::room::RoomId;
impl Storage {
#[tracing::instrument(skip(self), name = "Storage::retrieve_user_by_name")]
pub async fn retrieve_user_by_name(&self, name: &str) -> Result<Option<StoredUser>> {
let mut executor = self.conn.lock().await;
let res = sqlx::query_as(
"select u.id, u.name, c.password, a.hash as argon2_hash
from users u left join challenges_plain_password c on u.id = c.user_id
left join challenges_argon2_password a on u.id = a.user_id
where u.name = ?;",
)
.bind(name)
.fetch_optional(&mut *executor)
.await?;
Ok(res)
}
#[tracing::instrument(skip(self), name = "Storage::check_user_existence")]
pub async fn check_user_existence(&self, username: &str) -> Result<bool> {
let mut executor = self.conn.lock().await;
let result: Option<(String,)> = sqlx::query_as("select name from users where name = ?;")
.bind(username)
.fetch_optional(&mut *executor)
.await?;
Ok(result.is_some())
}
#[tracing::instrument(skip(self), name = "Storage::create_user")]
pub async fn create_user(&self, name: &str) -> Result<()> {
let query = sqlx::query(
"insert into users(name)
values (?);",
)
.bind(name);
let mut executor = self.conn.lock().await;
query.execute(&mut *executor).await?;
Ok(())
}
#[tracing::instrument(skip(self, pwd), name = "Storage::set_password")]
pub async fn set_password(&self, name: &str, pwd: &str) -> Result<Option<()>> {
async fn inner(txn: &mut Transaction<'_, Sqlite>, name: &str, pwd: &str) -> Result<Option<()>> {
let id: Option<(u32,)> = sqlx::query_as("select * from users where name = ? limit 1;")
.bind(name)
.fetch_optional(&mut **txn)
.await?;
let Some((id,)) = id else {
return Ok(None);
};
sqlx::query("insert or replace into challenges_plain_password(user_id, password) values (?, ?);")
.bind(id)
.bind(pwd)
.execute(&mut **txn)
.await?;
Ok(Some(()))
}
let mut executor = self.conn.lock().await;
let mut tx = executor.begin().await?;
let res = inner(&mut tx, name, pwd).await;
match res {
Ok(e) => {
tx.commit().await?;
Ok(e)
}
Err(e) => {
tx.rollback().await?;
Err(e)
}
}
}
#[tracing::instrument(skip(self), name = "Storage::retrieve_user_id_by_name")]
pub async fn retrieve_user_id_by_name(&self, name: &str) -> Result<Option<u32>> {
let mut executor = self.conn.lock().await;
let res: Option<(u32,)> = sqlx::query_as("select u.id from users u where u.name = ?;")
@ -96,19 +14,17 @@ impl Storage {
Ok(res.map(|(id,)| id))
}
#[tracing::instrument(skip(self), name = "Storage::create_or_retrieve_user_id_by_name")]
pub async fn create_or_retrieve_user_id_by_name(&self, name: &str) -> Result<u32> {
pub async fn get_rooms_of_a_user(&self, user_id: u32) -> Result<Vec<RoomId>> {
let mut executor = self.conn.lock().await;
let res: (u32,) = sqlx::query_as(
"insert into users(name)
values (?)
on conflict(name) do update set name = excluded.name
returning id;",
let res: Vec<(String,)> = sqlx::query_as(
"select r.name
from memberships m inner join rooms r on m.room_id = r.id
where m.user_id = ?;",
)
.bind(name)
.fetch_one(&mut *executor)
.bind(user_id)
.fetch_all(&mut *executor)
.await?;
Ok(res.0)
res.into_iter().map(|(room_id,)| RoomId::from(room_id)).collect()
}
}


@ -9,12 +9,11 @@ use tokio::sync::RwLock as AsyncRwLock;
use crate::player::{PlayerHandle, PlayerId, Updates};
use crate::prelude::*;
use crate::Services;
use crate::repo::Storage;
/// Opaque room id
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize)]
pub struct RoomId(Str);
impl RoomId {
pub fn from(str: impl Into<Str>) -> Result<RoomId> {
let bytes = str.into();
@ -35,32 +34,40 @@ impl RoomId {
}
/// Shared data structure for storing metadata about rooms.
pub(crate) struct RoomRegistry(AsyncRwLock<RoomRegistryInner>);
#[derive(Clone)]
pub struct RoomRegistry(Arc<AsyncRwLock<RoomRegistryInner>>);
impl RoomRegistry {
pub fn new(metrics: &mut MetricRegistry) -> Result<RoomRegistry> {
pub fn new(metrics: &mut MetricRegistry, storage: Storage) -> Result<RoomRegistry> {
let metric_active_rooms = IntGauge::new("chat_rooms_active", "Number of alive room actors")?;
metrics.register(Box::new(metric_active_rooms.clone()))?;
let inner = RoomRegistryInner {
rooms: HashMap::new(),
metric_active_rooms,
storage,
};
Ok(RoomRegistry(AsyncRwLock::new(inner)))
Ok(RoomRegistry(Arc::new(AsyncRwLock::new(inner))))
}
pub fn shutdown(self) {
// TODO iterate over rooms and stop them
pub fn shutdown(self) -> Result<()> {
let res = match Arc::try_unwrap(self.0) {
Ok(e) => e,
Err(_) => return Err(fail("failed to acquire rooms ownership on shutdown")),
};
let res = res.into_inner();
// TODO drop all rooms
drop(res);
Ok(())
}
#[tracing::instrument(skip(self, services), name = "RoomRegistry::get_or_create_room")]
pub async fn get_or_create_room(&self, services: &Services, room_id: RoomId) -> Result<RoomHandle> {
#[tracing::instrument(skip(self), name = "RoomRegistry::get_or_create_room")]
pub async fn get_or_create_room(&mut self, room_id: RoomId) -> Result<RoomHandle> {
let mut inner = self.0.write().await;
if let Some(room_handle) = inner.get_or_load_room(services, &room_id).await? {
if let Some(room_handle) = inner.get_or_load_room(&room_id).await? {
Ok(room_handle.clone())
} else {
log::debug!("Creating room {}...", &room_id.0);
let topic = "New room";
let id = services.storage.create_new_room(&*room_id.0, &*topic).await?;
let id = inner.storage.create_new_room(&*room_id.0, &*topic).await?;
let room = Room {
storage_id: id,
room_id: room_id.clone(),
@ -68,6 +75,7 @@ impl RoomRegistry {
members: HashSet::new(),
topic: topic.into(),
message_count: 0,
storage: inner.storage.clone(),
};
let room_handle = RoomHandle(Arc::new(AsyncRwLock::new(room)));
inner.rooms.insert(room_id, room_handle.clone());
@ -76,10 +84,10 @@ impl RoomRegistry {
}
}
#[tracing::instrument(skip(self, services), name = "RoomRegistry::get_room")]
pub async fn get_room(&self, services: &Services, room_id: &RoomId) -> Option<RoomHandle> {
#[tracing::instrument(skip(self), name = "RoomRegistry::get_room")]
pub async fn get_room(&self, room_id: &RoomId) -> Option<RoomHandle> {
let mut inner = self.0.write().await;
inner.get_or_load_room(services, room_id).await.unwrap()
inner.get_or_load_room(room_id).await.unwrap()
}
#[tracing::instrument(skip(self), name = "RoomRegistry::get_all_rooms")]
@ -100,15 +108,16 @@ impl RoomRegistry {
struct RoomRegistryInner {
rooms: HashMap<RoomId, RoomHandle>,
metric_active_rooms: IntGauge,
storage: Storage,
}
impl RoomRegistryInner {
#[tracing::instrument(skip(self, services), name = "RoomRegistryInner::get_or_load_room")]
async fn get_or_load_room(&mut self, services: &Services, room_id: &RoomId) -> Result<Option<RoomHandle>> {
#[tracing::instrument(skip(self), name = "RoomRegistryInner::get_or_load_room")]
async fn get_or_load_room(&mut self, room_id: &RoomId) -> Result<Option<RoomHandle>> {
if let Some(room_handle) = self.rooms.get(room_id) {
log::debug!("Room {} was loaded already", &room_id.0);
Ok(Some(room_handle.clone()))
} else if let Some(stored_room) = services.storage.retrieve_room_by_name(&*room_id.0).await? {
} else if let Some(stored_room) = self.storage.retrieve_room_by_name(&*room_id.0).await? {
log::debug!("Loading room {}...", &room_id.0);
let room = Room {
storage_id: stored_room.id,
@ -117,6 +126,7 @@ impl RoomRegistryInner {
members: HashSet::new(), // TODO load members from storage
topic: stored_room.topic.into(),
message_count: stored_room.message_count,
storage: self.storage.clone(),
};
let room_handle = RoomHandle(Arc::new(AsyncRwLock::new(room)));
self.rooms.insert(room_id.clone(), room_handle.clone());
@ -131,7 +141,6 @@ impl RoomRegistryInner {
#[derive(Clone)]
pub struct RoomHandle(Arc<AsyncRwLock<Room>>);
impl RoomHandle {
#[tracing::instrument(skip(self, player_handle), name = "RoomHandle::subscribe")]
pub async fn subscribe(&self, player_id: &PlayerId, player_handle: PlayerHandle) {
@ -140,16 +149,12 @@ impl RoomHandle {
lock.subscriptions.insert(player_id.clone(), player_handle);
}
#[tracing::instrument(skip(self, services), name = "RoomHandle::add_member")]
pub async fn add_member(&self, services: &Services, player_id: &PlayerId, player_storage_id: u32) {
#[tracing::instrument(skip(self), name = "RoomHandle::add_member")]
pub async fn add_member(&self, player_id: &PlayerId, player_storage_id: u32) {
let mut lock = self.0.write().await;
tracing::info!("Adding a new member to a room");
let room_storage_id = lock.storage_id;
if !services.storage.is_room_member(room_storage_id, player_storage_id).await.unwrap() {
services.storage.add_room_member(room_storage_id, player_storage_id).await.unwrap();
} else {
tracing::warn!("User {:#?} has already been added to the room.", player_id);
}
lock.storage.add_room_member(room_storage_id, player_storage_id).await.unwrap();
lock.members.insert(player_id.clone());
let update = Updates::RoomJoined {
room_id: lock.room_id.clone(),
@ -164,12 +169,12 @@ impl RoomHandle {
lock.subscriptions.remove(player_id);
}
#[tracing::instrument(skip(self, services), name = "RoomHandle::remove_member")]
pub async fn remove_member(&self, services: &Services, player_id: &PlayerId, player_storage_id: u32) {
#[tracing::instrument(skip(self), name = "RoomHandle::remove_member")]
pub async fn remove_member(&self, player_id: &PlayerId, player_storage_id: u32) {
let mut lock = self.0.write().await;
tracing::info!("Removing a member from a room");
let room_storage_id = lock.storage_id;
services.storage.remove_room_member(room_storage_id, player_storage_id).await.unwrap();
lock.storage.remove_room_member(room_storage_id, player_storage_id).await.unwrap();
lock.members.remove(player_id);
let update = Updates::RoomLeft {
room_id: lock.room_id.clone(),
@ -178,10 +183,10 @@ impl RoomHandle {
lock.broadcast_update(update, player_id).await;
}
#[tracing::instrument(skip(self, services, body, created_at), name = "RoomHandle::send_message")]
pub async fn send_message(&self, services: &Services, player_id: &PlayerId, body: Str, created_at: DateTime<Utc>) {
#[tracing::instrument(skip(self, body, created_at), name = "RoomHandle::send_message")]
pub async fn send_message(&self, player_id: &PlayerId, body: Str, created_at: DateTime<Utc>) {
let mut lock = self.0.write().await;
let res = lock.send_message(services, player_id, body, created_at).await;
let res = lock.send_message(player_id, body, created_at).await;
if let Err(err) = res {
log::warn!("Failed to send message: {err:?}");
}
@ -197,12 +202,12 @@ impl RoomHandle {
}
}
#[tracing::instrument(skip(self, services, new_topic), name = "RoomHandle::set_topic")]
pub async fn set_topic(&self, services: &Services, changer_id: &PlayerId, new_topic: Str) {
#[tracing::instrument(skip(self, new_topic), name = "RoomHandle::set_topic")]
pub async fn set_topic(&self, changer_id: &PlayerId, new_topic: Str) {
let mut lock = self.0.write().await;
let storage_id = lock.storage_id;
lock.topic = new_topic.clone();
services.storage.set_room_topic(storage_id, &new_topic).await.unwrap();
lock.storage.set_room_topic(storage_id, &new_topic).await.unwrap();
let update = Updates::RoomTopicChanged {
room_id: lock.room_id.clone(),
new_topic: new_topic.clone(),
@ -223,21 +228,14 @@ struct Room {
/// The total number of messages. Used to calculate the id of the new message.
message_count: u32,
topic: Str,
storage: Storage,
}
impl Room {
#[tracing::instrument(skip(self, services, body, created_at), name = "Room::send_message")]
async fn send_message(
&mut self,
services: &Services,
author_id: &PlayerId,
body: Str,
created_at: DateTime<Utc>,
) -> Result<()> {
#[tracing::instrument(skip(self, body, created_at), name = "Room::send_message")]
async fn send_message(&mut self, author_id: &PlayerId, body: Str, created_at: DateTime<Utc>) -> Result<()> {
tracing::info!("Adding a message to room");
services
.storage
.insert_room_message(
self.storage
.insert_message(
self.storage_id,
self.message_count,
&body,

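Room operations no longer thread a Services handle, since the RoomHandle carries its own Storage. A sketch of driving a room through the registry with the post-change signatures; the player_id and storage_id values, and the &str-to-Str conversions, are assumed to come from the player actor as in the player hunk above:

use lavina_core::player::PlayerId;
use lavina_core::room::{RoomId, RoomRegistry};

// Sketch: get or create a room, add a member, then change the topic and post a message.
async fn post_to_room(rooms: &mut RoomRegistry, player_id: &PlayerId, storage_id: u32) -> anyhow::Result<()> {
    let room = rooms.get_or_create_room(RoomId::from("general")?).await?;
    room.add_member(player_id, storage_id).await;
    room.set_topic(player_id, "welcome".into()).await;
    room.send_message(player_id, "hello".into(), chrono::Utc::now()).await;
    Ok(())
}
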

@ -1,21 +0,0 @@
use std::future::Future;
use tokio::io::AsyncWrite;
use lavina_core::player::PlayerConnection;
use lavina_core::prelude::Str;
pub struct IrcConnection<'a, T: AsyncWrite + Unpin> {
pub server_name: Str,
/// client is nick of requester
pub client: Str,
pub writer: &'a mut T,
pub player_connection: &'a mut PlayerConnection,
}
pub trait Handler<T>
where
T: AsyncWrite + Unpin,
{
fn handle(&self, arg: IrcConnection<T>) -> impl Future<Output = anyhow::Result<()>>;
}


@ -14,10 +14,11 @@ use tokio::net::tcp::{ReadHalf, WriteHalf};
use tokio::net::{TcpListener, TcpStream};
use tokio::sync::mpsc::channel;
use lavina_core::auth::Verdict;
use lavina_core::auth::{Authenticator, Verdict};
use lavina_core::player::*;
use lavina_core::prelude::*;
use lavina_core::room::{RoomId, RoomInfo};
use lavina_core::repo::Storage;
use lavina_core::room::{RoomId, RoomInfo, RoomRegistry};
use lavina_core::terminator::Terminator;
use lavina_core::LavinaCore;
use proto_irc::client::CapabilitySubcommand;
@ -27,14 +28,11 @@ use proto_irc::server::{AwayStatus, ServerMessage, ServerMessageBody};
use proto_irc::user::PrefixedNick;
use proto_irc::{Chan, Recipient, Tag};
use sasl::AuthBody;
mod cap;
use handler::Handler;
mod whois;
use crate::cap::Capabilities;
mod handler;
pub const APP_VERSION: &str = concat!("lavina", "_", env!("CARGO_PKG_VERSION"));
#[derive(Deserialize, Debug, Clone)]
@ -60,8 +58,9 @@ async fn handle_socket(
config: ServerConfig,
mut stream: TcpStream,
socket_addr: &SocketAddr,
core: LavinaCore,
mut core: LavinaCore,
termination: Deferred<()>, // TODO use it to stop the connection gracefully
mut storage: Storage,
) -> Result<()> {
log::info!("Received an IRC connection from {socket_addr}");
let (reader, writer) = stream.split();
@ -75,11 +74,11 @@ async fn handle_socket(
log::info!("Socket handling was terminated");
return Ok(())
},
registered_user = handle_registration(&mut reader, &mut writer, &core, &config) =>
registered_user = handle_registration(&mut reader, &mut writer, &mut storage, &config) =>
match registered_user {
Ok(user) => {
log::debug!("User registered");
handle_registered_socket(config, &core, &mut reader, &mut writer, user).await?;
handle_registered_socket(config, core.players, core.rooms, &mut reader, &mut writer, user).await?;
}
Err(err) => {
log::debug!("Registration failed: {err}");
@ -124,7 +123,7 @@ impl RegistrationState {
&mut self,
msg: ClientMessage,
writer: &mut BufWriter<WriteHalf<'_>>,
core: &LavinaCore,
storage: &mut Storage,
config: &ServerConfig,
) -> Result<Option<RegisteredUser>> {
match msg {
@ -215,7 +214,7 @@ impl RegistrationState {
realname,
enabled_capabilities: self.enabled_capabilities,
};
self.finalize_auth(candidate_user, writer, core, config).await
self.finalize_auth(candidate_user, writer, storage, config).await
}
},
ClientMessage::Nick { nickname } => {
@ -229,7 +228,7 @@ impl RegistrationState {
realname: realname.clone(),
enabled_capabilities: self.enabled_capabilities,
};
self.finalize_auth(candidate_user, writer, core, config).await
self.finalize_auth(candidate_user, writer, storage, config).await
} else {
self.future_nickname = Some(nickname);
Ok(None)
@ -246,7 +245,7 @@ impl RegistrationState {
realname,
enabled_capabilities: self.enabled_capabilities,
};
self.finalize_auth(candidate_user, writer, core, config).await
self.finalize_auth(candidate_user, writer, storage, config).await
} else {
self.future_username = Some((username, realname));
Ok(None)
@ -277,7 +276,7 @@ impl RegistrationState {
}
} else {
let body = AuthBody::from_str(body.as_bytes())?;
if let Err(e) = auth_user(core, &body.login, &body.password).await {
if let Err(e) = auth_user(storage, &body.login, &body.password).await {
tracing::warn!("Authentication failed: {:?}", e);
let target = self.future_nickname.clone().unwrap_or_else(|| "*".into());
sasl_fail_message(config.server_name.clone(), target, "Bad credentials".into())
@ -325,7 +324,7 @@ impl RegistrationState {
&mut self,
candidate_user: RegisteredUser,
writer: &mut BufWriter<WriteHalf<'_>>,
core: &LavinaCore,
storage: &mut Storage,
config: &ServerConfig,
) -> Result<Option<RegisteredUser>> {
if self.enabled_capabilities.contains(Capabilities::Sasl)
@ -344,7 +343,7 @@ impl RegistrationState {
writer.flush().await?;
return Ok(None);
};
auth_user(core, &*candidate_user.nickname, &*candidate_password).await?;
auth_user(storage, &*candidate_user.nickname, &*candidate_password).await?;
Ok(Some(candidate_user))
}
}
@ -353,7 +352,7 @@ impl RegistrationState {
async fn handle_registration<'a>(
reader: &mut BufReader<ReadHalf<'a>>,
writer: &mut BufWriter<WriteHalf<'a>>,
core: &LavinaCore,
storage: &mut Storage,
config: &ServerConfig,
) -> Result<RegisteredUser> {
let mut buffer = vec![];
@ -389,7 +388,7 @@ async fn handle_registration<'a>(
}
};
tracing::debug!("Incoming IRC message: {msg:?}");
if let Some(user) = state.handle_msg(msg, writer, core, config).await? {
if let Some(user) = state.handle_msg(msg, writer, storage, config).await? {
break Ok(user);
}
buffer.clear();
@ -406,8 +405,8 @@ fn sasl_fail_message(sender: Str, nick: Str, text: Str) -> ServerMessage {
}
}
async fn auth_user(core: &LavinaCore, login: &str, plain_password: &str) -> Result<()> {
let verdict = core.authenticate(login, plain_password).await?;
async fn auth_user(storage: &mut Storage, login: &str, plain_password: &str) -> Result<()> {
let verdict = Authenticator::new(storage).authenticate(login, plain_password).await?;
// TODO properly map these onto protocol messages
match verdict {
Verdict::Authenticated => Ok(()),
@ -418,7 +417,8 @@ async fn auth_user(core: &LavinaCore, login: &str, plain_password: &str) -> Resu
async fn handle_registered_socket<'a>(
config: ServerConfig,
core: &LavinaCore,
mut players: PlayerRegistry,
rooms: RoomRegistry,
reader: &mut BufReader<ReadHalf<'a>>,
writer: &mut BufWriter<WriteHalf<'a>>,
user: RegisteredUser,
@ -427,7 +427,7 @@ async fn handle_registered_socket<'a>(
log::info!("Handling registered user: {user:?}");
let player_id = PlayerId::from(user.nickname.clone())?;
let mut connection = core.connect_to_player(&player_id).await;
let mut connection = players.connect_to_player(&player_id).await;
let text: Str = format!("Welcome to {} Server", &config.server_name).into();
ServerMessage {
@ -501,7 +501,7 @@ async fn handle_registered_socket<'a>(
len
};
let incoming = std::str::from_utf8(&buffer[0..len-2])?;
if let HandleResult::Leave = handle_incoming_message(incoming, &config, &user, core, &mut connection, writer).await? {
if let HandleResult::Leave = handle_incoming_message(incoming, &config, &user, &rooms, &mut connection, writer).await? {
break;
}
buffer.clear();
@ -509,7 +509,7 @@ async fn handle_registered_socket<'a>(
update = connection.receiver.recv() => {
match update {
Some(ConnectionMessage::Update(update)) => {
handle_update(&config, &user, &player_id, writer, core, update).await?;
handle_update(&config, &user, &player_id, writer, &rooms, update).await?;
}
Some(ConnectionMessage::Stop(_)) => {
tracing::debug!("Connection is being terminated");
@ -560,14 +560,14 @@ async fn handle_update(
user: &RegisteredUser,
player_id: &PlayerId,
writer: &mut (impl AsyncWrite + Unpin),
core: &LavinaCore,
rooms: &RoomRegistry,
update: Updates,
) -> Result<()> {
log::debug!("Sending irc message to player {player_id:?} on update {update:?}");
match update {
Updates::RoomJoined { new_member_id, room_id } => {
if player_id == &new_member_id {
if let Some(room) = core.get_room(&room_id).await {
if let Some(room) = rooms.get_room(&room_id).await {
let room_info = room.get_room_info().await;
let chan = Chan::Global(room_id.as_inner().clone());
produce_on_join_cmd_messages(&config, &user, &chan, &room_info, writer).await?;
@ -690,7 +690,7 @@ async fn handle_incoming_message(
buffer: &str,
config: &ServerConfig,
user: &RegisteredUser,
core: &LavinaCore,
rooms: &RoomRegistry,
user_handle: &mut PlayerConnection,
writer: &mut (impl AsyncWrite + Unpin),
) -> Result<HandleResult> {
@ -753,6 +753,8 @@ async fn handle_incoming_message(
ClientMessage::Who { target } => match &target {
Recipient::Nick(nick) => {
// TODO handle non-existing user
let mut username = format!("~{nick}");
let mut host = format!("user/{nick}");
ServerMessage {
tags: vec![],
sender: Some(config.server_name.clone()),
@ -774,7 +776,7 @@ async fn handle_incoming_message(
writer.flush().await?;
}
Recipient::Chan(Chan::Global(chan)) => {
let room = core.get_room(&RoomId::from(chan.clone())?).await;
let room = rooms.get_room(&RoomId::from(chan.clone())?).await;
if let Some(room) = room {
let room_info = room.get_room_info().await;
for member in room_info.members {
@ -804,17 +806,6 @@ async fn handle_incoming_message(
log::warn!("Local chans not supported");
}
},
ClientMessage::Whois { arg } => {
arg.handle(handler::IrcConnection {
server_name: config.server_name.clone(),
client: user.nickname.clone(),
writer,
player_connection: user_handle,
})
.await?;
writer.flush().await?;
}
ClientMessage::Mode { target } => {
match target {
Recipient::Nick(nickname) => {
@ -869,7 +860,7 @@ fn user_to_who_msg(config: &ServerConfig, requestor: &RegisteredUser, target_use
let username = format!("~{target_user_nickname}").into();
// User's host is not public, replace it with `user/<nickname>` pattern
let host = format!("user/{target_user_nickname}").into();
let mut host = format!("user/{target_user_nickname}").into();
ServerMessageBody::N352WhoReply {
client: requestor.nickname.clone(),
@ -1005,7 +996,12 @@ impl RunningServer {
}
}
pub async fn launch(config: ServerConfig, core: LavinaCore, metrics: MetricsRegistry) -> Result<RunningServer> {
pub async fn launch(
config: ServerConfig,
core: LavinaCore,
metrics: MetricsRegistry,
storage: Storage,
) -> Result<RunningServer> {
log::info!("Starting IRC projection");
let (stopped_tx, mut stopped_rx) = channel(32);
let current_connections = IntGauge::new("irc_current_connections", "Open and alive TCP connections")?;
@ -1046,8 +1042,9 @@ pub async fn launch(config: ServerConfig, core: LavinaCore, metrics: MetricsRegi
let core = core.clone();
let current_connections_clone = current_connections.clone();
let stopped_tx = stopped_tx.clone();
let storage = storage.clone();
async move {
match handle_socket(config, stream, &socket_addr, core, termination).await {
match handle_socket(config, stream, &socket_addr, core, termination, storage).await {
Ok(_) => log::info!("Connection terminated"),
Err(err) => log::warn!("Connection failed: {err}"),
}
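During registration above, the AUTHENTICATE payload is handed to AuthBody::from_str and the resulting login and password go to auth_user. For context, a SASL PLAIN payload (RFC 4616) decodes to three NUL-separated fields: authzid, login, password. A hypothetical splitter showing that layout, not the sasl crate's actual implementation:

    // Hypothetical sketch of the RFC 4616 layout, not the sasl crate's code:
    // the decoded AUTHENTICATE payload is "authzid\0login\0password".
    fn split_plain(decoded: &[u8]) -> Option<(&[u8], &[u8])> {
        let mut parts = decoded.split(|b| *b == 0);
        let _authzid = parts.next()?; // frequently empty
        let login = parts.next()?;
        let password = parts.next()?;
        Some((login, password))
    }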

View File

@ -1,67 +0,0 @@
use lavina_core::{
player::{GetInfoResult, PlayerId},
prelude::Str,
};
use proto_irc::{
client::command_args::Whois,
commands::whois::{
error::{ErrNoNicknameGiven431, ErrNoSuchNick401},
response::RplEndOfWhois318,
},
response::{IrcResponseMessage, WriteResponse},
};
use tokio::io::AsyncWrite;
use crate::handler::{Handler, IrcConnection};
impl<T: AsyncWrite + Unpin> Handler<T> for Whois {
async fn handle(&self, body: IrcConnection<'_, T>) -> anyhow::Result<()> {
match self {
Whois::Nick(nick) => handle_nick_target(nick.clone(), body).await?,
Whois::TargetNick(_, nick) => handle_nick_target(nick.clone(), body).await?,
Whois::EmptyArgs => {
let IrcConnection {
server_name,
mut writer,
..
} = body;
IrcResponseMessage::empty_tags(
Some(server_name.clone()),
ErrNoNicknameGiven431::new(server_name.clone()),
)
.write_response(&mut writer)
.await?
}
}
Ok(())
}
}
async fn handle_nick_target(nick: Str, body: IrcConnection<'_, impl AsyncWrite + Unpin>) -> anyhow::Result<()> {
let IrcConnection {
server_name,
mut writer,
client,
player_connection,
} = body;
if let GetInfoResult::UserDoesntExist =
player_connection.check_user_existence(PlayerId::from(nick.clone())?).await?
{
IrcResponseMessage::empty_tags(
Some(server_name.clone()),
ErrNoSuchNick401::new(client.clone(), nick.clone()),
)
.write_response(&mut writer)
.await?
}
IrcResponseMessage::empty_tags(
Some(server_name.clone()),
RplEndOfWhois318::new(client.clone(), nick.clone()),
)
.write_response(&mut writer)
.await?;
Ok(())
}
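The removed handler above always closes a WHOIS with the 318 reply and prefixes it with a 401 when the nick does not resolve, matching what the IRC integration test further down expects. A condensed sketch of that error path, illustrative only, using Str from the core prelude and the types from the removed proto_irc modules that appear later in this diff:

    use lavina_core::prelude::Str;
    use proto_irc::commands::whois::{error::ErrNoSuchNick401, response::RplEndOfWhois318};
    use proto_irc::response::{IrcResponseMessage, WriteResponse};
    use tokio::io::AsyncWrite;

    async fn reply_unknown_nick(
        server_name: Str,
        client: Str,
        nick: Str,
        writer: &mut (impl AsyncWrite + Unpin),
    ) -> std::io::Result<()> {
        // 401: the requested nick does not exist...
        IrcResponseMessage::empty_tags(
            Some(server_name.clone()),
            ErrNoSuchNick401::new(client.clone(), nick.clone()),
        )
        .write_response(writer)
        .await?;
        // ...followed by the closing 318, which the handler sends unconditionally.
        IrcResponseMessage::empty_tags(Some(server_name), RplEndOfWhois318::new(client, nick))
            .write_response(writer)
            .await
    }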

View File

@ -8,7 +8,7 @@ use tokio::io::{AsyncReadExt, AsyncWriteExt, BufReader};
use tokio::net::tcp::{ReadHalf, WriteHalf};
use tokio::net::TcpStream;
use lavina_core::clustering::{ClusterConfig, ClusterMetadata};
use lavina_core::auth::Authenticator;
use lavina_core::player::{JoinResult, PlayerId, SendMessageResult};
use lavina_core::repo::{Storage, StorageConfig};
use lavina_core::room::RoomId;
@ -101,6 +101,8 @@ impl<'a> TestScope<'a> {
}
struct TestServer {
metrics: MetricsRegistry,
storage: Storage,
core: LavinaCore,
server: RunningServer,
}
@ -112,21 +114,18 @@ impl TestServer {
server_name: "testserver".into(),
};
let mut metrics = MetricsRegistry::new();
let storage = Storage::open(StorageConfig {
let mut storage = Storage::open(StorageConfig {
db_path: ":memory:".into(),
})
.await?;
let cluster_config = ClusterConfig {
addresses: vec![],
metadata: ClusterMetadata {
node_id: 0,
main_owner: 0,
rooms: Default::default(),
},
};
let core = LavinaCore::new(&mut metrics, cluster_config, storage).await?;
let server = launch(config, core.clone(), metrics.clone()).await.unwrap();
Ok(TestServer { core, server })
let core = LavinaCore::new(metrics.clone(), storage.clone()).await?;
let server = launch(config, core.clone(), metrics.clone(), storage.clone()).await.unwrap();
Ok(TestServer {
metrics,
storage,
core,
server,
})
}
async fn reboot(self) -> Result<TestServer> {
@ -134,38 +133,41 @@ impl TestServer {
listen_on: "127.0.0.1:0".parse().unwrap(),
server_name: "testserver".into(),
};
let cluster_config = ClusterConfig {
addresses: vec![],
metadata: ClusterMetadata {
node_id: 0,
main_owner: 0,
rooms: Default::default(),
},
};
let TestServer { core, server } = self;
let TestServer {
metrics: _,
storage,
mut core,
server,
} = self;
server.terminate().await?;
let storage = core.shutdown().await;
let mut metrics = MetricsRegistry::new();
let core = LavinaCore::new(&mut metrics, cluster_config, storage).await?;
let server = launch(config, core.clone(), metrics.clone()).await.unwrap();
Ok(TestServer { core, server })
core.shutdown().await?;
let metrics = MetricsRegistry::new();
let core = LavinaCore::new(metrics.clone(), storage.clone()).await?;
let server = launch(config, core.clone(), metrics.clone(), storage.clone()).await.unwrap();
Ok(TestServer {
metrics,
storage,
core,
server,
})
}
async fn shutdown(self) {
let _ = self.server.terminate().await;
let storage = self.core.shutdown().await;
let _ = storage.close().await;
async fn shutdown(self) -> Result<()> {
self.server.terminate().await?;
self.core.shutdown().await?;
self.storage.close().await?;
Ok(())
}
}
#[tokio::test]
async fn scenario_basic() -> Result<()> {
let server = TestServer::start().await?;
let mut server = TestServer::start().await?;
// test scenario
server.core.create_player(&PlayerId::from("tester")?).await?;
server.core.set_password("tester", "password").await?;
server.storage.create_user("tester").await?;
Authenticator::new(&server.storage).set_password("tester", "password").await?;
let mut stream = TcpStream::connect(server.server.addr).await?;
let mut s = TestScope::new(&mut stream);
@ -183,18 +185,18 @@ async fn scenario_basic() -> Result<()> {
// wrap up
server.shutdown().await;
server.shutdown().await?;
Ok(())
}
#[tokio::test]
async fn scenario_join_and_reboot() -> Result<()> {
let server = TestServer::start().await?;
let mut server = TestServer::start().await?;
// test scenario
server.core.create_player(&PlayerId::from("tester")?).await?;
server.core.set_password("tester", "password").await?;
server.storage.create_user("tester").await?;
Authenticator::new(&server.storage).set_password("tester", "password").await?;
let mut stream = TcpStream::connect(server.server.addr).await?;
let mut s = TestScope::new(&mut stream);
@ -253,18 +255,18 @@ async fn scenario_join_and_reboot() -> Result<()> {
// wrap up
server.shutdown().await;
server.shutdown().await?;
Ok(())
}
#[tokio::test]
async fn scenario_force_join_msg() -> Result<()> {
let server = TestServer::start().await?;
let mut server = TestServer::start().await?;
// test scenario
server.core.create_player(&PlayerId::from("tester")?).await?;
server.core.set_password("tester", "password").await?;
server.storage.create_user("tester").await?;
Authenticator::new(&server.storage).set_password("tester", "password").await?;
let mut stream1 = TcpStream::connect(server.server.addr).await?;
let mut s1 = TestScope::new(&mut stream1);
@ -319,20 +321,20 @@ async fn scenario_force_join_msg() -> Result<()> {
// wrap up
server.shutdown().await;
server.shutdown().await?;
Ok(())
}
#[tokio::test]
async fn scenario_two_users() -> Result<()> {
let server = TestServer::start().await?;
let mut server = TestServer::start().await?;
// test scenario
server.core.create_player(&PlayerId::from("tester1")?).await?;
server.core.set_password("tester1", "password").await?;
server.core.create_player(&PlayerId::from("tester2")?).await?;
server.core.set_password("tester2", "password").await?;
server.storage.create_user("tester1").await?;
Authenticator::new(&server.storage).set_password("tester1", "password").await?;
server.storage.create_user("tester2").await?;
Authenticator::new(&server.storage).set_password("tester2", "password").await?;
let mut stream1 = TcpStream::connect(server.server.addr).await?;
let mut s1 = TestScope::new(&mut stream1);
@ -381,17 +383,10 @@ async fn scenario_two_users() -> Result<()> {
// The second user should receive the PART message
s2.expect(":tester1 PART #test").await?;
s1.send("WHOIS tester2").await?;
s1.expect(":testserver 318 tester1 tester2 :End of /WHOIS list").await?;
stream1.shutdown().await?;
s2.send("WHOIS tester3").await?;
s2.expect(":testserver 401 tester2 tester3 :No such nick/channel").await?;
s2.expect(":testserver 318 tester2 tester3 :End of /WHOIS list").await?;
stream2.shutdown().await?;
server.shutdown().await;
server.shutdown().await?;
Ok(())
}
@ -401,12 +396,12 @@ AUTHENTICATE doc: https://modern.ircdocs.horse/#authenticate-message
*/
#[tokio::test]
async fn scenario_cap_full_negotiation() -> Result<()> {
let server = TestServer::start().await?;
let mut server = TestServer::start().await?;
// test scenario
server.core.create_player(&PlayerId::from("tester")?).await?;
server.core.set_password("tester", "password").await?;
server.storage.create_user("tester").await?;
Authenticator::new(&server.storage).set_password("tester", "password").await?;
let mut stream = TcpStream::connect(server.server.addr).await?;
let mut s = TestScope::new(&mut stream);
@ -435,18 +430,18 @@ async fn scenario_cap_full_negotiation() -> Result<()> {
// wrap up
server.shutdown().await;
server.shutdown().await?;
Ok(())
}
#[tokio::test]
async fn scenario_cap_full_negotiation_nick_last() -> Result<()> {
let server = TestServer::start().await?;
let mut server = TestServer::start().await?;
// test scenario
server.core.create_player(&PlayerId::from("tester")?).await?;
server.core.set_password("tester", "password").await?;
server.storage.create_user("tester").await?;
Authenticator::new(&server.storage).set_password("tester", "password").await?;
let mut stream = TcpStream::connect(server.server.addr).await?;
let mut s = TestScope::new(&mut stream);
@ -474,18 +469,18 @@ async fn scenario_cap_full_negotiation_nick_last() -> Result<()> {
// wrap up
server.shutdown().await;
server.shutdown().await?;
Ok(())
}
#[tokio::test]
async fn scenario_cap_short_negotiation() -> Result<()> {
let server = TestServer::start().await?;
let mut server = TestServer::start().await?;
// test scenario
server.core.create_player(&PlayerId::from("tester")?).await?;
server.core.set_password("tester", "password").await?;
server.storage.create_user("tester").await?;
Authenticator::new(&server.storage).set_password("tester", "password").await?;
let mut stream = TcpStream::connect(server.server.addr).await?;
let mut s = TestScope::new(&mut stream);
@ -512,18 +507,18 @@ async fn scenario_cap_short_negotiation() -> Result<()> {
// wrap up
server.shutdown().await;
server.shutdown().await?;
Ok(())
}
#[tokio::test]
async fn scenario_cap_sasl_fail() -> Result<()> {
let server = TestServer::start().await?;
let mut server = TestServer::start().await?;
// test scenario
server.core.create_player(&PlayerId::from("tester")?).await?;
server.core.set_password("tester", "password").await?;
server.storage.create_user("tester").await?;
Authenticator::new(&server.storage).set_password("tester", "password").await?;
let mut stream = TcpStream::connect(server.server.addr).await?;
let mut s = TestScope::new(&mut stream);
@ -556,18 +551,18 @@ async fn scenario_cap_sasl_fail() -> Result<()> {
// wrap up
server.shutdown().await;
server.shutdown().await?;
Ok(())
}
#[tokio::test]
async fn terminate_socket_scenario() -> Result<()> {
let server = TestServer::start().await?;
let mut server = TestServer::start().await?;
// test scenario
server.core.create_player(&PlayerId::from("tester")?).await?;
server.core.set_password("tester", "password").await?;
server.storage.create_user("tester").await?;
Authenticator::new(&server.storage).set_password("tester", "password").await?;
let mut stream = TcpStream::connect(server.server.addr).await?;
let mut s = TestScope::new(&mut stream);
@ -579,7 +574,7 @@ async fn terminate_socket_scenario() -> Result<()> {
s.send("AUTHENTICATE PLAIN").await?;
s.expect(":testserver AUTHENTICATE +").await?;
server.shutdown().await;
server.shutdown().await?;
assert_eq!(stream.read_u8().await.unwrap_err().kind(), ErrorKind::UnexpectedEof);
Ok(())
@ -587,12 +582,12 @@ async fn terminate_socket_scenario() -> Result<()> {
#[tokio::test]
async fn server_time_capability() -> Result<()> {
let server = TestServer::start().await?;
let mut server = TestServer::start().await?;
// test scenario
server.core.create_player(&PlayerId::from("tester")?).await?;
server.core.set_password("tester", "password").await?;
server.storage.create_user("tester").await?;
Authenticator::new(&server.storage).set_password("tester", "password").await?;
let mut stream = TcpStream::connect(server.server.addr).await?;
let mut s = TestScope::new(&mut stream);
@ -617,8 +612,8 @@ async fn server_time_capability() -> Result<()> {
s.expect(":testserver 353 tester = #test :tester").await?;
s.expect(":testserver 366 tester #test :End of /NAMES list").await?;
server.core.create_player(&PlayerId::from("some_guy")?).await?;
let mut conn = server.core.connect_to_player(&PlayerId::from("some_guy").unwrap()).await;
server.storage.create_user("some_guy").await?;
let mut conn = server.core.players.connect_to_player(&PlayerId::from("some_guy").unwrap()).await;
let res = conn.join_room(RoomId::from("test").unwrap()).await?;
let JoinResult::Success(_) = res else {
panic!("Failed to join room");
@ -650,20 +645,20 @@ async fn server_time_capability() -> Result<()> {
// wrap up
server.shutdown().await;
server.shutdown().await?;
Ok(())
}
#[tokio::test]
async fn scenario_two_players_dialog() -> Result<()> {
let server = TestServer::start().await?;
let mut server = TestServer::start().await?;
// test scenario
server.core.create_player(&PlayerId::from("tester1")?).await?;
server.core.set_password("tester1", "password").await?;
server.core.create_player(&PlayerId::from("tester2")?).await?;
server.core.set_password("tester2", "password").await?;
server.storage.create_user("tester1").await?;
server.storage.set_password("tester1", "password").await?;
server.storage.create_user("tester2").await?;
server.storage.set_password("tester2", "password").await?;
let mut stream1 = TcpStream::connect(server.server.addr).await?;
let mut s1 = TestScope::new(&mut stream1);
@ -714,7 +709,7 @@ async fn scenario_two_players_dialog() -> Result<()> {
stream1.shutdown().await?;
stream2.shutdown().await?;
server.shutdown().await;
server.shutdown().await?;
Ok(())
}
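Several of the tests above drive the SASL handshake up to the ":testserver AUTHENTICATE +" challenge. For reference, the payload a client sends next is the base64 of the NUL-separated PLAIN triple. A hypothetical check, not one of the tests in this file, assuming the base64 crate (already present in the lockfile) as a dependency:

    #[test]
    fn plain_payload_example() {
        use base64::Engine;

        // authzid (login reused), login, password, separated by NUL bytes (RFC 4616).
        let payload = base64::engine::general_purpose::STANDARD.encode(b"tester\0tester\0password");
        assert_eq!(payload, "dGVzdGVyAHRlc3RlcgBwYXNzd29yZA==");
    }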

View File

@ -2,10 +2,9 @@
use quick_xml::events::Event;
use lavina_core::LavinaCore;
use lavina_core::room::RoomId;
use proto_xmpp::bind::{BindRequest, BindResponse, Jid, Name, Server};
use proto_xmpp::client::{Iq, IqError, IqErrorType, IqType};
use lavina_core::room::{RoomId, RoomRegistry};
use proto_xmpp::bind::{BindResponse, Jid, Name, Server};
use proto_xmpp::client::{Iq, IqError, IqErrorType, IqType, Message, MessageType};
use proto_xmpp::disco::{Feature, Identity, InfoQuery, Item, ItemQuery};
use proto_xmpp::mam::{Fin, Set};
use proto_xmpp::roster::RosterQuery;
@ -18,13 +17,17 @@ use crate::XmppConnection;
impl<'a> XmppConnection<'a> {
pub async fn handle_iq(&self, output: &mut Vec<Event<'static>>, iq: Iq<IqClientBody>) {
match iq.body {
IqClientBody::Bind(req) => {
IqClientBody::Bind(_) => {
let req = Iq {
from: None,
id: iq.id,
to: None,
r#type: IqType::Result,
body: self.bind(&req).await,
body: BindResponse(Jid {
name: Some(self.user.xmpp_name.clone()),
server: Server(self.hostname.clone()),
resource: Some(self.user.xmpp_resource.clone()),
}),
};
req.serialize(output);
}
@ -74,7 +77,7 @@ impl<'a> XmppConnection<'a> {
}
}
IqClientBody::DiscoItem(item) => {
let response = self.disco_items(iq.to.as_ref(), &item, self.core).await;
let response = self.disco_items(iq.to.as_ref(), &item, self.rooms).await;
let req = Iq {
from: iq.to,
id: iq.id,
@ -111,14 +114,6 @@ impl<'a> XmppConnection<'a> {
}
}
pub(crate) async fn bind(&self, req: &BindRequest) -> BindResponse {
BindResponse(Jid {
name: Some(self.user.xmpp_name.clone()),
server: Server(self.hostname.clone()),
resource: Some(self.user.xmpp_resource.clone()),
})
}
async fn disco_info(&self, to: Option<&Jid>, req: &InfoQuery) -> Result<InfoQuery, IqError> {
let identity;
let feature;
@ -163,7 +158,7 @@ impl<'a> XmppConnection<'a> {
resource: None,
}) if server.0 == self.hostname_rooms => {
let room_id = RoomId::from(room_name.0.clone()).unwrap();
let Some(_) = self.core.get_room(&room_id).await else {
let Some(_) = self.rooms.get_room(&room_id).await else {
// TODO should return item-not-found
// example:
// <error type="cancel">
@ -197,7 +192,7 @@ impl<'a> XmppConnection<'a> {
})
}
async fn disco_items(&self, to: Option<&Jid>, req: &ItemQuery, core: &LavinaCore) -> ItemQuery {
async fn disco_items(&self, to: Option<&Jid>, req: &ItemQuery, rooms: &RoomRegistry) -> ItemQuery {
let item = match to {
Some(Jid {
name: None,
@ -219,7 +214,7 @@ impl<'a> XmppConnection<'a> {
server,
resource: None,
}) if server.0 == self.hostname_rooms => {
let room_list = core.get_all_rooms().await;
let room_list = rooms.get_all_rooms().await;
room_list
.into_iter()
.map(|room_info| Item {

View File

@ -22,9 +22,11 @@ use tokio::sync::mpsc::channel;
use tokio_rustls::rustls::{Certificate, PrivateKey};
use tokio_rustls::TlsAcceptor;
use lavina_core::auth::Verdict;
use lavina_core::player::{ConnectionMessage, PlayerConnection, PlayerId, StopReason};
use lavina_core::auth::{Authenticator, Verdict};
use lavina_core::player::{ConnectionMessage, PlayerConnection, PlayerId, PlayerRegistry, StopReason};
use lavina_core::prelude::*;
use lavina_core::repo::Storage;
use lavina_core::room::RoomRegistry;
use lavina_core::terminator::Terminator;
use lavina_core::LavinaCore;
use proto_xmpp::bind::{Name, Resource};
@ -40,9 +42,6 @@ mod message;
mod presence;
mod updates;
#[cfg(test)]
mod testkit;
#[derive(Deserialize, Debug, Clone)]
pub struct ServerConfig {
pub listen_on: SocketAddr,
@ -82,7 +81,12 @@ impl RunningServer {
}
}
pub async fn launch(config: ServerConfig, core: LavinaCore, metrics: MetricsRegistry) -> Result<RunningServer> {
pub async fn launch(
config: ServerConfig,
core: LavinaCore,
metrics: MetricsRegistry,
storage: Storage,
) -> Result<RunningServer> {
log::info!("Starting XMPP projection");
let certs = certs(&mut SyncBufReader::new(File::open(config.cert)?))?;
@ -122,12 +126,13 @@ pub async fn launch(config: ServerConfig, core: LavinaCore, metrics: MetricsRegi
continue;
}
let core = core.clone();
let storage = storage.clone();
let hostname = config.hostname.clone();
let terminator = Terminator::spawn(|termination| {
let stopped_tx = stopped_tx.clone();
let loaded_config = loaded_config.clone();
async move {
match handle_socket(loaded_config, stream, &socket_addr, core, hostname, termination).await {
match handle_socket(loaded_config, stream, &socket_addr, core, storage, hostname, termination).await {
Ok(_) => log::info!("Connection terminated"),
Err(err) => log::warn!("Connection failed: {err}"),
}
@ -165,7 +170,8 @@ async fn handle_socket(
cert_config: Arc<LoadedConfig>,
mut stream: TcpStream,
socket_addr: &SocketAddr,
core: LavinaCore,
mut core: LavinaCore,
mut storage: Storage,
hostname: Str,
termination: Deferred<()>, // TODO use it to stop the connection gracefully
) -> Result<()> {
@ -195,21 +201,21 @@ async fn handle_socket(
pin!(termination);
select! {
biased;
_ = &mut termination => {
_ = &mut termination =>{
log::info!("Socket handling was terminated");
return Ok(())
},
authenticated = socket_auth(&mut xml_reader, &mut xml_writer, &mut reader_buf, &core, &hostname) => {
authenticated = socket_auth(&mut xml_reader, &mut xml_writer, &mut reader_buf, &mut storage, &hostname) => {
match authenticated {
Ok(authenticated) => {
let mut connection = core.connect_to_player(&authenticated.player_id).await;
let mut connection = core.players.connect_to_player(&authenticated.player_id).await;
socket_final(
&mut xml_reader,
&mut xml_writer,
&mut reader_buf,
&authenticated,
&mut connection,
&core,
&core.rooms,
&hostname,
)
.await?;
@ -266,7 +272,7 @@ async fn socket_auth(
xml_reader: &mut NsReader<(impl AsyncBufRead + Unpin)>,
xml_writer: &mut Writer<(impl AsyncWrite + Unpin)>,
reader_buf: &mut Vec<u8>,
core: &LavinaCore,
storage: &mut Storage,
hostname: &Str,
) -> Result<Authenticated> {
// TODO validate the server hostname received in the stream start
@ -295,7 +301,7 @@ async fn socket_auth(
match AuthBody::from_str(&auth.body) {
Ok(logopass) => {
let name = &logopass.login;
let verdict = core.authenticate(name, &logopass.password).await?;
let verdict = Authenticator::new(storage).authenticate(name, &logopass.password).await?;
match verdict {
Verdict::Authenticated => {
proto_xmpp::sasl::Success.write_xml(xml_writer).await?;
@ -325,7 +331,7 @@ async fn socket_final(
reader_buf: &mut Vec<u8>,
authenticated: &Authenticated,
user_handle: &mut PlayerConnection,
core: &LavinaCore,
rooms: &RoomRegistry,
hostname: &Str,
) -> Result<()> {
// TODO validate the server hostname received in the stream start
@ -358,7 +364,7 @@ async fn socket_final(
let mut conn = XmppConnection {
user: authenticated,
user_handle,
core,
rooms,
hostname: hostname.clone(),
hostname_rooms: format!("rooms.{}", hostname).into(),
};
@ -441,7 +447,7 @@ async fn socket_final(
struct XmppConnection<'a> {
user: &'a Authenticated,
user_handle: &'a mut PlayerConnection,
core: &'a LavinaCore,
rooms: &'a RoomRegistry,
hostname: Str,
hostname_rooms: Str,
}

View File

@ -1,8 +1,8 @@
//! Handling of all client2server presence stanzas
use anyhow::Result;
use quick_xml::events::Event;
use lavina_core::prelude::*;
use lavina_core::room::RoomId;
use proto_xmpp::bind::{Jid, Name, Server};
use proto_xmpp::client::Presence;
@ -22,8 +22,7 @@ impl<'a> XmppConnection<'a> {
// resources in MUCs are members' personas not implemented (yet?)
resource: Some(_),
}) if server.0 == self.hostname_rooms => {
let response = self.muc_presence(&name).await?;
response.serialize(output);
self.muc_presence(name, output).await?;
}
_ => {
// TODO other presence cases
@ -59,8 +58,7 @@ impl<'a> XmppConnection<'a> {
}
}
// todo: return Presence and serialize on the outside.
async fn muc_presence(&mut self, name: &Name) -> Result<(Presence<()>)> {
async fn muc_presence(&mut self, name: Name, output: &mut Vec<Event<'static>>) -> Result<()> {
let a = self.user_handle.join_room(RoomId::from(name.0.clone())?).await?;
// TODO handle bans
let response = Presence::<()> {
@ -76,104 +74,7 @@ impl<'a> XmppConnection<'a> {
}),
..Default::default()
};
Ok(response)
}
}
// todo: set up so that the user has been previously joined.
// todo: first call to muc_presence is OK, next one is OK too.
#[cfg(test)]
mod tests {
use anyhow::Result;
use crate::testkit::{expect_user_authenticated, TestServer};
use crate::Authenticated;
use lavina_core::player::PlayerId;
use proto_xmpp::bind::{Jid, Name, Resource, Server};
use proto_xmpp::client::Presence;
#[tokio::test]
async fn test_muc_joining() -> Result<()> {
let server = TestServer::start().await.unwrap();
server.core.create_player(&PlayerId::from("tester")?).await?;
let player_id = PlayerId::from("tester").unwrap();
let user = Authenticated {
player_id,
xmpp_name: Name("tester".into()),
xmpp_resource: Resource("tester".into()),
xmpp_muc_name: Resource("tester".into()),
};
let mut player_conn = server.core.connect_to_player(&user.player_id).await;
let mut conn = expect_user_authenticated(&server, &user, &mut player_conn).await.unwrap();
let response = conn.muc_presence(&user.xmpp_name).await.unwrap();
let expected = Presence::<()> {
to: Some(Jid {
name: Some(conn.user.xmpp_name.clone()),
server: Server(conn.hostname.clone()),
resource: Some(conn.user.xmpp_resource.clone()),
}),
from: Some(Jid {
name: Some(user.xmpp_name.clone()),
server: Server(conn.hostname_rooms.clone()),
resource: Some(conn.user.xmpp_muc_name.clone()),
}),
..Default::default()
};
assert_eq!(expected, response);
server.shutdown().await.unwrap();
Ok(())
}
// Test that joining a room second time after a server restart,
// i.e. in-memory cache of memberships is cleaned, does not cause any issues.
#[tokio::test]
async fn test_muc_joining_twice() -> Result<()> {
let server = TestServer::start().await.unwrap();
server.core.create_player(&PlayerId::from("tester")?).await?;
let player_id = PlayerId::from("tester").unwrap();
let user = Authenticated {
player_id,
xmpp_name: Name("tester".into()),
xmpp_resource: Resource("tester".into()),
xmpp_muc_name: Resource("tester".into()),
};
let mut player_conn = server.core.connect_to_player(&user.player_id).await;
let mut conn = expect_user_authenticated(&server, &user, &mut player_conn).await.unwrap();
let response = conn.muc_presence(&user.xmpp_name).await.unwrap();
let expected = Presence::<()> {
to: Some(Jid {
name: Some(conn.user.xmpp_name.clone()),
server: Server(conn.hostname.clone()),
resource: Some(conn.user.xmpp_resource.clone()),
}),
from: Some(Jid {
name: Some(user.xmpp_name.clone()),
server: Server(conn.hostname_rooms.clone()),
resource: Some(conn.user.xmpp_muc_name.clone()),
}),
..Default::default()
};
assert_eq!(expected, response);
drop(conn);
let server = server.reboot().await.unwrap();
let mut player_conn = server.core.connect_to_player(&user.player_id).await;
let mut conn = expect_user_authenticated(&server, &user, &mut player_conn).await.unwrap();
let response = conn.muc_presence(&user.xmpp_name).await.unwrap();
assert_eq!(expected, response);
server.shutdown().await.unwrap();
response.serialize(output);
Ok(())
}
}

View File

@ -1,78 +0,0 @@
use prometheus::Registry as MetricsRegistry;
use crate::{Authenticated, XmppConnection};
use lavina_core::clustering::{ClusterConfig, ClusterMetadata};
use lavina_core::player::PlayerConnection;
use lavina_core::repo::{Storage, StorageConfig};
use lavina_core::LavinaCore;
use proto_xmpp::bind::{BindRequest, BindResponse, Jid, Name, Resource, Server};
pub(crate) struct TestServer {
pub core: LavinaCore,
}
impl TestServer {
pub async fn start() -> anyhow::Result<TestServer> {
let _ = tracing_subscriber::fmt::try_init();
let mut metrics = MetricsRegistry::new();
let storage = Storage::open(StorageConfig {
db_path: ":memory:".into(),
})
.await?;
let cluster_config = ClusterConfig {
metadata: ClusterMetadata {
node_id: 0,
main_owner: 0,
rooms: Default::default(),
},
addresses: vec![],
};
let core = LavinaCore::new(&mut metrics, cluster_config, storage).await?;
Ok(TestServer { core })
}
pub async fn reboot(self) -> anyhow::Result<TestServer> {
let storage = self.core.shutdown().await;
let mut metrics = MetricsRegistry::new();
let cluster_config = ClusterConfig {
metadata: ClusterMetadata {
node_id: 0,
main_owner: 0,
rooms: Default::default(),
},
addresses: vec![],
};
let core = LavinaCore::new(&mut metrics, cluster_config, storage).await?;
Ok(TestServer { core })
}
pub async fn shutdown(self) -> anyhow::Result<()> {
let storage = self.core.shutdown().await;
storage.close().await;
Ok(())
}
}
pub async fn expect_user_authenticated<'a>(
server: &'a TestServer,
user: &'a Authenticated,
conn: &'a mut PlayerConnection,
) -> anyhow::Result<XmppConnection<'a>> {
let conn = XmppConnection {
user: &user,
user_handle: conn,
core: &server.core,
hostname: "localhost".into(),
hostname_rooms: "rooms.localhost".into(),
};
let result = conn.bind(&BindRequest(Resource("whatever".into()))).await;
let expected = BindResponse(Jid {
name: Some(Name("tester".into())),
server: Server("localhost".into()),
resource: Some(Resource("tester".into())),
});
assert_eq!(expected, result);
Ok(conn)
}

View File

@ -18,11 +18,11 @@ use tokio_rustls::rustls::client::ServerCertVerifier;
use tokio_rustls::rustls::{ClientConfig, ServerName};
use tokio_rustls::TlsConnector;
use lavina_core::clustering::{ClusterConfig, ClusterMetadata};
use lavina_core::player::PlayerId;
use lavina_core::auth::Authenticator;
use lavina_core::repo::{Storage, StorageConfig};
use lavina_core::LavinaCore;
use projection_xmpp::{launch, RunningServer, ServerConfig};
use proto_xmpp::xml::{Continuation, FromXml, Parser};
fn element_name<'a>(local_name: &LocalName<'a>) -> &'a str {
from_utf8(local_name.into_inner()).unwrap()
@ -141,6 +141,8 @@ impl ServerCertVerifier for IgnoreCertVerification {
}
struct TestServer {
metrics: MetricsRegistry,
storage: Storage,
core: LavinaCore,
server: RunningServer,
}
@ -154,40 +156,37 @@ impl TestServer {
key: "tests/certs/xmpp.key".parse().unwrap(),
hostname: "localhost".into(),
};
let mut metrics = MetricsRegistry::new();
let metrics = MetricsRegistry::new();
let storage = Storage::open(StorageConfig {
db_path: ":memory:".into(),
})
.await?;
let cluster_config = ClusterConfig {
addresses: vec![],
metadata: ClusterMetadata {
node_id: 0,
main_owner: 0,
rooms: Default::default(),
},
};
let core = LavinaCore::new(&mut metrics, cluster_config, storage).await?;
let server = launch(config, core.clone(), metrics.clone()).await.unwrap();
Ok(TestServer { core, server })
let core = LavinaCore::new(metrics.clone(), storage.clone()).await?;
let server = launch(config, core.clone(), metrics.clone(), storage.clone()).await.unwrap();
Ok(TestServer {
metrics,
storage,
core,
server,
})
}
async fn shutdown(self) -> Result<()> {
self.server.terminate().await?;
let storage = self.core.shutdown().await;
storage.close().await;
self.core.shutdown().await?;
self.storage.close().await?;
Ok(())
}
}
#[tokio::test]
async fn scenario_basic() -> Result<()> {
let server = TestServer::start().await?;
let mut server = TestServer::start().await?;
// test scenario
server.core.create_player(&PlayerId::from("tester")?).await?;
server.core.set_password("tester", "password").await?;
server.storage.create_user("tester").await?;
Authenticator::new(&server.storage).set_password("tester", "password").await?;
let mut stream = TcpStream::connect(server.server.addr).await?;
let mut s = TestScope::new(&mut stream);
@ -250,12 +249,12 @@ async fn scenario_basic() -> Result<()> {
#[tokio::test]
async fn scenario_wrong_password() -> Result<()> {
let server = TestServer::start().await?;
let mut server = TestServer::start().await?;
// test scenario
server.core.create_player(&PlayerId::from("tester")?).await?;
server.core.set_password("tester", "password").await?;
server.storage.create_user("tester").await?;
Authenticator::new(&server.storage).set_password("tester", "password").await?;
let mut stream = TcpStream::connect(server.server.addr).await?;
let mut s = TestScope::new(&mut stream);
@ -305,12 +304,12 @@ async fn scenario_wrong_password() -> Result<()> {
#[tokio::test]
async fn scenario_basic_without_headers() -> Result<()> {
let server = TestServer::start().await?;
let mut server = TestServer::start().await?;
// test scenario
server.core.create_player(&PlayerId::from("tester")?).await?;
server.core.set_password("tester", "password").await?;
server.storage.create_user("tester").await?;
Authenticator::new(&server.storage).set_password("tester", "password").await?;
let mut stream = TcpStream::connect(server.server.addr).await?;
let mut s = TestScope::new(&mut stream);
@ -351,12 +350,12 @@ async fn scenario_basic_without_headers() -> Result<()> {
#[tokio::test]
async fn terminate_socket() -> Result<()> {
let server = TestServer::start().await?;
let mut server = TestServer::start().await?;
// test scenario
server.core.create_player(&PlayerId::from("tester")?).await?;
server.core.set_password("tester", "password").await?;
server.storage.create_user("tester").await?;
Authenticator::new(&server.storage).set_password("tester", "password").await?;
let mut stream = TcpStream::connect(server.server.addr).await?;
let mut s = TestScope::new(&mut stream);
@ -391,12 +390,12 @@ async fn terminate_socket() -> Result<()> {
#[tokio::test]
async fn test_message_archive_request() -> Result<()> {
let server = TestServer::start().await?;
let mut server = TestServer::start().await?;
// test scenario
server.core.create_player(&PlayerId::from("tester")?).await?;
server.core.set_password("tester", "password").await?;
server.storage.create_user("tester").await?;
Authenticator::new(&server.storage).set_password("tester", "password").await?;
let mut stream = TcpStream::connect(server.server.addr).await?;
let mut s = TestScope::new(&mut stream);

View File

@ -1,9 +1,9 @@
use super::*;
use anyhow::{anyhow, Result};
use nom::combinator::{all_consuming, opt};
use nonempty::NonEmpty;
use super::*;
/// Client-to-server command.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum ClientMessage {
@ -42,10 +42,6 @@ pub enum ClientMessage {
Who {
target: Recipient, // aka mask
},
/// WHOIS [<target>] <nick>
Whois {
arg: command_args::Whois,
},
/// `TOPIC <chan> :<topic>`
Topic {
chan: Chan,
@ -67,17 +63,6 @@ pub enum ClientMessage {
Authenticate(Str),
}
pub mod command_args {
use crate::prelude::Str;
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Whois {
Nick(Str),
TargetNick(Str, Str),
EmptyArgs,
}
}
pub fn client_message(input: &str) -> Result<ClientMessage> {
let res = all_consuming(alt((
client_message_capability,
@ -89,7 +74,6 @@ pub fn client_message(input: &str) -> Result<ClientMessage> {
client_message_join,
client_message_mode,
client_message_who,
client_message_whois,
client_message_topic,
client_message_part,
client_message_privmsg,
@ -193,31 +177,6 @@ fn client_message_who(input: &str) -> IResult<&str, ClientMessage> {
Ok((input, ClientMessage::Who { target }))
}
fn client_message_whois(input: &str) -> IResult<&str, ClientMessage> {
let (input, _) = tag("WHOIS ")(input)?;
let args: Vec<_> = input.split_whitespace().collect();
match args.as_slice()[..] {
[nick] => Ok((
"",
ClientMessage::Whois {
arg: command_args::Whois::Nick(nick.into()),
},
)),
[target, nick, ..] => Ok((
"",
ClientMessage::Whois {
arg: command_args::Whois::TargetNick(target.into(), nick.into()),
},
)),
[] => Ok((
"",
ClientMessage::Whois {
arg: command_args::Whois::EmptyArgs,
},
)),
}
}
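A compressed usage sketch of the parser above, mirroring the unit tests removed later in this file (assumes the surrounding module's imports, i.e. client_message, ClientMessage and command_args in scope):

    #[test]
    fn whois_parse_examples() -> Result<()> {
        assert_eq!(
            client_message("WHOIS val")?,
            ClientMessage::Whois { arg: command_args::Whois::Nick("val".into()) }
        );
        assert_eq!(
            client_message("WHOIS com.test.server user")?,
            ClientMessage::Whois {
                arg: command_args::Whois::TargetNick("com.test.server".into(), "user".into()),
            }
        );
        Ok(())
    }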
fn client_message_topic(input: &str) -> IResult<&str, ClientMessage> {
let (input, _) = tag("TOPIC ")(input)?;
let (input, chan) = chan(input)?;
@ -352,7 +311,6 @@ mod test {
use nonempty::nonempty;
use super::*;
#[test]
fn test_client_message_cap_ls() {
let input = "CAP LS 302";
@ -402,66 +360,6 @@ mod test {
assert_matches!(result, Ok(result) => assert_eq!(expected, result));
}
#[test]
fn test_client_message_whois() {
let test_user = "WHOIS val";
let test_user_user = "WHOIS val val";
let test_server_user = "WHOIS com.test.server user";
let test_user_server = "WHOIS user com.test.server";
let test_users_list = "WHOIS user_1,user_2,user_3";
let test_server_users_list = "WHOIS com.test.server user_1,user_2,user_3";
let test_more_than_two_params = "WHOIS test.server user_1,user_2,user_3 whatever spam";
let test_none_none_params = "WHOIS ";
let res_one_arg = client_message(test_user);
let res_user_user = client_message(test_user_user);
let res_server_user = client_message(test_server_user);
let res_user_server = client_message(test_user_server);
let res_users_list = client_message(test_users_list);
let res_server_users_list = client_message(test_server_users_list);
let res_more_than_two_params = client_message(test_more_than_two_params);
let res_none_none_params = client_message(test_none_none_params);
let expected_arg = ClientMessage::Whois {
arg: command_args::Whois::Nick("val".into()),
};
let expected_user_user = ClientMessage::Whois {
arg: command_args::Whois::TargetNick("val".into(), "val".into()),
};
let expected_server_user = ClientMessage::Whois {
arg: command_args::Whois::TargetNick("com.test.server".into(), "user".into()),
};
let expected_user_server = ClientMessage::Whois {
arg: command_args::Whois::TargetNick("user".into(), "com.test.server".into()),
};
let expected_user_list = ClientMessage::Whois {
arg: command_args::Whois::Nick("user_1,user_2,user_3".into()),
};
let expected_server_user_list = ClientMessage::Whois {
arg: command_args::Whois::TargetNick("com.test.server".into(), "user_1,user_2,user_3".into()),
};
let expected_more_than_two_params = ClientMessage::Whois {
arg: command_args::Whois::TargetNick("test.server".into(), "user_1,user_2,user_3".into()),
};
let expected_none_none_params = ClientMessage::Whois {
arg: command_args::Whois::EmptyArgs,
};
assert_matches!(res_one_arg, Ok(result) => assert_eq!(expected_arg, result));
assert_matches!(res_user_user, Ok(result) => assert_eq!(expected_user_user, result));
assert_matches!(res_server_user, Ok(result) => assert_eq!(expected_server_user, result));
assert_matches!(res_user_server, Ok(result) => assert_eq!(expected_user_server, result));
assert_matches!(res_users_list, Ok(result) => assert_eq!(expected_user_list, result));
assert_matches!(res_server_users_list, Ok(result) => assert_eq!(expected_server_user_list, result));
assert_matches!(res_more_than_two_params, Ok(result) => assert_eq!(expected_more_than_two_params, result));
assert_matches!(res_none_none_params, Ok(result) => assert_eq!(expected_none_none_params, result))
}
#[test]
fn test_client_message_user() {
let input = "USER SomeNick 8 * :Real Name";
let expected = ClientMessage::User {

View File

@ -1 +0,0 @@
pub mod whois;

View File

@ -1,67 +0,0 @@
use tokio::io::{AsyncWrite, AsyncWriteExt};
use crate::{prelude::Str, response::WriteResponse};
/// ErrNoSuchNick401
pub struct ErrNoSuchNick401 {
client: Str,
nick: Str,
}
impl ErrNoSuchNick401 {
pub fn new(client: Str, nick: Str) -> Self {
ErrNoSuchNick401 { client, nick }
}
}
/// ErrNoSuchServer402
struct ErrNoSuchServer402 {
client: Str,
/// target parameter in WHOIS
/// example: `/whois <target> <nick>`
server_name: Str,
}
/// ErrNoNicknameGiven431
pub struct ErrNoNicknameGiven431 {
client: Str,
}
impl ErrNoNicknameGiven431 {
pub fn new(client: Str) -> Self {
ErrNoNicknameGiven431 { client }
}
}
impl WriteResponse for ErrNoSuchNick401 {
async fn write_response(&self, writer: &mut (impl AsyncWrite + Unpin)) -> std::io::Result<()> {
writer.write_all(b"401 ").await?;
writer.write_all(self.client.as_bytes()).await?;
writer.write_all(b" ").await?;
writer.write_all(self.nick.as_bytes()).await?;
writer.write_all(b" :").await?;
writer.write_all("No such nick/channel".as_bytes()).await?;
Ok(())
}
}
impl WriteResponse for ErrNoNicknameGiven431 {
async fn write_response(&self, writer: &mut (impl AsyncWrite + Unpin)) -> std::io::Result<()> {
writer.write_all(b"431").await?;
writer.write_all(self.client.as_bytes()).await?;
writer.write_all(b" :").await?;
writer.write_all("No nickname given".as_bytes()).await?;
Ok(())
}
}
impl WriteResponse for ErrNoSuchServer402 {
async fn write_response(&self, writer: &mut (impl AsyncWrite + Unpin)) -> std::io::Result<()> {
writer.write_all(b"402 ").await?;
writer.write_all(self.client.as_bytes()).await?;
writer.write_all(b" ").await?;
writer.write_all(self.server_name.as_bytes()).await?;
writer.write_all(b" :").await?;
writer.write_all("No such server".as_bytes()).await?;
Ok(())
}
}

View File

@ -1,2 +0,0 @@
pub mod error;
pub mod response;

View File

@ -1,24 +0,0 @@
use tokio::io::{AsyncWrite, AsyncWriteExt};
use crate::{prelude::Str, response::WriteResponse};
pub struct RplEndOfWhois318 {
client: Str,
nick: Str,
}
impl RplEndOfWhois318 {
pub fn new(client: Str, nick: Str) -> Self {
RplEndOfWhois318 { client, nick }
}
}
impl WriteResponse for RplEndOfWhois318 {
async fn write_response(&self, writer: &mut (impl AsyncWrite + Unpin)) -> std::io::Result<()> {
writer.write_all(b"318 ").await?;
writer.write_all(self.client.as_bytes()).await?;
writer.write_all(b" ").await?;
writer.write_all(self.nick.as_bytes()).await?;
writer.write_all(b" :").await?;
writer.write_all("End of /WHOIS list".as_bytes()).await?;
Ok(())
}
}

View File

@ -1,8 +1,6 @@
//! Client-to-Server IRC protocol.
pub mod client;
pub mod commands;
mod prelude;
pub mod response;
pub mod server;
#[cfg(test)]
mod testkit;

View File

@ -1,47 +0,0 @@
use std::future::Future;
use tokio::io::{AsyncWrite, AsyncWriteExt};
use crate::prelude::Str;
use crate::Tag;
pub trait WriteResponse {
fn write_response(&self, writer: &mut (impl AsyncWrite + Unpin)) -> impl Future<Output = std::io::Result<()>>;
}
/// Server-to-client enum agnostic message
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct IrcResponseMessage<T> {
/// Optional tags section, prefixed with `@`
pub tags: Vec<Tag>,
/// Optional server name, prefixed with `:`.
pub sender: Option<Str>,
pub body: T,
}
impl<T> IrcResponseMessage<T> {
pub fn empty_tags(sender: Option<Str>, body: T) -> Self {
IrcResponseMessage {
tags: vec![],
sender,
body,
}
}
pub fn new(tags: Vec<Tag>, sender: Option<Str>, body: T) -> Self {
IrcResponseMessage { tags, sender, body }
}
}
impl<T: WriteResponse> WriteResponse for IrcResponseMessage<T> {
async fn write_response(&self, writer: &mut (impl AsyncWrite + Unpin)) -> std::io::Result<()> {
if let Some(sender) = &self.sender {
writer.write_all(b":").await?;
writer.write_all(sender.as_bytes()).await?;
writer.write_all(b" ").await?;
}
self.body.write_response(writer).await?;
writer.write_all(b"\r\n").await?;
Ok(())
}
}
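Putting the removed pieces together: wrapping RplEndOfWhois318 in IrcResponseMessage yields exactly the line the IRC integration test above waits for. A hypothetical unit test, not one from the repo, relying on tokio's AsyncWrite impl for Vec<u8>:

    #[tokio::test]
    async fn end_of_whois_wire_format() -> std::io::Result<()> {
        use proto_irc::commands::whois::response::RplEndOfWhois318;
        use proto_irc::response::{IrcResponseMessage, WriteResponse};

        let mut out: Vec<u8> = Vec::new();
        IrcResponseMessage::empty_tags(
            Some("testserver".into()),
            RplEndOfWhois318::new("tester1".into(), "tester2".into()),
        )
        .write_response(&mut out)
        .await?;
        assert_eq!(out, b":testserver 318 tester1 tester2 :End of /WHOIS list\r\n".to_vec());
        Ok(())
    }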

View File

@ -1,10 +1,11 @@
use std::sync::Arc;
use nonempty::NonEmpty;
use tokio::io::AsyncWrite;
use tokio::io::AsyncWriteExt;
use crate::user::PrefixedNick;
use super::*;
use crate::user::PrefixedNick;
/// Server-to-client message.
#[derive(Clone, Debug, PartialEq, Eq)]
@ -113,12 +114,6 @@ pub enum ServerMessageBody {
/// Usually `b"End of WHO list"`
msg: Str,
},
N318EndOfWhois {
client: Str,
nick: Str,
/// Usually `b"End of /WHOIS list"`
msg: Str,
},
N332Topic {
client: Str,
chat: Chan,
@ -148,10 +143,6 @@ pub enum ServerMessageBody {
client: Str,
chan: Chan,
},
N431ErrNoNicknameGiven {
client: Str,
message: Str,
},
N474BannedFromChan {
client: Str,
chan: Chan,
@ -289,14 +280,6 @@ impl ServerMessageBody {
writer.write_all(b" :").await?;
writer.write_all(msg.as_bytes()).await?;
}
ServerMessageBody::N318EndOfWhois { client, nick, msg } => {
writer.write_all(b"318 ").await?;
writer.write_all(client.as_bytes()).await?;
writer.write_all(b" ").await?;
writer.write_all(nick.as_bytes()).await?;
writer.write_all(b" :").await?;
writer.write_all(msg.as_bytes()).await?;
}
ServerMessageBody::N332Topic { client, chat, topic } => {
writer.write_all(b"332 ").await?;
writer.write_all(client.as_bytes()).await?;
@ -359,12 +342,6 @@ impl ServerMessageBody {
chan.write_async(writer).await?;
writer.write_all(b" :End of /NAMES list").await?;
}
ServerMessageBody::N431ErrNoNicknameGiven { client, message } => {
writer.write_all(b"431").await?;
writer.write_all(client.as_bytes()).await?;
writer.write_all(b" :").await?;
writer.write_all(message.as_bytes()).await?;
}
ServerMessageBody::N474BannedFromChan { client, chan, message } => {
writer.write_all(b"474 ").await?;
writer.write_all(client.as_bytes()).await?;
@ -493,9 +470,8 @@ fn server_message_body_cap(input: &str) -> IResult<&str, ServerMessageBody> {
mod test {
use assert_matches::*;
use crate::testkit::*;
use super::*;
use crate::testkit::*;
#[test]
fn test_server_message_notice() {

View File

@ -127,16 +127,12 @@ impl FromXml for BindRequest {
}
}
#[derive(PartialEq, Eq, Debug)]
pub struct BindResponse(pub Jid);
impl ToXml for BindResponse {
fn serialize(&self, events: &mut Vec<Event<'static>>) {
events.extend_from_slice(&[
Event::Start(BytesStart::from_content(
r#"bind xmlns="urn:ietf:params:xml:ns:xmpp-bind""#,
4,
)),
Event::Start(BytesStart::new(r#"bind xmlns="urn:ietf:params:xml:ns:xmpp-bind""#)),
Event::Start(BytesStart::new(r#"jid"#)),
Event::Text(BytesText::new(self.0.to_string().as_str()).into_owned()),
Event::End(BytesEnd::new("jid")),

View File

@ -1,6 +1,7 @@
use anyhow::{anyhow, Result};
use quick_xml::events::{BytesEnd, BytesStart, BytesText, Event};
use quick_xml::name::{Namespace, ResolveResult};
use std::io::Read;
use crate::xml::*;

View File

@ -12,15 +12,15 @@ use prometheus::{Encoder, Registry as MetricsRegistry, TextEncoder};
use serde::{Deserialize, Serialize};
use tokio::net::TcpListener;
use lavina_core::auth::UpdatePasswordResult;
use lavina_core::player::{PlayerId, SendMessageResult};
use lavina_core::auth::{Authenticator, UpdatePasswordResult};
use lavina_core::player::{PlayerId, PlayerRegistry, SendMessageResult};
use lavina_core::prelude::*;
use lavina_core::room::RoomId;
use lavina_core::repo::Storage;
use lavina_core::room::{RoomId, RoomRegistry};
use lavina_core::terminator::Terminator;
use lavina_core::LavinaCore;
use mgmt_api::*;
mod clustering;
use mgmt_api::*;
type HttpResult<T> = std::result::Result<T, Infallible>;
@ -33,11 +33,12 @@ pub async fn launch(
config: ServerConfig,
metrics: MetricsRegistry,
core: LavinaCore,
storage: Storage,
) -> Result<Terminator> {
log::info!("Starting the http service");
let listener = TcpListener::bind(config.listen_on).await?;
log::debug!("Listener started");
let terminator = Terminator::spawn(|rx| main_loop(listener, metrics, core, rx.map(|_| ())));
let terminator = Terminator::spawn(|rx| main_loop(listener, metrics, core, storage, rx.map(|_| ())));
Ok(terminator)
}
@ -45,6 +46,7 @@ async fn main_loop(
listener: TcpListener,
metrics: MetricsRegistry,
core: LavinaCore,
storage: Storage,
termination: impl Future<Output = ()>,
) -> Result<()> {
pin!(termination);
@ -57,9 +59,12 @@ async fn main_loop(
let stream = TokioIo::new(stream);
let metrics = metrics.clone();
let core = core.clone();
let storage = storage.clone();
tokio::task::spawn(async move {
let svc_fn = service_fn(|r| route(&metrics, &core, r));
let server = http1::Builder::new().serve_connection(stream, svc_fn);
let registry = metrics.clone();
let core = core.clone();
let storage = storage.clone();
let server = http1::Builder::new().serve_connection(stream, service_fn(move |r| route(registry.clone(), core.clone(), storage.clone(), r)));
if let Err(err) = server.await {
tracing::error!("Error serving connection: {:?}", err);
}
@ -71,27 +76,26 @@ async fn main_loop(
Ok(())
}
#[tracing::instrument(skip_all)]
async fn route(
registry: &MetricsRegistry,
core: &LavinaCore,
registry: MetricsRegistry,
core: LavinaCore,
storage: Storage,
request: Request<hyper::body::Incoming>,
) -> HttpResult<Response<Full<Bytes>>> {
propagade_span_from_headers(&request);
let res = match (request.method(), request.uri().path()) {
(&Method::GET, "/metrics") => endpoint_metrics(registry),
(&Method::GET, "/rooms") => endpoint_rooms(core).await,
(&Method::POST, paths::CREATE_PLAYER) => endpoint_create_player(request, core).await.or5xx(),
(&Method::POST, paths::STOP_PLAYER) => endpoint_stop_player(request, core).await.or5xx(),
(&Method::POST, paths::SET_PASSWORD) => endpoint_set_password(request, core).await.or5xx(),
(&Method::GET, "/rooms") => endpoint_rooms(core.rooms).await,
(&Method::POST, paths::CREATE_PLAYER) => endpoint_create_player(request, storage).await.or5xx(),
(&Method::POST, paths::STOP_PLAYER) => endpoint_stop_player(request, core.players).await.or5xx(),
(&Method::POST, paths::SET_PASSWORD) => endpoint_set_password(request, storage).await.or5xx(),
(&Method::POST, rooms::paths::SEND_MESSAGE) => endpoint_send_room_message(request, core).await.or5xx(),
(&Method::POST, rooms::paths::SET_TOPIC) => endpoint_set_room_topic(request, core).await.or5xx(),
_ => clustering::route(core, request).await.unwrap_or_else(endpoint_not_found),
_ => endpoint_not_found(),
};
Ok(res)
}
fn endpoint_metrics(registry: &MetricsRegistry) -> Response<Full<Bytes>> {
fn endpoint_metrics(registry: MetricsRegistry) -> Response<Full<Bytes>> {
let mf = registry.gather();
let mut buffer = vec![];
TextEncoder.encode(&mf, &mut buffer).expect("write to vec cannot fail");
@ -99,23 +103,23 @@ fn endpoint_metrics(registry: &MetricsRegistry) -> Response<Full<Bytes>> {
}
#[tracing::instrument(skip_all)]
async fn endpoint_rooms(core: &LavinaCore) -> Response<Full<Bytes>> {
async fn endpoint_rooms(rooms: RoomRegistry) -> Response<Full<Bytes>> {
// TODO introduce management API types independent from core-domain types
// TODO remove `Serialize` implementations from all core-domain types
let room_list = core.get_all_rooms().await.to_body();
let room_list = rooms.get_all_rooms().await.to_body();
Response::new(room_list)
}
#[tracing::instrument(skip_all)]
async fn endpoint_create_player(
request: Request<hyper::body::Incoming>,
core: &LavinaCore,
mut storage: Storage,
) -> Result<Response<Full<Bytes>>> {
let str = request.collect().await?.to_bytes();
let Ok(res) = serde_json::from_slice::<CreatePlayerRequest>(&str[..]) else {
return Ok(malformed_request());
};
core.create_player(&PlayerId::from(res.name)?).await?;
storage.create_user(&res.name).await?;
log::info!("Player {} created", res.name);
let mut response = Response::new(Full::<Bytes>::default());
*response.status_mut() = StatusCode::CREATED;
@ -125,7 +129,7 @@ async fn endpoint_create_player(
#[tracing::instrument(skip_all)]
async fn endpoint_stop_player(
request: Request<hyper::body::Incoming>,
core: &LavinaCore,
players: PlayerRegistry,
) -> Result<Response<Full<Bytes>>> {
let str = request.collect().await?.to_bytes();
let Ok(res) = serde_json::from_slice::<StopPlayerRequest>(&str[..]) else {
@ -134,7 +138,7 @@ async fn endpoint_stop_player(
let Ok(player_id) = PlayerId::from(res.name) else {
return Ok(player_not_found());
};
let Some(()) = core.stop_player(&player_id).await? else {
let Some(()) = players.stop_player(&player_id).await? else {
return Ok(player_not_found());
};
Ok(empty_204_request())
@ -143,13 +147,13 @@ async fn endpoint_stop_player(
#[tracing::instrument(skip_all)]
async fn endpoint_set_password(
request: Request<hyper::body::Incoming>,
core: &LavinaCore,
storage: Storage,
) -> Result<Response<Full<Bytes>>> {
let str = request.collect().await?.to_bytes();
let Ok(res) = serde_json::from_slice::<ChangePasswordRequest>(&str[..]) else {
return Ok(malformed_request());
};
let verdict = core.set_password(&res.player_name, &res.password).await?;
let verdict = Authenticator::new(&storage).set_password(&res.player_name, &res.password).await?;
match verdict {
UpdatePasswordResult::PasswordUpdated => {}
UpdatePasswordResult::UserNotFound => {
@ -159,10 +163,9 @@ async fn endpoint_set_password(
Ok(empty_204_request())
}
#[tracing::instrument(skip_all)]
async fn endpoint_send_room_message(
request: Request<hyper::body::Incoming>,
core: &LavinaCore,
mut core: LavinaCore,
) -> Result<Response<Full<Bytes>>> {
let str = request.collect().await?.to_bytes();
let Ok(req) = serde_json::from_slice::<rooms::SendMessageReq>(&str[..]) else {
@ -174,7 +177,7 @@ async fn endpoint_send_room_message(
let Ok(player_id) = PlayerId::from(req.author_id) else {
return Ok(player_not_found());
};
let mut player = core.connect_to_player(&player_id).await;
let mut player = core.players.connect_to_player(&player_id).await;
let res = player.send_message(room_id, req.message.into()).await?;
match res {
SendMessageResult::NoSuchRoom => Ok(room_not_found()),
@ -182,10 +185,9 @@ async fn endpoint_send_room_message(
}
}
#[tracing::instrument(skip_all)]
async fn endpoint_set_room_topic(
request: Request<hyper::body::Incoming>,
core: &LavinaCore,
core: LavinaCore,
) -> Result<Response<Full<Bytes>>> {
let str = request.collect().await?.to_bytes();
let Ok(req) = serde_json::from_slice::<rooms::SetTopicReq>(&str[..]) else {
@ -197,7 +199,7 @@ async fn endpoint_set_room_topic(
let Ok(player_id) = PlayerId::from(req.author_id) else {
return Ok(player_not_found());
};
let mut player = core.connect_to_player(&player_id).await;
let mut player = core.players.connect_to_player(&player_id).await;
player.change_topic(room_id, req.topic.into()).await?;
Ok(empty_204_request())
}
@ -282,24 +284,3 @@ where
Full::new(Bytes::from(buffer))
}
}
fn propagade_span_from_headers<T>(req: &Request<T>) {
use opentelemetry::propagation::Extractor;
use tracing::Span;
use tracing_opentelemetry::OpenTelemetrySpanExt;
struct HttpReqExtractor<'a, T> {
req: &'a Request<T>,
}
impl<'a, T> Extractor for HttpReqExtractor<'a, T> {
fn get(&self, key: &str) -> Option<&str> {
self.req.headers().get(key).and_then(|v| v.to_str().ok())
}
fn keys(&self) -> Vec<&str> {
self.req.headers().keys().map(|k| k.as_str()).collect()
}
}
let ctx = opentelemetry::global::get_text_map_propagator(|pp| pp.extract(&HttpReqExtractor { req }));
Span::current().set_parent(ctx);
}
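The extractor above only covers the receiving side; the sending side needs the mirror-image Injector so that the current span's context ends up in the outgoing headers where this function can find it. A sketch of that counterpart, assuming the same opentelemetry and tracing-opentelemetry APIs used above (the function and struct names are invented for illustration):

fn inject_span_into_headers<T>(req: &mut Request<T>) {
    use opentelemetry::propagation::Injector;
    use tracing::Span;
    use tracing_opentelemetry::OpenTelemetrySpanExt;
    struct HttpReqInjector<'a, T> {
        req: &'a mut Request<T>,
    }
    impl<'a, T> Injector for HttpReqInjector<'a, T> {
        fn set(&mut self, key: &str, value: String) {
            // Drop header names/values that are not valid HTTP, instead of panicking.
            let name = hyper::header::HeaderName::from_bytes(key.as_bytes());
            let value = hyper::header::HeaderValue::from_str(&value);
            if let (Ok(name), Ok(value)) = (name, value) {
                self.req.headers_mut().insert(name, value);
            }
        }
    }
    let ctx = Span::current().context();
    opentelemetry::global::get_text_map_propagator(|pp| pp.inject_context(&ctx, &mut HttpReqInjector { req }));
}

Both directions are no-ops unless a global text-map propagator has been registered, which is what the set_text_map_propagator(TraceContextPropagator::new()) call removed further down in main.rs provides.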

View File

@ -1,72 +0,0 @@
use http_body_util::{BodyExt, Full};
use hyper::body::Bytes;
use hyper::{Method, Request, Response};
use super::Or5xx;
use crate::http::{empty_204_request, malformed_request, player_not_found, room_not_found};
use lavina_core::clustering::room::{paths, JoinRoomReq, SendMessageReq};
use lavina_core::player::PlayerId;
use lavina_core::room::RoomId;
use lavina_core::LavinaCore;
// TODO move this into core
pub async fn route(core: &LavinaCore, request: Request<hyper::body::Incoming>) -> Option<Response<Full<Bytes>>> {
match (request.method(), request.uri().path()) {
(&Method::POST, paths::JOIN) => Some(endpoint_cluster_join_room(request, core).await.or5xx()),
(&Method::POST, paths::ADD_MESSAGE) => Some(endpoint_cluster_add_message(request, core).await.or5xx()),
_ => None,
}
}
#[tracing::instrument(skip_all, name = "endpoint_cluster_join_room")]
async fn endpoint_cluster_join_room(
request: Request<hyper::body::Incoming>,
core: &LavinaCore,
) -> lavina_core::prelude::Result<Response<Full<Bytes>>> {
let str = request.collect().await?.to_bytes();
let Ok(req) = serde_json::from_slice::<JoinRoomReq>(&str[..]) else {
return Ok(malformed_request());
};
tracing::info!("Incoming request: {:?}", &req);
let Ok(room_id) = RoomId::from(req.room_id) else {
dbg!(&req.room_id);
return Ok(room_not_found());
};
let Ok(player_id) = PlayerId::from(req.player_id) else {
dbg!(&req.player_id);
return Ok(player_not_found());
};
core.cluster_join_room(room_id, &player_id).await?;
Ok(empty_204_request())
}
#[tracing::instrument(skip_all, name = "endpoint_cluster_add_message")]
async fn endpoint_cluster_add_message(
request: Request<hyper::body::Incoming>,
core: &LavinaCore,
) -> lavina_core::prelude::Result<Response<Full<Bytes>>> {
let str = request.collect().await?.to_bytes();
let Ok(req) = serde_json::from_slice::<SendMessageReq>(&str[..]) else {
return Ok(malformed_request());
};
tracing::info!("Incoming request: {:?}", &req);
let Ok(created_at) = chrono::DateTime::parse_from_rfc3339(req.created_at) else {
dbg!(&req.created_at);
return Ok(malformed_request());
};
let Ok(room_id) = RoomId::from(req.room_id) else {
dbg!(&req.room_id);
return Ok(room_not_found());
};
let Ok(player_id) = PlayerId::from(req.player_id) else {
dbg!(&req.player_id);
return Ok(player_not_found());
};
let res = core.cluster_send_room_message(room_id, &player_id, req.message.into(), created_at.to_utc()).await?;
if let Some(_) = res {
Ok(empty_204_request())
} else {
Ok(room_not_found())
}
}
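For context, this is how a peer node would have exercised the join endpoint above: POST a JSON body carrying room_id and player_id and expect a 204 on success. A hedged sketch using reqwest (with its json feature); the URL path is a placeholder rather than the actual paths::JOIN value, and the JSON field names assume the serde derive on JoinRoomReq keeps the struct field names as written:

use serde_json::json;

async fn join_room_on_peer(peer_addr: &str) -> reqwest::Result<()> {
    let client = reqwest::Client::new();
    let resp = client
        .post(format!("http://{peer_addr}/cluster/join")) // placeholder path, not the real paths::JOIN constant
        .json(&json!({ "room_id": "test", "player_id": "alice" }))
        .send()
        .await?;
    // The handler above replies 204 No Content on success and 4xx on malformed ids.
    println!("join replied with {}", resp.status());
    Ok(())
}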

View File

@ -6,10 +6,8 @@ use std::path::Path;
use clap::Parser;
use figment::providers::Format;
use figment::{providers::Toml, Figment};
use opentelemetry::global::set_text_map_propagator;
use opentelemetry::KeyValue;
use opentelemetry_otlp::WithExportConfig;
use opentelemetry_sdk::propagation::TraceContextPropagator;
use opentelemetry_sdk::trace::{BatchConfig, RandomIdGenerator, Sampler};
use opentelemetry_sdk::{runtime, Resource};
use opentelemetry_semantic_conventions::resource::SERVICE_NAME;
@ -30,7 +28,6 @@ struct ServerConfig {
irc: projection_irc::ServerConfig,
xmpp: projection_xmpp::ServerConfig,
storage: lavina_core::repo::StorageConfig,
cluster: lavina_core::clustering::ClusterConfig,
tracing: Option<TracingConfig>,
}
@ -66,15 +63,14 @@ async fn main() -> Result<()> {
irc: irc_config,
xmpp: xmpp_config,
storage: storage_config,
cluster: cluster_config,
tracing: _,
} = config;
let mut metrics = MetricsRegistry::new();
let metrics = MetricsRegistry::new();
let storage = Storage::open(storage_config).await?;
let core = LavinaCore::new(&mut metrics, cluster_config, storage).await?;
let telemetry_terminator = http::launch(telemetry_config, metrics.clone(), core.clone()).await?;
let irc = projection_irc::launch(irc_config, core.clone(), metrics.clone()).await?;
let xmpp = projection_xmpp::launch(xmpp_config, core.clone(), metrics.clone()).await?;
let core = LavinaCore::new(metrics.clone(), storage.clone()).await?;
let telemetry_terminator = http::launch(telemetry_config, metrics.clone(), core.clone(), storage.clone()).await?;
let irc = projection_irc::launch(irc_config, core.clone(), metrics.clone(), storage.clone()).await?;
let xmpp = projection_xmpp::launch(xmpp_config, core.clone(), metrics.clone(), storage.clone()).await?;
tracing::info!("Started");
sleep.await;
@ -83,8 +79,7 @@ async fn main() -> Result<()> {
xmpp.terminate().await?;
irc.terminate().await?;
telemetry_terminator.terminate().await?;
let storage = core.shutdown().await;
storage.close().await;
core.shutdown().await?;
tracing::info!("Shutdown complete");
Ok(())
}
@ -144,7 +139,6 @@ fn set_up_logging(tracing_config: &Option<TracingConfig>) -> Result<()> {
.with_exporter(trace_exporter)
.install_batch(runtime::Tokio)?;
let subscriber = subscriber.with(OpenTelemetryLayer::new(tracer));
set_text_map_propagator(TraceContextPropagator::new());
targets.with_subscriber(subscriber).try_init()?;
} else {
targets.with_subscriber(subscriber).try_init()?;