add ability to suspend user rights
this will be useful for things like free trials, partially banning
abusive users, etc.

there's also an internal API so that big_money and big_central can
suspend users or lift suspensions as needed

suspensions are stored as jsonb so we can suspend more rights in the
future without adding new columns
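
A rough sketch of the per-user record this implies; the field names appear in the src/auth.rs diff below, while the actual storage change lives in src/meta_db.rs, which is not rendered in this view:

    use serde::{Deserialize, Serialize};

    // One jsonb blob per user. Suspending a new right later is just another
    // field with a serde default; no column migration required.
    #[derive(Debug, Default, Clone, Serialize, Deserialize)]
    pub struct Suspension {
        #[serde(default)]
        pub read_suspended: bool,
        #[serde(default)]
        pub write_suspended: bool,
        #[serde(default)]
        pub delete_suspended: bool,
        #[serde(default)]
        pub query_suspended: bool,
    }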
billyb2 committed Aug 23, 2024
1 parent c3586c2 commit 237a32d
Showing 6 changed files with 212 additions and 65 deletions.
3 changes: 2 additions & 1 deletion Cargo.lock


3 changes: 2 additions & 1 deletion Cargo.toml
@@ -35,7 +35,8 @@ tracing-opentelemetry = "0.23"
instant-acme = "0.4"
rcgen = "0.12"
warp = "0.3"
serde_json = "1"
serde = "1"
serde_json = { version = "1", features = ["raw_value"] }
bytes = "1.6"

[features]
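
The newly enabled raw_value feature provides serde_json::value::RawValue, which carries a JSON payload as unparsed text. A minimal sketch of how it can pair with a jsonb column, using the Suspension type sketched above (the exact use inside meta_db is an assumption, since that file's diff is not rendered here):

    use serde_json::value::RawValue;

    // RawValue defers parsing: the jsonb text stays opaque until the
    // suspension flags are actually needed.
    fn parse_suspension(raw: &RawValue) -> serde_json::Result<Suspension> {
        serde_json::from_str(raw.get())
    }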
34 changes: 29 additions & 5 deletions src/auth.rs
@@ -1,4 +1,5 @@
// TODO create an AuthorizedToken struct so that type safety protects us from fucking up
use anyhow::anyhow;
use std::{collections::BTreeSet, fmt::Display, time::Duration};

use biscuit_auth::{
@@ -9,6 +10,8 @@ use biscuit_auth::{
};
use tracing::{event, Level};

use crate::meta_db::MetaDB;

#[derive(Debug)]
pub enum Right {
Read,
@@ -33,12 +36,33 @@ impl Right {
}

#[tracing::instrument(err, skip(token))]
pub fn authorize(
pub async fn authorize<M: MetaDB>(
right_being_checked: Right,
token: &Biscuit,
file_ids: Vec<String>,
_chunk_ids: Vec<String>,
) -> anyhow::Result<()> {
meta_db: &M,
) -> anyhow::Result<i64> {
let user_id = get_user_id(token)?;

// first, check if the user has been suspended from the right they're trying to execute
let suspensions = meta_db.suspensions(&[user_id]).await?;
let suspension = suspensions.get(&user_id).unwrap();

let can_perform_action = match right_being_checked {
Right::Read => !suspension.read_suspended,
Right::Write => !suspension.write_suspended,
Right::Delete => !suspension.delete_suspended,
Right::Query => !suspension.query_suspended,
_ => true,
};

if !can_perform_action {
return Err(anyhow!(
"suspended from performing action {}",
right_being_checked.to_str()
));
}

let mut authorizer = authorizer!(
r#"
check if user($user);
@@ -71,7 +95,7 @@ pub fn authorize(

authorizer.authorize().unwrap();

Ok(())
Ok(user_id)
}

#[derive(thiserror::Error, Debug)]
@@ -86,7 +110,7 @@ impl Display for GetUserIDError {
}

#[tracing::instrument(err, skip(token))]
pub fn get_user_id(token: &Biscuit) -> Result<i64, GetUserIDError> {
fn get_user_id(token: &Biscuit) -> Result<i64, GetUserIDError> {
let mut authorizer = authorizer!(
r#"
check if user($user);
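
The meta_db half of this change is not rendered above, but the new call sites pin down roughly these signatures. A sketch under that assumption, not the committed trait (native async-fn-in-trait for brevity):

    use std::collections::HashMap;

    pub trait MetaDB {
        // Batched lookup. authorize() unwraps the per-user entry, so the
        // returned map presumably contains a (default) record for every
        // requested id, suspended or not.
        async fn suspensions(
            &self,
            user_ids: &[i64],
        ) -> anyhow::Result<HashMap<i64, Suspension>>;

        // Upsert suspension records; driven by the internal API below.
        async fn set_suspensions(
            &self,
            suspensions: HashMap<i64, Suspension>,
        ) -> anyhow::Result<()>;
    }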
21 changes: 20 additions & 1 deletion src/internal.rs
@@ -1,7 +1,7 @@
use std::sync::Arc;

use bfsp::internal::internal_file_server_message::Message;
use bfsp::internal::GetStorageCapResp;
use bfsp::internal::{GetStorageCapResp, GetSuspensionsResp, SuspendUsersResp};
use bfsp::{
chacha20poly1305::XChaCha20Poly1305,
internal::{
@@ -53,6 +53,25 @@ async fn handle_internal_message<M: MetaDB>(

SetStorageCapResp { err: None }.encode_to_vec()
}
Message::GetSuspensions(args) => {
let user_ids = args.user_ids;
let suspensions = meta_db.suspensions(&user_ids).await.unwrap();

GetSuspensionsResp {
response: Some(bfsp::internal::get_suspensions_resp::Response::Suspensions(
bfsp::internal::get_suspensions_resp::Suspensions {
suspension_info: suspensions,
},
)),
}
.encode_to_vec()
}
Message::SuspendUsers(args) => {
let suspensions = args.suspensions;
meta_db.set_suspensions(suspensions).await.unwrap();

SuspendUsersResp { err: None }.encode_to_vec()
}
}
.prepend_len()
}
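
For illustration, roughly what an internal caller such as big_central might do to partially ban an abusive user. The request plumbing is hypothetical; the handler above only shows that SuspendUsers carries a map of user id to suspension record:

    use std::collections::HashMap;

    // Hypothetical: cut off user 42's write and delete rights while
    // leaving read and query intact.
    async fn partially_ban<M: MetaDB>(meta_db: &M) -> anyhow::Result<()> {
        let mut suspensions = HashMap::new();
        suspensions.insert(
            42i64,
            Suspension {
                write_suspended: true,
                delete_suspended: true,
                ..Default::default()
            },
        );
        meta_db.set_suspensions(suspensions).await
    }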
90 changes: 35 additions & 55 deletions src/main.rs
@@ -5,7 +5,7 @@ mod meta_db;
mod tls;

use anyhow::anyhow;
use auth::{authorize, get_user_id, Right};
use auth::{authorize, Right};
use bfsp::base64_decode;
use bfsp::chacha20poly1305::KeyInit;
use bfsp::chacha20poly1305::XChaCha20Poly1305;
@@ -42,7 +42,7 @@ use wtransport::ServerConfig;
use crate::chunk_db::file::FSChunkDB;
#[cfg(feature = "s3")]
use crate::chunk_db::s3::S3ChunkDB;
use crate::meta_db::{InsertChunkError, MetaDB, PostgresMetaDB};
use crate::meta_db::{MetaDB, PostgresMetaDB};
use anyhow::Result;
use bfsp::{
chunks_uploaded_query_resp::{ChunkUploaded, ChunksUploaded},
@@ -54,7 +54,7 @@ use bfsp::{
},
ChunkID, ChunkMetadata, ChunksUploadedQueryResp, DownloadChunkResp, FileServerMessage, Message,
};
use bfsp::{EncryptedFileMetadata, EncryptionNonce, PrependLen};
use bfsp::{EncryptedFileMetadata, PrependLen};
use tls::get_tls_cert;
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use tracing_subscriber::prelude::*;
@@ -502,9 +502,9 @@ pub async fn handle_download_chunk<M: MetaDB, C: ChunkDB>(
Vec<u8>,
)>,
> {
authorize(Right::Read, token, Vec::new(), vec![chunk_id.to_string()]).unwrap();

let user_id = get_user_id(token).unwrap();
let user_id = authorize(Right::Read, token, Vec::new(), meta_db)
.await
.unwrap();

if let Some(enc_chunk_meta) = meta_db.get_enc_chunk_meta(chunk_id, user_id).await? {
let chunk = chunk_db.get_chunk(&chunk_id, user_id).await?.unwrap();
@@ -524,19 +524,9 @@ async fn query_chunks_uploaded<M: MetaDB>(
token: &Biscuit,
chunks: HashSet<ChunkID>,
) -> Result<HashMap<ChunkID, bool>> {
authorize(
Right::Query,
token,
Vec::new(),
chunks
.clone()
.into_iter()
.map(|id| id.to_string())
.collect(),
)
.unwrap();

let user_id = get_user_id(token).unwrap();
let user_id = authorize(Right::Query, token, Vec::new(), meta_db)
.await
.unwrap();

let chunks_uploaded: HashMap<ChunkID, bool> =
futures::future::join_all(chunks.into_iter().map(|chunk_id| async move {
@@ -562,9 +552,9 @@ async fn handle_upload_chunk<M: MetaDB + 'static, C: ChunkDB + 'static>(
enc_chunk_metadata: EncryptedChunkMetadata,
chunk: Vec<u8>,
) -> Result<()> {
authorize(Right::Write, token, Vec::new(), Vec::new()).unwrap();

let user_id = get_user_id(token).unwrap();
let user_id = authorize(Right::Write, token, Vec::new(), meta_db.as_ref())
.await
.unwrap();

// 8 MiB max chunk size
if chunk.len() > 1024 * 1024 * 8 {
@@ -613,19 +603,9 @@ pub async fn handle_delete_chunks<D: MetaDB, C: ChunkDB>(
token: &Biscuit,
chunk_ids: HashSet<ChunkID>,
) -> Result<()> {
authorize(
Right::Delete,
token,
Vec::new(),
chunk_ids
.clone()
.into_iter()
.map(|id| id.to_string())
.collect(),
)
.unwrap();

let user_id = get_user_id(token).unwrap();
let user_id = authorize(Right::Delete, token, Vec::new(), meta_db)
.await
.unwrap();

meta_db.delete_chunk_metas(&chunk_ids).await.unwrap();
let remove_chunk_files = chunk_ids.clone().into_iter().map(|chunk_id| {
@@ -660,8 +640,9 @@ pub async fn handle_upload_file_metadata<D: MetaDB>(
token: &Biscuit,
enc_file_meta: EncryptedFileMetadata,
) -> Result<(), UploadMetadataError> {
authorize(Right::Write, token, Vec::new(), Vec::new()).unwrap();
let user_id = get_user_id(token).unwrap();
let user_id = authorize(Right::Write, token, Vec::new(), meta_db)
.await
.unwrap();

let storage_usages = meta_db.total_usages(&[user_id]).await.unwrap();
let storage_usage = *storage_usages.get(&user_id).unwrap();
@@ -686,10 +667,11 @@ pub async fn handle_download_file_metadata<D: MetaDB>(
meta_db: &D,
token: &Biscuit,
file_id: String,
) -> Result<EncryptedFileMetadata, UploadMetadataError> {
authorize(Right::Read, token, vec![file_id.clone()], Vec::new()).unwrap();
) -> Result<EncryptedFileMetadata, anyhow::Error> {
let user_id = authorize(Right::Read, token, vec![file_id.clone()], meta_db)
.await
.unwrap();

let user_id = get_user_id(token).unwrap();
match meta_db.get_file_meta(file_id, user_id).await.unwrap() {
Some(meta) => Ok(meta),
None => Err(todo!()),
@@ -702,10 +684,11 @@ pub async fn handle_list_file_metadata<D: MetaDB>(
token: &Biscuit,
file_ids: Vec<String>,
) -> Result<HashMap<String, EncryptedFileMetadata>, UploadMetadataError> {
authorize(Right::Query, token, file_ids.clone(), Vec::new()).unwrap();
let user_id = authorize(Right::Query, token, file_ids.clone(), meta_db)
.await
.unwrap();
let meta_ids: HashSet<String> = HashSet::from_iter(file_ids.into_iter());

let user_id = get_user_id(token).unwrap();
let meta = meta_db.list_file_meta(meta_ids, user_id).await.unwrap();
Ok(meta)
}
@@ -716,15 +699,10 @@ pub async fn handle_list_chunk_metadata<D: MetaDB>(
token: &Biscuit,
chunk_ids: HashSet<ChunkID>,
) -> Result<HashMap<ChunkID, ChunkMetadata>, UploadMetadataError> {
authorize(
Right::Query,
token,
Vec::new(),
chunk_ids.iter().map(|id| id.to_string()).collect(),
)
.unwrap();

let user_id = get_user_id(token).unwrap();
let user_id = authorize(Right::Query, token, Vec::new(), meta_db)
.await
.unwrap();

let meta = meta_db.list_chunk_meta(chunk_ids, user_id).await.unwrap();
Ok(meta)
}
@@ -735,19 +713,21 @@ pub async fn handle_delete_file_metadata<D: MetaDB>(
token: &Biscuit,
file_id: String,
) -> Result<(), UploadMetadataError> {
authorize(Right::Delete, token, vec![file_id.clone()], Vec::new()).unwrap();
let user_id = authorize(Right::Delete, token, vec![file_id.clone()], meta_db)
.await
.unwrap();

let user_id = get_user_id(token).unwrap();
meta_db.delete_file_meta(file_id, user_id).await.unwrap();

Ok(())
}

#[tracing::instrument(err, skip(token))]
pub async fn handle_get_usage<D: MetaDB>(meta_db: &D, token: &Biscuit) -> anyhow::Result<u64> {
authorize(Right::Usage, token, Vec::new(), Vec::new()).unwrap();
let user_id = authorize(Right::Usage, token, Vec::new(), meta_db)
.await
.unwrap();

let user_id = get_user_id(token).unwrap();
Ok(*meta_db
.total_usages(&[user_id])
.await?