Commit (forked from ukchukx/rocksdb-rust)

Showing 10 changed files with 937 additions and 584 deletions. (Large diffs are not rendered by default.)
Cargo.toml
@@ -1,15 +1,19 @@
 [package]
 name = "smol-kv"
-version = "0.1.1"
-authors = ["Uk <ukchukundah@gmail.com>", "mpw <x@mpw.sh>"]
+version = "0.2.0"
+authors = ["mpw <x@mpw.sh>"]
 edition = "2021"

 [dependencies]
-rocksdb-client = { git = "https://github.com/mpwsh/rocksdb-client" }
-sha1 = "0.10.6"
+bytes = "1.5.0"
 env_logger = "0.11.1"
 serde_json = "1.0.104"
+rocksdb = { version = "0.22.0", features = ["multi-threaded-cf"] }
+thiserror = "1.0.61"
+serde = { version = "1.0.203", features = ["derive"] }
 actix-web = "4.3.1"
-log = "0.4.19"
+sha1 = "0.10.6"
+log = "0.4.22"
+rand = "0.8"
 hex = "0.4.3"
 num_cpus = "1.16.0"
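The notable dependency change is the move from the external `rocksdb-client` wrapper to the `rocksdb` crate used directly, with its `multi-threaded-cf` feature. That feature switches the `rocksdb::DB` alias to `DBWithThreadMode<MultiThreaded>`, whose `create_cf`/`drop_cf` take `&self`, so column families can be managed on a handle shared across threads, as an actix-web `Data<T>` would be. A minimal sketch of what that enables; the path and column-family name here are illustrative, not taken from this repo:

use rocksdb::{Options, DB};

fn main() -> Result<(), rocksdb::Error> {
    let mut opts = Options::default();
    opts.create_if_missing(true);

    // With `multi-threaded-cf`, `DB` is `DBWithThreadMode<MultiThreaded>`:
    // create_cf/drop_cf take &self, so a shared handle can manage column
    // families without exclusive (&mut) access.
    let db = DB::open(&opts, "/tmp/example-db")?;
    db.create_cf("players", &Options::default())?;

    let cf = db.cf_handle("players").expect("cf was just created");
    db.put_cf(&cf, b"player-1", br#"{"health":100}"#)?;
    assert!(db.get_cf(&cf, b"player-1")?.is_some());

    db.drop_cf("players")?;
    Ok(())
}

Under the default single-threaded alias the same `create_cf` call would need `&mut self`, which is awkward behind `Data<RocksDB>` in a web handler.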
k6 load-test script (new file; path not rendered in this view)
@@ -0,0 +1,74 @@
import http from "k6/http";
import { sleep } from "k6";
import { randomIntBetween, randomItem } from "https://jslib.k6.io/k6-utils/1.2.0/index.js";

export let options = {
  scenarios: {
    players: {
      executor: 'constant-vus',
      vus: 50,
      duration: '30s',
    },
  },
};

const playerStates = [
  "Idle",
  "Moving",
  "Dashing",
  "PreparingPrimary",
  "CastingPrimary",
  "PreparingSecondary",
  "CastingSecondary",
  "Resting",
  "Meditating",
  "Dead",
];

function generateRandomPlayerBundle(playerId) {
  return {
    player: playerId,
    name: `Player${playerId}`,
    health: Math.random() * 100,
    mana: Math.random() * 250,
    position: Math.random() * 100,
    size: Math.random() * 10,
    state: randomItem(playerStates),
    angle: Math.random() * 360,
    color: Math.random(),
    winner: Math.random() < 0.5,
    rounds_won: randomIntBetween(0, 10),
    points: randomIntBetween(-100, 100),
  };
}

export default function () {
  const playerId = parseInt(__VU);
  const params = { headers: { "Content-Type": "application/json" } };
  let matchId = "test-match";

  // Create the collection (no body; `params` is the third argument so the
  // headers are actually applied)
  http.post(`http://127.0.0.1:5050/api/${matchId}`, null, params);

  while (true) {
    const startTime = new Date().getTime();

    const playerBundle = generateRandomPlayerBundle(playerId);

    // Save player data
    http.post(
      `http://127.0.0.1:5050/api/${matchId}/${playerId}`,
      JSON.stringify(playerBundle),
      params
    );

    const endTime = new Date().getTime();
    const executionTime = endTime - startTime;

    // Sleep just long enough to hold a 128 Hz tick rate (7.8125 ms per tick)
    const sleepTime = Math.max(0, 7.8125 - executionTime);
    sleep(sleepTime / 1000); // sleep() takes seconds, not milliseconds
  }
}
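The script's path is not rendered in this view; assuming it is saved as `players.js` (placeholder name), it runs against a locally started server with:

k6 run players.js

Each of the 50 VUs first POSTs to create the shared `test-match` collection, then writes its own player bundle in a loop. 128 Hz means a 1000/128 = 7.8125 ms budget per iteration, and the sleep fills whatever time the POST left unused.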
rust-toolchain.toml
@@ -1,5 +1,5 @@
 [toolchain]
-channel = "1.77.2"
+channel = "1.82.0"
 components = [
     "cargo",
     "clippy",
Benchmark handler (Rust, new file; path not rendered in this view)
@@ -0,0 +1,217 @@
use crate::kv::{KVStore, RocksDB};
use rand::{distributions::Alphanumeric, Rng};

use actix_web::{
    web::{Data, Query},
    HttpRequest, HttpResponse,
};
use serde_json::json;

use serde::Deserialize;
use std::time::Instant;

#[derive(Deserialize)]
pub struct BenchmarkParams {
    count: usize, // Number of operations to perform
    size: usize,  // Size of each value in bytes
    #[serde(default = "default_batch_size")]
    batch_size: usize, // Optional: how many operations per batch
}

fn default_batch_size() -> usize {
    100
}

fn generate(id: usize) -> serde_json::Value {
    let status = ["active", "inactive", "pending"];
    let types = ["user", "admin", "guest"];
    let mut rng = rand::thread_rng();

    json!({
        "id": format!("user_{}", id),
        "type": types[rng.gen_range(0..3)],
        "status": status[rng.gen_range(0..3)],
        "data": {
            "name": format!("User {}", id),
            "email": format!("user{}@example.com", id),
            "age": rng.gen_range(18..80),
            "verified": rng.gen_bool(0.7)
        },
        "metadata": {
            "last_login": format!("2024-{:02}-{:02}T{:02}:{:02}:{:02}Z",
                rng.gen_range(1..13),
                rng.gen_range(1..29),
                rng.gen_range(0..24),
                rng.gen_range(0..60),
                rng.gen_range(0..60)
            ),
            "created_at": "2024-10-26T20:00:00Z",
            "login_count": rng.gen_range(0..1000)
        }
    })
}

pub async fn start(
    db: Data<RocksDB>,
    token: Data<String>,
    params: Query<BenchmarkParams>,
    req: HttpRequest,
) -> HttpResponse {
    // Auth check: compare the raw Authorization header against the admin token
    let token_header = req
        .headers()
        .get("Authorization")
        .and_then(|hv| hv.to_str().ok());
    if token_header != Some(token.get_ref().as_str()) {
        return HttpResponse::Unauthorized().finish();
    }

    let start_total = Instant::now();
    let mut metrics = json!({
        "params": {
            "count": params.count,
            "size": params.size,
            "batch_size": params.batch_size
        },
        "operations": {
            "writes": { "count": 0, "success": 0, "duration_ms": 0 },
            "reads": { "count": 0, "success": 0, "duration_ms": 0 },
            "deletes": { "count": 0, "success": 0, "duration_ms": 0 }
        }
    });

    // Record one realistic JSON document and its serialized size, so the
    // response shows what a typical document looks like.
    let sample = generate(0);
    if let Ok(sample_json) = serde_json::to_vec(&sample) {
        metrics["data_sample"] = json!({
            "example": sample,
            "size_bytes": sample_json.len()
        });
    }

    // Generate the benchmark payloads once: random alphanumeric strings of
    // exactly `size` bytes under keys bench_key_0..bench_key_{count-1}.
    let random_data = (0..params.count)
        .map(|i| {
            let value: String = rand::thread_rng()
                .sample_iter(&Alphanumeric)
                .take(params.size)
                .map(char::from)
                .collect();

            (format!("bench_key_{}", i), value)
        })
        .collect::<Vec<_>>();

    let total_data_size = params.count * params.size;

    // Perform writes in batches
    let write_start = Instant::now();
    for chunk in random_data.chunks(params.batch_size) {
        let batch_items: Vec<_> = chunk.iter().map(|(k, v)| (k.as_str(), v)).collect();

        if db.batch_insert(&batch_items).is_ok() {
            metrics["operations"]["writes"]["success"] = json!(
                metrics["operations"]["writes"]["success"].as_u64().unwrap()
                    + batch_items.len() as u64
            );
        }
        metrics["operations"]["writes"]["count"] = json!(
            metrics["operations"]["writes"]["count"].as_u64().unwrap() + batch_items.len() as u64
        );
    }
    let write_duration = write_start.elapsed();
    metrics["operations"]["writes"]["duration_ms"] = json!(write_duration.as_millis());

    // Perform reads
    let read_start = Instant::now();
    for (key, _) in &random_data {
        if db.find(key).is_ok() {
            metrics["operations"]["reads"]["success"] =
                json!(metrics["operations"]["reads"]["success"].as_u64().unwrap() + 1);
        }
        metrics["operations"]["reads"]["count"] =
            json!(metrics["operations"]["reads"]["count"].as_u64().unwrap() + 1);
    }
    let read_duration = read_start.elapsed();
    metrics["operations"]["reads"]["duration_ms"] = json!(read_duration.as_millis());

    // Perform deletes
    let delete_start = Instant::now();
    for (key, _) in &random_data {
        if db.delete(key).is_ok() {
            metrics["operations"]["deletes"]["success"] = json!(
                metrics["operations"]["deletes"]["success"]
                    .as_u64()
                    .unwrap()
                    + 1
            );
        }
        metrics["operations"]["deletes"]["count"] =
            json!(metrics["operations"]["deletes"]["count"].as_u64().unwrap() + 1);
    }
    let delete_duration = delete_start.elapsed();
    metrics["operations"]["deletes"]["duration_ms"] = json!(delete_duration.as_millis());

    // Calculate throughput metrics
    let writes_per_sec = params.count as f64 / write_duration.as_secs_f64();
    let reads_per_sec = params.count as f64 / read_duration.as_secs_f64();
    let total_duration_secs = start_total.elapsed().as_secs_f64();
    let total_ops = params.count as f64 * 3.0; // writes + reads + deletes
    let total_ops_per_sec = total_ops / total_duration_secs;
    let mb_written = total_data_size as f64 / (1024.0 * 1024.0);
    let mb_per_sec = mb_written / write_duration.as_secs_f64();

    // Assemble the final response from the counters plus the measured
    // timings and throughput.
    let metrics = json!({
        "params": {
            "count": params.count,
            "size": params.size,
            "batch_size": params.batch_size
        },
        "operations": {
            "writes": {
                "count": metrics["operations"]["writes"]["count"],
                "success": metrics["operations"]["writes"]["success"],
                "duration_ms": write_duration.as_millis()
            },
            "reads": {
                "count": metrics["operations"]["reads"]["count"],
                "success": metrics["operations"]["reads"]["success"],
                "duration_ms": read_duration.as_millis()
            },
            "deletes": {
                "count": metrics["operations"]["deletes"]["count"],
                "success": metrics["operations"]["deletes"]["success"],
                "duration_ms": delete_duration.as_millis()
            }
        },
        "data_sample": metrics["data_sample"],
        "throughput": {
            "writes_per_sec": writes_per_sec,
            "reads_per_sec": reads_per_sec,
            "mb_written_per_sec": mb_per_sec,
            "total_ops_per_sec": total_ops_per_sec
        },
        "totals": {
            "duration_secs": total_duration_secs,
            "duration_ms": start_total.elapsed().as_millis(),
            "operations": total_ops,
            "data_written_mb": mb_written
        }
    });
    HttpResponse::Ok()
        .content_type("application/json")
        .body(metrics.to_string())
}
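How this handler is routed is not part of the diff shown here; assuming it is mounted at `/api/benchmark` (hypothetical path) and the server's admin token is `secret` (placeholder), a run would look like the following. Note the handler compares the raw Authorization header value against the token, with no `Bearer` prefix:

curl -H "Authorization: secret" \
  "http://127.0.0.1:5050/api/benchmark?count=10000&size=256&batch_size=500"

The response reports per-operation counts, successes, and durations, plus throughput (writes/sec, reads/sec, MB written/sec) computed from the timings above.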