1,909 changes: 1,184 additions & 725 deletions frameworks/Rust/actix/Cargo.lock

Large diffs are not rendered by default.

49 changes: 24 additions & 25 deletions frameworks/Rust/actix/Cargo.toml
@@ -1,7 +1,7 @@
[package]
name = "tfb-actix"
version = "3.0.0"
edition = "2018"
version = "4.0.0"
edition = "2024"

[[bin]]
name = "tfb-web"
@@ -28,41 +28,40 @@ name = "tfb-web-pg-deadpool"
path = "src/main_pg_deadpool.rs"

[dependencies]
anyhow = "1"
actix = "0.13"
actix-web = "4.3.1"
actix-http = "3.3.1"
actix-rt = "2"
actix-codec = "0.4"
anyhow = "1.0.96"
actix = "0.13.5"
actix-web = "4.9.0"
actix-http = "3.9.0"
actix-rt = "2.10.0"
actix-codec = "0.5"
actix-server = "2"
actix-service = "2"

askama = "0.11"
askama = "0.12"
bytes = "1"
diesel = { version = "1.4", features = ["postgres"] }
env_logger = "0.9"
futures = "0.3.7"
http = "0.2"
diesel = { version = "2.2.7", features = ["postgres"] }
env_logger = "0.11"
futures = "0.3.31"
log = { version = "0.4", features = ["release_max_level_debug"] }
num_cpus = "1.13"
rand = { version = "0.8", features = ["small_rng"] }
rand = { version = "0.9", features = ["small_rng"] }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
simd-json = "0.4"
simd-json-derive = "0.12"
snmalloc-rs = "0.2.6"
simd-json = "0.14"
simd-json-derive = "0.15"
snmalloc-rs = "0.3.8"
tokio = { version = "1", features = ["full"] }
tokio-util = "0.7.8"
tokio-postgres = "0.7.5"
deadpool-postgres = "0.10.1"
mongodb = "2.2.0"
url = "2.1"
v_htmlescape = "0.14"
tokio-util = "0.7.13"
tokio-postgres = "0.7.13"
deadpool-postgres = "0.14.1"
mongodb = "3.2.1"
url = "2.5"
v_htmlescape = "0.15"
yarte = { version = "0.15", features = ["bytes-buf"] }

[build-dependencies]
askama = "0.11"
bindgen = "0.59"
askama = "0.12"
bindgen = "0.71"

[profile.release]
lto = true
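The version bumps above (rand 0.9, diesel 2.x, mongodb 3.x, simd-json 0.14, snmalloc-rs 0.3) drive the source changes in the rest of this diff, and the move to edition 2024 is why the Dockerfiles below switch to rust:1.85, the first toolchain that supports it. For context, snmalloc-rs is normally wired in as the process-wide allocator; a minimal sketch of that pattern follows, in which the static name ALLOC is illustrative rather than taken from this PR:

```rust
// Hypothetical sketch: registering snmalloc as the global allocator.
// Only the #[global_allocator] attribute matters; the static's name is arbitrary.
use snmalloc_rs::SnMalloc;

#[global_allocator]
static ALLOC: SnMalloc = SnMalloc;

fn main() {
    // Every heap allocation in the binary now goes through snmalloc.
    let v: Vec<u32> = (0..4).collect();
    println!("{v:?}");
}
```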
2 changes: 1 addition & 1 deletion frameworks/Rust/actix/actix-http.dockerfile
@@ -1,4 +1,4 @@
FROM rust:1.58
FROM rust:1.85

RUN apt-get update -yqq && apt-get install -yqq cmake g++

2 changes: 1 addition & 1 deletion frameworks/Rust/actix/actix-server.dockerfile
@@ -1,4 +1,4 @@
FROM rust:1.58
FROM rust:1.85

RUN apt-get update -yqq && apt-get install -yqq cmake g++

2 changes: 1 addition & 1 deletion frameworks/Rust/actix/actix-web-diesel.dockerfile
@@ -1,4 +1,4 @@
FROM rust:1.58
FROM rust:1.85

RUN apt-get update -yqq && apt-get install -yqq cmake g++

2 changes: 1 addition & 1 deletion frameworks/Rust/actix/actix-web-mongodb.dockerfile
@@ -1,4 +1,4 @@
FROM rust:1.57.0
FROM rust:1.85

ENV ACTIX_TECHEMPOWER_MONGODB_URL=mongodb://tfb-database:27017

2 changes: 1 addition & 1 deletion frameworks/Rust/actix/actix-web-pg-deadpool.dockerfile
@@ -1,4 +1,4 @@
FROM rust:1.57.0
FROM rust:1.85

RUN apt-get update -yqq && apt-get install -yqq cmake g++

2 changes: 1 addition & 1 deletion frameworks/Rust/actix/actix.dockerfile
@@ -1,4 +1,4 @@
FROM rust:1.58
FROM rust:1.85.0

RUN apt-get update -yqq && apt-get install -yqq cmake g++

File mode changed from 100755 to 100644: frameworks/Rust/actix/benchmark_config.json (contents unchanged)
16 changes: 8 additions & 8 deletions frameworks/Rust/actix/src/db.rs
@@ -6,7 +6,7 @@ use bytes::{Bytes, BytesMut};
use futures::{
stream::futures_unordered::FuturesUnordered, FutureExt, StreamExt, TryStreamExt,
};
use rand::{rngs::SmallRng, thread_rng, Rng, SeedableRng};
use rand::{rngs::SmallRng, Rng, SeedableRng};
use tokio_postgres::{connect, types::ToSql, Client, NoTls, Statement};

use crate::{
@@ -101,9 +101,9 @@ impl PgConnection {
}

pub async fn get_world(&self) -> Result<Bytes, PgError> {
let mut rng = SmallRng::from_rng(&mut thread_rng()).unwrap();
let mut rng = SmallRng::from_rng(&mut rand::rng());

let random_id = (rng.gen::<u32>() % 10_000 + 1) as i32;
let random_id = (rng.random::<u32>() % 10_000 + 1) as i32;

let world = self.query_one_world(random_id).await?;
let mut body = BytesMut::with_capacity(40);
@@ -113,26 +113,26 @@ impl PgConnection {
}

pub async fn get_worlds(&self, num: usize) -> Result<Vec<World>, PgError> {
let mut rng = SmallRng::from_rng(&mut thread_rng()).unwrap();
let mut rng = SmallRng::from_rng(&mut rand::rng());

let worlds = FuturesUnordered::new();

for _ in 0..num {
let w_id = (rng.gen::<u32>() % 10_000 + 1) as i32;
let w_id = (rng.random::<u32>() % 10_000 + 1) as i32;
worlds.push(self.query_one_world(w_id));
}

worlds.try_collect().await
}

pub async fn update(&self, num: u16) -> Result<Vec<World>, PgError> {
let mut rng = SmallRng::from_rng(&mut thread_rng()).unwrap();
let mut rng = SmallRng::from_rng(&mut rand::rng());

let worlds = FuturesUnordered::new();

for _ in 0..num {
let id = (rng.gen::<u32>() % 10_000 + 1) as i32;
let w_id = (rng.gen::<u32>() % 10_000 + 1) as i32;
let id = (rng.random::<u32>() % 10_000 + 1) as i32;
let w_id = (rng.random::<u32>() % 10_000 + 1) as i32;

worlds.push(self.query_one_world(w_id).map(move |res| match res {
Ok(mut world) => {
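The src/db.rs edits above mirror the rand 0.8 to 0.9 renames pulled in by the Cargo.toml change: thread_rng() becomes rand::rng(), Rng::gen becomes Rng::random, gen_range becomes random_range, from_entropy becomes from_os_rng, and SeedableRng::from_rng now takes the source generator directly and no longer returns a Result. A small self-contained sketch of the new API, reusing the benchmark's 1..=10_000 id scheme (the helper function is illustrative):

```rust
// Sketch of the rand 0.9 API used above; the main function is only a harness.
use rand::{rngs::SmallRng, Rng, SeedableRng};

fn random_world_id(rng: &mut SmallRng) -> i32 {
    // Same scheme as the benchmark: an id in 1..=10_000.
    (rng.random::<u32>() % 10_000 + 1) as i32
}

fn main() {
    // rand 0.9: rand::rng() replaces thread_rng(), and from_rng is infallible.
    let mut rng = SmallRng::from_rng(&mut rand::rng());
    println!("world id: {}", random_world_id(&mut rng));

    // from_os_rng() replaces the old from_entropy() constructor,
    // and random_range replaces gen_range.
    let mut seeded = SmallRng::from_os_rng();
    println!("range sample: {}", seeded.random_range(1..10_001));
}
```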
29 changes: 15 additions & 14 deletions frameworks/Rust/actix/src/db_diesel.rs
@@ -3,7 +3,7 @@
use std::io;

use actix::prelude::*;
use diesel::{prelude::*, result::Error};
use diesel::prelude::*;
use rand::{rngs::SmallRng, Rng, SeedableRng};

use crate::models;
@@ -18,7 +18,7 @@ impl DbExecutor {
DbExecutor {
conn: PgConnection::establish(db_url)
.unwrap_or_else(|_| panic!("Error connecting to {}", db_url)),
rng: SmallRng::from_entropy(),
rng: SmallRng::from_os_rng(),
}
}
}
@@ -39,10 +39,10 @@ impl Handler<RandomWorld> for DbExecutor {
fn handle(&mut self, _: RandomWorld, _: &mut Self::Context) -> Self::Result {
use crate::schema::world::dsl::*;

let random_id = self.rng.gen_range(1..10_001);
let random_id = self.rng.random_range(1..10_001);
match world
.filter(id.eq(random_id))
.load::<models::World>(&self.conn)
.load::<models::World>(&mut self.conn)
{
Ok(mut items) => Ok(items.pop().unwrap()),
Err(_) => Err(io::Error::new(io::ErrorKind::Other, "Database error")),
@@ -64,8 +64,8 @@ impl Handler<RandomWorlds> for DbExecutor {

let mut worlds = Vec::with_capacity(msg.0 as usize);
for _ in 0..msg.0 {
let w_id = self.rng.gen_range(1..10_001);
let w = match world.filter(id.eq(w_id)).load::<models::World>(&self.conn) {
let w_id = self.rng.random_range(1..10_001);
let w = match world.filter(id.eq(w_id)).load::<models::World>(&mut self.conn) {
Ok(mut items) => items.pop().unwrap(),
Err(_) => {
return Err(io::Error::new(io::ErrorKind::Other, "Database error"));
@@ -91,27 +91,28 @@ impl Handler<UpdateWorld> for DbExecutor {

let mut worlds = Vec::with_capacity(msg.0 as usize);
for _ in 0..msg.0 {
let w_id: i32 = self.rng.gen_range(1..10_001);
let mut w = match world.filter(id.eq(w_id)).load::<models::World>(&self.conn) {
let w_id: i32 = self.rng.random_range(1..10_001);
let mut w = match world.filter(id.eq(w_id)).load::<models::World>(&mut self.conn) {
Ok(mut items) => items.pop().unwrap(),
Err(_) => {
return Err(io::Error::new(io::ErrorKind::Other, "Database error"));
}
};
w.randomnumber = self.rng.gen_range(1..10_001);
w.randomnumber = self.rng.random_range(1..10_001);
worlds.push(w);
}
worlds.sort_by_key(|w| w.id);

let _ = self.conn.transaction::<(), Error, _>(|| {
self.conn.transaction(|conn| {
for w in &worlds {
let _ = diesel::update(world)
diesel::update(world)
.filter(id.eq(w.id))
.set(randomnumber.eq(w.randomnumber))
.execute(&self.conn);
.execute(conn)?;
}
Ok(())
});
})
.map_err(|e: diesel::result::Error| io::Error::new(io::ErrorKind::Other, e))?;

Ok(worlds)
}
@@ -129,7 +130,7 @@ impl Handler<TellFortune> for DbExecutor {
fn handle(&mut self, _: TellFortune, _: &mut Self::Context) -> Self::Result {
use crate::schema::fortune::dsl::*;

match fortune.load::<models::Fortune>(&self.conn) {
match fortune.load::<models::Fortune>(&mut self.conn) {
Ok(mut items) => {
items.push(models::Fortune {
id: 0,
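The db_diesel.rs changes above follow the diesel 1.x to 2.x migration: load and execute now take &mut PgConnection, and Connection::transaction passes a connection into the closure instead of borrowing the captured one, so errors propagate with ? and roll the transaction back. A hedged sketch of that update loop; the table and columns mirror the diff, while the connection URL, helper name, and sample data are illustrative:

```rust
// Illustrative diesel 2.x version of the batch update shown above.
use diesel::prelude::*;

diesel::table! {
    world (id) {
        id -> Int4,
        randomnumber -> Int4,
    }
}

#[derive(Queryable)]
struct World {
    id: i32,
    randomnumber: i32,
}

fn update_worlds(conn: &mut PgConnection, worlds: &[World]) -> QueryResult<()> {
    use self::world::dsl::*;

    // diesel 2.x: the closure receives the connection, and `?` both propagates
    // the error and rolls back the transaction.
    conn.transaction(|conn| {
        for w in worlds {
            diesel::update(world)
                .filter(id.eq(w.id))
                .set(randomnumber.eq(w.randomnumber))
                .execute(conn)?;
        }
        Ok(())
    })
}

fn main() {
    // Placeholder connection URL.
    let mut conn = PgConnection::establish("postgres://localhost/hello_world")
        .expect("error connecting to database");
    let sample = vec![World { id: 1, randomnumber: 42 }];
    update_worlds(&mut conn, &sample).expect("update failed");
}
```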
3 changes: 2 additions & 1 deletion frameworks/Rust/actix/src/main_http.rs
@@ -23,6 +23,7 @@ use crate::{
utils::Writer,
};

#[allow(dead_code)]
#[derive(Debug)]
enum Error {
Pg(PgError),
@@ -134,7 +135,7 @@ impl Service<Request> for App {
}

_ => Box::pin(ok(Response::with_body(
http::StatusCode::NOT_FOUND,
StatusCode::NOT_FOUND,
Bytes::new(),
))),
}
13 changes: 6 additions & 7 deletions frameworks/Rust/actix/src/main_mongodb.rs
@@ -28,12 +28,12 @@ async fn find_random_world(data: web::Data<Data>) -> Result<World> {
let runtime = data.tokio_runtime.clone();
runtime
.spawn(async move {
let mut rng = SmallRng::from_entropy();
let id = (rng.gen::<u32>() % 10_000 + 1) as i32;
let mut rng = SmallRng::from_os_rng();
let id = (rng.random::<u32>() % 10_000 + 1) as i32;

let coll = data.client.database("hello_world").collection("world");
let world = coll
.find_one(doc! { "id": id as f32 }, None)
.find_one(doc! { "id": id as f32 })
.await?
.expect("should find world");
Ok(world)
@@ -101,10 +101,10 @@ async fn updates(

let mut worlds = find_random_worlds(data, query.q).await?;

let mut rng = SmallRng::from_entropy();
let mut rng = SmallRng::from_os_rng();
let mut updates = Vec::new();
for world in worlds.iter_mut() {
let new_random_number = (rng.gen::<u32>() % 10_000 + 1) as i32;
let new_random_number = (rng.random::<u32>() % 10_000 + 1) as i32;
updates.push(doc! {
"q": { "id": world.id }, "u": { "$set": { "randomNumber": new_random_number }}
});
@@ -121,7 +121,6 @@
"updates": updates,
"ordered": false,
},
None,
)
.await
})
@@ -145,7 +144,7 @@ async fn fortune(data: web::Data<Data>) -> Result<HttpResponse<Vec<u8>>> {
let fortunes_cursor = client
.database("hello_world")
.collection::<Fortune>("fortune")
.find(None, None)
.find(doc! {})
.await?;

let mut fortunes: Vec<Fortune> = fortunes_cursor.try_collect().await?;
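The main_mongodb.rs edits above track the mongodb 2.x to 3.x driver, which drops the separate options argument: find_one and find now take only a filter document, with options exposed through builder methods, and the trailing None is likewise gone from run_command. A minimal sketch of the two query calls, assuming an illustrative Fortune struct and a placeholder connection string (the database and collection names come from the diff):

```rust
// Sketch of the mongodb 3.x query API used above; the URI and the Fortune
// struct are illustrative, the collection names match the diff.
use futures::TryStreamExt;
use mongodb::{bson::doc, Client};
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Fortune {
    id: i32,
    message: String,
}

#[tokio::main]
async fn main() -> mongodb::error::Result<()> {
    let client = Client::with_uri_str("mongodb://tfb-database:27017").await?;
    let db = client.database("hello_world");

    // mongodb 3.x: find_one takes just the filter; options move to builder methods.
    let one: Option<Fortune> = db
        .collection::<Fortune>("fortune")
        .find_one(doc! { "id": 1.0 })
        .await?;
    println!("{one:?}");

    // Likewise, find(doc! {}) replaces the old find(None, None).
    let all: Vec<Fortune> = db
        .collection::<Fortune>("fortune")
        .find(doc! {})
        .await?
        .try_collect()
        .await?;
    println!("fetched {} fortunes", all.len());
    Ok(())
}
```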
8 changes: 4 additions & 4 deletions frameworks/Rust/actix/src/main_pg_deadpool.rs
@@ -27,8 +27,8 @@ async fn find_random_world(pool: &Pool) -> Result<World> {
.await
.unwrap();

let mut rng = SmallRng::from_entropy();
let id = (rng.gen::<u32>() % 10_000 + 1) as i32;
let mut rng = SmallRng::from_os_rng();
let id = (rng.random::<u32>() % 10_000 + 1) as i32;

let row = conn.query_one(&world, &[&id]).await?;

@@ -95,14 +95,14 @@ async fn updates(
) -> Result<HttpResponse<Vec<u8>>> {
let mut worlds = find_random_worlds(&data, query.q).await?;

let mut rng = SmallRng::from_entropy();
let mut rng = SmallRng::from_os_rng();

let mut updates = "UPDATE world SET randomnumber = CASE id ".to_string();
let mut params: Vec<&(dyn ToSql + Sync)> = Vec::with_capacity(query.q as usize * 3);

let mut n_params = 1;
for world in worlds.iter_mut() {
let new_random_number = (rng.gen::<u32>() % 10_000 + 1) as i32;
let new_random_number = (rng.random::<u32>() % 10_000 + 1) as i32;
write!(&mut updates, "when ${} then ${} ", n_params, n_params + 1).unwrap();
world.randomnumber = new_random_number;
n_params += 2;
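main_pg_deadpool.rs itself only needed the rand renames shown above, but the Cargo.toml change also moves deadpool-postgres from 0.10 to 0.14. For reference, a hedged sketch of building and using a pool with that release; the host, credentials, and query are placeholders, not values taken from this PR:

```rust
// Illustrative deadpool-postgres 0.14 pool construction; every connection
// parameter below is a placeholder.
use deadpool_postgres::{Config, ManagerConfig, RecyclingMethod, Runtime};
use tokio_postgres::NoTls;

#[tokio::main]
async fn main() {
    let mut cfg = Config::new();
    cfg.host = Some("tfb-database".to_string());
    cfg.dbname = Some("hello_world".to_string());
    cfg.user = Some("benchmarkdbuser".to_string());
    cfg.password = Some("benchmarkdbpass".to_string());
    cfg.manager = Some(ManagerConfig {
        recycling_method: RecyclingMethod::Fast,
    });

    // create_pool takes the async runtime explicitly (deadpool 0.10 and later).
    let pool = cfg
        .create_pool(Some(Runtime::Tokio1), NoTls)
        .expect("failed to build pool");

    // Check a connection out of the pool and run a trivial query.
    let client = pool.get().await.expect("failed to get a connection");
    let row = client.query_one("SELECT 1::INT4", &[]).await.unwrap();
    let one: i32 = row.get(0);
    assert_eq!(one, 1);
}
```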