diff --git a/Cargo.lock b/Cargo.lock
index 32ee61c1..44087546 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1464,6 +1464,23 @@ dependencies = [
  "match_token",
 ]
 
+[[package]]
+name = "http"
+version = "1.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565"
+dependencies = [
+ "bytes",
+ "fnv",
+ "itoa",
+]
+
+[[package]]
+name = "httparse"
+version = "1.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87"
+
 [[package]]
 name = "iana-time-zone"
 version = "0.1.63"
@@ -2591,7 +2608,6 @@ dependencies = [
  "mysql",
  "noise",
  "num-integer",
- "once_cell",
  "pathfinding",
  "percent-encoding",
  "png",
@@ -3380,20 +3396,34 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
 
 [[package]]
 name = "ureq"
-version = "2.12.1"
+version = "3.0.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "02d1a66277ed75f640d608235660df48c8e3c19f3b4edb6a263315626cc3c01d"
+checksum = "b7a3e9af6113ecd57b8c63d3cd76a385b2e3881365f1f489e54f49801d0c83ea"
 dependencies = [
  "base64",
  "flate2",
  "log",
- "once_cell",
+ "percent-encoding",
  "rustls",
+ "rustls-pemfile",
  "rustls-pki-types",
- "url",
+ "ureq-proto",
+ "utf-8",
  "webpki-roots 0.26.11",
 ]
 
+[[package]]
+name = "ureq-proto"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fadf18427d33828c311234884b7ba2afb57143e6e7e69fda7ee883b624661e36"
+dependencies = [
+ "base64",
+ "http",
+ "httparse",
+ "log",
+]
+
 [[package]]
 name = "url"
 version = "2.5.4"
diff --git a/Cargo.toml b/Cargo.toml
index b309ff6c..c5c17bed 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -44,10 +44,9 @@ gix = { version = "0.72", optional = true, default-features = false, features = [
 ] }
 noise = { version = "0.9", optional = true }
 redis = { version = "0.31", optional = true, features = ["ahash"] }
-ureq = { version = "2.12", optional = true }
+ureq = { version = "3.0", optional = true }
 serde = { version = "1.0", optional = true, features = ["derive"] }
 serde_json = { version = "1.0", optional = true }
-once_cell = { version = "1.21", optional = true }
 mysql = { git = "https://github.com/ZeWaka/rust-mysql-simple.git", tag = "v26.0.0", default-features = false, optional = true }
 dashmap = { version = "6.1", optional = true, features = ["rayon", "serde"] }
 zip = { version = "2.6", optional = true }
@@ -137,14 +136,13 @@ hash = [
     "serde",
     "serde_json",
 ]
-http = ["ureq", "serde", "serde_json", "once_cell", "jobs"]
+http = ["ureq", "serde", "serde_json", "jobs"]
 iconforge = [
     "dashmap",
     "dep:dmi",
     "hash",
     "image",
     "jobs",
-    "once_cell",
     "png",
     "rayon",
     "serde",
@@ -156,7 +154,7 @@ json = ["serde", "serde_json"]
 log = ["chrono"]
 sanitize = ["ammonia", "serde_json"]
 sound_len = ["symphonia"]
-sql = ["mysql", "serde", "serde_json", "once_cell", "dashmap", "jobs"]
+sql = ["mysql", "serde", "serde_json", "dashmap", "jobs"]
 time = ["chrono"]
 toml = ["serde", "serde_json", "toml-dep"]
 url = ["url-dep", "percent-encoding"]
diff --git a/build.rs b/build.rs
index 3039dbf0..9a6a1b7c 100644
--- a/build.rs
+++ b/build.rs
@@ -18,18 +18,20 @@ fn main() {
     let mut f = File::create("target/rust_g.dm").unwrap();
 
     // header
-    writeln!(
-        f,
-        "{}",
-        std::fs::read_to_string(feature_dm_file!("main")).unwrap()
-    )
-    .unwrap();
+    let header_content = std::fs::read_to_string(feature_dm_file!("main")).unwrap();
"{header_content}").unwrap(); + + // jobs is a dependency of other features + if feature_dm_exists!("jobs") { + let jobs_content = std::fs::read_to_string(feature_dm_file!("jobs")).unwrap(); + writeln!(f, "{jobs_content}").unwrap(); + } for (key, _value) in std::env::vars() { // CARGO_FEATURE_ — For each activated feature of the package being built, this environment variable will be present where is the name of the feature uppercased and having - translated to _. if let Some(uprfeature) = key.strip_prefix("CARGO_FEATURE_") { let feature = uprfeature.to_lowercase().replace('_', "-"); // actual proper name of the enabled feature - if feature_dm_exists!(&feature) { + if feature != "jobs" && feature_dm_exists!(&feature) { writeln!( f, "{}", diff --git a/dmsrc/http.dm b/dmsrc/http.dm index d63748d6..816b25e4 100644 --- a/dmsrc/http.dm +++ b/dmsrc/http.dm @@ -7,3 +7,106 @@ #define rustg_http_request_blocking(method, url, body, headers, options) RUSTG_CALL(RUST_G, "http_request_blocking")(method, url, body, headers, options) #define rustg_http_request_async(method, url, body, headers, options) RUSTG_CALL(RUST_G, "http_request_async")(method, url, body, headers, options) #define rustg_http_check_request(req_id) RUSTG_CALL(RUST_G, "http_check_request")(req_id) + +// If you don't have the following proc in your codebase, you will need to uncomment it. +/*** +/// Wrapper to let us runtime without killing the current proc, since CRASH only kills the exact proc it was called from +/proc/stack_trace(var/thing_to_crash) + CRASH(thing_to_crash) +***/ + +/datum/http_request + var/id + var/in_progress = FALSE + + var/method + var/body + var/headers + var/url + /// If present, the request body will be read from this file. + var/input_file = null + /// If present, the response body will be saved to this file. + var/output_file = null + /// If present, request will timeout after this duration. + var/timeout_seconds + + var/_raw_response + +/datum/http_request/proc/prepare(method, url, body = "", list/headers, output_file, input_file, timeout_seconds) + if (!length(headers)) + headers = "" + else + headers = json_encode(headers) + + src.method = method + src.url = url + src.body = body + src.headers = headers + src.input_file = input_file + src.output_file = output_file + src.timeout_seconds = timeout_seconds + +/datum/http_request/proc/execute_blocking() + _raw_response = rustg_http_request_blocking(method, url, body, headers, build_options()) + +/datum/http_request/proc/begin_async() + if (in_progress) + CRASH("Attempted to re-use a request object.") + + id = rustg_http_request_async(method, url, body, headers, build_options()) + + if (isnull(text2num(id))) + stack_trace("Proc error: [id]") + _raw_response = "Proc error: [id]" + else + in_progress = TRUE + +/datum/http_request/proc/build_options() + . = json_encode(list( + "input_filename" = (input_file ? input_file : null), + "output_filename" = (output_file ? output_file : null), + "timeout_seconds"=(timeout_seconds ? 
+		"timeout_seconds" = (timeout_seconds ? timeout_seconds : null)
+	))
+
+/datum/http_request/proc/is_complete()
+	if (isnull(id))
+		return TRUE
+
+	if (!in_progress)
+		return TRUE
+
+	var/r = rustg_http_check_request(id)
+
+	if (r == RUSTG_JOB_NO_RESULTS_YET)
+		return FALSE
+	else
+		_raw_response = r
+		in_progress = FALSE
+		return TRUE
+
+/datum/http_request/proc/into_response()
+	var/datum/http_response/R = new()
+
+	try
+		var/list/L = json_decode(_raw_response)
+		R.status_code = L["status_code"]
+		R.headers = L["headers"]
+		R.body = L["body"]
+	catch
+		R.errored = TRUE
+		R.error = _raw_response
+
+	return R
+
+/datum/http_response
+	/// The HTTP status code - e.g., `404`
+	var/status_code
+	/// The response body - e.g., `{ "message": "No query results for xyz." }`
+	var/body
+	/// A list of headers - e.g., `list("Content-Type" = "application/json")`
+	var/list/headers
+	/// If the request errored, this will be TRUE.
+	var/errored = FALSE
+	/// If there was a 4xx/5xx error or the request failed to be sent, this will be the error message - e.g., `"HTTP error: 404"`
+	/// If it's the former, `status_code` will be set.
+	var/error
diff --git a/src/error.rs b/src/error.rs
index 204c8d42..9042745e 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -6,11 +6,21 @@ use std::{
 };
 use thiserror::Error;
 
+#[cfg(feature = "http")]
+use serde_json::Error as JsonError;
+#[cfg(feature = "http")]
+use ureq::Error as UreqError;
+
 #[cfg(feature = "png")]
 use image::error::ImageError;
 #[cfg(feature = "png")]
 use png::{DecodingError, EncodingError};
 
+#[cfg(feature = "toml")]
+use toml_dep::de::Error as TomlDeserializeError;
+#[cfg(feature = "toml")]
+use toml_dep::ser::Error as TomlSerializeError;
+
 #[cfg(feature = "unzip")]
 use zip::result::ZipError;
 
@@ -28,46 +38,60 @@
     Io(#[from] io::Error),
     #[error("Invalid algorithm specified.")]
     InvalidAlgorithm,
-    #[cfg(feature = "png")]
     #[error(transparent)]
-    ImageDecoding(#[from] DecodingError),
-    #[cfg(feature = "png")]
+    ParseInt(#[from] ParseIntError),
     #[error(transparent)]
-    ImageEncoding(#[from] EncodingError),
+    ParseFloat(#[from] ParseFloatError),
+
+    #[cfg(feature = "hash")]
+    #[error("Unable to decode hex value.")]
+    HexDecode,
+
     #[cfg(feature = "http")]
     #[error(transparent)]
-    JsonSerialization(#[from] serde_json::Error),
+    JsonSerialization(#[from] JsonError),
+    #[cfg(feature = "http")]
     #[error(transparent)]
-    ParseInt(#[from] ParseIntError),
+    Request(#[from] Box<UreqError>),
+    #[cfg(feature = "http")]
+    #[error("Unable to parse HTTP arguments: {0}")]
+    HttpParse(String),
+    #[cfg(feature = "http")]
+    #[error("HTTP response over size limit")]
+    HttpTooBig,
+
+    #[cfg(feature = "iconforge")]
+    #[error("IconForge error: {0}")]
+    IconForge(String),
+
+    #[cfg(feature = "png")]
     #[error(transparent)]
-    ParseFloat(#[from] ParseFloatError),
+    ImageDecoding(#[from] DecodingError),
+    #[cfg(feature = "png")]
+    #[error(transparent)]
+    ImageEncoding(#[from] EncodingError),
     #[cfg(feature = "png")]
     #[error(transparent)]
     GenericImage(#[from] ImageError),
     #[cfg(feature = "png")]
     #[error("Invalid png data.")]
     InvalidPngData,
-    #[cfg(feature = "http")]
-    #[error(transparent)]
-    Request(#[from] Box<ureq::Error>),
+
     #[cfg(feature = "sound_len")]
     #[error("SoundLen error: {0}")]
     SoundLen(String),
+
     #[cfg(feature = "toml")]
     #[error(transparent)]
-    TomlDeserialization(#[from] toml_dep::de::Error),
+    TomlDeserialization(#[from] TomlDeserializeError),
     #[cfg(feature = "toml")]
     #[error(transparent)]
-    TomlSerialization(#[from] toml_dep::ser::Error),
+    TomlSerialization(#[from] TomlSerializeError),
+
     #[cfg(feature = "unzip")]
     #[error(transparent)]
     Unzip(#[from] ZipError),
-    #[cfg(feature = "hash")]
-    #[error("Unable to decode hex value.")]
-    HexDecode,
-    #[cfg(feature = "iconforge")]
-    #[error("IconForge error: {0}")]
-    IconForge(String),
+
     #[error("Panic during function execution: {0}")]
     Panic(String),
 }
diff --git a/src/http.rs b/src/http.rs
index 4ebf8c49..3a68d7d0 100644
--- a/src/http.rs
+++ b/src/http.rs
@@ -1,28 +1,35 @@
-use crate::{error::Result, jobs};
-use once_cell::sync::Lazy;
+use crate::{error::Error, error::Result, jobs};
 use serde::{Deserialize, Serialize};
 use std::collections::{BTreeMap, HashMap};
-use std::io::Write;
+use std::fs::File;
+use std::io::{BufWriter, Write};
+use std::sync::LazyLock;
 use std::time::Duration;
+use ureq::http;
 
 // ----------------------------------------------------------------------------
-// Interface
+// DM Interface
 
 #[derive(Deserialize)]
 struct RequestOptions {
     #[serde(default)]
-    output_filename: Option<String>,
+    input_filename: Option<String>,
     #[serde(default)]
-    body_filename: Option<String>,
+    output_filename: Option<String>,
     #[serde(default)]
     timeout_seconds: Option<u64>,
 }
 
 #[derive(Serialize)]
-struct Response<'a> {
+struct Response {
+    /// Will be set to the HTTP status code if the request was sent.
     status_code: u16,
     headers: HashMap<String, String>,
-    body: Option<&'a str>,
+    /// If `body` is `Some`, the request was received. It might still be a 404 or 500.
+    body: Option<String>,
+    /// If `error` is `Some`, either there was a 4xx/5xx error, or the request failed to be sent.
+    /// If it's the former, `status_code` will be set.
+    error: Option<String>,
 }
 
 // If the response can be deserialized -> success.
@@ -61,94 +68,129 @@ byond_fn!(fn http_check_request(id) {
 });
 
 // ----------------------------------------------------------------------------
-// Shared HTTP client state
 
 const VERSION: &str = env!("CARGO_PKG_VERSION");
 const PKG_NAME: &str = env!("CARGO_PKG_NAME");
-
-pub static HTTP_CLIENT: Lazy<ureq::Agent> = Lazy::new(ureq::agent);
+const TLS_FEATURE: &str = if cfg!(feature = "native_tls") {
+    "+native_tls"
+} else if cfg!(feature = "rustls_tls") {
+    "+rustls_tls"
+} else {
+    ""
+};
+
+// Shared HTTP client for all requests (except for those with a custom timeout).
+pub static HTTP_CLIENT: LazyLock<ureq::Agent> = LazyLock::new(|| {
+    ureq::Agent::new_with_config(
+        ureq::Agent::config_builder()
+            .http_status_as_error(false)
+            .user_agent(format!("{PKG_NAME}/{VERSION}{TLS_FEATURE}"))
+            .build(),
+    )
+});
 
 // ----------------------------------------------------------------------------
-// Request construction and execution
 
 struct RequestPrep {
-    req: ureq::Request,
-    output_filename: Option<String>,
-    body: Vec<u8>,
+    builder: http::request::Builder,
+    body: Option<Vec<u8>>,
+    request_options: RequestOptions,
 }
 
 fn construct_request(
     method: &str,
-    url: &str,
+    uri: &str,
     body: &str,
     headers: &str,
     options: &str,
 ) -> Result<RequestPrep> {
-    let mut req = match method {
-        "post" => HTTP_CLIENT.post(url),
-        "put" => HTTP_CLIENT.put(url),
-        "patch" => HTTP_CLIENT.patch(url),
-        "delete" => HTTP_CLIENT.delete(url),
-        "head" => HTTP_CLIENT.head(url),
-        _ => HTTP_CLIENT.get(url),
-    }
-    .set("User-Agent", &format!("{PKG_NAME}/{VERSION}"));
-
-    let mut final_body = body.as_bytes().to_vec();
+    let mut builder = http::request::Builder::new()
+        .method(method.parse().unwrap_or(http::Method::GET))
+        .uri(uri);
 
     if !headers.is_empty() {
         let headers: BTreeMap<&str, &str> = serde_json::from_str(headers)?;
         for (key, value) in headers {
-            req = req.set(key, value);
+            builder = builder.header(key, value);
         }
     }
 
-    let mut output_filename = None;
-    if !options.is_empty() {
-        let options: RequestOptions = serde_json::from_str(options)?;
-        output_filename = options.output_filename;
-        if let Some(fname) = options.body_filename {
-            final_body = std::fs::read(fname)?;
+    let options: RequestOptions = if !options.is_empty() {
+        serde_json::from_str(options)?
+    } else {
+        RequestOptions {
+            output_filename: None,
+            input_filename: None,
+            timeout_seconds: None,
         }
+    };
 
-        if let Some(timeout_seconds) = options.timeout_seconds {
-            req = req.timeout(Duration::from_secs(timeout_seconds));
-        }
-    }
+    let body_to_send = if let Some(fname) = options.input_filename.clone() {
+        Some(std::fs::read(fname)?)
+    } else if !body.is_empty() {
+        Some(body.as_bytes().to_vec())
+    } else {
+        None
+    };
 
     Ok(RequestPrep {
-        req,
-        output_filename,
-        body: final_body,
+        builder,
+        request_options: options,
+        body: body_to_send,
     })
 }
 
 fn submit_request(prep: RequestPrep) -> Result<String> {
-    let response = prep.req.send_bytes(&prep.body).map_err(Box::new)?;
-
-    let body;
-    let mut resp = Response {
-        status_code: response.status(),
-        headers: HashMap::new(),
-        body: None,
+    let request = prep
+        .builder
+        .body(prep.body.unwrap_or_default())
+        .map_err(|e| Error::HttpParse(e.to_string()))?;
+
+    // Use the default HTTP_CLIENT if no timeout is specified,
+    // otherwise create a new agent with the specified timeout.
+    let mut response = match prep.request_options.timeout_seconds {
+        Some(timeout_seconds) => {
+            let agent = ureq::Agent::new_with_config(
+                ureq::Agent::config_builder()
+                    .http_status_as_error(false)
+                    .user_agent(format!("{PKG_NAME}/{VERSION}{TLS_FEATURE}"))
+                    .timeout_global(Some(Duration::from_secs(timeout_seconds)))
+                    .build(),
+            );
+            agent.run(request).map_err(Box::new)?
+        }
+        None => HTTP_CLIENT.run(request).map_err(Box::new)?,
     };
 
-    for key in response.headers_names() {
-        let Some(value) = response.header(&key) else {
-            continue;
-        };
+    let headers: HashMap<String, String> = response
+        .headers()
+        .iter()
+        .filter_map(|(k, v)| Some((k.to_string(), v.to_str().ok()?.to_owned())))
+        .collect();
 
-        resp.headers.insert(key, value.to_owned());
-    }
-
-    if let Some(output_filename) = prep.output_filename {
-        let mut writer = std::io::BufWriter::new(std::fs::File::create(output_filename)?);
-        std::io::copy(&mut response.into_reader(), &mut writer)?;
+    let body = if let Some(output_filename) = prep.request_options.output_filename {
+        let mut writer = BufWriter::new(File::create(output_filename)?);
+        let mut reader = response.body_mut().as_reader();
+        std::io::copy(&mut reader, &mut writer)?;
         writer.flush()?;
+        None
     } else {
-        body = response.into_string()?;
-        resp.body = Some(&body);
-    }
+        Some(response.body_mut().read_to_string().map_err(Box::new)?)
+    };
+
+    let status_code = response.status().as_u16();
+    let error = if (400..600).contains(&status_code) {
+        Some(format!("HTTP error: {status_code}"))
+    } else {
+        None
+    };
+
+    let resp = Response {
+        status_code,
+        headers,
+        body,
+        error,
+    };
 
     Ok(serde_json::to_string(&resp)?)
 }
diff --git a/src/iconforge.rs b/src/iconforge.rs
index 22877b1a..5e64091d 100644
--- a/src/iconforge.rs
+++ b/src/iconforge.rs
@@ -12,10 +12,8 @@ use dmi::{
     icon::{DmiVersion, Icon, IconState},
 };
 use image::{DynamicImage, Pixel, RgbaImage};
-use once_cell::sync::Lazy;
 use rayon::iter::{IntoParallelIterator, IntoParallelRefIterator, ParallelIterator};
 use serde::{Deserialize, Serialize};
-use std::collections::HashSet;
 use std::sync::RwLock;
 use std::{
     collections::HashMap,
@@ -24,21 +22,22 @@ use std::{
     io::BufReader,
     sync::{Arc, Mutex},
 };
+use std::{collections::HashSet, sync::LazyLock};
 use tracy_full::{frame, zone};
 use twox_hash::XxHash64;
 type SpriteJsonMap = HashMap<u64, HashMap<String, IconObjectIO>, BuildHasherDefault<XxHash64>>;
 /// This is used to save time decoding 'sprites' a second time between the cache step and the generate step.
-static SPRITES_TO_JSON: Lazy<Arc<Mutex<SpriteJsonMap>>> = Lazy::new(|| {
+static SPRITES_TO_JSON: LazyLock<Arc<Mutex<SpriteJsonMap>>> = LazyLock::new(|| {
     Arc::new(Mutex::new(HashMap::with_hasher(BuildHasherDefault::<
         XxHash64,
     >::default())))
 });
 /// A cache of DMI filepath -> Icon objects.
-static ICON_FILES: Lazy<DashMap<String, Arc<Icon>, BuildHasherDefault<XxHash64>>> =
-    Lazy::new(|| DashMap::with_hasher(BuildHasherDefault::<XxHash64>::default()));
+static ICON_FILES: LazyLock<DashMap<String, Arc<Icon>, BuildHasherDefault<XxHash64>>> =
+    LazyLock::new(|| DashMap::with_hasher(BuildHasherDefault::<XxHash64>::default()));
 /// A cache of icon_hash_input to RgbaImage (with transforms applied! This can only contain COMPLETED sprites).
-static ICON_STATES: Lazy<DashMap<String, Arc<RgbaImage>, BuildHasherDefault<XxHash64>>> =
-    Lazy::new(|| DashMap::with_hasher(BuildHasherDefault::<XxHash64>::default()));
+static ICON_STATES: LazyLock<DashMap<String, Arc<RgbaImage>, BuildHasherDefault<XxHash64>>> =
+    LazyLock::new(|| DashMap::with_hasher(BuildHasherDefault::<XxHash64>::default()));
 
 byond_fn!(fn iconforge_generate(file_path, spritesheet_name, sprites, hash_icons) {
     let file_path = file_path.to_owned();
@@ -1121,7 +1120,7 @@ struct GAGSData {
     config_icon: Arc<Icon>,
 }
 
-static GAGS_CACHE: Lazy<DashMap<String, GAGSData>> = Lazy::new(DashMap::new);
+static GAGS_CACHE: LazyLock<DashMap<String, GAGSData>> = LazyLock::new(DashMap::new);
 
 /// Loads a GAGS config and the requested DMIs into memory for use by iconforge_gags()
 fn load_gags_config(
diff --git a/src/sql.rs b/src/sql.rs
index b631089d..c9123037 100644
--- a/src/sql.rs
+++ b/src/sql.rs
@@ -5,9 +5,9 @@ use mysql::{
     prelude::Queryable,
     OptsBuilder, Params, Pool, PoolConstraints, PoolOpts,
 };
-use once_cell::sync::Lazy;
 use serde::Deserialize;
 use serde_json::{json, map::Map, Number};
+use std::sync::LazyLock;
 use std::{collections::HashMap, sync::atomic::AtomicUsize};
 use std::{error::Error, time::Duration};
 
@@ -106,7 +106,7 @@ byond_fn!(fn sql_check_query(id) {
 });
 // ----------------------------------------------------------------------------
 // Main connect and query implementation
-static POOL: Lazy<DashMap<usize, Pool>> = Lazy::new(DashMap::new);
+static POOL: LazyLock<DashMap<usize, Pool>> = LazyLock::new(DashMap::new);
 static NEXT_ID: AtomicUsize = AtomicUsize::new(0);
 
 fn sql_connect(options: ConnectOptions) -> Result<serde_json::Value, Box<dyn Error>> {
diff --git a/src/unzip.rs b/src/unzip.rs
index b0a471a8..e565e397 100644
--- a/src/unzip.rs
+++ b/src/unzip.rs
@@ -5,7 +5,7 @@ use std::path::Path;
 use zip::ZipArchive;
 
 struct UnzipPrep {
-    req: ureq::Request,
+    req: ureq::RequestBuilder<ureq::typestate::WithoutBody>,
     unzip_directory: String,
 }
 
@@ -28,10 +28,23 @@ byond_fn!(fn unzip_download_async(url, unzip_directory) {
 fn do_unzip_download(prep: UnzipPrep) -> Result<String> {
     let unzip_path = Path::new(&prep.unzip_directory);
 
-    let response = prep.req.send_bytes(&[]).map_err(Box::new)?;
+    let response = prep.req.call().map_err(Box::new)?;
 
-    let mut content = Vec::new();
-    response.into_reader().read_to_end(&mut content)?;
+    const LIMIT: u64 = 100 * 1024 * 1024; // 100MB
+    let content_length: u64 = response
+        .headers()
+        .get("Content-Length")
+        .and_then(|s| s.to_str().ok())
+        .and_then(|s| s.parse().ok())
+        .unwrap_or(0);
+    if content_length > LIMIT {
+        return Err(crate::error::Error::HttpTooBig);
+    }
+    let mut binding = response.into_body();
+    let body = binding.with_config().limit(LIMIT);
+    let content = body
+        .read_to_vec()
+        .map_err(|e| crate::error::Error::HttpParse(e.to_string()))?;
 
     let reader = std::io::Cursor::new(content);
     let mut archive = ZipArchive::new(reader)?;
diff --git a/tests/dm-tests.rs b/tests/dm-tests.rs
index 96058787..3c48c50e 100644
--- a/tests/dm-tests.rs
+++ b/tests/dm-tests.rs
@@ -52,6 +52,12 @@ fn run_dm_tests(name: &str) {
     };
     let rust_g = format!("target/{target_dir}/{profile}/{fname}");
 
+    // Remove test-only comment blocks in target/rust_g.dm
+    let dm_path = "target/rust_g.dm";
+    let mut rust_g_dm = std::fs::read_to_string(dm_path).unwrap();
+    rust_g_dm = rust_g_dm.replace("/***", "").replace("***/", "");
+    std::fs::write(dm_path, &rust_g_dm).unwrap();
+
    let output = Command::new("bash")
        .arg(&byondexec)
        .arg(&dream_maker)
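Example usage of the new DM wrapper, as a minimal sketch for illustration only (not part of the patch; the proc name, URL, header list, and file path are hypothetical). It issues an async GET with a 10-second timeout, streams the body to a file via output_file, polls the job until it finishes, and then inspects the decoded /datum/http_response:

/proc/example_fetch_async()
	var/datum/http_request/request = new()
	// Arguments: method, url, body, headers (list), output_file, input_file, timeout_seconds
	request.prepare(RUSTG_HTTP_METHOD_GET, "https://example.com/data.json", "", list("Accept" = "application/json"), "data/fetched.json", null, 10)
	request.begin_async()
	while (!request.is_complete())
		sleep(1)
	var/datum/http_response/response = request.into_response()
	if (response.errored || response.status_code != 200)
		CRASH("HTTP request failed: [response.error || response.status_code]")
	// Because output_file was set, the body was written to data/fetched.json and response.body is null here.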
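The low-level macros can also be driven directly. A hypothetical blocking call is sketched below (again illustrative, not part of the patch). The option keys mirror the RequestOptions struct in src/http.rs, where the old body_filename key is now input_filename, and the returned string is the serialized Response with status_code, headers, body, and error fields:

/proc/example_post_blocking()
	var/headers = json_encode(list("Content-Type" = "application/json"))
	// Read the request body from data/payload.json and give up after 30 seconds.
	var/options = json_encode(list("input_filename" = "data/payload.json", "timeout_seconds" = 30))
	var/raw = rustg_http_request_blocking(RUSTG_HTTP_METHOD_POST, "https://example.com/upload", "", headers, options)
	// If the request could not be sent at all, raw is a plain error string rather than JSON;
	// /datum/http_request/into_response() handles that case with try/catch.
	var/list/response = json_decode(raw)
	return response["status_code"]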