Skip to content

Commit df9f7f7

Browse files
removes X-Robots-Tag header on every request; adds setting for custom response headers
1 parent 8403993 commit df9f7f7

File tree

2 files changed

+6
-2
lines changed

2 files changed

+6
-2
lines changed

crates/common/src/settings.rs

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ use error_stack::{Report, ResultExt};
55
use regex::Regex;
66
use serde::{de::DeserializeOwned, Deserialize, Deserializer, Serialize};
77
use serde_json::Value as JsonValue;
8+
use std::collections::HashMap;
89
use std::sync::OnceLock;
910
use url::Url;
1011
use validator::{Validate, ValidationError};
@@ -134,6 +135,8 @@ pub struct Settings {
134135
#[serde(default, deserialize_with = "vec_from_seq_or_map")]
135136
#[validate(nested)]
136137
pub handlers: Vec<Handler>,
138+
#[serde(default)]
139+
pub response_headers: HashMap<String, String>,
137140
}
138141

139142
#[allow(unused)]

crates/fastly/src/main.rs

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -84,8 +84,9 @@ async fn route_request(settings: Settings, req: Request) -> Result<Response, Err
8484
// Convert any errors to HTTP error responses
8585
let mut response = result.unwrap_or_else(to_error_response);
8686

87-
// Add X-Robots-Tag header to prevent crawlers and indexers
88-
response.set_header("X-Robots-Tag", "noindex, nofollow");
87+
for (key, value) in &settings.response_headers {
88+
response.set_header(key, value);
89+
}
8990

9091
Ok(response)
9192
}

0 commit comments

Comments
 (0)