mirror of https://github.com/mat-1/metasearch2.git synced 2025-08-02 15:26:04 +00:00

Compare commits


5 commits

Author  SHA1        Message                                 Date
mat     77a5ddcd3f  bump version                            2025-07-06 21:39:01 +00:00
mat     9be8627c7b  dst fixes for timezone answer engine    2025-07-06 21:37:49 +00:00
mat     97d86a4fbf  clippy                                  2025-07-06 21:30:12 +00:00
mat     7349a8b461  upgrade deps                            2025-07-06 21:29:42 +00:00
mat     ad1fb8a9af  csrf and xss fix                        2025-07-06 21:25:04 +00:00
                    (thanks @JorianWoltjer <3)
8 changed files with 550 additions and 465 deletions

Cargo.lock (generated): 931 changed lines

File diff suppressed because it is too large

View file

@@ -1,6 +1,6 @@
[package]
name = "metasearch"
version = "0.2.3"
version = "0.2.4"
edition = "2021"
build = "src/build.rs"
description = "a cute metasearch engine"
@@ -10,9 +10,9 @@ repository = "https://github.com/mat-1/metasearch2"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
ammonia = "4.0.0"
ammonia = "4.1.0"
async-stream = "0.3.6"
axum = { version = "0.8.1", default-features = false, features = [
axum = { version = "0.8.4", default-features = false, features = [
"tokio",
"http1",
"http2",
@@ -20,21 +20,21 @@ axum = { version = "0.8.1", default-features = false, features = [
"json",
"form",
] }
axum-extra = { version = "0.10.0", features = ["cookie"] }
axum-extra = { version = "0.10.1", features = ["cookie"] }
base64 = "0.22.1"
bytes = "1.10.1"
chrono = "0.4.40"
chrono-tz = { version = "0.10.1", features = ["case-insensitive"] }
chrono = "0.4.41"
chrono-tz = { version = "0.10.3", features = ["case-insensitive"] }
eyre = "0.6.12"
fend-core = "1.5.5"
fend-core = "1.5.6"
futures = "0.3.31"
html-escape = "0.2.13"
maud = "0.27.0"
numbat = "1.16.0"
parking_lot = "0.12.3"
rand = "0.9.0"
parking_lot = "0.12.4"
rand = "0.9.1"
regex = "1.11.1"
reqwest = { version = "0.12.14", default-features = false, features = [
reqwest = { version = "0.12.22", default-features = false, features = [
"rustls-tls",
"gzip",
"deflate",
@@ -44,11 +44,11 @@ scraper = "0.23.1"
serde = { version = "1.0.219", features = ["derive"] }
# preserve_order is needed for google images. yippee!
serde_json = { version = "1.0.140", features = ["preserve_order"] }
tokio = { version = "1.44.1", features = ["rt", "macros"] }
tokio = { version = "1.46.1", features = ["rt", "macros"] }
tokio-stream = "0.1.17"
toml = { version = "0.8.20", default-features = false, features = ["parse"] }
toml = { version = "0.8.23", default-features = false, features = ["parse"] }
tower = "0.5.2"
tower-http = "0.6.2"
tower-http = "0.6.6"
tracing = "0.1.41"
tracing-subscriber = "0.3.19"
url = "2.5.4"

View file

@@ -14,6 +14,6 @@ fn main() {
Ok(output) => String::from_utf8(output.stdout).unwrap_or("unknown".into()),
Err(_) => "unknown".into(),
};
println!("cargo:rustc-env=GIT_HASH={}", git_hash);
println!("cargo:rustc-env=GIT_HASH_SHORT={}", git_hash_short);
println!("cargo:rustc-env=GIT_HASH={git_hash}");
println!("cargo:rustc-env=GIT_HASH_SHORT={git_hash_short}");
}
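
The build-script change inlines the format arguments into the format strings, which is what clippy's uninlined_format_args lint asks for (the lint name is my inference from the "clippy" commit message; the commit itself does not name it). A minimal before/after with a placeholder value:

fn main() {
    let git_hash = "0123abcd"; // placeholder value, for illustration only
    // positional argument, the form clippy::uninlined_format_args flags
    println!("cargo:rustc-env=GIT_HASH={}", git_hash);
    // inline captured identifier, as in the updated build script
    println!("cargo:rustc-env=GIT_HASH={git_hash}");
}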

View file

@@ -79,7 +79,7 @@ fn is_potential_request(query: &str) -> bool {
true
}
-fn interpret(query: &str) -> Option<(Statement, Markup)> {
+fn interpret(query: &str) -> Option<(Statement<'_>, Markup)> {
if !is_potential_request(query) {
return None;
}

View file

@@ -104,7 +104,7 @@ fn evaluate(query: &str) -> Option<TimeResponse> {
let source_time_utc = chrono::Utc
.from_local_datetime(&source_time_naive)
.latest()?
-    - source_offset.base_utc_offset();
+    - (source_offset.base_utc_offset() + source_offset.dst_offset());
let source_time = source_time_utc.with_timezone(&source_timezone);
let target_time = source_time_utc.with_timezone(&target_timezone);
@@ -141,6 +141,7 @@ fn evaluate(query: &str) -> Option<TimeResponse> {
fn parse_timezone(timezone_name: &str) -> Option<Tz> {
match timezone_name.to_lowercase().as_str() {
"cst" | "cdt" => Some(Tz::CST6CDT),
"est" | "edt" => Some(Tz::EST5EDT),
_ => Tz::from_str_insensitive(timezone_name)
.ok()
.or_else(|| Tz::from_str_insensitive(&format!("etc/{timezone_name}")).ok()),
@@ -150,6 +151,7 @@ fn parse_timezone(timezone_name: &str) -> Option<Tz> {
fn timezone_to_string(tz: Tz) -> String {
match tz {
Tz::CST6CDT => "CST".to_string(),
+Tz::EST5EDT => "EST".to_string(),
_ => {
let tz_string = tz.name();
if let Some(tz_string) = tz_string.strip_prefix("Etc/") {
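
The DST fix above subtracts both offset components (base plus DST) when converting the parsed local time to UTC; the old code used only the base offset, so answers were an hour off for zones currently observing daylight saving. The new "est" | "edt" branch mirrors the existing CST one, presumably so those abbreviations resolve to the DST-aware EST5EDT zone rather than the tz database's fixed-offset EST zone. A minimal sketch of the offsets involved, assuming the chrono/chrono-tz versions from the Cargo.toml above; the date and zones are my choice, for illustration:

use chrono::TimeZone;
use chrono_tz::{OffsetComponents, Tz};

fn main() {
    // a mid-summer local time, when US Eastern observes daylight saving
    let naive = chrono::NaiveDate::from_ymd_opt(2025, 7, 6)
        .unwrap()
        .and_hms_opt(12, 0, 0)
        .unwrap();

    // the tz database's plain "EST" zone is a fixed -05:00 with no DST component...
    let est = Tz::EST.offset_from_local_datetime(&naive).unwrap();
    // ...while EST5EDT carries a +1h DST component in summer
    let est5edt = Tz::EST5EDT.offset_from_local_datetime(&naive).unwrap();

    assert_eq!(est.dst_offset().num_hours(), 0);
    assert_eq!(est5edt.base_utc_offset().num_hours(), -5);
    assert_eq!(est5edt.dst_offset().num_hours(), 1);

    // so converting a parsed local time to UTC needs base + dst, as in the fix:
    // utc = local - (base_utc_offset() + dst_offset())
}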

View file

@@ -6,6 +6,7 @@ use axum::{
response::{IntoResponse, Response},
Extension,
};
+use reqwest::header;
use tracing::error;
use crate::{config::Config, engines};
@@ -42,6 +43,9 @@ pub async fn route(
if res.content_length().unwrap_or_default() > max_size {
return (StatusCode::PAYLOAD_TOO_LARGE, "Image too large").into_response();
}
+const ALLOWED_IMAGE_TYPES: &[&str] = &["apng", "avif", "gif", "jpeg", "png", "webp"];
// validate content-type
let content_type = res
.headers()
@@ -49,8 +53,15 @@
.and_then(|v| v.to_str().ok())
.unwrap_or_default()
.to_string();
-if !content_type.starts_with("image/") {
-return (StatusCode::BAD_REQUEST, "Not an image").into_response();
+let Some((base_type, subtype)) = content_type.split_once("/") else {
+return (StatusCode::UNSUPPORTED_MEDIA_TYPE, "Invalid Content-Type").into_response();
+};
+if base_type != "image" {
+return (StatusCode::UNSUPPORTED_MEDIA_TYPE, "Not an image").into_response();
}
+if !ALLOWED_IMAGE_TYPES.contains(&subtype) {
+return (StatusCode::UNSUPPORTED_MEDIA_TYPE, "Image type not allowed").into_response();
+}
let mut image_bytes = Vec::new();
@@ -63,11 +74,10 @@
(
[
-(axum::http::header::CONTENT_TYPE, content_type),
-(
-axum::http::header::CACHE_CONTROL,
-"public, max-age=31536000".to_owned(),
-),
+(header::CONTENT_TYPE, content_type),
+(header::CACHE_CONTROL, "public, max-age=31536000".to_owned()),
+(header::X_CONTENT_TYPE_OPTIONS, "nosniff".to_owned()),
+(header::CONTENT_DISPOSITION, "attachment".to_owned()),
],
image_bytes,
)
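
The proxy hardening in this file restricts proxied responses to an explicit allowlist of image subtypes (a plain starts_with("image/") check would still let image/svg+xml through, and SVG can carry scripts), and adds X-Content-Type-Options: nosniff plus Content-Disposition: attachment so browsers neither sniff the body into another type nor render it inline. A rough standalone sketch of the subtype check; the helper name is mine, not a function in the repo:

const ALLOWED_IMAGE_TYPES: &[&str] = &["apng", "avif", "gif", "jpeg", "png", "webp"];

// illustrative helper mirroring the route's validation, not the actual route code
fn is_allowed_image_type(content_type: &str) -> bool {
    match content_type.split_once('/') {
        Some(("image", subtype)) => ALLOWED_IMAGE_TYPES.contains(&subtype),
        _ => false,
    }
}

fn main() {
    assert!(is_allowed_image_type("image/png"));
    // still "image/*", but SVG can embed script, so the allowlist rejects it
    assert!(!is_allowed_image_type("image/svg+xml"));
    assert!(!is_allowed_image_type("text/html"));
}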

View file

@@ -25,7 +25,7 @@ fn render_image_result(
// serialize url params
let escaped_param =
url::form_urlencoded::byte_serialize(original_image_src.as_bytes()).collect::<String>();
format!("/image-proxy?url={}", escaped_param)
format!("/image-proxy?url={escaped_param}")
} else {
original_image_src.to_string()
};

View file

@@ -1,6 +1,6 @@
use axum::{
-http::{header, StatusCode},
-response::IntoResponse,
+http::{header, HeaderMap, StatusCode},
+response::{IntoResponse, Response},
Extension, Form,
};
use axum_extra::extract::{cookie::Cookie, CookieJar};
@@ -69,10 +69,24 @@ pub struct Settings {
pub stylesheet_str: String,
}
-pub async fn post(mut jar: CookieJar, Form(settings): Form<Settings>) -> impl IntoResponse {
+pub async fn post(
+headers: HeaderMap,
+mut jar: CookieJar,
+Form(settings): Form<Settings>,
+) -> Response {
+let Some(origin) = headers.get("origin").and_then(|h| h.to_str().ok()) else {
+return (StatusCode::BAD_REQUEST, "Missing or invalid Origin header").into_response();
+};
+let Some(host) = headers.get("host").and_then(|h| h.to_str().ok()) else {
+return (StatusCode::BAD_REQUEST, "Missing or invalid Host header").into_response();
+};
+if origin != format!("http://{host}") && origin != format!("https://{host}") {
+return (StatusCode::BAD_REQUEST, "Origin does not match Host").into_response();
+}
let mut settings_cookie = Cookie::new("settings", serde_json::to_string(&settings).unwrap());
settings_cookie.make_permanent();
jar = jar.add(settings_cookie);
-(StatusCode::FOUND, [(header::LOCATION, "/settings")], jar)
+(StatusCode::FOUND, [(header::LOCATION, "/settings")], jar).into_response()
}
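
The settings handler now accepts the form only when the request's Origin header matches its own Host, which blocks cross-site form posts; this appears to be the CSRF half of the "csrf and xss fix" commit, with the image-proxy hardening covering the XSS half. A rough standalone sketch of the same comparison; the function name and hostnames are illustrative, not from the repo:

fn origin_matches_host(origin: &str, host: &str) -> bool {
    origin == format!("http://{host}") || origin == format!("https://{host}")
}

fn main() {
    // a form submitted from the site itself carries a matching Origin and is accepted
    assert!(origin_matches_host("https://search.example.com", "search.example.com"));
    // a cross-site post carries the attacking page's origin and is rejected
    assert!(!origin_matches_host("https://evil.example.net", "search.example.com"));
    // in the handler, requests missing a parseable Origin or Host are rejected earlier with 400
}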