Merge branch 'LemmyNet-main' into dev

Tangel, 2024-03-15 13:55:09 +00:00, committed by GitHub
commit 3b1b2d8427
4 changed files with 20 additions and 7 deletions

@@ -1,6 +1,6 @@
 [package]
 name = "activitypub_federation"
-version = "0.5.1"
+version = "0.5.2"
 edition = "2021"
 description = "High-level Activitypub framework"
 keywords = ["activitypub", "activitystreams", "federation", "fediverse"]

@@ -57,11 +57,14 @@ pub async fn fetch_object_http<T: Clone, Kind: DeserializeOwned>(
   static ALT_CONTENT_TYPE: HeaderValue = HeaderValue::from_static(
     r#"application/ld+json; profile="https://www.w3.org/ns/activitystreams""#,
   );
+  static ALT_CONTENT_TYPE_MASTODON: HeaderValue =
+    HeaderValue::from_static(r#"application/activity+json; charset=utf-8"#);
   let res = fetch_object_http_with_accept(url, data, &CONTENT_TYPE).await?;
 
   // Ensure correct content-type to prevent vulnerabilities.
   if res.content_type.as_ref() != Some(&CONTENT_TYPE)
     && res.content_type.as_ref() != Some(&ALT_CONTENT_TYPE)
+    && res.content_type.as_ref() != Some(&ALT_CONTENT_TYPE_MASTODON)
   {
     return Err(Error::FetchInvalidContentType(res.url));
   }
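
The new ALT_CONTENT_TYPE_MASTODON constant is needed because HeaderValue equality is an exact byte comparison: Mastodon replies with an extra charset parameter, so its responses would otherwise fail the check above. A minimal standalone sketch of that comparison, using the http crate directly; the helper name and the literal value of the canonical content type are assumptions for illustration, not this crate's API:

// Sketch only: HeaderValue equality does not normalize parameters, so every
// accepted Content-Type spelling needs its own entry.
use http::header::HeaderValue;

fn is_accepted_content_type(value: &HeaderValue) -> bool {
  const ACCEPTED: [&str; 3] = [
    // canonical ActivityPub media type (assumed value of CONTENT_TYPE)
    "application/activity+json",
    // JSON-LD form carrying the ActivityStreams profile
    r#"application/ld+json; profile="https://www.w3.org/ns/activitystreams""#,
    // Mastodon answers with an extra charset parameter
    "application/activity+json; charset=utf-8",
  ];
  ACCEPTED.iter().any(|accepted| value.as_bytes() == accepted.as_bytes())
}

fn main() {
  let mastodon = HeaderValue::from_static("application/activity+json; charset=utf-8");
  assert!(is_accepted_content_type(&mastodon));
}
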
@@ -86,7 +89,9 @@ async fn fetch_object_http_with_accept<T: Clone, Kind: DeserializeOwned>(
   config.verify_url_valid(url).await?;
   info!("Fetching remote object {}", url.to_string());
 
-  // let counter = data.request_counter.fetch_add(1, Ordering::SeqCst);
+  // let mut counter = data.request_counter.fetch_add(1, Ordering::SeqCst);
+  // fetch_add returns old value so we need to increment manually here
+  // counter += 1;
   // if counter > config.http_fetch_limit {
   //   return Err(Error::RequestLimit);
   // }
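
The added increment in the commented-out limiter matters because fetch_add returns the value before the addition, as the new comment notes. A self-contained illustration of that behavior; the request_counter name mirrors the commented code above, and the AtomicU32 type is an assumption:

use std::sync::atomic::{AtomicU32, Ordering};

fn main() {
  let request_counter = AtomicU32::new(0);
  // fetch_add returns the value *before* the addition...
  let mut counter = request_counter.fetch_add(1, Ordering::SeqCst);
  assert_eq!(counter, 0);
  // ...so the local copy has to be bumped to reflect the request just made.
  counter += 1;
  assert_eq!(counter, 1);
  assert_eq!(request_counter.load(Ordering::SeqCst), 1);
}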

@@ -197,9 +197,9 @@ static ACTOR_REFETCH_INTERVAL_SECONDS_DEBUG: i64 = 20;
 fn should_refetch_object(last_refreshed: DateTime<Utc>) -> bool {
   let update_interval = if cfg!(debug_assertions) {
     // avoid infinite loop when fetching community outbox
-    ChronoDuration::seconds(ACTOR_REFETCH_INTERVAL_SECONDS_DEBUG)
+    ChronoDuration::try_seconds(ACTOR_REFETCH_INTERVAL_SECONDS_DEBUG).expect("valid duration")
   } else {
-    ChronoDuration::seconds(ACTOR_REFETCH_INTERVAL_SECONDS)
+    ChronoDuration::try_seconds(ACTOR_REFETCH_INTERVAL_SECONDS).expect("valid duration")
   };
   let refresh_limit = Utc::now() - update_interval;
   last_refreshed.lt(&refresh_limit)
@@ -362,10 +362,10 @@ pub mod tests {
   #[test]
   fn test_should_refetch_object() {
-    let one_second_ago = Utc::now() - ChronoDuration::seconds(1);
+    let one_second_ago = Utc::now() - ChronoDuration::try_seconds(1).unwrap();
     assert!(!should_refetch_object(one_second_ago));
 
-    let two_days_ago = Utc::now() - ChronoDuration::days(2);
+    let two_days_ago = Utc::now() - ChronoDuration::try_days(2).unwrap();
     assert!(should_refetch_object(two_days_ago));
   }
 }
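
Both hunks above move from chrono's panicking Duration constructors to the try_* variants, which return None when the requested span would overflow the supported range, so .expect()/.unwrap() on small constants like these intervals is safe. A small standalone check of that behavior, using plain chrono and nothing from this crate:

use chrono::Duration;

fn main() {
  let interval = Duration::try_seconds(20).expect("valid duration");
  assert_eq!(interval.num_seconds(), 20);
  // An out-of-range request is reported as None rather than a panic.
  assert!(Duration::try_seconds(i64::MAX).is_none());
}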

@@ -121,7 +121,7 @@ where
   T: Clone,
 {
   static WEBFINGER_REGEX: Lazy<Regex> =
-    Lazy::new(|| Regex::new(r"^acct:([\p{L}0-9_]+)@(.*)$").expect("compile regex"));
+    Lazy::new(|| Regex::new(r"^acct:([\p{L}0-9_\.\-]+)@(.*)$").expect("compile regex"));
   // Regex to extract usernames from webfinger query. Supports different alphabets using `\p{L}`.
   // TODO: This should use a URL parser
   let captures = WEBFINGER_REGEX
@@ -288,6 +288,14 @@ mod tests {
       Ok("Владимир"),
       extract_webfinger_name("acct:Владимир@example.com", &data)
     );
+    assert_eq!(
+      Ok("example.com"),
+      extract_webfinger_name("acct:example.com@example.com", &data)
+    );
+    assert_eq!(
+      Ok("da-sh"),
+      extract_webfinger_name("acct:da-sh@example.com", &data)
+    );
     assert_eq!(
       Ok("تجريب"),
       extract_webfinger_name("acct:تجريب@example.com", &data)