Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion apps/labrinth/AGENTS.md

This file was deleted.

20 changes: 20 additions & 0 deletions apps/labrinth/AGENTS.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
- Use `ApiError` as the error type for API routes
- Prefer `ApiError::Internal` and `ApiError::Request` over `ApiError::InvalidInput`
- Use `eyre!` to construct a value for `Internal` and `Request` variants
- Error messages (both for errors and exceptions) must be formatted as per the Rust API guidelines:
- lowercase message
- no trailing punctuation
- wrap code items e.g. type names in backticks
- Prefer `wrap_internal_err` and `wrap_request_err` when attaching context to an existing error (like Anyhow's `context` or Eyre's `wrap_err`)
- All operations should ideally have some context attached
- Database operations can have a message like `.wrap_internal_err("failed to fetch XYZ")`
- You can perform real-time queries against the databases in the Docker Compose
- `docker exec labrinth-postgres psql -c "select 1"`
- `docker exec labrinth-redis redis-cli flushall`
- `docker exec labrinth-clickhouse clickhouse-client --query "select 1"`
- On some machines, you may have to use `podman` instead of `docker` - check which one is available first
- Hardcoded credentials for admin:
- `Authorization: Bearer mra_admin` for default admin user
- `Authorization: Bearer mra_user` for a regular user
- `Modrinth-Admin: feedbeef` as admin key
- If some steps require you to create a project/mod or version for testing, ask the user to go into the web frontend and manually create a project/version
41 changes: 35 additions & 6 deletions apps/labrinth/src/routes/internal/admin.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,9 @@ use crate::search::SearchBackend;
use crate::util::date::get_current_tenths_of_ms;
use crate::util::error::Context;
use crate::util::guards::admin_key_guard;
use crate::util::tags::valid_download_tags;
use actix_web::{HttpRequest, HttpResponse, patch, post, web};
use eyre::eyre;
use serde::Deserialize;
use std::collections::HashMap;
use std::net::Ipv4Addr;
Expand Down Expand Up @@ -40,9 +42,9 @@ pub struct DownloadBody {
/// [`DOWNLOAD_META_HEADER`] header.
#[derive(Debug, Clone, Deserialize)]
pub struct DownloadMeta {
pub reason: DownloadReason,
pub game_version: String,
pub loader: String,
pub reason: Option<DownloadReason>,
pub game_version: Option<String>,
pub loader: Option<String>,
}

pub const DOWNLOAD_META_HEADER: &str = "modrinth-download-meta";
Expand Down Expand Up @@ -139,6 +141,27 @@ pub async fn count_download(
None
};

if let Some(meta) = &meta {
let valid_download_tags = valid_download_tags(&pool, &redis)
.await
.wrap_internal_err("failed to fetch valid download tags")?;
if let Some(loader) = &meta.loader
&& !valid_download_tags.loaders.contains(loader)
{
return Err(ApiError::Request(eyre!(
"invalid download loader specified"
)));
}

if let Some(game_version) = &meta.game_version
&& !valid_download_tags.game_versions.contains(game_version)
{
return Err(ApiError::Request(eyre!(
"invalid download game version specified"
)));
}
}

let download = Download {
recorded: get_current_tenths_of_ms(),
domain: url.host_str().unwrap_or_default().to_string(),
Expand Down Expand Up @@ -176,13 +199,19 @@ pub async fn count_download(
.collect(),
reason: meta
.as_ref()
.map(|m| m.reason.to_string())
.and_then(|m| m.reason.as_ref())
.map(|s| s.to_string())
.unwrap_or_default(),
game_version: meta
.as_ref()
.map(|m| m.game_version.clone())
.and_then(|m| m.game_version.as_ref())
.map(|s| s.to_string())
.unwrap_or_default(),
loader: meta
.as_ref()
.and_then(|m| m.loader.as_ref())
.map(|s| s.to_string())
.unwrap_or_default(),
loader: meta.as_ref().map(|m| m.loader.clone()).unwrap_or_default(),
};
trace!("added download {download:#?}");

Expand Down
1 change: 1 addition & 0 deletions apps/labrinth/src/util/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,5 +17,6 @@ pub mod ratelimit;
pub mod redis;
pub mod routes;
pub mod sentry;
pub mod tags;
pub mod validate;
pub mod webhook;
76 changes: 76 additions & 0 deletions apps/labrinth/src/util/tags.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
use crate::database::PgPool;
use crate::database::models::legacy_loader_fields::MinecraftGameVersion;
use crate::database::models::loader_fields::Loader;
use crate::database::redis::RedisPool;
use crate::routes::ApiError;
use crate::util::error::Context;
use arc_swap::ArcSwapOption;
use std::collections::HashSet;
use std::sync::Arc;
use std::time::{Duration, Instant};
use tokio::sync::Mutex;

/// Cached set of valid loaders and game version tags.
///
/// Fetched using [`valid_download_tags`].
#[derive(Debug)]
pub struct DownloadTagsCache {
// Instant after which this entry is considered stale; compared against
// `Instant::now()` by `valid_download_tags` before the entry is reused.
expires: Instant,
/// Loader names considered valid for download metadata.
pub loaders: HashSet<String>,
/// Game version strings considered valid for download metadata.
pub game_versions: HashSet<String>,
}

/// Fetches download tags from the database or returns a cached version.
///
/// We cache tags since we get a large volume of download ingests, and querying
/// the database or even Redis for each request is too expensive.
pub async fn valid_download_tags(
pool: &PgPool,
redis: &RedisPool,
) -> Result<Arc<DownloadTagsCache>, ApiError> {
const DOWNLOAD_TAGS_CACHE_TTL: Duration = Duration::from_secs(60 * 5);

static DOWNLOAD_TAGS_CACHE: ArcSwapOption<DownloadTagsCache> =
ArcSwapOption::const_empty();
static DOWNLOAD_TAGS_CACHE_REFRESH_LOCK: Mutex<()> = Mutex::const_new(());

// Fast path: hand out the current entry lock-free if it has not expired.
if let Some(entry) = DOWNLOAD_TAGS_CACHE.load_full()
&& entry.expires > Instant::now()
{
return Ok(entry);
}

// Slow path: serialize refreshes so only one task hits the database.
let _refresh_guard = DOWNLOAD_TAGS_CACHE_REFRESH_LOCK.lock().await;

// Another task may have refreshed the cache while we waited for the
// lock — re-check before doing any work ourselves.
let now = Instant::now();
if let Some(entry) = DOWNLOAD_TAGS_CACHE.load_full()
&& entry.expires > now
{
return Ok(entry);
}

let loaders = Loader::list(pool, redis)
.await
.wrap_internal_err("failed to fetch loaders")?
.into_iter()
.map(|record| record.loader)
.collect();
let game_versions = MinecraftGameVersion::list(None, None, pool, redis)
.await
.wrap_internal_err("failed to fetch game versions")?
.into_iter()
.map(|record| record.version)
.collect();

// Publish the refreshed entry; concurrent readers pick it up lock-free.
let entry = Arc::new(DownloadTagsCache {
expires: now + DOWNLOAD_TAGS_CACHE_TTL,
loaders,
game_versions,
});
DOWNLOAD_TAGS_CACHE.store(Some(Arc::clone(&entry)));

Ok(entry)
}
Comment on lines +27 to +76
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Would update the cache asynchronously or at least use cache locking through a read/write lock to avoid cache stampede

Loading