diff --git a/apps/frontend/nuxt.config.ts b/apps/frontend/nuxt.config.ts index e2d9bfe113..0b3aa90b0f 100644 --- a/apps/frontend/nuxt.config.ts +++ b/apps/frontend/nuxt.config.ts @@ -161,7 +161,7 @@ export default defineNuxtConfig({ (state.errors ?? []).length === 0 ) { console.log( - 'Tags already recently generated. Delete apps/frontend/generated/state.json to force regeneration.', + 'Tags already recently generated. Delete apps/frontend/src/generated/state.json to force regeneration.', ) return } diff --git a/apps/labrinth/.env.docker-compose b/apps/labrinth/.env.docker-compose index 45be8c8281..eb04c737ca 100644 --- a/apps/labrinth/.env.docker-compose +++ b/apps/labrinth/.env.docker-compose @@ -128,7 +128,7 @@ PYRO_API_KEY=none BREX_API_URL=https://platform.brexapis.com/v2/ BREX_API_KEY=none -DELPHI_URL=none +DELPHI_URL=http://labrinth-delphi:59999 DELPHI_SLACK_WEBHOOK=none AVALARA_1099_API_URL=https://www.track1099.com/api diff --git a/apps/labrinth/.sqlx/query-0080a101c9ae040adbaadf9e46fbc457a08e70dcde320c6852074819e41f8ad9.json b/apps/labrinth/.sqlx/query-0080a101c9ae040adbaadf9e46fbc457a08e70dcde320c6852074819e41f8ad9.json new file mode 100644 index 0000000000..37dcad2943 --- /dev/null +++ b/apps/labrinth/.sqlx/query-0080a101c9ae040adbaadf9e46fbc457a08e70dcde320c6852074819e41f8ad9.json @@ -0,0 +1,24 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO delphi_report_issue_java_classes (issue_id, internal_class_name, decompiled_source)\n VALUES ($1, $2, $3)\n ON CONFLICT (issue_id, internal_class_name) DO UPDATE SET decompiled_source = $3\n RETURNING id\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8", + "Text", + "Text" + ] + }, + "nullable": [ + false + ] + }, + "hash": "0080a101c9ae040adbaadf9e46fbc457a08e70dcde320c6852074819e41f8ad9" +} diff --git a/apps/labrinth/.sqlx/query-0ed2e6e3149352d12a673fddc50f9530c311eef084abb6fce35de5f37d79bcea.json b/apps/labrinth/.sqlx/query-0ed2e6e3149352d12a673fddc50f9530c311eef084abb6fce35de5f37d79bcea.json new file mode 100644 index 0000000000..6f7b991949 --- /dev/null +++ b/apps/labrinth/.sqlx/query-0ed2e6e3149352d12a673fddc50f9530c311eef084abb6fce35de5f37d79bcea.json @@ -0,0 +1,34 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n version_id AS \"version_id: crate::database::models::DBVersionId\",\n versions.mod_id AS \"project_id: crate::database::models::DBProjectId\",\n files.url AS \"url\"\n FROM files INNER JOIN versions ON files.version_id = versions.id\n WHERE files.id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "version_id: crate::database::models::DBVersionId", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "project_id: crate::database::models::DBProjectId", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "url", + "type_info": "Varchar" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + false, + false + ] + }, + "hash": "0ed2e6e3149352d12a673fddc50f9530c311eef084abb6fce35de5f37d79bcea" +} diff --git a/apps/labrinth/.sqlx/query-10a332091be118f580d50ceb7a8724e9a4d5b9765d52305f99f859f939c2e854.json b/apps/labrinth/.sqlx/query-10a332091be118f580d50ceb7a8724e9a4d5b9765d52305f99f859f939c2e854.json new file mode 100644 index 0000000000..31ff65e350 --- /dev/null +++ b/apps/labrinth/.sqlx/query-10a332091be118f580d50ceb7a8724e9a4d5b9765d52305f99f859f939c2e854.json @@ -0,0 +1,35 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO delphi_report_issues 
(report_id, issue_type, status)\n VALUES ($1, $2, $3)\n ON CONFLICT (report_id, issue_type) DO UPDATE SET status = $3\n RETURNING id\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8", + "Text", + { + "Custom": { + "name": "delphi_report_issue_status", + "kind": { + "Enum": [ + "pending", + "approved", + "rejected" + ] + } + } + } + ] + }, + "nullable": [ + false + ] + }, + "hash": "10a332091be118f580d50ceb7a8724e9a4d5b9765d52305f99f859f939c2e854" +} diff --git a/apps/labrinth/.sqlx/query-54ab8449978fce405ad46a8720ed299224bea59fb593310cfae59e41cc322da5.json b/apps/labrinth/.sqlx/query-54ab8449978fce405ad46a8720ed299224bea59fb593310cfae59e41cc322da5.json new file mode 100644 index 0000000000..2349d6c90a --- /dev/null +++ b/apps/labrinth/.sqlx/query-54ab8449978fce405ad46a8720ed299224bea59fb593310cfae59e41cc322da5.json @@ -0,0 +1,126 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n delphi_report_issues.id AS \"id\", report_id,\n issue_type,\n delphi_report_issues.status AS \"status: DelphiReportIssueStatus\",\n\n file_id, delphi_version, artifact_url, created, severity AS \"severity: DelphiReportSeverity\",\n json_array(SELECT to_jsonb(delphi_report_issue_java_classes)\n FROM delphi_report_issue_java_classes\n WHERE issue_id = delphi_report_issues.id\n ) AS \"classes: sqlx::types::Json>\",\n versions.mod_id AS \"project_id?\", mods.published AS \"project_published?\"\n FROM delphi_report_issues\n INNER JOIN delphi_reports ON delphi_reports.id = report_id\n LEFT OUTER JOIN files ON files.id = file_id\n LEFT OUTER JOIN versions ON versions.id = files.version_id\n LEFT OUTER JOIN mods ON mods.id = versions.mod_id\n WHERE\n (issue_type = $1 OR $1 IS NULL)\n AND (delphi_report_issues.status = $2 OR $2 IS NULL)\n ORDER BY\n CASE WHEN $3 = 'created_asc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END ASC,\n CASE WHEN $3 = 'created_desc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END DESC,\n CASE WHEN $3 = 'pending_status_first' THEN delphi_report_issues.status ELSE 'pending'::delphi_report_issue_status END ASC,\n CASE WHEN $3 = 'severity_asc' THEN delphi_reports.severity ELSE 'low'::delphi_report_severity END ASC,\n CASE WHEN $3 = 'severity_desc' THEN delphi_reports.severity ELSE 'low'::delphi_report_severity END DESC\n OFFSET $5\n LIMIT $4\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "report_id", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "issue_type", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "status: DelphiReportIssueStatus", + "type_info": { + "Custom": { + "name": "delphi_report_issue_status", + "kind": { + "Enum": [ + "pending", + "approved", + "rejected" + ] + } + } + } + }, + { + "ordinal": 4, + "name": "file_id", + "type_info": "Int8" + }, + { + "ordinal": 5, + "name": "delphi_version", + "type_info": "Int4" + }, + { + "ordinal": 6, + "name": "artifact_url", + "type_info": "Varchar" + }, + { + "ordinal": 7, + "name": "created", + "type_info": "Timestamptz" + }, + { + "ordinal": 8, + "name": "severity: DelphiReportSeverity", + "type_info": { + "Custom": { + "name": "delphi_report_severity", + "kind": { + "Enum": [ + "low", + "medium", + "high", + "severe" + ] + } + } + } + }, + { + "ordinal": 9, + "name": "classes: sqlx::types::Json>", + "type_info": "Jsonb" + }, + { + "ordinal": 10, + "name": "project_id?", + "type_info": "Int8" + }, + { + "ordinal": 11, + "name": 
"project_published?", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Text", + { + "Custom": { + "name": "delphi_report_issue_status", + "kind": { + "Enum": [ + "pending", + "approved", + "rejected" + ] + } + } + }, + "Text", + "Int8", + "Int8" + ] + }, + "nullable": [ + false, + false, + false, + false, + true, + false, + false, + false, + false, + null, + true, + true + ] + }, + "hash": "54ab8449978fce405ad46a8720ed299224bea59fb593310cfae59e41cc322da5" +} diff --git a/apps/labrinth/.sqlx/query-f2054ae7dcc89b21ed6b2f04526de1e7cddd68ac956143bef994104280a8dc07.json b/apps/labrinth/.sqlx/query-f2054ae7dcc89b21ed6b2f04526de1e7cddd68ac956143bef994104280a8dc07.json new file mode 100644 index 0000000000..cc1c7b84f3 --- /dev/null +++ b/apps/labrinth/.sqlx/query-f2054ae7dcc89b21ed6b2f04526de1e7cddd68ac956143bef994104280a8dc07.json @@ -0,0 +1,37 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO delphi_reports (file_id, delphi_version, artifact_url, severity)\n VALUES ($1, $2, $3, $4)\n ON CONFLICT (file_id, delphi_version) DO UPDATE SET\n delphi_version = $2, artifact_url = $3, created = CURRENT_TIMESTAMP, severity = $4\n RETURNING id\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8", + "Int4", + "Varchar", + { + "Custom": { + "name": "delphi_report_severity", + "kind": { + "Enum": [ + "low", + "medium", + "high", + "severe" + ] + } + } + } + ] + }, + "nullable": [ + false + ] + }, + "hash": "f2054ae7dcc89b21ed6b2f04526de1e7cddd68ac956143bef994104280a8dc07" +} diff --git a/apps/labrinth/.sqlx/query-fe571872262fe7d119b4b6eb1e55d818fde0499d8e5a08e9e22bee42014877f3.json b/apps/labrinth/.sqlx/query-fe571872262fe7d119b4b6eb1e55d818fde0499d8e5a08e9e22bee42014877f3.json new file mode 100644 index 0000000000..38db606828 --- /dev/null +++ b/apps/labrinth/.sqlx/query-fe571872262fe7d119b4b6eb1e55d818fde0499d8e5a08e9e22bee42014877f3.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT MAX(delphi_version) FROM delphi_reports", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "max", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + null + ] + }, + "hash": "fe571872262fe7d119b4b6eb1e55d818fde0499d8e5a08e9e22bee42014877f3" +} diff --git a/apps/labrinth/migrations/20250810155316_delphi-reports.sql b/apps/labrinth/migrations/20250810155316_delphi-reports.sql new file mode 100644 index 0000000000..d717d09f9d --- /dev/null +++ b/apps/labrinth/migrations/20250810155316_delphi-reports.sql @@ -0,0 +1,44 @@ +CREATE TYPE delphi_report_severity AS ENUM ('low', 'medium', 'high', 'severe'); + +CREATE TYPE delphi_report_issue_status AS ENUM ('pending', 'approved', 'rejected'); + +-- A Delphi analysis report for a project version +CREATE TABLE delphi_reports ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + file_id BIGINT REFERENCES files (id) + ON DELETE SET NULL + ON UPDATE CASCADE, + delphi_version INTEGER NOT NULL, + artifact_url VARCHAR(2048) NOT NULL, + created TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL, + severity DELPHI_REPORT_SEVERITY NOT NULL, + UNIQUE (file_id, delphi_version) +); +CREATE INDEX delphi_version ON delphi_reports (delphi_version); + +-- An issue found in a Delphi report. 
Every issue belongs to a report, +-- and a report can have zero, one, or more issues attached to it +CREATE TABLE delphi_report_issues ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + report_id BIGINT NOT NULL REFERENCES delphi_reports (id) + ON DELETE CASCADE + ON UPDATE CASCADE, + issue_type TEXT NOT NULL, + status DELPHI_REPORT_ISSUE_STATUS NOT NULL, + UNIQUE (report_id, issue_type) +); +CREATE INDEX delphi_report_issue_by_status_and_type ON delphi_report_issues (status, issue_type); + +-- A Java class affected by a Delphi report issue. Every affected +-- Java class belongs to a specific issue, and an issue can have zero, +-- one, or more affected classes. (Some issues may be artifact-wide, +-- or otherwise not really specific to any particular class.) +CREATE TABLE delphi_report_issue_java_classes ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + issue_id BIGINT NOT NULL REFERENCES delphi_report_issues (id) + ON DELETE CASCADE + ON UPDATE CASCADE, + internal_class_name TEXT NOT NULL, + decompiled_source TEXT, + UNIQUE (issue_id, internal_class_name) +); diff --git a/apps/labrinth/src/database/models/delphi_report_item.rs b/apps/labrinth/src/database/models/delphi_report_item.rs new file mode 100644 index 0000000000..d7757be92c --- /dev/null +++ b/apps/labrinth/src/database/models/delphi_report_item.rs @@ -0,0 +1,307 @@ +use std::{ + fmt::{self, Display, Formatter}, + ops::Deref, +}; + +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; + +use crate::database::models::{ + DBFileId, DBProjectId, DatabaseError, DelphiReportId, DelphiReportIssueId, + DelphiReportIssueJavaClassId, +}; + +/// A Delphi malware analysis report for a project version file. +/// +/// Malware analysis reports usually belong to a specific project file, +/// but they can get orphaned if the versions they belong to are deleted. +/// Thus, deleting versions does not delete these reports. +#[derive(Serialize)] +pub struct DBDelphiReport { + pub id: DelphiReportId, + pub file_id: Option, + /// A sequential, monotonically increasing version number for the + /// Delphi version that generated this report. + pub delphi_version: i32, + pub artifact_url: String, + pub created: DateTime, + pub severity: DelphiReportSeverity, +} + +impl DBDelphiReport { + pub async fn upsert( + &self, + transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, + ) -> Result { + Ok(DelphiReportId(sqlx::query_scalar!( + " + INSERT INTO delphi_reports (file_id, delphi_version, artifact_url, severity) + VALUES ($1, $2, $3, $4) + ON CONFLICT (file_id, delphi_version) DO UPDATE SET + delphi_version = $2, artifact_url = $3, created = CURRENT_TIMESTAMP, severity = $4 + RETURNING id + ", + self.file_id as Option, + self.delphi_version, + self.artifact_url, + self.severity as DelphiReportSeverity, + ) + .fetch_one(&mut **transaction) + .await?)) + } +} + +/// A severity level for a Delphi report. +#[derive( + Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq, Hash, sqlx::Type, +)] +#[serde(rename_all = "UPPERCASE")] +#[sqlx(type_name = "delphi_report_severity", rename_all = "snake_case")] +pub enum DelphiReportSeverity { + Low, + Medium, + High, + Severe, +} + +/// An issue found in a Delphi report. Every issue belongs to a report, +/// and a report can have zero, one, or more issues attached to it. 
+#[derive(Deserialize, Serialize)]
+pub struct DBDelphiReportIssue {
+    pub id: DelphiReportIssueId,
+    pub report_id: DelphiReportId,
+    pub issue_type: String,
+    pub status: DelphiReportIssueStatus,
+}
+
+/// A status a Delphi report issue can have.
+#[derive(
+    Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq, Hash, sqlx::Type,
+)]
+#[serde(rename_all = "snake_case")]
+#[sqlx(type_name = "delphi_report_issue_status", rename_all = "snake_case")]
+pub enum DelphiReportIssueStatus {
+    /// The issue is pending review by the moderation team.
+    Pending,
+    /// The issue has been approved (i.e., reviewed as a valid, true positive).
+    /// The affected artifact has thus been verified to be potentially malicious.
+    Approved,
+    /// The issue has been rejected (i.e., reviewed as a false positive).
+    /// The affected artifact has thus been verified to be clean, other issues
+    /// with it notwithstanding.
+    Rejected,
+}
+
+impl Display for DelphiReportIssueStatus {
+    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+        self.serialize(f)
+    }
+}
+
+/// An order in which Delphi report issues can be sorted during queries.
+#[derive(Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[serde(rename_all = "snake_case")]
+pub enum DelphiReportListOrder {
+    CreatedAsc,
+    CreatedDesc,
+    PendingStatusFirst,
+    SeverityAsc,
+    SeverityDesc,
+}
+
+impl Display for DelphiReportListOrder {
+    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+        self.serialize(f)
+    }
+}
+
+/// A result returned from a Delphi report issue query, slightly
+/// denormalized with related entity information for ease of
+/// consumption by clients.
+#[derive(Serialize)]
+pub struct DelphiReportIssueResult {
+    pub issue: DBDelphiReportIssue,
+    pub report: DBDelphiReport,
+    pub java_classes: Vec<DBDelphiReportIssueJavaClass>,
+    pub project_id: Option<DBProjectId>,
+    pub project_published: Option<DateTime<Utc>>,
+}
+
+impl DBDelphiReportIssue {
+    pub async fn upsert(
+        &self,
+        transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
+    ) -> Result<DelphiReportIssueId, DatabaseError> {
+        Ok(DelphiReportIssueId(
+            sqlx::query_scalar!(
+                "
+                INSERT INTO delphi_report_issues (report_id, issue_type, status)
+                VALUES ($1, $2, $3)
+                ON CONFLICT (report_id, issue_type) DO UPDATE SET status = $3
+                RETURNING id
+                ",
+                self.report_id as DelphiReportId,
+                self.issue_type,
+                self.status as DelphiReportIssueStatus,
+            )
+            .fetch_one(&mut **transaction)
+            .await?,
+        ))
+    }
+
+    pub async fn find_all_by(
+        ty: Option<String>,
+        status: Option<DelphiReportIssueStatus>,
+        order_by: Option<DelphiReportListOrder>,
+        count: Option<u64>,
+        offset: Option<i64>,
+        exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
+    ) -> Result<Vec<DelphiReportIssueResult>, DatabaseError> {
+        Ok(sqlx::query!(
+            r#"
+            SELECT
+                delphi_report_issues.id AS "id", report_id,
+                issue_type,
+                delphi_report_issues.status AS "status: DelphiReportIssueStatus",
+
+                file_id, delphi_version, artifact_url, created, severity AS "severity: DelphiReportSeverity",
+                json_array(SELECT to_jsonb(delphi_report_issue_java_classes)
+                    FROM delphi_report_issue_java_classes
+                    WHERE issue_id = delphi_report_issues.id
+                ) AS "classes: sqlx::types::Json<Vec<DBDelphiReportIssueJavaClass>>",
+                versions.mod_id AS "project_id?", mods.published AS "project_published?"
+            FROM delphi_report_issues
+            INNER JOIN delphi_reports ON delphi_reports.id = report_id
+            LEFT OUTER JOIN files ON files.id = file_id
+            LEFT OUTER JOIN versions ON versions.id = files.version_id
+            LEFT OUTER JOIN mods ON mods.id = versions.mod_id
+            WHERE
+                (issue_type = $1 OR $1 IS NULL)
+                AND (delphi_report_issues.status = $2 OR $2 IS NULL)
+            ORDER BY
+                CASE WHEN $3 = 'created_asc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END ASC,
+                CASE WHEN $3 = 'created_desc' THEN delphi_reports.created ELSE TO_TIMESTAMP(0) END DESC,
+                CASE WHEN $3 = 'pending_status_first' THEN delphi_report_issues.status ELSE 'pending'::delphi_report_issue_status END ASC,
+                CASE WHEN $3 = 'severity_asc' THEN delphi_reports.severity ELSE 'low'::delphi_report_severity END ASC,
+                CASE WHEN $3 = 'severity_desc' THEN delphi_reports.severity ELSE 'low'::delphi_report_severity END DESC
+            OFFSET $5
+            LIMIT $4
+            "#,
+            ty,
+            status as Option<DelphiReportIssueStatus>,
+            order_by.map(|order_by| order_by.to_string()),
+            count.map(|count| count as i64),
+            offset,
+        )
+        .map(|row| DelphiReportIssueResult {
+            issue: DBDelphiReportIssue {
+                id: DelphiReportIssueId(row.id),
+                report_id: DelphiReportId(row.report_id),
+                issue_type: row.issue_type,
+                status: row.status,
+            },
+            report: DBDelphiReport {
+                id: DelphiReportId(row.report_id),
+                file_id: row.file_id.map(DBFileId),
+                delphi_version: row.delphi_version,
+                artifact_url: row.artifact_url,
+                created: row.created,
+                severity: row.severity,
+            },
+            java_classes: row
+                .classes
+                .into_iter()
+                .flat_map(|class_list| class_list.0)
+                .collect(),
+            project_id: row.project_id.map(DBProjectId),
+            project_published: row.project_published,
+        })
+        .fetch_all(exec)
+        .await?)
+    }
+}
+
+/// A Java class affected by a Delphi report issue. Every affected
+/// Java class belongs to a specific issue, and an issue can have zero,
+/// one, or more affected classes. (Some issues may be artifact-wide,
+/// or otherwise not really specific to any particular class.)
+#[derive(Debug, Deserialize, Serialize)]
+pub struct DBDelphiReportIssueJavaClass {
+    pub id: DelphiReportIssueJavaClassId,
+    pub issue_id: DelphiReportIssueId,
+    pub internal_class_name: InternalJavaClassName,
+    pub decompiled_source: Option<DecompiledJavaClassSource>,
+}
+
+impl DBDelphiReportIssueJavaClass {
+    pub async fn upsert(
+        &self,
+        transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
+    ) -> Result<DelphiReportIssueJavaClassId, DatabaseError> {
+        Ok(DelphiReportIssueJavaClassId(sqlx::query_scalar!(
+            "
+            INSERT INTO delphi_report_issue_java_classes (issue_id, internal_class_name, decompiled_source)
+            VALUES ($1, $2, $3)
+            ON CONFLICT (issue_id, internal_class_name) DO UPDATE SET decompiled_source = $3
+            RETURNING id
+            ",
+            self.issue_id as DelphiReportIssueId,
+            self.internal_class_name.0,
+            self.decompiled_source.as_ref().map(|decompiled_source| &decompiled_source.0),
+        )
+        .fetch_one(&mut **transaction)
+        .await?))
+    }
+}
+
+/// A [Java class name] with dots replaced by forward slashes (/).
+///
+/// Because class names are usually the [binary names] passed to a classloader, the binary name of a
+/// top-level class or interface matches its canonical, fully qualified name. That canonical name is
+/// prefixed by the package the class belongs to, and for typical classloaders packages mirror the
+/// directory structure within a JAR, so this name usually (but not necessarily) corresponds to the
+/// path of the class file within its JAR.
+/// +/// [Java class name]: https://docs.oracle.com/en/java/javase/21/docs/api/java.base/java/lang/Class.html#getName() +/// [binary names]: https://docs.oracle.com/javase/specs/jls/se21/html/jls-13.html#jls-13.1 +#[derive( + Deserialize, Serialize, Debug, Clone, PartialEq, Eq, Hash, sqlx::Type, +)] +#[serde(transparent)] +#[sqlx(transparent)] +pub struct InternalJavaClassName(String); + +impl Deref for InternalJavaClassName { + type Target = String; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl Display for InternalJavaClassName { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +/// The decompiled source code of a Java class. +#[derive( + Deserialize, Serialize, Debug, Clone, PartialEq, Eq, Hash, sqlx::Type, +)] +#[serde(transparent)] +#[sqlx(transparent)] +pub struct DecompiledJavaClassSource(String); + +impl Deref for DecompiledJavaClassSource { + type Target = String; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl Display for DecompiledJavaClassSource { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} diff --git a/apps/labrinth/src/database/models/ids.rs b/apps/labrinth/src/database/models/ids.rs index 483f6841e5..6ec97476fe 100644 --- a/apps/labrinth/src/database/models/ids.rs +++ b/apps/labrinth/src/database/models/ids.rs @@ -140,8 +140,8 @@ macro_rules! db_id_interface { }; } -macro_rules! short_id_type { - ($name:ident) => { +macro_rules! id_type { + ($name:ident as $type:ty) => { #[derive( Copy, Clone, @@ -154,7 +154,7 @@ macro_rules! short_id_type { Hash, )] #[sqlx(transparent)] - pub struct $name(pub i32); + pub struct $name(pub $type); }; } @@ -268,14 +268,17 @@ db_id_interface!( generator: generate_affiliate_code_id @ "affiliate_codes", ); -short_id_type!(CategoryId); -short_id_type!(GameId); -short_id_type!(LinkPlatformId); -short_id_type!(LoaderFieldEnumId); -short_id_type!(LoaderFieldEnumValueId); -short_id_type!(LoaderFieldId); -short_id_type!(LoaderId); -short_id_type!(NotificationActionId); -short_id_type!(ProjectTypeId); -short_id_type!(ReportTypeId); -short_id_type!(StatusId); +id_type!(CategoryId as i32); +id_type!(GameId as i32); +id_type!(LinkPlatformId as i32); +id_type!(LoaderFieldEnumId as i32); +id_type!(LoaderFieldEnumValueId as i32); +id_type!(LoaderFieldId as i32); +id_type!(LoaderId as i32); +id_type!(NotificationActionId as i32); +id_type!(ProjectTypeId as i32); +id_type!(ReportTypeId as i32); +id_type!(StatusId as i32); +id_type!(DelphiReportId as i64); +id_type!(DelphiReportIssueId as i64); +id_type!(DelphiReportIssueJavaClassId as i64); diff --git a/apps/labrinth/src/database/models/mod.rs b/apps/labrinth/src/database/models/mod.rs index fa43cff7fa..0be248125e 100644 --- a/apps/labrinth/src/database/models/mod.rs +++ b/apps/labrinth/src/database/models/mod.rs @@ -4,6 +4,7 @@ pub mod affiliate_code_item; pub mod categories; pub mod charge_item; pub mod collection_item; +pub mod delphi_report_item; pub mod flow_item; pub mod friend_item; pub mod ids; diff --git a/apps/labrinth/src/database/models/version_item.rs b/apps/labrinth/src/database/models/version_item.rs index c97a43e50c..d0ba9c0b97 100644 --- a/apps/labrinth/src/database/models/version_item.rs +++ b/apps/labrinth/src/database/models/version_item.rs @@ -6,6 +6,7 @@ use crate::database::models::loader_fields::{ }; use crate::database::redis::RedisPool; use crate::models::projects::{FileType, VersionStatus}; +use crate::routes::internal::delphi::DelphiRunParameters; use 
chrono::{DateTime, Utc}; use dashmap::{DashMap, DashSet}; use futures::TryStreamExt; @@ -164,6 +165,17 @@ impl VersionFileBuilder { .await?; } + if let Err(err) = crate::routes::internal::delphi::run( + &mut **transaction, + DelphiRunParameters { + file_id: file_id.into(), + }, + ) + .await + { + tracing::error!("Error submitting new file to Delphi: {err}"); + } + Ok(file_id) } } diff --git a/apps/labrinth/src/models/v3/projects.rs b/apps/labrinth/src/models/v3/projects.rs index dabceccc50..a1b742e1e4 100644 --- a/apps/labrinth/src/models/v3/projects.rs +++ b/apps/labrinth/src/models/v3/projects.rs @@ -5,7 +5,7 @@ use crate::database::models::loader_fields::VersionField; use crate::database::models::project_item::{LinkUrl, ProjectQueryResult}; use crate::database::models::version_item::VersionQueryResult; use crate::models::ids::{ - OrganizationId, ProjectId, TeamId, ThreadId, VersionId, + FileId, OrganizationId, ProjectId, TeamId, ThreadId, VersionId, }; use ariadne::ids::UserId; use chrono::{DateTime, Utc}; @@ -716,6 +716,7 @@ impl From for Version { .files .into_iter() .map(|f| VersionFile { + id: Some(FileId(f.id.0 as u64)), url: f.url, filename: f.filename, hashes: f.hashes, @@ -840,6 +841,10 @@ impl VersionStatus { /// A single project file, with a url for the file and the file's hash #[derive(Serialize, Deserialize, Clone)] pub struct VersionFile { + /// The ID of the file. Every file has an ID once created, but it + /// is not known until it indeed has been created. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub id: Option, /// A map of hashes of the file. The key is the hashing algorithm /// and the value is the string version of the hash. pub hashes: std::collections::HashMap, diff --git a/apps/labrinth/src/routes/internal/admin.rs b/apps/labrinth/src/routes/internal/admin.rs index 9d9de66797..945737f90c 100644 --- a/apps/labrinth/src/routes/internal/admin.rs +++ b/apps/labrinth/src/routes/internal/admin.rs @@ -1,13 +1,10 @@ use crate::auth::validate::get_user_record_from_bearer_token; -use crate::database::models::thread_item::ThreadMessageBuilder; use crate::database::redis::RedisPool; use crate::models::analytics::Download; use crate::models::ids::ProjectId; use crate::models::pats::Scopes; -use crate::models::threads::MessageBody; use crate::queue::analytics::AnalyticsQueue; use crate::queue::maxmind::MaxMindIndexer; -use crate::queue::moderation::AUTOMOD_ID; use crate::queue::session::AuthQueue; use crate::routes::ApiError; use crate::search::SearchConfig; @@ -17,17 +14,14 @@ use actix_web::{HttpRequest, HttpResponse, patch, post, web}; use serde::Deserialize; use sqlx::PgPool; use std::collections::HashMap; -use std::fmt::Write; use std::net::Ipv4Addr; use std::sync::Arc; -use tracing::info; pub fn config(cfg: &mut web::ServiceConfig) { cfg.service( web::scope("admin") .service(count_download) - .service(force_reindex) - .service(delphi_result_ingest), + .service(force_reindex), ); } @@ -163,98 +157,3 @@ pub async fn force_reindex( index_projects(pool.as_ref().clone(), redis.clone(), &config).await?; Ok(HttpResponse::NoContent().finish()) } - -#[derive(Deserialize)] -pub struct DelphiIngest { - pub url: String, - pub project_id: crate::models::ids::ProjectId, - pub version_id: crate::models::ids::VersionId, - pub issues: HashMap>, -} - -#[post("/_delphi", guard = "admin_key_guard")] -pub async fn delphi_result_ingest( - pool: web::Data, - redis: web::Data, - body: web::Json, -) -> Result { - if body.issues.is_empty() { - info!("No issues found for file 
{}", body.url); - return Ok(HttpResponse::NoContent().finish()); - } - - let webhook_url = dotenvy::var("DELPHI_SLACK_WEBHOOK")?; - - let project = crate::database::models::DBProject::get_id( - body.project_id.into(), - &**pool, - &redis, - ) - .await? - .ok_or_else(|| { - ApiError::InvalidInput(format!( - "Project {} does not exist", - body.project_id - )) - })?; - - let mut header = format!("Suspicious traces found at {}", body.url); - - for (issue, trace) in &body.issues { - for (path, code) in trace { - write!( - &mut header, - "\n issue {issue} found at file {path}: \n ```\n{code}\n```" - ) - .unwrap(); - } - } - - crate::util::webhook::send_slack_project_webhook( - body.project_id, - &pool, - &redis, - webhook_url, - Some(header), - ) - .await - .ok(); - - let mut thread_header = format!( - "Suspicious traces found at [version {}](https://modrinth.com/project/{}/version/{})", - body.version_id, body.project_id, body.version_id - ); - - for (issue, trace) in &body.issues { - for path in trace.keys() { - write!( - &mut thread_header, - "\n\n- issue {issue} found at file {path}" - ) - .unwrap(); - } - - if trace.is_empty() { - write!(&mut thread_header, "\n\n- issue {issue} found").unwrap(); - } - } - - let mut transaction = pool.begin().await?; - ThreadMessageBuilder { - author_id: Some(crate::database::models::DBUserId(AUTOMOD_ID)), - body: MessageBody::Text { - body: thread_header, - private: true, - replying_to: None, - associated_images: vec![], - }, - thread_id: project.thread_id, - hide_identity: false, - } - .insert(&mut transaction) - .await?; - - transaction.commit().await?; - - Ok(HttpResponse::NoContent().finish()) -} diff --git a/apps/labrinth/src/routes/internal/delphi.rs b/apps/labrinth/src/routes/internal/delphi.rs new file mode 100644 index 0000000000..bc5eb64fd4 --- /dev/null +++ b/apps/labrinth/src/routes/internal/delphi.rs @@ -0,0 +1,387 @@ +use std::{collections::HashMap, fmt::Write, sync::LazyLock, time::Instant}; + +use actix_web::{HttpRequest, HttpResponse, get, post, put, web}; +use chrono::{DateTime, Utc}; +use reqwest::header::{HeaderMap, HeaderValue, USER_AGENT}; +use serde::Deserialize; +use sqlx::PgPool; +use tokio::sync::Mutex; +use tracing::info; + +use crate::{ + auth::check_is_moderator_from_headers, + database::{ + models::{ + DBFileId, DelphiReportId, DelphiReportIssueId, + DelphiReportIssueJavaClassId, + delphi_report_item::{ + DBDelphiReport, DBDelphiReportIssue, + DBDelphiReportIssueJavaClass, DecompiledJavaClassSource, + DelphiReportIssueStatus, DelphiReportListOrder, + DelphiReportSeverity, InternalJavaClassName, + }, + }, + redis::RedisPool, + }, + models::{ + ids::{ProjectId, VersionId}, + pats::Scopes, + }, + queue::session::AuthQueue, + routes::ApiError, + util::guards::admin_key_guard, +}; + +pub fn config(cfg: &mut web::ServiceConfig) { + cfg.service( + web::scope("delphi") + .service(ingest_report) + .service(_run) + .service(version) + .service(issues) + .service(update_issue) + .service(issue_type_schema), + ); +} + +static DELPHI_CLIENT: LazyLock = LazyLock::new(|| { + reqwest::Client::builder() + .default_headers({ + HeaderMap::from_iter([( + USER_AGENT, + HeaderValue::from_static(concat!( + "Labrinth/", + env!("COMPILATION_DATE") + )), + )]) + }) + .build() + .unwrap() +}); + +#[derive(Deserialize)] +struct DelphiReport { + pub url: String, + pub project_id: crate::models::ids::ProjectId, + #[serde(rename = "version_id")] + pub _version_id: crate::models::ids::VersionId, + pub file_id: crate::models::ids::FileId, + /// A sequential, 
monotonically increasing version number for the + /// Delphi version that generated this report. + pub delphi_version: i32, + pub issues: HashMap< + String, + HashMap>, + >, + pub severity: DelphiReportSeverity, +} + +impl DelphiReport { + async fn send_to_slack( + &self, + pool: &PgPool, + redis: &RedisPool, + ) -> Result<(), ApiError> { + let webhook_url = dotenvy::var("DELPHI_SLACK_WEBHOOK")?; + + let mut message_header = + format!("⚠️ Suspicious traces found at {}", self.url); + + for (issue, trace) in &self.issues { + for (class, code) in trace { + let code = code.as_deref().map(|code| &**code); + write!( + &mut message_header, + "\n issue {issue} found at class `{class}`:\n```\n{}\n```", + code.unwrap_or("No decompiled source available") + ) + .ok(); + } + } + + crate::util::webhook::send_slack_project_webhook( + self.project_id, + pool, + redis, + webhook_url, + Some(message_header), + ) + .await + } +} + +#[derive(Deserialize)] +pub struct DelphiRunParameters { + pub file_id: crate::models::ids::FileId, +} + +#[post("ingest", guard = "admin_key_guard")] +async fn ingest_report( + pool: web::Data, + redis: web::Data, + web::Json(report): web::Json, +) -> Result { + if report.issues.is_empty() { + info!("No issues found for file {}", report.url); + return Ok(HttpResponse::NoContent().finish()); + } + + report.send_to_slack(&pool, &redis).await.ok(); + + let mut transaction = pool.begin().await?; + + let report_id = DBDelphiReport { + id: DelphiReportId(0), // This will be set by the database + file_id: Some(DBFileId(report.file_id.0 as i64)), + delphi_version: report.delphi_version, + artifact_url: report.url.clone(), + created: DateTime::::MIN_UTC, // This will be set by the database + severity: report.severity, + } + .upsert(&mut transaction) + .await?; + + for (issue_type, issue_java_classes) in report.issues { + let issue_id = DBDelphiReportIssue { + id: DelphiReportIssueId(0), // This will be set by the database + report_id, + issue_type, + status: DelphiReportIssueStatus::Pending, + } + .upsert(&mut transaction) + .await?; + + for (internal_class_name, decompiled_source) in issue_java_classes { + DBDelphiReportIssueJavaClass { + id: DelphiReportIssueJavaClassId(0), // This will be set by the database + issue_id, + internal_class_name, + decompiled_source, + } + .upsert(&mut transaction) + .await?; + } + } + + transaction.commit().await?; + + Ok(HttpResponse::NoContent().finish()) +} + +pub async fn run( + exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>, + run_parameters: DelphiRunParameters, +) -> Result { + let file_data = sqlx::query!( + r#" + SELECT + version_id AS "version_id: crate::database::models::DBVersionId", + versions.mod_id AS "project_id: crate::database::models::DBProjectId", + files.url AS "url" + FROM files INNER JOIN versions ON files.version_id = versions.id + WHERE files.id = $1 + "#, + run_parameters.file_id.0 as i64 + ) + .fetch_one(exec) + .await?; + + tracing::debug!( + "Running Delphi for project {}, version {}, file {}", + file_data.project_id.0, + file_data.version_id.0, + run_parameters.file_id.0 + ); + + DELPHI_CLIENT + .post(dotenvy::var("DELPHI_URL")?) 
+ .json(&serde_json::json!({ + "url": file_data.url, + "project_id": ProjectId(file_data.project_id.0 as u64), + "version_id": VersionId(file_data.version_id.0 as u64), + "file_id": run_parameters.file_id, + })) + .send() + .await + .and_then(|res| res.error_for_status()) + .map_err(ApiError::Delphi)?; + + Ok(HttpResponse::NoContent().finish()) +} + +#[post("run")] +async fn _run( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, + run_parameters: web::Query, +) -> Result { + check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Scopes::PROJECT_READ, + ) + .await?; + + run(&**pool, run_parameters.into_inner()).await +} + +#[get("version")] +async fn version( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Scopes::PROJECT_READ, + ) + .await?; + + Ok(HttpResponse::Ok().json( + sqlx::query_scalar!("SELECT MAX(delphi_version) FROM delphi_reports") + .fetch_one(&**pool) + .await?, + )) +} + +#[derive(Deserialize)] +struct DelphiIssuesSearchOptions { + #[serde(rename = "type")] + ty: Option, + status: Option, + order_by: Option, + count: Option, + offset: Option, +} + +#[get("issues")] +async fn issues( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, + web::Query(search_options): web::Query, +) -> Result { + check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Scopes::PROJECT_READ, + ) + .await?; + + Ok(HttpResponse::Ok().json( + DBDelphiReportIssue::find_all_by( + search_options.ty, + search_options.status, + search_options.order_by, + search_options.count, + search_options + .offset + .map(|offset| offset.try_into()) + .transpose() + .map_err(|err| { + ApiError::InvalidInput(format!("Invalid offset: {err}")) + })?, + &**pool, + ) + .await?, + )) +} + +#[put("issue/{issue_id}")] +async fn update_issue( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, + issue_id: web::Path, + web::Json(update_data): web::Json, +) -> Result { + check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Scopes::PROJECT_READ, + ) + .await?; + + let new_id = issue_id.into_inner(); + + let mut transaction = pool.begin().await?; + + let modified_same_issue = (DBDelphiReportIssue { + id: new_id, // Doesn't matter, upsert done for values of other fields + report_id: update_data.report_id, + issue_type: update_data.issue_type, + status: update_data.status, + }) + .upsert(&mut transaction) + .await? 
+ == new_id; + + transaction.commit().await?; + + if modified_same_issue { + Ok(HttpResponse::NoContent().finish()) + } else { + Ok(HttpResponse::Created().finish()) + } +} + +#[get("issue_type/schema")] +async fn issue_type_schema( + req: HttpRequest, + pool: web::Data, + redis: web::Data, + session_queue: web::Data, +) -> Result { + check_is_moderator_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Scopes::PROJECT_READ, + ) + .await?; + + // This route is expected to be called often by the frontend, and Delphi is not necessarily + // built to scale beyond malware analysis, so cache the result of its quasi-constant-valued + // schema route to alleviate the load on it + + static CACHED_ISSUE_TYPE_SCHEMA: Mutex< + Option<(serde_json::Map, Instant)>, + > = Mutex::const_new(None); + + match &mut *CACHED_ISSUE_TYPE_SCHEMA.lock().await { + Some((schema, last_fetch)) if last_fetch.elapsed().as_secs() < 60 => { + Ok(HttpResponse::Ok().json(schema)) + } + cache_entry => Ok(HttpResponse::Ok().json( + &cache_entry + .insert(( + DELPHI_CLIENT + .get(format!("{}/schema", dotenvy::var("DELPHI_URL")?)) + .send() + .await + .and_then(|res| res.error_for_status()) + .map_err(ApiError::Delphi)? + .json::>() + .await + .map_err(ApiError::Delphi)?, + Instant::now(), + )) + .0, + )), + } +} diff --git a/apps/labrinth/src/routes/internal/mod.rs b/apps/labrinth/src/routes/internal/mod.rs index 00730373fc..336dc2a772 100644 --- a/apps/labrinth/src/routes/internal/mod.rs +++ b/apps/labrinth/src/routes/internal/mod.rs @@ -1,6 +1,7 @@ pub(crate) mod admin; pub mod affiliate; pub mod billing; +pub mod delphi; pub mod external_notifications; pub mod flows; pub mod gdpr; @@ -29,6 +30,7 @@ pub fn config(cfg: &mut actix_web::web::ServiceConfig) { .configure(statuses::config) .configure(medal::config) .configure(external_notifications::config) - .configure(affiliate::config), + .configure(affiliate::config) + .configure(delphi::config), ); } diff --git a/apps/labrinth/src/routes/mod.rs b/apps/labrinth/src/routes/mod.rs index 66a20a91f0..ffb3de84d5 100644 --- a/apps/labrinth/src/routes/mod.rs +++ b/apps/labrinth/src/routes/mod.rs @@ -155,6 +155,8 @@ pub enum ApiError { RateLimitError(u128, u32), #[error("Error while interacting with payment processor: {0}")] Stripe(#[from] stripe::StripeError), + #[error("Error while interacting with Delphi: {0}")] + Delphi(reqwest::Error), } impl ApiError { @@ -194,6 +196,7 @@ impl ApiError { ApiError::Stripe(..) => "stripe_error", ApiError::TaxProcessor(..) => "tax_processor_error", ApiError::Slack(..) => "slack_error", + ApiError::Delphi(..) => "delphi_error", }, description: self.to_string(), } @@ -236,6 +239,7 @@ impl actix_web::ResponseError for ApiError { ApiError::Stripe(..) => StatusCode::FAILED_DEPENDENCY, ApiError::TaxProcessor(..) => StatusCode::INTERNAL_SERVER_ERROR, ApiError::Slack(..) => StatusCode::INTERNAL_SERVER_ERROR, + ApiError::Delphi(..) 
=> StatusCode::INTERNAL_SERVER_ERROR, } } diff --git a/apps/labrinth/src/routes/v3/project_creation.rs b/apps/labrinth/src/routes/v3/project_creation.rs index e03d2dd58e..dad60fec85 100644 --- a/apps/labrinth/src/routes/v3/project_creation.rs +++ b/apps/labrinth/src/routes/v3/project_creation.rs @@ -339,9 +339,6 @@ async fn project_create_inner( redis: &RedisPool, session_queue: &AuthQueue, ) -> Result { - // The base URL for files uploaded to S3 - let cdn_url = dotenvy::var("CDN_URL")?; - // The currently logged in user let (_, current_user) = get_user_from_headers( &req, @@ -577,7 +574,6 @@ async fn project_create_inner( uploaded_files, &mut created_version.files, &mut created_version.dependencies, - &cdn_url, &content_disposition, project_id, created_version.version_id.into(), diff --git a/apps/labrinth/src/routes/v3/version_creation.rs b/apps/labrinth/src/routes/v3/version_creation.rs index 396395e69c..03ebb81283 100644 --- a/apps/labrinth/src/routes/v3/version_creation.rs +++ b/apps/labrinth/src/routes/v3/version_creation.rs @@ -38,7 +38,6 @@ use sha1::Digest; use sqlx::postgres::PgPool; use std::collections::{HashMap, HashSet}; use std::sync::Arc; -use tracing::error; use validator::Validate; fn default_requested_status() -> VersionStatus { @@ -158,8 +157,6 @@ async fn version_create_inner( session_queue: &AuthQueue, moderation_queue: &AutomatedModerationQueue, ) -> Result { - let cdn_url = dotenvy::var("CDN_URL")?; - let mut initial_version_data = None; let mut version_builder = None; let mut selected_loaders = None; @@ -355,7 +352,6 @@ async fn version_create_inner( uploaded_files, &mut version.files, &mut version.dependencies, - &cdn_url, &content_disposition, version.project_id.into(), version.version_id.into(), @@ -451,6 +447,7 @@ async fn version_create_inner( .files .iter() .map(|file| VersionFile { + id: None, hashes: file .hashes .iter() @@ -590,8 +587,6 @@ async fn upload_file_to_version_inner( version_id: models::DBVersionId, session_queue: &AuthQueue, ) -> Result { - let cdn_url = dotenvy::var("CDN_URL")?; - let mut initial_file_data: Option = None; let mut file_builders: Vec = Vec::new(); @@ -741,7 +736,6 @@ async fn upload_file_to_version_inner( uploaded_files, &mut file_builders, &mut dependencies, - &cdn_url, &content_disposition, project_id, version_id.into(), @@ -795,7 +789,6 @@ pub async fn upload_file( uploaded_files: &mut Vec, version_files: &mut Vec, dependencies: &mut Vec, - cdn_url: &str, content_disposition: &actix_web::http::header::ContentDisposition, project_id: ProjectId, version_id: VersionId, @@ -942,21 +935,17 @@ pub async fn upload_file( || force_primary || total_files_len == 1; - let file_path_encode = format!( - "data/{}/versions/{}/{}", - project_id, - version_id, + let file_path = format!( + "data/{project_id}/versions/{version_id}/{}", urlencoding::encode(file_name) ); - let file_path = - format!("data/{}/versions/{}/{}", project_id, version_id, &file_name); let upload_data = file_host .upload_file(content_type, &file_path, FileHostPublicity::Public, data) .await?; uploaded_files.push(UploadedFile { - name: file_path, + name: file_path.clone(), publicity: FileHostPublicity::Public, }); @@ -980,33 +969,9 @@ pub async fn upload_file( return Err(CreateError::InvalidInput(msg.to_string())); } - let url = format!("{cdn_url}/{file_path_encode}"); - - let client = reqwest::Client::new(); - let delphi_url = dotenvy::var("DELPHI_URL")?; - match client - .post(delphi_url) - .json(&serde_json::json!({ - "url": url, - "project_id": project_id, - 
"version_id": version_id, - })) - .send() - .await - { - Ok(res) => { - if !res.status().is_success() { - error!("Failed to upload file to Delphi: {url}"); - } - } - Err(e) => { - error!("Failed to upload file to Delphi: {url}: {e}"); - } - } - version_files.push(VersionFileBuilder { filename: file_name.to_string(), - url: format!("{cdn_url}/{file_path_encode}"), + url: format!("{}/{file_path}", dotenvy::var("CDN_URL")?), hashes: vec![ models::version_item::HashBuilder { algorithm: "sha1".to_string(), diff --git a/docker-compose.yml b/docker-compose.yml index 955e6586fb..27a8efbbe7 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -98,6 +98,21 @@ services: watch: - path: ./apps/labrinth action: rebuild + delphi: + profiles: + - with-delphi + image: ghcr.io/modrinth/delphi:main + container_name: labrinth-delphi + ports: + - '59999:59999' + environment: + LABRINTH_ENDPOINT: http://labrinth:8000/_internal/delphi/ingest + LABRINTH_ADMIN_KEY: feedbeef + healthcheck: + test: ['CMD', 'wget', '-q', '-O/dev/null', 'http://localhost:59999/health'] + interval: 3s + timeout: 5s + retries: 3 volumes: meilisearch-data: db-data: