diff --git a/Cargo.toml b/Cargo.toml
index d1ad844..eed66e0 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,60 +1,2 @@
-[package]
-name = "ainigma"
-version = "0.1.0"
-edition = "2024"
-
-[[bin]]
-name = "ainigma"
-path = "src/bin/cli.rs"
-
-[dependencies]
-# Our async runtime
-tokio = { version = "1.44", default-features = false, features = [
-    "macros",
-    "rt-multi-thread",
-] }
-
-# Crypto
-sha3 = "0.10"
-hmac = "0.12"
-rand = "0.9"
-
-serde = { version = "1.0", default-features = false, features = ["derive"] }
-toml = "0.8"
-
-# Cli
-itertools = "0.14.0"
-clap = { version = "4.5", features = ["derive", "cargo"] }
-thiserror = "2"
-tracing = { version = "0.1.41" }
-tracing-subscriber = { version = "0.3.19", features = [] }
-aws-sdk-s3 = { version = "1.82.0", default-features = false, features = [
-    "rt-tokio",
-] }
-aws-config = { version = "1.6.1", default-features = false, features = [
-    "client-hyper",
-    "rt-tokio",
-    "rustls",
-] }
-futures = "0.3.31"
-moodle-xml = "0.2.0"
-serde_json = "1"
-once_cell = { version = "1", default-features = false }
-tempfile = { version = "3", default-features = false }
-[dependencies.uuid]
-version = "1"
-features = [
-    "v7",                # Lets you generate random UUIDs
-    "fast-rng",          # Use a faster (but still sufficiently random) RNG
-    "macro-diagnostics", # Enable better diagnostics for compile-time UUIDs
-    "serde",
-]
-
-[dev-dependencies]
-insta = { version = "1" }
-assert_cmd = "2"
-predicates = "3"
-
-[profile.dev.package]
-insta.opt-level = 3
-similar.opt-level = 3
+[workspace]
+members = ["ainigma", "ainigma-backend"]
diff --git a/ainigma-backend/Cargo.toml b/ainigma-backend/Cargo.toml
new file mode 100644
index 0000000..d46df83
--- /dev/null
+++ b/ainigma-backend/Cargo.toml
@@ -0,0 +1,21 @@
+[package]
+name = "ainigma-backend"
+version = "0.1.0"
+edition = "2024"
+
+[dependencies]
+ainigma = { path = "../ainigma" }
+
+# and any other dependencies you need:
+hyper = "1.6.0"
+axum = "0.8.4"
+tokio = { version = "1", features = ["full"] }
+uuid = { version = "1.17.0", features = ["v7", "serde"] }
+serde = { version = "1.0.211", features = ["derive"] }
+serde_json = "1"
+tower = "0.5.2"
+sqlx = { version = "0.8.6", features = ["postgres", "runtime-tokio-rustls"] }
+
+[dev-dependencies]
+reqwest = { version = "0.12.19", features = ["json"] }
+tokio = { version = "1", features = ["full"] }
diff --git a/ainigma-backend/README.md b/ainigma-backend/README.md
new file mode 100644
index 0000000..6aef51f
--- /dev/null
+++ b/ainigma-backend/README.md
@@ -0,0 +1,81 @@
+## Configuration
+
+With the current implementation, Ainigma requires the following information.
+
+- **TOML configuration file** — Contains core settings for Ainigma.
+- **Task number (identifier)** — The identifier of the specific task to build.
+
+The backend additionally needs the following to work:
+
+- **User ID** — When using the server backend, this should be provided dynamically; currently it is generated using `Uuid::now_v7()`.
+- **Course secret**
+- **Output directory** — Created inside the task folder, with the name `output`.
+
+## Software
+
+- **sqlx** — database access
+- **Smithy** — generating code for the backend
+
+## Serverside structure
+
+```
+/srv/ainigma/data/
+  /courses/
+    Index file (index for quick course lookup and listing)
+    <course_uuid>/ (or name)
+      config.toml (defined name for pathing)
+      <category>/ (name)
+        <task_id>/
+          entrypoint.sh
+          code_files...
+          /output/
+            <user_uuid>/
+              task files for student
+          resource_files/
+```
+
+## Database structure
+
+```
+courses (1) ── (many) categories (1) ── (many) tasks
+users (1) ── (many) user_task_progress (many) ── (1) tasks
+```
+
+## Workflow
+
+```
+[Client]
+|
+|-- Request (uuid, task_id, course_id) -->
+|
+[Server]
+|-- Load course config
+|-- Check if task exists for student (uuid, task_id, course_id)
+|   |-- Yes: return existing task
+|   |-- No:
+|       |-- Generate flags
+|       |-- Build task using course config
+|       |-- Save built task
+|-- Return task data -->
+|-- Add correct flag / answer to database
+[Client receives task and starts solving]
+[Client]
+|
+|-- Does exercise
+|
+[Server]
+|
+|-- Check for correct answer -->
+|-- Yes: send correct response and add progress
+|-- No: send feedback
+|
+[Client] receives feedback
+```
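A minimal sketch of the request/response shapes this workflow implies. All names here (`TaskRequest`, `TaskResponse`, `find_existing`, `generate_and_build`) are placeholders for illustration, not ainigma APIs:

```rust
use serde::{Deserialize, Serialize};
use uuid::Uuid;

/// What the client sends (uuid, task_id, course_id), per the diagram above.
#[derive(Deserialize)]
struct TaskRequest {
    user_id: Uuid,
    task_id: String,
    course_id: Uuid,
}

/// What the server returns after looking up or building the task.
#[derive(Serialize)]
struct TaskResponse {
    task_id: String,
    readme: String,
    files: Vec<String>, // download links for resource files
}

/// Placeholder lookup: `Some` if the task was already built for this student.
fn find_existing(_req: &TaskRequest) -> Option<TaskResponse> {
    None
}

/// Placeholder build step: generate flags, build, persist, return task data.
fn generate_and_build(req: &TaskRequest) -> TaskResponse {
    TaskResponse {
        task_id: req.task_id.clone(),
        readme: format!("task {} for user {}", req.task_id, req.user_id),
        files: Vec::new(),
    }
}

/// "Yes: return existing task" / "No: generate flags, build, save".
fn handle_task_request(req: TaskRequest) -> TaskResponse {
    find_existing(&req).unwrap_or_else(|| generate_and_build(&req))
}

fn main() {
    let resp = handle_task_request(TaskRequest {
        user_id: Uuid::now_v7(),
        task_id: "task1".into(),
        course_id: Uuid::now_v7(),
    });
    println!("{}", serde_json::to_string(&resp).unwrap());
}
```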
+## Questions
+
+- Category has no identifier, while a task is identified by a plain `String`.
+- Where should the course secret be stored?
+- Should configuration change only while the server is down (configuration checked at start and expected to be correct during runtime),
+  or should updates be supported (updates to the config during server runtime, checked at runtime with functionality locked during the update process)?
+
+## Feedback
+
+- Postgres has no built-in generator for v7 UUIDs, only v4; v7 values must be generated in the application (see the sketch below).
+- A new build function that takes a UUID and, beyond that, only the module configuration and task_id.
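Regarding the v7 feedback item: Postgres's `UUID` column type is version-agnostic, so one workaround is to generate v7 IDs in the application and bind them as ordinary `UUID` values. A sketch, assuming a reachable database, a `DATABASE_URL` env var, and sqlx's `uuid` feature (which the manifest above does not yet enable):

```rust
use sqlx::postgres::PgPoolOptions;
use uuid::Uuid;

#[tokio::main]
async fn main() -> Result<(), sqlx::Error> {
    // Assumes DATABASE_URL points at the Postgres instance from the migration.
    let pool = PgPoolOptions::new()
        .connect(&std::env::var("DATABASE_URL").expect("DATABASE_URL not set"))
        .await?;

    // Generate the v7 UUID application-side; the UUID column stores any version.
    let id = Uuid::now_v7();
    sqlx::query("INSERT INTO users (id, email) VALUES ($1, $2)")
        .bind(id)
        .bind("student@example.com")
        .execute(&pool)
        .await?;
    Ok(())
}
```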
diff --git a/ainigma-backend/backend.rs b/ainigma-backend/backend.rs
new file mode 100644
index 0000000..2ea1302
--- /dev/null
+++ b/ainigma-backend/backend.rs
@@ -0,0 +1,107 @@
+use ainigma::flag_generator::{Algorithm, Flag};
+use axum::{Json, Router, routing::post};
+use serde::{Deserialize, Serialize};
+use uuid::Uuid;
+
+#[derive(Deserialize)]
+struct GenerateFlagInput {
+    identifier: String,
+    algorithm: Algorithm,
+    user_id: Uuid,
+    course_secret: String,
+    task_id: String,
+}
+
+#[derive(Serialize, Deserialize)]
+struct GenerateFlagOutput {
+    flag: String,
+}
+
+async fn generate_flag_handler(Json(payload): Json<GenerateFlagInput>) -> Json<GenerateFlagOutput> {
+    let uuid = payload.user_id;
+
+    let flag = Flag::new_user_flag(
+        payload.identifier,     // identifier
+        &payload.algorithm,     // algorithm
+        &payload.course_secret, // secret
+        &payload.task_id,       // taskid
+        &uuid,                  // uuid
+    )
+    .flag_string();
+
+    Json(GenerateFlagOutput { flag })
+}
+
+#[tokio::main]
+async fn main() {
+    let app = Router::new().route("/generate-task", post(generate_flag_handler));
+
+    println!("Listening on http://localhost:3000");
+    let listener = tokio::net::TcpListener::bind("0.0.0.0:3000").await.unwrap();
+    axum::serve(listener, app).await.unwrap();
+}
+fn app() -> Router {
+    Router::new().route("/generate-task", post(generate_flag_handler))
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use axum::Json;
+    use std::net::SocketAddr;
+    use tokio::task;
+    use uuid::Uuid;
+
+    #[tokio::test]
+    async fn test_generate_task_handler() {
+        let user_id = Uuid::now_v7();
+        let input = GenerateFlagInput {
+            identifier: "id1".to_string(),
+            user_id: user_id.clone(),
+            task_id: "task1".to_string(),
+            algorithm: Algorithm::HMAC_SHA3_256,
+            course_secret: "secret".to_string(),
+        };
+
+        let response = generate_flag_handler(Json(input)).await;
+        let output = response.0;
+
+        // Assert output is non-empty and contains the task id
+        assert!(output.flag.starts_with("id1:"));
+        println!("Generated flag: {}", output.flag);
+    }
+
+    #[tokio::test]
+    async fn test_generate_task_integration() {
+        let app = app();
+
+        // Spawn the server on a fixed test port
+        let addr = SocketAddr::from(([127, 0, 0, 1], 3001));
+        task::spawn(async move {
+            let listener = tokio::net::TcpListener::bind(addr).await.unwrap();
+            axum::serve(listener, app).await.unwrap();
+        });
+
+        // Give the server time to start
+        tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
+
+        let client = reqwest::Client::new();
+        let uuid = Uuid::now_v7();
+        let res = client
+            .post("http://localhost:3001/generate-task")
+            .json(&serde_json::json!({
+                "identifier": "id2",
+                "algorithm": Algorithm::HMAC_SHA3_256,
+                "user_id": uuid,
+                "course_secret": "course42",
+                "task_id": "taskA"
+            }))
+            .send()
+            .await
+            .unwrap();
+
+        let body: GenerateFlagOutput = res.json().await.unwrap();
+        assert!(body.flag.starts_with("id2:"));
+        println!("Flag: {}", body.flag);
+    }
+}
diff --git a/ainigma-backend/migrations/0001_ainigma.sql b/ainigma-backend/migrations/0001_ainigma.sql
new file mode 100644
index 0000000..54c6271
--- /dev/null
+++ b/ainigma-backend/migrations/0001_ainigma.sql
@@ -0,0 +1,40 @@
+CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
+
+CREATE TABLE users (
+    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
+    email VARCHAR UNIQUE NOT NULL,
+    created_at TIMESTAMPTZ DEFAULT now()
+);
+
+CREATE TABLE courses (
+    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
+    title VARCHAR NOT NULL,
+    description TEXT,
+    created_at TIMESTAMPTZ DEFAULT now()
+);
+
+CREATE TABLE categories (
+    id SERIAL PRIMARY KEY,
+    course_id UUID NOT NULL REFERENCES courses(id) ON DELETE CASCADE,
+    name VARCHAR NOT NULL,
+    number INTEGER
+);
+
+CREATE TABLE tasks (
+    id SERIAL PRIMARY KEY,
+    category_id INTEGER NOT NULL REFERENCES categories(id) ON DELETE CASCADE,
+    title VARCHAR NOT NULL,
+    description TEXT,
+    points INTEGER DEFAULT 1,
+    created_at TIMESTAMPTZ DEFAULT now()
+);
+
+
+CREATE TABLE user_task_progress (
+    id SERIAL PRIMARY KEY,
+    user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
+    task_id INTEGER NOT NULL REFERENCES tasks(id) ON DELETE CASCADE,
+    completed_at TIMESTAMPTZ,
+    score INTEGER,
+    UNIQUE (user_id, task_id)
+);
\ No newline at end of file
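Since the crate ships this migration under `migrations/` and already depends on sqlx, the schema can be applied at startup with sqlx's embedded migrator. A sketch, assuming the default `migrations/` directory and a `DATABASE_URL` env var:

```rust
use sqlx::postgres::PgPoolOptions;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let pool = PgPoolOptions::new()
        .max_connections(5)
        .connect(&std::env::var("DATABASE_URL")?)
        .await?;

    // Embeds ./migrations at compile time and applies any that haven't run yet
    // (sqlx tracks applied migrations in the _sqlx_migrations table).
    sqlx::migrate!("./migrations").run(&pool).await?;
    Ok(())
}
```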
diff --git a/ainigma-backend/model.smithy b/ainigma-backend/model.smithy
new file mode 100644
index 0000000..ca7704d
--- /dev/null
+++ b/ainigma-backend/model.smithy
@@ -0,0 +1,121 @@
+namespace com.yourorg.learningplatform
+
+service LearningPlatformService {
+    version: "1.0",
+    operations: [
+        NewUser,
+        UserLogin,
+        ListCourses,
+        GetCourseConfig,
+        GetCategory,
+        GetTask,
+        CompareAnswer
+    ],
+    errors: [Unauthorized, NotFound, InternalError]
+}
+
+operation NewUser {
+    input: NewUserInput
+    output: User,
+}
+
+structure NewUserInput {
+    @required
+    username: String,
+    @required
+    email: String,
+    @required
+    password: String,
+}
+
+structure User {
+    id: Uuid,
+    username: String,
+    email: String,
+}
+
+operation UserLogin {
+    input: UserLoginInput,
+    output: UserLoginOutput,
+    errors: [Unauthorized]
+}
+
+structure UserLoginInput {
+    @required
+    username: String,
+    @required
+    password: String,
+}
+
+structure UserLoginOutput {
+    token: String,
+    expiresAt: Timestamp,
+}
+
+operation ListCourses {
+
+}
+
+operation GetCourseConfig {
+    input: CourseConfigInput,
+    output: CourseConfigOutput
+}
+
+structure CourseConfigInput {
+    @required
+    course_id: Uuid,
+}
+
+structure CourseConfigOutput {}
+
+operation GetCategory {
+    input: Category
+}
+
+operation GetTask {
+    input: TaskInput,
+    output: TaskOutput,
+    errors: [NotFound]
+}
+structure TaskInput {
+    @required
+    course_id: String,
+    @required
+    task_id: String,
+    @required
+    user_id: String,
+}
+// Needs to generate some kind of file for viewing the task and its parts, like files
+structure TaskOutput {
+    task: WebTask
+}
+
+structure WebTask {
+    id: String,
+    title: String,
+    description: String,
+    category: String,
+    files: list<String>, // for display/download
+}
+operation CompareAnswer {
+    input: CheckAnswer
+    output: CompareAnswerOutput
+}
+
+structure CheckAnswer {
+    @required
+    course_id: Uuid,
+    @required
+    task_id: String,
+    @required
+    user_id: Uuid,
+    @required
+    answer: String,
+}
+
+
+structure CompareAnswerOutput {
+    correct: Boolean,
+    feedback: String,
+}
+
diff --git a/ainigma-backend/src/backend.rs b/ainigma-backend/src/backend.rs
new file mode 100644
index 0000000..2d750b8
--- /dev/null
+++ b/ainigma-backend/src/backend.rs
@@ -0,0 +1,81 @@
+use ainigma::config::{ModuleConfiguration, Task, read_check_toml, read_toml};
+use ainigma::errors::ConfigError;
+use std::fs;
+use std::path::Path;
+use uuid::Uuid;
+
+const DATA_PATH: &str = "/srv/ainigma/data";
+const COURSES_DIR: &str = "courses";
+
+pub async fn get_task() -> Result<bool, ConfigError> {
+    // This function is a placeholder for fetching task information to display.
+    Ok(true)
+}
+
+pub async fn check_all_config() -> Result<bool, ConfigError> {
+    let courses_path = Path::new(DATA_PATH).join(COURSES_DIR);
+    let entries =
+        fs::read_dir(&courses_path).map_err(|e| ConfigError::FileReadError(e.to_string()))?;
+    for entry_result in entries {
+        let entry = entry_result.map_err(|e| ConfigError::FileReadError(e.to_string()))?;
+        let path = entry.path();
+        if path.is_dir() {
+            let config_path = path.join("config.toml");
+            let os_str_path = config_path.as_os_str();
+            read_check_toml(os_str_path)?;
+        }
+    }
+    Ok(true)
+}
+
+pub async fn find_course_config(course_id: Uuid) -> Result<ModuleConfiguration, std::io::Error> {
+    // TODO: CHECK RACE CONDITION (locking the directory)
+    let courses_path = Path::new(DATA_PATH).join(COURSES_DIR);
+    let course_string = course_id.to_string();
+
+    for entry in fs::read_dir(&courses_path)? {
+        let entry = entry?;
+        let path = entry.path();
+
+        if let Some(folder_name) = path.file_name().and_then(|n| n.to_str()) {
+            if path.is_dir() && folder_name == course_string.as_str() {
+                let config_path = path.join("config.toml");
+                if config_path.exists() {
+                    // Course configs need to be valid
+                    let config = read_toml(config_path)
+                        .expect("All course configs should be valid so they should read correctly");
+                    return Ok(config);
+                }
+            }
+        }
+    }
+    Err(std::io::Error::new(
+        std::io::ErrorKind::NotFound,
+        format!("Course with id {course_id} not found"),
+    ))
+}
+
+pub async fn find_task(
+    course: ModuleConfiguration,
+    task_id: String,
+) -> Result<Task, std::io::Error> {
+    // Searches for a task in the course configuration
+    let task = course.get_task_by_id(task_id.as_str());
+    if let Some(task) = task {
+        return Ok(task.clone());
+    }
+    Err(std::io::Error::new(
+        std::io::ErrorKind::NotFound,
+        format!("Task with id {task_id} not found in course"),
+    ))
+}
+
+pub async fn compareanswer(
+    _course_id: Uuid,
+    _task_id: String,
+    _user_id: Uuid,
+    _answer: String,
+) -> Result<bool, std::io::Error> {
+    // Compares the user's answer with the correct answer from the database
+    Ok(false) // Placeholder for actual comparison logic
+}
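These helpers compose into the lookup half of the README workflow. A sketch of hypothetical glue code (the course and task IDs here are made up, and `find_task` currently takes the `ModuleConfiguration` by value):

```rust
// Hypothetical caller, not part of this diff: resolve a course config from
// disk, then the task inside it, mirroring the README workflow.
use ainigma_backend::backend::{find_course_config, find_task};
use uuid::Uuid;

#[tokio::main]
async fn main() -> Result<(), std::io::Error> {
    let course_id = Uuid::now_v7(); // made-up course id
    // 1. /srv/ainigma/data/courses/<course_id>/config.toml -> ModuleConfiguration
    let course = find_course_config(course_id).await?;
    // 2. Search the parsed configuration for the task
    let task = find_task(course, "task001".to_string()).await?;
    println!("found task: {:?}", task);
    Ok(())
}
```

Taking the configuration by value means each lookup consumes the parsed config; a borrowing variant may be worth considering once a handler caches configs per course.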
diff --git a/ainigma-backend/src/lib.rs b/ainigma-backend/src/lib.rs
new file mode 100644
index 0000000..fceb141
--- /dev/null
+++ b/ainigma-backend/src/lib.rs
@@ -0,0 +1 @@
+pub mod backend;
diff --git a/ainigma-backend/src/main.rs b/ainigma-backend/src/main.rs
new file mode 100644
index 0000000..6d9c61e
--- /dev/null
+++ b/ainigma-backend/src/main.rs
@@ -0,0 +1,6 @@
+use std::error::Error;
+
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn Error>> {
+    Ok(())
+}
diff --git a/ainigma/Cargo.toml b/ainigma/Cargo.toml
new file mode 100644
index 0000000..d1ad844
--- /dev/null
+++ b/ainigma/Cargo.toml
@@ -0,0 +1,60 @@
+[package]
+name = "ainigma"
+version = "0.1.0"
+edition = "2024"
+
+[[bin]]
+name = "ainigma"
+path = "src/bin/cli.rs"
+
+[dependencies]
+# Our async runtime
+tokio = { version = "1.44", default-features = false, features = [
+    "macros",
+    "rt-multi-thread",
+] }
+
+# Crypto
+sha3 = "0.10"
+hmac = "0.12"
+rand = "0.9"
+
+serde = { version = "1.0", default-features = false, features = ["derive"] }
+toml = "0.8"
+
+# Cli
+itertools = "0.14.0"
+clap = { version = "4.5", features = ["derive", "cargo"] }
+thiserror = "2"
+tracing = { version = "0.1.41" }
+tracing-subscriber = { version = "0.3.19", features = [] }
+aws-sdk-s3 = { version = "1.82.0", default-features = false, features = [
+    "rt-tokio",
+] }
+aws-config = { version = "1.6.1", default-features = false, features = [
+    "client-hyper",
+    "rt-tokio",
+    "rustls",
+] }
+futures = "0.3.31"
+moodle-xml = "0.2.0"
+serde_json = "1"
+once_cell = { version = "1", default-features = false }
+tempfile = { version = "3", default-features = false }
+[dependencies.uuid]
+version = "1"
+features = [
+    "v7",                # Lets you generate random UUIDs
+    "fast-rng",          # Use a faster (but still sufficiently random) RNG
+    "macro-diagnostics", # Enable better diagnostics for compile-time UUIDs
+    "serde",
+]
+
+[dev-dependencies]
+insta = { version = "1" }
+assert_cmd = "2"
+predicates = "3"
+
+[profile.dev.package]
+insta.opt-level = 3
+similar.opt-level = 3
diff --git a/LICENSE b/ainigma/LICENSE
similarity index 100%
rename from LICENSE
rename to ainigma/LICENSE
diff --git a/README.md b/ainigma/README.md
similarity index 100%
rename from README.md
rename to ainigma/README.md
diff --git a/course.toml b/ainigma/course.toml
similarity index 100%
rename from course.toml
rename to ainigma/course.toml
diff --git a/design.md b/ainigma/design.md
similarity index 100%
rename from design.md
rename to ainigma/design.md
diff --git a/src/README.txt b/ainigma/src/README.txt
similarity index 100%
rename from src/README.txt
rename to ainigma/src/README.txt
diff --git a/src/bin/cli.rs b/ainigma/src/bin/cli.rs
similarity index 100%
rename from src/bin/cli.rs
rename to ainigma/src/bin/cli.rs
diff --git a/src/build_process.rs b/ainigma/src/build_process.rs
similarity index 86%
rename from src/build_process.rs
rename to ainigma/src/build_process.rs
index fb659ce..f2c91e3 100644
--- a/src/build_process.rs
+++ b/ainigma/src/build_process.rs
@@ -1,526 +1,609 @@
-use std::collections::HashMap;
-use std::fs;
-use std::path::{Path, PathBuf};
-// use tracing::instrument;
-use uuid::Uuid;
-
-use crate::config::{
-    BuildConfig, Builder, DEFAULT_BUILD_MANIFEST, DEFAULT_FLAGS_FILENAME, FlagVariantKind,
-    ModuleConfiguration, OutputKind, Task,
-};
-use crate::errors::BuildError;
-use crate::flag_generator::Flag;
-
-/// Represents the build process of a task, including the initial configuration and produced output files and flags.
-#[derive(serde::Serialize, Debug)] -pub struct TaskBuildContainer<'a> { - pub basedir: PathBuf, - pub task: &'a Task, - /// For batch mode, this is > 1, for a sequential build, this is 1 - pub outputs: Vec, - batched: bool, -} -impl<'a> TaskBuildContainer<'a> { - pub fn new( - out_dir: PathBuf, - task: &'a Task, - outputs: Vec, - batched: bool, - ) -> Self { - Self { - basedir: out_dir, - task, - outputs, - batched, - } - } -} - -impl TaskBuildContainer<'_> { - pub fn validate_output(&mut self) -> Result<(), BuildError> { - for intermediate in &mut self.outputs { - for item in &mut intermediate.outputs { - // The task instance directory should be defined already - let pre_cano = &intermediate - .task_instance_dir - .join(item.kind.get_filename()); - let path = match pre_cano.canonicalize() { - Ok(p) => p, - Err(e) => { - tracing::error!("Failure in file '{}", &pre_cano.display(),); - tracing::error!( - "Failed to verify that build output path for file `{}` exist : {}. Is builder using given output directory correctly or configuration has unintentional output files defined?", - &item.kind.get_filename().to_string_lossy(), - e - ); - return Err(BuildError::OutputVerificationFailed(e.to_string())); - } - }; - item.update_path(path); - } - } - Ok(()) - } - /// Check if the task has any files to distribute other than the readme.txt. Defined by the existence of `OutputKind::Resource`. - pub fn has_files_to_distribute(&self) -> bool { - self.outputs - .iter() - .any(|intermediate| !intermediate.get_resource_files().is_empty()) - } -} - -// All flags in a single task's stages -#[derive(serde::Serialize, Clone, Debug)] -pub struct IntermediateOutput { - pub uuid: Uuid, - pub stage_flags: Vec, - pub task_instance_dir: PathBuf, - pub outputs: Vec, -} - -impl IntermediateOutput { - pub fn new( - uuid: Uuid, - stage_flags: Vec, - task_instance_dir: PathBuf, - outputs: Vec, - ) -> Self { - Self { - uuid, - stage_flags, - task_instance_dir, - outputs, - } - } - pub fn validate_readme_count(&self) -> Result<(), BuildError> { - let readme_count = self - .outputs - .iter() - .filter(|output| matches!(output.kind, OutputKind::Readme(_))) - .count(); - if readme_count != 1 { - return Err(BuildError::OutputVerificationFailed(format!( - "Expected exactly one readme file, found {}", - readme_count - ))); - } - Ok(()) - } - - /// Files that should be delivered for the end-user - pub fn get_resource_files(&self) -> Vec { - self.outputs - .iter() - .filter_map(|output| match output.kind { - OutputKind::Resource(_) => Some(output.to_owned()), - _ => None, - }) - .collect() - } - /// Get readme.txt from the output files - pub fn get_readme(&self) -> Option<&OutputItem> { - self.outputs - .iter() - .find(|output| matches!(output.kind, OutputKind::Readme(_))) - } - /// Update common files that apply to all flag entries - pub fn update_files(&mut self, items: Vec) { - for item in items { - if let Some(index) = self.outputs.iter().position(|x| x.kind == item.kind) { - self.outputs[index] = item; - } else { - self.outputs.push(item); - } - } - } -} - -/// Build process can return metadata about the task -#[derive(serde::Serialize, serde::Deserialize, Debug)] -struct Meta { - pub task: String, - pub challenges: Vec, -} - -#[derive(serde::Serialize, serde::Deserialize, Debug)] -struct Challenge { - pub uuid: Uuid, - pub flag: String, - pub url: Option, -} - -fn create_flags_by_task<'a>( - task_config: &'a Task, - module_config: &'a ModuleConfiguration, - uuid: Uuid, -) -> Vec { - let mut flags = 
Vec::with_capacity(task_config.stages.len()); - for stage in &task_config.stages { - // Get ID from stage or fall back to task ID - let id = stage.id.as_deref().unwrap_or(&task_config.id); - let flag = match stage.flag.kind { - FlagVariantKind::UserDerived => Flag::new_user_flag( - id.into(), - &module_config.flag_config.user_derived.algorithm, - &module_config.flag_config.user_derived.secret, - id, - &uuid, - ), - FlagVariantKind::PureRandom => { - Flag::new_random_flag(id.into(), module_config.flag_config.pure_random.length) - } - FlagVariantKind::RngSeed => Flag::new_rng_seed( - id.into(), - &module_config.flag_config.user_derived.algorithm, - &module_config.flag_config.user_derived.secret, - id, - &uuid, - ), - }; - flags.push(flag); - } - flags -} - -#[allow(dead_code)] -fn get_build_info( - module_config: &mut ModuleConfiguration, - task_id: String, -) -> Result<&BuildConfig, String> { - for category in &mut module_config.categories { - for task in &category.tasks { - if task_id == task.id { - return Ok(task.build.as_ref()); - } - } - } - Err(format!( - "Build information for task with id {} not found!", - task_id - )) -} -/// Couple output items together, so we link points to the correct output -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] -pub struct OutputItem { - pub kind: OutputKind, - pub link: Option, -} - -impl OutputItem { - pub fn new(kind: OutputKind) -> Self { - Self { kind, link: None } - } - pub fn set_link(&mut self, link: String) { - self.link = Some(link); - } - pub fn update_path(&mut self, path: PathBuf) { - self.kind = self.kind.with_new_path(path); - } -} - -// Guarantee that the output directory exists -// The process's current working directory is set to be the build directory -// This means that output directory should relatively referenced based on the CWD of this program -fn verify_output_dir( - output_directory: &Path, - suffix: &str, - task_id: &str, -) -> Result { - let builder_output_dir = output_directory.join(suffix).join(task_id); - // Create all required directories in the path - match fs::create_dir_all(&builder_output_dir) { - Ok(_) => Ok(builder_output_dir), - Err(e) => { - tracing::error!( - "Failed to create the output directory for task {}: {}. 
Confirm the task build directory is correct.", - task_id, - e - ); - Err(BuildError::InvalidOutputDirectory(e.to_string())) - } - } -} - -fn run_subprocess( - program: &str, - args: Vec<&str>, - build_manifest: &mut TaskBuildContainer, - build_envs: HashMap, -) -> Result<(), BuildError> { - tracing::debug!("Running subprocess: {} with args: {:?}", program, args); - - let output = std::process::Command::new(program) - .args(args) - .envs(build_envs) // Use merged environment instead of env_clear() - .current_dir(&build_manifest.task.build.directory) - .output(); - - let output = match output { - Ok(output) => output, - Err(e) => { - return Err(BuildError::ShellSubprocessError(format!( - "The build process of task {} failed prematurely: {}", - build_manifest.task.id, e - ))); - } - }; - if output.status.success() { - let stdout = String::from_utf8_lossy(&output.stdout); - for line in stdout.lines() { - tracing::info!("{}", line); - } - build_manifest.validate_output()?; - - // If the task has a seed-based flag, we must capture the resulting flag from the process output - // Stored into the file flags.json by default, using same key as the passed environment variable - map_rng_seed_to_flag( - &mut build_manifest.outputs, - &build_manifest.basedir, - build_manifest.task, - )?; - - Ok(()) - } else { - Err(BuildError::ShellSubprocessError(format!( - "The build process for task {} failed with non-zero exit code. Error: {}", - build_manifest.task.id, - std::str::from_utf8(&output.stderr).unwrap_or("Unable to read stderr") - ))) - } -} - -pub fn build_batch<'a>( - module_config: &'a ModuleConfiguration, - task_config: &'a Task, - output_directory: &'a Path, - validate: bool, -) -> Result, BuildError> { - if !task_config.build.directory.exists() { - return Err(BuildError::InvalidOutputDirectory( - task_config.build.directory.display().to_string(), - )); - } - - // Extract batch count from stages if present - there should be some - - let batch_count = match task_config.batch { - Some(ref config) => config.count, - None => { - tracing::error!("Batch count not found in task stages."); - return Err(BuildError::StageHadNoBatch(format!( - "the criminal was task {}", - task_config.id - ))); - } - }; - // generate a UUID for each task based on batch count - let uuids: Vec = (0..batch_count).map(|_| Uuid::now_v7()).collect(); - - // let mut flags_of_flags = Vec::with_capacity(task_config.stages.len()); - let builder_output_dir = verify_output_dir(output_directory, "", "")?; - // - let mut entries = Vec::with_capacity(uuids.len()); - for uuid_value in uuids { - let flags = create_flags_by_task(task_config, module_config, uuid_value); - - let expected_outputs: Vec = task_config - .build - .output - .iter() - .map(|output| OutputItem::new(output.kind.clone())) - .collect(); - - // Create UUID-specific directory for this batch - let task_instance_dir = - verify_output_dir(output_directory, &uuid_value.to_string(), &task_config.id)?; - - let entry = IntermediateOutput { - uuid: uuid_value, - stage_flags: flags, - task_instance_dir, - outputs: expected_outputs, - }; - - entries.push(entry); - } - // PANICS: We are creating the file in the output directory, which is guaranteed to exist (unless someone removed it between check and this point) - let mut build_manifest = TaskBuildContainer { - basedir: builder_output_dir, - task: task_config, - outputs: entries, - batched: true, - }; - - let json_path = output_directory.join(DEFAULT_BUILD_MANIFEST); - serde_json::to_writer_pretty(fs::File::create(&json_path).unwrap(), 
&build_manifest) - .map_err(|e| BuildError::SerdeDerserializationFailed(e.to_string()))?; - if validate { - return Ok(build_manifest); - } - - let mut build_envs = HashMap::from([( - "BUILD_MANIFEST".to_string(), - json_path.to_str().unwrap_or_default().to_string(), - )]); - let (program, program_args) = match task_config.build.builder { - Builder::Shell(ref entrypoint) => ("sh", vec![entrypoint.entrypoint.as_str()]), - Builder::Nix(ref entrypoint) => { - // For nix to work, we need to set the environment variables - let mut preserved_env = HashMap::new(); - let env_vars_to_preserve = [ - "PATH", - "NIX_PATH", - "NIX_PROFILES", - "NIX_SSL_CERT_FILE", - "NIX_STORE", - "NIX_REMOTE", - "NIX_USER_PROFILE_DIR", - ]; - - for var in &env_vars_to_preserve { - if let Ok(value) = std::env::var(var) { - preserved_env.insert(var.to_string(), value); - } - } - let final_env = preserved_env; - build_envs.extend(final_env); - - ("nix", vec!["run", ".", &entrypoint.entrypoint]) - } - }; - - run_subprocess(program, program_args, &mut build_manifest, build_envs)?; - - Ok(build_manifest) -} - -fn map_rng_seed_to_flag( - flags: &mut [IntermediateOutput], - builder_output_dir: &Path, - task_config: &Task, -) -> Result<(), BuildError> { - // TODO batch mode not supported - for flag in flags[0].stage_flags.iter_mut() { - let flag_key = flag.get_flag_type_value_pair().0; - if let Flag::RngSeed(rng_seed) = flag { - let path = task_config - .build - .output - .iter() - .find_map(|output| { - if let OutputKind::Flags(ref pathbuf) = output.kind { - Some(builder_output_dir.join(pathbuf)) - } else { - None - } - }) - .unwrap_or_else(|| builder_output_dir.join(DEFAULT_FLAGS_FILENAME)); - let file = match fs::File::open(&path) { - Ok(file) => file, - Err(e) => { - tracing::error!( - "Failed to open flags.json for task {}: {}", - task_config.id, - e - ); - std::process::exit(1); - } - }; - let reader = std::io::BufReader::new(file); - let seeded_flags: HashMap = serde_json::from_reader(reader)?; - // Same key than passed for the build process - if let Some(seed) = seeded_flags.get(&flag_key) { - rng_seed.update_suffix(seed.to_owned()); - } else { - return Err(BuildError::FlagCollectionError(format!( - "Seeded flag for task {} is not found from the output file", - task_config.id - ))); - } - } - } - Ok(()) -} - -/// Build that is supposed to repeat many times and generate different variations -pub fn build_sequential<'a>( - module_config: &'a ModuleConfiguration, - task_config: &'a Task, - uuid: Uuid, - output_directory: &Path, - // If the build is repeated, tells the number, starting from 1 - _build_number: usize, - validate: bool, -) -> Result { - let flags = create_flags_by_task(task_config, module_config, uuid); - // Create the base output directory - if !output_directory.exists() { - fs::create_dir_all(output_directory).map_err(|e| { - BuildError::InvalidOutputDirectory(format!( - "Failed to create the base output directory: {}", - e - )) - })?; - } - - // Guarantee that the output directory exists with UUID/task_id structure - let task_instance_dir = - verify_output_dir(output_directory, &uuid.to_string(), &task_config.id)?; - - let expected_outputs: Vec = task_config - .build - .output - .iter() - .map(|output| OutputItem::new(output.kind.clone())) - .collect(); - let intermediate = IntermediateOutput::new(uuid, flags, task_instance_dir, expected_outputs); - - let json_path = if validate { - // No race condition if we are validating - output_directory.join(DEFAULT_BUILD_MANIFEST) - } else { - // For sequential 
builds we must use the task instance directory to avoid race condition - intermediate.task_instance_dir.join(DEFAULT_BUILD_MANIFEST) - }; - let mut build_manifest = TaskBuildContainer { - basedir: output_directory.to_path_buf(), - task: task_config, - outputs: vec![intermediate], - batched: false, - }; - serde_json::to_writer_pretty(fs::File::create(&json_path).unwrap(), &build_manifest) - .map_err(|e| BuildError::SerdeDerserializationFailed(e.to_string()))?; - - // We are just validating configuration and build-manifest.json - if validate { - return Ok(build_manifest.outputs[0].clone()); - } - - let build_envs = HashMap::from([( - "BUILD_MANIFEST".to_string(), - json_path.to_str().unwrap_or_default().to_string(), - )]); - - match task_config.build.builder { - Builder::Shell(ref entrypoint) => { - tracing::debug!( - "Running shell command: {} in directory: {}", - entrypoint.entrypoint, - &task_config.build.directory.display() - ); - - run_subprocess( - "sh", - vec![&entrypoint.entrypoint], - &mut build_manifest, - build_envs, - )? - } - Builder::Nix(_) => todo!("Nix builder not implemented"), - } - debug_assert!( - build_manifest.outputs.len() == 1, - "The sequential build should have only one output" - ); - Ok(build_manifest.outputs.remove(0)) -} +use std::collections::HashMap; +use std::fs; +use std::path::{Path, PathBuf}; +// use tracing::instrument; +use uuid::Uuid; + +use crate::config::{ + BuildConfig, Builder, DEFAULT_BUILD_MANIFEST, DEFAULT_FLAGS_FILENAME, FlagVariantKind, + ModuleConfiguration, OutputKind, Task, +}; +use crate::errors::BuildError; +use crate::flag_generator::Flag; + +/// Represents the build process of a task, including the initial configuration and produced output files and flags. +#[derive(serde::Serialize, Debug)] +pub struct TaskBuildContainer<'a> { + pub basedir: PathBuf, + pub task: &'a Task, + /// For batch mode, this is > 1, for a sequential build, this is 1 + pub outputs: Vec, + batched: bool, +} +impl<'a> TaskBuildContainer<'a> { + pub fn new( + out_dir: PathBuf, + task: &'a Task, + outputs: Vec, + batched: bool, + ) -> Self { + Self { + basedir: out_dir, + task, + outputs, + batched, + } + } +} + +impl TaskBuildContainer<'_> { + pub fn validate_output(&mut self) -> Result<(), BuildError> { + for intermediate in &mut self.outputs { + for item in &mut intermediate.outputs { + // The task instance directory should be defined already + let pre_cano = &intermediate + .task_instance_dir + .join(item.kind.get_filename()); + let path = match pre_cano.canonicalize() { + Ok(p) => p, + Err(e) => { + tracing::error!("Failure in file '{}", &pre_cano.display(),); + tracing::error!( + "Failed to verify that build output path for file `{}` exist : {}. Is builder using given output directory correctly or configuration has unintentional output files defined?", + &item.kind.get_filename().to_string_lossy(), + e + ); + return Err(BuildError::OutputVerificationFailed(e.to_string())); + } + }; + item.update_path(path); + } + } + Ok(()) + } + /// Check if the task has any files to distribute other than the readme.txt. Defined by the existence of `OutputKind::Resource`. 
+ pub fn has_files_to_distribute(&self) -> bool { + self.outputs + .iter() + .any(|intermediate| !intermediate.get_resource_files().is_empty()) + } +} + +// All flags in a single task's stages +#[derive(serde::Serialize, Clone, Debug)] +pub struct IntermediateOutput { + pub uuid: Uuid, + pub stage_flags: Vec, + pub task_instance_dir: PathBuf, + pub outputs: Vec, +} + +impl IntermediateOutput { + pub fn new( + uuid: Uuid, + stage_flags: Vec, + task_instance_dir: PathBuf, + outputs: Vec, + ) -> Self { + Self { + uuid, + stage_flags, + task_instance_dir, + outputs, + } + } + pub fn validate_readme_count(&self) -> Result<(), BuildError> { + let readme_count = self + .outputs + .iter() + .filter(|output| matches!(output.kind, OutputKind::Readme(_))) + .count(); + if readme_count != 1 { + return Err(BuildError::OutputVerificationFailed(format!( + "Expected exactly one readme file, found {}", + readme_count + ))); + } + Ok(()) + } + + /// Files that should be delivered for the end-user + pub fn get_resource_files(&self) -> Vec { + self.outputs + .iter() + .filter_map(|output| match output.kind { + OutputKind::Resource(_) => Some(output.to_owned()), + _ => None, + }) + .collect() + } + /// Get readme.txt from the output files + pub fn get_readme(&self) -> Option<&OutputItem> { + self.outputs + .iter() + .find(|output| matches!(output.kind, OutputKind::Readme(_))) + } + /// Update common files that apply to all flag entries + pub fn update_files(&mut self, items: Vec) { + for item in items { + if let Some(index) = self.outputs.iter().position(|x| x.kind == item.kind) { + self.outputs[index] = item; + } else { + self.outputs.push(item); + } + } + } +} + +/// Build process can return metadata about the task +#[derive(serde::Serialize, serde::Deserialize, Debug)] +struct Meta { + pub task: String, + pub challenges: Vec, +} + +#[derive(serde::Serialize, serde::Deserialize, Debug)] +struct Challenge { + pub uuid: Uuid, + pub flag: String, + pub url: Option, +} + +fn create_flags_by_task<'a>( + task_config: &'a Task, + module_config: &'a ModuleConfiguration, + uuid: Uuid, +) -> Vec { + let mut flags = Vec::with_capacity(task_config.stages.len()); + for stage in &task_config.stages { + // Get ID from stage or fall back to task ID + let id = stage.id.as_deref().unwrap_or(&task_config.id); + let flag = match stage.flag.kind { + FlagVariantKind::UserDerived => Flag::new_user_flag( + id.into(), + &module_config.flag_config.user_derived.algorithm, + &module_config.flag_config.user_derived.secret, + id, + &uuid, + ), + FlagVariantKind::PureRandom => { + Flag::new_random_flag(id.into(), module_config.flag_config.pure_random.length) + } + FlagVariantKind::RngSeed => Flag::new_rng_seed( + id.into(), + &module_config.flag_config.user_derived.algorithm, + &module_config.flag_config.user_derived.secret, + id, + &uuid, + ), + }; + flags.push(flag); + } + flags +} + +#[allow(dead_code)] +fn get_build_info( + module_config: &mut ModuleConfiguration, + task_id: String, +) -> Result<&BuildConfig, String> { + for category in &mut module_config.categories { + for task in &category.tasks { + if task_id == task.id { + return Ok(task.build.as_ref()); + } + } + } + Err(format!( + "Build information for task with id {} not found!", + task_id + )) +} +/// Couple output items together, so we link points to the correct output +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +pub struct OutputItem { + pub kind: OutputKind, + pub link: Option, +} + +impl OutputItem { + pub fn new(kind: OutputKind) -> Self { + Self { 
kind, link: None } + } + pub fn set_link(&mut self, link: String) { + self.link = Some(link); + } + pub fn update_path(&mut self, path: PathBuf) { + self.kind = self.kind.with_new_path(path); + } +} + +// Guarantee that the output directory exists +// The process's current working directory is set to be the build directory +// This means that output directory should relatively referenced based on the CWD of this program +fn verify_output_dir( + output_directory: &Path, + suffix: &str, + task_id: &str, +) -> Result { + let builder_output_dir = output_directory.join(suffix).join(task_id); + // Create all required directories in the path + match fs::create_dir_all(&builder_output_dir) { + Ok(_) => Ok(builder_output_dir), + Err(e) => { + tracing::error!( + "Failed to create the output directory for task {}: {}. Confirm the task build directory is correct.", + task_id, + e + ); + Err(BuildError::InvalidOutputDirectory(e.to_string())) + } + } +} + +fn run_subprocess( + program: &str, + args: Vec<&str>, + build_manifest: &mut TaskBuildContainer, + build_envs: HashMap, +) -> Result<(), BuildError> { + tracing::debug!("Running subprocess: {} with args: {:?}", program, args); + + let output = std::process::Command::new(program) + .args(args) + .envs(build_envs) // Use merged environment instead of env_clear() + .current_dir(&build_manifest.task.build.directory) + .output(); + + let output = match output { + Ok(output) => output, + Err(e) => { + return Err(BuildError::ShellSubprocessError(format!( + "The build process of task {} failed prematurely: {}", + build_manifest.task.id, e + ))); + } + }; + if output.status.success() { + let stdout = String::from_utf8_lossy(&output.stdout); + for line in stdout.lines() { + tracing::info!("{}", line); + } + build_manifest.validate_output()?; + + // If the task has a seed-based flag, we must capture the resulting flag from the process output + // Stored into the file flags.json by default, using same key as the passed environment variable + map_rng_seed_to_flag( + &mut build_manifest.outputs, + &build_manifest.basedir, + build_manifest.task, + )?; + + Ok(()) + } else { + Err(BuildError::ShellSubprocessError(format!( + "The build process for task {} failed with non-zero exit code. 
Error: {}", + build_manifest.task.id, + std::str::from_utf8(&output.stderr).unwrap_or("Unable to read stderr") + ))) + } +} + +pub fn build_batch<'a>( + module_config: &'a ModuleConfiguration, + task_config: &'a Task, + output_directory: &'a Path, + validate: bool, +) -> Result, BuildError> { + if !task_config.build.directory.exists() { + return Err(BuildError::InvalidOutputDirectory( + task_config.build.directory.display().to_string(), + )); + } + + // Extract batch count from stages if present - there should be some + + let batch_count = match task_config.batch { + Some(ref config) => config.count, + None => { + tracing::error!("Batch count not found in task stages."); + return Err(BuildError::StageHadNoBatch(format!( + "the criminal was task {}", + task_config.id + ))); + } + }; + // generate a UUID for each task based on batch count + let uuids: Vec = (0..batch_count).map(|_| Uuid::now_v7()).collect(); + + // let mut flags_of_flags = Vec::with_capacity(task_config.stages.len()); + let builder_output_dir = verify_output_dir(output_directory, "", "")?; + // + let mut entries = Vec::with_capacity(uuids.len()); + for uuid_value in uuids { + let flags = create_flags_by_task(task_config, module_config, uuid_value); + + let expected_outputs: Vec = task_config + .build + .output + .iter() + .map(|output| OutputItem::new(output.kind.clone())) + .collect(); + + // Create UUID-specific directory for this batch + let task_instance_dir = + verify_output_dir(output_directory, &uuid_value.to_string(), &task_config.id)?; + + let entry = IntermediateOutput { + uuid: uuid_value, + stage_flags: flags, + task_instance_dir, + outputs: expected_outputs, + }; + + entries.push(entry); + } + // PANICS: We are creating the file in the output directory, which is guaranteed to exist (unless someone removed it between check and this point) + let mut build_manifest = TaskBuildContainer { + basedir: builder_output_dir, + task: task_config, + outputs: entries, + batched: true, + }; + + let json_path = output_directory.join(DEFAULT_BUILD_MANIFEST); + serde_json::to_writer_pretty(fs::File::create(&json_path).unwrap(), &build_manifest) + .map_err(|e| BuildError::SerdeDerserializationFailed(e.to_string()))?; + if validate { + return Ok(build_manifest); + } + + let mut build_envs = HashMap::from([( + "BUILD_MANIFEST".to_string(), + json_path.to_str().unwrap_or_default().to_string(), + )]); + let (program, program_args) = match task_config.build.builder { + Builder::Shell(ref entrypoint) => ("sh", vec![entrypoint.entrypoint.as_str()]), + Builder::Nix(ref entrypoint) => { + // For nix to work, we need to set the environment variables + let mut preserved_env = HashMap::new(); + let env_vars_to_preserve = [ + "PATH", + "NIX_PATH", + "NIX_PROFILES", + "NIX_SSL_CERT_FILE", + "NIX_STORE", + "NIX_REMOTE", + "NIX_USER_PROFILE_DIR", + ]; + + for var in &env_vars_to_preserve { + if let Ok(value) = std::env::var(var) { + preserved_env.insert(var.to_string(), value); + } + } + let final_env = preserved_env; + build_envs.extend(final_env); + + ("nix", vec!["run", ".", &entrypoint.entrypoint]) + } + }; + + run_subprocess(program, program_args, &mut build_manifest, build_envs)?; + + Ok(build_manifest) +} + +fn map_rng_seed_to_flag( + flags: &mut [IntermediateOutput], + builder_output_dir: &Path, + task_config: &Task, +) -> Result<(), BuildError> { + // TODO batch mode not supported + for flag in flags[0].stage_flags.iter_mut() { + let flag_key = flag.get_flag_type_value_pair().0; + if let Flag::RngSeed(rng_seed) = flag { + let path = 
task_config + .build + .output + .iter() + .find_map(|output| { + if let OutputKind::Flags(ref pathbuf) = output.kind { + Some(builder_output_dir.join(pathbuf)) + } else { + None + } + }) + .unwrap_or_else(|| builder_output_dir.join(DEFAULT_FLAGS_FILENAME)); + let file = match fs::File::open(&path) { + Ok(file) => file, + Err(e) => { + tracing::error!( + "Failed to open flags.json for task {}: {}", + task_config.id, + e + ); + std::process::exit(1); + } + }; + let reader = std::io::BufReader::new(file); + let seeded_flags: HashMap = serde_json::from_reader(reader)?; + // Same key than passed for the build process + if let Some(seed) = seeded_flags.get(&flag_key) { + rng_seed.update_suffix(seed.to_owned()); + } else { + return Err(BuildError::FlagCollectionError(format!( + "Seeded flag for task {} is not found from the output file", + task_config.id + ))); + } + } + } + Ok(()) +} + +/// Build that is supposed to repeat many times and generate different variations +pub fn build_sequential<'a>( + module_config: &'a ModuleConfiguration, + task_config: &'a Task, + uuid: Uuid, + output_directory: &Path, + // If the build is repeated, tells the number, starting from 1 + _build_number: usize, + validate: bool, +) -> Result { + let flags = create_flags_by_task(task_config, module_config, uuid); + // Create the base output directory + if !output_directory.exists() { + fs::create_dir_all(output_directory).map_err(|e| { + BuildError::InvalidOutputDirectory(format!( + "Failed to create the base output directory: {}", + e + )) + })?; + } + + // Guarantee that the output directory exists with UUID/task_id structure + let task_instance_dir = + verify_output_dir(output_directory, &uuid.to_string(), &task_config.id)?; + + let expected_outputs: Vec = task_config + .build + .output + .iter() + .map(|output| OutputItem::new(output.kind.clone())) + .collect(); + let intermediate = IntermediateOutput::new(uuid, flags, task_instance_dir, expected_outputs); + + let json_path = if validate { + // No race condition if we are validating + output_directory.join(DEFAULT_BUILD_MANIFEST) + } else { + // For sequential builds we must use the task instance directory to avoid race condition + intermediate.task_instance_dir.join(DEFAULT_BUILD_MANIFEST) + }; + let mut build_manifest = TaskBuildContainer { + basedir: output_directory.to_path_buf(), + task: task_config, + outputs: vec![intermediate], + batched: false, + }; + serde_json::to_writer_pretty(fs::File::create(&json_path).unwrap(), &build_manifest) + .map_err(|e| BuildError::SerdeDerserializationFailed(e.to_string()))?; + + // We are just validating configuration and build-manifest.json + if validate { + return Ok(build_manifest.outputs[0].clone()); + } + + let build_envs = HashMap::from([( + "BUILD_MANIFEST".to_string(), + json_path.to_str().unwrap_or_default().to_string(), + )]); + + match task_config.build.builder { + Builder::Shell(ref entrypoint) => { + tracing::debug!( + "Running shell command: {} in directory: {}", + entrypoint.entrypoint, + &task_config.build.directory.display() + ); + + run_subprocess( + "sh", + vec![&entrypoint.entrypoint], + &mut build_manifest, + build_envs, + )? 
+        }
+        Builder::Nix(_) => todo!("Nix builder not implemented"),
+    }
+    debug_assert!(
+        build_manifest.outputs.len() == 1,
+        "The sequential build should have only one output"
+    );
+    Ok(build_manifest.outputs.remove(0))
+}
+
+/// Build task function for serverside use
+pub async fn build_task<'a>(
+    module_config: &'a ModuleConfiguration,
+    task_id: &str,
+    uuid: Uuid,
+) -> Result<TaskBuildContainer<'a>, BuildError> {
+    // The task has to exist
+    if let Some(task) = module_config.get_task_by_id(task_id) {
+        let path = task.build.directory.clone();
+        // The path must exist; this is checked in the module configuration.
+        // Check if the output directory exists; if not, create it.
+        let output_dir = path.join("output");
+        tokio::fs::create_dir_all(&output_dir)
+            .await
+            .map_err(|e| BuildError::InvalidOutputDirectory(e.to_string()))?;
+        // There is no student folder inside the output directory yet; this is checked beforehand.
+        let student_output_dir = output_dir.join(uuid.to_string());
+        // TODO: Add optional execution where student files are not saved
+        tokio::fs::create_dir_all(&student_output_dir)
+            .await
+            .map_err(|e| BuildError::InvalidOutputDirectory(e.to_string()))?;
+
+        let flags = create_flags_by_task(task, module_config, uuid);
+
+        let expected_outputs: Vec<OutputItem> = task
+            .build
+            .output
+            .iter()
+            .map(|output| OutputItem::new(output.kind.clone()))
+            .collect();
+
+        let intermediate_output =
+            IntermediateOutput::new(uuid, flags, student_output_dir.clone(), expected_outputs);
+
+        let mut build_container = TaskBuildContainer::new(
+            student_output_dir.to_path_buf(),
+            task,
+            vec![intermediate_output],
+            false,
+        );
+
+        let manifest_path = student_output_dir.join("build-manifest.json");
+
+        tokio::fs::write(
+            &manifest_path,
+            serde_json::to_string_pretty(&build_container)?,
+        )
+        .await
+        .map_err(|e| BuildError::SerdeDerserializationFailed(e.to_string()))?;
+
+        let build_envs = HashMap::from([(
+            "BUILD_MANIFEST".to_string(),
+            manifest_path.to_str().unwrap_or_default().to_string(),
+        )]);
+
+        match task.build.builder {
+            Builder::Shell(ref entrypoint) => {
+                tracing::debug!(
+                    "Running shell command: {} in directory: {}",
+                    entrypoint.entrypoint,
+                    student_output_dir.display()
+                );
+
+                run_subprocess(
+                    "sh",
+                    vec![&entrypoint.entrypoint],
+                    &mut build_container,
+                    build_envs,
+                )?;
+            }
+            Builder::Nix(_) => todo!("Nix builder not implemented"),
+        }
+
+        // Validate the output files
+        build_container.validate_output()?;
+
+        Ok(build_container)
+    } else {
+        Err(BuildError::TaskNotFound(task_id.to_string()))
+    }
+}
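A sketch of how the new `build_task` could back the README workflow from an axum handler. `AppState`, `BuildRequest`, the error mapping, and the assumption that `build_process` is exported from the ainigma crate are all illustrative, not part of this diff:

```rust
use ainigma::build_process::build_task;
use ainigma::config::ModuleConfiguration;
use axum::{extract::State, http::StatusCode, Json};
use std::sync::Arc;
use uuid::Uuid;

// Assumed shared state: course config loaded once at startup (see check_all_config).
#[derive(Clone)]
struct AppState {
    module_config: Arc<ModuleConfiguration>,
}

#[derive(serde::Deserialize)]
struct BuildRequest {
    user_id: Uuid,
    task_id: String,
}

async fn build_handler(
    State(state): State<AppState>,
    Json(req): Json<BuildRequest>,
) -> Result<StatusCode, (StatusCode, String)> {
    // Builds into <task dir>/output/<user uuid>/ and validates the outputs.
    // A real handler would return the container's readme and resource links.
    build_task(&state.module_config, &req.task_id, req.user_id)
        .await
        .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
    Ok(StatusCode::CREATED)
}
```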
diff --git a/src/config.rs b/ainigma/src/config.rs
similarity index 96%
rename from src/config.rs
rename to ainigma/src/config.rs
index bfd6001..2f7fc6f 100644
--- a/src/config.rs
+++ b/ainigma/src/config.rs
@@ -698,6 +698,24 @@ pub fn read_check_toml(filepath: &OsStr) -> Result<ModuleConfiguration, ConfigError> {
 }
 
+pub fn read_toml(filepath: PathBuf) -> Result<ModuleConfiguration, ConfigError> {
+    let mut file = File::open(filepath).map_err(|err| ConfigError::TomlParseError {
+        message: format!("Failed to open file: {err}"),
+    })?;
+
+    let mut file_content = String::new();
+    file.read_to_string(&mut file_content)
+        .map_err(|err| ConfigError::TomlParseError {
+            message: format!("Failed to read file content: {err}"),
+        })?;
+    let module_config =
+        toml::from_str(&file_content).map_err(|err| ConfigError::TomlParseError {
+            message: err.to_string(),
+        })?;
+    Ok(module_config)
+}
 #[cfg(test)]
 mod tests {
     use insta::assert_debug_snapshot;
diff --git a/src/errors/base.rs b/ainigma/src/errors/base.rs
similarity index 100%
rename from src/errors/base.rs
rename to ainigma/src/errors/base.rs
diff --git a/src/errors/build.rs b/ainigma/src/errors/build.rs
similarity index 92%
rename from src/errors/build.rs
rename to ainigma/src/errors/build.rs
index 07e7a73..19e7153 100644
--- a/src/errors/build.rs
+++ b/ainigma/src/errors/build.rs
@@ -21,6 +21,8 @@ pub enum BuildError {
     // Stage had not batch when attempting batch build
     #[error("Any stage had no batch when attempting batch build: {0}")]
     StageHadNoBatch(String),
+    #[error("Task with ID {0} not found in module configuration")]
+    TaskNotFound(String),
 }
 
 impl From<serde_json::Error> for BuildError {
diff --git a/src/errors/config.rs b/ainigma/src/errors/config.rs
similarity index 95%
rename from src/errors/config.rs
rename to ainigma/src/errors/config.rs
index b8235af..37d1708 100644
--- a/src/errors/config.rs
+++ b/ainigma/src/errors/config.rs
@@ -30,4 +30,6 @@ pub enum ConfigError {
     StageError(&'static str),
     #[error("Invalid build mode '{0}'. Available modes: [{1}]")]
     BuildModeError(String, String),
+    #[error("Error reading file: {0}")]
+    FileReadError(String),
 }
diff --git a/src/errors/mod.rs b/ainigma/src/errors/mod.rs
similarity index 100%
rename from src/errors/mod.rs
rename to ainigma/src/errors/mod.rs
diff --git a/src/flag_generator.rs b/ainigma/src/flag_generator.rs
similarity index 100%
rename from src/flag_generator.rs
rename to ainigma/src/flag_generator.rs
diff --git a/src/lib.rs b/ainigma/src/lib.rs
similarity index 100%
rename from src/lib.rs
rename to ainigma/src/lib.rs
diff --git a/src/moodle.rs b/ainigma/src/moodle.rs
similarity index 98%
rename from src/moodle.rs
rename to ainigma/src/moodle.rs
index 8159baf..9a8640a 100644
--- a/src/moodle.rs
+++ b/ainigma/src/moodle.rs
@@ -84,7 +84,7 @@ pub fn create_exam(
             };
             question
                 .add_answers(answers)
-                .map_err(|e| io::Error::new(io::ErrorKind::Other, format!("Error: {:?}", e)))?;
+                .map_err(|e| io::Error::other(format!("Error: {:?}", e)))?;
             questions.push(question.into());
         }
         None => {
@@ -98,7 +98,7 @@
     let categories = vec![category.into()];
     quiz.set_categories(categories);
     quiz.to_xml(filename)
-        .map_err(|e| io::Error::new(io::ErrorKind::Other, format!("Error: {:?}", e)))?;
+        .map_err(|e| io::Error::other(format!("Error: {:?}", e)))?;
     Ok(())
 }
diff --git a/src/snapshots/ainigma__config__tests__batch_deserialization-2.snap b/ainigma/src/snapshots/ainigma__config__tests__batch_deserialization-2.snap
similarity index 100%
rename from src/snapshots/ainigma__config__tests__batch_deserialization-2.snap
rename to ainigma/src/snapshots/ainigma__config__tests__batch_deserialization-2.snap
diff --git a/src/snapshots/ainigma__config__tests__batch_deserialization.snap b/ainigma/src/snapshots/ainigma__config__tests__batch_deserialization.snap
similarity index 100%
rename from src/snapshots/ainigma__config__tests__batch_deserialization.snap
rename to ainigma/src/snapshots/ainigma__config__tests__batch_deserialization.snap
diff --git a/src/snapshots/ainigma__config__tests__deployment_upload_config.snap b/ainigma/src/snapshots/ainigma__config__tests__deployment_upload_config.snap
similarity index 100%
rename from src/snapshots/ainigma__config__tests__deployment_upload_config.snap
rename to ainigma/src/snapshots/ainigma__config__tests__deployment_upload_config.snap
diff --git a/src/snapshots/ainigma__config__tests__disabled_build_modes.snap b/ainigma/src/snapshots/ainigma__config__tests__disabled_build_modes.snap
similarity index 100%
rename from src/snapshots/ainigma__config__tests__disabled_build_modes.snap
rename to ainigma/src/snapshots/ainigma__config__tests__disabled_build_modes.snap
diff --git a/src/snapshots/ainigma__config__tests__toml.snap b/ainigma/src/snapshots/ainigma__config__tests__toml.snap
similarity index 100%
rename from src/snapshots/ainigma__config__tests__toml.snap
rename to ainigma/src/snapshots/ainigma__config__tests__toml.snap
diff --git a/src/storages.rs b/ainigma/src/storages.rs
similarity index 100%
rename from src/storages.rs
rename to ainigma/src/storages.rs
diff --git a/src/storages/s3.rs b/ainigma/src/storages/s3.rs
similarity index 100%
rename from src/storages/s3.rs
rename to ainigma/src/storages/s3.rs
diff --git a/src/storages/storage.rs b/ainigma/src/storages/storage.rs
similarity index 100%
rename from src/storages/storage.rs
rename to ainigma/src/storages/storage.rs
diff --git a/src/storages/upload.rs b/ainigma/src/storages/upload.rs
similarity index 100%
rename from src/storages/upload.rs
rename to ainigma/src/storages/upload.rs
diff --git a/tests/README.txt b/ainigma/tests/README.txt
similarity index 100%
rename from tests/README.txt
rename to ainigma/tests/README.txt
diff --git a/tests/batch_build.rs b/ainigma/tests/batch_build.rs
similarity index 100%
rename from tests/batch_build.rs
rename to ainigma/tests/batch_build.rs
diff --git a/tests/data/configs/batch_count.toml b/ainigma/tests/data/configs/batch_count.toml
similarity index 100%
rename from tests/data/configs/batch_count.toml
rename to ainigma/tests/data/configs/batch_count.toml
diff --git a/tests/data/configs/default.toml b/ainigma/tests/data/configs/default.toml
similarity index 100%
rename from tests/data/configs/default.toml
rename to ainigma/tests/data/configs/default.toml
diff --git a/tests/data/configs/deployment_upload.toml b/ainigma/tests/data/configs/deployment_upload.toml
similarity index 100%
rename from tests/data/configs/deployment_upload.toml
rename to ainigma/tests/data/configs/deployment_upload.toml
diff --git a/tests/data/configs/no_batch.toml b/ainigma/tests/data/configs/no_batch.toml
similarity index 100%
rename from tests/data/configs/no_batch.toml
rename to ainigma/tests/data/configs/no_batch.toml
diff --git a/tests/data/configs/no_sequential.toml b/ainigma/tests/data/configs/no_sequential.toml
similarity index 100%
rename from tests/data/configs/no_sequential.toml
rename to ainigma/tests/data/configs/no_sequential.toml
diff --git a/tests/data/configs/simple_shell.toml b/ainigma/tests/data/configs/simple_shell.toml
similarity index 100%
rename from tests/data/configs/simple_shell.toml
rename to ainigma/tests/data/configs/simple_shell.toml
diff --git a/tests/sequential_build.rs b/ainigma/tests/sequential_build.rs
similarity index 100%
rename from tests/sequential_build.rs
rename to ainigma/tests/sequential_build.rs
diff --git a/tests/tasks/reverse1/custom_entry.sh b/ainigma/tests/tasks/reverse1/custom_entry.sh
similarity index 100%
rename from tests/tasks/reverse1/custom_entry.sh
rename to ainigma/tests/tasks/reverse1/custom_entry.sh
diff --git a/tests/tasks/reverse1/entrypoint.sh b/ainigma/tests/tasks/reverse1/entrypoint.sh
old mode 100755
new mode 100644
similarity index 100%
rename from tests/tasks/reverse1/entrypoint.sh
rename to ainigma/tests/tasks/reverse1/entrypoint.sh
diff --git a/tests/tasks/reverse1/source.cpp b/ainigma/tests/tasks/reverse1/source.cpp
similarity index 100%
rename from tests/tasks/reverse1/source.cpp
rename to ainigma/tests/tasks/reverse1/source.cpp
diff --git a/tests/tasks/simple_shell/build.sh b/ainigma/tests/tasks/simple_shell/build.sh
similarity index 100%
rename from tests/tasks/simple_shell/build.sh
rename to ainigma/tests/tasks/simple_shell/build.sh