diff --git a/package-lock.json b/package-lock.json
new file mode 100644
index 00000000..ee4adcb4
--- /dev/null
+++ b/package-lock.json
@@ -0,0 +1,203 @@
+{
+  "name": "node-nodejs-fundamentals",
+  "version": "1.0.0",
+  "lockfileVersion": 3,
+  "requires": true,
+  "packages": {
+    "": {
+      "name": "node-nodejs-fundamentals",
+      "version": "1.0.0",
+      "license": "ISC",
+      "dependencies": {
+        "redline": "^1.0.1",
+        "tar-stream": "^3.1.8"
+      },
+      "engines": {
+        "node": ">=24.10.0",
+        "npm": ">=10.9.2"
+      }
+    },
+    "node_modules/b4a": {
+      "version": "1.8.0",
+      "resolved": "https://registry.npmjs.org/b4a/-/b4a-1.8.0.tgz",
+      "integrity": "sha512-qRuSmNSkGQaHwNbM7J78Wwy+ghLEYF1zNrSeMxj4Kgw6y33O3mXcQ6Ie9fRvfU/YnxWkOchPXbaLb73TkIsfdg==",
+      "license": "Apache-2.0",
+      "peerDependencies": {
+        "react-native-b4a": "*"
+      },
+      "peerDependenciesMeta": {
+        "react-native-b4a": {
+          "optional": true
+        }
+      }
+    },
+    "node_modules/bare-events": {
+      "version": "2.8.2",
+      "resolved": "https://registry.npmjs.org/bare-events/-/bare-events-2.8.2.tgz",
+      "integrity": "sha512-riJjyv1/mHLIPX4RwiK+oW9/4c3TEUeORHKefKAKnZ5kyslbN+HXowtbaVEqt4IMUB7OXlfixcs6gsFeo/jhiQ==",
+      "license": "Apache-2.0",
+      "peerDependencies": {
+        "bare-abort-controller": "*"
+      },
+      "peerDependenciesMeta": {
+        "bare-abort-controller": {
+          "optional": true
+        }
+      }
+    },
+    "node_modules/bare-fs": {
+      "version": "4.5.5",
+      "resolved": "https://registry.npmjs.org/bare-fs/-/bare-fs-4.5.5.tgz",
+      "integrity": "sha512-XvwYM6VZqKoqDll8BmSww5luA5eflDzY0uEFfBJtFKe4PAAtxBjU3YIxzIBzhyaEQBy1VXEQBto4cpN5RZJw+w==",
+      "license": "Apache-2.0",
+      "dependencies": {
+        "bare-events": "^2.5.4",
+        "bare-path": "^3.0.0",
+        "bare-stream": "^2.6.4",
+        "bare-url": "^2.2.2",
+        "fast-fifo": "^1.3.2"
+      },
+      "engines": {
+        "bare": ">=1.16.0"
+      },
+      "peerDependencies": {
+        "bare-buffer": "*"
+      },
+      "peerDependenciesMeta": {
+        "bare-buffer": {
+          "optional": true
+        }
+      }
+    },
+    "node_modules/bare-os": {
+      "version": "3.7.1",
+      "resolved": "https://registry.npmjs.org/bare-os/-/bare-os-3.7.1.tgz",
+      "integrity": "sha512-ebvMaS5BgZKmJlvuWh14dg9rbUI84QeV3WlWn6Ph6lFI8jJoh7ADtVTyD2c93euwbe+zgi0DVrl4YmqXeM9aIA==",
+      "license": "Apache-2.0",
+      "engines": {
+        "bare": ">=1.14.0"
+      }
+    },
+    "node_modules/bare-path": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/bare-path/-/bare-path-3.0.0.tgz",
+      "integrity": "sha512-tyfW2cQcB5NN8Saijrhqn0Zh7AnFNsnczRcuWODH0eYAXBsJ5gVxAUuNr7tsHSC6IZ77cA0SitzT+s47kot8Mw==",
+      "license": "Apache-2.0",
+      "dependencies": {
+        "bare-os": "^3.0.1"
+      }
+    },
+    "node_modules/bare-stream": {
+      "version": "2.8.0",
+      "resolved": "https://registry.npmjs.org/bare-stream/-/bare-stream-2.8.0.tgz",
+      "integrity": "sha512-reUN0M2sHRqCdG4lUK3Fw8w98eeUIZHL5c3H7Mbhk2yVBL+oofgaIp0ieLfD5QXwPCypBpmEEKU2WZKzbAk8GA==",
+      "license": "Apache-2.0",
+      "dependencies": {
+        "streamx": "^2.21.0",
+        "teex": "^1.0.1"
+      },
+      "peerDependencies": {
+        "bare-buffer": "*",
+        "bare-events": "*"
+      },
+      "peerDependenciesMeta": {
+        "bare-buffer": {
+          "optional": true
+        },
+        "bare-events": {
+          "optional": true
+        }
+      }
+    },
+    "node_modules/bare-url": {
+      "version": "2.3.2",
+      "resolved": "https://registry.npmjs.org/bare-url/-/bare-url-2.3.2.tgz",
+      "integrity": "sha512-ZMq4gd9ngV5aTMa5p9+UfY0b3skwhHELaDkhEHetMdX0LRkW9kzaym4oo/Eh+Ghm0CCDuMTsRIGM/ytUc1ZYmw==",
+      "license": "Apache-2.0",
+      "dependencies": {
+        "bare-path": "^3.0.0"
+      }
+    },
+    "node_modules/events-universal": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/events-universal/-/events-universal-1.0.1.tgz",
+      "integrity": "sha512-LUd5euvbMLpwOF8m6ivPCbhQeSiYVNb8Vs0fQ8QjXo0JTkEHpz8pxdQf0gStltaPpw0Cca8b39KxvK9cfKRiAw==",
+      "license": "Apache-2.0",
+      "dependencies": {
+        "bare-events": "^2.7.0"
+      }
+    },
+    "node_modules/fast-fifo": {
+      "version": "1.3.2",
+      "resolved": "https://registry.npmjs.org/fast-fifo/-/fast-fifo-1.3.2.tgz",
+      "integrity": "sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ==",
+      "license": "MIT"
+    },
+    "node_modules/jquery": {
+      "version": "2.2.4",
+      "resolved": "https://registry.npmjs.org/jquery/-/jquery-2.2.4.tgz",
+      "integrity": "sha512-lBHj60ezci2u1v2FqnZIraShGgEXq35qCzMv4lITyHGppTnA13rwR0MgwyNJh9TnDs3aXUvd1xjAotfraMHX/Q==",
+      "deprecated": "This version is deprecated. Please upgrade to the latest version or find support at https://www.herodevs.com/support/jquery-nes.",
+      "license": "MIT"
+    },
+    "node_modules/jquery-bridget": {
+      "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/jquery-bridget/-/jquery-bridget-1.1.0.tgz",
+      "integrity": "sha512-bCaa6mjKEJsB1nOt2sJ8rlbUGdT1F0eX31h6j6i1DmqHWZeY/5ikU8uHcrl1B3m9DeCy1YFOkhdCIoB3zVXmIQ==",
+      "license": "MIT",
+      "dependencies": {
+        "jquery": ">=1.4.2 <3"
+      }
+    },
+    "node_modules/redline": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/redline/-/redline-1.0.1.tgz",
+      "integrity": "sha512-13lGxf2cPQKHsiIi6FZOjzcSKZTYjIQb1WyDHbfCmxwLd4DDd3laSjqCnPK+FJG7mXwMSG5sQPmoN+e7qyumzg==",
+      "license": "MIT",
+      "dependencies": {
+        "jquery-bridget": "^1.1.0"
+      }
+    },
+    "node_modules/streamx": {
+      "version": "2.23.0",
+      "resolved": "https://registry.npmjs.org/streamx/-/streamx-2.23.0.tgz",
+      "integrity": "sha512-kn+e44esVfn2Fa/O0CPFcex27fjIL6MkVae0Mm6q+E6f0hWv578YCERbv+4m02cjxvDsPKLnmxral/rR6lBMAg==",
+      "license": "MIT",
+      "dependencies": {
+        "events-universal": "^1.0.0",
+        "fast-fifo": "^1.3.2",
+        "text-decoder": "^1.1.0"
+      }
+    },
+    "node_modules/tar-stream": {
+      "version": "3.1.8",
+      "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-3.1.8.tgz",
+      "integrity": "sha512-U6QpVRyCGHva435KoNWy9PRoi2IFYCgtEhq9nmrPPpbRacPs9IH4aJ3gbrFC8dPcXvdSZ4XXfXT5Fshbp2MtlQ==",
+      "license": "MIT",
+      "dependencies": {
+        "b4a": "^1.6.4",
+        "bare-fs": "^4.5.5",
+        "fast-fifo": "^1.2.0",
+        "streamx": "^2.15.0"
+      }
+    },
+    "node_modules/teex": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/teex/-/teex-1.0.1.tgz",
+      "integrity": "sha512-eYE6iEI62Ni1H8oIa7KlDU6uQBtqr4Eajni3wX7rpfXD8ysFx8z0+dri+KWEPWpBsxXfxu58x/0jvTVT1ekOSg==",
+      "license": "MIT",
+      "dependencies": {
+        "streamx": "^2.12.5"
+      }
+    },
+    "node_modules/text-decoder": {
+      "version": "1.2.7",
+      "resolved": "https://registry.npmjs.org/text-decoder/-/text-decoder-1.2.7.tgz",
+      "integrity": "sha512-vlLytXkeP4xvEq2otHeJfSQIRyWxo/oZGEbXrtEEF9Hnmrdly59sUbzZ/QgyWuLYHctCHxFF4tRQZNQ9k60ExQ==",
+      "license": "Apache-2.0",
+      "dependencies": {
+        "b4a": "^1.6.4"
+      }
+    }
+  }
+}
diff --git a/package.json b/package.json
index dfecb12a..1378f88b 100644
--- a/package.json
+++ b/package.json
@@ -34,5 +34,9 @@
     "assignments"
   ],
   "author": "alreadybored",
-  "license": "ISC"
+  "license": "ISC",
+  "dependencies": {
+    "redline": "^1.0.1",
+    "tar-stream": "^3.1.8"
+  }
 }
diff --git a/src/cli/interactive.js b/src/cli/interactive.js
index d0e3e0d9..add2597d 100644
--- a/src/cli/interactive.js
+++ b/src/cli/interactive.js
@@ -1,8 +1,58 @@
+import readline from "node:readline";
+import { stdin as input, stdout as output } from "node:process";
+
 const interactive = () => {
-  // Write your code here
-  // Use readline module for interactive CLI
-  // Support commands: uptime, cwd, date, exit
-  // Handle Ctrl+C and unknown commands
+  const rl = readline.createInterface({
+    input,
+    output,
+    prompt: "> ",
+  });
+
+  function handleCommand(cmd) {
+    const command = cmd.trim();
+
+    switch (command) {
+      case "uptime":
+        console.log(`Uptime: ${process.uptime().toFixed(2)}s`);
+        break;
+
+      case "cwd":
+        console.log(process.cwd());
+        break;
+
+      case "date":
+        console.log(new Date().toISOString());
+        break;
+
+      case "exit":
+        // The "close" handler prints the farewell, so avoid duplicating it here.
+        rl.close();
+        return;
+
+      case "":
+        break;
+
+      default:
+        console.log("Unknown command");
+    }
+
+    rl.prompt();
+  }
+
+  rl.prompt();
+
+  rl.on("line", handleCommand);
+
+  rl.on("close", () => {
+    console.log("Goodbye!");
+    process.exit(0);
+  });
+
+  // Handle Ctrl+C: closing the interface routes through the single
+  // "close" farewell above instead of printing "Goodbye!" twice.
+  rl.on("SIGINT", () => {
+    rl.close();
+  });
 };
 
 interactive();
diff --git a/src/cli/progress.js b/src/cli/progress.js
index 3e060763..9e2c12c5 100644
--- a/src/cli/progress.js
+++ b/src/cli/progress.js
@@ -1,8 +1,58 @@
 const progress = () => {
-  // Write your code here
-  // Simulate progress bar from 0% to 100% over ~5 seconds
-  // Update in place using \r every 100ms
-  // Format: [████████████████████          ] 67%
+  const args = process.argv.slice(2);
+
+  const getArg = (name, def) => {
+    const index = args.indexOf(name);
+    if (index !== -1 && args[index + 1]) {
+      return args[index + 1];
+    }
+    return def;
+  };
+
+  const duration = Number(getArg("--duration", 5000));
+  const interval = Number(getArg("--interval", 100));
+  const length = Number(getArg("--length", 30));
+  const colorHex = getArg("--color", null);
+
+  let colorStart = "";
+  const colorEnd = "\x1b[0m";
+
+  // Validate hex color
+  if (colorHex && /^#([0-9a-fA-F]{6})$/.test(colorHex)) {
+    const r = parseInt(colorHex.slice(1, 3), 16);
+    const g = parseInt(colorHex.slice(3, 5), 16);
+    const b = parseInt(colorHex.slice(5, 7), 16);
+
+    // ANSI 24-bit color
+    colorStart = `\x1b[38;2;${r};${g};${b}m`;
+  }
+
+  const steps = Math.ceil(duration / interval);
+  let currentStep = 0;
+
+  const timer = setInterval(() => {
+    currentStep++;
+
+    const percent = Math.min(Math.round((currentStep / steps) * 100), 100);
+    const filledLength = Math.round((percent / 100) * length);
+    const emptyLength = length - filledLength;
+
+    const filledBarRaw = "█".repeat(filledLength);
+    const filledBar = colorStart
+      ? `${colorStart}${filledBarRaw}${colorEnd}`
+      : filledBarRaw;
+
+    const emptyBar = " ".repeat(emptyLength);
+
+    const bar = `[${filledBar}${emptyBar}] ${percent}%`;
+
+    process.stdout.write("\r" + bar);
+
+    if (percent >= 100) {
+      clearInterval(timer);
+      process.stdout.write("\nDone!\n");
+    }
+  }, interval);
 };
 
 progress();
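+
+// A usage sketch (all flags are optional and match the argv parser above;
+// the values shown are examples, not defaults):
+//   node src/cli/progress.js --duration 3000 --interval 50 --length 20 --color #00aaff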
diff --git a/src/cp/execCommand.js b/src/cp/execCommand.js
index 34a89c8d..09eda372 100644
--- a/src/cp/execCommand.js
+++ b/src/cp/execCommand.js
@@ -1,10 +1,34 @@
+import { spawn } from "node:child_process";
+
 const execCommand = () => {
-  // Write your code here
-  // Take command from CLI argument
-  // Spawn child process
-  // Pipe child stdout/stderr to parent stdout/stderr
-  // Pass environment variables
-  // Exit with same code as child
+  const commandString = process.argv[2];
+
+  if (!commandString) {
+    console.error("No command provided.");
+    process.exit(1);
+  }
+
+  // Naive whitespace split (quoted arguments are not supported);
+  // filter(Boolean) drops empty tokens from repeated spaces.
+  const parts = commandString.split(" ").filter(Boolean);
+  const command = parts[0];
+  const args = parts.slice(1);
+
+  const child = spawn(command, args, {
+    stdio: ["inherit", "pipe", "pipe"],
+    env: process.env,
+    shell: false,
+  });
+
+  child.stdout.pipe(process.stdout);
+  child.stderr.pipe(process.stderr);
+
+  child.on("close", (code) => {
+    process.exit(code ?? 0);
+  });
+
+  child.on("error", (err) => {
+    console.error(err.message);
+    process.exit(1);
+  });
 };
 
 execCommand();
diff --git a/src/fs/files/a.txt b/src/fs/files/a.txt
new file mode 100644
index 00000000..9118d6c9
--- /dev/null
+++ b/src/fs/files/a.txt
@@ -0,0 +1 @@
+asdasd
\ No newline at end of file
diff --git a/src/fs/files/b.txt b/src/fs/files/b.txt
new file mode 100644
index 00000000..89da2b9c
--- /dev/null
+++ b/src/fs/files/b.txt
@@ -0,0 +1 @@
+aaa123
\ No newline at end of file
diff --git a/src/fs/findByExt.js b/src/fs/findByExt.js
index 24f06cb8..51f13d9b 100644
--- a/src/fs/findByExt.js
+++ b/src/fs/findByExt.js
@@ -1,7 +1,53 @@
+import { readdir, stat } from "node:fs/promises";
+import path from "node:path";
+
 const findByExt = async () => {
-  // Write your code here
-  // Recursively find all files with specific extension
-  // Parse --ext CLI argument (default: .txt)
+  const workspacePath = path.resolve("./src/fs/files");
+
+  // Parse CLI args
+  const args = process.argv.slice(2);
+  const extIndex = args.indexOf("--ext");
+  let extension = ".txt";
+
+  if (extIndex !== -1 && args[extIndex + 1]) {
+    extension = args[extIndex + 1].startsWith(".")
+      ? args[extIndex + 1]
+      : `.${args[extIndex + 1]}`;
+  }
+
+  // Check workspace existence
+  try {
+    const stats = await stat(workspacePath);
+    if (!stats.isDirectory()) throw new Error();
+  } catch {
+    throw new Error("FS operation failed");
+  }
+
+  const results = [];
+
+  const scan = async (dir) => {
+    const items = await readdir(dir);
+
+    for (const item of items) {
+      const fullPath = path.join(dir, item);
+      const itemStat = await stat(fullPath);
+
+      if (itemStat.isDirectory()) {
+        await scan(fullPath);
+      } else if (itemStat.isFile() && path.extname(item) === extension) {
+        const relativePath = path.relative(workspacePath, fullPath);
+        results.push(relativePath);
+      }
+    }
+  };
+
+  await scan(workspacePath);
+
+  results.sort();
+
+  for (const file of results) {
+    console.log(file);
+  }
 };
 
 await findByExt();
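+
+// A usage sketch (scans ./src/fs/files recursively; per the parser above the
+// extension may be passed with or without the leading dot):
+//   node src/fs/findByExt.js --ext js
+//   node src/fs/findByExt.js            (defaults to .txt)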
diff --git a/src/fs/merge.js b/src/fs/merge.js
index cb8e0d8f..9df5b498 100644
--- a/src/fs/merge.js
+++ b/src/fs/merge.js
@@ -1,8 +1,53 @@
+import { readdir, readFile, writeFile, stat } from "node:fs/promises";
+import path from "node:path";
+
 const merge = async () => {
-  // Write your code here
-  // Default: read all .txt files from workspace/parts in alphabetical order
-  // Optional: support --files filename1,filename2,... to merge specific files in provided order
-  // Concatenate content and write to workspace/merged.txt
+  const workspacePath = path.resolve("./src/fs/files");
+  const partsPath = path.join(workspacePath, "parts");
+  const outputFile = path.join(workspacePath, "merged.txt");
+
+  // check parts folder
+  try {
+    const s = await stat(partsPath);
+    if (!s.isDirectory()) throw new Error();
+  } catch {
+    throw new Error("FS operation failed");
+  }
+
+  const args = process.argv.slice(2);
+  const filesIndex = args.indexOf("--files");
+
+  let files = [];
+
+  if (filesIndex !== -1 && args[filesIndex + 1]) {
+    // manual file list
+    files = args[filesIndex + 1].split(",");
+
+    for (const file of files) {
+      try {
+        await stat(path.join(partsPath, file));
+      } catch {
+        throw new Error("FS operation failed");
+      }
+    }
+  } else {
+    // automatic discovery
+    const items = await readdir(partsPath);
+    files = items.filter((f) => path.extname(f) === ".txt").sort();
+
+    if (files.length === 0) {
+      throw new Error("FS operation failed");
+    }
+  }
+
+  let mergedContent = "";
+
+  for (const file of files) {
+    const content = await readFile(path.join(partsPath, file), "utf8");
+    mergedContent += content;
+  }
+
+  await writeFile(outputFile, mergedContent);
 };
 
 await merge();
diff --git a/src/fs/restore.js b/src/fs/restore.js
index 96ae1ffb..d4c9429e 100644
--- a/src/fs/restore.js
+++ b/src/fs/restore.js
@@ -1,8 +1,46 @@
+import { readFile, writeFile, mkdir, stat } from "node:fs/promises";
+import path from "node:path";
+
 const restore = async () => {
-  // Write your code here
-  // Read snapshot.json
-  // Treat snapshot.rootPath as metadata only
-  // Recreate directory/file structure in workspace_restored
+  const snapshotPath = path.resolve("./src/fs/snapshot.json");
+  const restorePath = path.resolve("./src/fs/workspace_restored");
+
+  let snapshot;
+
+  // Check snapshot.json
+  try {
+    const data = await readFile(snapshotPath, "utf8");
+    snapshot = JSON.parse(data);
+  } catch {
+    throw new Error("FS operation failed");
+  }
+
+  // Ensure workspace_restored does NOT exist
+  try {
+    await stat(restorePath);
+    throw new Error("FS operation failed");
+  } catch (err) {
+    if (err.message === "FS operation failed") throw err;
+  }
+
+  // Create root restore directory
+  await mkdir(restorePath, { recursive: true });
+
+  for (const entry of snapshot.entries) {
+    const targetPath = path.join(restorePath, entry.path);
+
+    if (entry.type === "directory") {
+      await mkdir(targetPath, { recursive: true });
+    }
+
+    if (entry.type === "file") {
+      const dir = path.dirname(targetPath);
+      await mkdir(dir, { recursive: true });
+
+      const buffer = Buffer.from(entry.content, "base64");
+      await writeFile(targetPath, buffer);
+    }
+  }
 };
 
 await restore();
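+
+// Shape of snapshot.json consumed above, as produced by snapshot.js in the
+// next hunk (rootPath is metadata only; file content is base64):
+//   {
+//     "rootPath": "/abs/path/to/src/fs/files",
+//     "entries": [
+//       { "path": "sub", "type": "directory" },
+//       { "path": "sub/a.txt", "type": "file", "size": 6, "content": "<base64>" }
+//     ]
+//   }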
diff --git a/src/fs/snapshot.js b/src/fs/snapshot.js
index 050103d3..25c06722 100644
--- a/src/fs/snapshot.js
+++ b/src/fs/snapshot.js
@@ -1,9 +1,58 @@
+import { readdir, stat, readFile, writeFile } from "node:fs/promises";
+import path from "node:path";
+
 const snapshot = async () => {
-  // Write your code here
-  // Recursively scan workspace directory
-  // Write snapshot.json with:
-  // - rootPath: absolute path to workspace
-  // - entries: flat array of relative paths and metadata
+  const rootPath = path.resolve("./src/fs/files");
+
+  try {
+    const stats = await stat(rootPath);
+    if (!stats.isDirectory()) {
+      throw new Error();
+    }
+  } catch {
+    throw new Error("FS operation failed");
+  }
+
+  const entries = [];
+
+  const scan = async (dir) => {
+    const items = await readdir(dir);
+
+    for (const item of items) {
+      const fullPath = path.join(dir, item);
+      const itemStat = await stat(fullPath);
+      const relativePath = path.relative(rootPath, fullPath);
+
+      if (itemStat.isDirectory()) {
+        entries.push({
+          path: relativePath,
+          type: "directory",
+        });
+
+        await scan(fullPath);
+      } else if (itemStat.isFile()) {
+        const buffer = await readFile(fullPath);
+
+        entries.push({
+          path: relativePath,
+          type: "file",
+          size: itemStat.size,
+          content: buffer.toString("base64"),
+        });
+      }
+    }
+  };
+
+  await scan(rootPath);
+
+  const snapshotData = {
+    rootPath,
+    entries,
+  };
+
+  const snapshotPath = path.resolve("./src/fs/snapshot.json");
+
+  await writeFile(snapshotPath, JSON.stringify(snapshotData, null, 2));
 };
 
 await snapshot();
diff --git a/src/hash/verify.js b/src/hash/verify.js
index 7f1e8961..de9ee55b 100644
--- a/src/hash/verify.js
+++ b/src/hash/verify.js
@@ -1,8 +1,36 @@
+import { createReadStream } from "node:fs";
+import { readFile } from "node:fs/promises";
+import { createHash } from "node:crypto";
+
 const verify = async () => {
-  // Write your code here
-  // Read checksums.json
-  // Calculate SHA256 hash using Streams API
-  // Print result: filename — OK/FAIL
+  let checksums;
+
+  try {
+    const data = await readFile("./src/hash/checksums.json", "utf8");
+    checksums = JSON.parse(data);
+  } catch {
+    throw new Error("FS operation failed");
+  }
+
+  const calculateHash = (file) =>
+    new Promise((resolve, reject) => {
+      const hash = createHash("sha256");
+      const stream = createReadStream(file);
+
+      stream.on("data", (chunk) => hash.update(chunk));
+      stream.on("end", () => resolve(hash.digest("hex")));
+      stream.on("error", reject);
+    });
+
+  for (const [filename, expectedHash] of Object.entries(checksums)) {
+    try {
+      const actualHash = await calculateHash(filename);
+      const result = actualHash === expectedHash ? "OK" : "FAIL";
+      console.log(`${filename} — ${result}`);
+    } catch {
+      console.log(`${filename} — FAIL`);
+    }
+  }
 };
 
 await verify();
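+
+// Assumed shape of checksums.json (file paths resolve relative to the working
+// directory, since calculateHash above opens them as given):
+//   { "src/fs/files/a.txt": "<64-char sha256 hex digest>" }
+// Run with: node src/hash/verify.js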
"OK" : "FAIL"; + console.log(`${filename} — ${result}`); + } catch { + console.log(`${filename} — FAIL`); + } + } }; await verify(); diff --git a/src/modules/dynamic.js b/src/modules/dynamic.js index 008ca387..661e2456 100644 --- a/src/modules/dynamic.js +++ b/src/modules/dynamic.js @@ -1,9 +1,24 @@ +import path from "node:path"; +import { pathToFileURL } from "node:url"; + const dynamic = async () => { - // Write your code here - // Accept plugin name as CLI argument - // Dynamically import plugin from plugins/ directory - // Call run() function and print result - // Handle missing plugin case + const pluginName = process.argv[2]; + + if (!pluginName) { + console.log("Plugin not found"); + process.exit(1); + } + + try { + const pluginPath = path.resolve(`./src/modules/plugins/${pluginName}.js`); + const module = await import(pathToFileURL(pluginPath)); + + const result = module.run(); + console.log(result); + } catch { + console.log("Plugin not found"); + process.exit(1); + } }; await dynamic(); diff --git a/src/streams/filter.js b/src/streams/filter.js index 3868ab46..98ce262d 100644 --- a/src/streams/filter.js +++ b/src/streams/filter.js @@ -1,9 +1,37 @@ +import { Transform } from "node:stream"; + const filter = () => { - // Write your code here - // Read from process.stdin - // Filter lines by --pattern CLI argument - // Use Transform Stream - // Write to process.stdout + const args = process.argv.slice(2); + const patternIndex = args.indexOf("--pattern"); + + const pattern = + patternIndex !== -1 && args[patternIndex + 1] ? args[patternIndex + 1] : ""; + + let leftover = ""; + + const transformer = new Transform({ + transform(chunk, encoding, callback) { + const data = leftover + chunk.toString(); + const lines = data.split("\n"); + + leftover = lines.pop(); // keep incomplete line + + const filtered = lines + .filter((line) => line.includes(pattern)) + .join("\n"); + + callback(null, filtered ? filtered + "\n" : ""); + }, + + flush(callback) { + if (leftover && leftover.includes(pattern)) { + this.push(leftover + "\n"); + } + callback(); + }, + }); + + process.stdin.pipe(transformer).pipe(process.stdout); }; filter(); diff --git a/src/streams/lineNumberer.js b/src/streams/lineNumberer.js index 579d662e..3a93155c 100644 --- a/src/streams/lineNumberer.js +++ b/src/streams/lineNumberer.js @@ -1,8 +1,32 @@ +import { Transform } from "node:stream"; + const lineNumberer = () => { - // Write your code here - // Read from process.stdin - // Use Transform Stream to prepend line numbers - // Write to process.stdout + let lineNumber = 1; + let leftover = ""; + + const transformer = new Transform({ + transform(chunk, encoding, callback) { + const data = leftover + chunk.toString(); + const lines = data.split("\n"); + + leftover = lines.pop(); // save incomplete line + + const numbered = lines + .map((line) => `${lineNumber++} | ${line}`) + .join("\n"); + + callback(null, numbered + "\n"); + }, + + flush(callback) { + if (leftover) { + this.push(`${lineNumber++} | ${leftover}\n`); + } + callback(); + }, + }); + + process.stdin.pipe(transformer).pipe(process.stdout); }; lineNumberer(); diff --git a/src/streams/split.js b/src/streams/split.js index f8f814fa..e1b24b86 100644 --- a/src/streams/split.js +++ b/src/streams/split.js @@ -1,8 +1,61 @@ +import { createReadStream, createWriteStream } from "node:fs"; +import { Transform } from "node:stream"; + const split = async () => { - // Write your code here - // Read source.txt using Readable Stream - // Split into chunk_1.txt, chunk_2.txt, etc. 
diff --git a/src/streams/split.js b/src/streams/split.js
index f8f814fa..e1b24b86 100644
--- a/src/streams/split.js
+++ b/src/streams/split.js
@@ -1,8 +1,61 @@
+import { createReadStream, createWriteStream } from "node:fs";
+import { Transform } from "node:stream";
+
 const split = async () => {
-  // Write your code here
-  // Read source.txt using Readable Stream
-  // Split into chunk_1.txt, chunk_2.txt, etc.
-  // Each chunk max N lines (--lines CLI argument, default: 10)
+  const args = process.argv.slice(2);
+  const index = args.indexOf("--lines");
+
+  const maxLines =
+    index !== -1 && args[index + 1] ? Number(args[index + 1]) : 10;
+
+  let leftover = "";
+  let lineCount = 0;
+  let chunkIndex = 1;
+
+  let writer = createWriteStream(`./src/streams/chunk_${chunkIndex}.txt`);
+
+  const transformer = new Transform({
+    transform(chunk, encoding, callback) {
+      const data = leftover + chunk.toString();
+      const lines = data.split("\n");
+
+      leftover = lines.pop();
+
+      for (const line of lines) {
+        if (lineCount === maxLines) {
+          writer.end();
+          chunkIndex++;
+          writer = createWriteStream(`./src/streams/chunk_${chunkIndex}.txt`);
+          lineCount = 0;
+        }
+
+        writer.write(line + "\n");
+        lineCount++;
+      }
+
+      callback();
+    },
+
+    flush(callback) {
+      if (leftover) {
+        if (lineCount === maxLines) {
+          writer.end();
+          chunkIndex++;
+          writer = createWriteStream(`./src/streams/chunk_${chunkIndex}.txt`);
+          lineCount = 0;
+        }
+
+        writer.write(leftover + "\n");
+      }
+
+      // Signal completion only once the last chunk file has been flushed.
+      writer.end(callback);
+    },
+  });
+
+  const reader = createReadStream("./src/streams/source.txt");
+
+  // Await the whole pipeline so errors and completion surface to the caller.
+  await new Promise((resolve, reject) => {
+    reader.on("error", reject);
+    transformer.on("error", reject);
+    transformer.on("finish", resolve);
+    reader.pipe(transformer);
+  });
 };
 
 await split();
diff --git a/src/zip/compress/a.txt b/src/zip/compress/a.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/src/zip/compress/b.txt b/src/zip/compress/b.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/src/zip/compressDir.js b/src/zip/compressDir.js
index 3a3c5089..6381c151 100644
--- a/src/zip/compressDir.js
+++ b/src/zip/compressDir.js
@@ -1,9 +1,68 @@
+import { createWriteStream, createReadStream } from "node:fs";
+import { mkdir, stat, readdir } from "node:fs/promises";
+import path from "node:path";
+import zlib from "node:zlib";
+
 const compressDir = async () => {
-  // Write your code here
-  // Read all files from workspace/toCompress/
-  // Compress entire directory structure into archive.br
-  // Save to workspace/compressed/
-  // Use Streams API
+  const sourceDir = path.resolve("./src/zip/compress");
+  const destDir = path.resolve("./src/zip/compressed");
+  const archivePath = path.join(destDir, "archive.br");
+
+  try {
+    const s = await stat(sourceDir);
+    if (!s.isDirectory()) throw new Error();
+  } catch {
+    throw new Error("FS operation failed");
+  }
+
+  await mkdir(destDir, { recursive: true });
+
+  const getFiles = async (dir, base = "") => {
+    const entries = await readdir(dir, { withFileTypes: true });
+    const files = [];
+
+    for (const entry of entries) {
+      const fullPath = path.join(dir, entry.name);
+      const relPath = path.join(base, entry.name);
+
+      if (entry.isDirectory()) {
+        files.push(...(await getFiles(fullPath, relPath)));
+      } else if (entry.isFile()) {
+        files.push({ fullPath, relPath });
+      }
+    }
+    return files;
+  };
+
+  const files = await getFiles(sourceDir);
+
+  const brotli = zlib.createBrotliCompress();
+  const output = createWriteStream(archivePath);
+
+  // Wire the pipeline up front; if piping happened after the await below,
+  // the "close" event being waited on could never fire.
+  brotli.pipe(output);
+
+  const writeArchive = async () => {
+    for (const { fullPath, relPath } of files) {
+      const stats = await stat(fullPath);
+      const header = `FILE:${relPath}\n${stats.size}\n`;
+      if (!brotli.write(header)) {
+        await new Promise((r) => brotli.once("drain", r));
+      }
+
+      const fileStream = createReadStream(fullPath);
+      await new Promise((resolve, reject) => {
+        fileStream.pipe(brotli, { end: false });
+        fileStream.on("end", resolve);
+        fileStream.on("error", reject);
+      });
+    }
+    brotli.end();
+  };
+
+  await Promise.all([
+    writeArchive(),
+    new Promise((r, e) => output.on("close", r).on("error", e)),
+  ]);
 };
 
 await compressDir();
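+
+// Archive layout produced above and consumed by decompressDir.js: for each
+// file, a two-line ASCII header followed by the raw bytes, all Brotli-
+// compressed as one stream:
+//   FILE:<relative path>\n
+//   <byte size>\n
+//   <raw file bytes>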
diff --git a/src/zip/compressed/archive.br b/src/zip/compressed/archive.br
new file mode 100644
index 00000000..e69de29b
diff --git a/src/zip/decompressDir.js b/src/zip/decompressDir.js
index d6e770f6..f14be650 100644
--- a/src/zip/decompressDir.js
+++ b/src/zip/decompressDir.js
@@ -1,8 +1,85 @@
+import { createReadStream } from "node:fs";
+import { mkdir, stat, writeFile } from "node:fs/promises";
+import path from "node:path";
+import zlib from "node:zlib";
+
 const decompressDir = async () => {
-  // Write your code here
-  // Read archive.br from workspace/compressed/
-  // Decompress and extract to workspace/decompressed/
-  // Use Streams API
+  const compressedDir = path.resolve("./src/zip/compressed");
+  const archivePath = path.join(compressedDir, "archive.br");
+  const destDir = path.resolve("./src/zip/decompressed");
+
+  try {
+    const stats = await stat(compressedDir);
+    if (!stats.isDirectory()) throw new Error();
+    await stat(archivePath);
+  } catch {
+    throw new Error("FS operation failed");
+  }
+
+  await mkdir(destDir, { recursive: true });
+
+  const input = createReadStream(archivePath);
+  const brotli = zlib.createBrotliDecompress();
+
+  let leftover = "";
+  let currentFile = null;
+  let remainingBytes = 0;
+  let buffers = [];
+  const writeOps = [];
+
+  // "binary" (latin1) is byte-transparent, so slicing the decompressed data
+  // as a string preserves the raw file bytes between header parses.
+  const processChunk = (chunk) => {
+    const data = leftover + chunk.toString("binary");
+    let index = 0;
+
+    while (true) {
+      if (!currentFile) {
+        if (index >= data.length) break;
+
+        // Consume the header only once both of its lines have arrived;
+        // advancing past a partial header would corrupt the parse state.
+        const newline1 = data.indexOf("\n", index);
+        if (newline1 === -1) break;
+        const newline2 = data.indexOf("\n", newline1 + 1);
+        if (newline2 === -1) break;
+
+        const headerLine = data.slice(index, newline1);
+        if (!headerLine.startsWith("FILE:")) {
+          throw new Error("Invalid archive format");
+        }
+        currentFile = headerLine.slice(5);
+        remainingBytes = Number(data.slice(newline1 + 1, newline2));
+        if (Number.isNaN(remainingBytes)) {
+          throw new Error("Invalid archive format");
+        }
+        index = newline2 + 1;
+        buffers = [];
+      }
+
+      const toRead = Math.min(remainingBytes, data.length - index);
+      if (toRead > 0) {
+        buffers.push(Buffer.from(data.slice(index, index + toRead), "binary"));
+        index += toRead;
+        remainingBytes -= toRead;
+      }
+
+      if (remainingBytes > 0) break; // need more data for this file
+
+      // File complete (this also covers zero-byte files); queue the write
+      // instead of firing it off unawaited.
+      const filePath = path.join(destDir, currentFile);
+      writeOps.push(
+        mkdir(path.dirname(filePath), { recursive: true }).then(() =>
+          writeFile(filePath, Buffer.concat(buffers))
+        )
+      );
+      currentFile = null;
+      buffers = [];
+    }
+
+    leftover = data.slice(index);
+  };
+
+  brotli.on("data", processChunk);
+
+  await new Promise((resolve, reject) => {
+    brotli.on("end", resolve);
+    brotli.on("error", reject);
+    input.on("error", reject);
+    input.pipe(brotli);
+  });
+
+  // Resolve only after every queued file write has finished.
+  await Promise.all(writeOps);
 };
 
 await decompressDir();
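+
+// Round-trip check (illustrative; compress first, then extract and compare):
+//   node src/zip/compressDir.js
+//   node src/zip/decompressDir.js
+//   diff -r src/zip/compress src/zip/decompressed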