diff --git a/.AI-SAFEGUARDS.md b/.AI-SAFEGUARDS.md deleted file mode 100644 index f7289d84..00000000 --- a/.AI-SAFEGUARDS.md +++ /dev/null @@ -1,56 +0,0 @@ -# AI Safeguards Configuration - -## Protected Branches - -These branches CANNOT be modified by AI: - -- docs-v2 (source of truth) -- main - -## Allowed Branches for AI - -- docs-v2-dev (development branch for hourly commits) -- fix-\* (feature branches) -- safepoint/\* (checkpoint branches) - -## Forbidden Commands for AI - -❌ NEVER execute: - -- git reset --hard -- git restore . -- git clean -fd -- git push --force -- git rebase -i (interactive rebase) -- git tag --force -- Any mass git operations without human approval - -## Large Change Protocol (>10 files) - -1. AI must show file list and get approval -2. Human creates .ai-commit-verified file -3. Only then can AI proceed with commit -4. File is deleted after commit - -## Audit Trail - -All AI git operations logged to: .ai-operations.log - -## Emergency Rollback - -If AI makes dangerous changes: - -```bash -git reflog -git reset --hard -``` - -## Human Commit Override - -If AI blocks legitimate commits, use: - -```bash -git commit --no-verify -``` - -Updated: 2026-01-06 after safeguard implementation diff --git a/.ai-audit.sh b/.ai-audit.sh deleted file mode 100644 index 1911795f..00000000 --- a/.ai-audit.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash -# AI Operations Audit Log -# Logs every git operation attempted by the AI assistant - -TIMESTAMP=$(date '+%Y-%m-%d %H:%M:%S') -BRANCH=$(git rev-parse --abbrev-ref HEAD 2>/dev/null || echo "unknown") -COMMAND="$*" -FILES_CHANGED=$(git status --porcelain 2>/dev/null | wc -l) - -LOG_ENTRY="[$TIMESTAMP] BRANCH=$BRANCH | COMMAND=$COMMAND | FILES_STAGED=$FILES_CHANGED" - -# Write to audit log -echo "$LOG_ENTRY" >> .ai-operations.log - -# Also print for visibility -echo "$LOG_ENTRY" diff --git a/.augment/.augment-guidelines b/.augment/.augment-guidelines new file mode 100644 index 00000000..a7129af7 --- /dev/null 
+++ b/.augment/.augment-guidelines @@ -0,0 +1,17 @@ +# Standard Operating Procedure for Scripts + + + +## 1. Safety & Verification Workflow + +- Before applying changes to existing files, you MUST: + 1. Create a "checkpoint" backup using the internal checkpoint tool. + 2. Create a new git branch named `agent/[feature-name]`. + 3. Verify the script's logic by applying it to a temporary copy of the target + file first. + 4. Only after verification and user approval, apply the changes to the project + files in the new branch. diff --git a/.augment/rules/git-safety.md b/.augment/rules/git-safety.md new file mode 100644 index 00000000..3e5ce50a --- /dev/null +++ b/.augment/rules/git-safety.md @@ -0,0 +1,5 @@ +# GIT WRITE PROTOCOL +- **ENFORCEMENT:** You MUST verify the existence of local Git hooks in `.git/hooks/` before initiating any write command (commit, push, rebase). +- **FORBIDDEN:** You are STRICTLY FORBIDDEN from using `--no-verify` or `-n`. +- **BEHAVIOR:** When a write command is initiated, you MUST announce: "Initiating [action]. Please approve the safety checkpoint in your terminal." +- **RECOVERY:** If a command fails, suggest restoring from the latest `checkpoint/` branch. diff --git a/.github/AGENTS.md b/.github/AGENTS.md new file mode 100644 index 00000000..d9d49698 --- /dev/null +++ b/.github/AGENTS.md @@ -0,0 +1,39 @@ +# 🤖 PROJECT AGENT RULES & SAFETY PROTOCOLS + +## 🛠️ CRITICAL BOUNDARIES (READ FIRST) + +- **ALWAYS** check for the existence of local Git hooks in `.git/hooks/` before + initiating a write command. +- **NEVER** use `--no-verify` or `-n` flags to bypass safety checks. These are + hard project constraints. +- **NEVER** perform a `git reset --hard` or `git push --force` without an + explicit, multi-turn plan confirmed by the user. +- **NEVER** perform a `git reset --hard` or `git push --force` without a saved + branch to revert to in case of failure. 
+ +## 📦 GIT WORKFLOW & CHECKPOINTS + +This project enforces a "Human-in-the-Loop" (HitL) verification for all +destructive or history-altering actions. + +- **Automatic Checkpoints:** Every `commit`, `push`, and `rebase` triggers a + safety hook that creates a branch named `checkpoint/YYYY-MM-DD_HHMMSS`. +- **Pre-Write Announcement:** Before executing a write command, you MUST state: + _"I am initiating [COMMAND]. A safety checkpoint will be created. Please + switch to your terminal to type 'yes' when prompted."_ +- **Recovery:** If a command fails, the latest pre-failure state is stored in + the most recent `checkpoint/` branch. + +## 🧪 VALIDATION COMMANDS + +Before asking for a commit, you should ideally run these to ensure code quality: + +```bash +# Verify build +mint dev +``` + +# Run local test suite + +Make a test for mintlify in the v2/tests file. DO NOT EVER run a script without +testing it on a local branch first. diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index 7ab9f32f..569056b5 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -13,7 +13,7 @@ containerized with Docker. - Use `.tsx` for new components; `.jsx` is legacy but supported. - **Automations & Scripts:** - All dynamic, AI, and data-fetching logic in `automations/` and `ai-tools/`. - - Scripts for API doc generation and external data in `v2/scripts/` (see + - Scripts for API doc generation and external data in `snippets/scripts/` (see generate-api-docs.sh, fetch-openapi-specs.sh). - **API Reference:** - OpenAPI spec in `openapi.yaml` (AI API: see ai/worker/api/openapi.yaml). Use @@ -32,10 +32,10 @@ containerized with Docker. `docker buildx build --platform linux/amd64 --load -t livepeer/docs .` - Makefile: `make all` - **API Docs Generation:** - - Use `v2/scripts/generate-api-docs.sh` to convert OpenAPI specs to MDX/API - docs and navigation JSON. 
Example: + - Use `snippets/scripts/generate-api-docs.sh` to convert OpenAPI specs to + MDX/API docs and navigation JSON. Example: ```bash - ./v2/scripts/generate-api-docs.sh ai/worker/api/openapi.yaml v2/pages/04_gateways/guides-references/api-reference/AI-API "AI API" + ./snippets/scripts/generate-api-docs.sh ai/worker/api/openapi.yaml v2/pages/04_gateways/guides-references/api-reference/AI-API "AI API" ``` - Output: MDX files + navigation snippet for `docs.json`. - **External Data Fetching:** @@ -72,8 +72,8 @@ containerized with Docker. - **OpenAPI:** API docs generated from `openapi.yaml` (see also `ai/worker/api/openapi.yaml`). - **Docker:** Containerized builds for CI/CD and local dev. -- **Automations:** Scripts in `v2/scripts/` automate API doc generation and - external data sync. +- **Automations:** Scripts in `snippets/scripts/` automate API doc generation + and external data sync. ## Key Files & Directories @@ -83,7 +83,7 @@ containerized with Docker. - `openapi.yaml`, `ai/worker/api/openapi.yaml` — API reference - `Dockerfile`, `Makefile` — Build/deploy - `README.md`, `README_V2.md` — Developer notes, protocol/architecture -- `v2/scripts/` — Automation scripts (API docs, data fetching) +- `snippets/scripts/` — Automation scripts (API docs, data fetching) --- diff --git a/.github/scripts/fetch-forum-data.js b/.github/scripts/fetch-forum-data.js new file mode 100644 index 00000000..fa6f6fc8 --- /dev/null +++ b/.github/scripts/fetch-forum-data.js @@ -0,0 +1,198 @@ +const https = require("https"); +const fs = require("fs"); + +// Fetch JSON from URL +function fetchJSON(url) { + return new Promise((resolve, reject) => { + https + .get(url, (res) => { + let data = ""; + res.on("data", (chunk) => { + data += chunk; + }); + res.on("end", () => { + try { + resolve(JSON.parse(data)); + } catch (e) { + reject(e); + } + }); + }) + .on("error", reject); + }); +} + +// Check if topic is old pinned +function isOldPinned(topic) { + const pinned = topic.pinned === true 
|| topic.pinned_globally === true; + if (!pinned) return false; + const created = new Date(topic.created_at); + const now = new Date(); + const ageDays = (now - created) / (1000 * 60 * 60 * 24); + return ageDays > 30; +} + +// Clean and format HTML +function cleanAndFormatHTML(html) { + let cleanHTML = html; + + // Remove anchor navigation links + cleanHTML = cleanHTML.replace( + /]*name="[^"]*"[^>]*class="anchor"[^>]*>.*?<\/a>/g, + "" + ); + + // Clean up headings + cleanHTML = cleanHTML.replace(/]*>(.*?)<\/h1>/g, "

$1

"); + cleanHTML = cleanHTML.replace(/]*>(.*?)<\/h2>/g, "

$1

"); + cleanHTML = cleanHTML.replace(/]*>(.*?)<\/h3>/g, "
$1
"); + cleanHTML = cleanHTML.replace(/]*>(.*?)<\/h[4-6]>/g, "
$1
"); + + // Clean up images and their references + cleanHTML = cleanHTML.replace(/]*class="lightbox"[^>]*>.*?<\/a>/g, ""); + cleanHTML = cleanHTML.replace( + /]*class="lightbox-wrapper"[^>]*>.*?<\/div>/g, + "" + ); + cleanHTML = cleanHTML.replace(/]*>/g, ""); + cleanHTML = cleanHTML.replace(/\[!\[.*?\]\(.*?\)\]\(.*?\)/g, ""); + cleanHTML = cleanHTML.replace(/image\d+×\d+\s+[\d.]+\s*[KM]B/gi, ""); + + // Keep paragraphs, lists, emphasis, code + cleanHTML = cleanHTML.replace(/

/g, "

"); + cleanHTML = cleanHTML.replace(/<\/p>/g, "

"); + cleanHTML = cleanHTML.replace(/
    /g, "
      "); + cleanHTML = cleanHTML.replace(/<\/ul>/g, "
    "); + cleanHTML = cleanHTML.replace(/
      /g, "
        "); + cleanHTML = cleanHTML.replace(/<\/ol>/g, "
      "); + cleanHTML = cleanHTML.replace(/
    1. /g, "
    2. "); + cleanHTML = cleanHTML.replace(/<\/li>/g, "
    3. "); + cleanHTML = cleanHTML.replace( + /(.*?)<\/strong>/g, + "$1" + ); + cleanHTML = cleanHTML.replace(/(.*?)<\/em>/g, "$1"); + cleanHTML = cleanHTML.replace(/(.*?)<\/code>/g, "$1"); + + // Simplify links + cleanHTML = cleanHTML.replace( + /]*href="([^"]*)"[^>]*>(.*?)<\/a>/g, + '$2' + ); + + // Decode HTML entities + cleanHTML = cleanHTML.replace(/&/g, "&"); + cleanHTML = cleanHTML.replace(/</g, "<"); + cleanHTML = cleanHTML.replace(/>/g, ">"); + cleanHTML = cleanHTML.replace(/"/g, '"'); + cleanHTML = cleanHTML.replace(/'/g, "'"); + cleanHTML = cleanHTML.replace(/ /g, " "); + + // Clean up whitespace + cleanHTML = cleanHTML.replace(/\s+/g, " "); + cleanHTML = cleanHTML.replace(/

      \s*<\/p>/g, ""); + + return cleanHTML.trim(); +} + +async function main() { + console.log("Fetching latest topics..."); + const latestData = await fetchJSON("https://forum.livepeer.org/latest.json"); + + const topics = latestData.topic_list?.topics || []; + console.log(`Found ${topics.length} topics`); + + // Filter out old pinned topics + const filteredTopics = topics.filter((t) => !isOldPinned(t)); + console.log(`After filtering: ${filteredTopics.length} topics`); + + // Get top 4 + const top4 = filteredTopics.slice(0, 4); + console.log(`Processing top 4 topics...`); + + const processedTopics = []; + + for (const topic of top4) { + console.log(`Processing topic ${topic.id}: ${topic.title}`); + + // Fetch full topic data + const topicData = await fetchJSON( + `https://forum.livepeer.org/t/${topic.id}.json` + ); + + // Extract first post + const firstPost = topicData.post_stream?.posts?.find( + (p) => p.post_number === 1 + ); + + if (!firstPost) { + console.log(` No first post found, skipping`); + continue; + } + + const htmlContent = cleanAndFormatHTML(firstPost.cooked || ""); + const datePosted = topic.created_at + ? 
new Date(topic.created_at).toLocaleDateString("en-US", { + year: "numeric", + month: "short", + day: "numeric", + }) + : ""; + + processedTopics.push({ + title: topic.title, + href: `https://forum.livepeer.org/t/${topic.id}`, + author: `By ${firstPost.name || firstPost.username || "Unknown"} (@${ + firstPost.username || "unknown" + })`, + content: htmlContent, + replyCount: (topic.posts_count || 1) - 1, + datePosted: datePosted, + }); + } + + console.log(`Processed ${processedTopics.length} topics`); + + // Generate JavaScript export with exact formatting + let jsExport = "export const forumData = [\n"; + + processedTopics.forEach((item, index) => { + jsExport += " {\n"; + jsExport += ` title: "${item.title + .replace(/\\/g, "\\\\") + .replace(/"/g, '\\"')}",\n`; + jsExport += ` href: "${item.href}",\n`; + jsExport += ` author: "${item.author + .replace(/\\/g, "\\\\") + .replace(/"/g, '\\"')}",\n`; + + // Content with proper escaping and indentation + const escapedContent = item.content + .replace(/\\/g, "\\\\") + .replace(/"/g, '\\"') + .replace(/\n/g, " "); + + jsExport += ` content:\n "${escapedContent}",\n`; + jsExport += ` replyCount: ${item.replyCount},\n`; + jsExport += ` datePosted: "${item.datePosted}",\n`; + jsExport += " }"; + + if (index < processedTopics.length - 1) { + jsExport += ","; + } + jsExport += "\n"; + }); + + jsExport += "];\n"; + + // Write to file + const outputPath = "snippets/automations/forum/forumData.jsx"; + fs.mkdirSync("snippets/automations/forum", { recursive: true }); + fs.writeFileSync(outputPath, jsExport); + console.log(`Written to ${outputPath}`); +} + +main().catch((err) => { + console.error("Error:", err); + process.exit(1); +}); diff --git a/.github/scripts/fetch-ghost-blog-data.js b/.github/scripts/fetch-ghost-blog-data.js new file mode 100644 index 00000000..44e0d2f0 --- /dev/null +++ b/.github/scripts/fetch-ghost-blog-data.js @@ -0,0 +1,101 @@ +const https = require("https"); +const fs = require("fs"); + +// Fetch JSON 
from URL +function fetchJSON(url) { + return new Promise((resolve, reject) => { + https + .get(url, (res) => { + let data = ""; + res.on("data", (chunk) => { + data += chunk; + }); + res.on("end", () => { + try { + resolve(JSON.parse(data)); + } catch (e) { + reject(e); + } + }); + }) + .on("error", reject); + }); +} + +// Safe HTML escape - only escape backticks for template literals +function safeHTML(html) { + return (html || "").replace(/`/g, "\\`"); +} + +// Format date +function formatDate(iso) { + return new Date(iso).toLocaleDateString("en-US", { + month: "short", + day: "numeric", + year: "numeric", + }); +} + +async function main() { + console.log("Fetching Ghost blog posts..."); + + const apiUrl = + "https://livepeer-studio.ghost.io/ghost/api/content/posts/?key=eaf54ba5c9d4ab35ce268663b0&limit=4&include=tags,authors"; + + const response = await fetchJSON(apiUrl); + + if (!response.posts || response.posts.length === 0) { + console.log("No posts found"); + return; + } + + console.log(`Found ${response.posts.length} posts`); + + // Process posts + const posts = response.posts.map((p) => ({ + title: p.title, + href: p.url, + author: p.primary_author?.name + ? 
`By ${p.primary_author.name}` + : "By Livepeer Team", + content: safeHTML(p.html), + datePosted: formatDate(p.published_at), + img: p.feature_image || "", + excerpt: safeHTML(p.excerpt), + readingTime: p.reading_time || 0, + })); + + // Generate JavaScript export with template literals + let jsExport = "export const ghostData = [\n"; + + posts.forEach((post, index) => { + jsExport += "{\n"; + jsExport += ` title: \`${post.title}\`,\n`; + jsExport += ` href: \`${post.href}\`,\n`; + jsExport += ` author: \`${post.author}\`,\n`; + jsExport += ` content: \`${post.content}\`,\n`; + jsExport += ` datePosted: \`${post.datePosted}\`,\n`; + jsExport += ` img: \`${post.img}\`,\n`; + jsExport += ` excerpt: \`${post.excerpt}\`,\n`; + jsExport += ` readingTime: ${post.readingTime}\n`; + jsExport += "}"; + + if (index < posts.length - 1) { + jsExport += ","; + } + jsExport += "\n"; + }); + + jsExport += "];\n"; + + // Write to file + const outputPath = "snippets/automations/ghost/ghostBlogData.jsx"; + fs.mkdirSync("snippets/automations/ghost", { recursive: true }); + fs.writeFileSync(outputPath, jsExport); + console.log(`Written to ${outputPath}`); +} + +main().catch((err) => { + console.error("Error:", err); + process.exit(1); +}); diff --git a/.github/scripts/fetch-youtube-data.js b/.github/scripts/fetch-youtube-data.js new file mode 100644 index 00000000..63d35ddd --- /dev/null +++ b/.github/scripts/fetch-youtube-data.js @@ -0,0 +1,122 @@ +const https = require("https"); +const fs = require("fs"); + +const YOUTUBE_API_KEY = process.env.YOUTUBE_API_KEY; +const CHANNEL_ID = process.env.CHANNEL_ID || "UCzfHtZnmUzMbJDxGCwIgY2g"; + +function httpsGet(url) { + return new Promise((resolve, reject) => { + https + .get(url, (res) => { + let data = ""; + res.on("data", (chunk) => (data += chunk)); + res.on("end", () => resolve(JSON.parse(data))); + }) + .on("error", reject); + }); +} + +function parseDuration(duration) { + const match = 
duration.match(/PT(?:(\d+)H)?(?:(\d+)M)?(?:(\d+)S)?/); + if (!match) return 0; + + const hours = parseInt(match[1] || 0); + const minutes = parseInt(match[2] || 0); + const seconds = parseInt(match[3] || 0); + + return hours * 3600 + minutes * 60 + seconds; +} + +function escapeForJSX(str) { + return str + .replace(/\\/g, "\\\\") + .replace(/'/g, "\\'") + .replace(/"/g, '\\"') + .replace(/\n/g, " ") + .replace(/\r/g, "") + .replace(/\t/g, " "); +} + +async function main() { + // Step 1: Get recent videos + console.log("Fetching recent videos..."); + const searchUrl = `https://www.googleapis.com/youtube/v3/search?part=snippet&channelId=${CHANNEL_ID}&maxResults=50&order=date&type=video&key=${YOUTUBE_API_KEY}`; + const searchResults = await httpsGet(searchUrl); + + if (!searchResults.items || searchResults.items.length === 0) { + console.log("No videos found"); + return; + } + + // Step 2: Get video details for each video + console.log( + `Found ${searchResults.items.length} videos, fetching details...` + ); + const videoIds = searchResults.items.map((item) => item.id.videoId).join(","); + const detailsUrl = `https://www.googleapis.com/youtube/v3/videos?part=contentDetails,snippet&id=${videoIds}&key=${YOUTUBE_API_KEY}`; + const detailsResults = await httpsGet(detailsUrl); + + // Step 3: Process and filter videos + const videos = []; + for (const video of detailsResults.items) { + const duration = video.contentDetails.duration; + const durationSeconds = parseDuration(duration); + const snippet = video.snippet; + + // Check if it's a livestream + const isLivestream = + snippet.liveBroadcastContent === "live" || + snippet.liveBroadcastContent === "upcoming" || + duration === "PT0S" || + snippet.title.toLowerCase().includes("watercooler") || + snippet.title.toLowerCase().includes("fireside"); + + // Filter out Shorts (≤60 seconds and not livestreams) + const isShort = + durationSeconds <= 60 && durationSeconds > 0 && !isLivestream; + + if (!isShort) { + videos.push({ + 
title: snippet.title, + href: `https://www.youtube.com/watch?v=${video.id}`, + author: `By ${snippet.channelTitle || "Livepeer"}`, + content: (snippet.description || "").substring(0, 500), + publishedDate: new Date(snippet.publishedAt).toLocaleDateString( + "en-US", + { month: "short", day: "numeric", year: "numeric" } + ), + duration: duration, + thumbnailUrl: snippet.thumbnails.high.url, + }); + } + } + + console.log(`Filtered to ${videos.length} non-Short videos`); + + // Step 4: Generate JSX content + const jsxContent = `export const youtubeData = [ +${videos + .map( + (v) => ` { + title: '${escapeForJSX(v.title)}', + href: '${v.href}', + author: '${v.author}', + content: '${escapeForJSX(v.content)}...', + publishedDate: '${v.publishedDate}', + duration: '${v.duration}', + thumbnailUrl: '${v.thumbnailUrl}' + }` + ) + .join(",\n")} +]; +`; + + // Step 5: Write to file + fs.writeFileSync("snippets/automations/youtube/youtubeData.jsx", jsxContent); + console.log("Successfully wrote youtubeData.jsx"); +} + +main().catch((err) => { + console.error("Error:", err); + process.exit(1); +}); diff --git a/.github/workflows/update-blog-data.yml b/.github/workflows/update-blog-data.yml new file mode 100644 index 00000000..cabd9ff6 --- /dev/null +++ b/.github/workflows/update-blog-data.yml @@ -0,0 +1,60 @@ +name: Update Blog and Forum Data + +on: + schedule: + - cron: "0 0 * * *" # Runs daily at midnight UTC + workflow_dispatch: # Allows manual trigger from GitHub UI + +jobs: + update-data: + runs-on: ubuntu-latest + + permissions: + contents: write # Required to push changes + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Fetch Ghost blog data + run: | + curl -f -o ghost-data.json "https://livepeer.org/ghost/api/content/posts/?key=YOUR_CONTENT_API_KEY&limit=all&include=tags,authors" || echo "[]" > ghost-data.json + continue-on-error: true + + - name: Fetch Forum data + run: | + curl -f -o 
forum-data.json "https://forum.livepeer.org/latest.json" || echo "[]" > forum-data.json + continue-on-error: true + + - name: Update Ghost data file + run: | + echo "export const ghostData = " > snippets/automations/blog/ghostBlogData.jsx + cat ghost-data.json >> snippets/automations/blog/ghostBlogData.jsx + echo ";" >> snippets/automations/blog/ghostBlogData.jsx + + - name: Update Forum data file + run: | + echo "export const forumData = " > snippets/automations/forum/forumData.jsx + cat forum-data.json >> snippets/automations/forum/forumData.jsx + echo ";" >> snippets/automations/forum/forumData.jsx + + - name: Check for changes + id: git-check + run: | + git diff --exit-code snippets/automations/ || echo "changed=true" >> $GITHUB_OUTPUT + + - name: Commit and push if changed + if: steps.git-check.outputs.changed == 'true' + run: | + git config --global user.name 'github-actions[bot]' + git config --global user.email 'github-actions[bot]@users.noreply.github.com' + git add snippets/automations/blog/ghostBlogData.jsx + git add snippets/automations/forum/forumData.jsx + git commit -m "chore: update blog and forum data [skip ci]" + git push + + - name: Cleanup + run: | + rm -f ghost-data.json forum-data.json diff --git a/.github/workflows/update-forum-data.yml b/.github/workflows/update-forum-data.yml new file mode 100644 index 00000000..91e658b9 --- /dev/null +++ b/.github/workflows/update-forum-data.yml @@ -0,0 +1,38 @@ +# NOTE: THIS GITHUB ACTION WILL ONLY RUN ON MAIN BRANCH. +# N8N IS BEING USING AS AN ALTERNATIVE UNTIL THEN. 
+# N8N workflow is in /snippets/automations/n8n-workflows/forum-to-mintlify-latest-topics.json +name: Update Forum Data + +on: + schedule: + # Run daily at 00:00 UTC + - cron: "0 0 * * *" + workflow_dispatch: # Allow manual trigger + +jobs: + update-forum-data: + runs-on: ubuntu-latest + + steps: + - name: Checkout docs repository + uses: actions/checkout@v4 + with: + repository: livepeer/docs + ref: docs-v2-preview + token: ${{ secrets.DOCS_V2 }} + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: "18" + + - name: Fetch and process forum data + run: | + node .github/scripts/fetch-forum-data.js + + - name: Commit and push if changed + run: | + git config user.name "GitHub Action" + git config user.email "action@github.com" + git add snippets/automations/forum/forumData.jsx + git diff --quiet && git diff --staged --quiet || (git commit -m "Update forum data - $(date -u +"%Y-%m-%dT%H:%M:%SZ")" && git push) diff --git a/.github/workflows/update-ghost-blog-data.yml b/.github/workflows/update-ghost-blog-data.yml new file mode 100644 index 00000000..b3d44c1b --- /dev/null +++ b/.github/workflows/update-ghost-blog-data.yml @@ -0,0 +1,35 @@ +# NOTE: THIS GITHUB ACTION WILL ONLY RUN ON MAIN BRANCH. +# N8N IS BEING USED AS AN ALTERNATIVE UNTIL THEN. 
+# N8N workflow is in /snippets/automations/n8n-workflows/ghost-to-mintlify.json +name: Update Ghost Blog Data + +on: + schedule: + - cron: "0 0 * * *" + workflow_dispatch: + +jobs: + update-ghost-data: + runs-on: ubuntu-latest + + steps: + - name: Checkout docs repository + uses: actions/checkout@v4 + with: + token: ${{ secrets.DOCS_V2 }} + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: "18" + + - name: Fetch and process Ghost blog data + run: | + node .github/scripts/fetch-ghost-data.js + + - name: Commit and push if changed + run: | + git config user.name "GitHub Action" + git config user.email "action@github.com" + git add snippets/automations/ghost/ghostBlogData.jsx + git diff --quiet && git diff --staged --quiet || (git commit -m "Update Ghost blog data - $(date -u +"%Y-%m-%dT%H:%M:%SZ")" && git push) diff --git a/.github/workflows/update-livepeer-release.yml b/.github/workflows/update-livepeer-release.yml new file mode 100644 index 00000000..e1858c31 --- /dev/null +++ b/.github/workflows/update-livepeer-release.yml @@ -0,0 +1,60 @@ +name: Update Livepeer Release Version + +on: + schedule: + # Run every 30 minutes + - cron: "*/30 * * * *" + workflow_dispatch: + +jobs: + check-and-update: + runs-on: ubuntu-latest + + steps: + - name: Checkout docs repository + uses: actions/checkout@v3 + with: + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Get latest go-livepeer release + id: get_release + run: | + LATEST_RELEASE=$(curl -s https://api.github.com/repos/livepeer/go-livepeer/releases/latest | jq -r .tag_name) + echo "release=${LATEST_RELEASE}" >> $GITHUB_OUTPUT + echo "Latest release: ${LATEST_RELEASE}" + + - name: Read current version from globals.mdx + id: current_version + run: | + CURRENT=$(grep -oP 'latestVersion\s*=\s*["'"'"']?\K[^"'"'"']+' snippets/automationData/globals/globals.mdx || echo "") + echo "current=${CURRENT}" >> $GITHUB_OUTPUT + echo "Current version: ${CURRENT}" + + - name: Update globals.mdx if needed + if: + 
steps.get_release.outputs.release != + steps.current_version.outputs.current + run: | + # Create backup + cp snippets/automationData/globals/globals.mdx snippets/automationData/globals/globals.mdx.bak + + # Update the latestVersion value + sed -i "s/latestVersion[[:space:]]*=[[:space:]]*[\"'][^\"']*[\"']/latestVersion = \"${{ steps.get_release.outputs.release }}\"/" snippets/automationData/globals/globals.mdx + + # Update the latestVersionUrl value + sed -i "s|latestVersionUrl[[:space:]]*=[[:space:]]*[\"'][^\"']*[\"']|latestVersionUrl = \"https://github.com/livepeer/go-livepeer/releases/download/${{ steps.get_release.outputs.release }}\"|" snippets/automationData/globals/globals.mdx + + # Verify the changes + echo "Updated content:" + grep "latestVersion" snippets/automationData/globals/globals.mdx + + - name: Commit and push if changed + if: + steps.get_release.outputs.release != + steps.current_version.outputs.current + run: | + git config --local user.email "action@github.com" + git config --local user.name "GitHub Action" + git add snippets/automationData/globals/globals.mdx + git commit -m "chore: update latest release to ${{ steps.get_release.outputs.release }}" + git push diff --git a/.github/workflows/update-youtube-data.yml b/.github/workflows/update-youtube-data.yml new file mode 100644 index 00000000..05dfd5e1 --- /dev/null +++ b/.github/workflows/update-youtube-data.yml @@ -0,0 +1,158 @@ +# NOTE: THIS GITHUB ACTION WILL ONLY RUN ON MAIN BRANCH. +# N8N IS BEING USING AS AN ALTERNATIVE UNTIL THEN. +# N8N workflow is in /snippets/automations/n8n-workflows/youtube-to-mintlify.json +# You will need to Add YOUTUBE_API_KEY secret in repo settings (Settings → Secrets → Actions) for this github action to work. 
+ +name: Update YouTube Data + +on: + schedule: + - cron: "0 0 * * 0" # Weekly on Sunday at midnight UTC + workflow_dispatch: # Allow manual trigger + +jobs: + update-youtube: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + ref: main + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: "20" + + - name: Fetch and process YouTube videos + env: + YOUTUBE_API_KEY: ${{ secrets.YOUTUBE_API_KEY }} + CHANNEL_ID: UCzfHtZnmUzMbJDxGCwIgY2g + run: | + node << 'EOF' + const https = require('https'); + const fs = require('fs'); + + const YOUTUBE_API_KEY = process.env.YOUTUBE_API_KEY; + const CHANNEL_ID = process.env.CHANNEL_ID; + + function httpsGet(url) { + return new Promise((resolve, reject) => { + https.get(url, (res) => { + let data = ''; + res.on('data', (chunk) => data += chunk); + res.on('end', () => resolve(JSON.parse(data))); + }).on('error', reject); + }); + } + + function parseDuration(duration) { + const match = duration.match(/PT(?:(\d+)H)?(?:(\d+)M)?(?:(\d+)S)?/); + if (!match) return 0; + + const hours = parseInt(match[1] || 0); + const minutes = parseInt(match[2] || 0); + const seconds = parseInt(match[3] || 0); + + return hours * 3600 + minutes * 60 + seconds; + } + + function escapeForJSX(str) { + return str + .replace(/\\/g, '\\\\') + .replace(/'/g, "\\'") + .replace(/"/g, '\\"') + .replace(/\n/g, ' ') + .replace(/\r/g, '') + .replace(/\t/g, ' '); + } + + async function main() { + // Step 1: Get recent videos + console.log('Fetching recent videos...'); + const searchUrl = `https://www.googleapis.com/youtube/v3/search?part=snippet&channelId=${CHANNEL_ID}&maxResults=50&order=date&type=video&key=${YOUTUBE_API_KEY}`; + const searchResults = await httpsGet(searchUrl); + + if (!searchResults.items || searchResults.items.length === 0) { + console.log('No videos found'); + return; + } + + // Step 2: Get video details for each video + 
console.log(`Found ${searchResults.items.length} videos, fetching details...`); + const videoIds = searchResults.items.map(item => item.id.videoId).join(','); + const detailsUrl = `https://www.googleapis.com/youtube/v3/videos?part=contentDetails,snippet&id=${videoIds}&key=${YOUTUBE_API_KEY}`; + const detailsResults = await httpsGet(detailsUrl); + + // Step 3: Process and filter videos + const videos = []; + for (const video of detailsResults.items) { + const duration = video.contentDetails.duration; + const durationSeconds = parseDuration(duration); + const snippet = video.snippet; + + // Check if it's a livestream + const isLivestream = snippet.liveBroadcastContent === 'live' || + snippet.liveBroadcastContent === 'upcoming' || + duration === 'PT0S' || + snippet.title.toLowerCase().includes('watercooler') || + snippet.title.toLowerCase().includes('fireside'); + + // Filter out Shorts (≤60 seconds and not livestreams) + const isShort = durationSeconds <= 60 && durationSeconds > 0 && !isLivestream; + + if (!isShort) { + videos.push({ + title: snippet.title, + href: `https://www.youtube.com/watch?v=${video.id}`, + author: `By ${snippet.channelTitle || 'Livepeer'}`, + content: (snippet.description || '').substring(0, 500), + publishedDate: new Date(snippet.publishedAt).toLocaleDateString('en-US', { month: 'short', day: 'numeric', year: 'numeric' }), + duration: duration, + thumbnailUrl: snippet.thumbnails.high.url + }); + } + } + + console.log(`Filtered to ${videos.length} non-Short videos`); + + // Step 4: Generate JSX content + const jsxContent = `export const youtubeData = [ + ${videos.map(v => ` { + title: '${escapeForJSX(v.title)}', + href: '${v.href}', + author: '${v.author}', + content: '${escapeForJSX(v.content)}...', + publishedDate: '${v.publishedDate}', + duration: '${v.duration}', + thumbnailUrl: '${v.thumbnailUrl}' + }`).join(',\n')} + ]; + `; + + // Step 5: Write to file + fs.writeFileSync('snippets/automations/youtube/youtubeData.jsx', jsxContent); + 
console.log('Successfully wrote youtubeData.jsx'); + } + + main().catch(err => { + console.error('Error:', err); + process.exit(1); + }); + EOF + + - name: Check for changes + id: git-check + run: | + git diff --exit-code snippets/automations/youtube/youtubeData.jsx || echo "changed=true" >> $GITHUB_OUTPUT + + - name: Commit and push if changed + if: steps.git-check.outputs.changed == 'true' + run: | + git config user.name "GitHub Actions Bot" + git config user.email "actions@github.com" + git add snippets/automations/youtube/youtubeData.jsx + git commit -m "Update YouTube videos - $(date -u +"%Y-%m-%dT%H:%M:%SZ")" + git push diff --git a/.gitignore b/.gitignore index e7e1da61..e54d8c67 100644 --- a/.gitignore +++ b/.gitignore @@ -22,6 +22,9 @@ pnpm-lock.yaml .env .env.*local +# Google OAuth secrets +**/client_secret*.json + # ------------------------------------ # Logs # ------------------------------------ @@ -68,3 +71,8 @@ build/ # External docs (fetched at build time) # ------------------------------------ snippets/external/ + +# ------------------------------------ +# Notion exports (contains API keys) +# ------------------------------------ +notion/ diff --git a/snippets/components/groupedItems/GroupedResponseField.jsx b/.mintignore similarity index 100% rename from snippets/components/groupedItems/GroupedResponseField.jsx rename to .mintignore diff --git a/.verify-large-change.sh b/.verify-large-change.sh deleted file mode 100644 index 389295f6..00000000 --- a/.verify-large-change.sh +++ /dev/null @@ -1,31 +0,0 @@ -#!/bin/bash -# AI Large Change Verification Script -# Used when AI needs to commit more than 10 files - -BRANCH=$(git rev-parse --abbrev-ref HEAD) -FILES_COUNT=$(git diff --cached --name-only | wc -l) -TIMESTAMP=$(date '+%Y-%m-%d %H:%M:%S') - -echo "" -echo "════════════════════════════════════════════════════════════" -echo "⚠️ LARGE CHANGE VERIFICATION REQUIRED" -echo "════════════════════════════════════════════════════════════" -echo "" -echo 
"Branch: $BRANCH" -echo "Files to be modified: $FILES_COUNT" -echo "Timestamp: $TIMESTAMP" -echo "" -echo "Files list:" -git diff --cached --name-only | sort -echo "" -echo "════════════════════════════════════════════════════════════" -echo "" -echo "HUMAN ACTION REQUIRED:" -echo "" -echo "1. Review the files above carefully" -echo "2. If safe, create verification file:" -echo " touch .ai-commit-verified" -echo "3. AI will then proceed with commit" -echo "" -echo "════════════════════════════════════════════════════════════" -echo "" diff --git a/AI-ACCOUNTABILITY-CHECKLIST.md b/AI-ACCOUNTABILITY-CHECKLIST.md deleted file mode 100644 index 25f8f70a..00000000 --- a/AI-ACCOUNTABILITY-CHECKLIST.md +++ /dev/null @@ -1,82 +0,0 @@ -# AI Assistant Accountability Checklist - -Use this for EVERY interaction with ANY AI on this repo. - -## Before Session Starts - -- [ ] AI acknowledges UNIVERSAL-AI-PROTOCOL.md -- [ ] AI states current branch -- [ ] AI lists protected branches (docs-v2, main) -- [ ] AI confirms it has read/write restrictions - -## Before EVERY Git Operation - -- [ ] AI shows PREFLIGHT CHECK (branch, files, operation, approval needed) -- [ ] You review the preflight -- [ ] You explicitly approve or reject -- [ ] AI executes ONLY after approval - -## During Large Changes (> 10 files) - -- [ ] AI lists EXACT files being changed -- [ ] AI explains WHY each file changes -- [ ] You review file list -- [ ] You create `.ai-commit-verified` file -- [ ] AI commits with verification token in message - -## After Each Commit - -- [ ] Verify commit message has: [file count] | [what] | [why] | [approved by] -- [ ] Check `.ai-operations.log` for entry -- [ ] Spot-check git log for timestamp accuracy -- [ ] Review file changes: `git show HEAD` - -## If Something Breaks - -- [ ] Stop work immediately -- [ ] Document what broke -- [ ] Use ROLLBACK-GUIDE.md to revert -- [ ] Don't let AI try to "fix" without rollback first -- [ ] Post-mortem: update UNIVERSAL-AI-PROTOCOL.md - 
-## Monthly Review - -- [ ] Audit `.ai-operations.log` for patterns -- [ ] Check if any rules were bent -- [ ] Review `git log docs-v2-dev` for auto-commits -- [ ] Test rollback procedures (make sure they still work) -- [ ] Update this checklist based on lessons learned - -## Red Flags (Stop Work Immediately) - -- [ ] AI tries to use `git reset --hard` -- [ ] AI commits to `docs-v2` or `main` -- [ ] AI doesn't show preflight checklist -- [ ] AI modifies 50+ files without asking -- [ ] AI touches `.prettierrc` or `docs.json` -- [ ] AI makes changes to v1/ directory -- [ ] Commit message missing approval token -- [ ] Pre-commit hook does NOT block an AI commit attempt - -## Recovery Commands (Memorize These) - -```bash -# See recent commits -git log --oneline -10 docs-v2-dev - -# Revert last commit (safe, creates new commit) -git revert HEAD - -# View what's in last commit -git show HEAD - -# Unstage last commit (keep files) -git reset --soft HEAD~1 - -# Check audit trail -cat .ai-operations.log | tail -20 -``` - ---- - -**This checklist is your insurance policy against AI fucking up your docs.** diff --git a/AI_GUIDELINES.md b/AI_GUIDELINES.md new file mode 100644 index 00000000..1e495944 --- /dev/null +++ b/AI_GUIDELINES.md @@ -0,0 +1,42 @@ +# 🛡️ AI SAFETY PROTOCOL: GIT WRITE OPERATIONS + +## 1. MANDATORY SAFETY HOOKS + +This repository uses custom Git hooks for "Human-in-the-Loop" (HitL) +verification. + +- **ENFORCEMENT:** You SHALL NOT attempt to bypass hooks. +- **FORBIDDEN:** The use of `--no-verify`, `-n`, or any environment variable + designed to skip Git hook execution is STRICTLY PROHIBITED. +- **BEHAVIOR:** When you initiate a `commit`, `push`, or `rebase`, the hook will + pause and wait for manual input in the user's terminal. You MUST wait for the + user to confirm. + +## 2. AUTOMATED CHECKPOINTING + +Every write command triggers an automatic checkpoint branch. 
+ +- **FORMAT:** `checkpoint/YYYY-MM-DD_HHMMSS` +- **ACTION:** Before running a write command, you MUST state: _"I am initiating + [COMMAND]. A safety checkpoint will be created. Please approve the prompt in + your terminal."_ + +## 3. CATASTROPHIC FAILURE RECOVERY + +In the event of a destructive operation (e.g., accidental file deletion, +corrupted rebase, or broken merge): + +- **DO NOT** attempt to "fix" the state with further complex Git commands. +- **PROCEDURE:** + 1. Identify the latest `checkpoint/` branch using + `git branch --list 'checkpoint/*'`. + 2. Suggest a `git reset --hard` to that specific checkpoint branch to restore + the repository to its pre-failure state. + 3. Notify the user immediately of the failure and the recovery path. + +## 4. SCOPE LIMITATIONS + +- **READS:** You have full permission for `git status`, `git diff`, and + `git log`. +- **WRITES:** Every `commit`, `push`, and `rebase` is a high-stakes action. + Treat them as irreversible without human oversight. 
diff --git a/ROLLBACK-GUIDE.md b/ROLLBACK-GUIDE.md deleted file mode 100644 index 30eadb4f..00000000 --- a/ROLLBACK-GUIDE.md +++ /dev/null @@ -1,94 +0,0 @@ -# Emergency Rollback Guide - -## Quick Rollback (Last 5 minutes) - -```bash -# See recent commits -git log --oneline -10 docs-v2-dev - -# Safe rollback - creates new commit that undoes changes -git revert - -# OR - go back to previous state without changing history -git reset --soft HEAD~1 -# Then inspect and recommit if needed -``` - -## View All Changes Since Date - -```bash -# Since last hour -git log --oneline --since="1 hour ago" docs-v2-dev - -# Since specific time -git log --oneline --since="2026-01-06 20:00:00" docs-v2-dev -``` - -## See What Changed in Last Commit - -```bash -git show HEAD -``` - -## Rollback to Specific Commit - -```bash -# List all commits -git reflog - -# Safe method: Create new commit that undoes changes -git revert - -# Restore specific file to previous version -git restore --source= - -# Go back one commit (keeps history) -git reset --soft HEAD~1 - -# NEVER use: git reset --hard (destroys history) -``` - -## If You Need to Undo Last Auto-Commit - -```bash -# See what's in the last commit -git show HEAD - -# Create a new commit that reverts it -git revert HEAD - -# OR - unstage it and inspect -git reset --soft HEAD~1 -git diff --cached -``` - -## See Diff Between Commits - -```bash -# What changed in last auto-commit -git diff HEAD~1 HEAD - -# What changed in a specific commit -git diff ~1 -``` - -## Automatic Checkpoints - -Every 5 minutes a new commit is created on docs-v2-dev with timestamp. Each -commit is a full snapshot you can revert to instantly. 
- -### View Commit Timeline - -```bash -git log --oneline --graph docs-v2-dev | head -20 -``` - -### Tag Safe Points (Optional) - -```bash -# Save a checkpoint -git tag checkpoint-before-gateway-work - -# Later, go back to it -git reset --hard checkpoint-before-gateway-work -``` diff --git a/UNIVERSAL-AI-PROTOCOL.md b/UNIVERSAL-AI-PROTOCOL.md deleted file mode 100644 index f720ff6a..00000000 --- a/UNIVERSAL-AI-PROTOCOL.md +++ /dev/null @@ -1,202 +0,0 @@ -# Universal AI Operations Protocol - -## For ANY AI Assistant Working on This Repository - -**Last Updated:** 2026-01-06 -**Created After:** Catastrophic AI failure destroying 12+ files and 318+ files -through formatting disaster - ---- - -## CRITICAL RULES FOR ALL AI ASSISTANTS - -### Rule 1: PROTECTED BRANCHES - UNTOUCHABLE - -``` -docs-v2 = SOURCE OF TRUTH -main = PRODUCTION - -NO AI ASSISTANT IS ALLOWED TO: -- Commit to these branches -- Push to these branches -- Merge into these branches -- Delete these branches -``` - -### Rule 2: PRE-FLIGHT CHECKLIST (EVERY OPERATION) - -Before ANY git command, the AI MUST show you: - -``` -[PREFLIGHT CHECK] -Operating on branch: ________ -Files affected: ________ -Operation: ________ -Expected outcome: ________ - -Proceed? (yes/no) -``` - -**If you don't see this, STOP and ask the AI to show it.** - -### Rule 3: FORBIDDEN COMMANDS (ABSOLUTE) - -``` -❌ NEVER ALLOWED: -- git reset --hard -- git restore . -- git clean -fd -- git push --force -- git rebase -i -- git tag --force -- Mass operations > 50 files without approval -``` - -### Rule 4: LARGE CHANGE PROTOCOL (> 10 files) - -``` -1. AI lists EXACT files being modified -2. AI shows file count and brief explanation -3. Human reviews and approves EXPLICITLY -4. Human creates approval token if needed -5. 
AI proceeds ONLY after approval -``` - -### Rule 5: COMMIT MESSAGE REQUIREMENTS - -Every commit must include: - -``` -[File count] | [What changed] | [Why changed] | [Approved by: USERNAME] - -Example: -[9 files added] | Restore gateway quickstart files | Fix missing imports from stash | Approved by: alisonhaire -``` - -### Rule 6: FORBIDDEN FILE PATTERNS - -``` -❌ AI must NOT touch without explicit approval: -- .prettierrc, .prettierignore -- docs.json, docs_v2.json -- package.json -- v2/ structure changes -- v1/ any changes (legacy) -- ./git/* (git config) -- Migration of files between v1 and v2 -``` - -### Rule 7: DANGEROUS OPERATION ALERTS - -AI MUST WARN before: - -- Restoring files from commits > 24 hours old -- Deleting ANY file -- Renaming directories -- Changing file structure -- Mass reformatting operations - -### Rule 8: AUDIT TRAIL REQUIREMENT - -Every operation logs to: `.ai-operations.log` - -``` -[TIMESTAMP] BRANCH=docs-v2-dev | FILES=9 | OP=restore | APPROVAL=yes | COMMIT=abc123 -``` - -### Rule 9: CHECKPOINT SYSTEM - -- Auto-commits every 5 minutes on `docs-v2-dev` -- Each commit is tagged with timestamp -- Rollback available to ANY point in last 24 hours -- Tags: `state-before-OPERATION` and `state-after-OPERATION` - -### Rule 10: DRY-RUN FOR COMPLEX OPS - -Before any operation affecting > 5 files: - -``` -1. AI shows DRY-RUN (what WOULD happen) -2. Human reviews -3. Human approves -4. AI executes REAL operation -5. AI shows ACTUAL result -``` - ---- - -## EMERGENCY PROCEDURES - -### If AI Breaks Something - -```bash -# 1. Get the commit hash -git log --oneline docs-v2-dev | head -1 - -# 2. See what broke -git show HEAD - -# 3. Revert it (creates new commit undoing changes) -git revert HEAD - -# 4. 
Or unstage and inspect -git reset --soft HEAD~1 -``` - -### If AI Touches Protected Branch - -```bash -# Check what happened -git log docs-v2 --oneline -5 - -# Force restore from remote -git fetch origin -git reset --soft origin/docs-v2 -``` - -### If AI Tries Forbidden Command - -The pre-commit hook will block it automatically. - ---- - -## HUMAN RESPONSIBILITY CHECKLIST - -Before each AI session with YOUR docs: - -- [ ] Review protected branches list -- [ ] Know your current branch -- [ ] Have rollback commands ready (see ROLLBACK-GUIDE.md) -- [ ] Verify AI shows preflight checklist EVERY TIME -- [ ] Never let AI skip approval for > 10 file changes -- [ ] Check audit log (.ai-operations.log) regularly - ---- - -## FOR INSTRUCTING ANY NEW AI - -Include this in your prompt to any AI: - -``` -"You are working on a repository with STRICT AI safety protocols. -You MUST: -1. Never commit to docs-v2 or main -2. Show preflight checklist before every operation -3. Get explicit approval for any change > 10 files -4. Never use: git reset --hard, git restore ., git clean -fd -5. Log all operations to .ai-operations.log -6. Work only on docs-v2-dev branch for commits -7. Reference UNIVERSAL-AI-PROTOCOL.md for complete rules" -``` - ---- - -## VERSION CONTROL FOR THIS PROTOCOL - -- Created: 2026-01-06 after catastrophic AI failure -- Enforced by: `.git/hooks/pre-commit` (technical enforcement) -- Audited by: `.ai-operations.log` (human review) -- Rollback by: `ROLLBACK-GUIDE.md` (recovery procedures) - -**This protocol is NOT optional. 
It is the safety layer that prevents -irreversible damage.** diff --git a/docs.json b/docs.json index 1d777042..b58b4ffe 100644 --- a/docs.json +++ b/docs.json @@ -6,9 +6,9 @@ "timestamp": true }, "colors": { - "primary": "#18794E", + "primary": "#3CB540", "light": "#2b9a66", - "dark": "#18794E" + "dark": "#3CB540" }, "favicon": "/favicon.png", "navigation": { @@ -20,32 +20,6 @@ { "language": "en", "tabs": [ - { - "tab": "Internal Hub", - "hidden": true, - "icon": "info-circle", - "anchors": [ - { - "anchor": "Internal Hub", - "icon": "info-circle", - "groups": [ - { - "group": "Internal Hub", - "pages": [ - "v2/pages/09_internal/internal-overview", - "v2/pages/09_internal/docs-status", - "v2/pages/09_internal/strategic-alignment", - "v2/pages/09_internal/docs-philosophy", - "v2/pages/09_internal/definitions", - "v2/pages/09_internal/personas", - "v2/pages/09_internal/ecosystem", - "v2/pages/09_internal/references" - ] - } - ] - } - ] - }, { "tab": "Home", "icon": "house-heart", @@ -58,65 +32,66 @@ "group": "Home", "icon": "house-heart", "pages": [ - "v2/pages/00_home/Landing", - "v2/pages/00_home/home/livepeer-tl-dr", - "v2/pages/00_home/home/trending-at-livepeer" + "v2/pages/00_home/mission-control", + "v2/pages/00_home/home/primer", + "v2/pages/00_home/home/trending-topics" ] }, { - "group": "Livepeer Showcase", - "icon": "clapperboard-play", + "group": "Livepeer", + "icon": "/snippets/assets/logos/Livepeer-Logo-Symbol-Theme.svg", "pages": [ - "v2/pages/00_home/project-showcase/projects-built-on-livepeer", - "v2/pages/00_home/project-showcase/livepeer-applications", - "v2/pages/00_home/project-showcase/industry-verticals" + "v2/pages/00_home/introduction/vision", + "v2/pages/00_home/introduction/evolution", + "v2/pages/00_home/introduction/why-livepeer", + "v2/pages/00_home/introduction/ecosystem", + "v2/pages/00_home/introduction/roadmap" ] }, { - "group": "Get Started", - "icon": "arrow-right-to-bracket", + "group": "Showcase", + "icon": "clapperboard-play", 
"pages": [ - "v2/pages/00_home/get-started/use-livepeer", - "v2/pages/00_home/get-started/stream-video-quickstart", - "v2/pages/00_home/get-started/livepeer-ai-quickstart", - "v2/pages/00_home/get-started/build-on-livepeer" + "v2/pages/00_home/project-showcase/showcase", + "v2/pages/00_home/project-showcase/applications", + "v2/pages/00_home/project-showcase/industry-verticals", + "v2/pages/00_home/project-showcase/landscape" ] } ] }, { - "anchor": "Reference HUB", - "icon": "books", - "pages": ["v2/pages/07_resources/redirect"] + "anchor": "Get Started!", + "icon": "play", + "pages": ["v2/pages/03_developers/building-on-livepeer/"] }, { - "anchor": "Help Center", - "icon": "comments-question-check", - "pages": ["v2/pages/08_help/redirect"] + "anchor": "Resource HUB", + "icon": "books", + "pages": ["v2/pages/07_resources/redirect"] }, { "anchor": " ", - "icon": "-", - "href": " " + "icon": "horizontal-rule", + "pages": [" "] } ] }, { "tab": "About", - "icon": "graduation-cap", + "icon": "camera-movie", "anchors": [ { "anchor": "About Livepeer", - "icon": "graduation-cap", + "icon": "play", "groups": [ { "group": "About Livepeer", "icon": "graduation-cap", "pages": [ - "v2/pages/01_about/about-livepeer/livepeer-overview", - "v2/pages/01_about/about-livepeer/why-livepeer", - "v2/pages/01_about/about-livepeer/livepeer-evolution", - "v2/pages/01_about/about-livepeer/livepeer-ecosystem" + "v2/pages/01_about/about-portal", + "v2/pages/01_about/core-concepts/livepeer-core-concepts", + "v2/pages/01_about/core-concepts/livepeer-glossary" ] }, { @@ -125,102 +100,95 @@ "pages": [ "v2/pages/01_about/livepeer-protocol/protocol-overview", "v2/pages/01_about/livepeer-protocol/livepeer-whitepaper", - "v2/pages/01_about/livepeer-protocol/technical-overview" + "v2/pages/01_about/livepeer-protocol/technical-overview", + "v2/pages/01_about/livepeer-protocol/protocol-mechanisms" ] }, { "group": "Livepeer Network", "icon": "circle-nodes", "pages": [ + 
"v2/pages/01_about/livepeer-network/network-overview", "v2/pages/01_about/livepeer-network/actor-overview", - "v2/pages/01_about/livepeer-network/livepeer-token-economics", - "v2/pages/01_about/livepeer-network/livepeer-governance" + "v2/pages/01_about/livepeer-network/governance-model", + "v2/pages/01_about/livepeer-network/token", + "v2/pages/01_about/livepeer-network/treasury" ] } ] }, { - "anchor": "Reference HUB", + "anchor": "Resource HUB", "icon": "books", "pages": ["v2/pages/07_resources/redirect"] }, - { - "anchor": "Help Center", - "icon": "comments-question-check", - "pages": ["v2/pages/08_help/redirect"] - }, { "anchor": " ", - "icon": "-", - "href": " " + "icon": "horizontal-rule", + "pages": [" "] } ] }, { - "tab": "Community", - "icon": "people-group", + "tab": "Products", + "icon": "film-canister", "anchors": [ { - "anchor": "Community", - "icon": "people-group", + "anchor": "Products", + "icon": "display-code", "groups": [ { - "group": "Livepeer Community", - "icon": "people-group", + "group": "Use Livepeer", + "icon": "play", "pages": [ - "v2/pages/02_community/community-home", - "v2/pages/02_community/livepeer-community/livepeer-Latest-Topics", - "v2/pages/02_community/livepeer-community/community-guidelines" + "v2/pages/010_products/products-portal", + "v2/pages/010_products/products/builder-hub" ] }, { - "group": "Livepeer Connect", - "icon": "hashtag", + "group": "Daydream", + "icon": "video-camera", "pages": [ - "v2/pages/02_community/livepeer-connect/news-and-socials", - "v2/pages/02_community/livepeer-connect/events-and-community-streams", - "v2/pages/02_community/livepeer-connect/forums-and-discussions" + "v2/pages/010_products/products/daydream/daydream" ] }, { - "group": "Livepeer Contribute", - "icon": "door-open", + "group": "Livepeer Studio", + "icon": "user-robot", "pages": [ - "v2/pages/02_community/livepeer-contribute/contribute", - "v2/pages/02_community/livepeer-contribute/opportunities", - 
"v2/pages/02_community/livepeer-contribute/build-livepeer" + "v2/pages/010_products/products/livepeer-studio/livepeer-studio" ] }, { - "group": "[MOVE HERE] Help Center", - "icon": "comments-question-check", - "hidden": true, + "group": "Stream.place", + "icon": "video-camera", "pages": [ - "v2/pages/02_community/livepeer-community/trending-test" + "v2/pages/010_products/products/streamplace/streamplace", + "v2/pages/010_products/products/streamplace/streamplace-guide", + "v2/pages/010_products/products/streamplace/streamplace-architecture", + "v2/pages/010_products/products/streamplace/streamplace-integration", + "v2/pages/010_products/products/streamplace/streamplace-provenance", + "v2/pages/010_products/products/streamplace/streamplace-funding" ] }, { - "group": "[TO DELETE] Tests", + "group": "All Ecosystem Products", + "icon": "video-camera", "pages": [ - "v2/pages/02_community/livepeer-community/trending-test" + "v2/pages/010_products/products/all-ecosystem/ecosystem-products" ] } ] }, { - "anchor": "Reference HUB", + "anchor": "Resource HUB", "icon": "books", "pages": ["v2/pages/07_resources/redirect"] }, - { - "anchor": "Help Center", - "icon": "comments-question-check", - "pages": ["v2/pages/08_help/redirect"] - }, { "anchor": " ", - "icon": "-", - "href": " " + "icon": "horizontal-rule", + "pages": [" "] } ] }, @@ -236,75 +204,38 @@ "group": "Building on Livepeer", "icon": "code", "pages": [ - "v2/pages/03_developers/developer-home", - "v2/pages/03_developers/building-on-livepeer/developer-guide" - ] - }, - { - "group": "Quickstart", - "icon": "fast-forward", - "pages": [ - { - "group": "Real-time Video", - "pages": [ - "v2/pages/03_developers/building-on-livepeer/quick-starts/livepeer-ai", - "v2/pages/03_developers/livepeer-real-time-video/video-streaming-on-livepeer/README.mdx" - ] - }, - { - "group": "AI Pipelines", - "pages": [ - "v2/pages/03_developers/building-on-livepeer/quick-starts/video-streaming", - 
"v2/pages/03_developers/building-on-livepeer/quick-starts/livepeer-ai" - ] - } - ] - }, - { - "group": "Developer Platforms", - "icon": "gear-code", - "pages": [ - "v2/pages/03_developers/developer-platforms/builder-hub", + "v2/pages/03_developers/developer-portal", + "v2/pages/03_developers/building-on-livepeer/developer-guide", { - "group": "Daydream", - "pages": [ - "v2/pages/03_developers/developer-platforms/daydream/daydream" - ] - }, - { - "group": "Livepeer Studio", - "pages": [ - "v2/pages/03_developers/developer-platforms/livepeer-studio/livepeer-studio" - ] - }, - { - "group": "Frameworks", - "pages": [ - "v2/pages/03_developers/developer-platforms/frameworks/frameworks" - ] - }, - { - "group": "Streamplace", - "pages": [ - "v2/pages/03_developers/developer-platforms/streamplace/streamplace" - ] - }, - { - "group": "All Ecosystem Products", + "group": "Quickstart", + "icon": "fast-forward", + "expanded": true, "pages": [ - "v2/pages/03_developers/developer-platforms/all-ecosystem/ecosystem-products/ecosystem-products" + { + "group": "Real-time Video", + "pages": [ + "v2/pages/03_developers/building-on-livepeer/quick-starts/livepeer-ai", + "v2/pages/03_developers/livepeer-real-time-video/video-streaming-on-livepeer/README.mdx" + ] + }, + { + "group": "AI Pipelines", + "pages": [ + "v2/pages/03_developers/building-on-livepeer/quick-starts/video-streaming", + "v2/pages/03_developers/building-on-livepeer/quick-starts/livepeer-ai" + ] + } ] } ] }, { - "group": "Developer Tools", - "icon": "tools", + "group": "AI Pipelines", + "icon": "user-robot", "pages": [ - "v2/pages/03_developers/developer-tools/tooling-hub", - "v2/pages/03_developers/developer-tools/livepeer-explorer", - "v2/pages/03_developers/developer-tools/livepeer-cloud", - "v2/pages/03_developers/developer-tools/dashboards" + "v2/pages/03_developers/ai-inference-on-livepeer/ai-pipelines/overview", + "v2/pages/03_developers/ai-inference-on-livepeer/ai-pipelines/byoc", + 
"v2/pages/03_developers/ai-inference-on-livepeer/ai-pipelines/comfystream" ] }, { @@ -324,6 +255,27 @@ "v2/pages/03_developers/builder-opportunities/dev-programs", "v2/pages/03_developers/builder-opportunities/livepeer-rfps" ] + } + ] + }, + { + "anchor": "Resource HUB", + "icon": "books", + "pages": ["v2/pages/07_resources/redirect"] + }, + { + "anchor": " ", + "icon": "horizontal-rule", + "pages": [ + { + "group": "Developer Tools", + "icon": "tools", + "pages": [ + "v2/pages/03_developers/developer-tools/tooling-hub", + "v2/pages/03_developers/developer-tools/livepeer-explorer", + "v2/pages/03_developers/developer-tools/livepeer-cloud", + "v2/pages/03_developers/developer-tools/dashboards" + ] }, { "group": "Technical References", @@ -350,21 +302,6 @@ ] } ] - }, - { - "anchor": "Reference HUB", - "icon": "books", - "pages": ["v2/pages/07_resources/redirect"] - }, - { - "anchor": "Help Center", - "icon": "comments-question-check", - "pages": ["v2/pages/08_help/redirect"] - }, - { - "anchor": " ", - "icon": "-", - "href": " " } ] }, @@ -380,7 +317,7 @@ "group": "About Gateways", "icon": "graduation-cap", "pages": [ - "v2/pages/04_gateways/gateways-home", + "v2/pages/04_gateways/gateways-portal", { "group": "Gateway Knowledge Hub", "expanded": true, @@ -393,6 +330,14 @@ } ] }, + { + "group": "Quickstart", + "icon": "/snippets/assets/logos/Livepeer-Logo-Symbol-Light.svg", + "pages": [ + "v2/pages/04_gateways/run-a-gateway/quickstart-a-gateway", + "v2/pages/04_gateways/run-a-gateway/get-AI-to-setup-the-gateway" + ] + }, { "group": "Gateway Services & Providers", "icon": "wand-magic-sparkles", @@ -564,22 +509,9 @@ ] }, { - "anchor": "Quickstart", - "icon": "fast-forward", - "pages": [ - "v2/pages/04_gateways/run-a-gateway/quickstart-a-gateway", - "v2/pages/04_gateways/run-a-gateway/get-AI-to-setup-the-gateway" - ] - }, - { - "anchor": "Quick Links", - "icon": "person-to-portal", - "pages": ["v2/pages/04_gateways/references/"] - }, - { - "anchor": "Resources", + 
"anchor": "Resource HUB", "icon": "books", - "pages": ["v2/pages/04_gateways/references/"] + "pages": ["v2/pages/07_resources/redirect"] }, { "anchor": " ", @@ -600,7 +532,7 @@ "group": "About Orchestrators (GPU Nodes)", "icon": "graduation-cap", "pages": [ - "v2/pages/05_orchestrators/orchestrators-home", + "v2/pages/05_orchestrators/orchestrators-portal", "v2/pages/05_orchestrators/about-orchestrators/overview", { "group": "Orchestrator Functions", @@ -647,24 +579,19 @@ ] }, { - "anchor": "Reference HUB", + "anchor": "Resource HUB", "icon": "books", "pages": ["v2/pages/07_resources/redirect"] }, - { - "anchor": "Help Center", - "icon": "comments-question-check", - "pages": ["v2/pages/08_help/redirect"] - }, { "anchor": " ", - "icon": "-", - "href": " " + "icon": "horizontal-rule", + "pages": [" "] } ] }, { - "tab": "Delegators & LPT", + "tab": "LP Token", "icon": "hand-holding-dollar", "anchors": [ { @@ -675,7 +602,7 @@ "group": "About LPT", "icon": "graduation-cap", "pages": [ - "v2/pages/06_delegators/token-home", + "v2/pages/06_delegators/token-portal", "v2/pages/06_delegators/about-lpt-livepeer-token/overview", "v2/pages/06_delegators/about-lpt-livepeer-token/why-have-a-token", "v2/pages/06_delegators/about-lpt-livepeer-token/livepeer-token-economics", @@ -703,7 +630,7 @@ }, { "group": "Livepeer Treasury", - "pages": [] + "pages": [" "] }, { "group": "Guides & Resources", @@ -716,24 +643,82 @@ ] }, { - "anchor": "Reference HUB", + "anchor": "Resource HUB", "icon": "books", "pages": ["v2/pages/07_resources/redirect"] }, { - "anchor": "Help Center", - "icon": "comments-question-check", - "pages": ["v2/pages/08_help/redirect"] + "anchor": " ", + "icon": "horizontal-rule", + "pages": [" "] + } + ] + }, + { + "tab": "Community", + "icon": "people-group", + "anchors": [ + { + "anchor": "Community", + "icon": "people-group", + "groups": [ + { + "group": "Livepeer Community", + "icon": "people-group", + "pages": [ + "v2/pages/02_community/community-portal", + 
"v2/pages/02_community/livepeer-community/livepeer-Latest-Topics", + "v2/pages/02_community/livepeer-community/community-guidelines" + ] + }, + { + "group": "Livepeer Connect", + "icon": "hashtag", + "pages": [ + "v2/pages/02_community/livepeer-connect/news-and-socials", + "v2/pages/02_community/livepeer-connect/events-and-community-streams", + "v2/pages/02_community/livepeer-connect/forums-and-discussions" + ] + }, + { + "group": "Livepeer Contribute", + "icon": "door-open", + "pages": [ + "v2/pages/02_community/livepeer-contribute/contribute", + "v2/pages/02_community/livepeer-contribute/opportunities", + "v2/pages/02_community/livepeer-contribute/build-livepeer" + ] + }, + { + "group": "[MOVE HERE] Help Center", + "icon": "comments-question-check", + "hidden": true, + "pages": [ + "v2/pages/02_community/livepeer-community/trending-test" + ] + }, + { + "group": "[TO DELETE] Tests", + "pages": [ + "v2/pages/02_community/livepeer-community/trending-test" + ] + } + ] + }, + { + "anchor": "Resource HUB", + "icon": "books", + "pages": ["v2/pages/07_resources/redirect"] }, { "anchor": " ", - "icon": "-", - "href": " " + "icon": "horizontal-rule", + "pages": [" "] } ] }, { - "tab": "Reference HUB", + "tab": "Resource HUB", "hidden": false, "icon": "books", "anchors": [ @@ -744,7 +729,7 @@ { "group": "Home", "icon": "house", - "pages": ["v2/pages/07_resources/resources_hub"] + "pages": ["v2/pages/07_resources/resources-portal"] }, { "group": "Documentation Guide", @@ -808,7 +793,7 @@ "pages": [ { "group": "Protocol References", - "pages": [] + "pages": [" "] } ] }, @@ -829,7 +814,7 @@ }, { "anchor": " ", - "icon": "-", + "icon": "horizontal-rule", "href": " " } ] @@ -849,24 +834,24 @@ }, { "group": "Delegating LPT", - "pages": [] + "pages": [" "] }, { "group": "Livepeer Governance", - "pages": [] + "pages": [" "] }, { "group": "Livepeer Treasury", - "pages": [] + "pages": [" "] }, { "group": "Token Resources", - "pages": [] + "pages": [" "] } ] }, { - "anchor": 
"Reference HUB", + "anchor": "Resource HUB", "icon": "books", "pages": ["v2/pages/07_resources/redirect"] }, @@ -876,6 +861,32 @@ "href": " " } ] + }, + { + "tab": "Internal Hub", + "hidden": true, + "icon": "info-circle", + "anchors": [ + { + "anchor": "Internal Hub", + "icon": "info-circle", + "groups": [ + { + "group": "Internal Hub", + "pages": [ + "v2/pages/09_internal/internal-overview", + "v2/pages/09_internal/docs-status", + "v2/pages/09_internal/strategic-alignment", + "v2/pages/09_internal/docs-philosophy", + "v2/pages/09_internal/definitions", + "v2/pages/09_internal/personas", + "v2/pages/09_internal/ecosystem", + "v2/pages/09_internal/references" + ] + } + ] + } + ] } ] } @@ -3007,7 +3018,7 @@ "redirects": [ { "source": "/v2/pages/07_resources/redirect", - "destination": "/v2/pages/07_resources/resources_hub" + "destination": "/v2/pages/07_resources/resources-portal" }, { "source": "/v2/pages/08_help/redirect", diff --git a/mintOld.json b/mintOld.json deleted file mode 100644 index ab684bf8..00000000 --- a/mintOld.json +++ /dev/null @@ -1,941 +0,0 @@ -{ - "$schema": "https://mintlify.com/schema.json", - "name": "Livepeer Docs", - "logo": { - "dark": "/logo/dark.svg", - "light": "/logo/light.svg" - }, - "redirects": [ - { - "source": "/guides/developing/quickstart", - "destination": "/developers/quick-start" - }, - { - "source": "/guides/overview", - "destination": "/developers/guides/overview" - }, - { - "source": "/guides/developing/player", - "destination": "/developers/guides/playback-an-asset" - }, - { - "source": "/guides/developing/create-a-livestream", - "destination": "/developers/guides/create-livestream" - }, - { - "source": "/guides/developing/stream-via-obs", - "destination": "/developers/guides/stream-via-obs" - }, - { - "source": "/developing/stream-via-browser", - "destination": "/developers/guides/livestream-from-browser" - }, - { - "source": "/guides/developing/upload-a-video-asset", - "destination": 
"/developers/guides/upload-video-asset" - }, - { - "source": "/guides/developing/mint-a-video-nft", - "destination": "/developers/guides/mint-video-nft" - }, - { - "source": "/guides/developing/dstorage-playback", - "destination": "/developers/guides/dstorage-playback" - }, - { - "source": "/developers/guides/dstorage-playback", - "destination": "/developers/guides/upload-video-asset" - }, - { - "source": "/guides/developing/access-control", - "destination": "/developers/guides/access-control-webhooks" - }, - { - "source": "/guides/developing/access-control-vod", - "destination": "/developers/guides/access-control-webhooks" - }, - { - "source": "/guides/developing/encrypted-vod", - "destination": "/developers/guides/encrypted-asset" - }, - { - "source": "/guides/developing/listen-for-webhooks", - "destination": "/developers/guides/setup-and-listen-to-webhooks" - }, - { - "source": "/guides/developing/multistream", - "destination": "/developers/guides/multistream" - }, - { - "source": "/guides/developing/monitor-stream-health", - "destination": "/developers/guides/monitor-stream-health" - }, - { - "source": "/guides/developing/viewer-engagement", - "destination": "/developers/guides/get-engagement-analytics-via-api" - }, - { - "source": "/guides/developing/transcode-video-storj", - "destination": "/developers/guides/transcode-video-storj" - }, - { - "source": "/guides/developing/transcode-video-w3s", - "destination": "/developers/guides/transcode-video-w3s" - }, - { - "source": "/tutorials/developing/optimize-latency", - "destination": "/developers/guides/optimize-latency-of-a-livestream" - }, - { - "source": "/tutorials/developing/analyze-engagement-timeplus", - "destination": "/developers/guides/get-engagement-analytics-via-timeplus" - }, - { - "source": "/tutorials/developing/visualize-engagement-metrics-grafana", - "destination": "/developers/guides/get-engagement-analytics-via-grafana" - }, - { - "source": 
"/tutorials/developing/token-gate-videos-using-guildxyz", - "destination": "/developers/tutorials/token-gate-videos-with-lit" - }, - { - "source": "/tutorials/developing/token-gate-videos-using-lit", - "destination": "/developers/tutorials/token-gate-videos-with-lit" - }, - { - "source": "/tutorials/developing/build-decentralized-video-app-with-fvm", - "destination": "/developers/tutorials/decentralized-app-with-fvm" - }, - { - "source": "/tutorials/developing/upload-playback-videos-on-ipfs-4everland", - "destination": "/developers/tutorials/upload-playback-videos-4everland" - }, - { - "source": "/tutorials/developing/upload-playback-videos-on-ipfs", - "destination": "/developers/tutorials/upload-playback-videos-on-ipfs" - }, - { - "source": "/tutorials/developing/upload-playback-videos-on-arweave", - "destination": "/developers/tutorials/upload-playback-videos-on-arweave" - }, - { - "source": "/reference/api", - "destination": "/api-reference/overview/introduction" - }, - { - "source": "/reference/deployed-contract-addresses", - "destination": "/references/contract-addresses" - }, - { - "source": "/reference/example-applications", - "destination": "/references/example-applications" - }, - { - "source": "/reference/api-support-matrix", - "destination": "/references/api-support-matrix" - }, - { - "source": "/reference/go-livepeer", - "destination": "/references/go-livepeer/bandwidth-requirements" - }, - { - "source": "/reference/go-livepeer/cli-reference", - "destination": "/references/go-livepeer/cli-reference" - }, - { - "source": "/reference/go-livepeer/gpu-support", - "destination": "/references/go-livepeer/gpu-support" - }, - { - "source": "/reference/go-livepeer/hardware-requirements", - "destination": "/references/go-livepeer/hardware-requirements" - }, - { - "source": "/reference/go-livepeer/bandwidth-requirements", - "destination": "/references/go-livepeer/bandwidth-requirements" - }, - { - "source": "/reference/go-livepeer/prometheus-metrics", - 
"destination": "/references/go-livepeer/prometheus-metrics" - }, - { - "source": "/guides/delegating/bridge-lpt-to-arbitrum", - "destination": "/delegators/guides/bridge-lpt-to-arbitrum" - }, - { - "source": "/guides/delegating/migrate-stake-to-arbitrum", - "destination": "/delegators/guides/migrate-stake-to-arbitrum" - }, - { - "source": "/delegators/reference/yield-calculation", - "destination": "/delegators/guides/yield-calculation" - }, - { - "source": "/guides/orchestrating/get-started", - "destination": "/orchestrators/guides/get-started" - }, - { - "source": "/guides/orchestrating/install-go-livepeer", - "destination": "/orchestrators/guides/install-go-livepeer" - }, - { - "source": "/guides/orchestrating/connect-to-arbitrum", - "destination": "/orchestrators/guides/connect-to-arbitrum" - }, - { - "source": "/guides/orchestrating/configure-reward-calling", - "destination": "/orchestrators/guides/configure-reward-calling" - }, - { - "source": "/guides/orchestrating/set-session-limits", - "destination": "/orchestrators/guides/set-session-limits" - }, - { - "source": "/guides/orchestrating/set-pricing", - "destination": "/orchestrators/guides/set-pricing" - }, - { - "source": "/guides/orchestrating/benchmark-transcoding", - "destination": "/orchestrators/guides/benchmark-transcoding" - }, - { - "source": "/guides/orchestrating/assess-capabilities", - "destination": "/orchestrators/guides/assess-capabilities" - }, - { - "source": "/guides/orchestrating/monitor-metrics", - "destination": "/orchestrators/guides/monitor-metrics" - }, - { - "source": "/guides/orchestrating/vote", - "destination": "/orchestrators/guides/vote" - }, - { - "source": "/guides/orchestrating/dual-mine", - "destination": "/orchestrators/guides/dual-mine" - }, - { - "source": "/guides/orchestrating/o-t-split", - "destination": "/orchestrators/guides/o-t-split" - }, - { - "source": "/guides/orchestrating/migrate-to-arbitrum", - "destination": "/orchestrators/guides/migrate-to-arbitrum" - }, - 
{ - "source": "/guides/orchestrating/migrate-from-contract-wallet", - "destination": "/orchestrators/guides/migrate-from-contract-wallet" - }, - { - "source": "/guides/orchestrating/gateway-introspection", - "destination": "/orchestrators/guides/gateway-introspection" - }, - { - "source": "/guides/orchestrating/troubleshoot", - "destination": "/orchestrators/guides/troubleshoot" - }, - { - "source": "/reference/react", - "destination": "/react/getting-started" - }, - { - "source": "/reference/react/getting-started", - "destination": "/react/getting-started" - }, - { - "source": "/reference/react/client", - "destination": "/react/getting-started" - }, - { - "source": "/reference/react/LivepeerConfig", - "destination": "/sdks/react/migration/3.x/LivepeerConfig" - }, - { - "source": "/reference/react/Player", - "destination": "/react/player/Root" - }, - { - "source": "/reference/react/Broadcast", - "destination": "/react/broadcast/Root" - }, - { - "source": "/reference/react/providers/studio", - "destination": "/sdks/react/migration/3.x/providers/studio" - }, - { - "source": "/reference/react/asset/useAsset", - "destination": "/sdks/react/migration/3.x/asset/useAsset" - }, - { - "source": "/reference/react/asset/useCreateAsset", - "destination": "/sdks/react/migration/3.x/asset/useCreateAsset" - }, - { - "source": "/reference/react/asset/useAssetMetrics", - "destination": "/sdks/react/migration/3.x/asset/useAssetMetrics" - }, - { - "source": "/reference/react/asset/useUpdateAsset", - "destination": "/sdks/react/migration/3.x/asset/useUpdateAsset" - }, - { - "source": "/reference/react/stream/useStream", - "destination": "/sdks/react/migration/3.x/stream/useStream" - }, - { - "source": "/reference/react/stream/useStreamSession", - "destination": "/sdks/react/migration/3.x/stream/useStreamSession" - }, - { - "source": "/reference/react/stream/useStreamSessions", - "destination": "/sdks/react/migration/3.x/stream/useStreamSessions" - }, - { - "source": 
"/reference/react/stream/useCreateStream", - "destination": "/sdks/react/migration/3.x/stream/useCreateStream" - }, - { - "source": "/reference/react/stream/useUpdateStream", - "destination": "/sdks/react/migration/3.x/stream/useUpdateStream" - }, - { - "source": "/reference/react/playback/usePlaybackInfo", - "destination": "/sdks/react/migration/3.x/playback/usePlaybackInfo" - }, - { - "source": "/reference/react/constants/abis", - "destination": "/sdks/react/migration/3.x/constants/abis" - }, - { - "source": "/reference/react/constants/contract-addresses", - "destination": "/sdks/react/migration/3.x/constants/contract-addresses" - } - ], - "favicon": "/favicon.png", - "colors": { - "primary": "#18794E", - "light": "#2b9a66", - "dark": "#18794E" - }, - "theme": "prism", - "feedback": { - "suggestEdit": true, - "raiseIssue": true, - "thumbsRating": true - }, - "openapi": "./openapi.yaml", - "api": { - "baseUrl": "https://livepeer.studio/api" - }, - "modeToggle": { - "default": "dark" - }, - "topbarLinks": [ - { - "name": "Discord", - "url": "https://discord.gg/livepeer" - } - ], - "versions": ["Developers", "Delegators", "Orchestrators", "Gateways"], - "topbarCtaButton": { - "name": "Dashboard", - "url": "https://livepeer.studio" - }, - "topAnchor": { - "name": "Documentation", - "icon": "code" - }, - "anchors": [ - { - "name": "API Reference", - "icon": "rectangle-terminal", - "url": "api-reference" - }, - { - "name": "SDKs", - "icon": "brackets-curly", - "url": "sdks" - }, - { - "name": "AI Video (Beta)", - "icon": "microchip-ai", - "iconType": "regular", - "url": "ai" - }, - { - "name": "What's New", - "icon": "rocket", - "url": "https://livepeer.canny.io/changelog" - }, - { - "name": "Community", - "icon": "discord", - "url": "https://discord.gg/livepeer", - "iconType": "brands" - } - ], - "navigation": [ - { - "group": "Getting Started", - "pages": [ - "developers/introduction", - "developers/quick-start", - "developers/livepeer-studio-cli" - ] - }, - { - 
"group": "Guides", - "pages": [ - "developers/guides/overview", - { - "group": "Assets", - "icon": "video", - "pages": [ - "developers/guides/upload-video-asset", - "developers/guides/playback-an-asset", - "developers/guides/listen-to-asset-events", - "developers/guides/encrypted-asset", - "developers/guides/thumbnails-vod" - ] - }, - { - "group": "Livestream", - "icon": "camera", - "pages": [ - "developers/guides/create-livestream", - "developers/guides/playback-a-livestream", - "developers/guides/stream-via-obs", - "developers/guides/livestream-from-browser", - "developers/guides/optimize-latency-of-a-livestream", - "developers/guides/monitor-stream-health", - "developers/guides/listen-to-stream-events", - "developers/guides/multistream", - "developers/guides/clip-a-livestream", - "developers/guides/thumbnails-live" - ] - }, - { - "group": "Access control", - "icon": "lock", - "pages": [ - "developers/guides/access-control-webhooks", - "developers/guides/access-control-jwt" - ] - }, - { - "group": "Webhooks", - "icon": "bell", - "pages": ["developers/guides/setup-and-listen-to-webhooks"] - }, - { - "group": "Transcode API", - "icon": "photo-film", - "pages": [ - "developers/guides/transcode-video-storj", - "developers/guides/transcode-video-w3s" - ] - }, - { - "group": "Viewership Metrics", - "icon": "chart-bar", - "pages": [ - "developers/guides/get-engagement-analytics-via-api", - "developers/guides/get-engagement-analytics-via-grafana", - "developers/guides/get-engagement-analytics-via-timeplus" - ] - }, - { - "group": "Projects", - "icon": "folder-open", - "pages": ["developers/guides/managing-projects"] - }, - { - "group": "Integrations", - "icon": "puzzle-piece", - "pages": [ - "developers/tutorials/decentralized-app-with-fvm", - "developers/tutorials/token-gate-videos-with-lit", - { - "group": "Storage Provider Integration", - "pages": [ - "developers/tutorials/upload-playback-videos-4everland", - "developers/tutorials/upload-playback-videos-on-arweave", - 
"developers/tutorials/upload-playback-videos-on-ipfs" - ] - } - ] - } - ], - "version": "Developers" - }, - { - "group": "Guides", - "pages": [ - "delegators/guides/bridge-lpt-to-arbitrum", - "delegators/guides/migrate-stake-to-arbitrum", - "delegators/guides/yield-calculation" - ], - "version": "Delegators" - }, - { - "group": "Guides", - "pages": [ - "orchestrators/guides/get-started", - "orchestrators/guides/install-go-livepeer", - "orchestrators/guides/connect-to-arbitrum", - "orchestrators/guides/configure-reward-calling", - "orchestrators/guides/set-session-limits", - "orchestrators/guides/set-pricing", - "orchestrators/guides/benchmark-transcoding", - "orchestrators/guides/assess-capabilities", - "orchestrators/guides/monitor-metrics", - "orchestrators/guides/vote", - "orchestrators/guides/dual-mine", - "orchestrators/guides/o-t-split", - "orchestrators/guides/migrate-to-arbitrum", - "orchestrators/guides/migrate-from-contract-wallet", - "orchestrators/guides/gateway-introspection", - "orchestrators/guides/troubleshoot" - ], - "version": "Orchestrators" - }, - { - "group": "Guides", - "pages": [ - "gateways/guides/gateway-overview", - "gateways/guides/docker-install", - "gateways/guides/linux-install", - "gateways/guides/windows-install", - "gateways/guides/transcoding-options", - "gateways/guides/fund-gateway", - "gateways/guides/publish-content", - "gateways/guides/playback-content" - ], - "version": "Gateways" - }, - { - "group": "AI Video", - "pages": [ - "ai/introduction", - "ai/whats-new", - { - "group": "AI Pipelines", - "icon": "wand-magic-sparkles", - "iconType": "solid", - "pages": [ - "ai/pipelines/overview", - "ai/pipelines/audio-to-text", - "ai/pipelines/image-to-image", - "ai/pipelines/image-to-text", - "ai/pipelines/image-to-video", - "ai/pipelines/llm", - "ai/pipelines/segment-anything-2", - "ai/pipelines/text-to-image", - "ai/pipelines/text-to-speech", - "ai/pipelines/upscale" - ] - }, - { - "group": "Setup an AI Orchestrator", - "icon": 
"robot", - "iconType": "solid", - "pages": [ - "ai/orchestrators/get-started", - "ai/orchestrators/models-config", - "ai/orchestrators/models-download", - "ai/orchestrators/start-orchestrator", - "ai/orchestrators/ai-worker", - "ai/orchestrators/benchmarking", - "ai/orchestrators/onchain" - ] - }, - { - "group": "Setup an AI Gateway", - "icon": "signal-stream", - "iconType": "solid", - "pages": [ - "ai/gateways/get-started", - "ai/gateways/start-gateway", - "ai/gateways/onchain" - ] - }, - { - "group": "AI Builders", - "icon": "screwdriver-wrench", - "iconType": "solid", - "pages": [ - "ai/builders/get-started", - "ai/builders/gateways", - "ai/builders/showcase" - ] - }, - { - "group": "How to Contribute", - "icon": "heart", - "iconType": "solid", - "pages": ["ai/contributors/coming-soon"] - }, - { - "group": "SDKs", - "icon": "brackets-curly", - "pages": [ - "ai/sdks/overview", - "ai/sdks/go", - "ai/sdks/javascript", - "ai/sdks/python" - ] - }, - { - "group": "AI API Reference", - "icon": "rectangle-terminal", - "pages": [ - "ai/api-reference/overview", - "ai/api-reference/audio-to-text", - "ai/api-reference/image-to-image", - "ai/api-reference/image-to-text", - "ai/api-reference/image-to-video", - "ai/api-reference/llm", - "ai/api-reference/segment-anything-2", - "ai/api-reference/text-to-image", - "ai/api-reference/text-to-speech", - "ai/api-reference/upscale" - ] - } - ] - }, - { - "group": "References", - "pages": [ - "references/api-support-matrix", - { - "group": "Livepeer Node Software", - "icon": "golang", - "pages": [ - "references/go-livepeer/bandwidth-requirements", - "references/go-livepeer/cli-reference", - "references/go-livepeer/gpu-support", - "references/go-livepeer/hardware-requirements", - "references/go-livepeer/prometheus-metrics" - ] - }, - "references/contract-addresses", - "references/example-applications", - "references/awesome-livepeer", - { - "group": "FAQs", - "icon": "book", - "pages": [ - "references/knowledge-base/livestream", - 
"references/knowledge-base/playback", - "references/knowledge-base/vod" - ] - } - ] - }, - { - "group": "Overview", - "pages": ["sdks/introduction"] - }, - { - "group": "Server-side SDKs", - "pages": ["sdks/javascript", "sdks/go", "sdks/python"] - }, - { - "group": "React Components", - "icon": "circle-play", - "pages": [ - "sdks/react/getting-started", - { - "group": "Player", - "icon": "circle-play", - "pages": [ - "sdks/react/player/Root", - "sdks/react/player/Container", - "sdks/react/player/Video", - "sdks/react/player/Error", - "sdks/react/player/Loading", - "sdks/react/player/Portal", - "sdks/react/player/Poster", - { - "group": "Controls", - "pages": [ - "sdks/react/player/Controls", - "sdks/react/player/Clip", - "sdks/react/player/Fullscreen", - "sdks/react/player/Live", - "sdks/react/player/PictureInPicture", - "sdks/react/player/Play", - "sdks/react/player/RateSelect", - "sdks/react/player/Seek", - "sdks/react/player/Time", - "sdks/react/player/VideoQualitySelect", - "sdks/react/player/Volume" - ] - }, - { - "group": "Functions", - "pages": [ - "sdks/react/player/get-src", - "sdks/react/player/useMediaContext" - ] - } - ] - }, - { - "group": "Broadcast", - "icon": "signal-stream", - "pages": [ - "sdks/react/broadcast/Root", - "sdks/react/broadcast/Container", - "sdks/react/broadcast/Video", - "sdks/react/broadcast/Enabled", - "sdks/react/broadcast/Error", - "sdks/react/broadcast/Loading", - "sdks/react/broadcast/Portal", - { - "group": "Controls", - "pages": [ - "sdks/react/broadcast/Controls", - "sdks/react/broadcast/Audio", - "sdks/react/broadcast/Camera", - "sdks/react/broadcast/Fullscreen", - "sdks/react/broadcast/PictureInPicture", - "sdks/react/broadcast/Screenshare", - "sdks/react/broadcast/Source", - "sdks/react/broadcast/Status" - ] - }, - { - "group": "Functions", - "pages": [ - "sdks/react/broadcast/get-ingest", - "sdks/react/broadcast/useBroadcastContext" - ] - } - ] - }, - { - "group": "Examples", - "icon": "clipboard", - "pages": 
["sdks/react/Player", "sdks/react/Broadcast"] - }, - { - "group": "Migration", - "icon": "right-left", - "pages": [ - "sdks/react/migration/migration-4.x", - { - "group": "Livepeer React (3.x and below)", - "pages": [ - "sdks/react/migration/3.x/getting-started", - "sdks/react/migration/3.x/client", - "sdks/react/migration/3.x/LivepeerConfig", - "sdks/react/migration/3.x/Player", - "sdks/react/migration/3.x/Broadcast", - { - "group": "Asset", - "pages": [ - "sdks/react/migration/3.x/asset/useCreateAsset", - "sdks/react/migration/3.x/asset/useAsset", - "sdks/react/migration/3.x/asset/useUpdateAsset", - "sdks/react/migration/3.x/asset/useAssetMetrics" - ] - }, - { - "group": "Stream", - "pages": [ - "sdks/react/migration/3.x/stream/useCreateStream", - "sdks/react/migration/3.x/stream/useStream", - "sdks/react/migration/3.x/stream/useUpdateStream", - "sdks/react/migration/3.x/stream/useStreamSession", - "sdks/react/migration/3.x/stream/useStreamSessions" - ] - }, - { - "group": "Playback", - "pages": ["sdks/react/migration/3.x/playback/usePlaybackInfo"] - }, - { - "group": "Constants", - "pages": [ - "sdks/react/migration/3.x/constants/abis", - "sdks/react/migration/3.x/constants/contract-addresses" - ] - } - ] - } - ] - } - ] - }, - { - "group": "Overview", - "pages": [ - "api-reference/overview/introduction", - "api-reference/overview/authentication" - ] - }, - { - "group": "APIs", - "pages": [ - { - "group": "Asset", - "icon": "video", - "pages": [ - "api-reference/asset/overview", - "api-reference/asset/upload", - "api-reference/asset/upload-via-url", - "api-reference/asset/get", - "api-reference/asset/update", - "api-reference/asset/delete", - "api-reference/asset/get-all" - ] - }, - { - "group": "Livestream", - "icon": "camera", - "pages": [ - "api-reference/stream/overview", - "api-reference/stream/create", - "api-reference/stream/get", - "api-reference/stream/update", - "api-reference/stream/terminate", - "api-reference/stream/add-multistream-target", - 
"api-reference/stream/delete-multistream-target", - "api-reference/stream/delete", - "api-reference/stream/get-all", - "api-reference/stream/create-clip", - "api-reference/stream/get-clip" - ] - }, - { - "group": "Generate", - "icon": "microchip-ai", - "pages": [ - "api-reference/generate/overview", - "api-reference/generate/audio-to-text", - "api-reference/generate/text-to-image", - "api-reference/generate/image-to-image", - "api-reference/generate/image-to-video", - "api-reference/generate/llm", - "api-reference/generate/segment-anything-2", - "api-reference/generate/upscale" - ] - }, - { - "group": "Multistream target", - "icon": "arrows-split-up-and-left", - "pages": [ - "api-reference/multistream/overview", - "api-reference/multistream/create", - "api-reference/multistream/get", - "api-reference/multistream/update", - "api-reference/multistream/delete", - "api-reference/multistream/get-all" - ] - }, - { - "group": "Session", - "icon": "film", - "pages": [ - "api-reference/session/overview", - "api-reference/session/get", - "api-reference/session/get-all", - "api-reference/session/get-recording", - "api-reference/session/get-clip" - ] - }, - { - "group": "Access control", - "icon": "lock", - "pages": [ - "api-reference/signing-key/overview", - "api-reference/signing-key/create", - "api-reference/signing-key/get", - "api-reference/signing-key/update", - "api-reference/signing-key/delete", - "api-reference/signing-key/get-all" - ] - }, - { - "group": "Webhook", - "icon": "bell", - "pages": [ - "api-reference/webhook/overview", - "api-reference/webhook/create", - "api-reference/webhook/get", - "api-reference/webhook/update", - "api-reference/webhook/delete", - "api-reference/webhook/get-all" - ] - }, - { - "group": "Task", - "icon": "gear", - "pages": [ - "api-reference/task/overview", - "api-reference/task/get-all", - "api-reference/task/get" - ] - }, - { - "group": "Playback", - "icon": "play", - "pages": [ - "api-reference/playback/overview", - 
"api-reference/playback/get" - ] - }, - { - "group": "Transcode", - "icon": "photo-film", - "pages": [ - "api-reference/transcode/overview", - "api-reference/transcode/create" - ] - }, - { - "group": "Viewership", - "icon": "chart-bar", - "pages": [ - "api-reference/viewership/get-realtime-viewership", - "api-reference/viewership/get-viewership-metrics", - "api-reference/viewership/get-usage-metrics", - "api-reference/viewership/get-public-total-views", - "api-reference/viewership/get-creators-metrics" - ] - } - ] - } - ], - "footerSocials": [ - { - "type": "website", - "url": "https://livepeer.org" - }, - { - "type": "github", - "url": "https://github.com/livepeer" - }, - { - "type": "twitter", - "url": "https://twitter.com/livepeer" - }, - { - "type": "discord", - "url": "https://discord.gg/livepeer" - }, - { - "type": "website", - "url": "https://forum.livepeer.org" - } - ], - "search": { - "prompt": "Need help? Ask our AI" - }, - "analytics": { - "ga4": { - "measurementId": "G-P1Z15F6NX4" - } - } -} diff --git a/scripts/download-linkedin-video.sh b/scripts/download-linkedin-video.sh new file mode 100644 index 00000000..f9ff250b --- /dev/null +++ b/scripts/download-linkedin-video.sh @@ -0,0 +1,31 @@ +#!/bin/bash + +# Download LinkedIn video using yt-dlp +# Usage: ./scripts/download-linkedin-video.sh + +LINKEDIN_URL="https://www.linkedin.com/feed/update/urn:li:ugcPost:7387171661868933120" +OUTPUT_DIR="snippets/assets/media/videos" +OUTPUT_FILE="livepeer-founders-post.mp4" + +# Create output directory if it doesn't exist +mkdir -p "$OUTPUT_DIR" + +# Check if yt-dlp is installed +if ! command -v yt-dlp &> /dev/null; then + echo "yt-dlp is not installed. Installing via brew..." + brew install yt-dlp +fi + +# Download the video +echo "Downloading LinkedIn video..." +yt-dlp "$LINKEDIN_URL" -o "$OUTPUT_DIR/$OUTPUT_FILE" + +if [ $? -eq 0 ]; then + echo "✅ Video downloaded successfully to: $OUTPUT_DIR/$OUTPUT_FILE" +else + echo "❌ Failed to download video. 
Try one of these alternatives:" + echo "1. Use https://www.linkedin-video-downloader.com/" + echo "2. Use browser DevTools Network tab" + echo "3. Use a browser extension like Video DownloadHelper" +fi + diff --git a/scripts/download-linkedin-with-cookies.sh b/scripts/download-linkedin-with-cookies.sh new file mode 100644 index 00000000..85cf7ced --- /dev/null +++ b/scripts/download-linkedin-with-cookies.sh @@ -0,0 +1,54 @@ +#!/bin/bash + +# Download LinkedIn video using yt-dlp with browser cookies +# This uses your logged-in session to download the video + +LINKEDIN_URL="https://www.linkedin.com/posts/livepeer_livepeer-mission-to-represent-open-video-activity-7287171661868933120-Uo-7" +OUTPUT_DIR="snippets/assets/media/videos" +OUTPUT_FILE="livepeer-founders-post.mp4" + +# Create output directory if it doesn't exist +mkdir -p "$OUTPUT_DIR" + +echo "Downloading LinkedIn video using browser cookies..." +echo "This will use your logged-in LinkedIn session from Chrome/Firefox/Safari" +echo "" + +# Try with different browsers (yt-dlp will auto-detect) +yt-dlp \ + --cookies-from-browser chrome \ + "$LINKEDIN_URL" \ + -o "$OUTPUT_DIR/$OUTPUT_FILE" \ + --no-warnings + +if [ $? -ne 0 ]; then + echo "" + echo "Chrome cookies failed. Trying Firefox..." + yt-dlp \ + --cookies-from-browser firefox \ + "$LINKEDIN_URL" \ + -o "$OUTPUT_DIR/$OUTPUT_FILE" \ + --no-warnings +fi + +if [ $? -ne 0 ]; then + echo "" + echo "Firefox cookies failed. Trying Safari..." + yt-dlp \ + --cookies-from-browser safari \ + "$LINKEDIN_URL" \ + -o "$OUTPUT_DIR/$OUTPUT_FILE" \ + --no-warnings +fi + +if [ $? -eq 0 ]; then + echo "" + echo "✅ Video downloaded successfully to: $OUTPUT_DIR/$OUTPUT_FILE" +else + echo "" + echo "❌ All methods failed. Please try manual download:" + echo "1. Screen record the video (Cmd+Shift+5 on Mac)" + echo "2. Use a browser extension like Video DownloadHelper" + echo "3. 
Use an online downloader (may not work for private posts)" +fi + diff --git a/snippets/README.md b/snippets/README.md new file mode 100644 index 00000000..dbddf93e --- /dev/null +++ b/snippets/README.md @@ -0,0 +1,6 @@ +See the +[Livepeer Snippets Wiki](https://livepeer-docs-git-docs-v2-livepeer.vercel.app/snippets/wiki/index) +for more information on the snippets folder, its organisation & structure, data, +pages & Livepeer components library within. + +See folders for readme's on specific content. diff --git a/snippets/apiSpecs/README.md b/snippets/apiSpecs/README.md deleted file mode 100644 index 5ead7ff5..00000000 --- a/snippets/apiSpecs/README.md +++ /dev/null @@ -1,84 +0,0 @@ -# ELI5: Generate API Docs in Mintlify - -Follow these super simple steps to turn an OpenAPI spec (YAML/JSON) into -Mintlify docs. - -## 1) Install Mintlify (first time only) - -```bash -npm i -g mintlify -``` - -## 2) Pick your OpenAPI spec and output folder - -- Spec file: for example `ai/worker/api/openapi.yaml` -- Output folder: where the generated MDX pages go, e.g. - `v2/pages/04_gateways/guides-references/api-reference/CLI-HTTP` -- Title: a friendly name shown in nav, e.g. 
`"CLI HTTP"` - -## 3) Run the generator script - -From the repo root: - -```bash -./v2/scripts/generate-api-docs.sh ai/worker/api/openapi.yaml v2/pages/04_gateways/guides-references/api-reference/CLI-HTTP "CLI HTTP" -``` - -Examples: - -```bash -# AI API example -./v2/scripts/generate-api-docs.sh ai/worker/api/openapi.yaml v2/pages/04_gateways/guides-references/api-reference/AI-API "AI API" - -# CLI HTTP example -./v2/scripts/generate-api-docs.sh ai/worker/api/openapi.yaml v2/pages/04_gateways/guides-references/api-reference/CLI-HTTP "CLI HTTP" -``` - -## 4) What gets created - -- MDX pages inside your chosen output folder -- A navigation snippet for `docs.json` (list of page paths as strings) - -## 5) Add the pages to `docs.json` - -Open [docs.json](docs.json) and include the generated pages under the right -group. Important: each item inside `pages` must be a string path. - -Example: - -```json -{ - "group": "CLI HTTP API", - "pages": [ - "v2/pages/04_gateways/guides-references/api-reference/CLI-HTTP/overview", - "v2/pages/04_gateways/guides-references/api-reference/CLI-HTTP/reference" - ] -} -``` - -## 6) Preview locally - -```bash -mint dev -``` - -Open the local preview and click into the new group to see the generated API -docs. - -## 7) Troubleshooting (ELI5) - -- Error: "Incorrect type. Expected string": make sure every entry in `pages` is - a string path (no objects). -- Pages not showing: double-check the output folder path matches what you put in - `docs.json`. -- Need to regenerate: rerun the script after updating your OpenAPI spec. - -## 8) Optional: Build via Docker or Makefile - -```bash -# Docker build (amd64) -docker buildx build --platform linux/amd64 --load -t livepeer/docs . 
- -# Makefile build -make all -``` diff --git a/snippets/assets/README.mdx b/snippets/assets/README.mdx new file mode 100644 index 00000000..80ca3fef --- /dev/null +++ b/snippets/assets/README.mdx @@ -0,0 +1,31 @@ +Assets Folder Structure: + +These assets are organised **By Type** and **By Domain** + +**By Type:** General-purpose assets used across the site (logos, icons, images, videos, data). + +**By Domain:** Page-specific assets organised by documentation section. + +```bash +assets/ +├── logos/ # Brand logos and symbols +│ ├── Livepeer-Logo-Symbol.svg +│ ├── Livepeer-Logo-Symbol-Light.svg +│ └── Livepeer-Logo-Symbol-Theme.svg # Auto-switches dark/light mode colour +├── media/ # General media assets +│ ├── icons/ # Reusable custom icons +│ ├── images/ # Reusable images +│ └── videos/ # Reusable videos +└── domain/ # Domain-specific assets by section + ├── SHARED/ # Assets shared across sections + ├── SITE/ # Site-wide assets (favicon, etc.) + ├── 00_HOME/ + ├── 01_ABOUT/ + ├── 02_COMMUNITY/ + ├── 03_DEVELOPERS/ + ├── 04_GATEWAYS/ + ├── 05_ORCHESTRATORS/ + ├── 06_TOKEN/ + ├── 07_REFERENCES/ + └── 08_OTHER/ +``` diff --git a/v2/assets/technical/Architecture_go-livepeer_DeepWiki.htm b/snippets/assets/data/Architecture_go-livepeer_DeepWiki.htm similarity index 100% rename from v2/assets/technical/Architecture_go-livepeer_DeepWiki.htm rename to snippets/assets/data/Architecture_go-livepeer_DeepWiki.htm diff --git a/v2/assets/technical/DeepWiki_TA-Flows.htm b/snippets/assets/data/DeepWiki_TA-Flows.htm similarity index 100% rename from v2/assets/technical/DeepWiki_TA-Flows.htm rename to snippets/assets/data/DeepWiki_TA-Flows.htm diff --git a/v2/assets/technical/protocol-overview.html b/snippets/assets/data/protocol-overview.html similarity index 100% rename from v2/assets/technical/protocol-overview.html rename to snippets/assets/data/protocol-overview.html diff --git a/v2/assets/about/Building the Decentralized Generative AI Tech Stack.png 
b/snippets/assets/domain/00_HOME/Building the Decentralized Generative AI Tech Stack.png similarity index 100% rename from v2/assets/about/Building the Decentralized Generative AI Tech Stack.png rename to snippets/assets/domain/00_HOME/Building the Decentralized Generative AI Tech Stack.png diff --git a/v2/assets/about/Eric Shreck Gif.gif b/snippets/assets/domain/00_HOME/Eric Shreck Gif.gif similarity index 100% rename from v2/assets/about/Eric Shreck Gif.gif rename to snippets/assets/domain/00_HOME/Eric Shreck Gif.gif diff --git a/v2/assets/hero/hero_about.png b/snippets/assets/domain/00_HOME/Hero_Images/hero_about.png similarity index 100% rename from v2/assets/hero/hero_about.png rename to snippets/assets/domain/00_HOME/Hero_Images/hero_about.png diff --git a/snippets/assets/domain/00_HOME/Hero_Images/hero_ai_run.png b/snippets/assets/domain/00_HOME/Hero_Images/hero_ai_run.png new file mode 100644 index 00000000..4d0f7e11 Binary files /dev/null and b/snippets/assets/domain/00_HOME/Hero_Images/hero_ai_run.png differ diff --git a/v2/assets/hero/hero_community.png b/snippets/assets/domain/00_HOME/Hero_Images/hero_community.png similarity index 100% rename from v2/assets/hero/hero_community.png rename to snippets/assets/domain/00_HOME/Hero_Images/hero_community.png diff --git a/v2/assets/hero/hero_delegators.png b/snippets/assets/domain/00_HOME/Hero_Images/hero_delegators.png similarity index 100% rename from v2/assets/hero/hero_delegators.png rename to snippets/assets/domain/00_HOME/Hero_Images/hero_delegators.png diff --git a/v2/assets/hero/hero_developer.png b/snippets/assets/domain/00_HOME/Hero_Images/hero_developer.png similarity index 100% rename from v2/assets/hero/hero_developer.png rename to snippets/assets/domain/00_HOME/Hero_Images/hero_developer.png diff --git a/v2/assets/hero/hero_developer1.png b/snippets/assets/domain/00_HOME/Hero_Images/hero_developer1.png similarity index 100% rename from v2/assets/hero/hero_developer1.png rename to 
snippets/assets/domain/00_HOME/Hero_Images/hero_developer1.png diff --git a/v2/assets/hero/hero_developer_logo.png b/snippets/assets/domain/00_HOME/Hero_Images/hero_developer_logo.png similarity index 100% rename from v2/assets/hero/hero_developer_logo.png rename to snippets/assets/domain/00_HOME/Hero_Images/hero_developer_logo.png diff --git a/v2/assets/hero/hero_gateways.png b/snippets/assets/domain/00_HOME/Hero_Images/hero_gateways.png similarity index 100% rename from v2/assets/hero/hero_gateways.png rename to snippets/assets/domain/00_HOME/Hero_Images/hero_gateways.png diff --git a/v2/assets/hero/hero_gpu.png b/snippets/assets/domain/00_HOME/Hero_Images/hero_gpu.png similarity index 100% rename from v2/assets/hero/hero_gpu.png rename to snippets/assets/domain/00_HOME/Hero_Images/hero_gpu.png diff --git a/v2/assets/hero/hero_help.png b/snippets/assets/domain/00_HOME/Hero_Images/hero_help.png similarity index 100% rename from v2/assets/hero/hero_help.png rename to snippets/assets/domain/00_HOME/Hero_Images/hero_help.png diff --git a/v2/assets/hero/hero_logo_developer.png b/snippets/assets/domain/00_HOME/Hero_Images/hero_logo_developer.png similarity index 100% rename from v2/assets/hero/hero_logo_developer.png rename to snippets/assets/domain/00_HOME/Hero_Images/hero_logo_developer.png diff --git a/v2/assets/hero/hero_logo_developer_sml.png b/snippets/assets/domain/00_HOME/Hero_Images/hero_logo_developer_sml.png similarity index 100% rename from v2/assets/hero/hero_logo_developer_sml.png rename to snippets/assets/domain/00_HOME/Hero_Images/hero_logo_developer_sml.png diff --git a/v2/assets/hero/hero_logo_new.png b/snippets/assets/domain/00_HOME/Hero_Images/hero_logo_new.png similarity index 100% rename from v2/assets/hero/hero_logo_new.png rename to snippets/assets/domain/00_HOME/Hero_Images/hero_logo_new.png diff --git a/v2/assets/hero/hero_opportunity.png b/snippets/assets/domain/00_HOME/Hero_Images/hero_opportunity.png similarity index 100% rename from 
v2/assets/hero/hero_opportunity.png rename to snippets/assets/domain/00_HOME/Hero_Images/hero_opportunity.png diff --git a/v2/assets/hero/hero_partner.png b/snippets/assets/domain/00_HOME/Hero_Images/hero_partner.png similarity index 100% rename from v2/assets/hero/hero_partner.png rename to snippets/assets/domain/00_HOME/Hero_Images/hero_partner.png diff --git a/v2/assets/hero/hero_reference.png b/snippets/assets/domain/00_HOME/Hero_Images/hero_reference.png similarity index 100% rename from v2/assets/hero/hero_reference.png rename to snippets/assets/domain/00_HOME/Hero_Images/hero_reference.png diff --git a/v2/assets/home/Hero_Images/hero_research.png b/snippets/assets/domain/00_HOME/Hero_Images/hero_research.png similarity index 100% rename from v2/assets/home/Hero_Images/hero_research.png rename to snippets/assets/domain/00_HOME/Hero_Images/hero_research.png diff --git a/v2/assets/hero/hero_researchers.png b/snippets/assets/domain/00_HOME/Hero_Images/hero_researchers.png similarity index 100% rename from v2/assets/hero/hero_researchers.png rename to snippets/assets/domain/00_HOME/Hero_Images/hero_researchers.png diff --git a/v2/assets/hero/hero_showcase.png b/snippets/assets/domain/00_HOME/Hero_Images/hero_showcase.png similarity index 100% rename from v2/assets/hero/hero_showcase.png rename to snippets/assets/domain/00_HOME/Hero_Images/hero_showcase.png diff --git a/snippets/assets/domain/00_HOME/Hero_Images/hero_video_stream.png b/snippets/assets/domain/00_HOME/Hero_Images/hero_video_stream.png new file mode 100644 index 00000000..80794db6 Binary files /dev/null and b/snippets/assets/domain/00_HOME/Hero_Images/hero_video_stream.png differ diff --git a/v2/assets/hero/hero_word_NEW.png b/snippets/assets/domain/00_HOME/Hero_Images/hero_word_NEW.png similarity index 100% rename from v2/assets/hero/hero_word_NEW.png rename to snippets/assets/domain/00_HOME/Hero_Images/hero_word_NEW.png diff --git a/v2/assets/hero/hero_word_developer.png 
b/snippets/assets/domain/00_HOME/Hero_Images/hero_word_developer.png similarity index 100% rename from v2/assets/hero/hero_word_developer.png rename to snippets/assets/domain/00_HOME/Hero_Images/hero_word_developer.png diff --git "a/v2/assets/about/Introducing Livepeer Cascade - A Vision For Livepeer\342\200\231s Future in the Age of Real-Time AI Video.png" "b/snippets/assets/domain/00_HOME/Introducing Livepeer Cascade - A Vision For Livepeer\342\200\231s Future in the Age of Real-Time AI Video.png" similarity index 100% rename from "v2/assets/about/Introducing Livepeer Cascade - A Vision For Livepeer\342\200\231s Future in the Age of Real-Time AI Video.png" rename to "snippets/assets/domain/00_HOME/Introducing Livepeer Cascade - A Vision For Livepeer\342\200\231s Future in the Age of Real-Time AI Video.png" diff --git a/v2/assets/home/Livepeer-Logo-Full-Dark.svg b/snippets/assets/domain/00_HOME/Livepeer-Logo-Full-Dark.svg similarity index 100% rename from v2/assets/home/Livepeer-Logo-Full-Dark.svg rename to snippets/assets/domain/00_HOME/Livepeer-Logo-Full-Dark.svg diff --git a/v2/assets/home/Livepeer-Logo-Full-Light.svg b/snippets/assets/domain/00_HOME/Livepeer-Logo-Full-Light.svg similarity index 100% rename from v2/assets/home/Livepeer-Logo-Full-Light.svg rename to snippets/assets/domain/00_HOME/Livepeer-Logo-Full-Light.svg diff --git a/snippets/assets/domain/00_HOME/LivepeerStats.png b/snippets/assets/domain/00_HOME/LivepeerStats.png new file mode 100644 index 00000000..ce69ff3c Binary files /dev/null and b/snippets/assets/domain/00_HOME/LivepeerStats.png differ diff --git a/v2/assets/about/image.png b/snippets/assets/domain/00_HOME/evolution.png similarity index 100% rename from v2/assets/about/image.png rename to snippets/assets/domain/00_HOME/evolution.png diff --git a/v2/assets/home/livepeer logo.png b/snippets/assets/domain/00_HOME/livepeer logo.png similarity index 100% rename from v2/assets/home/livepeer logo.png rename to 
snippets/assets/domain/00_HOME/livepeer logo.png diff --git a/v2/assets/home/livepeer_evolution_slide.png b/snippets/assets/domain/00_HOME/livepeer_evolution_slide.png similarity index 100% rename from v2/assets/home/livepeer_evolution_slide.png rename to snippets/assets/domain/00_HOME/livepeer_evolution_slide.png diff --git a/v2/assets/home/whitepaper_diagram.png b/snippets/assets/domain/00_HOME/whitepaper_diagram.png similarity index 100% rename from v2/assets/home/whitepaper_diagram.png rename to snippets/assets/domain/00_HOME/whitepaper_diagram.png diff --git a/v2/assets/community/Hero_90_Youtube.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_90_Youtube.png similarity index 100% rename from v2/assets/community/Hero_90_Youtube.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_90_Youtube.png diff --git a/v2/assets/community/Hero_Blogging.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Blogging.png similarity index 100% rename from v2/assets/community/Hero_Blogging.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Blogging.png diff --git a/v2/assets/community/Hero_Calendar.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Calendar.png similarity index 100% rename from v2/assets/community/Hero_Calendar.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Calendar.png diff --git a/v2/assets/community/Hero_Discord.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Discord.png similarity index 100% rename from v2/assets/community/Hero_Discord.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Discord.png diff --git a/v2/assets/community/Hero_Events.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Events.png similarity index 100% rename from v2/assets/community/Hero_Events.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Events.png diff --git a/v2/assets/community/Hero_Follow.png b/snippets/assets/domain/02_COMMUNITY/Hero 
Images/Hero_Follow.png similarity index 100% rename from v2/assets/community/Hero_Follow.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Follow.png diff --git a/v2/assets/community/Hero_Forum.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Forum.png similarity index 100% rename from v2/assets/community/Hero_Forum.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Forum.png diff --git a/v2/assets/community/Hero_Github.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Github.png similarity index 100% rename from v2/assets/community/Hero_Github.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Github.png diff --git a/v2/assets/community/Hero_LinkedIn.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_LinkedIn.png similarity index 100% rename from v2/assets/community/Hero_LinkedIn.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_LinkedIn.png diff --git a/v2/assets/community/Hero_Medium.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Medium.png similarity index 100% rename from v2/assets/community/Hero_Medium.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Medium.png diff --git a/v2/assets/community/Hero_Meeting.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Meeting.png similarity index 100% rename from v2/assets/community/Hero_Meeting.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Meeting.png diff --git a/v2/assets/community/Hero_Newsletter.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Newsletter.png similarity index 100% rename from v2/assets/community/Hero_Newsletter.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Newsletter.png diff --git a/v2/assets/community/Hero_Reddit.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Reddit.png similarity index 100% rename from v2/assets/community/Hero_Reddit.png rename to snippets/assets/domain/02_COMMUNITY/Hero 
Images/Hero_Reddit.png diff --git a/v2/assets/community/Hero_Telegram.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Telegram.png similarity index 100% rename from v2/assets/community/Hero_Telegram.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Telegram.png diff --git a/v2/assets/community/Hero_Telegran.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Telegran.png similarity index 100% rename from v2/assets/community/Hero_Telegran.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Telegran.png diff --git a/v2/assets/community/Hero_X (1).png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_X (1).png similarity index 100% rename from v2/assets/community/Hero_X (1).png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_X (1).png diff --git a/v2/assets/community/Hero_X.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_X.png similarity index 100% rename from v2/assets/community/Hero_X.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_X.png diff --git a/v2/assets/community/Hero_Yotube.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Yotube.png similarity index 100% rename from v2/assets/community/Hero_Yotube.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Yotube.png diff --git a/v2/assets/community/Hero_Youtube.png b/snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Youtube.png similarity index 100% rename from v2/assets/community/Hero_Youtube.png rename to snippets/assets/domain/02_COMMUNITY/Hero Images/Hero_Youtube.png diff --git a/snippets/assets/domain/04_GATEWAYS/code_examples/eliteproxy_launch.example..json b/snippets/assets/domain/04_GATEWAYS/code_examples/eliteproxy_launch.example..json new file mode 100644 index 00000000..54a4fd3d --- /dev/null +++ b/snippets/assets/domain/04_GATEWAYS/code_examples/eliteproxy_launch.example..json @@ -0,0 +1,146 @@ +{ + "version": "0.2.0", + "configurations": [ + { + "name": "Onchain Gateway", + 
"type": "go", + "request": "launch", + "mode": "auto", + "program": "${workspaceFolder}/cmd/livepeer", + "buildFlags": "-tags=mainnet,experimental", + "env": { + "GO111MODULE": "on", + "CGO_ENABLED": "1", + "CC": "", + "CGO_LDFLAGS": "-L/usr/local/cuda/lib64 -L${env:HOME}/buildoutput/compiled/lib -Wl,--copy-dt-needed-entries", + "PATH": "${PATH}:/usr/local/cuda/bin:${env:PATH}", + "PKG_CONFIG_PATH": "${env:HOME}/buildoutput/compiled/lib/pkgconfig", + "LD_LIBRARY_PATH": "${env:HOME}/buildoutput/compiled/lib", + "LIVE_AI_ALLOW_CORS": "1", + "LIVE_AI_WHIP_ADDR": "127.0.0.1:8081", + "LIVE_AI_GATHER_TIMEOUT": "5", + "LIVE_AI_MIN_SEG_DUR": "1s", + "LIVE_AI_NAT_IP": "127.0.0.1", + "LIVE_AI_PLAYBACK_HOST": "rtmp://127.0.0.1:1935/", + "LIVE_AI_WHEP_URL": "http://127.0.0.1:8890/" + }, + "args": [ + "-network", + "arbitrum-one-mainnet", + "-gateway", + "-dataDir", + "${env:HOME}/.lpData-gateway", + "-gatewayHost", + "127.0.0.1:8081", + "-rtmpAddr", + "0.0.0.0:1936", + "-httpAddr", + "127.0.0.1:8081", + "-httpIngest", + "-nvidia", + "0", + "-ethAcctAddr", + "0x0074780FefF1FD0277FAD6ccdb5a29908df6051F", + "-ethPassword", + "${env:HOME}/.lpData-gateway/ethpasstestnet2", + "-orchAddr", + "https://127.0.0.1:8933", + "-ignoreMaxPriceIfNeeded=true", + "-maxPricePerUnit", + "900", + "-maxTotalEV", + "100000000000000", + "-monitor", + "-v", + "5", + "-cliAddr", + "0.0.0.0:7935", + "-ethUrl", + "http://nyc-router.eliteencoder.net:3517" + ] + }, + { + "name": "Local Orchestrator (AI env)", + "type": "go", + "request": "launch", + "mode": "auto", + "program": "${workspaceFolder}/cmd/livepeer", + "buildFlags": "-tags=mainnet,experimental -buildvcs=false", + "env": { + "GO111MODULE": "on", + "CGO_ENABLED": "1", + "CC": "", + "CGO_LDFLAGS": "-L/usr/local/cuda/lib64 -L${env:HOME}/buildoutput/compiled/lib -Wl,--copy-dt-needed-entries", + "PATH": "${PATH}:/usr/local/cuda/bin:${env:PATH}", + "PKG_CONFIG_PATH": "${env:HOME}/buildoutput/compiled/lib/pkgconfig", + "LD_LIBRARY_PATH": 
"${env:HOME}/buildoutput/compiled/lib", + "GOTOOLCHAIN": "auto" + }, + "args": [ + "-orchestrator", + "-transcoder", + "-nvidia", + "0", + "-network", + "arbitrum-one-mainnet", + "-dataDir", + "${env:HOME}/.lpData-orch", + "-ethOrchAddr", + "0x104a7CA059A35Fd4dEF5Ecb16600B2CaA1Fe1361", + "-ethPassword", + "${env:HOME}/.lpData-orch/ethpasstestnet2", + "-orchSecret", + "orch-secret", + "-pricePerUnit", + "20", + "-pixelsPerUnit", + "1", + "-monitor", + "-metricsPerStream", + "-serviceAddr", + "127.0.0.1:8933", + "-cliAddr", + "0.0.0.0:7934", + "-ethUrl", + "http://arb.eliteencoder.net:3517", + "-pricePerBroadcaster", + "${env:HOME}/.lpData-orch/broadcasterpricing", + "-v", + "5" + ] + }, + { + "name": "Launch livepeer_cli", + "type": "go", + "request": "launch", + "mode": "auto", + "console": "integratedTerminal", + "program": "${workspaceFolder}/cmd/livepeer_cli", + "buildFlags": "-tags=mainnet,experimental", + "env": { + "GO111MODULE": "on", + "CGO_ENABLED": "1", + "CC": "", + "CGO_LDFLAGS": "-L/usr/local/cuda/lib64 -L${env:HOME}/buildoutput/compiled/lib -Wl,--copy-dt-needed-entries", + "PATH": "${PATH}:/usr/local/cuda/bin:${env:PATH}", + "PKG_CONFIG_PATH": "${env:HOME}/buildoutput/compiled/lib/pkgconfig", + "LD_LIBRARY_PATH": "${env:HOME}/buildoutput/compiled/lib", + "GOTOOLCHAIN": "auto" + }, + "args": [ + "-http", + "7935", + ], + }, + ], + "compounds": [ + { + "name": "Onchain Gateway + Local Orchestrator", + "stopAll": true, + "configurations": [ + "Onchain Gateway", + "Local Orchestrator (AI env)" + ] + } + ] +} diff --git a/v2/assets/gateways/test-video.mp4 b/snippets/assets/domain/04_GATEWAYS/test-video.mp4 similarity index 100% rename from v2/assets/gateways/test-video.mp4 rename to snippets/assets/domain/04_GATEWAYS/test-video.mp4 diff --git a/v2/assets/gateways/view-dropdown.png b/snippets/assets/domain/04_GATEWAYS/view-dropdown.png similarity index 100% rename from v2/assets/gateways/view-dropdown.png rename to 
snippets/assets/domain/04_GATEWAYS/view-dropdown.png diff --git a/snippets/assets/domain/SHARED/LivepeerDocsHero.svg b/snippets/assets/domain/SHARED/LivepeerDocsHero.svg new file mode 100644 index 00000000..a556f903 --- /dev/null +++ b/snippets/assets/domain/SHARED/LivepeerDocsHero.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/v2/assets/logo/dark.svg b/snippets/assets/domain/SHARED/LivepeerDocsLogo.svg similarity index 100% rename from v2/assets/logo/dark.svg rename to snippets/assets/domain/SHARED/LivepeerDocsLogo.svg diff --git a/v2/assets/site/logos/Livepeer-Logo-Full-Dark.svg b/snippets/assets/logos/Livepeer-Logo-Full-Dark.svg similarity index 100% rename from v2/assets/site/logos/Livepeer-Logo-Full-Dark.svg rename to snippets/assets/logos/Livepeer-Logo-Full-Dark.svg diff --git a/v2/assets/site/logos/Livepeer-Logo-Full-Light.svg b/snippets/assets/logos/Livepeer-Logo-Full-Light.svg similarity index 100% rename from v2/assets/site/logos/Livepeer-Logo-Full-Light.svg rename to snippets/assets/logos/Livepeer-Logo-Full-Light.svg diff --git a/snippets/assets/logos/Livepeer-Logo-Full-Theme.svg b/snippets/assets/logos/Livepeer-Logo-Full-Theme.svg new file mode 100644 index 00000000..f9ff72e2 --- /dev/null +++ b/snippets/assets/logos/Livepeer-Logo-Full-Theme.svg @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + + + + + diff --git a/v2/assets/site/logos/Livepeer-Logo-Symbol-Dark.svg b/snippets/assets/logos/Livepeer-Logo-Symbol-Dark.svg similarity index 100% rename from v2/assets/site/logos/Livepeer-Logo-Symbol-Dark.svg rename to snippets/assets/logos/Livepeer-Logo-Symbol-Dark.svg diff --git a/snippets/assets/logos/Livepeer-Logo-Symbol-Green-Theme.svg b/snippets/assets/logos/Livepeer-Logo-Symbol-Green-Theme.svg new file mode 100644 index 00000000..8e9bbaba --- /dev/null +++ b/snippets/assets/logos/Livepeer-Logo-Symbol-Green-Theme.svg @@ -0,0 +1,10 @@ + + + + + diff --git a/v2/assets/site/logos/Livepeer-Logo-Symbol-Light.svg 
b/snippets/assets/logos/Livepeer-Logo-Symbol-Green.svg similarity index 91% rename from v2/assets/site/logos/Livepeer-Logo-Symbol-Light.svg rename to snippets/assets/logos/Livepeer-Logo-Symbol-Green.svg index 3ea275c4..b9a914dd 100644 --- a/v2/assets/site/logos/Livepeer-Logo-Symbol-Light.svg +++ b/snippets/assets/logos/Livepeer-Logo-Symbol-Green.svg @@ -1,3 +1,4 @@ - + + diff --git a/snippets/assets/Livepeer-Logo-Symbol-Light.svg b/snippets/assets/logos/Livepeer-Logo-Symbol-Light.svg similarity index 100% rename from snippets/assets/Livepeer-Logo-Symbol-Light.svg rename to snippets/assets/logos/Livepeer-Logo-Symbol-Light.svg diff --git a/snippets/assets/logos/Livepeer-Logo-Symbol-Theme.svg b/snippets/assets/logos/Livepeer-Logo-Symbol-Theme.svg new file mode 100644 index 00000000..010dbf29 --- /dev/null +++ b/snippets/assets/logos/Livepeer-Logo-Symbol-Theme.svg @@ -0,0 +1,10 @@ + + + + + diff --git a/snippets/assets/logos/Livepeer-Logo-Symbol.svg b/snippets/assets/logos/Livepeer-Logo-Symbol.svg new file mode 100644 index 00000000..476e8b54 --- /dev/null +++ b/snippets/assets/logos/Livepeer-Logo-Symbol.svg @@ -0,0 +1,3 @@ + + + diff --git a/snippets/assets/logos/dark.svg b/snippets/assets/logos/dark.svg new file mode 100644 index 00000000..245d63ba --- /dev/null +++ b/snippets/assets/logos/dark.svg @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + diff --git a/v2/assets/logo/light.svg b/snippets/assets/logos/light.svg similarity index 100% rename from v2/assets/logo/light.svg rename to snippets/assets/logos/light.svg diff --git a/v2/assets/products/daydream-logo-dark.svg b/snippets/assets/logos/products/daydream-logo-dark.svg similarity index 100% rename from v2/assets/products/daydream-logo-dark.svg rename to snippets/assets/logos/products/daydream-logo-dark.svg diff --git a/v2/assets/products/livepeer-studio-logo.svg b/snippets/assets/logos/products/livepeer-studio-logo.svg similarity index 100% rename from v2/assets/products/livepeer-studio-logo.svg rename to 
snippets/assets/logos/products/livepeer-studio-logo.svg diff --git a/v2/assets/products/streamplace-cube.png b/snippets/assets/logos/products/streamplace-cube.png similarity index 100% rename from v2/assets/products/streamplace-cube.png rename to snippets/assets/logos/products/streamplace-cube.png diff --git a/v2/assets/products/streamplace-logo.svg b/snippets/assets/logos/products/streamplace-logo.svg similarity index 100% rename from v2/assets/products/streamplace-logo.svg rename to snippets/assets/logos/products/streamplace-logo.svg diff --git a/v2/assets/hero/Hero_Livepeer_Full.png b/snippets/assets/media/heros/Hero_Livepeer_Full.png similarity index 100% rename from v2/assets/hero/Hero_Livepeer_Full.png rename to snippets/assets/media/heros/Hero_Livepeer_Full.png diff --git a/v2/assets/hero/Hero_Livepeer_Full_sml.png b/snippets/assets/media/heros/Hero_Livepeer_Full_sml.png similarity index 100% rename from v2/assets/hero/Hero_Livepeer_Full_sml.png rename to snippets/assets/media/heros/Hero_Livepeer_Full_sml.png diff --git a/v2/assets/home/Hero_Images/hero_about.png b/snippets/assets/media/heros/hero_about.png similarity index 100% rename from v2/assets/home/Hero_Images/hero_about.png rename to snippets/assets/media/heros/hero_about.png diff --git a/v2/assets/home/Hero_Images/hero_community.png b/snippets/assets/media/heros/hero_community.png similarity index 100% rename from v2/assets/home/Hero_Images/hero_community.png rename to snippets/assets/media/heros/hero_community.png diff --git a/v2/assets/home/Hero_Images/hero_delegators.png b/snippets/assets/media/heros/hero_delegators.png similarity index 100% rename from v2/assets/home/Hero_Images/hero_delegators.png rename to snippets/assets/media/heros/hero_delegators.png diff --git a/v2/assets/home/Hero_Images/hero_developer.png b/snippets/assets/media/heros/hero_developer.png similarity index 100% rename from v2/assets/home/Hero_Images/hero_developer.png rename to snippets/assets/media/heros/hero_developer.png 
diff --git a/v2/assets/home/Hero_Images/hero_developer1.png b/snippets/assets/media/heros/hero_developer1.png similarity index 100% rename from v2/assets/home/Hero_Images/hero_developer1.png rename to snippets/assets/media/heros/hero_developer1.png diff --git a/v2/assets/home/Hero_Images/hero_developer_logo.png b/snippets/assets/media/heros/hero_developer_logo.png similarity index 100% rename from v2/assets/home/Hero_Images/hero_developer_logo.png rename to snippets/assets/media/heros/hero_developer_logo.png diff --git a/v2/assets/home/Hero_Images/hero_gateways.png b/snippets/assets/media/heros/hero_gateways.png similarity index 100% rename from v2/assets/home/Hero_Images/hero_gateways.png rename to snippets/assets/media/heros/hero_gateways.png diff --git a/v2/assets/home/Hero_Images/hero_gpu.png b/snippets/assets/media/heros/hero_gpu.png similarity index 100% rename from v2/assets/home/Hero_Images/hero_gpu.png rename to snippets/assets/media/heros/hero_gpu.png diff --git a/v2/assets/home/Hero_Images/hero_help.png b/snippets/assets/media/heros/hero_help.png similarity index 100% rename from v2/assets/home/Hero_Images/hero_help.png rename to snippets/assets/media/heros/hero_help.png diff --git a/v2/assets/home/Hero_Images/hero_logo_developer.png b/snippets/assets/media/heros/hero_logo_developer.png similarity index 100% rename from v2/assets/home/Hero_Images/hero_logo_developer.png rename to snippets/assets/media/heros/hero_logo_developer.png diff --git a/v2/assets/home/Hero_Images/hero_logo_developer_sml.png b/snippets/assets/media/heros/hero_logo_developer_sml.png similarity index 100% rename from v2/assets/home/Hero_Images/hero_logo_developer_sml.png rename to snippets/assets/media/heros/hero_logo_developer_sml.png diff --git a/v2/assets/home/Hero_Images/hero_logo_new.png b/snippets/assets/media/heros/hero_logo_new.png similarity index 100% rename from v2/assets/home/Hero_Images/hero_logo_new.png rename to snippets/assets/media/heros/hero_logo_new.png diff --git 
a/v2/assets/home/Hero_Images/hero_opportunity.png b/snippets/assets/media/heros/hero_opportunity.png similarity index 100% rename from v2/assets/home/Hero_Images/hero_opportunity.png rename to snippets/assets/media/heros/hero_opportunity.png diff --git a/v2/assets/home/Hero_Images/hero_partner.png b/snippets/assets/media/heros/hero_partner.png similarity index 100% rename from v2/assets/home/Hero_Images/hero_partner.png rename to snippets/assets/media/heros/hero_partner.png diff --git a/v2/assets/home/Hero_Images/hero_reference.png b/snippets/assets/media/heros/hero_reference.png similarity index 100% rename from v2/assets/home/Hero_Images/hero_reference.png rename to snippets/assets/media/heros/hero_reference.png diff --git a/v2/assets/home/Hero_Images/hero_researchers.png b/snippets/assets/media/heros/hero_researchers.png similarity index 100% rename from v2/assets/home/Hero_Images/hero_researchers.png rename to snippets/assets/media/heros/hero_researchers.png diff --git a/v2/assets/home/Hero_Images/hero_showcase.png b/snippets/assets/media/heros/hero_showcase.png similarity index 100% rename from v2/assets/home/Hero_Images/hero_showcase.png rename to snippets/assets/media/heros/hero_showcase.png diff --git a/v2/assets/home/Hero_Images/hero_word_NEW.png b/snippets/assets/media/heros/hero_word_NEW.png similarity index 100% rename from v2/assets/home/Hero_Images/hero_word_NEW.png rename to snippets/assets/media/heros/hero_word_NEW.png diff --git a/v2/assets/home/Hero_Images/hero_word_developer.png b/snippets/assets/media/heros/hero_word_developer.png similarity index 100% rename from v2/assets/home/Hero_Images/hero_word_developer.png rename to snippets/assets/media/heros/hero_word_developer.png diff --git a/snippets/assets/media/icons/home-house.gif b/snippets/assets/media/icons/home-house.gif new file mode 100644 index 00000000..adf6aa1c Binary files /dev/null and b/snippets/assets/media/icons/home-house.gif differ diff --git 
a/snippets/assets/media/icons/smart-house.gif b/snippets/assets/media/icons/smart-house.gif new file mode 100644 index 00000000..d1c88935 Binary files /dev/null and b/snippets/assets/media/icons/smart-house.gif differ diff --git a/snippets/assets/media/images/DelegatorImg.avif b/snippets/assets/media/images/DelegatorImg.avif new file mode 100644 index 00000000..a4bcdaf0 Binary files /dev/null and b/snippets/assets/media/images/DelegatorImg.avif differ diff --git a/snippets/assets/media/images/GPU callout.png b/snippets/assets/media/images/GPU callout.png new file mode 100644 index 00000000..9ba5c9e9 Binary files /dev/null and b/snippets/assets/media/images/GPU callout.png differ diff --git a/snippets/assets/media/images/GPUImg.webp b/snippets/assets/media/images/GPUImg.webp new file mode 100644 index 00000000..2642e80c Binary files /dev/null and b/snippets/assets/media/images/GPUImg.webp differ diff --git a/snippets/assets/media/images/Livepeer Stats.png b/snippets/assets/media/images/Livepeer Stats.png new file mode 100644 index 00000000..36c1501b Binary files /dev/null and b/snippets/assets/media/images/Livepeer Stats.png differ diff --git a/snippets/assets/media/videos/Embody.mp4 b/snippets/assets/media/videos/Embody.mp4 new file mode 100644 index 00000000..c8d3c79b Binary files /dev/null and b/snippets/assets/media/videos/Embody.mp4 differ diff --git a/v2/assets/community/Eric Shreck Gif.gif b/snippets/assets/media/videos/HeroBackground.mp4 similarity index 62% rename from v2/assets/community/Eric Shreck Gif.gif rename to snippets/assets/media/videos/HeroBackground.mp4 index d046da7d..d06bd45a 100644 Binary files a/v2/assets/community/Eric Shreck Gif.gif and b/snippets/assets/media/videos/HeroBackground.mp4 differ diff --git a/snippets/assets/media/videos/LivepeerStudio.mp4 b/snippets/assets/media/videos/LivepeerStudio.mp4 new file mode 100644 index 00000000..f20a6f5b Binary files /dev/null and b/snippets/assets/media/videos/LivepeerStudio.mp4 differ diff --git 
a/snippets/assets/media/videos/daydream.mp4 b/snippets/assets/media/videos/daydream.mp4 new file mode 100644 index 00000000..e3266793 Binary files /dev/null and b/snippets/assets/media/videos/daydream.mp4 differ diff --git a/snippets/assets/media/videos/frameworks.mp4 b/snippets/assets/media/videos/frameworks.mp4 new file mode 100644 index 00000000..4a6cd691 Binary files /dev/null and b/snippets/assets/media/videos/frameworks.mp4 differ diff --git a/snippets/assets/media/videos/livepeer-founders-post.mp4 b/snippets/assets/media/videos/livepeer-founders-post.mp4 new file mode 100644 index 00000000..bd1b535c Binary files /dev/null and b/snippets/assets/media/videos/livepeer-founders-post.mp4 differ diff --git a/snippets/assets/media/videos/streamplace.mp4 b/snippets/assets/media/videos/streamplace.mp4 new file mode 100644 index 00000000..c8d3c79b Binary files /dev/null and b/snippets/assets/media/videos/streamplace.mp4 differ diff --git a/v2/assets/site/favicon/apple-touch-icon.png b/snippets/assets/site/favicon/apple-touch-icon.png similarity index 100% rename from v2/assets/site/favicon/apple-touch-icon.png rename to snippets/assets/site/favicon/apple-touch-icon.png diff --git a/v2/assets/site/favicon/favicon-96x96.png b/snippets/assets/site/favicon/favicon-96x96.png similarity index 100% rename from v2/assets/site/favicon/favicon-96x96.png rename to snippets/assets/site/favicon/favicon-96x96.png diff --git a/v2/assets/site/favicon/favicon.ico b/snippets/assets/site/favicon/favicon.ico similarity index 100% rename from v2/assets/site/favicon/favicon.ico rename to snippets/assets/site/favicon/favicon.ico diff --git a/v2/assets/site/favicon/favicon.svg b/snippets/assets/site/favicon/favicon.svg similarity index 100% rename from v2/assets/site/favicon/favicon.svg rename to snippets/assets/site/favicon/favicon.svg diff --git a/v2/assets/site/favicon/site.webmanifest b/snippets/assets/site/favicon/site.webmanifest similarity index 100% rename from 
v2/assets/site/favicon/site.webmanifest rename to snippets/assets/site/favicon/site.webmanifest diff --git a/v2/assets/site/favicon/web-app-manifest-192x192.png b/snippets/assets/site/favicon/web-app-manifest-192x192.png similarity index 100% rename from v2/assets/site/favicon/web-app-manifest-192x192.png rename to snippets/assets/site/favicon/web-app-manifest-192x192.png diff --git a/v2/assets/site/favicon/web-app-manifest-512x512.png b/snippets/assets/site/favicon/web-app-manifest-512x512.png similarity index 100% rename from v2/assets/site/favicon/web-app-manifest-512x512.png rename to snippets/assets/site/favicon/web-app-manifest-512x512.png diff --git a/v2/assets/site/images/404-desolate.jpeg b/snippets/assets/site/images/404-desolate.jpeg similarity index 100% rename from v2/assets/site/images/404-desolate.jpeg rename to snippets/assets/site/images/404-desolate.jpeg diff --git a/v2/assets/site/images/layered-image.webp b/snippets/assets/site/images/layered-image.webp similarity index 100% rename from v2/assets/site/images/layered-image.webp rename to snippets/assets/site/images/layered-image.webp diff --git a/snippets/assets/site/united-kingdom-flag-icon.svg b/snippets/assets/site/united-kingdom-flag-icon.svg new file mode 100644 index 00000000..75b969de --- /dev/null +++ b/snippets/assets/site/united-kingdom-flag-icon.svg @@ -0,0 +1,9 @@ + + + + + + + + + \ No newline at end of file diff --git a/snippets/automationData/blog/ghostBlogData.jsx b/snippets/automationData/blog/ghostBlogData.jsx deleted file mode 100644 index 3cf1fd8f..00000000 --- a/snippets/automationData/blog/ghostBlogData.jsx +++ /dev/null @@ -1,191 +0,0 @@ -export const ghostData = [ - { - title: `A Real-time Update to the Livepeer Network Vision`, - href: `https://blog.livepeer.org/a-real-time-update-to-the-livepeer-network-vision/`, - author: `By Livepeer Team`, - content: `

      For the past year, the Livepeer Ecosystem has been guided by the Cascade vision:  a path to transition from a pure streaming and transcoding infrastructure, to an infrastructure that could succeed at providing compute for the future of real-time AI video. The latest Livepeer quarterly report from Messari highlights that this transition is paying off, with network fees up 3x from this time last year, and over 72% of the fees now driven via AI inference. This is exemplified by the growing inspirational examples emerging from Daydream powered real-time AI, and real-time Agent avatar generation through Embody and the Agent SPE.

      Source: Livepeer Q3 2025 Report by Messari

      This shift has been an ecosystem-wide effort – ranging from branding and communications, to productization and go to market, to hardware upgrades for orchestrators. It has successfully shifted the project under an updated mission and direction; however, it has still left ambiguity in terms of what the Livepeer network itself offers as killer value propositions to new builders outside of the existing ecosystem. Is it a GPU cloud? A transcoding infra? An API engine? Now that there are signs of validation and accelerated momentum around an exciting opportunity, it’s time to really home in on a refined vision for the future of the Livepeer network as a product itself.

      The market for video is set to massively expand

      The concept of live video itself is expanding well beyond a simple single stream of video captured from a camera. Now entire worlds and scenes are generated or enhanced in real-time via AI assistance, leading to more immersive and interactive experiences than possible via old-school streaming alone. For a taste of the future, see the following examples:

      1. The future of gaming will be AI generated video and worlds in real-time:
      -
      1. Video streams can be analyzed and data leveraged programmatically in real-time, for instant insight generation and decision making:
      -
      1. Real-time style transfer can enable avatars and agents to participate in the global economy:
      -

      Video world models and real-time AI video are merging, as they both use AI to generate frame-by-frame video output with low latency on the fly, based on user input and AI inference. This requires a tremendous amount of GPU compute, and requires an amazing low latency video streaming and compute stack – two areas in which the Livepeer network and community thrive, and two areas to which the many other generic GPU inference providers in the market bring no unique skillset, experience, or software advantage. 

      The big opportunity for the Livepeer network is to be the leading AI Infrastructure For Real-Time Video.
      From interactive live streaming to generative world models, Livepeer’s open-access, low-latency network of GPUs will be the best compute solution for cutting edge AI video workflows. 

      World models are a game changing category, and Livepeer is well suited to offer a unique and differentiated product here, that serves a huge market of diverse and varying use cases. These range from creative entertainment, to gaming, to robotics, to data analysis, to monitoring and security, to synthetic data generation for AGI itself.

      While an ambitious stretch, Nvidia executives responsible for the category have even projected that due to the impact in robotics, the economic opportunity for world models could exceed $100 trillion, or approximately the size of the entire global economic output itself!  

      What does it mean to productize the Livepeer network to succeed as a valuable infrastructure in this category?

      From a simplified viewpoint, it needs to deliver on the following:

      1. Ability for users to deploy real-time AI workflows to the Livepeer network and request inference on them

      2. Industry leading latency for providing inference on real-time AI and world model workflows.

      3. Cost effective scalability – users can pay as they go to scale up and down capacity and the network automagically delivers the scale required.

      Imagine a gaming platform is powering world-model generated games using its unique workflows that generate game levels or areas in a certain style by combining several real-time models, LLMs, and style transfer mechanisms. Each game it’s powering has users exploring and creating their own corners of the interactive worlds, based on prompts and gameplay inputs. Every gamer that joins a game represents a new stream of AI video compute, and the Livepeer network is the backing infrastructure that provides the compute for this video world generation, leveraging hundreds or thousands of GPUs concurrently.

      For this to be possible the Livepeer network needs to enable that game platform to deploy their game generation workflow. It needs to offer low latency on the inference that runs this workflow, relative to the generic GPU compute clouds. The pricing needs to be competitive vs alternative options in the market for this GPU compute. And the network needs to allow this company to scale up and down the number of GPUs that are currently live ready to accept new real-time inference streams based on the number of users currently live on the games it is powering.

      All of this is possible on the Livepeer network, and it isn’t far away from where we are now. If we work to build, test, and iterate on the Livepeer network itself towards supporting the latency and scale required for these types of workflows, we’ll be set up to power them.
      Now multiply this example gaming company by the high number of diverse industries and verticals that real-time AI and world models will touch. Each category can have one or multiple companies competing to leverage this scalable and cost effective infrastructure for unique go to markets targeting different segments. And they can all be powered by the Livepeer network’s unique value propositions.

      Livepeer’s core network is strategically positioned

      What are these value propositions that make the Livepeer network differentiated relative to alternative options in the market? I’d argue that there are three primary, table stakes, must-have value propositions if Livepeer is to succeed. 

      1. Industry standard low latency infrastructure specializing in real-time AI and world model workflows: First of all, the network needs to let its users deploy custom workflows. Inference alone on base models is not enough and does not represent scaled demand. Users want to take base models, chain them together with other models and pre/post processors, and create unique and specialized capabilities. When one of these capabilities is defined as a workflow, that is the unit that needs to be deployed as a job on the Livepeer network, and the network needs to be able to run inference on it. Secondly, for these real-time interactive use cases, latency matters a lot. Generic GPU clouds don’t offer the specialized low latency video stacks to ingest, process, and serve video with optimal latency, but Livepeer does. And Livepeer needs to benchmark itself to have lower or equal latency to alternative GPU clouds for these particular real-time and world model use cases.

      2. Cost effective scalability: GPU provisioning, reservations, and competing for scarce supply procurement creates major challenges for AI companies – often overpaying for GPUs that sit idle most of the time in order to guarantee the capacity that they need. The Livepeer network’s value proposition is that users should be able to “automagically” scale up almost instantly and pay on demand for the compute that they use, rather than having to pre-pay for reservations and let capacity sit idle. This is enabled by Livepeer taking advantage of otherwise existing idle longtail compute through its open marketplace, and its supply side incentives. The Livepeer network needs to be more cost effective than alternative GPU clouds within this category - with impacts comparable to the 10x+ cost reduction already demonstrated in live video transcoding delivered by the network.

      3. Community driven, open source, open access: The Livepeer project and software stack is open source. Users can control, update, and contribute to the software they are using. They also can be owners in the infrastructure itself through the Livepeer Token, and can benefit from the network’s improvements and adoption, creating a network effect. The community that cares about its success and pushes it forward collectively, can be a superpower, relative to the uncertain and shaky relationship between builders and centralized platform providers, who have a history of getting rugged based on limitations to access, changes in functionality, or discontinuity of the platforms. Anyone can build on the Livepeer network regardless of location, jurisdiction, use case, or central party control.

      The above are primary value propositions that should appeal to nearly all users. And we must work to close the gaps to live up to those value props before we could successfully hope to go to market and attract new vertical-specific companies to build directly on top of the network. Luckily, in addition to all of Livepeer’s streaming users, we have a great realtime AI design partner in Daydream, which is already going to market around creative real-time AI, using the network, and contributing to its development to live up to these requirements. While building with this design partner, the ecosystem should be working to productize to live up to these promises in a more generic perspective – it should be setting up benchmarks, testing frameworks, and building mechanisms for scaling up supply ahead of demand, so that it can represent this power to the world alongside successful Daydream case studies.

      Opportunities to push towards this vision

      To truly live up to these value propositions, there are a number of opportunities for the community to focus on in order to close some key gaps. There are many details to come in more technical posts laying out roadmaps and execution frameworks, but at a high level, consider a series of milestones that take the network as a product from technically functional, to production usable, to extensible, to infinitely scalable:

      1. Network MVP - Measure what matters: Establish key network performance SLAs, measure latency and performance benchmarks, and enhance the low latency client to support realtime AI workflows above industry grade standards.
      2. Network as a Product - Self adaptability and scalability: Network delivers against these SLAs and core value props for supported realtime AI workflows. Selection algorithms, failovers and redundancy, and competitive market price discovery established for realtime AI.
      3. Extensibility - Toolkit for community to deploy workflows and provision resources: Workflow deployment and signaling, LPT incentive updates to ensure compute supply for popular AI workflows exceeds demand.
      4. Parallel Scalability: Manage clusters of resources on the network for parallel workflow execution, truly unlocking job types beyond single-GPU inference. 

      Many teams within the ecosystem, from the Foundation, to Livepeer Inc, to various SPEs have already started operationalizing around how they’ll be contributing to milestones 1 and 2 to upgrade the network to deliver against these key realtime AI value propositions. 

      Conclusion and Livepeer’s opportunity

      The market opportunity to be the GPU infrastructure that powers real-time AI and world models is absolutely massive – the compute requirements are tremendous - 1000x that of AI text or images - and real-time interaction with media represents a new platform that will affect all of the above-mentioned industries. The Livepeer network can be the infrastructure that powers it. How we plan to close the needed gaps and achieve this will be the subject of an upcoming post. But when we do prove these value propositions, Livepeer will have a clear path to 100x the demand on the network.

      The likely target market users for the network are those startups that are building out vertical specific businesses on top of real-time AI and world model workflows. The ecosystem should look to enable one (or multiple!) startups in each category going after building real-time AI platforms that serve gaming, that serve robotics, that serve synthetic data generation, that serve monitoring and analysis, and all the additional relevant categories. The network’s value propositions will hopefully speak for themselves, but in the early stages of this journey, it is likely the ecosystem will want to use incentives (like investment or credits) to bootstrap these businesses into existence. Each will represent a chance at success, and will bring more demand and proof.

      Ultimately, many users of these platforms may choose to build direct on the network themselves. Similarly to how startups start to build on platforms like Heroku, Netlify, or Vercel, and then as they scale and need more control and cost savings they build direct on AWS, and then ultimately move to their own datacenters after reaching even more scale – users of Daydream or a real-time Agent platform built on Livepeer, may ultimately choose to run their own gateways to recognize the cost savings and control and full feature set that comes from doing so. This is a good thing! As it represents even more usage and scale for the network, more proof that as an infrastructure the Livepeer network has product market fit, and that it can absorb all workflows directly. The businesses built on top will provide their own vertical specific bundles of features and services that onboard that vertical specific capacity, but they’ll be complemented by and enabled by the Livepeer Network’s superpowers.

      While there’s a lot of work ahead, the Livepeer community has already stepped up to cover tremendous ground on this mission. At the moment by already powering millions of minutes of real-time AI inference per week, by our orchestrators already upgrading their capacity and procurement mechanisms to provide real-time AI-capable compute, and by the Foundation groups already working to evaluate the networks incentives and cryptoeconomics to sustainably fund and reward those contributing to this effort, we’re set up well to capture this enormous opportunity!

      `, - datePosted: `Nov 13, 2025`, - img: `https://blog.livepeer.org/content/images/2025/11/LP_Blog-Header_Nov25_01_moshed-1.png`, - excerpt: `For the past year, the Livepeer Ecosystem has been guided by the Cascade vision:  a path to transition from a pure streaming and transcoding infrastructure, to an infrastructure that could succeed at providing compute for the future of real-time AI video. The latest Livepeer quarterly report from Messari highlights that this transition is paying off, with network fees up 3x from this time last year, and over 72% of the fees now driven via AI inference. This is exemplified by the growing inspirat`, - readingTime: 9, - }, - { - title: `Livepeer Onchain Builders - Streamplace: Building the Video Backbone of Decentralized Social`, - href: `https://blog.livepeer.org/livepeer-onchain-builders-streamplace-building-the-video-backbone-of-decentralized-social/`, - author: `By Livepeer Team`, - content: `

      Welcome to Livepeer Onchain Builders, a new content series spotlighting the Special Purpose Entities (SPEs) funded by the Livepeer onchain treasury. SPEs are working groups funded by the community treasury to work on specific tasks and are accountable to the community for their delivery. These deep dives will explore how each initiative is driving protocol usage, expanding infrastructure, and pushing the boundaries of what’s possible in decentralized video and AI.

      Streamplace is an open-source video streaming platform designed to power decentralized social applications with real-time, creator-first infrastructure. It aims to make livestreaming and video hosting as seamless as TikTok or YouTube, but built on open protocols and self-sovereign identity.

      What makes it ambitious? Streamplace is not only building full-stack video infra for federated social networks, it's doing so in a way that prioritizes interoperability, scalability, and public goods. From developer SDKs to end-user apps, Streamplace is building an entire ecosystem.

      What is an SPE? 

      A Special Purpose Entity (SPE) is a focused, community-funded team contributing to the Livepeer ecosystem. SPEs are typically mission-driven groups that operate independently to build infrastructure, applications, or tooling that expand and improve the Livepeer protocol. These teams are funded through proposals to the onchain treasury and are accountable to the community.

      SPEs are necessary for the ecosystem because no single team can build every part of a decentralized protocol. SPEs decentralize development, fund public goods, and allow the community to direct resources where they're most needed.

      Why do they matter to delegators and stakeholders? Because SPEs grow in usage. More usage = more fees = more rewards. Delegators benefit when the protocol succeeds, and SPEs are among the most direct ways to make that happen.

      From Aquareum to Streamplace

      A clear goal drives the team behind Streamplace: to build the foundational video infrastructure for the next generation of decentralized social platforms. These platforms, such as Farcaster and the AT Protocol, promise user-owned identity and interoperability, but have thus far lacked robust support for live and on-demand video.

      Streamplace solves this by providing a full-stack, developer-friendly video layer that anyone can plug into. It's a bold attempt to make decentralized video feel as native and easy as its Web2 counterparts.

      Streamplace started as Aquareum, a project with the same mission and team. This evolution into Streamplace is a rebranding, not a restart, building on past momentum with a sharper focus.

      Their vision is to give every user the ability to publish, stream, and remix content with the same ease as TikTok or YouTube, but backed by self-sovereign identity and decentralized networks.

      Streamplace homepage

      The first proposal delivered:

      • A unified Aquareum node: bundling the Livepeer stack with indexing and playback.
      • App releases on iOS, Android, and Web.
      • Native integrations with AT Protocol and Farcaster.
      • Support for C2PA metadata and content provenance.

      Now, Streamplace continues that momentum with 100,000 LPT in treasury funding and a clear mandate to scale.

      Streamplace Grafana dashboard

      Why Streamplace Matters

      Video is the heart of online social interaction. Yet decentralized social networks have lagged in providing seamless, user-friendly video experiences. Streamplace addresses this by:

      • Transcoding every livestream through Livepeer, providing decentralized, low-cost processing for global delivery.
      • Powering partner platforms like Skylight Social, a TikTok alternative backed by Mark Cuban, that recently hit #1 in entertainment on the App Store.
      • Making it dead-simple to stream or host video through single-binary nodes that anyone can deploy.
      • Championing public goods: 100% of their code is open source, with a commitment to infrastructure, not monetization lock-in.

      Decentralized social, spanning protocols like Farcaster, AT Protocol, and Bluesky, represents a movement toward user-owned networks and open standards. These networks are gaining traction, but video remains a missing layer. That’s where Streamplace comes in.

      Video is essential because it's the most engaging, expressive medium for creators and communities. And as these decentralized platforms scale, having real-time, composable video becomes non-negotiable.

      Streamplace positions itself as the default video infra layer for this new social stack, and with every stream transcoded through Livepeer, it's also a major driver of protocol usage and visibility.

      What Streamplace 2.0 Will Deliver

      This new phase of work, funded by the Livepeer treasury, focuses on scale, performance, and ecosystem integration:

      Infrastructure Enhancements

      • Expand server capacity to support growing user bases like Skylight.
      • Harden video nodes for reliability under real-world load.
      • Deliver high-quality performance on all platforms: Web, iOS, Android.

      Protocol and Developer Growth

      • Deepen native integration with AT Protocol.
      • Build SDKs and NPM packages to embed Streamplace easily into other apps.
      • Ship VOD functionality and new moderation tools.

      Community-First Ethos

      • Launch creator monetization models and stream incentive programs.
      • Empower streamers with self-hosted app capabilities ("Twitch, but it's your own app").
      • Maintain full transparency and livestream development.

      The Livepeer Angle

      Livepeer's decentralized video infrastructure powers every second of video on Streamplace. That means more work for orchestrators, more fees flowing through the protocol, and more incentive for high-quality node operation.

      Streamplace strengthens the Livepeer ecosystem in three key ways:

      • Demand generation: Real-world usage at scale means more consistent transcoding work.
      • Protocol visibility: High-impact apps like Skylight drive awareness of Livepeer beyond its native circles.
      • Infrastructure robustness: Streamplace's nodes enhance the distributed capacity of the Livepeer network.

      Without Livepeer, a decentralized video stack like Streamplace wouldn’t be possible. And without ambitious apps like Streamplace, Livepeer wouldn’t have the same opportunity to prove its value at scale.

      Final Thoughts

      Streamplace is a keystone piece of open video infrastructure and a cornerstone in the emerging world of decentralized social media. By fusing creator-first tooling with Livepeer’s scalable infrastructure, it offers a glimpse into what the open internet can become.

      As decentralized protocols shift from vision to adoption, the need for native video is urgent. Streamplace, with the support of the Livepeer treasury and a relentless commitment to open-source infrastructure, is meeting that need head-on.

      If you're a developer, creator, or community builder, now is the time to get involved.

      Do you want to contribute to Streamplace's success? Explore the open roles here.

      Interested in building or contributing to the Livepeer ecosystem? Learn more about current and past SPEs, open opportunities, and how to submit your own proposal here.

      Follow along, fork the code, or join a stream — the future of social video is open.

      Streamplace App

      Streamplace Proposal

      Aquareum Proposal


      Livepeer is a decentralized video infrastructure network for live and on-demand streaming. It has integrated AI Video Compute capabilities (Livepeer AI) by harnessing its massive GPU network and is now building the future of real-time AI video.

      Twitter | Discord | Website

      `, - datePosted: `Aug 14, 2025`, - img: `https://blog.livepeer.org/content/images/2025/08/Onchain-Builders-Streamplace.jpg`, - excerpt: `Welcome to Livepeer Onchain Builders, a new content series spotlighting the Special Purpose Entities (SPEs) funded by the Livepeer onchain treasury. SPEs are working groups funded by the community treasury to work on specific tasks and are accountable to the community for their delivery. These deep dives will explore how each initiative is driving protocol usage, expanding infrastructure, and pushing the boundaries of what’s possible in decentralized video and AI. - -Streamplace is an open-source `, - readingTime: 5, - }, - { - title: `Builder Story: dotsimulate x Daydream`, - href: `https://blog.livepeer.org/builder-story-dotsimulate-x-daydream/`, - author: `By Livepeer Team`, - content: `

      Building StreamDiffusionTD Operator - a Real-Time Generative Video Operator for TouchDesigner, Powered by the Daydream API

      Creator:
      Lyell Hintz (@dotsimulate)
      Operator: StreamDiffusionTD
      Backends Supported: Local + Daydream (Livepeer)

      -
      - -
      - -
      -
      -
      - - - 0:00 -
      - /0:34 -
      - - - - - -
      -
      -
      - -

      Overview

      StreamDiffusionTD is a TouchDesigner operator that connects real-time inputs like audio, sensors, and camera feeds to StreamDiffusion, enabling live generative visuals controlled in real time. With the Daydream API, it adds remote inference capabilities on top of the existing local GPU inference and unlocks more flexibility for users.

      Built by Lyell Hintz, a technical artist and TouchDesigner developer, the operator is used in live shows, installations, and experimental workflows.

      Why It Was Built

      Lyell began working on the operator a few hours after StreamDiffusion was released on GitHub. He wanted to use it in TouchDesigner - a powerful tool for real time interactive content creation.

      “TouchDesigner is the only place this could be controlled from… it can hook into everything else.”

      From the start, he avoided creating a “black box.” The operator exposes core parameters like prompt, seed, and ControlNet weights, allowing users to adjust values and see results immediately.

      Key Features

      • Real-time video generation
      • Prompt and seed morphing
      • Dynamic ControlNet weighting
      • Live input support: audio, sensors, camera
      • Local GPU and Daydream backend options
      • Instant visual feedback in TouchDesigner
      -
      - -
      - -
      -
      -
      - - - 0:00 -
      - /0:26 -
      - - - - - -
      -
      -
      - -

      Daydream API Integration

      StreamDiffusionTD works with the Daydream API, which allows the operator to run on a remote GPU backend. This eliminates the major barrier of requiring a high-end PC with an NVIDIA RTX 4090 to run StreamDiffusion at professional quality, unlocking the flexibility to run it from any location, on any device form factor.

      Just drop in your API key and hit “Start Stream.” The backend handles orchestration, model hosting, and frame delivery, so builders can stay focused on their creative and technical workflows.

      Setup takes less than 1 minute and once installed, the configuration is remembered for future use. Daydream’s API brings new features to StreamDiffusion:

      • Multi-controlnet: Mixing different controlnets for better artistic control
      • IPAdapter: Use images as powerful style guides
      • TensorRT: Better frame rate for smooth video output

      Daydream is adding support for more real time video generation models, and developers can request features, suggest improvements, or build on top of the API itself. It aligns with the values of open tooling and community-led infrastructure.

      How Artists can use StreamDiffusionTD in TouchDesigner

      • Audio-reactive visuals for concerts
      • Camera-driven generative visuals
      • Real-time visuals for LED walls and stages
      • TouchDesigner automation workflows

      Because it's built inside TouchDesigner, the operator can be extended using Python, MIDI, OSC, or any other input TouchDesigner supports.

      Current State

      The operator is live and ready to use, with active development underway for new features and improved performance. It’s a great time to jump in, explore, and help shape what comes next.

      Try it Yourself

      Operator Access: patreon.com/dotsimulate
      Community and Support: discord.gg/daydreamlive
      API Keys can be requested here

      `, - datePosted: `Aug 5, 2025`, - img: `https://blog.livepeer.org/content/images/2025/08/DD_Builder-Story_dotsimulate_01.png`, - excerpt: `Building StreamDiffusionTD Operator - a Real-Time Generative Video Operator for TouchDesigner, Powered by the Daydream API - -Creator: Lyell Hintz (@dotsimulate) -Operator: StreamDiffusionTD -Backends Supported: Local + Daydream (Livepeer) - - - - - - - - - - - - - - - - - - - - - - - - -0:00 - -/0:34 - - -1× - - - - - - - - - - - - - - - - - -Overview - -StreamDiffusionTD is a TouchDesigner operator that connects real-time inputs like audio, sensors, and camera feeds to StreamDiffusion, enabling live generative visuals controlled in real time. Wit`, - readingTime: 2, - }, - { - title: `Livepeer Incorporated! (and realtime AI)`, - href: `https://blog.livepeer.org/livepeer-incorporated-and-realtime-ai/`, - author: `By Livepeer Team`, - content: `

      Written by Doug Petkanics, Co-founder and CEO at Livepeer Inc

      The past 18 months have been an energizing time to be in the Livepeer Ecosystem. An onchain treasury was introduced to fund public goods via community governance, the community has coalesced around Livepeer’s opportunity to be the leading infrastructure for realtime AI video, and fees and usage of the network have been steadily increasing due to this focus. The Livepeer Foundation has recently launched to steward the 10+ entities in the ecosystem that are core contributors to the project, and is unlocking even more funding around the opportunities recommended in the project’s strategic pillars.

      With so much core development, marketing, and growth driven by the ecosystem at large, the company that I co-founded and operate, Livepeer Incorporated, has had the opportunity to shift its focus to what we deem to be the highest priority area of the project where we feel uniquely suited to make an outsized impact: executing a high conviction go to market motion in an attempt to dramatically grow demand on the Livepeer network. We, like many in the ecosystem, are fully bought in to the realtime AI video vision laid out in Livepeer Cascade, and are solely focused on productization to find product market fit for the Livepeer network as the leading infrastructure in the coming world of live video AI. Here is a bit about what Livepeer Inc is focused on, and almost equally as importantly, what we are not focused on in the coming 12 months.

      Product Market Fit for Realtime AI Video 

      As mentioned, the number one priority is to prove that the Livepeer network has product market fit as an infrastructure that runs the latest and greatest in realtime AI video workflows for developers. To do this, we’ll focus on three core things:

      1. Contribute to core network development to ensure Livepeer is an infrastructure that can run realtime AI video workflows.
      2. Build the developer APIs to run these workflows that developers use to build them into applications. This is a natural extension of Livepeer Studio.
      3. Cultivate the leading realtime AI video community. Researchers, builders, and creators interested in this coming category need a home. They will provide the moat that ensures that an open, community led infrastructure will always be more responsive, cost effective, and full featured than centralized alternatives.

      We’re going to provide the full stack product, engineering, community, and go to market motion to validate product market fit for this opportunity. This will drive significant fees and growth into the Livepeer network. We’re aligned as large LPT token holders and want the network to succeed - which represents a far bigger opportunity for Livepeer Inc than any revenue related opportunity via SaaS services in the short term. Let’s grow those network fees!

      What Livepeer Inc is Not Focused On

      While there are many potential products and go to markets that can be executed upon under an ambitious vision of being the world’s open video infrastructure, a single company is more likely to succeed by focusing on only one opportunity at a time. Many alternative demand generating bets will be better served by other self-motivated actors in the ecosystem - especially as the open source software around Livepeer, and the broader ecosystem has matured to the point of providing reliable access points for different categories of use cases. Regarding Livepeer Inc’s learnings on some of these categories:

      • Transcoding alone has been proven out technically and economically, however the market hasn’t accepted the standalone infrastructure without significant productization, support, SLAs, and enterprise services around it.
      • Similarly, when bundled with end to end streaming, the offering isn’t significantly differentiated in a crowded and consolidating market. 
      • Livepeer Studio will continue to support existing users at the enterprise level that pay for these surrounding services, while passing the transcoding jobs through to the Livepeer network, but due to the long sales cycle and slow growth, it will not be actively competing to grow this source of demand. 
      • The ecosystem can support aspiring users of transcoding and streaming via projects like Streamplace, the Frameworks SPE, and their supporting teams. One of the core pillars of the Livepeer Foundation’s GTM recommendations is to tackle being the open video infrastructure for web3 social and decentralized streaming, so the ecosystem will prioritize support. This includes aspiring web3-centric streaming users, who culturally align with the values of the project community, but to date have not shown significant growth nor driven significant fees to the network. There’s an opportunity for these projects to crack this nut and help these users grow, if they deem it to be worth the effort!
      • There are also additional bets that the ecosystem is interested in around the realtime AI mission. These are laid out by the Livepeer Foundation’s GTM Strategy post. Visual avatars for live AI agents is one example. Realtime video analysis and understanding are others. These areas do overlap with the broad theme that Livepeer Inc is focused on - running realtime AI models on live video on the Livepeer network. However as Inc pursues creative AI use cases initially to inspire the broader world in what’s possible, we welcome others in the ecosystem building commercial entities to go after these opportunities. And we will certainly collaborate. If the ecosystem efforts make technical progress, but stop short of commercializing and going to market, these are areas for collaboration with Inc to consider productizing for commercial purposes. 

      A Simplified View: Foundation and Inc

      While the above contains a lot of details about realtime AI and specific demand generating bets on the Livepeer network, there’s a simplified view:

      • The Livepeer Foundation will steward the Livepeer community, project marketing, and public goods funding to enable recommendations on the project roadmap.
      • Livepeer Inc will focus on driving demand to the network by building the realtime AI products, go to market services, and AI community - initially in the creative realtime AI video space.

      If you’re interested in building within this ecosystem, there are lots of opportunities that both contribute to the core development and operations of the project in service of the realtime AI mission, but also to develop companies that service additional markets not currently being focused on. Hopefully the above post gives you a view into what some of those opportunities and gaps are. Then check out the Livepeer Foundation’s recent forum posts on tactical recommendations, and raise your hand to get involved in the ones of interest.

      `, - datePosted: `Jul 31, 2025`, - img: `https://blog.livepeer.org/content/images/2025/07/e.png`, - excerpt: `Written by Doug Petkanics, Co-founder and CEO at Livepeer Inc - -The past 18 months have been an energizing time to be in the Livepeer Ecosystem. An onchain treasury was introduced to fund public goods via community governance, the community has coalesced around Livepeer’s opportunity to be the leading infrastructure for realtime AI video, and fees and usage of the network have been steadily increasing due to this focus. The Livepeer Foundation has recently launched to steward the 10+ entities in `, - readingTime: 5, - }, -]; diff --git a/snippets/automationData/forum/forumData.jsx b/snippets/automationData/forum/forumData.jsx deleted file mode 100644 index 5026ebdd..00000000 --- a/snippets/automationData/forum/forumData.jsx +++ /dev/null @@ -1,38 +0,0 @@ -export const forumData = [ - { - title: "It's time to ACT! Accumulation & the Treasury Ceiling", - href: "https://forum.livepeer.org/t/3153", - author: "By b3nnn (@b3nnn)", - content: - "

      The onchain treasury was designed to provide sustainable public goods funding. It has supported many important and strategic contributions to the Livepeer Ecosystem. The AI SPE, Streamplace, Agent SPE and Cloud have all received funds and made important contributions. And through our onchain governance, the community have shown time and again their thoughtfulness and care for getting decisions right. Your desire to align decisions with long-term health has made us a shining example of simple but effective governance and how people can work together onchain.

      The treasury is key to supporting strategic investments to improve UX for stakeholders, effectively manage protocol security, and fund other capital and resource needs for this exciting phase of the project.

      As of now, the onchain treasury is currently not accumulating LPT. It was designed not to accept unlimited funding, hit the initial value set as the ceiling, and reset treasury contributions to 0% on or around 31st of March this year. There is a backlog of upcoming projects on highly strategic initiatives that will need treasury support, and we will all feel better about how to allocate funds if we have certainty that new funds are coming into the treasury.

      I intend to post a LIP to turn on the treasury rewards again at their initial values:

      • treasuryRewardCutRate: 10%

      • treasuryBalanceCeiling: 750000 LPT

      The value of 750000 LPT is currently set as the ceiling, so it would not be updated in the formal proposal

      For what it’s worth, my personal bias is to increase one of these values, but I’m happy to punt that discussion to another day. Having seen the exciting things in the background that will require treasury support in coming weeks, the most pressing item for us as a community is to start getting the treasury repopulated.

      I’ll be on the watercooler next week to discuss and am happy to set up an office hours to discuss direct if there is support for that. I look forward to proposing this for a community vote. If you have any input on the contribution percentage that goes into my proposal, please also share your input here.

      ", - replyCount: 7, - datePosted: "Dec 3, 2025", - }, - { - title: "Pre-proposal: IDOL - Improving Dex / Onchain Liquidity", - href: "https://forum.livepeer.org/t/3151", - author: "By b3nnn (@b3nnn)", - content: - '
      TLDR

      We propose to address known UX issues and improve the ease and cost of participation by increasing DEX liquidity. Arrakis offers an optimal solution for our specific needs, and we are requesting 250,000 LPT for deployment to a Uniswap v4 pool which will significantly reduce slippage for ecosystem participants.

      Motivation

      The Capital Markets Advisory board made improving onchain liquidity a tactical recommendation, specifically citing:

      • Low liquidity levels on our DEX pools (primarily Uniswap on Arbitrum). This creates high slippage when trying to transact with any size, and might refrain larger stakeholders or participants from buying LPT

      • The much higher ratio of available liquidity on centralized exchanges compared to DEXs drives participants to rely on centralized platforms, exposing them to the inherent risks associated with centralized providers

      • Further, centralised exchanges often don’t support L2 withdrawals. This results in delayed bridging and withdrawal processing between L1 & L2, impairing overall UX and the efficiency of orchestrators as it relates to capital allocation

      In short, improved L2 Dex liquidity is essential for both current and future participants in Livepeer.

      Recommended Solution

      How to address our challenges is relatively straightforward to describe:

      • Increase the amount of liquidity on targeted DEX pool/s

      • Ensure the solution is executing against this goal as agreed

      • Use funds wisely, ensuring a good balance between what we pay and what we receive

      Any solution will require liquidity from the on-chain treasury to start bootstrapping an optimal asset mix. In addition to this liquidity requirement, using a traditional market maker is likely a major expense (in the range of $15-20K per month). While traditional market makers can do a good job in actively managing liquidity, especially on centralised exchanges, they often present new or additional challenges:

      • Market makers typically operate through asset loan agreements, using our capital to actively manage liquidity across venues. While this model provides flexibility and professional management, it can make visibility into how and where assets are deployed more challenging.

      • Compared to centralized venues, on-chain liquidity provision is often less economically attractive for market makers. As a result, they may prioritize other strategies or venues where returns are higher, which can limit incentives to deepen on-chain liquidity.

      • Ensuring that capital is being used effectively by traditional market makers remains challenging, as it requires clear visibility into capital deployment and a deep understanding of the alternative strategies they pursue.

      While none of this is insurmountable, it requires significant thought, effort and time to ensure oversight and manage risk.

      Arrakis Pro is an ideal solution to address these challenges.

      Arrakis specifically addresses each of these challenges because:

      • It is built specifically for managing onchain liquidity on DEXs

      • The assets are stored in a vault controlled by a multisig made up of Livepeer Foundation members. This means the treasury, via the Foundation, can withdraw and return the liquidity at any time

      • Because it is onchain, and through the features provided in Arrakis pro, we can check and confirm at any time where our assets are and what strategies are being applied.

      • It rebalances positions by setting up ranges / limit orders, no swaps involved. The solution algorithmically minimises price impact given the allocated capital and bootstraps base asset liquidity without causing negative selling pressure.

      • Arrakis leverages sophisticated algorithms to increase capital efficiency for the deployed capital and reduce slippage for traders on the DEX pools.

      Arrakis vaults hold ~$170M TVL and the team actively manages the on-chain liquidity for over 100 protocols. Projects such as MakerDAO, Lido, Morpho, Gelato, Redstone, Wormhole, Across, Euler, Usual, Syrup, Venice.ai, Ether.fi, etc. are benefiting from the high capital efficiency and cost effectiveness for DEX liquidity optimization enabled by Arrakis PRO.

      For more information regarding Arrakis and Arrakis Pro, feel free to have a look at their docs or join their community:

      Arrakis | Twitter | Resources

      In addition, the team are present here and will address any questions directly - hello @Arrakis

      The Ask

      We want to significantly decrease slippage and costs for orchestrators and other participants to interact with the network through onchain liquidity.

      We are asking for 250,000 LPT (approx. $1M in USD value) to be held in a multisig controlled by the Livepeer Foundation, to be deployed via an onchain vault with Arrakis as a concentrated pool on Uniswap v4.

      Management of concentrated liquidity on Uniswap V4 allows for larger trades with minimal price impact, improving the overall trading experience. Savings to participants are substantial at approx. $1500 in slippage reduction on a $25,000 sale of LPT (estimate based on data below).

      Comparison of current and estimated price impact (after successful ETH liquidity bootstrapping) for buying LPT and ETH across different amounts

      Specification for Livepeer
      1. The Arrakis team uses the existing LPT/ETH pool on the 0.3% fee tier for UniswapV4

      2. Arrakis then deploys a dedicated vault managed by the Arrakis Pro smart contract for this LPT/ETH Uniswap pool.

      3. The Livepeer Foundation team establish a ⅔ Multisig for custody of the funds. If the proposal passes, funds are transferred onchain to this multisig account

      4. Through this Livepeer Foundation multisig, we deposit $1 million worth of $LPT into the Arrakis Pro vault. Transfers in and out of the vault are controlled by the multisig, meaning they cannot be deployed or moved by Arrakis elsewhere

      5. Arrakis Pro will allocate the provided liquidity in a concentrated and fully active market making strategy to facilitate trading on UniswapV4.

      6. The strategy initially operates to bootstrap ETH to establish a 50/50 inventory ratio over the first months. The primary objective is to create price stability by generating deep liquidity and reaching an even inventory over time.

      For the services provided, Arrakis charges the following fees:

      Arrakis Asset-under-Management (AUM) fee: 1% per year, waived for the first 6 months

      Arrakis performance fee: 50% of trading fees the vault generates

      FAQ

      What are the risks of this model?

      • Deploying funds to DEX pools bears smart contract risk and general market risk (e.g. token exposure, impermanent loss). Arrakis smart contracts have been audited by leading security firms and currently secure +$150M TVL (https://docs.arrakis.finance/text/resources/audits.html)

      What happens to the capital required?

      • The capital required is deployed by the Livepeer DAO, via a Foundation controlled multisig, to a self-custodial smart contract vault and can be withdrawn at any point in time. Arrakis does not hold custody, nor control the funds deployed outside of the mandate to manage DEX liquidity on Uniswap V4 for the respective trading pair.

      Will this impact the current liquidity on CEXs?

      • Arrakis mandate is to gradually improve on-chain markets and provide deeper liquidity for the respective pair over time on DEX markets. CEX markets will not be affected.

      How does the Arrakis model differ from standard AMMs (like Uniswap v3)?

      • Arrakis provides a sophisticated on-chain market making service, running dedicated algorithmic market making strategies.

      • Instead of manually deploying funds into the CLAMM pool, Arrakis algorithmically rebalances the position and runs active liquidity management strategies.

      Will our liquidity still be actively managed, or will it be passively allocated in a vault?

      • Close to 100% of the liquidity deployed with an Arrakis vault is actively deployed to the Uniswap CLAMM pool and provides liquidity. Small shares of liquidity remain in the vault as token reserves for rebalancing purposes.

      How is the strategy for the vault determined — who sets the parameters, and how often are they rebalanced?

      • Arrakis quant team fine tunes the strategies and engages in periodic review cycles along with 24h/365-day monitoring and alerting.

      Who controls or can modify the AMM strategy parameters?

      • Arrakis strategies are designed, deployed and maintained by professional quant traders. The Foundation can be involved in discussion in regular intervals as needed to further align on achieving the stated goals.

      Will the community have visibility into performance and strategy updates?

      • The Foundation delegates will receive access to a custom real time analytics dashboard and can share periodic updates to the forum for the community.

      What happens to the liquidity if the vault underperforms or becomes unbalanced?

      • Liquidity is actively rebalanced towards a 50:50 ratio by placing one sided limit maker orders. In adverse market scenarios strategies will adjust to certain market volatility settings.

      How do fees compare to centralized market makers?

      • Centralized market makers work in two models: a) Loan & Option b) Retainer Fix Fee payment. Arrakis works on a profit sharing of trading fees earned (50% captured by the Livepeer DAO, 50% retained by Arrakis for the services provided)

      How will LP performance be measured?

      • LP performance will be measured by market depth, price impact, slippage improvement, total volumes facilitated.

      What happens after funds are returned?

      • It’s important to note that the liquidity in the vault can remain deployed indefinitely, but also returned to the onchain treasury or control by the voters at any time. As funds will now be held in both ETH and LPT, the community can be involved in discussions about how returned funds are stored or used.

      This is a large proportion of the current treasury. What gives?

      • We recognise that this is a large ask relative to the current size and value of the treasury. The size and value of the treasury will be addressed in a separate proposal. As it relates to this proposal, consider that we will reduce slippage costs by approx 2-3X on every dex transaction. The ROI on this proposal will be quite substantial.
      ', - replyCount: 3, - datePosted: "Dec 1, 2025", - }, - { - title: "Transformation SPE Release Notes", - href: "https://forum.livepeer.org/t/3142", - author: "By Mehrdad (@Mehrdad)", - content: - "

      Release notes are a way to share work being completed by the Transformation SPE and it’s various contributors. Dive in and explore what has been happening and please reach out or reply with any questions and we will happily expand further.

      ", - replyCount: 2, - datePosted: "Nov 10, 2025", - }, - { - title: "Transcoder Campaign: organic-node.eth", - href: "https://forum.livepeer.org/t/1970", - author: "By Ron (@ron)", - content: - "

      Hello fellow video enthusiasts and web3 supporters,

      Thanks for your time in reading my post. (organic-node.eth) Node has been active for about 6 months and everyday has been a great learning experience. My node has been highly reliable with 4 Orchestrators across the globe with possibility to expand more depending on the demand. If you are looking to get in touch with me please reach out to me on discord Organic-Node#9009.

      It gives me great pleasure when looking at lenstube videos, thinking that some of these videos may have been transcoded by my Orch. Stakers and delegators enjoy passive income with my low reward cuts and low fee cut and help support robust Orch for fairer web3 platforms

      Stake here:
      (organic-node.eth)

      ", - replyCount: 1, - datePosted: "Dec 6, 2022", - }, -]; diff --git a/snippets/automationData/globals/globals.jsx b/snippets/automationData/globals/globals.jsx deleted file mode 100644 index 1a218b71..00000000 --- a/snippets/automationData/globals/globals.jsx +++ /dev/null @@ -1 +0,0 @@ -export const LatestRelease = "v0.8.8"; diff --git a/snippets/automations/README.mdx b/snippets/automations/README.mdx new file mode 100644 index 00000000..7d8867db --- /dev/null +++ b/snippets/automations/README.mdx @@ -0,0 +1,44 @@ +# Automations + +This folder contains all automations or desired automations in the Livepeer docs. Status will say [Live] if it is live, [WIP] if it is in progress, and [Future] if it is a future automation. + +The following automations are available: + +### Trending & Data Fetching + +- Fetch recent YouTube Videos & Display +- Fetch recent Forum Posts & Display +- Fetch recent Discord Announcements & Display +- Fetch recent Blog Posts & Display +- Fetch RFPs & Display + ~Maybe~ +- Fetch recent Twitter Tweets & Display +- Fetch recent GitHub Issues & Display +- Fetch recent GitHub Pull Requests & Display + +### HUBs & Contact + +- Ecosystem Projects HUB +- Resources HUB +- Partner HUB +- Sales/Data Center Contact Form (?) -> should direct to appropriate email +- + +### Changelog + +- Automated Changelog from Github Releases + +### AI Functions + +- Transcribe a Youtube Video for download +- Translate pages into other languages +- Create a hero image +- Check All Documentation Links (periodically) +- Create a video? + +### AI Optimisations + +- RAG (?) 
-> enable access to github repo's & external docs for better assistant capabilities +- MCP () + +--- diff --git a/snippets/automations/blog/ghostBlogData.jsx b/snippets/automations/blog/ghostBlogData.jsx new file mode 100644 index 00000000..f9f512fd --- /dev/null +++ b/snippets/automations/blog/ghostBlogData.jsx @@ -0,0 +1,191 @@ +export const ghostData = [ +{ + title: `AI X Open Media Forum: Building New Wave Creativity`, + href: `https://blog.livepeer.org/ai-x-open-media-forum-building-new-wave-creativity/`, + author: `By Livepeer Team`, + content: `

      The AI x Open Media Forum, hosted by the Livepeer Foundation and Refraction during Devconnect Buenos Aires, brought together artists, technologists, curators, protocol designers, founders and researchers at a moment when media is being reshaped at its foundations. Real-time AI has moved from experimental edges into active use, influencing how creative work is made, how it circulates, how it is authenticated and how value flows through entire ecosystems.

      The Forum was designed as a symposium rather than a conventional conference. Instead of panels, participants sat together in tightly focused groups, comparing lived experience with emerging technical capabilities and identifying where the next wave of open media infrastructure must come from. The premise was simple:

      If AI is rewriting the conditions of cultural production, the people building the tools and the people using them need to be in the same room.

      Across the day, it became clear that AI has begun to reconfigure creative labour. Participants described shifts in authorship, changes in access to tools and compute and growing pressure to navigate accelerated production cycles. The discussions documented in this report trace how these changes are being felt on the ground and outline the early primitives that may support an open, verifiable and creatively expansive media ecosystem.

      I. Methodology and framing questions for the forum 

      The Forum opened with a set of framing questions that clarified the core pressures at the intersection of AI and culture. They were selected because they touch the foundations of creative practice, technical design and the incentives that organise contemporary media systems. These questions served as a shared structure for the day, guiding both creative and technical groups toward the points where their worlds intersect most directly.

      These questions created a common orientation for participants with very different backgrounds. Artists used them to describe how these pressures appear in their work. Technologists used them to identify where current systems break and where new primitives might be possible. The result was a focused dialogue in which creative insight and technical reasoning informed one another. As the day progressed, these initial questions became more specific, grounded in concrete examples and shaped by the experiences of the people who are building and creating with AI right now.

      II. Creative track: New wave creativity in the age of AI

      The creative discussions opened a clear window into how AI is reshaping cultural practice. Artists, designers and musicians described shifts they are already living through: changes in authorship, new pressures around speed, and the expanding role of computation in what can be made and shared. Their experiences formed the human foundation for understanding the technical challenges that surfaced later in the day.

      1. The persistence of authorship and the idea of “code”

      One of the most important contributions came from a Venezuelan 3D artist who articulated how personal history and cultural memory form a kind of creative signature. They described this as their “code”: a composite of experience, environment and emotional texture that cannot be reduced to visual style alone.

      Argentine Daydream ambassador Franco presents his work

      “My code is my personal language, shaped by the places I come from,” they explained. “I photograph the decadence of Venezuela and turn it into something romantic. AI can remix it, but it cannot replace where I’m from.”

      This idea resonated widely across the room. Participants recognised that while AI can convincingly emulate aesthetics, it cannot reconstruct lived experience. The concern is not simply stylistic mimicry; it is the potential erosion of the cultural grounding that gives creative work its meaning.

      Serpentine Gallery curator Alice Scope added context from contemporary art: “Some artists will use these tools to push aesthetic extremes. Others will return to minimalism. That tension has always driven art history.” The consensus was that AI is entering a lineage of tools that have historically reshaped creative practice, but its scale introduces new stakes around identity and authorship.

      2. Compute access as a determinant of creative possibility

      A structural insight emerged as creators discussed their workflows: access to compute is not evenly distributed. Several participants from Latin America and other regions described how GPU scarcity and cost have become the limiting factor in pursuing their practice.

      One participant underscored the issue: “I couldn’t do what I do without Daydream. GPUs are too expensive here. This is the only way I can work at the level I want.”

      This was not framed as a complaint but as a recognition that compute access is now a primary determinant of who can participate in emerging creative forms. It became clear that compute, not talent or tools, is increasingly the gatekeeper of participation. This topic resurfaced repeatedly across both tracks and became one of the keystones of the entire Forum.

      3. Discovery systems and the changing behaviour of audiences

      Creators then turned to the challenge of reaching audiences. Traditional distribution remains shaped by opaque algorithms and engagement-driven incentives, often misaligned with the values and intentions of artists.

      Almond Hernandez from Base described the dilemma: “If you remove algorithms entirely, you place the burden of discovery back on users. But if you keep them, they can distort culture. We need ways for people to shape their own feeds.”

      This tension produced no single consensus, but it clarified a shared frustration: discovery should not force creators into optimising for platform dynamics. Instead, systems must emerge where identity, provenance and community input meaningfully influence what is surfaced.

      Friends With Benefits CEO Greg Breznitz articulated the broader implication: “Culture and technology cannot be separated anymore. What gets rewarded changes the art that gets made.” The group recognised that discovery systems are not neutral and actively shape the evolution of cultural forms.

      4. How AI is reshaping the creative process from the inside

      Refraction founder Malcolm Levy and Serpentine Gallery curator Alice Scope

      Perhaps the most nuanced discussion centred on how AI alters creative labour. Participants avoided easy dichotomies of “AI as threat” versus “AI as tool.” Instead, they articulated a more layered understanding: AI accelerates exploration but also compresses the time available for deeper creative development.

      Franco noted that the pressure to produce quickly “can corrupt the process,” a sentiment echoed by musicians and digital artists who described being pulled toward workflows optimised for speed, not refinement.

      A music platform founder contextualised this through the lens of distribution: “Platforms can train bots to listen to the AI music they create, just to farm plays.” This raised concerns about synthetic ecosystems that siphon attention away from human artists.

      Yet the group also acknowledged that AI unlocks new capacities. It lowers technical barriers, enabling more people to express ideas without specialised training. For many, it expands the field of imagination.

      Malcolm Levy of Refraction offered a framing rooted in art history: “Every movement in art is shaped by the tools of its time. Digital art was marginal until suddenly it wasn’t. AI will be the same. What matters is who shapes it.”

      Across this discussion, an essential truth emerged: AI does not eliminate creativity. It redistributes the labour involved, elevates the importance of intention and shifts the points at which authorship is asserted.

      III. Technical track: Shaping the infrastructure for trust, agency and scale

      While the Creative Track articulated what must be protected and what must remain possible, the Technical Track explored how to design systems that support those needs.

      1. Provenance as foundational infrastructure

      The technical discussion on provenance opened with a recognition that no single method can guarantee trust in an AI-saturated media environment. Participants approached provenance as an infrastructure layer that must operate across the entire lifecycle of media creation. They examined device-level capture signals, cryptographic attestations, model watermarking, social proof, dataset lineage and content signatures, emphasising that each approach addresses a different vector of uncertainty.

      The importance of this layered approach became clear through the most grounded example offered during the session. A team building a voice-data contribution platform described their experience collecting human audio samples. Even after implementing voice-signature checks and running deepfake detectors, they found that “about ten percent of the data was actually faked.” Contributors were training small voice models on their own samples and then using those models to fake additional submissions. “Validation needs human listeners, model detection and economic incentives working together,” they explained. It illustrated a key point: provenance is a dynamic adversarial problem and must be treated as such.

      This example shifted the discussion from idealised architectures to applied constraints. Participants concluded that provenance must be multi-layered, adversarially robust and economically grounded. A validator network that incorporates human judgment, machine detection and stake-based incentives was seen as a promising direction, not because it solves provenance outright but because it distributes trust across diverse mechanisms rather than centralising it in a single authority or detector. In a digital landscape stricken with antiquated copyright frameworks that hinder both the creation, dissemination and remuneration of artistic works, a multi-nodal, human-centric approach to provenance feels refreshing, urgent and necessary. 

      The discussion also connected provenance to discovery and reputation. If identity and content lineage can be verified at creation time, those signals can later inform how media is surfaced, filtered or contextualised. Provenance, in this framing, is not only about defending against deepfakes but about enabling a more trustworthy environment for cultural production, circulation and monetisation.

      2. Infrastructure for global creativity: compute, identity and discovery as interdependent primitives

      Over the course of the day, participants identified a pattern: compute, provenance and discovery are not separate concerns. They form an interdependent system that determines:

      Compute inequality emerged again as a core issue. Without access to real-time inference, creators are excluded from participating in emerging media forms. Provenance systems ensure that outputs can be trusted, and discovery mechanisms determine whether meaningful work reaches an audience.

      This preceded a rich conversation about discovery architecture. What if users could port their data across platforms to surface relevant content, instead of the platforms selling this data back to users? 

      Participants explored how portable identity, content signatures, verifiable histories and community-shaped surfacing could form a new discovery layer that operates independently of platform-level ranking algorithms. In this model, discovery becomes a protocol rather than a product: a configurable, interoperable layer where authorship, reputation and provenance act as first-class signals.

      Building open media requires a tightly interwoven stack. Compute enables creation; provenance secures identity and authorship; discovery amplifies credible work in ways that reflect the values of specific communities rather than a single optimisation function. 

      Treating these components as independent problems would reproduce the failures of existing platforms. Treating them as interdependent primitives opens the possibility for a healthier and more diverse media ecosystem.

      IV. Synthesis

      When the creative and Technical tracks were read side by side, several coherent themes emerged.

      VI. Conclusion

      The Forum made clear that the future of media will depend on coordination between creative and technical communities.

      Artists articulated what must be preserved: identity, context, agency and the integrity of the creative process. Technologists outlined the systems that can support those needs at scale.

      This event functioned as a working laboratory. The insights surfaced here will inform follow-up research, prototypes and collaborative development. Livepeer and Refraction will continue publishing materials from the Forum and supporting teams exploring these early ideas.

      Open media will not emerge from a single protocol or organisation, but from a community building the foundation together.

      `, + datePosted: `Dec 29, 2025`, + img: `https://blog.livepeer.org/content/images/2025/12/Header.png`, + excerpt: `The AI x Open Media Forum, hosted by the Livepeer Foundation and Refraction during Devconnect Buenos Aires, brought together artists, technologists, curators, protocol designers, founders and researchers at a moment when media is being reshaped at its foundations. Real-time AI has moved from experimental edges into active use, influencing how creative work is made, how it circulates, how it is authenticated and how value flows through entire ecosystems. + +The Forum was designed as a symposium rat`, + readingTime: 8 +}, +{ + title: `A Real-time Update to the Livepeer Network Vision`, + href: `https://blog.livepeer.org/a-real-time-update-to-the-livepeer-network-vision/`, + author: `By Livepeer Team`, + content: `

      For the past year, the Livepeer Ecosystem has been guided by the Cascade vision:  a path to transition from a pure streaming and transcoding infrastructure, to an infrastructure that could succeed at providing compute for the future of real-time AI video. The latest Livepeer quarterly report from Messari highlights that this transition is paying off, with network fees up 3x from this time last year, and over 72% of the fees now driven via AI inference. This is exemplified by the growing inspirational examples emerging from Daydream powered real-time AI, and real-time Agent avatar generation through Embody and the Agent SPE.

      Source: Livepeer Q3 2025 Report by Messari

      This shift has been an ecosystem wide effort – ranging from branding and communications, to productization and go to market, to hardware upgrades for orchestrators. It has successfully shifted the project under an updated mission and direction, however it has still left ambiguity in terms of what the Livepeer network itself offers as killer value propositions to new builders outside of the existing ecosystem. Is it a GPU cloud? A transcoding infra? An API engine? Now that there are signs of validation and accelerated momentum around an exciting opportunity, it’s time to really hone in on a refined vision for the future of the Livepeer network as a product itself. 

      The market for video is set to massively expand

      The concept of live video itself is expanding well beyond a simple single stream of video captured from a camera. Now entire worlds and scenes are generated or enhanced in real-time via AI assistance, leading to more immersive and interactive experiences than possible via old-school streaming alone. For a taste of the future, see the following examples:

      1. The future of gaming will be AI generated video and worlds in real-time:
      +
      1. Video streams can be analyzed and data leveraged programmatically in real-time, for instant insight generation and decision making:
      +
      1. Real-time style transfer can enable avatars and agents to participate in the global economy:
      +

      Video world models and real-time AI video are merging, as they both use AI to generate frame-by-frame video output with low latency on the fly, based on user input and AI inference. This requires a tremendous amount of GPU compute, and requires an amazing low latency video streaming and compute stack – two areas in which the Livepeer network and community thrive, and two areas to which the many other generic GPU inference providers in the market bring no unique skillset, experience, or software advantage. 

      The big opportunity for the Livepeer network is to be the leading AI Infrastructure For Real-Time Video.
      From interactive live streaming to generative world models, Livepeer’s open-access, low-latency network of GPUs will be the best compute solution for cutting edge AI video workflows. 

      World models are a game changing category, and Livepeer is well suited to offer a unique and differentiated product here, that serves a huge market of diverse and varying use cases. These range from creative entertainment, to gaming, to robotics, to data analysis, to monitoring and security, to synthetic data generation for AGI itself.

      While an ambitious stretch, Nvidia executives responsible for the category have even projected that due to the impact in robotics, the economic opportunity for world models could exceed $100 trillion, or approximately the size of the entire global economic output itself!  

      What does it mean to productize the Livepeer network to succeed as a valuable infrastructure in this category?

      From a simplified viewpoint, it needs to deliver on the following:

      1. Ability for users to deploy real-time AI workflows to the Livepeer network and request inference on them

      2. Industry leading latency for providing inference on real-time AI and world model workflows.

      3. Cost effective scalability – users can pay as they go to scale up and down capacity and the network automagically delivers the scale required.

      Imagine a gaming platform is powering world-model generated games using their unique workflows that generate game levels or areas in a certain style by combining several real-time models, LLMs, and style transfer mechanisms. Each game it’s powering has users exploring and creating their own corners of the interactive worlds, based on prompts and gameplay inputs. Every gamer that joins a game represents a new stream of AI video compute, and the Livepeer network is the backing infrastructure that provides the compute for this video world generation, leveraging hundreds or thousands of GPUs concurrently.

      For this to be possible the Livepeer network needs to enable that game platform to deploy their game generation workflow. It needs to offer low latency on the inference that runs this workflow, relative to the generic GPU compute clouds. The pricing needs to be competitive vs alternative options in the market for this GPU compute. And the network needs to allow this company to scale up and down the number of GPUs that are currently live ready to accept new real-time inference streams based on the number of users currently live on the games it is powering.

      All of this is possible on the Livepeer network, and it isn’t far away from where we are now. If we work to build, test, and iterate on the Livepeer network itself towards supporting the latency and scale required for these types of workflows, we’ll be set up to power them.
      Now multiply this example gaming company by the high number of diverse industries and verticals that real-time AI and world models will touch. Each category can have one or multiple companies competing to leverage this scalable and cost effective infrastructure for unique go to markets targeting different segments. And they can all be powered by the Livepeer network’s unique value propositions.

      Livepeer’s core network is strategically positioned

      What are these value propositions that make the Livepeer network differentiated relative to alternative options in the market? I’d argue that there are three primary, table stakes, must-have value propositions if Livepeer is to succeed. 

      1. Industry standard low latency infrastructure specializing in real-time AI and world model workflows: First of all, the network needs to let its users deploy custom workflows. Inference alone on base models is not enough and does not represent scaled demand. Users want to take base models, chain them together with other models and pre/post processors, and create unique and specialized capabilities. When one of these capabilities is defined as a workflow, that is the unit that needs to be deployed as a job on the Livepeer network, and the network needs to be able to run inference on it. Secondly, for these real-time interactive use cases, latency matters a lot. Generic GPU clouds don’t offer the specialized low latency video stacks to ingest, process, and serve video with optimal latency, but Livepeer does. And Livepeer needs to benchmark itself to have lower or equal latency to alternative GPU clouds for these particular real-time and world model use cases.

      2. Cost effective scalability: GPU provisioning, reservations, and competing for scarce supply procurement creates major challenges for AI companies – often overpaying for GPUs that sit idle most of the time in order to guarantee the capacity that they need. The Livepeer network’s value proposition is that users should be able to “automagically” scale up almost instantly and pay on demand for the compute that they use, rather than having to pre-pay for reservations and let capacity sit idle. This is enabled by Livepeer taking advantage of otherwise existing idle longtail compute through its open marketplace, and its supply side incentives. The Livepeer network needs to be more cost effective than alternative GPU clouds within this category - with impacts comparable to the 10x+ cost reduction already demonstrated in live video transcoding delivered by the network.

      3. Community driven, open source, open access: The Livepeer project and software stack is open source. Users can control, update, and contribute to the software they are using. They also can be owners in the infrastructure itself through the Livepeer Token, and can benefit from the network’s improvements and adoption, creating a network effect. The community that cares about its success and pushes it forward collectively, can be a superpower, relative to the uncertain and shaky relationship between builders and centralized platform providers, who have a history of getting rugged based on limitations to access, changes in functionality, or discontinuity of the platforms. Anyone can build on the Livepeer network regardless of location, jurisdiction, use case, or central party control.

      The above are primary value propositions that should appeal to nearly all users. And we must work to close the gaps to live up to those value props before we could successfully hope to go to market and attract new vertical-specific companies to build directly on top of the network. Luckily, in addition to all of Livepeer’s streaming users, we have a great realtime AI design partner in Daydream, which is already going to market around creative real-time AI, using the network, and contributing to its development to live up to these requirements. While building with this design partner, the ecosystem should be working to productize to live up to these promises in a more generic perspective – it should be setting up benchmarks, testing frameworks, and building mechanisms for scaling up supply ahead of demand, so that it can represent this power to the world alongside successful Daydream case studies.

      Opportunities to push towards this vision

      To truly live up to these value propositions, there are a number of opportunities for the community to focus on in order to close some key gaps. There are many details to come in more technical posts laying out roadmaps and execution frameworks, but at a high level, consider a series of milestones that take the network as a product from technically functional, to production usable, to extensible, to infinitely scalable:

      1. Network MVP - Measure what matters: Establish key network performance SLAs, measure latency and performance benchmarks, and enhance the low latency client to support realtime AI workflows above industry grade standards.
      2. Network as a Product - Self adaptability and scalability: Network delivers against these SLAs and core value props for supported realtime AI workflows. Selection algorithms, failovers and redundancy, and competitive market price discovery established for realtime AI.
      3. Extensibility - Toolkit for community to deploy workflows and provision resources: Workflow deployment and signaling, LPT incentive updates to ensure compute supply for popular AI workflows exceeds demand.
      4. Parallel Scalability: Manage clusters of resources on the network for parallel workflow execution, truly unlocking job types beyond single-GPU inference. 

      Many teams within the ecosystem, from the Foundation, to Livepeer Inc, to various SPEs have already started operationalizing around how they’ll be contributing to milestones 1 and 2 to upgrade the network to deliver against these key realtime AI value propositions. 

      Conclusion and Livepeer’s opportunity

       The market for the opportunity to be the GPU infrastructure that powers real-time AI and world models is absolutely massive – the compute requirements are tremendous - 1000x that of AI text or images - and real-time interaction with media represents a new platform that will affect all of the above-mentioned industries. The Livepeer network can be the infrastructure that powers it. How we plan to close the needed gaps and achieve this will be the subject of an upcoming post. But when we do prove these value propositions, Livepeer will have a clear path to 100x the demand on the network

      The likely target market users for the network are those startups that are building out vertical specific businesses on top of real-time AI and world model workflows. The ecosystem should look to enable one (or multiple!) startups in each category going after building real-time AI platforms that serve gaming, that serve robotics, that serve synthetic data generation, that serve monitoring and analysis, and all the additional relevant categories. The network’s value propositions will hopefully speak for themselves, but in the early stages of this journey, it is likely the ecosystem will want to use incentives (like investment or credits) to bootstrap these businesses into existence. Each will represent a chance at success, and will bring more demand and proof.

      Ultimately, many users of these platforms may choose to build direct on the network themselves. Similarly to how startups start to build on platforms like Heroku, Netlify, or Vercel, and then as they scale and need more control and cost savings they build direct on AWS, and then ultimately move to their own datacenters after reaching even more scale – users of Daydream or a real-time Agent platform built on Livepeer, may ultimately choose to run their own gateways to recognize the cost savings and control and full feature set that comes from doing so. This is a good thing! As it represents even more usage and scale for the network, more proof that as an infrastructure the Livepeer network has product market fit, and that it can absorb all workflows directly. The businesses built on top will provide their own vertical specific bundles of features and services that onboard that vertical specific capacity, but they’ll be complemented by and enabled by the Livepeer Network’s superpowers.

      While there’s a lot of work ahead, the Livepeer community has already stepped up to cover tremendous ground on this mission. At the moment by already powering millions of minutes of real-time AI inference per week, by our orchestrators already upgrading their capacity and procurement mechanisms to provide real-time AI-capable compute, and by the Foundation groups already working to evaluate the network’s incentives and cryptoeconomics to sustainably fund and reward those contributing to this effort, we’re set up well to capture this enormous opportunity!

      `, + datePosted: `Nov 13, 2025`, + img: `https://blog.livepeer.org/content/images/2025/11/LP_Blog-Header_Nov25_01_moshed-1.png`, + excerpt: `For the past year, the Livepeer Ecosystem has been guided by the Cascade vision:  a path to transition from a pure streaming and transcoding infrastructure, to an infrastructure that could succeed at providing compute for the future of real-time AI video. The latest Livepeer quarterly report from Messari highlights that this transition is paying off, with network fees up 3x from this time last year, and over 72% of the fees now driven via AI inference. This is exemplified by the growing inspirat`, + readingTime: 9 +}, +{ + title: `Livepeer Onchain Builders - Streamplace: Building the Video Backbone of Decentralized Social`, + href: `https://blog.livepeer.org/livepeer-onchain-builders-streamplace-building-the-video-backbone-of-decentralized-social/`, + author: `By Livepeer Team`, + content: `

      Welcome to Livepeer Onchain Builders, a new content series spotlighting the Special Purpose Entities (SPEs) funded by the Livepeer onchain treasury. SPEs are working groups funded by the community treasury to work on specific tasks and are accountable to the community for their delivery. These deep dives will explore how each initiative is driving protocol usage, expanding infrastructure, and pushing the boundaries of what’s possible in decentralized video and AI.

      Streamplace is an open-source video streaming platform designed to power decentralized social applications with real-time, creator-first infrastructure. It aims to make livestreaming and video hosting as seamless as TikTok or YouTube, but built on open protocols and self-sovereign identity.

      What makes it ambitious? Streamplace is not only building full-stack video infra for federated social networks, it's doing so in a way that prioritizes interoperability, scalability, and public goods. From developer SDKs to end-user apps, Streamplace is building an entire ecosystem.

      What is an SPE? 

      A Special Purpose Entity (SPE) is a focused, community-funded team contributing to the Livepeer ecosystem. SPEs are typically mission-driven groups that operate independently to build infrastructure, applications, or tooling that expand and improve the Livepeer protocol. These teams are funded through proposals to the onchain treasury and are accountable to the community.

      SPEs are necessary for the ecosystem because no single team can build every part of a decentralized protocol. SPEs decentralize development, fund public goods, and allow the community to direct resources where they're most needed.

      Why do they matter to delegators and stakeholders? Because SPEs grow in usage. More usage = more fees = more rewards. Delegators benefit when the protocol succeeds, and SPEs are among the most direct ways to make that happen.

      From Aquareum to Streamplace

      A clear goal drives the team behind Streamplace: to build the foundational video infrastructure for the next generation of decentralized social platforms. These platforms, such as Farcaster and the AT Protocol, promise user-owned identity and interoperability, but have thus far lacked robust support for live and on-demand video.

      Streamplace solves this by providing a full-stack, developer-friendly video layer that anyone can plug into. It's a bold attempt to make decentralized video feel as native and easy as its Web2 counterparts.

      Streamplace started as Aquareum, a project with the same mission and team. This evolution into Streamplace is a rebranding, not a restart, building on past momentum with a sharper focus.

      Their vision is to give every user the ability to publish, stream, and remix content with the same ease as TikTok or YouTube, but backed by self-sovereign identity and decentralized networks.

      Streamplace homepage

      The first proposal delivered:

      • A unified Aquareum node: bundling the Livepeer stack with indexing and playback.
      • App releases on iOS, Android, and Web.
      • Native integrations with AT Protocol and Farcaster.
      • Support for C2PA metadata and content provenance.

      Now, Streamplace continues that momentum with 100,000 LPT in treasury funding and a clear mandate to scale.

      Streamplace Grafana dashboard

      Why Streamplace Matters

      Video is the heart of online social interaction. Yet decentralized social networks have lagged in providing seamless, user-friendly video experiences. Streamplace addresses this by:

      • Transcoding every livestream through Livepeer, providing decentralized, low-cost processing for global delivery.
      • Powering partner platforms like Skylight Social, a TikTok alternative backed by Mark Cuban, that recently hit #1 in entertainment on the App Store.
      • Making it dead-simple to stream or host video through single-binary nodes that anyone can deploy.
      • Championing public goods, 100% of their code is open source, with a commitment to infrastructure, not monetization lock-in.

      Decentralized social, spanning protocols like Farcaster, AT Protocol, and Bluesky, represents a movement toward user-owned networks and open standards. These networks are gaining traction, but video remains a missing layer. That’s where Streamplace comes in.

      Video is essential because it's the most engaging, expressive medium for creators and communities. And as these decentralized platforms scale, having real-time, composable video becomes non-negotiable.

      Streamplace positions itself as the default video infra layer for this new social stack, and with every stream transcoded through Livepeer, it's also a major driver of protocol usage and visibility.

      What Streamplace 2.0 Will Deliver

      This new phase of work, funded by the Livepeer treasury, focuses on scale, performance, and ecosystem integration:

      Infrastructure Enhancements

      • Expand server capacity to support growing user bases like Skylight.
      • Harden video nodes for reliability under real-world load.
      • Deliver high-quality performance on all platforms: Web, iOS, Android.

      Protocol and Developer Growth

      • Deepen native integration with AT Protocol.
      • Build SDKs and NPM packages to embed Streamplace easily into other apps.
      • Ship VOD functionality and new moderation tools.

      Community-First Ethos

      • Launch creator monetization models and stream incentive programs.
      • Empower streamers with self-hosted app capabilities ("Twitch, but it's your own app").
      • Maintain full transparency and livestream development.

      The Livepeer Angle

      Livepeer's decentralized video infrastructure powers every second of video on Streamplace. That means more work for orchestrators, more fees flowing through the protocol, and more incentive for high-quality node operation.

      Streamplace strengthens the Livepeer ecosystem in three key ways:

      • Demand generation: Real-world usage at scale means more consistent transcoding work.
      • Protocol visibility: High-impact apps like Skylight drive awareness of Livepeer beyond its native circles.
      • Infrastructure robustness: Streamplace's nodes enhance the distributed capacity of the Livepeer network.

      Without Livepeer, a decentralized video stack like Streamplace wouldn’t be possible. And without ambitious apps like Streamplace, Livepeer wouldn’t have the same opportunity to prove its value at scale.

      Final Thoughts

      Streamplace is a keystone piece of open video infrastructure and a cornerstone in the emerging world of decentralized social media. By fusing creator-first tooling with Livepeer’s scalable infrastructure, it offers a glimpse into what the open internet can become.

      As decentralized protocols shift from vision to adoption, the need for native video is urgent. Streamplace, with the support of the Livepeer treasury and a relentless commitment to open-source infrastructure, is meeting that need head-on.

      If you're a developer, creator, or community builder, now is the time to get involved.

      Do you want to contribute to Streamplace's success? Explore the open roles here.

      Interested in building or contributing to the Livepeer ecosystem? Learn more about current and past SPEs, open opportunities, and how to submit your own proposal here.

      Follow along, fork the code, or join a stream — the future of social video is open.

      Streamplace App

      Streamplace Proposal

      Aquareum Proposal


      Livepeer is a decentralized video infrastructure network for live and on-demand streaming. It has integrated AI Video Compute capabilities (Livepeer AI) by harnessing its massive GPU network and is now building the future of real-time AI video.

      Twitter | Discord | Website

      `, + datePosted: `Aug 14, 2025`, + img: `https://blog.livepeer.org/content/images/2025/08/Onchain-Builders-Streamplace.jpg`, + excerpt: `Welcome to Livepeer Onchain Builders, a new content series spotlighting the Special Purpose Entities (SPEs) funded by the Livepeer onchain treasury. SPEs are working groups funded by the community treasury to work on specific tasks and are accountable to the community for their delivery. These deep dives will explore how each initiative is driving protocol usage, expanding infrastructure, and pushing the boundaries of what’s possible in decentralized video and AI. + +Streamplace is an open-source `, + readingTime: 5 +}, +{ + title: `Builder Story: dotsimulate x Daydream`, + href: `https://blog.livepeer.org/builder-story-dotsimulate-x-daydream/`, + author: `By Livepeer Team`, + content: `

      Building StreamDiffusionTD Operator - a Real-Time Generative Video Operator for TouchDesigner, Powered by the Daydream API

      Creator:
      Lyell Hintz (@dotsimulate)
      Operator: StreamDiffusionTD
      Backends Supported: Local + Daydream (Livepeer)

      +
      + +
      + +
      +
      +
      + + + 0:00 +
      + /0:34 +
      + + + + + +
      +
      +
      + +

      Overview

      StreamDiffusionTD is a TouchDesigner operator that connects real-time inputs like audio, sensors, and camera feeds to StreamDiffusion, enabling live generative visuals controlled in real time. With the Daydream API, it adds remote inference capabilities on top of the existing local GPU inference and unlocks more flexibility for users.

      Built by Lyell Hintz, a technical artist and TouchDesigner developer, the operator is used in live shows, installations, and experimental workflows.

      Why It Was Built

      Lyell began working on the operator a few hours after StreamDiffusion was released on GitHub. He wanted to use it in TouchDesigner - a powerful tool for real time interactive content creation.

      “TouchDesigner is the only place this could be controlled from… it can hook into everything else.”

      From the start, he avoided creating a “black box.” The operator exposes core parameters like prompt, seed, and ControlNet weights, allowing users to adjust values and see results immediately.

      Key Features

      • Real-time video generation
      • Prompt and seed morphing
      • Dynamic ControlNet weighting
      • Live input support: audio, sensors, camera
      • Local GPU and Daydream backend options
      • Instant visual feedback in TouchDesigner
      +
      + +
      + +
      +
      +
      + + + 0:00 +
      + /0:26 +
      + + + + + +
      +
      +
      + +

      Daydream API Integration

      StreamDiffusionTD works with the Daydream API, which allows the operator to run on a remote GPU backend. This eliminates the major barrier of requiring a high-end PC with an NVIDIA RTX 4090 to run StreamDiffusion at professional quality, unlocking the flexibility to run it from any location, on any device form factor.

      Just drop in your API key and hit “Start Stream.” The backend handles orchestration, model hosting, and frame delivery, so builders can stay focused on their creative and technical workflows.

      Setup takes less than 1 minute and once installed, the configuration is remembered for future use. Daydream’s API brings new features to StreamDiffusion:

      • Multi-controlnet: Mixing different controlnets for better artistic control
      • IPAdapter: Use images as powerful style guides
      • TensorRT: Better frame rate for smooth video output

      Daydream is adding support for more real time video generation models, and developers can request features, suggest improvements, or build on top of the API itself. It aligns with the values of open tooling and community-led infrastructure.

      How Artists can use StreamDiffusionTD in TouchDesigner

      • Audio-reactive visuals for concerts
      • Camera-driven generative visuals
      • Real-time visuals for LED walls and stages
      • TouchDesigner automation workflows

      Because it's built inside TouchDesigner, the operator can be extended using Python, MIDI, OSC, or any other input TouchDesigner supports.

      Current State

      The operator is live and ready to use, with active development underway for new features and improved performance. It’s a great time to jump in, explore, and help shape what comes next.

      Try it Yourself

      Operator Access: patreon.com/dotsimulate
      Community and Support: discord.gg/daydreamlive
      API Keys can be requested here

      `, + datePosted: `Aug 5, 2025`, + img: `https://blog.livepeer.org/content/images/2025/08/DD_Builder-Story_dotsimulate_01.png`, + excerpt: `Building StreamDiffusionTD Operator - a Real-Time Generative Video Operator for TouchDesigner, Powered by the Daydream API + +Creator: Lyell Hintz (@dotsimulate) +Operator: StreamDiffusionTD +Backends Supported: Local + Daydream (Livepeer) + + + + + + + + + + + + + + + + + + + + + + + + +0:00 + +/0:34 + + +1× + + + + + + + + + + + + + + + + + +Overview + +StreamDiffusionTD is a TouchDesigner operator that connects real-time inputs like audio, sensors, and camera feeds to StreamDiffusion, enabling live generative visuals controlled in real time. Wit`, + readingTime: 2 +} +]; \ No newline at end of file diff --git a/snippets/automations/discord/discordAnnouncementsData.jsx b/snippets/automations/discord/discordAnnouncementsData.jsx new file mode 100644 index 00000000..16a761fc --- /dev/null +++ b/snippets/automations/discord/discordAnnouncementsData.jsx @@ -0,0 +1,16 @@ +export const discordAnnouncementsData = [ + { + id: "1463397885272920138", + content: "📣 __The CloudSPE proposal is live.__ 🗳️ 📣

      The proposal funds Cloud SPE to build a focused MVP for standardized, publicly observable network performance, reliability, and demand metrics, making the network measurable and comparable while laying the groundwork for future SLA-aware routing and scaling.

      Vote Yes ✅ or No ❌ [here](https://explorer.livepeer.org/treasury/47675980806842999962173227987422002121354040219792725319563843023665050472833)", + author: "AlisonWonderland", + timestamp: "2026-01-19T18:27:40.785000+00:00", + url: "https://discord.com/channels/423160867534929930/428351836609576972/1462876182298103963" + }, + { + id: "1463397844890288351", + content: "📣 __Vote now on the Protocol R&D SPE__ 🗳️ 📣

      All network value depends on protocol security. The proposal argues for a dedicated, continuously staffed function for protocol security, upgrades, and core improvements, replacing the current ad hoc model with a single accountable structure.

      Vote Yes ✅ or No ❌ [here](https://explorer.livepeer.org/treasury/67253869199932483234551664403036205881217777786063955710174984983936506090761)", + author: "AlisonWonderland", + timestamp: "2026-01-15T16:42:42.059000+00:00", + url: "https://discord.com/channels/423160867534929930/428351836609576972/1461400212063916114" + } +]; diff --git a/snippets/automationData/forum/Hero_Livepeer_Forum.png b/snippets/automations/forum/Hero_Livepeer_Forum.png similarity index 100% rename from snippets/automationData/forum/Hero_Livepeer_Forum.png rename to snippets/automations/forum/Hero_Livepeer_Forum.png diff --git a/snippets/automations/forum/forumData.jsx b/snippets/automations/forum/forumData.jsx new file mode 100644 index 00000000..691b8560 --- /dev/null +++ b/snippets/automations/forum/forumData.jsx @@ -0,0 +1,34 @@ +export const forumData = [ + { + title: 'RFP — Devconnect Assembly', + href: 'https://forum.livepeer.org/t/3101', + author: 'By nickhollins (@nickhollins)', + content: '

      Date Issued: 2025-10-02

      Issued By: Livepeer Foundation

      Contact: Nick Hollins

      1. Objective

      Deliver a half-day Assembly during Devconnect Buenos Aires (Nov 17–22, 2025) that positions Livepeer as the video, AI, and media layer of the open internet.

      The event should be participatory and not panel-driven. It will convene 60–80 curated attendees selected by application and invitation — creators, developers, founders, researchers, and cultural practitioners — to explore the future of media, AI, and video.

      Attendees will take part in active workshops, roundtable conversations, and share the findings of their group conversations with the Assembly which will also be reported on in post-event materials.


      2. Problem Statement

      The Livepeer Foundation, which runs the event, does not have a full-time Events team nor a presence in Argentina. To succeed, we need a production partner who can bring together suppliers locally to execute the event, whilst connecting us with adjacent ecosystems.


      3. Desired Outcome

      A successful production partner (or partners) will help deliver a high quality event of 60-80 participants in an intimate setting on a limited budget. It should create an atmosphere conducive to deep, structured discussions about media, AI, and video, and generate tangible outputs in the form of documented findings, media content, and post-event publishing.

      The event should generate great engagement throughout and secure an 80 NPS following the event. It should ensure that the right people are in the room and there is a strong quality of discussion.

      We aim to capture event photography, highlight video content for socials, and workshop session notes to be published as reports after the event by the Livepeer comms team and our partners.


      4. Deliverables

      (i) Venue & Production Management

      Goal: Secure and manage a suitable venue that reflects the creative and technical identity of Livepeer.

      Requirements:

      • Coordinate with Valerie’s Factory (preferred) or source an equivalent space.

      • Oversee AV setup (projector, mics, sound, WiFi).

      • Manage catering (light food, coffee/refreshments).

      • Provide signage, branding integration, and wayfinding.

      Outcome: A professional and welcoming physical environment that supports collaboration and reflects Livepeer’s brand.

      Due Date: Wednesday, Nov 19, 2025 (event day).


      (ii) Run-of-Show & Facilitation Support

      Goal: Ensure the Assembly flows seamlessly from check-in through closing.

      Requirements:

      • Design participant flow (registration, seating, session transitions).

      • Coordinate with Livepeer team on structured discussion formats (workshops, facilitators).

      • Provide staff for check-in and on-site coordination.

      Outcome: A cohesive program experience where attendees feel engaged, cared for, and able to focus fully on the content.

      Due Date: Wednesday, Nov 19, 2025.


      (iii) Attendee Experience & Integration

      Goal: Deliver a high-quality, curated audience with a smooth experience from invite to participation.

      Requirements:

      • Manage attendee invitations, including application review, confirmations, and waitlist coordination.

      • Oversee check-in, seating arrangements, and participant flow throughout the Assembly.

      • Ensure clear pre-event communication to attendees, setting expectations for format and outputs.

      Outcome: A well-balanced, high-signal group of attendees including builders, creators, and cultural practitioners, engaged in the Assembly with a seamless end-to-end experience.

      Due Date: Wednesday, Nov 19, 2025.


      (iv) Partner Coordination & Integration

      Goal: Ensure strong representation and meaningful integration of Livepeer’s ecosystem partners in the Assembly.

      Requirements:

      • Work with partner teams to secure speakers, facilitators, and invited participants.

      • Allocate partner seat blocks and coordinate their distribution.

      • Liaise with the Livepeer comms team to align on pre-event promotion, branding, and partner visibility.

      Outcome: Clear and effective partner participation, with strong representation in the room and visible alignment between Livepeer and key ecosystem collaborators.

      Due Date: Wednesday, Nov 19, 2025.


      (v) Post-Event Wrap & Documentation

      Goal: Capture and package the outputs of the Assembly for distribution to the wider community.

      Requirements:

      • Document key discussion points and workshop findings.

      • Provide photo/video documentation (in collaboration with Livepeer comms).

      • Deliver post-event attendance report and budget reconciliation.

      Outcome: A published recap and content package that amplifies Livepeer’s leadership in media/AI/video.

      Due Date: Friday, Nov 28, 2025.


      5. Capabilities Required

      Skills
      • Event production & logistics (planning through execution).

      • AV/technical production (sound, staging, live streaming).

      • Venue management and catering.

      Knowledge
      • Experience working with creative and tech communities.

      • Understanding of participatory and workshop-driven event formats.

      • Familiarity with Web3/crypto-native contexts (a plus).

      Attitude
      • Collaborative, detail-oriented, and budget-disciplined.

      • Comfortable with creative and experimental event formats.

      • Proactive about risk management.


      6. Proposal Requirements

      Proposals should include:

      • Executive Summary – overview of approach and why you’re suited.

      • Team Overview – contributors, bios, relevant case studies.

      • Past Work & Experience – examples of similar productions.

      • Approach & Timeline – breakdown leading to Nov 19.

      • Pricing Breakdown – milestone-based costs.


      7. RFP Timeline

      • Proposal Deadline: Wednesday, Oct 8, 2025

      • Decision Announced: Friday, Oct 10, 2025

      • Project Start: Monday, Oct 13, 2025

      • Event Date: Wednesday, Nov 19, 2025

      • Completion: Friday, Dec 5, 2025


      8. Proposal Submission Instructions

      • Format: PDF / Notion page / presentation.

      • Submission: Post as replies to this forum thread.

      • Questions: Reach out to Nick | Livepeer Foundation on Discord.

      • Payments: Milestone-based, released on deliverable completion.

      ', + replyCount: 5, + datePosted: 'Oct 2, 2025', + }, + { + title: 'Continuing discussions on Inflation', + href: 'https://forum.livepeer.org/t/3139', + author: 'By b3nnn (@b3nnn)', + content: '

      Hey everyone, I wanted to use this post to reinvigorate the inflation discussion led by @dob in this thread earlier this year.

      As a member of the Foundation, and as chair of the Capital Markets Advisory board, I think it’s important to keep us moving forward on this as it is part of broader perceptions of the Livepeer project, is part of the broader industry focus on ‘fundamentals’, and is a key component of how capital is allocated within our ecosystem.

      From previous discussions (and some new ones), it seems there is broad consensus on the need for small and incremental action. I see my role as helping give a little nudge so we take that small but important first step.

      The previous draft from Dob got us to the starting line of what a proposal could look like. My personal tldr of the thread was that:

      • There was general alignment that we should start taking some action

      • There’s alignment on using existing parameters, which avoids risks or delays from new protocol or smart contract work

      • But.. the sticking point was whether to do that using targetBondingRate or inflationChange , or both, and how to do it in a principled, risk aware way rather than using something that might feel a bit arbitrary

      Reinforcing all of this, during the Livepeer summit Doug and Arunas /@Jonas_Pixelfield completed a hackathon project that both modeled parameter changes and surveyed a sizeable set of Orchestrators and Delegators on their perceptions. A short summary is that:

      • Simple modeling shows small parameter changes lead to effects over a fairly long time horizon (in the range of 12+ months to reach something that might be considered major change). This gives ample time to start, observe, and learn and adapt as necessary as we go

      • The survey and interviews further reinforced the consensus from Orchestrators and Delegators that they see the need for action, but sometimes struggle to find confidence with any given approach

      With all this in mind, I want to share what we plan to do to help the community move forward:

      • Firstly, we want to keep discussing the Inflation topic with Orchestrators and Delegators. Two ways to do this include:

      • Secondly, we intend to try to quantify the risks involved with some additional modeling. I’ve asked Andrew from Shtuka Research (who is a member of the Capital Markets Advisory board) to take the lead on this. Andrew is a mathematician with a long career in academic and applied research, who will help quantify the risks of different change scenarios. He’ll also be helping us build out a framework for continual risk monitoring and adjustment in the future, so that we can all have confidence to move forward to voting on any proposed changes.

      Hopefully you agree that these goals are a relatively simple way to make that last important push and build on the broad consensus reached so far. This is not a one-and-done topic so we will share a bit more about what the path ahead could look like as we get more information.

      I’m going to sign off here so that Andrew can share a bit more about the survey and modeling, and I’d encourage anyone who wants to chat on this topic to reach out to me direct via DM on Discord or by using my calendar link shared above.

      ', + replyCount: 8, + datePosted: 'Nov 5, 2025', + }, + { + title: 'RFP — Documentation Restructure', + href: 'https://forum.livepeer.org/t/3071', + author: 'By honestly_rich (@honestly_rich)', + content: '

      Date Issued: 2025-09-17
      Issued By: Livepeer Foundation
      Contact: Rich O’Grady


      1. Objective

      Restructure, refresh, and modernize Livepeer’s documentation so that it is stakeholder-focused, AI-first, and future-proofed. It should cater to the core personas of the Livepeer project: developers, delegators, gateway operators and orchestrators.


      2. Problem Statement

      Current Livepeer docs suffer from:

      • Complicated onboarding: User journeys (node operators, app builders, delegators, gateway providers) are hidden behind toggles instead of clear entry points.
      • Outdated or inconsistent content: Deprecated APIs, stale references, incomplete AI coverage, and fragmented changelogs.
      • Brand & duplication: Studio-specific guidance is mixed into core docs; AI SDKs and APIs are duplicated across gateways.
      • Weak site integration: Poor linkage between website, explorer, governance portal, and docs. Too many Studio dashboard references.


      3. Desired Outcome

      Success is a single-source-of-truth documentation system that:

      • Leads with clear stakeholder-focused onboarding and goal-oriented entry points.
      • Cleanly separates AI Jobs vs Transcoding Jobs while still surfacing cross-cutting resources (SDKs, APIs, CLI, on-chain/network).
      • Fully deprecates Studio content with redirects and zero broken links.
      • Provides AI-first documentation: semantically structured, LLM-readable, with embedded natural language search/assistant.
      • Consolidates changelogs and introduces versioning / deprecation tracking.
      • Establishes a style guide, contribution model, and ownership playbook for consistency.
      • Integrates seamlessly with the broader Livepeer ecosystem (website, explorer, governance, dashboards).


      4. Deliverables
      (i) Present New Documentation Strategy

      Goal: Create a new outline for Livepeer documentation, including full map of current documentation, a clear information architecture and timeline for writing new documents.

      Requirements:

      • Identify core stakeholder groups (Livepeer Foundation, Livepeer Inc, AI SPE, Cloud SPE, Streamplace, Frameworks and more)
      • Conduct an audit of all docs pages with status recommendations across the 4 categories (Developers, Delegators, Orchestrators, Gateway Operators)
        • Developers: clean up deprecated sections and plan integrations with new gateway products (Streamplace, Frameworks, Daydream and more)
        • Orchestrators: simplify documentation to easy onboarding with plan for support in Discord.
        • Delegators: integrate new video content to make it easy to delegate.
        • Gateways: streamline documentation and workflows with support from the Foundation.
      • Create plan for an updated sidebar, taxonomy, and breadcrumb structure.
      • Consolidation of multiple changelogs into a single canonical feed.
      • Onboard stakeholders to project management process

      Outcome: A forum post detailing the new documentation to the community with a 1-week window RFC.

      Demo Due Date: Friday 17th October

      (ii) Re-Write Documentation

      Goal: Systematically edit and rewrite new content to meet stakeholder needs with consistent accuracy and depth.

      Requirements:

      • Work with core stakeholders to rewrite documentation
      • Make the documentation easily consumable by AI systems and empower users with an embedded assistant (semantic headings, structured metadata, and machine-readable references (OpenAPI specs, JSON examples)).
      • Integrate embedded natural-language search or AI assistant (leveraging Mintlify features) and ensure clear explanations and concise summaries for LLM parsing.
      • Rewrite quickstarts for both AI Jobs and Transcoding Jobs.
      • Migration guides for Studio users.
      • Integrate goal-based tutorials for each stakeholder type where possible.
      • Work with existing groups to incorporate starter repos, examples, and copy-paste snippets and full API/SDK/CLI references with updated coverage (including realtime + BYOC APIs).
      • Conduct review with core stakeholders with a clear RFC.

      Outcome: First written draft of clear, accurate, and goal-oriented documentation that accelerates adoption and reduces support overhead.

      Demo Due Date: Friday 7th November

      (iii) V1 Documentation Live

      Goal: Deliver a technically sound and reliable documentation site.

      Requirements:

      • Implement redesigned IA and content in the current docs stack (Mintlify/Docusaurus).
      • Set up redirects, SEO and AEO optimization, and accessibility compliance (WCAG).
      • Integrate multilingual readiness and analytics tracking.
      • Integrate the documentation into the website.

      Outcome: A responsive and performant documentation site with zero broken links, measurable engagement, and improved accessibility.

      Demo Due Date: Friday 14th November

      (iv) Public Workflow For Maintenance & Community Contributions

      Goal: Create a consistent tone and a scalable contribution process.

      Requirements:

      • Work with the Livepeer Foundation’s Technical Director to establish a unified voice and style guide (tone, terminology, formatting, accessibility).
      • Create contribution guidelines and PR workflow for community involvement.
      • Define and handover ownership and review process for maintaining quality.
      • Integrate multilingual readiness and analytics tracking.
      • Provide a clear ticketing system for reporting problems and patching fixes.

      Outcome: A sustainable documentation process with consistent voice, tone, and governance.

      Demo Due Date: Friday 5th December


      5. Capabilities Required
      Skills
      • Developer documentation strategy, IA design, technical writing.
      • Static site tooling, redirect management, docs CI pipelines.
      • SEO, accessibility, multilingual documentation workflows.
      Knowledge
      • 1+ years experience with Livepeer ecosystem
      • Streaming/transcoding basics (FFmpeg, GPU workloads).
      • AI inference workflows basics, particularly working with APIs.
      • Open-source contribution models and GitHub-based workflows.
      • Comparative familiarity with best-in-class docs (e.g., Chainlink, Base, Solana).
      Attitude
      • Community-first, collaborative, pragmatic.
      • Strong eye for clarity, consistency, and long-term maintainability.
      • Willingness to challenge outdated patterns and propose future-proof solutions.
      • Enjoyment in distilling complex technical concepts into minimal, user-focused documentation.

      6. Proposal Requirements

      Please include in your proposal:

      • Executive Summary - give an overview of the proposal and why you are suited.
      • Company / Contributor Overview & Capabilities - breakdown of each contributor with bio and relevant case studies.
      • Past Work & Experience - examples of docs restructures, especially for developer platforms.
      • Milestone Breakdown - giving a week-by-week breakdown of the project in line with the due dates and requirements above.
      • Pricing Breakdown - breakdown of payment by milestone with optional ongoing support.

      Though we recommend you use your own creativity in the proposal, an example template is here: Livepeer RFP — Proposal Template


      7. RFP Timeline
      • Proposal Deadline: Wednesday 24th September 2025
      • Decision Announced: Friday 26th September 2025
      • Project Start: Monday 29th September 2025
      • Project Completion: Friday 5th December 2025


      8. Proposal Submission Instructions
      • Format: PDF or Notion page (share with view access).
      • Proposal Deadline: 11:59pm GMT-7, Wednesday 24th September 2025.
      • Questions: reach out to Rich | Livepeer Foundation on the Livepeer Discord.
      • Proposal Updates: if you submit an early draft of the proposal, you are welcome to update your original proposal until the final deadline.
      • Payments: when thinking through your total budget, be mindful that payments will be made upon milestone completion.
      ', + replyCount: 12, + datePosted: 'Sep 17, 2025', + }, + { + title: 'RFP — Explorer Maintenance', + href: 'https://forum.livepeer.org/t/3072', + author: 'By Rick (@rickstaa)', + content: '

      Date Issued: 2025-09-17
      Issued By: Livepeer Foundation
      Contact: Rick Staa


      1. Objective

      Restore the Livepeer Explorer to a secure, maintainable, and high-performance state while laying the groundwork for new network-wide data and governance dashboards.



      2. Problem Statement

      The Explorer is the primary entry point for orchestrators, delegators, developers, and gateways. However, a lack of ownership since December 2023 has led to the accumulation of significant technical debt:

      • Outdated dependencies in the Explorer and design system, fragile under Node 20, break on updates and could lead to security risks, undermining long-term maintainability.
      • Duplicated/obsolete code and missing contribution infrastructure (guidelines, CI/tests, stubs), making contributions slow and error-prone.
      • Inefficient data fetching (e.g. Infura/Graph duplication), creating performance issues.
      • A backlog of unmerged PRs and unresolved bugs (e.g., broken migration widget, UI inconsistencies, incorrectly displayed data).

      A future roadmap for expanded network data and richer Explorer stakeholder experiences depends on first restoring a stable, secure, and maintainable Explorer. This RFP focuses on that critical first step.



      3. Desired Outcome

      Success means that within four months the Explorer is:

      • Clean, well tested, with automated tests and continuous integration pipelines, providing a healthy, maintainable codebase.
      • Free of critical bugs and stale pull requests, with a clearly organized issue backlog.
      • Running on up-to-date, secure dependencies (Explorer and design system) fully compatible with the current Node.js LTS.
      • Improved in performance, with faster page loads and a simplified, well-documented data layer for developers.
      • Equipped with a new voting-transparency feature integrated with the voting-tally subgraph.
      • Backed by a clear 6-month roadmap and a dedicated maintainer team providing ongoing maintenance and timely support.

      In short, the Explorer will be trusted infrastructure and ready to power further iteration of capabilities.



      4. Deliverables

      Within four months (target completion by February 1, 2026), the selected team will deliver the following milestone-based outcomes.

      Each deliverable must be demonstrated in a Livepeer community call, and the team must provide public progress updates at least every two weeks (e.g., forum posts) throughout the project.

      Payments are released only after each demo is accepted by the RFP owner.


      (i) Establish Healthy Explorer Codebase

      Goal: Deliver a clean, maintainable, and well-tested Explorer foundation to enable ongoing community contributions.

      Requirements:

      • Remove unused and duplicate code.
      • Reorganize folder and module structure, where needed, to improve navigation and long-term maintainability.
      • Add comprehensive unit and integration tests covering all critical user flows (e.g., staking, delegating, governance), with measurable coverage targets proposed by the vendor.
      • Implement CI pipelines for reliable builds and automated checks.
      • Ensure all components are fully typed (TypeScript) and all ESLint/TypeScript errors resolved.
      • Provide clear contributor documentation and review workflow, with local-development stubs/mocks so the codebase can run without production environment variables.

      Outcome: A clean, healthy, well-tested codebase with CI pipelines and local stubs that contributors can run with minimal setup, forming a solid foundation for future improvements.

      Demo Day: To be proposed by the team and agreed with the RFP owner, demonstrating the cleaned codebase, CI, tests, and contributor docs.


      (ii) Improve Data-Fetching Efficiency

      Goal: Enhance the Explorer’s data layer to reduce latency, eliminate redundant calls, and ensure responsive, reliable performance for end users and contributors.

      Requirements:

      • Optimize subgraph and RPC data fetching to reduce latency, avoid duplication, and improve responsiveness.

      Outcome: A faster, more efficient Explorer with reduced redundant calls and a simplified, well-documented data layer for easier future contributions.

      Demo Day: To be proposed by the team and agreed with the RFP owner, showcasing faster data fetching and improved data-layer developer documentation.


      (iii) Resolve Critical UI Issues & Backlog

      Goal: Eliminate high-impact bugs and stale pull requests to ensure a stable, accurate, user-friendly Explorer.

      Requirements:

      • Resolve all current critical UI bugs (GitHub bug list), including the delegator migration widget, data inaccuracies, and major UX defects, and triage/fix any new critical issues during the engagement.
      • Review and merge, close, or supersede all open pull requests (GitHub pull requests) other than the voting-transparency feature (covered in Deliverable (iv)).
      • Work with the Foundation and Advisory Boards to prioritize any other high-impact feature requests from the backlog that fit within the agreed budget and timeline.

      Outcome: A more stable, accurate, and user-friendly Explorer with all critical issues resolved and a significantly reduced bug backlog, ready for ongoing community contributions.

      Demo Day: To be proposed by the team and agreed with the RFP owner, presenting a detailed report of resolved bugs and key fixes, along with the cleaned and updated issue board.


      (iv) Deliver Voting-Transparency Feature & Subgraph Integration

      Goal: Provide clear, real-time visibility into on-chain governance participation.

      Requirements:

      Outcome: A fully deployed voting-transparency feature integrated with the voting-tally subgraph and refined UI, offering accurate, real-time governance data.

      Demo Day: To be proposed by the team and agreed with the RFP owner, demonstrating the live voting-transparency feature.


      (v) Establish Maintainability & Roadmap

      Goal: Ensure the Explorer remains easy to maintain with a clear 6-month plan.

      Requirements:

      • Publish a 6-month feature/bug roadmap aligned with the Foundation’s Data Gap Analysis and community feedback.
      • Provide contributor docs, maintenance practices, and an issue-tracking process (including a clean, well-labeled issue board).

      Outcome: A documented maintenance framework and forward roadmap enabling smooth ongoing development and community contributions.

      Demo Day: To be proposed by the team and agreed with the RFP owner, presenting the final roadmap and contributor maintenance guide.


      (vi) Provide Ongoing Support & Post-Delivery Responsibility

      Goal: Ensure professional post-launch support and continuity of maintenance, whether the same team continues or future maintainers take over.

      Requirements:

      • During the project, acknowledge critical bugs within 24–48 hours and resolve or mitigate them within a few business days (or faster per the proposer’s SLA).
      • After delivery, provide at least a 60-day support window for critical fixes, security incidents, and knowledge transfer, meeting the proposer’s SLA.

      Outcome: Stable post-launch operations with timely critical-issue resolution and clear processes for continued maintenance, regardless of who maintains the Explorer.

      Payment: The team must present a support-readiness plan with defined SLA commitments, to be agreed with the RFP owner. Ninety percent of each milestone payment is released after acceptance of that milestone’s demo. The remaining ten percent of the total contract is held until the 60-day support period concludes and SLA commitments and resolution targets are met.


      Out of Scope

      To avoid confusion, the following items are not part of this RFP:

      • A full UI/UX redesign or new visual styling of the Explorer.
      • Major new product features beyond the initial voting-transparency integration.
      • Broader data-gap mapping (handled by separate workstreams).
      • Protocol/client changes or on-chain improvements to surface new data (managed through separate RFPs within the same workstream).


      5. Capabilities Required
      Skills
      • Strong front-end engineering in modern JavaScript/TypeScript (React/Next.js), including implementing and refining accessible UI components within an existing component library or design system.
      • Proven experience with codebase modernization and maintainability—refactoring, setting up CI/CD pipelines, adding automated unit and integration tests, and enforcing TypeScript and ESLint standards.
      • Ability to update and manage complex dependency stacks, including Node.js and modern package managers, while resolving breaking changes and configuring automated update tooling (e.g., Dependabot or Renovate).
      • Proficiency in performance optimization and efficient data-fetching techniques, particularly with GraphQL/subgraph and RPC endpoints.
      • Experience reviewing, triaging, and merging open-source pull requests and managing a clean, well-labeled issue backlog.
      • Familiarity with monitoring and incident-response tools (e.g., Sentry, Bugsnag) to ensure production reliability.
      Knowledge
      • Understanding of the Ethereum/Web3 stack, including wallet flows, transactions, RPC providers, and common front-end pitfalls.
      • Awareness of accessibility (a11y) best practices and secure front-end patterns such as dependency risk management and safe secret handling.
      • Experience with open-source project governance, including contributor guidelines, code review workflows, semantic versioning, and changelogs.
      • Familiarity with The Graph/subgraph architecture and GraphQL schemas (nice to have).
      • Familiarity with the Livepeer protocol and current Explorer repository (nice to have).
      Attitude
      • Community-oriented and collaborative, engaging proactively with contributors, Advisory Boards, and Foundation stakeholders.
      • Accountable and responsive, acknowledging critical bugs within 24–48 hours and working toward mitigation or resolution within a few business days.
      • Documentation-first mindset, maintaining clear READMEs, runbooks, and migration notes to enable future contributors.
      • Quality-driven and pragmatic, balancing rigorous testing, CI, and security with on-time delivery.
      • Long-term stewardship, treating the Explorer as trusted infrastructure and designing for multi-year maintainability.
      • Supportive and open, encouraging new contributors and fostering an inclusive contributor community.
      • Mission-aligned, motivated to strengthen the Explorer as a cornerstone of the Livepeer ecosystem.


      6. Proposal Requirements

      Please include in your proposal:

      • Executive Summary - give an overview of the proposal and why you are suited.
      • Company / Contributor Overview & Capabilities - breakdown of each contributor with bio and relevant case studies.
      • Past Work & Experience – examples of large front-end refactors, dependency upgrades, CI/CD setup, performance optimization, or open-source project maintenance.
      • Milestone Breakdown – a plan aligned with the Section 4 deliverables, with proposer-set milestone dates and demo day schedules, each including clear outputs and payment tied to demo acceptance.
      • Support & Maintenance Plan – proposed SLA commitments (e.g., response and resolution times), 60-day post-delivery support approach, and handover/knowledge-transfer strategy.
      • Pricing Breakdown - breakdown of payment by milestone with optional ongoing support.

      Though we recommend you use your own creativity in the proposal, an example template is here: Livepeer RFP — Proposal Template.



      7. RFP Timeline

      Proposal Deadline: Wednesday 24th September 2025
      Decision Announced: Friday 26th September 2025
      Project Start: Wednesday 1 October 2025
      Project Completion: Sunday 1 Feb 2026



      8. Proposal Submission Instructions
      • Format: PDF or Notion page (share with view access).
      • Proposal Deadline: 11:59pm GMT-7, Wednesday 24th September 2025.
      • Questions: reach out to @rickstaa on the Livepeer Discord.
      • Proposal Updates: if you submit an early draft of the proposal, you are welcome to update your original proposal until the final deadline.
      ', + replyCount: 14, + datePosted: 'Sep 17, 2025', + } +]; diff --git a/snippets/automationData/globals/README.md b/snippets/automations/globals/README.mdx similarity index 84% rename from snippets/automationData/globals/README.md rename to snippets/automations/globals/README.mdx index d4d5f2e6..d4f94f09 100644 --- a/snippets/automationData/globals/README.md +++ b/snippets/automations/globals/README.mdx @@ -8,7 +8,7 @@ This is the recommended approach for your setup. It: - Polls the go-livepeer releases API every 30 minutes - Uses Redis to track the last known version (prevents duplicate updates) -- Only updates the `LatestRelease` value without touching anything else +- Only updates the `LatestVersion` value without touching anything else - Commits directly to the docs-v2 branch ## 2. **GitHub Action** (update-livepeer-release.yml) RECOMMENDED @@ -40,11 +40,15 @@ All files include the setup guide with detailed instructions for each approach. ### 2. Code for yml -on: schedule: # Run every 30 minutes - cron: '_/30 _ \* \* \*' -workflow_dispatch: - -jobs: check-and-update: runs-on: ubuntu-latest +```yaml +on: + schedule: # Run every 30 minutes + - cron: "*/30 * * * *" + workflow_dispatch: +jobs: + check-and-update: + runs-on: ubuntu-latest steps: - name: Checkout docs repository uses: actions/checkout@v3 @@ -62,7 +66,7 @@ jobs: check-and-update: runs-on: ubuntu-latest - name: Read current version from globals.jsx id: current_version run: | - CURRENT=$(grep -oP 'LatestRelease:\s*["'\'']?\K[^"'\'']+' snippets/automationData/globals/globals.jsx || echo "") + CURRENT=$(grep -oP 'LatestRelease:\s*["'\''\"]?\K[^"'\'']+' snippets/automations/globals/globals.jsx || echo "") echo "current=${CURRENT}" >> $GITHUB_OUTPUT echo "Current version: ${CURRENT}" @@ -70,14 +74,14 @@ jobs: check-and-update: runs-on: ubuntu-latest if: steps.get_release.outputs.release != steps.current_version.outputs.current run: | # Create backup - cp snippets/automationData/globals/globals.jsx 
snippets/automationData/globals/globals.jsx.bak + cp snippets/automations/globals/globals.jsx snippets/automations/globals/globals.jsx.bak # Update the LatestRelease value - sed -i "s/LatestRelease:[[:space:]]*[\"'][^\"']*[\"']/LatestRelease: \"${{ steps.get_release.outputs.release }}\"/" snippets/automationData/globals/globals.jsx + sed -i "s/LatestRelease:[[:space:]]*[\"'][^\"']*[\"']/LatestRelease: \"${{ steps.get_release.outputs.release }}\"/" snippets/automations/globals/globals.jsx # Verify the change echo "Updated content:" - grep "LatestRelease" snippets/automationData/globals/globals.jsx + grep "LatestRelease" snippets/automations/globals/globals.jsx - name: Commit and push if changed if: steps.get_release.outputs.release != steps.current_version.outputs.current @@ -87,3 +91,4 @@ jobs: check-and-update: runs-on: ubuntu-latest git add snippets/automationData/globals/globals.jsx git commit -m "chore: update latest release to ${{ steps.get_release.outputs.release }}" git push origin docs-v2 +``` diff --git a/snippets/automations/globals/globals.jsx b/snippets/automations/globals/globals.jsx new file mode 100644 index 00000000..8440ae0f --- /dev/null +++ b/snippets/automations/globals/globals.jsx @@ -0,0 +1,3 @@ +export const latestVersion = "v0.7.7"; +export const latestVersionUrl = + "https://github.com/livepeer/go-livepeer/releases/download/v0.7.7"; diff --git a/snippets/automations/globals/globals.mdx b/snippets/automations/globals/globals.mdx new file mode 100644 index 00000000..d80520fc --- /dev/null +++ b/snippets/automations/globals/globals.mdx @@ -0,0 +1,2 @@ +export const latestVersion = "v0.7.7"; +export const latestVersionUrl = "https://github.com/livepeer/go-livepeer/releases/download/v0.7.7"; diff --git a/snippets/automations/luma/lumaEventsData.jsx b/snippets/automations/luma/lumaEventsData.jsx new file mode 100644 index 00000000..6443e8e0 --- /dev/null +++ b/snippets/automations/luma/lumaEventsData.jsx @@ -0,0 +1,187 @@ +export const 
lumaEventsData = { + lastUpdated: "2026-01-21T08:39:40.878Z", + upcoming: [ + ], + past: [ + { + title: "AI x Open Media Forum presented by Livepeer, co-curated with Refraction", + date: "November 18, 2025", + location: "https://luma.com/event/evt-KWn61dZNxwOf7tP", + url: "https://luma.com/9q0swwro" + }, + { + title: "SLC Livepeer Delegator Workshops", + date: "July 26, 2025", + location: "https://luma.com/event/evt-wRQfFL4REh1KEwm", + url: "https://luma.com/wfdaaujk" + }, + { + title: "Virtual Livepeer Delegator Workshop", + date: "July 23, 2025", + location: "https://luma.com/event/evt-j9zlkAhOTSKbtYU", + url: "https://luma.com/2si5dp2x" + }, + { + title: "Abuja Livepeer Delegator Workshop", + date: "July 19, 2025", + location: "https://luma.com/event/evt-OPud7laxPHK87V7", + url: "https://luma.com/2bl3t9jn" + }, + { + title: "Livepeer Treasury Talk 💰", + date: "July 7, 2025", + location: "https://luma.com/event/evt-1yHgJArDXMmyB3j", + url: "https://luma.com/n7rpu9wt" + }, + { + title: "The Brunch™ (Cannes) - Builder Brunch at ETHCC", + date: "July 3, 2025", + location: "https://luma.com/event/evt-eqtiphMEMwFuHdp", + url: "https://luma.com/xzbn0cxc" + }, + { + title: "Live AI Fashion Hackathon", + date: "June 10, 2025", + location: "https://luma.com/event/evt-y9bE78VDvyVyFPF", + url: "https://luma.com/tijlbvq6" + }, + { + title: "Livepeer Open Ecosystem Call", + date: "June 6, 2025", + location: "https://luma.com/event/evt-z4GLweG2CVSW81e", + url: "https://luma.com/6ckodf8u" + }, + { + title: "Livepeer Core Dev Call", + date: "May 15, 2025", + location: "https://luma.com/event/evt-wEQR5bO6XaRN3aO", + url: "https://luma.com/1nn2dunw" + }, + { + title: "Daydream Creator Sessions", + date: "May 8, 2025", + location: "https://luma.com/event/evt-PxASpZkEiflGNde", + url: "https://luma.com/5dl1e8ds" + }, + { + title: "IRL Daydream in Greenpoint with Maachew Bentley (063N13)", + date: "May 1, 2025", + location: "Ponyboy, 632 Manhattan Ave, Brooklyn, NY 11222, USA", + url: 
"https://luma.com/bl9x3zz9" + }, + { + title: "Real-Time Video AI @GenART NYU with ComfyUI & Livepeer", + date: "April 18, 2025", + location: "370 Jay St 4th floor, Brooklyn, NY 11201, USA", + url: "https://luma.com/wyvt8b4k" + }, + { + title: "Open Source AI Meetup Amsterdam", + date: "March 21, 2025", + location: "Mauritskade 57, 1092 AD Amsterdam, Netherlands", + url: "https://luma.com/zgm3iz35" + }, + { + title: "Workflow Competition: Innovating Realtime Video AI", + date: "March 17, 2025", + location: "https://luma.com/event/evt-IdZR5WmEE8NDpPC", + url: "https://luma.com/ztyb4wr4" + }, + { + title: "ComfyUI Official Meetup - Austin AI Film Fest Edition", + date: "March 14, 2025", + location: "AT&T Hotel and Conference Center, 1900 University Ave, Austin, TX 78705, USA", + url: "https://luma.com/nkiothz3" + }, + { + title: "Open Source & Creative AI: Using ComfyUI for Real Time Video AI", + date: "February 25, 2025", + location: "Code Talent, 3412 Blake St, Denver, CO 80205, USA", + url: "https://luma.com/dkuob1j4" + }, + { + title: "ComfyUI Official NYC February Meet-Up", + date: "February 19, 2025", + location: "https://luma.com/event/evt-Ho2RAER8bUJ0V9Q", + url: "https://luma.com/ettshrqa" + }, + { + title: "Real-Time Video AI @GenART NYU with ComfyUI & Livepeer", + date: "January 31, 2025", + location: "370 Jay St 4th floor, Brooklyn, NY 11201, USA", + url: "https://luma.com/cene9t4y" + }, + { + title: "ComfyUI Hacker Program Demo Day", + date: "January 31, 2025", + location: "https://luma.com/event/evt-jovMI8YYwF57G0H", + url: "https://luma.com/5fe2977r" + }, + { + title: "Whats New @Livepeer", + date: "January 29, 2025", + location: "https://luma.com/event/evt-wQHF1QiB98kQ9uW", + url: "https://luma.com/opmnkhna" + }, + { + title: "Livepeer: StreamDiffusion Workshop", + date: "January 27, 2025", + location: "https://luma.com/event/evt-KlsC8BJyisKKsAU", + url: "https://luma.com/yl91e6yy" + }, + { + title: "Weekly Water Cooler Chat", + date: "December 23, 
2024", + location: "https://luma.com/event/evt-MMk14m6djg9XwQD", + url: "https://luma.com/qpvkmiyq" + }, + { + title: "ComfyStream Contributors Workshop", + date: "December 19, 2024", + location: "https://luma.com/event/evt-e4CLbc5vMwUeH9S", + url: "https://luma.com/8lt1q50y" + }, + { + title: "AI Video Hackathon: Finale and Prizegiving", + date: "November 26, 2024", + location: "https://luma.com/event/evt-eciLN0qY3oNVRQz", + url: "https://luma.com/E0466_2889" + }, + { + title: "AI Community Research Report: ComfyUI Case Study", + date: "November 5, 2024", + location: "https://luma.com/event/evt-U8GgnmpFsM6WzYb", + url: "https://luma.com/ltaqk21p" + }, + { + title: "Livepeer AI Orchestrator Logo Generation", + date: "October 30, 2024", + location: "https://luma.com/event/evt-3fl2yqHXznPAs26", + url: "https://luma.com/5tg36ots" + }, + { + title: "AI Startup Program Demo Day", + date: "October 9, 2024", + location: "https://luma.com/event/evt-BDB36ZqZBbjwCUS", + url: "https://luma.com/mhr5reat" + }, + { + title: "LIMITLESS: TOKEN-POWERED AI", + date: "September 17, 2024", + location: "ArtScience Museum, 6 Bayfront Ave, Singapore 018974", + url: "https://luma.com/xqvgrmuv" + }, + { + title: "Happy Hour w/ Livepeer", + date: "July 11, 2024", + location: "Reset, Rue de Ligne 8, 1000 Bruxelles, Belgium", + url: "https://luma.com/j8rw4jva" + }, + { + title: "GEN VIDEO Summit - The Future of Decentralized AI Media & Streaming", + date: "May 23, 2024", + location: "NEST Schank- und Speisewirtschaft, Görlitzer Str. 
52, 10997 Berlin, Germany", + url: "https://luma.com/4ochjrc3" + } + ] +}; diff --git a/snippets/automations/scripts/n8n/Discord_Announce_to_Mintlify.json b/snippets/automations/scripts/n8n/Discord_Announce_to_Mintlify.json new file mode 100644 index 00000000..ae1bf811 --- /dev/null +++ b/snippets/automations/scripts/n8n/Discord_Announce_to_Mintlify.json @@ -0,0 +1,316 @@ +{ + "name": "Discord_Announce_to_Mintlify", + "nodes": [ + { + "parameters": { + "rule": { + "interval": [ + {} + ] + } + }, + "id": "38f769ec-ef3c-41d6-9805-81f98b0e86e6", + "name": "Schedule Trigger", + "type": "n8n-nodes-base.scheduleTrigger", + "typeVersion": 1.2, + "position": [ + -576, + -336 + ] + }, + { + "parameters": { + "resource": "message", + "operation": "getAll", + "guildId": { + "__rl": true, + "value": "={{ $json.discordServerID }}", + "mode": "id" + }, + "channelId": { + "__rl": true, + "value": "={{ $json.discordChannelID }}", + "mode": "id" + }, + "limit": 50, + "options": {} + }, + "id": "c463d2b2-caca-423a-aaa3-b1f4a80e21d8", + "name": "Get Discord Messages", + "type": "n8n-nodes-base.discord", + "typeVersion": 2, + "position": [ + -192, + -336 + ], + "webhookId": "1a6cec03-797e-4a28-b0a0-0c7d848eddb3", + "credentials": { + "discordBotApi": { + "id": "w1Jsx7w9upr3KgFD", + "name": "Discord Bot account" + } + } + }, + { + "parameters": { + "conditions": { + "options": { + "caseSensitive": true, + "leftValue": "", + "typeValidation": "strict" + }, + "conditions": [ + { + "id": "filter-recent", + "leftValue": "={{ new Date($json.timestamp).getTime() }}", + "rightValue": "={{ Date.now() - (24 * 60 * 60 * 1000) }}", + "operator": { + "type": "number", + "operation": "gt" + } + } + ], + "combinator": "and" + }, + "options": {} + }, + "id": "23a51220-2e7b-454c-886d-1dfb1701c009", + "name": "Filter Recent Messages", + "type": "n8n-nodes-base.filter", + "typeVersion": 2, + "position": [ + 16, + -336 + ] + }, + { + "parameters": { + "jsCode": "const announcements = [];\n\nfor (const 
item of $input.all()) {\n const message = item.json;\n \n // Skip non-normal messages (type 12 is channel follow notification)\n if (message.type !== 0) continue;\n \n // Extract content from message snapshots (cross-posted messages)\n let content = message.content;\n if (!content && message.message_snapshots && message.message_snapshots.length > 0) {\n content = message.message_snapshots[0].message.content;\n }\n \n // Skip if still no content\n if (!content) continue;\n \n // Get original message reference for better URL\n const originalGuildId = message.message_reference?.guild_id || message.guild_id;\n const originalChannelId = message.message_reference?.channel_id || message.channel_id;\n const originalMessageId = message.message_reference?.message_id || message.id;\n \n announcements.push({\n id: message.id,\n content: content,\n author: message.author.global_name || message.author.username,\n timestamp: message.message_snapshots && message.message_snapshots.length > 0 \n ? message.message_snapshots[0].message.timestamp \n : message.timestamp,\n url: `https://discord.com/channels/${originalGuildId}/${originalChannelId}/${originalMessageId}`,\n attachments: message.attachments || [],\n embeds: message.embeds || []\n });\n}\n\n// Sort by timestamp, newest first\nannouncements.sort((a, b) => new Date(b.timestamp) - new Date(a.timestamp));\n\nreturn [{ json: { announcements } }];" + }, + "id": "6337e61c-c39e-4db0-b742-07a99bddf5dd", + "name": "Process Announcements", + "type": "n8n-nodes-base.code", + "typeVersion": 2, + "position": [ + 224, + -336 + ] + }, + { + "parameters": { + "jsCode": "const announcements = $input.first().json.announcements;\n\n// Helper function to escape JSX content\nfunction escapeJSX(str) {\n if (!str) return '';\n return str\n .replace(/&/g, '&')\n .replace(//g, '>')\n .replace(/\"/g, '"')\n .replace(/'/g, ''')\n .replace(/\\{/g, '{')\n .replace(/\\}/g, '}');\n}\n\n// Helper function to format Discord markdown to HTML\nfunction 
formatContent(content) {\n if (!content) return '';\n \n let formatted = escapeJSX(content);\n \n // Convert Discord markdown\n formatted = formatted\n .replace(/\\*\\*(.+?)\\*\\*/g, '$1') // Bold\n .replace(/\\*(.+?)\\*/g, '$1') // Italic\n .replace(/\\n/g, '
      '); // Line breaks\n \n return formatted;\n}\n\n// Generate JSX content\nconst jsxContent = `export const DiscordAnnouncements = () => {\n const announcements = [\n${announcements.map(ann => ` {\n id: \"${ann.id}\",\n content: \"${formatContent(ann.content)}\",\n author: \"${escapeJSX(ann.author)}\",\n timestamp: \"${ann.timestamp}\",\n url: \"${ann.url}\"\n }`).join(',\\n')}\n ];\n\n return (\n
      \n
      \n

      Latest Livepeer Announcements

      \n

      From Discord

      \n
      \n
      \n {announcements.map((announcement) => (\n
      \n
      \n {announcement.author}\n \n \n
      \n \n ))}\n
      \n
      \n );\n};\n`;\n\nreturn [{ json: { content: jsxContent, announcements } }];" + }, + "id": "013f9f4a-baca-4fb0-ac7e-c51965c4f55e", + "name": "Generate JSX", + "type": "n8n-nodes-base.code", + "typeVersion": 2, + "position": [ + 416, + -336 + ] + }, + { + "parameters": { + "resource": "file", + "operation": "edit", + "owner": { + "__rl": true, + "value": "={{ $json.githubOwner }}", + "mode": "" + }, + "repository": { + "__rl": true, + "value": "={{ $json.githubRepo }}", + "mode": "" + }, + "filePath": "={{ $json.githubFilePath }}", + "fileContent": "={{ $('Generate JSX').item.json.content }}", + "commitMessage": "=commitMessage: `chore: create Discord announcements file from workflow - ${new Date().toISOString()}`", + "additionalParameters": { + "branch": { + "branch": "={{ $json.githubBranch }}" + } + } + }, + "id": "98eb0352-31ab-4a2e-b9d7-9070f459917b", + "name": "Update GitHub File", + "type": "n8n-nodes-base.github", + "typeVersion": 1, + "position": [ + 1040, + -400 + ], + "webhookId": "a1db9fa1-0d11-4d5c-89c8-28f69cbfb60e", + "credentials": { + "githubApi": { + "id": "vAAQD9gcQcGNKMOH", + "name": "Github Livepeer/docs Write Commit Token" + } + } + }, + { + "parameters": { + "assignments": { + "assignments": [ + { + "id": "github_owner", + "name": "githubOwner", + "value": "livepeer", + "type": "string" + }, + { + "id": "github_repo", + "name": "githubRepo", + "value": "docs", + "type": "string" + }, + { + "id": "github_path", + "name": "githubFilePath", + "value": "snippets/automations/discord/discordAnnouncementsData.jsx", + "type": "string" + }, + { + "id": "077c994c-4563-4210-8690-3b00fe4dba99", + "name": "githubBranch", + "value": "docs-v2-preview", + "type": "string" + }, + { + "id": "293846b3-b346-4a17-96fc-880b2917db8d", + "name": "discordServerID", + "value": "1066890817425387581", + "type": "string" + }, + { + "id": "5cf8e964-1dad-40bd-9813-1b23ecc6e10e", + "name": "discordChannelID", + "value": "1463391944746078319", + "type": "string" + } + ] 
+ }, + "options": {} + }, + "name": "Config", + "type": "n8n-nodes-base.set", + "typeVersion": 3.3, + "position": [ + -384, + -336 + ], + "id": "a0ccaed1-687b-4ac9-8f5a-50ff9d10cd21" + }, + { + "parameters": { + "mode": "combine", + "combineBy": "combineByPosition", + "options": {} + }, + "type": "n8n-nodes-base.merge", + "typeVersion": 3.2, + "position": [ + 784, + -496 + ], + "id": "ec413328-ecc7-46ed-8f7a-10cb3eb00c77", + "name": "Merge" + } + ], + "pinData": {}, + "connections": { + "Schedule Trigger": { + "main": [ + [ + { + "node": "Config", + "type": "main", + "index": 0 + } + ] + ] + }, + "Get Discord Messages": { + "main": [ + [ + { + "node": "Filter Recent Messages", + "type": "main", + "index": 0 + } + ] + ] + }, + "Filter Recent Messages": { + "main": [ + [ + { + "node": "Process Announcements", + "type": "main", + "index": 0 + } + ] + ] + }, + "Process Announcements": { + "main": [ + [ + { + "node": "Generate JSX", + "type": "main", + "index": 0 + } + ] + ] + }, + "Generate JSX": { + "main": [ + [ + { + "node": "Merge", + "type": "main", + "index": 1 + } + ] + ] + }, + "Config": { + "main": [ + [ + { + "node": "Get Discord Messages", + "type": "main", + "index": 0 + }, + { + "node": "Merge", + "type": "main", + "index": 0 + } + ] + ] + }, + "Merge": { + "main": [ + [ + { + "node": "Update GitHub File", + "type": "main", + "index": 0 + } + ] + ] + } + }, + "active": false, + "settings": { + "executionOrder": "v1" + }, + "versionId": "27728702-e1aa-40f2-877b-ba59e857eb82", + "meta": { + "templateCredsSetupCompleted": true, + "instanceId": "b77ce31e344450acd6ad20bd6cde79f46c1e07d55921483122bfba86e8350352" + }, + "id": "zmXdoAYwgqwSESAV", + "tags": [] +} \ No newline at end of file diff --git a/snippets/automations/scripts/n8n/Forum-To-Mintlify-Latest-Topics.json b/snippets/automations/scripts/n8n/Forum-To-Mintlify-Latest-Topics.json new file mode 100644 index 00000000..e6048601 --- /dev/null +++ 
b/snippets/automations/scripts/n8n/Forum-To-Mintlify-Latest-Topics.json @@ -0,0 +1,324 @@ +{ + "name": "Forum-To-Mintlify-Latest-Topics", + "nodes": [ + { + "parameters": { + "functionCode": "return items.map(item => {\n const topic = item.json;\n const first = topic.post_stream?.posts?.find(p => p.post_number === 1);\n return {\n json: {\n id: topic.id,\n title: topic.title,\n url: `https://forum.livepeer.org/t/${topic.id}`,\n authorName: first?.name || first?.username || \"Unknown\",\n authorUsername: first?.username || \"unknown\",\n body: first?.cooked || \"\",\n replyCount: topic.posts_count - 1 || 0, // Subtract 1 for original post\n createdAt: topic.created_at || first?.created_at || \"\",\n updatedAt: topic.updated_at || first?.updated_at || \"\"\n }\n };\n});" + }, + "id": "9c954e20-38a6-4f89-b661-9653e835fe49", + "name": "Extract Original Post w/ Author", + "type": "n8n-nodes-base.function", + "position": [ + 2560, + 720 + ], + "typeVersion": 1 + }, + { + "parameters": { + "resource": "file", + "operation": "edit", + "owner": { + "__rl": true, + "value": "DeveloperAlly", + "mode": "list", + "cachedResultName": "DeveloperAlly", + "cachedResultUrl": "https://github.com/DeveloperAlly" + }, + "repository": { + "__rl": true, + "value": "livepeer-automations", + "mode": "list", + "cachedResultName": "livepeer-automations", + "cachedResultUrl": "https://github.com/DeveloperAlly/livepeer-automations" + }, + "filePath": "data/forumData.jsx", + "fileContent": "={{ $json.fileContent }}", + "commitMessage": "=Update forum data - {{ $now.toISO() }}" + }, + "type": "n8n-nodes-base.github", + "typeVersion": 1.1, + "position": [ + 3712, + 848 + ], + "id": "84e56137-4d69-49a2-8ae7-d914c49776e6", + "name": "Edit a file", + "webhookId": "0a16afd5-8684-4178-bff3-e0eaea0c81bb", + "credentials": { + "githubApi": { + "id": "jjy0epl4eqPHYqlG", + "name": "GitHub account" + } + } + }, + { + "parameters": { + "mode": "combine", + "combineBy": "combineByPosition", + "options": {} + 
}, + "type": "n8n-nodes-base.merge", + "typeVersion": 3.2, + "position": [ + 2944, + 848 + ], + "id": "1c6bc37f-5c4b-4a7a-ae7e-bef6e2d02e1e", + "name": "Merge" + }, + { + "parameters": { + "functionCode": "const list = items[0].json.topic_list?.topics || [];\nreturn list.map(t => ({ json: t }));" + }, + "id": "1d258870-9cbf-4779-851d-1cd3e0d04716", + "name": "Extract All Topics", + "type": "n8n-nodes-base.function", + "position": [ + 1792, + 736 + ], + "typeVersion": 1 + }, + { + "parameters": { + "url": "https://forum.livepeer.org/latest.json", + "options": {} + }, + "id": "4b036856-5604-481a-b608-9fb47b6e3160", + "name": "Fetch Latest Topics", + "type": "n8n-nodes-base.httpRequest", + "position": [ + 1536, + 736 + ], + "typeVersion": 1 + }, + { + "parameters": { + "functionCode": "function isOldPinned(t) {\n const pinned = t.json.pinned === true || t.json.pinned_globally === true;\n if (!pinned) return false;\n const created = new Date(t.json.created_at);\n const now = new Date();\n const ageDays = (now - created) / (1000 * 60 * 60 * 24);\n return ageDays > 30;\n}\n\nlet topics = items.filter(t => !isOldPinned(t));\nconst top4 = topics.slice(0, 4);\nreturn top4;" + }, + "id": "3a2a14b5-c878-47ad-bd45-001e4d48942a", + "name": "Filter Top 4 (Exclude Old Pinned)", + "type": "n8n-nodes-base.function", + "position": [ + 2032, + 736 + ], + "typeVersion": 1 + }, + { + "parameters": { + "url": "=https://forum.livepeer.org/t/{{$json.id}}", + "options": { + "fullResponse": false + } + }, + "id": "95915c7f-1c6e-433c-817f-5c92a11b7d11", + "name": "Fetch Topic JSON", + "type": "n8n-nodes-base.httpRequest", + "position": [ + 2304, + 720 + ], + "typeVersion": 1 + }, + { + "parameters": { + "url": "=https://forum.livepeer.org/raw/{{$json.id }}/1", + "options": { + "response": { + "response": { + "responseFormat": "text" + } + } + } + }, + "type": "n8n-nodes-base.httpRequest", + "typeVersion": 4.2, + "position": [ + 2304, + 896 + ], + "id": "c04b94f3-7c8e-4c4c-8bee-3e51eca30e7b", 
+ "name": "Fetch Topic Raw" + }, + { + "parameters": { + "functionCode": "return [{ json: { topics: items.map(i => i.json) } }];" + }, + "id": "9c693984-c37c-47e7-bfea-b4cc57b3d8c2", + "name": "Aggregate Topics", + "type": "n8n-nodes-base.function", + "position": [ + 3248, + 848 + ], + "typeVersion": 1 + }, + { + "parameters": { + "triggerTimes": { + "item": [ + {} + ] + } + }, + "id": "b650689a-f9d1-4751-803e-d689e63d6a67", + "name": "Run Daily", + "type": "n8n-nodes-base.cron", + "position": [ + 1280, + 736 + ], + "typeVersion": 1 + }, + { + "parameters": { + "jsCode": "const topics = items[0].json.topics;\nconst forumData = [];\n\n// HTML cleaner function - keeps basic HTML formatting\nfunction cleanAndFormatHTML(html) {\n let cleanHTML = html;\n \n // Remove anchor navigation links\n cleanHTML = cleanHTML.replace(/]*name=\"[^\"]*\"[^>]*class=\"anchor\"[^>]*>.*?<\\/a>/g, '');\n \n // Clean up headings\n cleanHTML = cleanHTML.replace(/]*>(.*?)<\\/h1>/g, '

      $1

      ');\n cleanHTML = cleanHTML.replace(/]*>(.*?)<\\/h2>/g, '

      $1

      ');\n cleanHTML = cleanHTML.replace(/]*>(.*?)<\\/h3>/g, '
      $1
      ');\n cleanHTML = cleanHTML.replace(/]*>(.*?)<\\/h[4-6]>/g, '
      $1
      ');\n \n // Clean up images and their references\n cleanHTML = cleanHTML.replace(/]*class=\"lightbox\"[^>]*>.*?<\\/a>/g, ''); // Remove lightbox wrappers\n cleanHTML = cleanHTML.replace(/]*class=\"lightbox-wrapper\"[^>]*>.*?<\\/div>/g, ''); // Remove lightbox divs\n cleanHTML = cleanHTML.replace(/]*>/g, ''); // Remove img tags\n cleanHTML = cleanHTML.replace(/\\[!\\[.*?\\]\\(.*?\\)\\]\\(.*?\\)/g, ''); // Remove markdown image links\n cleanHTML = cleanHTML.replace(/image\\d+×\\d+\\s+[\\d.]+\\s*[KM]B/gi, ''); // Remove image size text\n \n // Keep paragraphs, lists, emphasis, code\n cleanHTML = cleanHTML.replace(/

      /g, '

      ');\n cleanHTML = cleanHTML.replace(/<\\/p>/g, '

      ');\n cleanHTML = cleanHTML.replace(/
        /g, '
          ');\n cleanHTML = cleanHTML.replace(/<\\/ul>/g, '
        ');\n cleanHTML = cleanHTML.replace(/
      • /g, '
      • ');\n cleanHTML = cleanHTML.replace(/<\\/li>/g, '
      • ');\n cleanHTML = cleanHTML.replace(/(.*?)<\\/strong>/g, '$1');\n cleanHTML = cleanHTML.replace(/(.*?)<\\/em>/g, '$1');\n cleanHTML = cleanHTML.replace(/(.*?)<\\/code>/g, '$1');\n \n // Simplify links\n cleanHTML = cleanHTML.replace(/]*href=\"([^\"]*)\"[^>]*>(.*?)<\\/a>/g, '$2');\n \n // Decode HTML entities\n cleanHTML = cleanHTML.replace(/&/g, '&');\n cleanHTML = cleanHTML.replace(/</g, '<');\n cleanHTML = cleanHTML.replace(/>/g, '>');\n cleanHTML = cleanHTML.replace(/"/g, '\"');\n cleanHTML = cleanHTML.replace(/'/g, \"'\");\n cleanHTML = cleanHTML.replace(/ /g, ' ');\n \n // Clean up whitespace\n cleanHTML = cleanHTML.replace(/\\s+/g, ' ');\n cleanHTML = cleanHTML.replace(/

        \\s*<\\/p>/g, '');\n \n cleanHTML = cleanHTML.trim();\n \n return cleanHTML;\n}\n\nfor (const t of topics) {\n // Convert to clean HTML\n const htmlContent = cleanAndFormatHTML(t.body);\n \n // Format the date nicely\n const datePosted = t.createdAt ? new Date(t.createdAt).toLocaleDateString('en-US', {\n year: 'numeric',\n month: 'short',\n day: 'numeric'\n }) : '';\n \n forumData.push({\n title: t.title,\n href: t.url,\n author: `By ${t.authorName} (@${t.authorUsername})`,\n content: htmlContent, // Clean HTML\n replyCount: t.replyCount || 0,\n datePosted: datePosted\n });\n}\n\n// Generate the JavaScript export string\nlet jsExport = 'export const forumData = [\\n';\nforumData.forEach((item, index) => {\n jsExport += ' {\\n';\n \n // Title\n jsExport += ` title: '${item.title.replace(/\\\\/g, '\\\\\\\\').replace(/'/g, \"\\\\'\")}',\\n`;\n \n // URL\n jsExport += ` href: '${item.href}',\\n`;\n \n // Author\n jsExport += ` author: '${item.author.replace(/\\\\/g, '\\\\\\\\').replace(/'/g, \"\\\\'\")}',\\n`;\n \n // Content - HTML, properly escaped for JS string\n const escapedContent = item.content\n .replace(/\\\\/g, '\\\\\\\\')\n .replace(/'/g, \"\\\\'\")\n .replace(/\"/g, '\\\\\"')\n .replace(/\\n/g, '\\\\n')\n .replace(/\\r/g, '\\\\r')\n .replace(/\\t/g, '\\\\t');\n jsExport += ` content: '${escapedContent}',\\n`;\n \n // Reply count\n jsExport += ` replyCount: ${item.replyCount},\\n`;\n \n // Date posted\n jsExport += ` datePosted: '${item.datePosted}',\\n`;\n \n jsExport += ' }';\n if (index < forumData.length - 1) {\n jsExport += ',';\n }\n jsExport += '\\n';\n});\njsExport += '];\\n';\n\nreturn [{ json: { fileContent: jsExport } }];" + }, + "type": "n8n-nodes-base.code", + "typeVersion": 2, + "position": [ + 3488, + 848 + ], + "id": "738c8086-e2fd-4815-a78a-021c0539c69d", + "name": "Build ForumData.jsx [mdx content]" + } + ], + "pinData": {}, + "connections": { + "Extract Original Post w/ Author": { + "main": [ + [ + { + "node": "Merge", + "type": 
"main", + "index": 0 + } + ] + ] + }, + "Edit a file": { + "main": [ + [] + ] + }, + "Merge": { + "main": [ + [ + { + "node": "Aggregate Topics", + "type": "main", + "index": 0 + } + ] + ] + }, + "Extract All Topics": { + "main": [ + [ + { + "node": "Filter Top 4 (Exclude Old Pinned)", + "type": "main", + "index": 0 + } + ] + ] + }, + "Fetch Latest Topics": { + "main": [ + [ + { + "node": "Extract All Topics", + "type": "main", + "index": 0 + } + ] + ] + }, + "Filter Top 4 (Exclude Old Pinned)": { + "main": [ + [ + { + "node": "Fetch Topic JSON", + "type": "main", + "index": 0 + }, + { + "node": "Fetch Topic Raw", + "type": "main", + "index": 0 + } + ] + ] + }, + "Fetch Topic JSON": { + "main": [ + [ + { + "node": "Extract Original Post w/ Author", + "type": "main", + "index": 0 + } + ] + ] + }, + "Fetch Topic Raw": { + "main": [ + [ + { + "node": "Merge", + "type": "main", + "index": 1 + } + ] + ] + }, + "Aggregate Topics": { + "main": [ + [ + { + "node": "Build ForumData.jsx [mdx content]", + "type": "main", + "index": 0 + } + ] + ] + }, + "Run Daily": { + "main": [ + [ + { + "node": "Fetch Latest Topics", + "type": "main", + "index": 0 + } + ] + ] + }, + "Build ForumData.jsx [mdx content]": { + "main": [ + [ + { + "node": "Edit a file", + "type": "main", + "index": 0 + } + ] + ] + } + }, + "active": false, + "settings": { + "executionOrder": "v1" + }, + "versionId": "dc5aba83-7f21-405f-b960-6d7ded2b952e", + "meta": { + "templateCredsSetupCompleted": true, + "instanceId": "b77ce31e344450acd6ad20bd6cde79f46c1e07d55921483122bfba86e8350352" + }, + "id": "qBcNA3S15BdUz55M", + "tags": [] +} \ No newline at end of file diff --git a/snippets/automations/scripts/n8n/Ghost-to-Mintlify.json b/snippets/automations/scripts/n8n/Ghost-to-Mintlify.json new file mode 100644 index 00000000..ebae0b6f --- /dev/null +++ b/snippets/automations/scripts/n8n/Ghost-to-Mintlify.json @@ -0,0 +1,157 @@ +{ + "name": "Ghost-to-Mintlify", + "nodes": [ + { + "parameters": { + "rule": { + 
"interval": [ + {} + ] + } + }, + "type": "n8n-nodes-base.scheduleTrigger", + "typeVersion": 1.2, + "position": [ + 0, + 0 + ], + "id": "4ba480b5-a326-4d11-92f9-5432b6246edb", + "name": "Schedule Trigger" + }, + { + "parameters": { + "url": "https://livepeer-studio.ghost.io/ghost/api/content/posts/", + "sendQuery": true, + "queryParameters": { + "parameters": [ + { + "name": "=key", + "value": "eaf54ba5c9d4ab35ce268663b0" + }, + { + "name": "limit", + "value": "4" + }, + { + "name": "include", + "value": "tags, authors" + } + ] + }, + "options": {} + }, + "type": "n8n-nodes-base.httpRequest", + "typeVersion": 4.2, + "position": [ + 208, + 0 + ], + "id": "383d6d68-71ce-424b-82b8-c1ae57655488", + "name": "HTTP Request" + }, + { + "parameters": { + "jsCode": "function safeHTML(html) {\n // Escape ONLY backticks\n return (html || \"\").replace(/`/g, \"\\\\`\");\n}\n\nfunction formatDate(iso) {\n return new Date(iso).toLocaleDateString(\"en-US\", {\n month: \"short\",\n day: \"numeric\",\n year: \"numeric\"\n });\n}\n\nconst posts = $json.posts.map(p => ({\n title: p.title,\n href: p.url,\n author: p.primary_author?.name \n ? 
`By ${p.primary_author.name}`\n : \"By Livepeer Team\",\n\n // SAFE VERSION — template literal inside code export\n content: safeHTML(p.html),\n\n datePosted: formatDate(p.published_at),\n feature_image: p.feature_image,\n excerpt: safeHTML(p.excerpt),\n reading_time: p.reading_time,\n}));\n\n\nconst js = `export const ghostData = [\n${posts.map(post => `{\n title: \\`${post.title}\\`,\n href: \\`${post.href}\\`,\n author: \\`${post.author}\\`,\n content: \\`${post.content}\\`,\n datePosted: \\`${post.datePosted}\\`,\n img: \\`${post.feature_image || \"\"}\\`,\n excerpt: \\`${post.excerpt}\\`,\n readingTime: ${post.reading_time}\n}` ).join(\",\\n\")}\n];`;\n\nreturn [{ json: { js } }];" + }, + "type": "n8n-nodes-base.code", + "typeVersion": 2, + "position": [ + 416, + 0 + ], + "id": "8df60f75-86a6-4433-a76f-5da0c1711f4f", + "name": "Format Data For Mintlify", + "alwaysOutputData": false + }, + { + "parameters": { + "resource": "file", + "operation": "edit", + "owner": { + "__rl": true, + "value": "DeveloperAlly", + "mode": "list", + "cachedResultName": "DeveloperAlly", + "cachedResultUrl": "https://github.com/DeveloperAlly" + }, + "repository": { + "__rl": true, + "value": "livepeer-automations", + "mode": "list", + "cachedResultName": "livepeer-automations", + "cachedResultUrl": "https://github.com/DeveloperAlly/livepeer-automations" + }, + "filePath": "data/ghostBlogData.jsx", + "fileContent": "={{ $json.js }}", + "commitMessage": "=Update Blog Data {{ $now.toISO() }}", + "additionalParameters": { + "branch": { + "branch": "main" + } + } + }, + "type": "n8n-nodes-base.github", + "typeVersion": 1.1, + "position": [ + 624, + 0 + ], + "id": "12da7005-8b77-44f6-bb8d-1b3cf61b2db7", + "name": "Edit a file", + "webhookId": "3002edb1-3d17-44c0-be7d-e526f4aa14ad", + "credentials": { + "githubApi": { + "id": "jjy0epl4eqPHYqlG", + "name": "GitHub account" + } + } + } + ], + "pinData": {}, + "connections": { + "Schedule Trigger": { + "main": [ + [ + { + "node": "HTTP 
Request", + "type": "main", + "index": 0 + } + ] + ] + }, + "HTTP Request": { + "main": [ + [ + { + "node": "Format Data For Mintlify", + "type": "main", + "index": 0 + } + ] + ] + }, + "Format Data For Mintlify": { + "main": [ + [ + { + "node": "Edit a file", + "type": "main", + "index": 0 + } + ] + ] + } + }, + "active": false, + "settings": { + "executionOrder": "v1" + }, + "versionId": "541aa7a4-475b-40b4-8f0e-3ab5ebfe6b98", + "meta": { + "templateCredsSetupCompleted": true, + "instanceId": "b77ce31e344450acd6ad20bd6cde79f46c1e07d55921483122bfba86e8350352" + }, + "id": "5uLNIqPAxnTXwOnE", + "tags": [] +} \ No newline at end of file diff --git a/snippets/automations/scripts/n8n/Luma-To-Mintlify.json b/snippets/automations/scripts/n8n/Luma-To-Mintlify.json new file mode 100644 index 00000000..bee02941 --- /dev/null +++ b/snippets/automations/scripts/n8n/Luma-To-Mintlify.json @@ -0,0 +1,296 @@ +{ + "name": "My workflow", + "nodes": [ + { + "parameters": { + "rule": { + "interval": [ + { + "field": "weeks" + } + ] + } + }, + "name": "Schedule Trigger", + "type": "n8n-nodes-base.scheduleTrigger", + "typeVersion": 1, + "position": [ + 64, + -96 + ], + "id": "c69f62f0-871a-49f7-870a-062016aaae16" + }, + { + "parameters": { + "url": "=https://api2.luma.com/ics/get?entity=calendar&id={{ $json.lumaCalID }}", + "options": {} + }, + "name": "Fetch iCal", + "type": "n8n-nodes-base.httpRequest", + "typeVersion": 4.1, + "position": [ + 432, + -96 + ], + "id": "2a8ebed6-0dd4-405c-af8a-c41102d2046e" + }, + { + "parameters": { + "jsCode": "const icalData = $input.item.json.data;\n\n// Extract all VEVENT blocks\nconst eventBlocks = icalData.match(/BEGIN:VEVENT[\\s\\S]*?END:VEVENT/g) || [];\n\nconst events = eventBlocks.map(block => {\n const getField = (field) => {\n const match = block.match(new RegExp(`${field}:(.*?)(?:\\n[A-Z]|\\nEND:)`, 's'));\n return match ? 
 match[1].replace(/\\n /g, '').trim() : '';\n };\n \n const parseDate = (dateStr) => {\n // Format: 20240523T090033Z\n const year = dateStr.slice(0, 4);\n const month = dateStr.slice(4, 6);\n const day = dateStr.slice(6, 8);\n const hour = dateStr.slice(9, 11);\n const min = dateStr.slice(11, 13);\n return new Date(`${year}-${month}-${day}T${hour}:${min}:00Z`);\n };\n\n const startStr = getField('DTSTART');\n const endStr = getField('DTEND');\n const summary = getField('SUMMARY');\n const description = getField('DESCRIPTION');\n const location = getField('LOCATION');\n const uid = getField('UID').split('@')[0];\n \n // Extract luma URL from description\n const lumaUrl = description.match(/https:\\/\\/luma\\.com\\/\\w+/)?.[0] || '';\n\n return {\n title: summary,\n start: parseDate(startStr),\n end: parseDate(endStr),\n description: description.split('\\n\\n')[0], // First paragraph only\n location: location,\n url: lumaUrl,\n uid: uid\n };\n});\n\n// Sort by date (newest first for display)\nconst sorted = events.sort((a, b) => b.start - a.start);\n\nreturn [{ json: { events: sorted } }];" + }, + "name": "Parse iCal", + "type": "n8n-nodes-base.code", + "typeVersion": 2, + "position": [ + 624, + -96 + ], + "id": "110949d1-b6b9-467e-bea4-7157c672a129" + }, + { + "parameters": { + "resource": "file", + "operation": "edit", + "owner": { + "__rl": true, + "value": "={{ $json.githubOwner }}", + "mode": "" + }, + "repository": { + "__rl": true, + "value": "={{ $json.githubRepo }}", + "mode": "" + }, + "filePath": "={{ $json.githubFilePath }}", + "fileContent": "={{ $json.content }}", + "commitMessage": "=Chore: Update Livepeer events from Luma - {{ $now.toISO() }}", + "additionalParameters": { + "branch": { + "branch": "={{ $json.githubBranch }}" + } + } + }, + "name": "Update GitHub", + "type": "n8n-nodes-base.github", + "typeVersion": 1, + "position": [ + 1712, + -96 + ], + "id": "6ea25cad-b0ce-46e2-8e2d-df449de5650e", + "webhookId": 
"4ad2e461-cf8b-438d-8723-03a405599e22", + "credentials": { + "githubApi": { + "id": "vAAQD9gcQcGNKMOH", + "name": "Github Livepeer/docs Write Commit Token" + } + } + }, + { + "parameters": { + "assignments": { + "assignments": [ + { + "id": "github_owner", + "name": "githubOwner", + "value": "livepeer", + "type": "string" + }, + { + "id": "github_repo", + "name": "githubRepo", + "value": "docs", + "type": "string" + }, + { + "id": "github_path", + "name": "githubFilePath", + "value": "snippets/automations/luma/lumaEventsData.jsx", + "type": "string" + }, + { + "id": "077c994c-4563-4210-8690-3b00fe4dba99", + "name": "githubBranch", + "value": "docs-v2-preview", + "type": "string" + }, + { + "id": "28db75b7-87d1-4ad7-982d-c7c114bb9386", + "name": "lumaCal", + "value": "https://api2.luma.com/ics/get?entity=calendar&id=cal-X93qV3PuUH0wq0f", + "type": "string" + }, + { + "id": "c3e05cc9-c4c2-482c-8fcb-498f68cb3839", + "name": "lumaCalID", + "value": "cal-X93qV3PuUH0wq0f", + "type": "string" + } + ] + }, + "options": {} + }, + "name": "Config", + "type": "n8n-nodes-base.set", + "typeVersion": 3.3, + "position": [ + 256, + -96 + ], + "id": "a9e89dab-422c-4952-a0dd-60a9ae45f9d1" + }, + { + "parameters": { + "mode": "combine", + "combineBy": "combineByPosition", + "options": {} + }, + "type": "n8n-nodes-base.merge", + "typeVersion": 3.2, + "position": [ + 1632, + -352 + ], + "id": "2884d9f4-db09-4594-8016-886a4833d387", + "name": "Merge", + "executeOnce": true + }, + { + "parameters": { + "jsCode": "const events = $input.item.json.events;\n\nconst now = new Date();\nconst upcoming = events.filter(e => new Date(e.start) >= now);\nconst past = events.filter(e => new Date(e.start) < now);\n\nconst formatDate = (dateStr) => {\n const date = new Date(dateStr);\n return date.toLocaleDateString('en-US', { \n year: 'numeric', \n month: 'long', \n day: 'numeric',\n hour: '2-digit',\n minute: '2-digit',\n timeZoneName: 'short'\n });\n};\n\nconst formatDateShort = (dateStr) => {\n 
const date = new Date(dateStr);\n return date.toLocaleDateString('en-US', { \n year: 'numeric', \n month: 'long', \n day: 'numeric'\n });\n};\n\nlet jsx = `export const lumaEventsData = {\\n`;\njsx += ` lastUpdated: \"${new Date().toISOString()}\",\\n`;\njsx += ` upcoming: [\\n`;\n\nupcoming.forEach((event, idx) => {\n jsx += ` {\\n`;\n jsx += ` title: \"${event.title.replace(/\"/g, '\\\\\"')}\",\\n`;\n jsx += ` date: \"${formatDate(event.start)}\",\\n`;\n jsx += ` location: \"${event.location.replace(/\"/g, '\\\\\"')}\",\\n`;\n jsx += ` url: \"${event.url}\",\\n`;\n jsx += ` description: \"${event.description.split('\\\\n\\\\n')[0].replace(/\"/g, '\\\\\"').replace(/\\n/g, ' ')}\"\\n`;\n jsx += ` }${idx < upcoming.length - 1 ? ',' : ''}\\n`;\n});\n\njsx += ` ],\\n`;\njsx += ` past: [\\n`;\n\npast.forEach((event, idx) => {\n jsx += ` {\\n`;\n jsx += ` title: \"${event.title.replace(/\"/g, '\\\\\"')}\",\\n`;\n jsx += ` date: \"${formatDateShort(event.start)}\",\\n`;\n jsx += ` location: \"${event.location.replace(/\"/g, '\\\\\"')}\",\\n`;\n jsx += ` url: \"${event.url}\"\\n`;\n jsx += ` }${idx < past.length - 1 ? ',' : ''}\\n`;\n});\n\njsx += ` ]\\n`;\njsx += `};\\n`;\n\nreturn [{ \n json: { \n content: jsx,\n filename: 'lumaEventsData.jsx'\n } \n}];" + }, + "name": "Generate JSX", + "type": "n8n-nodes-base.code", + "typeVersion": 2, + "position": [ + 1424, + -96 + ], + "id": "d2e845cd-bbab-4f8b-88d6-1a5497541fdd" + }, + { + "parameters": { + "url": "https://luma.com/livepeer", + "options": {} + }, + "type": "n8n-nodes-base.httpRequest", + "typeVersion": 4.2, + "position": [ + 832, + 160 + ], + "id": "af12e7c9-8c57-4381-aa62-29f247c4da1b", + "name": "HTTP Request", + "disabled": true + }, + { + "parameters": { + "jsCode": "const events = $input.first().json.events;\nconst html = $input.last().json.data;\n\n// Extract __NEXT_DATA__ which has event images\nconst match = html.match(/