diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml new file mode 100644 index 0000000..a9ce921 --- /dev/null +++ b/.github/workflows/deploy.yml @@ -0,0 +1,87 @@ +name: Automated Deployment + +on: + pull_request: + types: [closed] + branches: [ main ] + +jobs: + deploy: + if: github.event.pull_request.merged == true + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + + - name: Deploy to VM (Digital Ocean) + uses: appleboy/ssh-action@v1.0.0 + with: + host: ${{ secrets.VM_HOST }} + username: ${{ secrets.VM_USERNAME }} + password: ${{ secrets.VM_PASSWORD }} + + envs: DB_HOST,DB_USER,DB_PASSWORD,DB_DATABASE,DB_PORT,NEXT_PUBLIC_API_URL + + script: | + echo "*** Updating + Upgrading VM ***" + sudo apt-get update -y + yes | sudo DEBIAN_FRONTEND=noninteractive apt-get -yqq upgrade + + echo "*** Installing Docker Dependencies ***" + sudo apt install -y ca-certificates curl gnupg lsb-release + + echo "*** Add Docker GPG Key and repo ***" + sudo mkdir -p /etc/apt/keyrings + curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /etc/apt/keyrings/docker.gpg + echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null + + echo "*** Install Docker engine and compose plugin ***" + sudo apt update + sudo apt install -y docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin + + echo "*** Removing old deployment folder ***" + rm -rf ~/pixel-to-pattern + + echo "*** Cloning latest repo ***" + git clone https://github.com/${{ github.repository }}.git ~/pixel-to-pattern + + echo "*** Moving into project directory ***" + cd ~/pixel-to-pattern + + echo "*** Writing backend .env ***" + cat << EENV > .env + DB_HOST=${DB_HOST} + DB_USER=${DB_USER} + DB_PASSWORD=${DB_PASSWORD} + DB_DATABASE=${DB_DATABASE} + DB_PORT=${DB_PORT} + SERVER_PORT=3000 + EENV + + 
echo "*** Writing frontend client/.env.production ***" + mkdir -p client + cd client + cat << FRONTENV > .env.production + NEXT_PUBLIC_API_BASE_URL=${NEXT_PUBLIC_API_URL} + FRONTENV + cd .. + + echo "*** Cleaning old docker images ***" + docker image prune -f + + echo "*** Building + Deploying containers using repo's compose file ***" + docker compose -f docker-compose.deploy.yml up -d --build + + echo "*** Verifying Deployment ***" + sleep 5 + curl -fsI http://localhost:3000/health || exit 1 + + echo "*** Deployment complete. Running containers: ***" + docker ps + env: + DB_HOST: ${{ secrets.DB_HOST }} + DB_USER: ${{ secrets.DB_USER }} + DB_PASSWORD: ${{ secrets.DB_PASSWORD }} + DB_DATABASE: ${{ secrets.DB_DATABASE }} + DB_PORT: ${{ secrets.DB_PORT }} + NEXT_PUBLIC_API_URL: ${{ secrets.NEXT_PUBLIC_API_URL }} diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 0000000..e8179a8 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,152 @@ +name: Automated Testing Pipeline + +defaults: + run: + working-directory: . + +# Workflow triggers only on pull requests and pushes to main or development branches +on: + push: + branches: [ main, development ] + pull_request: + branches: [ main, development ] + +jobs: + backend-tests: + runs-on: ubuntu-latest + env: + # Test environment variables for backend tests + DB_HOST: db-test + DB_USER: testuser + DB_PASSWORD: testpass + DB_DATABASE: test_db + NODE_ENV: test + + steps: + - uses: actions/checkout@v3 + + # Debug: Validate that docker-compose.test.yml exists and loads correctly + - name: Validate compose file + run: | + echo "Listing files..." + ls -al . + echo "Validating docker-compose.test.yml..." 
+ docker compose -f docker-compose.test.yml config + + # Build the backend test containers + - name: Build test containers + run: docker compose -f docker-compose.test.yml build + + # Debug working directory + - name: Print working directory + run: pwd + + - name: List files in working directory + run: ls -al + + # Run backend unit tests via Jest + - name: Run backend unit tests + run: docker compose -f docker-compose.test.yml up backend-unit-tests --abort-on-container-exit --exit-code-from backend-unit-tests + + # Run backend integration tests for DB + API with Supertest + - name: Run backend integration tests (DB + API) + run: docker compose -f docker-compose.test.yml up db-test backend-integration --abort-on-container-exit --exit-code-from backend-integration + + # Always clean up the test containers even if the tests fail + - name: Cleanup test containers + if: always() + run: docker compose -f docker-compose.test.yml down -v || true + + + frontend-tests: + runs-on: ubuntu-latest + env: + # Test environment variables for frontend tests + NODE_ENV: test + CI: "true" + + steps: + - uses: actions/checkout@v3 + + # Debug: Validate that docker-compose.test.yml exists and loads correctly + - name: Validate compose file + run: | + echo "Listing files..." + ls -al . + echo "Validating docker-compose.test.yml..." 
+ docker compose -f docker-compose.test.yml config + + # Build the frontend test containers from docker-compose.test.yml + - name: Build test containers + run: docker compose -f docker-compose.test.yml build + + # Debug working directory + - name: Print working directory + run: pwd + + - name: List files in working directory + run: ls -al + + # Run the frontend unit tests with Jest and the React Testing Library + - name: Run frontend unit tests + run: docker compose -f docker-compose.test.yml up frontend-tests --abort-on-container-exit --exit-code-from frontend-tests + + # Always clean up the test containers even if the tests fail + - name: Cleanup + if: always() + run: docker compose -f docker-compose.test.yml down -v || true + + + e2e-tests: + runs-on: ubuntu-latest + needs: [backend-tests, frontend-tests] # Only runs the E2E tests if unit and integration tests pass + env: + # Test environment variables for E2E + DB_HOST: db-test + DB_USER: testuser + DB_PASSWORD: testpass + DB_DATABASE: test_db + NODE_ENV: test + NEXT_PUBLIC_API_BASE_URL: http://backend:3000 # required for Next.js rewrites + + steps: + - uses: actions/checkout@v3 + + # Create CI .env file so docker-compose.yml does not fail (required by env_file: .env) + - name: Create CI .env file + run: | + echo "DB_HOST=db" >> .env + echo "DB_USER=testuser" >> .env + echo "DB_PASSWORD=testpass" >> .env + echo "DB_DATABASE=test_db" >> .env + echo "NODE_ENV=test" >> .env + echo "NEXT_PUBLIC_API_BASE_URL=http://backend:3000" >> .env # <-- REQUIRED + + # Install dependencies so Cypress exists on the GitHub runner + - name: Install dependencies + run: npm install + + # Build the containers and start the full application from docker-compose.yml + - name: Build full stack + run: docker compose up -d --build + + # Wait until the backend API server is ready (no /health route needed) + - name: Wait for backend API server to be ready + run: | + echo "Waiting for backend..." 
+ timeout 60 bash -c 'until curl -s http://localhost:3000/ > /dev/null; do sleep 2; done' + + # Wait until the frontend is ready (no /health route needed) + - name: Wait for frontend + run: | + echo "Waiting for frontend..." + timeout 60 bash -c 'until curl -s http://localhost:3001/ > /dev/null; do sleep 2; done' + + # Run the end-to-end tests using Cypress and full application + - name: Run Cypress E2E tests + run: npm run cypress:run + + # Clean up the test containers after runtime + - name: Shutdown app stack + if: always() + run: docker compose down || true \ No newline at end of file diff --git a/README.md b/README.md index 6392ff3..94fc359 100644 --- a/README.md +++ b/README.md @@ -5,6 +5,20 @@ Let the creativity flow. --- +## Table of Contents + +- [Features](#features) +- [Tech Stack](#tech-stack) +- [Environment Variables](#environment-variables) +- [Running Locally with Docker Compose](#running-locally-with-docker-compose) +- [Local Setup (Non-Docker)](#local-setup-non-docker) +- [VM Setup & Automated Deployment](#vm-setup--automated-deployment) +- [Troubleshooting](#troubleshooting) +- [Manual Testing](#manual-testing) +- [Automated Testing Pipeline (GitHub Actions)](#automated-testing-pipeline-github-actions) + +--- + ## Features ### Create @@ -24,8 +38,11 @@ Browse all submitted creations and view detailed, stitch-by-stitch patterns. ### Update Users will soon be able to edit their own patterns directly. -### Delete *(Coming Soon)* -Remove any pattern you’ve posted with one click. +### Delete +Users are able to delete their own patterns directly. + +### Export to PDF +Users are able to export their pattern to a PDF document. --- @@ -34,189 +51,176 @@ Remove any pattern you’ve posted with one click. 
- **Frontend:** Next.js, Material UI, React - **Backend:** Node.js, Express - **Database:** MySQL with Sequelize ORM -- **Deployment:** Docker containers, GitHub Container Repository (GHCR) +- **Infrastructure:** Docker & Docker Compose +- **CI/CD:** GitHub Actions - **Version:** Node 24+ --- ## Environment Variables -This project utilizes environment variables for configuration. -You will need to create both a `.env` file in the root directory and a `.env.local` file in the client directory, as outlined below. +The backend uses a `.env` file in the root directory. -### Server `.env` (example.env provided in root) +### Server `.env` (example.env provided) -**Required Variables:** -- `DB_USER`: The username for the database user. -- `DB_PASSWORD`: The password for the database user. -- `DB_HOST`: The IP address for the machine running the database (use `localhost` for local development or `db` for Docker). -- `DB_DATABASE`: The name of the database to access. -- `DB_PORT`: The port number the database is running on. -- `SERVER_PORT`: The port number for the backend server (default: 3000). +``` +DB_USER= +DB_PASSWORD= +DB_HOST= +DB_DATABASE= +DB_PORT= +SERVER_PORT=3000 +``` -**Note:** The frontend uses relative URLs with Next.js rewrites to communicate with the backend. No environment variables are required for the client in Docker setups. +**Note:** +The frontend does *not* use a `.env.production` file anymore. +The API URL is injected during the Docker build step automatically. --- ## Running Locally with Docker Compose -You can run the entire Pixel to Pattern stack locally using Docker Compose. +Run the entire app locally in one command. -1. **Fork and clone** this repository: +1. Clone the repository: ```bash git clone https://github.com/AlexanderORuban/Pixel-to-Pattern.git ``` -2. Ensure Docker and Docker Compose are installed. -3. Create a `.env` file in the root directory with the necessary credentials listed above. -4. Build and start all services: +2. 
Create a `.env` file in the project root (see above). +3. Start everything: ```bash docker compose up --build ``` -5. Visit [http://localhost:3001](http://localhost:3001) to access the application. - -To stop containers: -```bash -docker compose down -``` - -### Rebuilding or Restarting Containers +4. Visit the app: + http://localhost:3001 -If you make code changes and want to rebuild the images: +### Rebuild images: ```bash docker compose build --no-cache docker compose up -d ``` -To restart containers without rebuilding: +### Restart only: ```bash docker compose restart ``` + --- + ## Local Setup (Non-Docker) -These steps apply only if you wish to run **Pixel to Pattern** manually without Docker. -1. **Fork and clone** this repository: - ```bash - git clone https://github.com/AlexanderORuban/Pixel-to-Pattern.git - ``` -2. In the root directory, create a `.env` file as described above. -3. **Install dependencies** in the root, `server/`, and `client/` directories: +Only if you want to run the backend/frontend manually. + +1. Clone the repo +2. Create your `.env` +3. Install dependencies: ```bash npm install ``` -4. Run the application from the root: +4. Start full dev environment: ```bash npm run dev ``` -5. Open your browser at [http://localhost:3001](http://localhost:3001) -6. Start creating your pattern. +5. Visit http://localhost:3001 --- -## Deployment Process +## VM Setup & Automated Deployment -### Steps +This project includes **automatic production deployment** using GitHub Actions. +When a pull request is merged into `main`, the app **auto-deploys** to the VM. -1. Create GHCR containers for both frontend and backend. -2. Log into GHCR in your terminal or code editor: - ```bash - echo "" | docker login ghcr.io -u --password-stdin - ``` -3. **Build and push the backend container:** - ```bash - docker build --no-cache -t ghcr.io//pixel-to-pattern-backend:latest ./server - docker push ghcr.io//pixel-to-pattern-backend:latest - ``` +### 1. 
First-Time VM Setup (only done once) + +SSH into the VM: +```bash +ssh root@ +``` + +Install Docker + Compose: +```bash +sudo apt-get update -y +yes | sudo DEBIAN_FRONTEND=noninteractive apt-get -yqq upgrade +sudo apt install -y ca-certificates curl gnupg lsb-release +sudo mkdir -p /etc/apt/keyrings +curl -fsSL https://download.docker.com/linux/ubuntu/gpg \ + | sudo gpg --dearmor -o /etc/apt/keyrings/docker.gpg +echo "deb [arch=$(dpkg --print-architecture) \ + signed-by=/etc/apt/keyrings/docker.gpg] \ + https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" \ + | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null +sudo apt update +sudo apt install -y docker-ce docker-ce-cli containerd.io \ + docker-buildx-plugin docker-compose-plugin +``` + +Confirm install: +```bash +docker run hello-world +``` + +After this, the VM is fully ready for auto-deployment. -4. **Build and push the frontend container:** - ```bash - docker build --no-cache -t ghcr.io//pixel-to-pattern-frontend ./client - docker push ghcr.io//pixel-to-pattern-frontend:latest - ``` --- -## VM Setup +### 2. Automated Deployment (GitHub Actions) -1. Log into your VM: - ```bash - ssh root@ - ``` -2. Update the package index: - ```bash - sudo apt-get update -y - ``` -3. Upgrade existing packages (non-interactive): - ```bash - yes | sudo DEBIAN_FRONTEND=noninteractive apt-get -yqq upgrade - ``` -4. Install Docker dependencies: - ```bash - sudo apt install -y ca-certificates curl gnupg lsb-release - ``` -5. Add the Docker GPG key and repository: - ```bash - sudo mkdir -p /etc/apt/keyrings - curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /etc/apt/keyrings/docker.gpg - echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null - ``` -6. 
Install the Docker engine and compose plugin: - ```bash - sudo apt update - sudo apt install -y docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin - ``` -7. Verify installation: - ```bash - sudo docker run hello-world - ``` - Expected output: “Hello from Docker!” -8. Create a new project directory: - ```bash - mkdir && cd - ``` -9. Add your `docker-compose.deploy.yml` file to the directory. -10. Create a `.env` file and place it at the same level as the YAML file. -11. Pull the images: - ```bash - docker compose -f docker-compose.deploy.yml pull - ``` -12. Start the application: - ```bash - docker compose -f docker-compose.deploy.yml up -d - ``` -13. Verify the containers are running: - ```bash - docker ps - ``` +Every time you merge a PR into `main`: + +✔ Latest code is pulled onto the VM +✔ Old deployment folder is removed +✔ `.env` is written using GitHub Secrets +✔ Containers are rebuilt + restarted +✔ Backend is checked with `/health` endpoint +✔ Deployment succeeds or fails in GitHub Actions + +### Required GitHub Secrets + +| Secret | Purpose | +|--------|---------| +| `VM_HOST` | VM IP | +| `VM_USERNAME` | Usually `root` | +| `VM_PASSWORD` | VM password | +| `DB_HOST` | MySQL host | +| `DB_USER` | DB user | +| `DB_PASSWORD` | DB password | +| `DB_DATABASE` | DB name | +| `DB_PORT` | DB port | +| `NEXT_PUBLIC_API_URL` | Backend URL (e.g., http://:3000) | + +### Workflow location +``` +.github/workflows/deploy.yml +``` + +You never need to SSH and deploy manually unless something breaks. --- ## Troubleshooting -**Common Docker Issues** - -- **Ports already in use:** - Stop conflicting containers or change the port in `docker-compose.yml`. - ```bash - docker ps - docker stop - ``` - -- **Environment variables not loading:** - Ensure the `.env` file is in the root directory and not ignored by `.dockerignore`. 
- -- **Containers not starting:** - Check logs for detailed errors: - ```bash - docker compose logs - ``` - -- **MySQL connection errors:** - Use `DB_HOST=db` in your `.env` when using Docker Compose. - Test with: - ```bash - docker exec -it db mysql -u root -p - ``` -## Testing +**Ports already in use** +```bash +docker ps +docker stop +``` + +**MySQL issues** +- Ensure `.env` exists on VM +- Use `DB_HOST=db` when running under Docker +Check DB logs: +```bash +docker logs db +``` + +**Frontend hitting 404s** +- Means wrong API URL was injected +- Update `NEXT_PUBLIC_API_URL` secret +- Merge another PR to redeploy + +--- + +## Manual Testing ### Run all unit and integration tests in Docker 1. *(If needed)* force Docker to build/rebuild clean docker-compose.test image: ``` @@ -263,3 +267,27 @@ npm run cypress:open 1. Select a browser to view the app in 1. Select a spec to run from the list, it will auto-run the tests anytime there are changes made to the spec ![cypress-spec-list](image.png) + +--- + +## Automated Testing Pipeline (GitHub Actions) + +Located in: +``` +.github/workflows/test.yml +``` + +Runs automatically on: +- Pushes to `main` or `development` +- PRs targeting `main` or `development` + +Runs: +- Backend unit tests +- Backend integration tests +- Frontend unit tests +- Cypress E2E tests + +Blocks merges if any suite fails. +View results in the GitHub “Actions” tab. 
+ +--- \ No newline at end of file diff --git a/client/.dockerignore b/client/.dockerignore index 06e9699..a51b717 100644 --- a/client/.dockerignore +++ b/client/.dockerignore @@ -1,14 +1,54 @@ +# Node modules node_modules + +# Next.js build output +.next +out dist build + +# Cypress output +cypress/videos +cypress/screenshots + +# Tests and coverage coverage -.tmp -.cache -.next -out -.vscode +test-results +jest.cache + +# Logs +*.log +logs/ + +# Git .git -npm-debug.log -yarn-error.log -.pnpm-store -.eslintcache \ No newline at end of file +.gitignore + +# Docker/compose files +Dockerfile + +# IDE/editor files +.vscode +.idea + +# Environment files +.env +.env.local +.env.development +.env.test +.env.production +.env.*.local + +# System files +.DS_Store +Thumbs.db + +# Misc junk or large files +*.png +*.jpg +*.jpeg +*.gif +*.mp4 +*.mov +*.csv +npm-debug.log \ No newline at end of file diff --git a/client/example.env.local b/client/example.env.local new file mode 100644 index 0000000..ea10bb3 --- /dev/null +++ b/client/example.env.local @@ -0,0 +1,2 @@ +NEXT_PUBLIC_API_BASE_URL=http://localhost:3000 or http://:3000 +PORT=3001 \ No newline at end of file diff --git a/client/next.config.mjs b/client/next.config.mjs index 03bb8d7..ec01b7d 100644 --- a/client/next.config.mjs +++ b/client/next.config.mjs @@ -1,22 +1,23 @@ /** @type {import('next').NextConfig} */ const nextConfig = { - // Proxy API routes to the backend service when the frontend receives relative requests async rewrites() { + const apiBase = process.env.NEXT_PUBLIC_API_BASE_URL; + return [ { - source: '/patterns', - destination: 'http://backend:3000/patterns', + source: "/patterns", + destination: `${apiBase}/patterns`, }, { - source: '/patterns/:id', - destination: 'http://backend:3000/patterns/:id', + source: "/patterns/:id", + destination: `${apiBase}/patterns/:id`, }, { - source: '/update/:id', - destination: 'http://backend:3000/update/:id', + source: "/update/:id", + destination: 
`${apiBase}/update/:id`, }, ]; }, }; -export default nextConfig; +export default nextConfig; \ No newline at end of file diff --git a/client/src/components/EditablePatternView.jsx b/client/src/components/EditablePatternView.jsx index a751a89..2abcb5c 100644 --- a/client/src/components/EditablePatternView.jsx +++ b/client/src/components/EditablePatternView.jsx @@ -6,9 +6,9 @@ import { useState } from "react"; import { useParams } from "next/navigation"; import PixelDisplay from "@/components/PixelDisplay"; -export default function EditablePatternView({ post, onCancel, params}) { - const { id } = useParams(); - const [formData, setFormData] = useState({ +export default function EditablePatternView({ post, onCancel, params }) { + const { id } = useParams(); + const [formData, setFormData] = useState({ pattern_ID: id, pattern_name: post.pattern_name, description: post.description, @@ -22,21 +22,21 @@ export default function EditablePatternView({ post, onCancel, params}) { setFormData((prev) => ({ ...prev, [name]: value })); }; - const handleSubmit = async(e) => { - try{ - const res = await fetch(`/update/${id}`, - { - method: 'PATCH', - headers: {"Content-Type": "application/json"}, - body: JSON.stringify(formData) - } - ) - - if(!res.ok){ - throw new Error (`PostID: ${id} was not able to be updated.`); + const handleSubmit = async (e) => { + try { + const res = await fetch(`/update/${id}`, + { + method: 'PATCH', + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(formData) } - } catch(err) { - console.error("Error updating pixel art info with ID: ", id); + ) + + if (!res.ok) { + throw new Error(`PostID: ${id} was not able to be updated.`); + } + } catch (err) { + console.error("Error updating pixel art info with ID: ", id); } onCancel(); @@ -44,33 +44,54 @@ export default function EditablePatternView({ post, onCancel, params}) { return ( - + - + - + - + - + - + + diff --git a/client/src/components/PixelForm.jsx 
b/client/src/components/PixelForm.jsx index 62006e6..1813a88 100644 --- a/client/src/components/PixelForm.jsx +++ b/client/src/components/PixelForm.jsx @@ -58,34 +58,35 @@ export default function PixelForm() { width: canvasWidth, height: canvasHeight, colorConfig: pixelFill - } + }; const formSubmissionInfo = { pattern_name: name, pattern_info: patternInfo, author: author, description: description - } + }; try { - const res = await fetch('/patterns', - { - method: "POST", - headers: { "Content-Type": "application/json" }, - body: JSON.stringify(formSubmissionInfo) - } - ) - const postID = await res.json(); + const res = await fetch('/patterns', { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(formSubmissionInfo) + }); + + const data = await res.json(); if (!res.ok) { - throw new Error(`PostID: ${postID} is not ok.`); - } else { - router.push(`/view/${postID}`); + throw new Error(`PostID: ${JSON.stringify(data)} is not ok.`); } + + const id = data.pattern_ID; + + router.push(`/view/${id}`); } catch (err) { console.log("Error submitting pixel art info ", err); } - } + }; const ClearDrawingDialog = () => { return ( @@ -140,9 +141,9 @@ export default function PixelForm() { - @@ -169,7 +170,7 @@ export default function PixelForm() { {/* Canvas Grid and pixels */} - {pixelFill.map((currentColor, i) => ( showGrid ? @@ -198,7 +200,7 @@ export default function PixelForm() { }}> : -
handlePixelEvent(i)} style={{ @@ -214,31 +216,31 @@ export default function PixelForm() { {/* Name and description*/} - setName(e.target.value)} + onChange={(e) => setName(e.target.value)} value={name} label="Name Your Pattern" /> - setAuthor(e.target.value)} + onChange={(e) => setAuthor(e.target.value)} value={author} label="Author" /> setDescription(e.target.value)} - value={description} - multiline - rows={3} + data-testid="pattern-description" + onChange={(e) => setDescription(e.target.value)} + value={description} + multiline + rows={3} sx={{ width: '50%', minWidth: '250px' }} label="Description" /> - - diff --git a/docker-compose.deploy.yml b/docker-compose.deploy.yml index 433b13c..096e8b2 100644 --- a/docker-compose.deploy.yml +++ b/docker-compose.deploy.yml @@ -1,6 +1,10 @@ services: frontend: - image: ghcr.io/auglebobaugles/pixel-to-pattern-frontend:latest + build: + context: ./client + dockerfile: Dockerfile + args: + NEXT_PUBLIC_API_BASE_URL: ${NEXT_PUBLIC_API_URL} container_name: frontend ports: - "80:3001" @@ -12,7 +16,9 @@ services: restart: unless-stopped backend: - image: ghcr.io/auglebobaugles/pixel-to-pattern-backend:latest + build: + context: ./server + dockerfile: Dockerfile container_name: backend ports: - "3000:3000" diff --git a/docker-compose.yml b/docker-compose.yml index 6c182d6..72dfc83 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,10 +1,14 @@ services: frontend: - build: ./client + build: + context: ./client + args: + NEXT_PUBLIC_API_BASE_URL: http://backend:3000 container_name: frontend ports: - "3001:3001" environment: + - NEXT_PUBLIC_API_BASE_URL=http://backend:3000 - NODE_ENV=production - PORT=3001 depends_on: diff --git a/server/.dockerignore b/server/.dockerignore new file mode 100644 index 0000000..3fc0049 --- /dev/null +++ b/server/.dockerignore @@ -0,0 +1,50 @@ +# Node dependencies +node_modules + +# Test output +coverage +test-results +*.test.js +*.spec.js + +# Logs +*.log +logs/ + +# Build artifacts +dist 
+build +.tmp +.temp + +# Git +.git +.gitignore + +# Docker/compose files +Dockerfile + +# IDE/editor files +.vscode +.idea + +# System files +.DS_Store +Thumbs.db + +# Environment files +.env +.env.* +.env.test +.env.local + +# Misc large file types +*.sqlite +*.db +*.png +*.jpg +*.jpeg +*.gif +*.mp4 +*.mov +*.csv \ No newline at end of file diff --git a/server/app.js b/server/app.js index 429cfcd..b9a6afb 100644 --- a/server/app.js +++ b/server/app.js @@ -10,18 +10,24 @@ import router from './routes/router.js'; export const app = express(); // Resolve the project root .env path -const __filename = fileURLToPath(import.meta.url); -const __dirname = path.dirname(__filename); -dotenv.config({ path: path.join(__dirname, '../.env') }); +// const __filename = fileURLToPath(import.meta.url); +// const __dirname = path.dirname(__filename); +// dotenv.config({ path: path.join(__dirname, '../.env') }); + +dotenv.config(); app.use(express.json()); app.use(cors({ origin: true, credentials: true })); app.use(express.urlencoded({ extended: true })); +app.get("/health", (req, res) => { + res.send("OK"); +}); + app.use('/', router); // Error handling app.use((err, req, res, next) => { console.error(err.stack); res.status(500).send('Something broke!'); -}); \ No newline at end of file +}); diff --git a/server/controllers/controller.js b/server/controllers/controller.js index 0c428cd..05268dd 100644 --- a/server/controllers/controller.js +++ b/server/controllers/controller.js @@ -1,73 +1,75 @@ -import {getAllPatterns, getPattern, postPattern, updatePattern} from '../models/model.js' +import { getAllPatterns, getPattern, postPattern, updatePattern } from '../models/model.js'; import { Patterns } from '../models/patterns.js'; - // CREATE - export const uploadPattern = async (req, res) => { - // expect JSON object from req body with this format: - // { pattern_name: "", pattern_info: {}, description: ""} - const pattern = req.body; + try { + const newPattern = await Patterns.create({ 
+ pattern_name: req.body.pattern_name, + pattern_info: req.body.pattern_info, + author: req.body.author, + description: req.body.description + }); - // ! TODO: add validation for name and description - try { - const patternID = await postPattern(pattern); - res.status(201).json(patternID); - } catch (err) { - return res.status(500).json({ error: err.message}); - } -} + return res.status(201).json({ pattern_ID: newPattern.pattern_ID }); + + } catch (err) { + console.error(err); + return res.status(500).json({ error: err.message }); + } +}; // READ -export const getAll = async(req, res) => { - try{ - const patterns = await getAllPatterns(); - res.status(200).json(patterns); - } catch(err){ - res.status(500).json({error: err.message}); - } -} -export const getSpecificPattern = async(req, res) => { - const patternID = req.params.id; - try{ - const pattern = await getPattern(patternID); - res.status(200).json(pattern); - } catch(err) { - res.status(500).json({error: err.message}); - } -} +export const getAll = async (req, res) => { + try { + const patterns = await getAllPatterns(); + res.status(200).json(patterns); + } catch (err) { + res.status(500).json({ error: err.message }); + } +}; + +export const getSpecificPattern = async (req, res) => { + const patternID = req.params.id; + try { + const pattern = await getPattern(patternID); + res.status(200).json(pattern); + } catch (err) { + res.status(500).json({ error: err.message }); + } +}; // UPDATE -export const updatePatternController = async(req, res) => { - const patternInfo = req.body; - const ID = patternInfo.pattern_ID; - const pattern = { - pattern_name: patternInfo.pattern_name, - pattern_author: patternInfo.author, - description: patternInfo.description - } - try{ - await updatePattern(ID, pattern); - res.status(204); - } catch (err) { - return res.status(500).json({error: err.message}); - } - -} +export const updatePatternController = async (req, res) => { + const patternInfo = req.body; + const ID = 
patternInfo.pattern_ID; + const pattern = { + pattern_name: patternInfo.pattern_name, + author: patternInfo.author, + description: patternInfo.description + }; + + try { + await updatePattern(ID, pattern); + return res.sendStatus(204); + } catch (err) { + return res.status(500).json({ error: err.message }); + } +}; // DELETE export const deletePattern = async (req, res) => { - try { - const { id } = req.params; - const deleted = await Patterns.destroy({ where: { pattern_ID: id } }); - - if (!deleted) return res.status(404).json({ message: "Pattern not found" }); - - res.status(200).json({ message: "Pattern deleted successfully" }); - } catch (error) { - res.status(500).json({ message: "Error deleting pattern", error }); + try { + const { id } = req.params; + const deleted = await Patterns.destroy({ where: { pattern_ID: id } }); + + if (!deleted) { + return res.status(404).json({ message: "Pattern not found" }); } - }; - - \ No newline at end of file + + return res.status(200).json({ message: "Pattern deleted successfully" }); + } catch (error) { + return res.status(500).json({ message: "Error deleting pattern", error }); + } +}; \ No newline at end of file diff --git a/server/models/db.js b/server/models/db.js index 674a924..9acebc1 100644 --- a/server/models/db.js +++ b/server/models/db.js @@ -9,34 +9,55 @@ const __filename = fileURLToPath(import.meta.url); const __dirname = path.dirname(__filename); dotenv.config({ path: path.join(__dirname, '../../.env'), quiet: true }); +// Environment variables const host = process.env.DB_HOST; const port = process.env.DB_PORT; const database = process.env.DB_DATABASE; const user = process.env.DB_USER; const password = process.env.DB_PASSWORD; -// Check if the database exists +async function waitForMySQL() { + const maxRetries = 20; + let retries = 0; + + while (retries < maxRetries) { + try { + const connection = await mysql.createConnection({ host, user, password }); + await connection.ping(); + await connection.end(); + + 
console.log("MySQL is ready."); + return; + } catch (err) { + retries++; + console.log(`MySQL not ready, retrying (${retries}/${maxRetries})...`); + await new Promise(res => setTimeout(res, 2000)); // 2 seconds + } + } + + throw new Error("MySQL failed to start after multiple retries."); +} + const makeDb = async () => { const connection = await mysql.createConnection({ host, user, password }); await connection.query(`CREATE DATABASE IF NOT EXISTS \`${database}\`;`); await connection.end(); - // console.log(`Database ${database} is ready.`); + console.log(`Database ${database} ready.`); }; +await waitForMySQL(); await makeDb(); -// Set up sequelize connection to database const sequelize = new Sequelize(database, user, password, { - host, - port, - dialect: 'mysql', - logging: false, - } -); + host, + port, + dialect: 'mysql', + logging: false, +}); try { await sequelize.authenticate(); - // console.log('Connected successfully'); + console.log("Sequelize connected successfully."); } catch (error) { console.error('Unable to connect:', error); }