Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
134 changes: 120 additions & 14 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -17,33 +17,139 @@ Stop drawing recursion trees by hand. Watch the [demo video](https://youtu.be/1f

## Local development

### Web
### Prerequisites

- [Node.js](https://nodejs.org/) — 20.x for the web app, 14.x for the Lambda function
- [Docker](https://www.docker.com/) for running the Lambda function
- [Yarn](https://yarnpkg.com/) (recommended) or npm

### Quick Start (Recommended)

The easiest way to run the full project locally is using the provided script:

```bash
# Clone the repository
$ git clone <repository-url>
$ cd recursion-tree-visualizer

# Install web dependencies
$ cd web && yarn install && cd ..

# Install lambda dependencies
$ cd lambda && npm install && cd ..

# Start both services (Lambda + Next.js)
$ cd web && yarn local
```

This will:
- Build and run the Lambda function on port 8080 using Docker
- Start the Next.js development server on port 3003
- Automatically configure the web app to use the local Lambda
- Clean up Docker containers when you press Ctrl+C

In the `web` directory, run:
**Access the app at:** http://localhost:3003

### Custom Ports

You can specify custom ports for both services:

```bash
# to install all dependencies
# Lambda on 8081, Web on 3004
$ cd web && yarn local -- 8081 3004

# Or using environment variables
$ cd web && LAMBDA_PORT=8081 WEB_PORT=3004 yarn local
```

### Manual Setup (Advanced)

If you prefer to run services separately:

#### 1. Lambda Function

```bash
$ cd lambda

# Install dependencies
$ npm install

# to run the app on http://localhost:3003
$ npm run start
# Build and run with Docker (detached)
$ npm run locald

# Or with custom port
$ PORT=8081 npm run locald

# Test the function
$ curl -XPOST "http://localhost:8080/2015-03-31/functions/function/invocations" \
-d '{"body":"{\"lang\":\"javascript\",\"functionData\":{\"body\":\"function fibonacci(n) { return n <= 1 ? n : fibonacci(n-1) + fibonacci(n-2); }\",\"params\":[{\"name\":\"n\",\"initialValue\":\"5\"}]},\"options\":{\"memoize\":false}}"}'
```

### Lambda
#### 2. Web Application

```bash
$ cd web

# Install dependencies
$ yarn install

# For local development (uses local Lambda)
$ NEXT_PUBLIC_USE_LOCAL_API=true yarn dev

# For production mode (uses AWS API)
$ yarn dev
```

### Environment Configuration

The web application can run in two modes:

#### Local Development Mode
- Set `NEXT_PUBLIC_USE_LOCAL_API=true`
- Uses local Lambda function via `/api/run` proxy
- Avoids CORS issues

You can use the Amazon Runtime Interface Emulator (RIE), already contained in the docker image, to test the Lambda function.
#### Production Mode (Default)
- Uses AWS Lambda endpoint: `https://c1y17h6s33.execute-api.us-east-1.amazonaws.com/production/run`
- No local setup required

In the `lambda` directory, run:
### Environment Variables

Create `web/.env.local` for custom configuration:

```env
# Use local Lambda instead of AWS
NEXT_PUBLIC_USE_LOCAL_API=true

# Local Lambda port (for API proxy)
LAMBDA_PORT=8080
```

### Troubleshooting

#### Docker Issues
```bash
# build your local image
$ docker build --tag rtv .
# Stop any running containers
$ docker stop $(docker ps -q --filter ancestor=rtv)

# create and run a container using AWS RIE as executable to emulate a server for your lambda function
$ docker run --rm -p 8080:8080 rtv
# Remove old images
$ docker rmi rtv
```

# make a http request to your function, passing event with the -d in body field (escaped json), see examples in requests.http file
$ curl -XPOST "http://localhost:8080/2015-03-31/functions/function/invocations" -d '{"body":"{}"}'
#### Port Conflicts
```bash
# Check what's using a port
$ lsof -i :8080

# Use different ports
$ cd web && yarn local -- 8081 3004
```

#### Clean Restart
```bash
# Stop all services, clean Docker, and restart
$ docker stop $(docker ps -q --filter ancestor=rtv)
$ cd web && yarn local
```

## Deploy to production
Expand Down
1 change: 1 addition & 0 deletions lambda/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
"scripts": {
"build": "npx tsc",
"local": "docker build --platform linux/arm64 -t rtv . && docker run --rm -p 8080:8080 rtv",
"locald": "docker build --platform linux/arm64 -t rtv . && docker run --rm -p ${PORT:-8080}:8080 -d rtv",
"test": "DEBUG='app:*,test:*' npx jest --config jest.config.js",
"test:cov": "npx jest --config jest.config.js --coverage",
"typesync": "npx typesync"
Expand Down
96 changes: 96 additions & 0 deletions scripts/start-local.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,96 @@
#!/bin/bash

# Starts the full local stack for the Recursion Tree Visualizer:
#   1. the Lambda function in Docker (via the AWS Runtime Interface Emulator)
#   2. the Next.js dev server, configured to proxy to the local Lambda
# Pressing Ctrl+C (or Next.js exiting) stops both services.
#
# Usage: start-local.sh [LAMBDA_PORT] [WEB_PORT]
# Ports resolve in priority order: env vars, then positional args, then defaults.

# Default ports - use env vars first, then command line args, then defaults
LAMBDA_PORT=${LAMBDA_PORT:-${1:-8080}}
WEB_PORT=${WEB_PORT:-${2:-3003}}

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

# Handles of the spawned services, so cleanup() can stop them
CONTAINER_ID=""
NEXTJS_PID=""

# Show usage if help requested
if [[ "$1" == "-h" || "$1" == "--help" ]]; then
  echo "Usage: $0 [LAMBDA_PORT] [WEB_PORT]"
  echo ""
  echo "Arguments:"
  echo "  LAMBDA_PORT    Port for Lambda RIE (default: 8080)"
  echo "  WEB_PORT       Port for Next.js dev server (default: 3003)"
  echo ""
  echo "Examples:"
  echo "  $0                              # Use default ports (8080, 3003)"
  echo "  $0 8081                         # Lambda on 8081, Web on 3003"
  echo "  $0 8081 3004                    # Lambda on 8081, Web on 3004"
  echo "  yarn local -- 8081 3004         # Via yarn with arguments"
  echo "  LAMBDA_PORT=8081 yarn local     # Via environment variables"
  exit 0
fi

# Stop both services; invoked on Ctrl+C/TERM or after Next.js exits naturally.
cleanup() {
  echo -e "\n${YELLOW}Shutting down services...${NC}"

  # Kill Next.js process if running
  if [ -n "$NEXTJS_PID" ]; then
    echo -e "${YELLOW}Stopping Next.js (PID: $NEXTJS_PID)...${NC}"
    kill "$NEXTJS_PID" 2>/dev/null
    wait "$NEXTJS_PID" 2>/dev/null
  fi

  # Stop Docker container if we have the ID
  if [ -n "$CONTAINER_ID" ]; then
    echo -e "${YELLOW}Stopping Docker container ($CONTAINER_ID)...${NC}"
    docker stop "$CONTAINER_ID" 2>/dev/null
  fi

  echo -e "${GREEN}All services stopped!${NC}"
  exit 0
}

# Set trap to catch Ctrl+C
trap cleanup SIGINT SIGTERM

echo -e "${GREEN}Starting Recursion Tree Visualizer locally...${NC}"

# Start Lambda container with custom port (runs detached via 'npm run locald')
echo -e "${YELLOW}Starting Lambda container on port $LAMBDA_PORT...${NC}"
cd ../lambda
PORT=$LAMBDA_PORT npm run locald || {
  echo -e "${RED}Failed to start Lambda container${NC}"
  exit 1
}

# Get the container ID of the most recently started rtv container
CONTAINER_ID=$(docker ps --filter "ancestor=rtv" --format "{{.ID}}" | head -1)

if [ -z "$CONTAINER_ID" ]; then
  echo -e "${RED}Could not find running Lambda container${NC}"
  exit 1
fi

echo -e "${GREEN}Lambda container started with ID: $CONTAINER_ID${NC}"

# Wait for Lambda to be ready: poll the RIE port until it accepts connections
# (previously this only printed a message without actually waiting, so Next.js
# could start proxying before the container was listening).
echo -e "${YELLOW}Waiting for Lambda to be ready...${NC}"
for _ in $(seq 1 30); do
  if curl -s -o /dev/null --max-time 1 "http://localhost:$LAMBDA_PORT"; then
    break
  fi
  sleep 1
done

# Start Next.js in background, pointed at the local Lambda via the /api/run proxy
echo -e "${YELLOW}Starting Next.js development server on port $WEB_PORT...${NC}"
cd ../web
LAMBDA_PORT=$LAMBDA_PORT NEXT_PUBLIC_USE_LOCAL_API=true yarn dev --port "$WEB_PORT" &
NEXTJS_PID=$!

echo -e "${GREEN}Services started!${NC}"
echo -e "${GREEN}Lambda RIE: http://localhost:$LAMBDA_PORT${NC}"
echo -e "${GREEN}Next.js App: http://localhost:$WEB_PORT${NC}"
echo -e "${YELLOW}Press Ctrl+C to stop all services${NC}"

# Wait for Next.js process to finish (or be killed)
wait "$NEXTJS_PID"

# If we reach here, Next.js exited naturally, so cleanup
cleanup
29 changes: 29 additions & 0 deletions web/app/api/run/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
// Port the local Lambda RIE container listens on; configurable via LAMBDA_PORT.
const LAMBDA_PORT = process.env.LAMBDA_PORT ?? '8080'
// Full RIE invoke URL; override entirely with RIE_INVOKE_URL if needed.
const RIE_INVOKE_URL =
  process.env.RIE_INVOKE_URL ??
  `http://localhost:${LAMBDA_PORT}/2015-03-31/functions/function/invocations`

// Shape of the API Gateway proxy integration result returned by the Lambda.
type ApiGatewayProxyResult = {
  statusCode: number
  body: string
  headers?: Record<string, string>
}

/**
 * Local-development proxy route: forwards the browser's request to the Lambda
 * Runtime Interface Emulator, wrapping the JSON payload in an API Gateway
 * proxy event and unwrapping the Lambda's proxy result into a plain HTTP
 * response. Going through this same-origin route avoids CORS issues.
 *
 * Returns 502 with a descriptive JSON error when the local Lambda container
 * is unreachable (previously this threw and surfaced as an opaque 500).
 */
export async function POST(request: Request): Promise<Response> {
  const requestJson: unknown = await request.json()
  // The RIE expects an API Gateway proxy event: payload goes in `body` as a string.
  const apiGatewayEvent = { body: JSON.stringify(requestJson) }

  try {
    const upstreamResponse = await fetch(RIE_INVOKE_URL, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(apiGatewayEvent),
    })

    const upstreamJson = (await upstreamResponse.json()) as ApiGatewayProxyResult

    return new Response(upstreamJson.body, {
      status: upstreamJson.statusCode,
      headers: upstreamJson.headers,
    })
  } catch (error: unknown) {
    // The local Lambda container is most likely not running; surface a clear,
    // actionable error instead of an unhandled rejection.
    const message = error instanceof Error ? error.message : String(error)
    return new Response(
      JSON.stringify({
        error: `Could not reach local Lambda at ${RIE_INVOKE_URL}: ${message}. Is the Docker container running? Try 'yarn local' or 'cd lambda && npm run locald'.`,
      }),
      { status: 502, headers: { 'Content-Type': 'application/json' } }
    )
  }
}


1 change: 1 addition & 0 deletions web/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
},
"packageManager": "yarn@1.22.22",
"scripts": {
"local": "../scripts/start-local.sh",
"dev": "next dev --port 3003",
"build": "next build",
"start": "next start",
Expand Down
10 changes: 9 additions & 1 deletion web/src/api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,18 +10,26 @@ type RequestBody = {
}
}

// Configuration for API endpoint
const USE_LOCAL_API = process.env.NEXT_PUBLIC_USE_LOCAL_API === 'true'
const PRODUCTION_API_URL = 'https://c1y17h6s33.execute-api.us-east-1.amazonaws.com/production/run'
const LOCAL_API_URL = '/api/run'

const API_ENDPOINT = USE_LOCAL_API ? LOCAL_API_URL : PRODUCTION_API_URL

export const runFunction = async (
requestBody: RequestBody
): Promise<Either<string, TreeViewerData>> => {
try {
const response = await fetch('https://c1y17h6s33.execute-api.us-east-1.amazonaws.com/production/run', {
const response = await fetch(API_ENDPOINT, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
mode: 'cors',
body: safeStringify(requestBody),
})

const responseBody = await response.text()

if (response.ok) {
Expand Down
Loading