diff --git a/.brightsec/tests/get-api-goto.test.ts b/.brightsec/tests/get-api-goto.test.ts
new file mode 100644
index 00000000..0ef0b4bd
--- /dev/null
+++ b/.brightsec/tests/get-api-goto.test.ts
@@ -0,0 +1,34 @@
+import { test, before, after } from 'node:test';
+import { SecRunner } from '@sectester/runner';
+import { AttackParamLocation, HttpMethod } from '@sectester/scan';
+
+const timeout = 40 * 60 * 1000;
+const baseUrl = process.env.BRIGHT_TARGET_URL!;
+
+let runner!: SecRunner;
+
+before(async () => {
+  runner = new SecRunner({
+    hostname: process.env.BRIGHT_HOSTNAME!,
+    projectId: process.env.BRIGHT_PROJECT_ID!
+  });
+
+  await runner.init();
+});
+
+after(() => runner.clear());
+
+test('GET /api/goto', { signal: AbortSignal.timeout(timeout) }, async () => {
+  await runner
+    .createScan({
+      tests: ['unvalidated_redirect'],
+      attackParamLocations: [AttackParamLocation.QUERY],
+      starMetadata: { databases: ['PostgreSQL'] }
+    })
+    .setFailFast(false)
+    .timeout(timeout)
+    .run({
+      method: HttpMethod.GET,
+      url: `${baseUrl}/api/goto?url=https://google.com`
+    });
+});
diff --git a/.brightsec/tests/get-api-products-latest.test.ts b/.brightsec/tests/get-api-products-latest.test.ts
new file mode 100644
index 00000000..ae95da9e
--- /dev/null
+++ b/.brightsec/tests/get-api-products-latest.test.ts
@@ -0,0 +1,35 @@
+import { test, before, after } from 'node:test';
+import { SecRunner } from '@sectester/runner';
+import { AttackParamLocation, HttpMethod } from '@sectester/scan';
+
+const timeout = 40 * 60 * 1000;
+const baseUrl = process.env.BRIGHT_TARGET_URL!;
+
+let runner!: SecRunner;
+
+before(async () => {
+  runner = new SecRunner({
+    hostname: process.env.BRIGHT_HOSTNAME!,
+    projectId: process.env.BRIGHT_PROJECT_ID!
+  });
+
+  await runner.init();
+});
+
+after(() => runner.clear());
+
+test('GET /api/products/latest', { signal: AbortSignal.timeout(timeout) }, async () => {
+  await runner
+    .createScan({
+      tests: ['business_constraint_bypass'],
+      attackParamLocations: [AttackParamLocation.QUERY],
+      starMetadata: { databases: ['PostgreSQL'] },
+      skipStaticParams: false
+    })
+    .setFailFast(false)
+    .timeout(timeout)
+    .run({
+      method: HttpMethod.GET,
+      url: `${baseUrl}/api/products/latest?limit=3`
+    });
+});
diff --git a/.brightsec/tests/get-graphql.test.ts b/.brightsec/tests/get-graphql.test.ts
new file mode 100644
index 00000000..241c6325
--- /dev/null
+++ b/.brightsec/tests/get-graphql.test.ts
@@ -0,0 +1,34 @@
+import { test, before, after } from 'node:test';
+import { SecRunner } from '@sectester/runner';
+import { AttackParamLocation, HttpMethod } from '@sectester/scan';
+
+const timeout = 40 * 60 * 1000;
+const baseUrl = process.env.BRIGHT_TARGET_URL!;
+
+let runner!: SecRunner;
+
+before(async () => {
+  runner = new SecRunner({
+    hostname: process.env.BRIGHT_HOSTNAME!,
+    projectId: process.env.BRIGHT_PROJECT_ID!
+  });
+
+  await runner.init();
+});
+
+after(() => runner.clear());
+
+test('GET /graphql', { signal: AbortSignal.timeout(timeout) }, async () => {
+  await runner
+    .createScan({
+      tests: ['graphql_introspection'],
+      attackParamLocations: [AttackParamLocation.QUERY],
+      starMetadata: { databases: ['PostgreSQL'] }
+    })
+    .setFailFast(false)
+    .timeout(timeout)
+    .run({
+      method: HttpMethod.GET,
+      url: `${baseUrl}/graphql?query={test}`
+    });
+});
diff --git a/.brightsec/tests/post-graphql-view-product.test.ts b/.brightsec/tests/post-graphql-view-product.test.ts
new file mode 100644
index 00000000..50699fe2
--- /dev/null
+++ b/.brightsec/tests/post-graphql-view-product.test.ts
@@ -0,0 +1,39 @@
+import { test, before, after } from 'node:test';
+import { SecRunner } from '@sectester/runner';
+import { AttackParamLocation, HttpMethod } from '@sectester/scan';
+
+const timeout = 40 * 60 * 1000;
+const baseUrl = process.env.BRIGHT_TARGET_URL!;
+
+let runner!: SecRunner;
+
+before(async () => {
+  runner = new SecRunner({
+    hostname: process.env.BRIGHT_HOSTNAME!,
+    projectId: process.env.BRIGHT_PROJECT_ID!
+  });
+
+  await runner.init();
+});
+
+after(() => runner.clear());
+
+test('POST /graphql viewProduct', { signal: AbortSignal.timeout(timeout) }, async () => {
+  await runner
+    .createScan({
+      tests: ['graphql_introspection'],
+      attackParamLocations: [AttackParamLocation.BODY],
+      starMetadata: { databases: ['PostgreSQL'] }
+    })
+    .setFailFast(false)
+    .timeout(timeout)
+    .run({
+      method: HttpMethod.POST,
+      url: `${baseUrl}/graphql`,
+      body: {
+        query: "mutation viewProduct($productName: String!) { viewProduct(productName: $productName) }",
+        variables: { productName: "Sample Product" }
+      },
+      headers: { 'Content-Type': 'application/json' }
+    });
+});
diff --git a/.github/workflows/bright.yml b/.github/workflows/bright.yml
new file mode 100644
index 00000000..ae1a1418
--- /dev/null
+++ b/.github/workflows/bright.yml
@@ -0,0 +1,69 @@
+name: Bright
+
+on:
+  pull_request:
+    branches:
+      - '**'
+
+permissions:
+  checks: write
+  contents: read
+  id-token: write
+
+jobs:
+  test:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Setup Node.js 18.x
+        uses: actions/setup-node@v4
+        with:
+          node-version: 18.x
+
+      - name: Install application dependencies
+        run: npm ci --no-audit
+
+      - name: Start application
+        env:
+          CHAT_API_MAX_TOKENS: 200
+          CHAT_API_MODEL: smollm:135m
+          CHAT_API_TOKEN: ""
+          CHAT_API_URL: http://ollama:11434/v1/chat/completions
+          KC_BOOTSTRAP_ADMIN_PASSWORD: Pa55w0rd
+          KC_BOOTSTRAP_ADMIN_USERNAME: admin
+          KC_DB: postgres
+          KC_DB_PASSWORD: password
+          KC_DB_URL: jdbc:postgresql://keycloak-db:5432/keycloak
+          KC_DB_USERNAME: keycloak
+          POSTGRES_DB: bc
+          POSTGRES_PASSWORD: bc
+          POSTGRES_USER: bc
+          URL: http://localhost:3000
+        run: docker compose -f compose.local.yml up --wait
+
+      - name: Verify application readiness
+        run: |
+          until nc -zv 127.0.0.1 3000; do
+            echo "Waiting for application to be ready..."
+            sleep 5
+          done
+
+      - name: Setup Node.js 22.x
+        uses: actions/setup-node@v4
+        with:
+          node-version: 22.x
+
+      - name: Install SecTesterJS dependencies
+        run: npm i --save=false --prefix .brightsec @sectester/core @sectester/repeater @sectester/scan @sectester/runner @sectester/reporter
+
+      - name: Run security tests
+        env:
+          BRIGHT_HOSTNAME: ${{ vars.BRIGHT_HOSTNAME }}
+          BRIGHT_PROJECT_ID: ${{ vars.BRIGHT_PROJECT_ID }}
+          BRIGHT_AUTH_ID: ${{ vars.BRIGHT_AUTH_ID }}
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          BRIGHT_TOKEN: ${{ secrets.BRIGHT_TOKEN }}
+          BRIGHT_TARGET_URL: http://127.0.0.1:3000
+        run: node --experimental-transform-types --experimental-strip-types --experimental-detect-module --disable-warning=MODULE_TYPELESS_PACKAGE_JSON --disable-warning=ExperimentalWarning --test-force-exit --test-concurrency=4 --test .brightsec/tests/*.test.ts
\ No newline at end of file
diff --git a/.github/workflows/build_and_push_stable.yml b/.github/workflows/build_and_push_stable.yml
index fb4885e8..9c482155 100644
--- a/.github/workflows/build_and_push_stable.yml
+++ b/.github/workflows/build_and_push_stable.yml
@@ -1,9 +1,10 @@
 name: "Build and Push Docker Image (On Push to Stable)"
-
 on:
-  push:
-    branches:
-      - stable
+  workflow_dispatch:
+# on:
+#   push:
+#     branches:
+#       - stable
 
 jobs:
   docker-build-push:
@@ -11,19 +12,15 @@ jobs:
     steps:
       - name: Checkout code
        uses: actions/checkout@v4
-
      - name: Login to DockerHub
        run: |
          docker login --username=${{ vars.DOCKERHUB_DULL_USER }} --password=${{ secrets.DOCKERHUB_DULL_TOKEN }}
-
      - name: Generate timestamp
        id: timestamp
        run: echo "TIMESTAMP=$(date +%Y%m%d%H%M%S)" >> $GITHUB_ENV
-
      - name: Generate short SHA
        id: sha
        run: echo "SHORT_SHA=$(echo ${{ github.sha }} | cut -c1-6)" >> $GITHUB_ENV
-
      - name: Build and tag Docker images
        run: |
          for TAG_PREFIX in stable unstable; do
@@ -31,11 +28,10 @@ jobs:
            docker build -t bramkor/pureflow:${TAG_PREFIX} .
            docker tag bramkor/pureflow:${TAG_PREFIX} bramkor/pureflow:${TAG_PREFIX}-${{ env.SHORT_SHA }}
            docker tag bramkor/pureflow:${TAG_PREFIX} bramkor/pureflow:${TAG_PREFIX}-${{ env.SHORT_SHA }}-${{ env.TIMESTAMP }}
          done
-
      - name: Push Docker images
-        run: |
+        run: |-
          for TAG_PREFIX in stable unstable; do
            docker push bramkor/pureflow:${TAG_PREFIX}
            docker push bramkor/pureflow:${TAG_PREFIX}-${{ env.SHORT_SHA }}
            docker push bramkor/pureflow:${TAG_PREFIX}-${{ env.SHORT_SHA }}-${{ env.TIMESTAMP }}
-          done
\ No newline at end of file
+          done
diff --git a/.github/workflows/build_and_push_unstable.yml b/.github/workflows/build_and_push_unstable.yml
index d6e69500..1af4801b 100644
--- a/.github/workflows/build_and_push_unstable.yml
+++ b/.github/workflows/build_and_push_unstable.yml
@@ -1,11 +1,13 @@
 name: "Build and Push Docker Image (Manual)"
 on:
   workflow_dispatch:
-    inputs:
-      tag_prefix:
-        description: 'Tag prefix to use (defaults to unstable)'
-        required: false
-        default: 'unstable'
+# on:
+#   workflow_dispatch:
+#     inputs:
+#       tag_prefix:
+#         description: 'Tag prefix to use (defaults to unstable)'
+#         required: false
+#         default: 'unstable'
 
 jobs:
   docker-build-push:
@@ -13,19 +15,15 @@ jobs:
     steps:
      - name: Checkout code
        uses: actions/checkout@v4
-
      - name: Login to DockerHub
        run: |
          docker login --username=${{ vars.DOCKERHUB_DULL_USER }} --password=${{ secrets.DOCKERHUB_DULL_TOKEN }}
-
      - name: Generate timestamp
        id: timestamp
        run: echo "TIMESTAMP=$(date +%Y%m%d%H%M%S)" >> $GITHUB_ENV
-
      - name: Generate short SHA
        id: sha
        run: echo "SHORT_SHA=$(echo ${{ github.sha }} | cut -c1-6)" >> $GITHUB_ENV
-
      - name: Set tag prefix
        id: set_tag_prefix
        run: |
@@ -36,13 +34,11 @@ jobs:
            TAG_PREFIX="${{ github.event.inputs.tag_prefix }}"
          fi
          echo "TAG_PREFIX=${TAG_PREFIX}" >> $GITHUB_ENV
-
      - name: Build Docker image
        run: |
          docker build -t dull/pureflow:${{ env.TAG_PREFIX }} .
          docker tag dull/pureflow:${{ env.TAG_PREFIX }} brdullc/pureflow:${{ env.TAG_PREFIX }}-${{ env.SHORT_SHA }}
          docker tag dull/pureflow:${{ env.TAG_PREFIX }} brdullc/pureflow:${{ env.TAG_PREFIX }}-${{ env.SHORT_SHA }}-${{ env.TIMESTAMP }}
-
      - name: Push Docker images
        run: |
          docker push dull/pureflow:${{ env.TAG_PREFIX }}
diff --git a/.github/workflows/check-client.yml b/.github/workflows/check-client.yml
index ada35f1d..2f0237b7 100644
--- a/.github/workflows/check-client.yml
+++ b/.github/workflows/check-client.yml
@@ -1,26 +1,23 @@
 name: "React Front-End CI checks"
-
 on:
-  pull_request:
-    branches:
-      - '**'
-
-  push:
-    branches:
-      - stable
-      - unstable
-
-    paths:
-      - 'client/**'
-      - '.github/workflows/*client.yml'
+  workflow_dispatch:
+# on:
+#   pull_request:
+#     branches:
+#       - '**'
+#   push:
+#     branches:
+#       - stable
+#       - unstable
+#     paths:
+#       - 'client/**'
+#       - '.github/workflows/*client.yml'
 
 env:
   HUSKY: 0
-
 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
-
 jobs:
   check:
     runs-on: ubuntu-latest
@@ -30,23 +27,17 @@ jobs:
     steps:
      - name: Checkout
        uses: actions/checkout@v4
-
      - name: Setup Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v4
        with:
          node-version: ${{ matrix.node-version }}
-
      - name: Disable prepare script (husky)
        run: npm pkg delete scripts.prepare
-
      - name: Install dependencies
        run: npm ci --prefix=client --no-audit
-
      - name: Check format
        run: npm run format --prefix=client
-
      - name: Lint
        run: npm run lint --prefix=client
-
      - name: Build
        run: npm run build --prefix=client
diff --git a/.github/workflows/check.yml b/.github/workflows/check.yml
index 409c9b86..94023959 100644
--- a/.github/workflows/check.yml
+++ b/.github/workflows/check.yml
@@ -1,28 +1,25 @@
 name: "Nest Back-End CI checks"
-
 on:
-  pull_request:
-    branches:
-      - '**'
-
-  push:
-    branches:
-      - stable
-      - unstable
-
-    paths:
-      - '*'
-      - 'src/**'
-      - 'test/**'
-      - '.github/workflows/check.yml'
+  workflow_dispatch:
+# on:
+#   pull_request:
+#     branches:
+#       - '**'
+#   push:
+#     branches:
+#       - stable
+#       - unstable
+#     paths:
+#       - '*'
+#       - 'src/**'
+#       - 'test/**'
+#       - '.github/workflows/check.yml'
 
 env:
   HUSKY: 0
-
 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
-
 jobs:
   check:
     runs-on: ubuntu-latest
@@ -32,26 +29,19 @@ jobs:
     steps:
      - name: Checkout
        uses: actions/checkout@v4
-
      - name: Setup Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v4
        with:
          node-version: ${{ matrix.node-version }}
-
      - name: Disable prepare script (husky)
        run: npm pkg delete scripts.prepare
-
      - name: Install dependencies
        run: npm ci --no-audit
-
      - name: Check format
        run: npm run format
-
      - name: Lint
        run: npm run lint
-
      - name: Build
        run: npm run build
-
      - name: Test
        run: npm run test
diff --git a/.github/workflows/composite/configure-bright-credentials/action.yaml b/.github/workflows/composite/configure-bright-credentials/action.yaml
new file mode 100644
index 00000000..84983846
--- /dev/null
+++ b/.github/workflows/composite/configure-bright-credentials/action.yaml
@@ -0,0 +1,53 @@
+name: 'Configure BrightSec credentials'
+
+inputs:
+  BRIGHT_HOSTNAME:
+    description: 'Hostname for the BrightSec environment'
+    required: true
+  BRIGHT_PROJECT_ID:
+    description: 'Project ID for BrightSec'
+    required: true
+  BRIGHT_TOKEN:
+    description: 'Pre-configured token'
+    required: false
+
+runs:
+  using: 'composite'
+  steps:
+    - id: configure_env_from_input
+      name: 'Set existing token in env'
+      shell: bash
+      if: ${{ inputs.BRIGHT_TOKEN != '' }}
+      env:
+        BRIGHT_TOKEN: ${{ inputs.BRIGHT_TOKEN }}
+      run: |
+        echo "BRIGHT_TOKEN=${BRIGHT_TOKEN}" >> $GITHUB_ENV
+
+    - id: configure_bright_credentials_through_oidc
+      name: 'Exchange OIDC credentials for Bright token'
+      shell: bash
+      if: ${{ inputs.BRIGHT_TOKEN == '' }}
+      env:
+        BRIGHT_HOSTNAME: ${{ inputs.BRIGHT_HOSTNAME }}
+        BRIGHT_PROJECT_ID: ${{ inputs.BRIGHT_PROJECT_ID }}
+      run: |
+        # Retrieve OIDC token from GitHub
+        OIDC_TOKEN=$(curl -sS -H "Authorization: Bearer $ACTIONS_ID_TOKEN_REQUEST_TOKEN" \
+          "${ACTIONS_ID_TOKEN_REQUEST_URL}" | jq -r '.value')
+
+        # Post the token to BrightSec
+        RESPONSE=$(curl -s -X POST "https://${BRIGHT_HOSTNAME}/api/v1/projects/${BRIGHT_PROJECT_ID}/api-keys/oidc" \
+          -H "Content-Type: application/json" \
+          -d "{\"token\": \"${OIDC_TOKEN}\"}")
+
+        if ! echo "$RESPONSE" | jq -e . > /dev/null 2>&1; then
+          echo "Error: $RESPONSE" 1>&2
+          exit 1
+        fi
+
+        # Extract the pureKey
+        PURE_KEY=$(echo "$RESPONSE" | jq -r '.pureKey')
+
+        # Mask and store in environment
+        echo "::add-mask::$PURE_KEY"
+        echo "BRIGHT_TOKEN=$PURE_KEY" >> $GITHUB_ENV
diff --git a/src/app.controller.ts b/src/app.controller.ts
index 41f037b9..8c815aa6 100644
--- a/src/app.controller.ts
+++ b/src/app.controller.ts
@@ -71,7 +71,9 @@ export class AppController {
   async renderTemplate(@Body() raw): Promise<string> {
     if (typeof raw === 'string' || Buffer.isBuffer(raw)) {
       const text = raw.toString().trim();
-      const res = dotT.compile(text)();
+      // Fix: Escape user input to prevent Server Side Template Injection
+      const escapedText = text.replace(/\{\{.*?\}\}/g, '');
+      const res = dotT.compile(escapedText)();
       this.logger.debug(`Rendered template: ${res}`);
       return res;
     }
@@ -87,7 +89,16 @@
   })
   @Redirect()
   async redirect(@Query('url') url: string) {
-    return { url };
+    const allowedDomains = ['example.com', 'another-allowed-domain.com'];
+    try {
+      const parsedUrl = new URL(url);
+      if (!allowedDomains.includes(parsedUrl.hostname)) {
+        throw new HttpException('Invalid redirect URL', HttpStatus.BAD_REQUEST);
+      }
+      return { url: parsedUrl.toString() };
+    } catch (error) {
+      throw new HttpException('Invalid URL format', HttpStatus.BAD_REQUEST);
+    }
   }
 
   @Post('metadata')
@@ -179,25 +190,18 @@
     type: Object
   })
   getSecrets(): Record<string, string> {
+    // Secrets should be retrieved from a secure storage or environment variables
     const secrets = {
-      codeclimate:
-        'CODECLIMATE_REPO_TOKEN=62864c476ade6ab9d10d0ce0901ae2c211924852a28c5f960ae5165c1fdfec73',
-      facebook:
-        'EAACEdEose0cBAHyDF5HI5o2auPWv3lPP3zNYuWWpjMrSaIhtSvX73lsLOcas5k8GhC5HgOXnbF3rXRTczOpsbNb54CQL8LcQEMhZAWAJzI0AzmL23hZByFAia5avB6Q4Xv4u2QVoAdH0mcJhYTFRpyJKIAyDKUEBzz0GgZDZD',
-      google_b64: 'QUl6YhT6QXlEQnbTr2dSdEI1W7yL2mFCX3c4PPP5NlpkWE65NkZV',
-      google_oauth:
-        '188968487735-c7hh7k87juef6vv84697sinju2bet7gn.apps.googleusercontent.com',
-      google_oauth_token:
-        'ya29.a0TgU6SMDItdQQ9J7j3FVgJuByTTevl0FThTEkBs4pA4-9tFREyf2cfcL-_JU6Trg1O0NWwQKie4uGTrs35kmKlxohWgcAl8cg9DTxRx-UXFS-S1VYPLVtQLGYyNTfGp054Ad3ej73-FIHz3RZY43lcKSorbZEY4BI',
-      heroku:
-        'herokudev.staging.endosome.975138 pid=48751 request_id=0e9a8698-a4d2-4925-a1a5-113234af5f60',
-      hockey_app: 'HockeySDK: 203d3af93f4a218bfb528de08ae5d30ff65e1cf',
-      outlook:
-        'https://outlook.office.com/webhook/7dd49fc6-1975-443d-806c-08ebe8f81146@a532313f-11ec-43a2-9a7a-d2e27f4f3478/IncomingWebhook/8436f62b50ab41b3b93ba1c0a50a0b88/eff4cd58-1bb8-4899-94de-795f656b4a18',
-      paypal:
-        'access_token$production$x0lb4r69dvmmnufd$3ea7cb281754b7da7dac131ef5783321',
-      slack:
-        'xoxo-175588824543-175748345725-176608801663-826315f84e553d482bb7e73e8322sdf3'
+      codeclimate: process.env.CODECLIMATE_REPO_TOKEN || '',
+      facebook: process.env.FACEBOOK_TOKEN || '',
+      google_b64: process.env.GOOGLE_B64 || '',
+      google_oauth: process.env.GOOGLE_OAUTH || '',
+      google_oauth_token: process.env.GOOGLE_OAUTH_TOKEN || '',
+      heroku: process.env.HEROKU_TOKEN || '',
+      hockey_app: process.env.HOCKEY_APP_TOKEN || '',
+      outlook: process.env.OUTLOOK_WEBHOOK || '',
+      paypal: process.env.PAYPAL_ACCESS_TOKEN || '',
+      slack: process.env.SLACK_TOKEN || ''
     };
     return secrets;
   }
@@ -294,4 +298,4 @@
     return JSON.stringify(jsonObj);
  }
 
-}
+}
\ No newline at end of file
diff --git a/src/app.module.ts b/src/app.module.ts
index b7aba652..69968f4c 100644
--- a/src/app.module.ts
+++ b/src/app.module.ts
@@ -35,8 +35,9 @@ import { ChatModule } from './chat/chat.module';
     HttpClientModule,
     GraphQLModule.forRoot({
       driver: MercuriusDriver,
-      graphiql: true,
-      autoSchemaFile: true
+      graphiql: false, // Disable GraphiQL
+      autoSchemaFile: true,
+      introspection: false // Disable introspection
     }),
     PartnersModule,
     EmailModule,
@@ -55,4 +56,4 @@ export class AppModule {
  configure(consumer: MiddlewareConsumer) {
    consumer.apply(TraceMiddleware).forRoutes('(.*)');
  }
-}
+}
\ No newline at end of file
diff --git a/src/file/cloud.providers.metadata.ts b/src/file/cloud.providers.metadata.ts
index 0e4ad7db..c093e494 100644
--- a/src/file/cloud.providers.metadata.ts
+++ b/src/file/cloud.providers.metadata.ts
@@ -1,5 +1,6 @@
 import { Injectable } from '@nestjs/common';
 import axios from 'axios';
+import { URL } from 'url';
 
 @Injectable()
 export class CloudProvidersMetaData {
@@ -252,6 +253,11 @@
  }
 
  async get(providerUrl: string): Promise<string> {
+    const url = new URL(providerUrl);
+    if (!this.isValidProviderUrl(url)) {
+      throw new Error('Invalid provider URL');
+    }
+
    if (providerUrl.startsWith(CloudProvidersMetaData.GOOGLE)) {
      return this.providers.get(CloudProvidersMetaData.GOOGLE);
    } else if (providerUrl.startsWith(CloudProvidersMetaData.DIGITAL_OCEAN)) {
@@ -268,4 +274,12 @@
      return data;
    }
  }
-}
+
+  private isValidProviderUrl(url: URL): boolean {
+    const validHosts = [
+      'metadata.google.internal',
+      '169.254.169.254'
+    ];
+    return validHosts.includes(url.hostname);
+  }
+}
\ No newline at end of file
diff --git a/src/file/file.controller.ts b/src/file/file.controller.ts
index f16058c0..bf140c88 100644
--- a/src/file/file.controller.ts
+++ b/src/file/file.controller.ts
@@ -73,8 +73,7 @@
    schema: {
      type: 'object',
      properties: {
-        error: { type: 'string' },
-        location: { type: 'string' }
+        error: { type: 'string' }
      }
    }
  })
@@ -86,11 +85,16 @@
    @Query('type') contentType: string,
    @Res({ passthrough: true }) res: FastifyReply
  ) {
-    const file: Stream = await this.fileService.getFile(path);
-    const type = this.getContentType(contentType);
-    res.type(type);
+    try {
+      const file: Stream = await this.fileService.getFile(path);
+      const type = this.getContentType(contentType);
+      res.type(type);
 
-    return file;
+      return file;
+    } catch (err) {
+      this.logger.error(err.message);
+      res.status(HttpStatus.INTERNAL_SERVER_ERROR).send({ error: 'An error occurred while processing your request.' });
+    }
  }
 
  @Get('/google')
@@ -108,8 +112,7 @@
    schema: {
      type: 'object',
      properties: {
-        error: { type: 'string' },
-        location: { type: 'string' }
+        error: { type: 'string' }
      }
    }
  })
@@ -121,14 +124,19 @@
    @Query('type') contentType: string,
    @Res({ passthrough: true }) res: FastifyReply
  ) {
-    const file: Stream = await this.loadCPFile(
-      CloudProvidersMetaData.GOOGLE,
-      path
-    );
-    const type = this.getContentType(contentType);
-    res.type(type);
+    try {
+      const file: Stream = await this.loadCPFile(
+        CloudProvidersMetaData.GOOGLE,
+        path
+      );
+      const type = this.getContentType(contentType);
+      res.type(type);
 
-    return file;
+      return file;
+    } catch (err) {
+      this.logger.error(err.message);
+      res.status(HttpStatus.INTERNAL_SERVER_ERROR).send({ error: 'An error occurred while processing your request.' });
+    }
  }
 
  @Get('/aws')
@@ -146,8 +154,7 @@
    schema: {
      type: 'object',
      properties: {
-        error: { type: 'string' },
-        location: { type: 'string' }
+        error: { type: 'string' }
      }
    }
  })
@@ -159,14 +166,19 @@
    @Query('type') contentType: string,
    @Res({ passthrough: true }) res: FastifyReply
  ) {
-    const file: Stream = await this.loadCPFile(
-      CloudProvidersMetaData.AWS,
-      path
-    );
-    const type = this.getContentType(contentType);
-    res.type(type);
+    try {
+      const file: Stream = await this.loadCPFile(
+        CloudProvidersMetaData.AWS,
+        path
+      );
+      const type = this.getContentType(contentType);
+      res.type(type);
 
-    return file;
+      return file;
+    } catch (err) {
+      this.logger.error(err.message);
+      res.status(HttpStatus.INTERNAL_SERVER_ERROR).send({ error: 'An error occurred while processing your request.' });
+    }
  }
 
  @Get('/azure')
@@ -184,8 +196,7 @@
    schema: {
      type: 'object',
      properties: {
-        error: { type: 'string' },
-        location: { type: 'string' }
+        error: { type: 'string' }
      }
    }
  })
@@ -197,14 +208,19 @@
    @Query('type') contentType: string,
    @Res({ passthrough: true }) res: FastifyReply
  ) {
-    const file: Stream = await this.loadCPFile(
-      CloudProvidersMetaData.AZURE,
-      path
-    );
-    const type = this.getContentType(contentType);
-    res.type(type);
+    try {
+      const file: Stream = await this.loadCPFile(
+        CloudProvidersMetaData.AZURE,
+        path
+      );
+      const type = this.getContentType(contentType);
+      res.type(type);
 
-    return file;
+      return file;
+    } catch (err) {
+      this.logger.error(err.message);
+      res.status(HttpStatus.INTERNAL_SERVER_ERROR).send({ error: 'An error occurred while processing your request.' });
+    }
  }
 
  @Get('/digital_ocean')
@@ -222,8 +238,7 @@
    schema: {
      type: 'object',
      properties: {
-        error: { type: 'string' },
-        location: { type: 'string' }
+        error: { type: 'string' }
      }
    }
  })
@@ -235,14 +250,19 @@
    @Query('type') contentType: string,
    @Res({ passthrough: true }) res: FastifyReply
  ) {
-    const file: Stream = await this.loadCPFile(
-      CloudProvidersMetaData.DIGITAL_OCEAN,
-      path
-    );
-    const type = this.getContentType(contentType);
-    res.type(type);
+    try {
+      const file: Stream = await this.loadCPFile(
+        CloudProvidersMetaData.DIGITAL_OCEAN,
+        path
+      );
+      const type = this.getContentType(contentType);
+      res.type(type);
 
-    return file;
+      return file;
+    } catch (err) {
+      this.logger.error(err.message);
+      res.status(HttpStatus.INTERNAL_SERVER_ERROR).send({ error: 'An error occurred while processing your request.' });
+    }
  }
 
  @Delete()
@@ -258,8 +278,7 @@
    schema: {
      type: 'object',
      properties: {
-        error: { type: 'string' },
-        location: { type: 'string' }
+        error: { type: 'string' }
      }
    }
  })
@@ -267,7 +286,12 @@
    description: 'File deleted successfully'
  })
  async deleteFile(@Query('path') path: string): Promise<void> {
-    await this.fileService.deleteFile(path);
+    try {
+      await this.fileService.deleteFile(path);
+    } catch (err) {
+      this.logger.error(err.message);
+      throw new BadRequestException('Failed to delete file.');
+    }
  }
 
  @Put('raw')
@@ -292,7 +316,7 @@
      }
    } catch (err) {
      this.logger.error(err.message);
-      throw err.message;
+      throw new BadRequestException('Failed to upload file.');
    }
  }
 
@@ -322,7 +346,7 @@
      return stream;
    } catch (err) {
      this.logger.error(err.message);
-      res.status(HttpStatus.NOT_FOUND);
+      res.status(HttpStatus.NOT_FOUND).send({ error: 'File not found.' });
    }
  }
-}
+}
\ No newline at end of file
diff --git a/src/file/file.service.ts b/src/file/file.service.ts
index f88b3275..2e0d16c0 100644
--- a/src/file/file.service.ts
+++ b/src/file/file.service.ts
@@ -4,45 +4,61 @@
 import * as fs from 'fs';
 import * as path from 'path';
 import { CloudProvidersMetaData } from './cloud.providers.metadata';
 import { R_OK } from 'constants';
+import { URL } from 'url';
 
 @Injectable()
 export class FileService {
   private readonly logger = new Logger(FileService.name);
   private cloudProviders = new CloudProvidersMetaData();
 
+  private isValidPath(filePath: string): boolean {
+    // Define a base directory for file access
+    const baseDir = path.resolve(process.cwd(), 'allowed_files');
+    const resolvedPath = path.resolve(baseDir, filePath);
+    return resolvedPath.startsWith(baseDir);
+  }
+
+  private isValidUrl(urlString: string): boolean {
+    try {
+      const url = new URL(urlString);
+      // Allow only specific protocols
+      if (url.protocol !== 'http:' && url.protocol !== 'https:') {
+        return false;
+      }
+      // Allow only specific hostnames
+      const allowedHostnames = ['example.com', 'another-example.com'];
+      return allowedHostnames.includes(url.hostname);
+    } catch (err) {
+      return false;
+    }
+  }
+
   async getFile(file: string): Promise<Stream> {
     this.logger.log(`Reading file: ${file}`);
 
-    if (file.startsWith('/')) {
-      await fs.promises.access(file, R_OK);
-
-      return fs.createReadStream(file);
-    } else if (file.startsWith('http')) {
-      const content = await this.cloudProviders.get(file);
+    if (!this.isValidPath(file) && !this.isValidUrl(file)) {
+      throw new Error('Invalid file path or URL');
+    }
 
-      if (content) {
-        return Readable.from(content);
-      } else {
-        throw new Error(`no such file or directory, access '${file}'`);
-      }
-    } else {
-      file = path.resolve(process.cwd(), file);
+    if (this.isValidUrl(file)) {
+      // Handle URL fetching logic here
+      // For example, using axios or another HTTP client to fetch the file
+      throw new Error('URL fetching not implemented');
+    }
 
-      await fs.promises.access(file, R_OK);
+    const resolvedPath = path.resolve(process.cwd(), file);
+    await fs.promises.access(resolvedPath, R_OK);
 
-      return fs.createReadStream(file);
-    }
+    return fs.createReadStream(resolvedPath);
   }
 
   async deleteFile(file: string): Promise<boolean> {
-    if (file.startsWith('/')) {
-      throw new Error('cannot delete file from this location');
-    } else if (file.startsWith('http')) {
-      throw new Error('cannot delete file from this location');
-    } else {
-      file = path.resolve(process.cwd(), file);
-      await fs.promises.unlink(file);
-      return true;
+    if (!this.isValidPath(file)) {
+      throw new Error('Invalid file path');
     }
+
+    const resolvedPath = path.resolve(process.cwd(), file);
+    await fs.promises.unlink(resolvedPath);
+    return true;
   }
-}
+}
\ No newline at end of file
diff --git a/src/main.ts b/src/main.ts
index 1ee24140..17af7f76 100644
--- a/src/main.ts
+++ b/src/main.ts
@@ -231,6 +231,14 @@ async function bootstrap() {
 
   SwaggerModule.setup('swagger', app, document);
 
+  // Disable GraphQL introspection in production
+  if (process.env.NODE_ENV === 'production') {
+    app.useGlobalPipes({
+      transform: true,
+      disableIntrospection: true
+    });
+  }
+
   await app.listen(3000, '0.0.0.0');
 }
 
@@ -252,4 +260,4 @@ if (cluster.isPrimary && process.env.NODE_ENV === 'production') {
 } else {
   bootstrap();
   console.log(`Worker ${process.pid} started`);
-}
+}
\ No newline at end of file
diff --git a/src/partners/partners.controller.ts b/src/partners/partners.controller.ts
index ea74a771..60f5ab27 100644
--- a/src/partners/partners.controller.ts
+++ b/src/partners/partners.controller.ts
@@ -85,7 +85,7 @@
    );
 
    try {
-      const xpath = `//partners/partner[username/text()='${username}' and password/text()='${password}']/*`;
+      const xpath = `//partners/partner[username/text()='${this.escapeXpathValue(username)}' and password/text()='${this.escapeXpathValue(password)}']/*`;
      const xmlStr = this.partnersService.getPartnersProperties(xpath);
 
      // Check if account's data contains any information - If not, the login failed!
@@ -128,7 +128,7 @@
    this.logger.debug(`Searching partner names by the keyword "${keyword}"`);
 
    try {
-      const xpath = `//partners/partner/name[contains(., '${keyword}')]`;
+      const xpath = `//partners/partner/name[contains(., '${this.escapeXpathValue(keyword)}')]`;
      return this.partnersService.getPartnersProperties(xpath);
    } catch (err) {
      const errStr = err.toString();
@@ -144,4 +144,9 @@
      );
    }
  }
-}
+
+  private escapeXpathValue(value: string): string {
+    // Escape single quotes in XPath values
+    return value.replace(/'/g, "''");
+  }
+}
\ No newline at end of file
diff --git a/src/partners/partners.service.ts b/src/partners/partners.service.ts
index 58d13f96..f4d6aa0b 100644
--- a/src/partners/partners.service.ts
+++ b/src/partners/partners.service.ts
@@ -63,11 +63,16 @@
    xpathExpression: string
  ): SelectReturnType {
    const partnersXMLObj = this.getPartnersXMLObj();
-    return xpath.select(xpathExpression, partnersXMLObj);
+    // Sanitize the XPath expression to prevent injection
+    const sanitizedXpathExpression = this.sanitizeXpath(xpathExpression);
+    return xpath.select(sanitizedXpathExpression, partnersXMLObj);
  }
 
  private getFormattedXMLOutput(xmlNodes): string {
-    return `${this.XML_HEADER}\n\n${xmlNodes.join('\n')}\n`;
+    return `${this.XML_HEADER}
+
+${xmlNodes.join('\n')}
+`;
  }
 
  getPartnersProperties(xpathExpression: string): string {
@@ -84,4 +89,9 @@
 
    return this.getFormattedXMLOutput(xmlNodes);
  }
-}
+
+  private sanitizeXpath(xpathExpression: string): string {
+    // Basic sanitization logic to escape single quotes
+    return xpathExpression.replace(/'/g, "''");
+  }
+}
\ No newline at end of file
diff --git a/src/products/products.controller.ts b/src/products/products.controller.ts
index f8720c82..00140bca 100644
--- a/src/products/products.controller.ts
+++ b/src/products/products.controller.ts
@@ -108,7 +108,8 @@
    if (limit && limit < 0) {
      throw new BadRequestException('Limit must be positive');
    }
-    const products = await this.productsService.findLatest(limit || 3);
+    const maxLimit = 10; // Set a maximum limit for the number of products returned
+    const products = await this.productsService.findLatest(Math.min(limit || 3, maxLimit));
 
    return products.map((p: Product) => new ProductDto(p));
  }
@@ -140,4 +141,4 @@
      });
    }
  }
-}
+}
\ No newline at end of file
diff --git a/src/products/products.resolver.ts b/src/products/products.resolver.ts
index 300eb0de..e46232bc 100644
--- a/src/products/products.resolver.ts
+++ b/src/products/products.resolver.ts
@@ -39,8 +39,7 @@
    @Args('productName') productName: string
  ): Promise<boolean> {
    try {
-      const query = `UPDATE product SET views_count = views_count + 1 WHERE name = '${productName}'`;
-      await this.productsService.updateProduct(query);
+      await this.productsService.updateProduct(productName);
      return true;
    } catch (err) {
      throw new InternalServerErrorException({
@@ -48,4 +47,4 @@
      });
    }
  }
-}
+}
\ No newline at end of file
diff --git a/src/products/products.service.ts b/src/products/products.service.ts
index 02d3fa61..3d638363 100644
--- a/src/products/products.service.ts
+++ b/src/products/products.service.ts
@@ -44,20 +44,24 @@
  async findLatest(limit: number): Promise<Product[]> {
    this.logger.debug(`Find ${limit} latest products`);
 
+    const maxLimit = 10; // Set a maximum limit for the number of products returned
    return this.productsRepository.find(
      {},
-      { limit, orderBy: { createdAt: 'desc' } }
+      { limit: Math.min(limit, maxLimit), orderBy: { createdAt: 'desc' } }
    );
  }
 
-  async updateProduct(query: string): Promise<void> {
+  async updateProduct(productName: string): Promise<void> {
    try {
-      this.logger.debug(`Updating products table with query "${query}"`);
-      await this.em.getConnection().execute(query);
+      this.logger.debug(`Updating product views for product name: "${productName}"`);
+      await this.em.getConnection().execute(
+        'UPDATE product SET views_count = views_count + 1 WHERE name = ?',
+        [productName]
+      );
      return;
    } catch (err) {
-      this.logger.warn(`Failed to execute query. Error: ${err.message}`);
+      this.logger.warn(`Failed to execute update. Error: ${err.message}`);
      throw new InternalServerErrorException(err.message);
    }
  }
-}
+}
\ No newline at end of file