From ea9fa3aac322910a7fde0a5c8f055eec82625847 Mon Sep 17 00:00:00 2001 From: need4deed Date: Thu, 16 Apr 2026 14:29:33 +0300 Subject: [PATCH 01/10] chore: add src/data/migrations/1776335627281-add-volunteer-notion-id.ts --- .../1776335627281-add-volunteer-notion-id.ts | 41 +++++++++++++++++++ 1 file changed, 41 insertions(+) create mode 100644 src/data/migrations/1776335627281-add-volunteer-notion-id.ts diff --git a/src/data/migrations/1776335627281-add-volunteer-notion-id.ts b/src/data/migrations/1776335627281-add-volunteer-notion-id.ts new file mode 100644 index 0000000..dff5068 --- /dev/null +++ b/src/data/migrations/1776335627281-add-volunteer-notion-id.ts @@ -0,0 +1,41 @@ +import { MigrationInterface, QueryRunner } from "typeorm"; + +export class AddVolunteerNotionId1776335627281 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `ALTER TABLE volunteer ADD COLUMN IF NOT EXISTS notion_id VARCHAR(50)`, + ); + + await queryRunner.query( + `CREATE INDEX IF NOT EXISTS idx_volunteer_notion_id ON volunteer (notion_id)`, + ); + + // Partial backfill: volunteers that have at least one opportunity link in + // notion_relation can be resolved immediately without any external call. + // Volunteers with no opportunity links are backfilled by the + // migrate-notion script, which uses the seed JSON (nid + email). 
+ await queryRunner.query(` + UPDATE volunteer v + SET notion_id = sub.tenant_nid + FROM ( + SELECT DISTINCT ON (ov.volunteer_id) + ov.volunteer_id, + nr.tenant_nid + FROM notion_relation nr + JOIN opportunity_volunteer ov ON ov.opportunity_id = nr.host_id + WHERE nr.tenant_type = 'volunteer' + AND nr.host_type = 'opportunity' + ORDER BY ov.volunteer_id + ) sub + WHERE v.id = sub.volunteer_id + AND v.notion_id IS NULL + `); + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP INDEX IF EXISTS idx_volunteer_notion_id`); + await queryRunner.query( + `ALTER TABLE volunteer DROP COLUMN IF EXISTS notion_id`, + ); + } +} From 5826af6ad82602dd78f6deac88c6882e27cccf4e Mon Sep 17 00:00:00 2001 From: need4deed Date: Thu, 16 Apr 2026 14:30:19 +0300 Subject: [PATCH 02/10] chore: update src/data/entity/volunteer/volunteer.entity.ts --- src/data/entity/volunteer/volunteer.entity.ts | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/data/entity/volunteer/volunteer.entity.ts b/src/data/entity/volunteer/volunteer.entity.ts index a9dc85a..bde3538 100644 --- a/src/data/entity/volunteer/volunteer.entity.ts +++ b/src/data/entity/volunteer/volunteer.entity.ts @@ -37,6 +37,11 @@ export default class Volunteer { @PrimaryGeneratedColumn() id: number; + @Column({ nullable: true }) + @IsOptional() + @IsString() + notionId: string; + @Column({ nullable: true }) @IsOptional() @IsString() From 15e0461f1f164bc9fb6ee1a51dd684d37e3fde4f Mon Sep 17 00:00:00 2001 From: need4deed Date: Thu, 16 Apr 2026 14:30:21 +0300 Subject: [PATCH 03/10] chore: update src/data/seeds/volunteer.seed.ts --- src/data/seeds/volunteer.seed.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/src/data/seeds/volunteer.seed.ts b/src/data/seeds/volunteer.seed.ts index 949a416..6f3a232 100644 --- a/src/data/seeds/volunteer.seed.ts +++ b/src/data/seeds/volunteer.seed.ts @@ -45,6 +45,7 @@ export async function seedVolunteers(dataSource: DataSource): Promise { statusVaccination: 
getDocumentStatus(volunteer.statusVaccination), infoAbout: volunteer.infoAbout || "", infoExperience: volunteer.infoExperience || "", + notionId: volunteer.nid || null, createdAt: new Date(volunteer.timestamp), updatedAt: new Date(volunteer.timestamp), person, From 3f745f0d5f92164de15571ba94d06ead2e5af277 Mon Sep 17 00:00:00 2001 From: need4deed Date: Thu, 16 Apr 2026 14:30:24 +0300 Subject: [PATCH 04/10] chore: add src/data/seeds/migrate-notion.ts --- src/data/seeds/migrate-notion.ts | 358 +++++++++++++++++++++++++++++++ 1 file changed, 358 insertions(+) create mode 100644 src/data/seeds/migrate-notion.ts diff --git a/src/data/seeds/migrate-notion.ts b/src/data/seeds/migrate-notion.ts new file mode 100644 index 0000000..be1532e --- /dev/null +++ b/src/data/seeds/migrate-notion.ts @@ -0,0 +1,358 @@ +/** + * Notion → BE data migration script. + * + * Migrates four fields for every volunteer found in a Notion CSV export: + * 1. Engagement status (Active, Available, Inactive, Unresponsive, …) + * 2. Opportunity matching (PENDING → MATCHED / ACTIVE on OpportunityVolunteer) + * 3. Appreciation records (T-shirt, Certificate, …) + * 4. Comments (coordinator notes + volunteer personal notes) + * + * The script is idempotent — safe to run multiple times and for different + * engagement-status slices (active, inactive, unresponsive, etc.). + * + * Usage: + * 1. Convert the Notion CSV: + * python3 scripts/csv-to-json.py migration.json + * 2. 
Run this script: + * yarn migrate:notion migration.json + */ + +import * as fs from "fs"; +import "reflect-metadata"; +import { + EntityTableName, + OpportunityVolunteerStatusType, + VolunteerStateAppreciationType, + VolunteerStateEngagementType, +} from "need4deed-sdk"; +import { DataSource } from "typeorm"; +import { seedVolunteersFile } from "../../config/constants"; +import logger from "../../logger"; +import { tryCatch } from "../../services/utils"; +import { dataSource } from "../data-source"; +import Comment from "../entity/comment.entity"; +import OpportunityVolunteer from "../entity/m2m/opportunity-volunteer"; +import NotionRelation from "../entity/notion-relation.entity"; +import Person from "../entity/person.entity"; +import Language from "../entity/profile/language.entity"; +import User from "../entity/user.entity"; +import Appreciation from "../entity/volunteer/appreciation.entity"; +import Volunteer from "../entity/volunteer/volunteer.entity"; +import { fetchJsonFromUrl, getRepository } from "../utils"; +import { updateVolunteerMatching } from "../utils/update-volunteer-matching"; +import { VolunteerJSON } from "./types"; + +// --------------------------------------------------------------------------- +// Types +// --------------------------------------------------------------------------- + +interface MigrationEntry { + notionId: string; + status: string; + appreciation: string; + cgc: string; + coordinatorComments: string; + volunteerComments: string; + activeOpportunityIds: string[]; + contactedOpportunityIds: string[]; + matchedOpportunityIds: string[]; +} + +// --------------------------------------------------------------------------- +// Status mapping — covers all Notion engagement status values +// --------------------------------------------------------------------------- + +const ENGAGEMENT_MAP: Record = { + Active: VolunteerStateEngagementType.ACTIVE, + Available: VolunteerStateEngagementType.AVAILABLE, + Inactive: 
VolunteerStateEngagementType.INACTIVE, + New: VolunteerStateEngagementType.NEW, + "Temp Unavailable": VolunteerStateEngagementType.TEMP_UNAVAILABLE, + Unresponsive: VolunteerStateEngagementType.UNRESPONSIVE, +}; + +// --------------------------------------------------------------------------- +// Appreciation mapping +// --------------------------------------------------------------------------- + +function parseAppreciation( + raw: string, +): { title: VolunteerStateAppreciationType; delivered: boolean }[] { + const result: { title: VolunteerStateAppreciationType; delivered: boolean }[] = + []; + for (const part of raw.split(",").map((p) => p.trim())) { + if (part === "T-shirt") { + result.push({ + title: VolunteerStateAppreciationType.T_SHIRT, + delivered: true, + }); + } else if (part === "T-shirt offered") { + result.push({ + title: VolunteerStateAppreciationType.T_SHIRT, + delivered: false, + }); + } else if (part === "Certificate") { + result.push({ + title: VolunteerStateAppreciationType.BENEFIT_CARD, + delivered: true, + }); + } + // MTV Video, EA - Recommended, FWP - Recommended: no enum equivalent, skip + } + return result; +} + +// --------------------------------------------------------------------------- +// Step 1 — backfill notionId for volunteers not covered by the schema migration +// --------------------------------------------------------------------------- + +async function backfillNotionIds(ds: DataSource): Promise { + const volunteersJson = (await fetchJsonFromUrl( + seedVolunteersFile, + )) as VolunteerJSON[]; + + const personRepo = getRepository(ds, Person); + const volunteerRepo = getRepository(ds, Volunteer); + let count = 0; + + for (const v of volunteersJson ?? 
[]) { + if (!v.nid || !v.person?.email) continue; + + const person = await personRepo.findOne({ + where: { email: v.person.email }, + }); + if (!person) continue; + + const volunteer = await volunteerRepo.findOne({ + where: { personId: person.id }, + }); + if (!volunteer || volunteer.notionId) continue; + + volunteer.notionId = v.nid; + await volunteerRepo.save(volunteer); + count++; + } + + return count; +} + +// --------------------------------------------------------------------------- +// Step 2 — process each CSV entry +// --------------------------------------------------------------------------- + +async function procesEntries( + ds: DataSource, + entries: MigrationEntry[], +): Promise { + const volunteerRepo = getRepository(ds, Volunteer); + const commentRepo = getRepository(ds, Comment); + const appreciationRepo = getRepository(ds, Appreciation); + const oppVolRepo = getRepository(ds, OpportunityVolunteer); + const notionRelRepo = getRepository(ds, NotionRelation); + const userRepo = getRepository(ds, User); + const langRepo = getRepository(ds, Language); + + // Use admin user as author for migrated comments / appreciation records + const systemUser = await userRepo.findOne({ + where: { email: "john.doe@need4deed.org" }, + }); + const englishLang = await langRepo.findOne({ where: { title: "English" } }); + + let updated = 0; + let skipped = 0; + let errors = 0; + + for (const entry of entries) { + const [, err] = await tryCatch(processOne(entry)); + if (err) { + logger.error( + `Error processing ${entry.notionId}: ${(err as Error).message}`, + ); + errors++; + } + } + + logger.info( + `Migration complete — updated: ${updated}, skipped: ${skipped}, errors: ${errors}`, + ); + + // ------------------------------------------------------------------------- + async function processOne(entry: MigrationEntry): Promise { + if (!entry.notionId) { + skipped++; + return; + } + + const volunteer = await volunteerRepo.findOne({ + where: { notionId: entry.notionId }, + 
}); + if (!volunteer) { + logger.warn(`Volunteer not found: ${entry.notionId} — skipping`); + skipped++; + return; + } + + // 1. Engagement status — always write the CSV value so re-runs stay fresh + const engStatus = ENGAGEMENT_MAP[entry.status]; + if (engStatus) { + volunteer.statusEngagement = engStatus; + await volunteerRepo.save(volunteer); + } + + // 2. Coordinator comments (idempotent — skip if text already exists) + if (entry.coordinatorComments) { + const exists = await commentRepo.findOne({ + where: { + entityType: EntityTableName.VOLUNTEER, + entityId: volunteer.id, + text: entry.coordinatorComments, + }, + }); + if (!exists) { + await commentRepo.save( + new Comment({ + text: entry.coordinatorComments, + entityType: EntityTableName.VOLUNTEER, + entityId: volunteer.id, + userId: systemUser?.id, + languageId: englishLang?.id, + }), + ); + } + } + + // 3. Volunteer personal comments (idempotent) + if (entry.volunteerComments) { + const exists = await commentRepo.findOne({ + where: { + entityType: EntityTableName.VOLUNTEER, + entityId: volunteer.id, + text: entry.volunteerComments, + }, + }); + if (!exists) { + await commentRepo.save( + new Comment({ + text: entry.volunteerComments, + entityType: EntityTableName.VOLUNTEER, + entityId: volunteer.id, + userId: systemUser?.id, + languageId: englishLang?.id, + }), + ); + } + } + + // 4. Appreciation records (idempotent — one record per title per volunteer) + for (const { title, delivered } of parseAppreciation(entry.appreciation)) { + const exists = await appreciationRepo.findOne({ + where: { volunteerId: volunteer.id, title }, + }); + if (!exists) { + await appreciationRepo.save( + new Appreciation({ + title, + volunteerId: volunteer.id, + userId: systemUser?.id, + dateDelivery: delivered ? new Date() : null, + }), + ); + } + } + + // 5. 
OpportunityVolunteer statuses + // Notion "active" opportunities → ACTIVE (only upgrade from PENDING) + // Notion "matched" opportunities → MATCHED (only upgrade from PENDING) + // volunteer.statusMatch is then recomputed automatically by + // updateVolunteerMatching, keeping it opportunity-driven as the BE expects. + + let oppStatusChanged = false; + + for (const oppNid of entry.activeOpportunityIds) { + if (await upgradeOppVolStatus(oppNid, volunteer.id, OpportunityVolunteerStatusType.ACTIVE)) { + oppStatusChanged = true; + } + } + + for (const oppNid of entry.matchedOpportunityIds) { + if (await upgradeOppVolStatus(oppNid, volunteer.id, OpportunityVolunteerStatusType.MATCHED)) { + oppStatusChanged = true; + } + } + + if (oppStatusChanged) { + await updateVolunteerMatching(volunteer.id); + } + + updated++; + } + + // ------------------------------------------------------------------------- + // Helper: look up the opportunity by its Notion ID, then upgrade the + // OpportunityVolunteer row only if it is currently PENDING. + // Returns true when a change was made. 
+ // ------------------------------------------------------------------------- + async function upgradeOppVolStatus( + oppNotionId: string, + volunteerId: number, + targetStatus: OpportunityVolunteerStatusType, + ): Promise { + const notionRel = await notionRelRepo.findOne({ + where: { + hostNid: oppNotionId, + hostType: EntityTableName.OPPORTUNITY, + }, + }); + if (!notionRel) return false; + + const ov = await oppVolRepo.findOne({ + where: { volunteerId, opportunityId: notionRel.hostId }, + }); + if (!ov || ov.status !== OpportunityVolunteerStatusType.PENDING) return false; + + ov.status = targetStatus; + await oppVolRepo.save(ov); + return true; + } +} + +// --------------------------------------------------------------------------- +// Entry point +// --------------------------------------------------------------------------- + +async function main(): Promise { + const jsonPath = process.argv[2]; + if (!jsonPath) { + console.error( + "Usage: yarn migrate:notion \n" + + " Generate the JSON first with: python3 scripts/csv-to-json.py ", + ); + process.exit(1); + } + + if (!fs.existsSync(jsonPath)) { + console.error(`File not found: ${jsonPath}`); + process.exit(1); + } + + await dataSource.initialize(); + + // Step 1: ensure every volunteer has a notionId + logger.info("Step 1: backfilling notionId from seed JSON…"); + const backfilled = await backfillNotionIds(dataSource); + logger.info(` → ${backfilled} volunteer(s) backfilled`); + + // Step 2: apply CSV data + const entries: MigrationEntry[] = JSON.parse( + fs.readFileSync(jsonPath, "utf-8"), + ); + logger.info(`Step 2: processing ${entries.length} entries from ${jsonPath}…`); + await procesEntries(dataSource, entries); + + await dataSource.destroy(); +} + +main().catch((err) => { + logger.error("Migration failed:", err); + process.exit(1); +}); From c9d45af634097a4f8e0e2ece6a6f3f7b240bce6a Mon Sep 17 00:00:00 2001 From: need4deed Date: Thu, 16 Apr 2026 14:30:26 +0300 Subject: [PATCH 05/10] chore: add 
scripts/csv-to-json.py --- scripts/csv-to-json.py | 95 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 95 insertions(+) create mode 100644 scripts/csv-to-json.py diff --git a/scripts/csv-to-json.py b/scripts/csv-to-json.py new file mode 100644 index 0000000..1c390a8 --- /dev/null +++ b/scripts/csv-to-json.py @@ -0,0 +1,95 @@ +#!/usr/bin/env python3 +""" +Convert Notion volunteer CSV export to JSON for the BE migration script. + +Usage: + python3 scripts/csv-to-json.py + +Example: + python3 scripts/csv-to-json.py "volunteers.csv" volunteers-migration.json +""" + +import csv +import json +import sys + +# Column indices (from header row — top row in the revised Notion export) +COL = { + "email": 0, + "appreciation": 9, + "cgc": 10, + "volunteer_comments": 11, + "coordinator_comments": 13, + "status": 16, + "notion_id": 20, + "matching_status": 23, + "name": 25, + "active_opp_ids": 41, # VO active ID — comma-separated VOL-xxx + "contacted_opp_ids": 43, # VO contacted ID — comma-separated VOL-xxx + "matched_opp_ids": 45, # VO matched ID — comma-separated VOL-xxx +} + + +def get_col(row: list[str], key: str) -> str: + idx = COL[key] + return row[idx].strip() if len(row) > idx else "" + + +def split_ids(raw: str) -> list[str]: + """Split a comma-separated VOL-xxx string into a clean list.""" + return [v.strip() for v in raw.split(",") if v.strip().startswith("VOL-")] + + +def main(): + if len(sys.argv) < 3: + print("Usage: python3 csv-to-json.py ") + sys.exit(1) + + input_file = sys.argv[1] + output_file = sys.argv[2] + + with open(input_file, "r", encoding="utf-8") as f: + reader = csv.reader(f) + rows = list(reader) + + # Detect header position: if row 0 col[20] is a label it's at the top, + # otherwise (legacy Notion export) the header is the last row. 
+ if rows and rows[0][COL["notion_id"]].strip() not in ("", ) and \ + not rows[0][COL["notion_id"]].strip().startswith("VOLVO-"): + data_rows = rows[1:] # header at top + else: + data_rows = rows[:-1] # header at bottom (legacy) + + volunteers = [] + skipped = 0 + + for row in data_rows: + notion_id = get_col(row, "notion_id") + if not notion_id or not notion_id.startswith("VOLVO-"): + skipped += 1 + continue + + volunteers.append( + { + "notionId": notion_id, + "status": get_col(row, "status"), + "appreciation": get_col(row, "appreciation"), + "cgc": get_col(row, "cgc"), + "coordinatorComments": get_col(row, "coordinator_comments"), + "volunteerComments": get_col(row, "volunteer_comments"), + "activeOpportunityIds": split_ids(get_col(row, "active_opp_ids")), + "contactedOpportunityIds": split_ids(get_col(row, "contacted_opp_ids")), + "matchedOpportunityIds": split_ids(get_col(row, "matched_opp_ids")), + } + ) + + with open(output_file, "w", encoding="utf-8") as f: + json.dump(volunteers, f, ensure_ascii=False, indent=2) + + print(f"Done: {len(volunteers)} volunteers written to {output_file}") + if skipped: + print(f"Skipped {skipped} rows without a valid VOLVO-xxx ID") + + +if __name__ == "__main__": + main() From 42de4231f3b028284e1418e12d043fd18755aa00 Mon Sep 17 00:00:00 2001 From: need4deed Date: Thu, 16 Apr 2026 14:30:45 +0300 Subject: [PATCH 06/10] chore: add migrate:notion script --- package.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/package.json b/package.json index f7b1706..916acbe 100644 --- a/package.json +++ b/package.json @@ -72,7 +72,8 @@ "test:coverage": "yarn test --coverage", "test:ui": "yarn test --ui", "prepare": "husky", - "what": "./get-json-field-value.sh package.json" + "what": "./get-json-field-value.sh package.json", + "migrate:notion": "ts-node -r tsconfig-paths/register src/data/seeds/migrate-notion.ts" }, "lint-staged": { "*.{js,jsx,ts,tsx}": [ From 60f8b6568876736aebce9200c25f9490f49750f0 Mon Sep 17 
00:00:00 2001 From: need4deed Date: Thu, 16 Apr 2026 15:25:26 +0300 Subject: [PATCH 07/10] feat: support URL argument in migrate:notion script for production use --- src/data/seeds/migrate-notion.ts | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/src/data/seeds/migrate-notion.ts b/src/data/seeds/migrate-notion.ts index be1532e..fc045d8 100644 --- a/src/data/seeds/migrate-notion.ts +++ b/src/data/seeds/migrate-notion.ts @@ -324,13 +324,15 @@ async function main(): Promise { const jsonPath = process.argv[2]; if (!jsonPath) { console.error( - "Usage: yarn migrate:notion \n" + + "Usage: yarn migrate:notion \n" + " Generate the JSON first with: python3 scripts/csv-to-json.py ", ); process.exit(1); } - if (!fs.existsSync(jsonPath)) { + const isUrl = jsonPath.startsWith("http://") || jsonPath.startsWith("https://"); + + if (!isUrl && !fs.existsSync(jsonPath)) { console.error(`File not found: ${jsonPath}`); process.exit(1); } @@ -343,9 +345,13 @@ async function main(): Promise { logger.info(` → ${backfilled} volunteer(s) backfilled`); // Step 2: apply CSV data - const entries: MigrationEntry[] = JSON.parse( - fs.readFileSync(jsonPath, "utf-8"), - ); + let entries: MigrationEntry[]; + if (isUrl) { + logger.info(`Fetching migration data from URL: ${jsonPath}`); + entries = (await fetchJsonFromUrl(jsonPath)) as MigrationEntry[]; + } else { + entries = JSON.parse(fs.readFileSync(jsonPath, "utf-8")); + } logger.info(`Step 2: processing ${entries.length} entries from ${jsonPath}…`); await procesEntries(dataSource, entries); @@ -356,3 +362,4 @@ main().catch((err) => { logger.error("Migration failed:", err); process.exit(1); }); + From 553b93d30c2bc47f3e51e2f361cd28c829efe028 Mon Sep 17 00:00:00 2001 From: need4deed Date: Thu, 16 Apr 2026 15:37:28 +0300 Subject: [PATCH 08/10] feat: add migrate:notion:prod script for production container --- package.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/package.json 
b/package.json index 916acbe..7124979 100644 --- a/package.json +++ b/package.json @@ -73,7 +73,8 @@ "test:ui": "yarn test --ui", "prepare": "husky", "what": "./get-json-field-value.sh package.json", - "migrate:notion": "ts-node -r tsconfig-paths/register src/data/seeds/migrate-notion.ts" + "migrate:notion": "ts-node -r tsconfig-paths/register src/data/seeds/migrate-notion.ts", + "migrate:notion:prod": "node build/src/data/seeds/migrate-notion.js" }, "lint-staged": { "*.{js,jsx,ts,tsx}": [ From d6e5e801ad7f3ced9d01021a4886c0eb1db9922d Mon Sep 17 00:00:00 2001 From: arturasmckwcz Date: Fri, 17 Apr 2026 07:33:20 +0000 Subject: [PATCH 09/10] revert: to the current develop --- package.json | 4 +- scripts/csv-to-json.py | 95 ----- src/data/entity/volunteer/volunteer.entity.ts | 5 - .../1776335627281-add-volunteer-notion-id.ts | 41 -- src/data/seeds/migrate-notion.ts | 365 ------------------ src/data/seeds/volunteer.seed.ts | 1 - 6 files changed, 1 insertion(+), 510 deletions(-) delete mode 100644 scripts/csv-to-json.py delete mode 100644 src/data/migrations/1776335627281-add-volunteer-notion-id.ts delete mode 100644 src/data/seeds/migrate-notion.ts diff --git a/package.json b/package.json index 7124979..f7b1706 100644 --- a/package.json +++ b/package.json @@ -72,9 +72,7 @@ "test:coverage": "yarn test --coverage", "test:ui": "yarn test --ui", "prepare": "husky", - "what": "./get-json-field-value.sh package.json", - "migrate:notion": "ts-node -r tsconfig-paths/register src/data/seeds/migrate-notion.ts", - "migrate:notion:prod": "node build/src/data/seeds/migrate-notion.js" + "what": "./get-json-field-value.sh package.json" }, "lint-staged": { "*.{js,jsx,ts,tsx}": [ diff --git a/scripts/csv-to-json.py b/scripts/csv-to-json.py deleted file mode 100644 index 1c390a8..0000000 --- a/scripts/csv-to-json.py +++ /dev/null @@ -1,95 +0,0 @@ -#!/usr/bin/env python3 -""" -Convert Notion volunteer CSV export to JSON for the BE migration script. 
- -Usage: - python3 scripts/csv-to-json.py - -Example: - python3 scripts/csv-to-json.py "volunteers.csv" volunteers-migration.json -""" - -import csv -import json -import sys - -# Column indices (from header row — top row in the revised Notion export) -COL = { - "email": 0, - "appreciation": 9, - "cgc": 10, - "volunteer_comments": 11, - "coordinator_comments": 13, - "status": 16, - "notion_id": 20, - "matching_status": 23, - "name": 25, - "active_opp_ids": 41, # VO active ID — comma-separated VOL-xxx - "contacted_opp_ids": 43, # VO contacted ID — comma-separated VOL-xxx - "matched_opp_ids": 45, # VO matched ID — comma-separated VOL-xxx -} - - -def get_col(row: list[str], key: str) -> str: - idx = COL[key] - return row[idx].strip() if len(row) > idx else "" - - -def split_ids(raw: str) -> list[str]: - """Split a comma-separated VOL-xxx string into a clean list.""" - return [v.strip() for v in raw.split(",") if v.strip().startswith("VOL-")] - - -def main(): - if len(sys.argv) < 3: - print("Usage: python3 csv-to-json.py ") - sys.exit(1) - - input_file = sys.argv[1] - output_file = sys.argv[2] - - with open(input_file, "r", encoding="utf-8") as f: - reader = csv.reader(f) - rows = list(reader) - - # Detect header position: if row 0 col[20] is a label it's at the top, - # otherwise (legacy Notion export) the header is the last row. 
- if rows and rows[0][COL["notion_id"]].strip() not in ("", ) and \ - not rows[0][COL["notion_id"]].strip().startswith("VOLVO-"): - data_rows = rows[1:] # header at top - else: - data_rows = rows[:-1] # header at bottom (legacy) - - volunteers = [] - skipped = 0 - - for row in data_rows: - notion_id = get_col(row, "notion_id") - if not notion_id or not notion_id.startswith("VOLVO-"): - skipped += 1 - continue - - volunteers.append( - { - "notionId": notion_id, - "status": get_col(row, "status"), - "appreciation": get_col(row, "appreciation"), - "cgc": get_col(row, "cgc"), - "coordinatorComments": get_col(row, "coordinator_comments"), - "volunteerComments": get_col(row, "volunteer_comments"), - "activeOpportunityIds": split_ids(get_col(row, "active_opp_ids")), - "contactedOpportunityIds": split_ids(get_col(row, "contacted_opp_ids")), - "matchedOpportunityIds": split_ids(get_col(row, "matched_opp_ids")), - } - ) - - with open(output_file, "w", encoding="utf-8") as f: - json.dump(volunteers, f, ensure_ascii=False, indent=2) - - print(f"Done: {len(volunteers)} volunteers written to {output_file}") - if skipped: - print(f"Skipped {skipped} rows without a valid VOLVO-xxx ID") - - -if __name__ == "__main__": - main() diff --git a/src/data/entity/volunteer/volunteer.entity.ts b/src/data/entity/volunteer/volunteer.entity.ts index bde3538..a9dc85a 100644 --- a/src/data/entity/volunteer/volunteer.entity.ts +++ b/src/data/entity/volunteer/volunteer.entity.ts @@ -37,11 +37,6 @@ export default class Volunteer { @PrimaryGeneratedColumn() id: number; - @Column({ nullable: true }) - @IsOptional() - @IsString() - notionId: string; - @Column({ nullable: true }) @IsOptional() @IsString() diff --git a/src/data/migrations/1776335627281-add-volunteer-notion-id.ts b/src/data/migrations/1776335627281-add-volunteer-notion-id.ts deleted file mode 100644 index dff5068..0000000 --- a/src/data/migrations/1776335627281-add-volunteer-notion-id.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { 
MigrationInterface, QueryRunner } from "typeorm"; - -export class AddVolunteerNotionId1776335627281 implements MigrationInterface { - public async up(queryRunner: QueryRunner): Promise { - await queryRunner.query( - `ALTER TABLE volunteer ADD COLUMN IF NOT EXISTS notion_id VARCHAR(50)`, - ); - - await queryRunner.query( - `CREATE INDEX IF NOT EXISTS idx_volunteer_notion_id ON volunteer (notion_id)`, - ); - - // Partial backfill: volunteers that have at least one opportunity link in - // notion_relation can be resolved immediately without any external call. - // Volunteers with no opportunity links are backfilled by the - // migrate-notion script, which uses the seed JSON (nid + email). - await queryRunner.query(` - UPDATE volunteer v - SET notion_id = sub.tenant_nid - FROM ( - SELECT DISTINCT ON (ov.volunteer_id) - ov.volunteer_id, - nr.tenant_nid - FROM notion_relation nr - JOIN opportunity_volunteer ov ON ov.opportunity_id = nr.host_id - WHERE nr.tenant_type = 'volunteer' - AND nr.host_type = 'opportunity' - ORDER BY ov.volunteer_id - ) sub - WHERE v.id = sub.volunteer_id - AND v.notion_id IS NULL - `); - } - - public async down(queryRunner: QueryRunner): Promise { - await queryRunner.query(`DROP INDEX IF EXISTS idx_volunteer_notion_id`); - await queryRunner.query( - `ALTER TABLE volunteer DROP COLUMN IF EXISTS notion_id`, - ); - } -} diff --git a/src/data/seeds/migrate-notion.ts b/src/data/seeds/migrate-notion.ts deleted file mode 100644 index fc045d8..0000000 --- a/src/data/seeds/migrate-notion.ts +++ /dev/null @@ -1,365 +0,0 @@ -/** - * Notion → BE data migration script. - * - * Migrates four fields for every volunteer found in a Notion CSV export: - * 1. Engagement status (Active, Available, Inactive, Unresponsive, …) - * 2. Opportunity matching (PENDING → MATCHED / ACTIVE on OpportunityVolunteer) - * 3. Appreciation records (T-shirt, Certificate, …) - * 4. 
Comments (coordinator notes + volunteer personal notes) - * - * The script is idempotent — safe to run multiple times and for different - * engagement-status slices (active, inactive, unresponsive, etc.). - * - * Usage: - * 1. Convert the Notion CSV: - * python3 scripts/csv-to-json.py migration.json - * 2. Run this script: - * yarn migrate:notion migration.json - */ - -import * as fs from "fs"; -import "reflect-metadata"; -import { - EntityTableName, - OpportunityVolunteerStatusType, - VolunteerStateAppreciationType, - VolunteerStateEngagementType, -} from "need4deed-sdk"; -import { DataSource } from "typeorm"; -import { seedVolunteersFile } from "../../config/constants"; -import logger from "../../logger"; -import { tryCatch } from "../../services/utils"; -import { dataSource } from "../data-source"; -import Comment from "../entity/comment.entity"; -import OpportunityVolunteer from "../entity/m2m/opportunity-volunteer"; -import NotionRelation from "../entity/notion-relation.entity"; -import Person from "../entity/person.entity"; -import Language from "../entity/profile/language.entity"; -import User from "../entity/user.entity"; -import Appreciation from "../entity/volunteer/appreciation.entity"; -import Volunteer from "../entity/volunteer/volunteer.entity"; -import { fetchJsonFromUrl, getRepository } from "../utils"; -import { updateVolunteerMatching } from "../utils/update-volunteer-matching"; -import { VolunteerJSON } from "./types"; - -// --------------------------------------------------------------------------- -// Types -// --------------------------------------------------------------------------- - -interface MigrationEntry { - notionId: string; - status: string; - appreciation: string; - cgc: string; - coordinatorComments: string; - volunteerComments: string; - activeOpportunityIds: string[]; - contactedOpportunityIds: string[]; - matchedOpportunityIds: string[]; -} - -// --------------------------------------------------------------------------- -// 
Status mapping — covers all Notion engagement status values -// --------------------------------------------------------------------------- - -const ENGAGEMENT_MAP: Record = { - Active: VolunteerStateEngagementType.ACTIVE, - Available: VolunteerStateEngagementType.AVAILABLE, - Inactive: VolunteerStateEngagementType.INACTIVE, - New: VolunteerStateEngagementType.NEW, - "Temp Unavailable": VolunteerStateEngagementType.TEMP_UNAVAILABLE, - Unresponsive: VolunteerStateEngagementType.UNRESPONSIVE, -}; - -// --------------------------------------------------------------------------- -// Appreciation mapping -// --------------------------------------------------------------------------- - -function parseAppreciation( - raw: string, -): { title: VolunteerStateAppreciationType; delivered: boolean }[] { - const result: { title: VolunteerStateAppreciationType; delivered: boolean }[] = - []; - for (const part of raw.split(",").map((p) => p.trim())) { - if (part === "T-shirt") { - result.push({ - title: VolunteerStateAppreciationType.T_SHIRT, - delivered: true, - }); - } else if (part === "T-shirt offered") { - result.push({ - title: VolunteerStateAppreciationType.T_SHIRT, - delivered: false, - }); - } else if (part === "Certificate") { - result.push({ - title: VolunteerStateAppreciationType.BENEFIT_CARD, - delivered: true, - }); - } - // MTV Video, EA - Recommended, FWP - Recommended: no enum equivalent, skip - } - return result; -} - -// --------------------------------------------------------------------------- -// Step 1 — backfill notionId for volunteers not covered by the schema migration -// --------------------------------------------------------------------------- - -async function backfillNotionIds(ds: DataSource): Promise { - const volunteersJson = (await fetchJsonFromUrl( - seedVolunteersFile, - )) as VolunteerJSON[]; - - const personRepo = getRepository(ds, Person); - const volunteerRepo = getRepository(ds, Volunteer); - let count = 0; - - for (const v of 
volunteersJson ?? []) {
-    if (!v.nid || !v.person?.email) continue;
-
-    const person = await personRepo.findOne({
-      where: { email: v.person.email },
-    });
-    if (!person) continue;
-
-    const volunteer = await volunteerRepo.findOne({
-      where: { personId: person.id },
-    });
-    if (!volunteer || volunteer.notionId) continue;
-
-    volunteer.notionId = v.nid;
-    await volunteerRepo.save(volunteer);
-    count++;
-  }
-
-  return count;
-}
-
-// ---------------------------------------------------------------------------
-// Step 2 — process each CSV entry
-// ---------------------------------------------------------------------------
-
-async function procesEntries(
-  ds: DataSource,
-  entries: MigrationEntry[],
-): Promise<void> {
-  const volunteerRepo = getRepository(ds, Volunteer);
-  const commentRepo = getRepository(ds, Comment);
-  const appreciationRepo = getRepository(ds, Appreciation);
-  const oppVolRepo = getRepository(ds, OpportunityVolunteer);
-  const notionRelRepo = getRepository(ds, NotionRelation);
-  const userRepo = getRepository(ds, User);
-  const langRepo = getRepository(ds, Language);
-
-  // Use admin user as author for migrated comments / appreciation records
-  const systemUser = await userRepo.findOne({
-    where: { email: "john.doe@need4deed.org" },
-  });
-  const englishLang = await langRepo.findOne({ where: { title: "English" } });
-
-  let updated = 0;
-  let skipped = 0;
-  let errors = 0;
-
-  for (const entry of entries) {
-    const [, err] = await tryCatch(processOne(entry));
-    if (err) {
-      logger.error(
-        `Error processing ${entry.notionId}: ${(err as Error).message}`,
-      );
-      errors++;
-    }
-  }
-
-  logger.info(
-    `Migration complete — updated: ${updated}, skipped: ${skipped}, errors: ${errors}`,
-  );
-
-  // -------------------------------------------------------------------------
-  async function processOne(entry: MigrationEntry): Promise<void> {
-    if (!entry.notionId) {
-      skipped++;
-      return;
-    }
-
-    const volunteer = await volunteerRepo.findOne({
-      where: { notionId: 
entry.notionId }, - }); - if (!volunteer) { - logger.warn(`Volunteer not found: ${entry.notionId} — skipping`); - skipped++; - return; - } - - // 1. Engagement status — always write the CSV value so re-runs stay fresh - const engStatus = ENGAGEMENT_MAP[entry.status]; - if (engStatus) { - volunteer.statusEngagement = engStatus; - await volunteerRepo.save(volunteer); - } - - // 2. Coordinator comments (idempotent — skip if text already exists) - if (entry.coordinatorComments) { - const exists = await commentRepo.findOne({ - where: { - entityType: EntityTableName.VOLUNTEER, - entityId: volunteer.id, - text: entry.coordinatorComments, - }, - }); - if (!exists) { - await commentRepo.save( - new Comment({ - text: entry.coordinatorComments, - entityType: EntityTableName.VOLUNTEER, - entityId: volunteer.id, - userId: systemUser?.id, - languageId: englishLang?.id, - }), - ); - } - } - - // 3. Volunteer personal comments (idempotent) - if (entry.volunteerComments) { - const exists = await commentRepo.findOne({ - where: { - entityType: EntityTableName.VOLUNTEER, - entityId: volunteer.id, - text: entry.volunteerComments, - }, - }); - if (!exists) { - await commentRepo.save( - new Comment({ - text: entry.volunteerComments, - entityType: EntityTableName.VOLUNTEER, - entityId: volunteer.id, - userId: systemUser?.id, - languageId: englishLang?.id, - }), - ); - } - } - - // 4. Appreciation records (idempotent — one record per title per volunteer) - for (const { title, delivered } of parseAppreciation(entry.appreciation)) { - const exists = await appreciationRepo.findOne({ - where: { volunteerId: volunteer.id, title }, - }); - if (!exists) { - await appreciationRepo.save( - new Appreciation({ - title, - volunteerId: volunteer.id, - userId: systemUser?.id, - dateDelivery: delivered ? new Date() : null, - }), - ); - } - } - - // 5. 
OpportunityVolunteer statuses - // Notion "active" opportunities → ACTIVE (only upgrade from PENDING) - // Notion "matched" opportunities → MATCHED (only upgrade from PENDING) - // volunteer.statusMatch is then recomputed automatically by - // updateVolunteerMatching, keeping it opportunity-driven as the BE expects. - - let oppStatusChanged = false; - - for (const oppNid of entry.activeOpportunityIds) { - if (await upgradeOppVolStatus(oppNid, volunteer.id, OpportunityVolunteerStatusType.ACTIVE)) { - oppStatusChanged = true; - } - } - - for (const oppNid of entry.matchedOpportunityIds) { - if (await upgradeOppVolStatus(oppNid, volunteer.id, OpportunityVolunteerStatusType.MATCHED)) { - oppStatusChanged = true; - } - } - - if (oppStatusChanged) { - await updateVolunteerMatching(volunteer.id); - } - - updated++; - } - - // ------------------------------------------------------------------------- - // Helper: look up the opportunity by its Notion ID, then upgrade the - // OpportunityVolunteer row only if it is currently PENDING. - // Returns true when a change was made. 
-  // -------------------------------------------------------------------------
-  async function upgradeOppVolStatus(
-    oppNotionId: string,
-    volunteerId: number,
-    targetStatus: OpportunityVolunteerStatusType,
-  ): Promise<boolean> {
-    const notionRel = await notionRelRepo.findOne({
-      where: {
-        hostNid: oppNotionId,
-        hostType: EntityTableName.OPPORTUNITY,
-      },
-    });
-    if (!notionRel) return false;
-
-    const ov = await oppVolRepo.findOne({
-      where: { volunteerId, opportunityId: notionRel.hostId },
-    });
-    if (!ov || ov.status !== OpportunityVolunteerStatusType.PENDING) return false;
-
-    ov.status = targetStatus;
-    await oppVolRepo.save(ov);
-    return true;
-  }
-}
-
-// ---------------------------------------------------------------------------
-// Entry point
-// ---------------------------------------------------------------------------
-
-async function main(): Promise<void> {
-  const jsonPath = process.argv[2];
-  if (!jsonPath) {
-    console.error(
-      "Usage: yarn migrate:notion <json-file-or-url>\n" +
-        "       Generate the JSON first with: python3 scripts/csv-to-json.py <input.csv> <output.json>",
-    );
-    process.exit(1);
-  }
-
-  const isUrl = jsonPath.startsWith("http://") || jsonPath.startsWith("https://");
-
-  if (!isUrl && !fs.existsSync(jsonPath)) {
-    console.error(`File not found: ${jsonPath}`);
-    process.exit(1);
-  }
-
-  await dataSource.initialize();
-
-  // Step 1: ensure every volunteer has a notionId
-  logger.info("Step 1: backfilling notionId from seed JSON…");
-  const backfilled = await backfillNotionIds(dataSource);
-  logger.info(`  → ${backfilled} volunteer(s) backfilled`);
-
-  // Step 2: apply CSV data
-  let entries: MigrationEntry[];
-  if (isUrl) {
-    logger.info(`Fetching migration data from URL: ${jsonPath}`);
-    entries = (await fetchJsonFromUrl(jsonPath)) as MigrationEntry[];
-  } else {
-    entries = JSON.parse(fs.readFileSync(jsonPath, "utf-8"));
-  }
-  logger.info(`Step 2: processing ${entries.length} entries from ${jsonPath}…`);
-  await procesEntries(dataSource, entries);
-
-  await dataSource.destroy();
-}
-
-main().catch((err) => { - logger.error("Migration failed:", err); - process.exit(1); -}); - diff --git a/src/data/seeds/volunteer.seed.ts b/src/data/seeds/volunteer.seed.ts index 6f3a232..949a416 100644 --- a/src/data/seeds/volunteer.seed.ts +++ b/src/data/seeds/volunteer.seed.ts @@ -45,7 +45,6 @@ export async function seedVolunteers(dataSource: DataSource): Promise { statusVaccination: getDocumentStatus(volunteer.statusVaccination), infoAbout: volunteer.infoAbout || "", infoExperience: volunteer.infoExperience || "", - notionId: volunteer.nid || null, createdAt: new Date(volunteer.timestamp), updatedAt: new Date(volunteer.timestamp), person, From dfc4af2315a5bc6d59300255e2fb41b05d194a11 Mon Sep 17 00:00:00 2001 From: nadavosa Date: Mon, 27 Apr 2026 17:24:51 +0200 Subject: [PATCH 10/10] feat: add document.received + PATCH /volunteer/:id/doc/:type endpoint Closes #416 Co-Authored-By: Claude Sonnet 4.6 --- src/data/entity/document.entity.ts | 6 +++ .../1777303247669-add-document-received.ts | 18 +++++++ src/server/routes/volunteer/doc.routes.ts | 54 +++++++++++++++++++ src/server/schema/volunteer-doc.schema.ts | 8 +++ src/services/dto/dto-document.ts | 2 + 5 files changed, 88 insertions(+) create mode 100644 src/data/migrations/1777303247669-add-document-received.ts diff --git a/src/data/entity/document.entity.ts b/src/data/entity/document.entity.ts index f7c05f9..2749294 100644 --- a/src/data/entity/document.entity.ts +++ b/src/data/entity/document.entity.ts @@ -57,6 +57,12 @@ export default class Document { @Column({ nullable: true }) volunteerId: number; + @Column({ default: false }) + received: boolean; + + @Column({ nullable: true, type: "timestamp" }) + receivedOn: Date | null; + @CreateDateColumn() createdAt: Date; diff --git a/src/data/migrations/1777303247669-add-document-received.ts b/src/data/migrations/1777303247669-add-document-received.ts new file mode 100644 index 0000000..f44e0de --- /dev/null +++ 
b/src/data/migrations/1777303247669-add-document-received.ts
@@ -0,0 +1,18 @@
+import { MigrationInterface, QueryRunner } from "typeorm";
+
+export class AddDocumentReceived1777303247669 implements MigrationInterface {
+  name = "AddDocumentReceived1777303247669";
+
+  public async up(queryRunner: QueryRunner): Promise<void> {
+    await queryRunner.query(`
+      ALTER TABLE "document"
+      ADD COLUMN IF NOT EXISTS "received" boolean NOT NULL DEFAULT false,
+      ADD COLUMN IF NOT EXISTS "received_on" timestamp
+    `);
+  }
+
+  public async down(queryRunner: QueryRunner): Promise<void> {
+    await queryRunner.query(`ALTER TABLE "document" DROP COLUMN IF EXISTS "received_on"`);
+    await queryRunner.query(`ALTER TABLE "document" DROP COLUMN IF EXISTS "received"`);
+  }
+}
diff --git a/src/server/routes/volunteer/doc.routes.ts b/src/server/routes/volunteer/doc.routes.ts
index 197e27e..d246acd 100644
--- a/src/server/routes/volunteer/doc.routes.ts
+++ b/src/server/routes/volunteer/doc.routes.ts
@@ -10,6 +10,7 @@ import {
   responseErrors,
   volunteerDocSchemaGet200,
   volunteerDocSchemaGetMeta200,
+  volunteerDocSchemaPatchBody,
   volunteerDocSchemaUploadMeta,
 } from "../../schema";
 import { getVolunteerDocuments } from "../../utils";
@@ -244,4 +245,57 @@ export default async function volunteerDocRoutes(
   });
 },
 );
+
+  fastify.patch<{
+    Params: { id: number; type: DocumentType };
+    Body: { received: boolean };
+  }>(
+    "/:type",
+    {
+      schema: {
+        params: idTypeParamSchema,
+        body: volunteerDocSchemaPatchBody,
+        response: {
+          200: {
+            type: "object",
+            properties: { message: { type: "string" } },
+            required: ["message"],
+          },
+          ...responseErrors,
+        },
+      },
+    },
+    async (request, reply) => {
+      const { id, type } = request.params;
+      const { received } = request.body;
+      const documentRepository = fastify.db.documentRepository;
+
+      const doc = await documentRepository.findOneBy({ volunteerId: id, type });
+
+      if (!doc) {
+        return reply.status(404).send({
+          message: `Document of type:${type} for volunteer_id:${id} not 
found.`, + }); + } + + const [, error] = await tryCatch( + documentRepository.save({ + ...doc, + received, + receivedOn: received ? new Date() : null, + }), + ); + + if (error) { + logger.error(`Error updating document for volunteer ${id}: ${error}`); + return reply.status(400).send({ + message: `Error updating document: ${error}`, + }); + } + + return reply.send({ + message: `Document of type:${type} for volunteer_id:${id} successfully updated.`, + }); + }, + ); } diff --git a/src/server/schema/volunteer-doc.schema.ts b/src/server/schema/volunteer-doc.schema.ts index 607643c..d09036b 100644 --- a/src/server/schema/volunteer-doc.schema.ts +++ b/src/server/schema/volunteer-doc.schema.ts @@ -33,6 +33,14 @@ export const volunteerDocSchemaGetMeta200 = { required: ["url", "fields"], }; +export const volunteerDocSchemaPatchBody = { + type: "object", + properties: { + received: { type: "boolean" }, + }, + required: ["received"], +}; + export const volunteerDocSchemaUploadMeta = { type: "object", properties: { diff --git a/src/services/dto/dto-document.ts b/src/services/dto/dto-document.ts index 46feb46..e80d0ee 100644 --- a/src/services/dto/dto-document.ts +++ b/src/services/dto/dto-document.ts @@ -10,5 +10,7 @@ export function documentSerializer(doc: Document): ApiDocumentGet { url: getDocumentUrl(doc.s3Key), mimeType: doc.mimeType, createdAt: doc.createdAt, + received: doc.received ?? false, + receivedOn: doc.receivedOn ?? null, }; }