diff --git a/.github/workflows/pgsync.build.dev.yaml b/.github/workflows/pgsync.build.dev.yaml new file mode 100644 index 0000000000..1d4b2e10c3 --- /dev/null +++ b/.github/workflows/pgsync.build.dev.yaml @@ -0,0 +1,33 @@ +name: PGSync Image Build DEV + +on: + workflow_dispatch: + push: + branches: + - develop + paths: + - .github/workflows/pgsync.build.dev.yaml + +env: + INITIAL_TAG: latest + TAG: dev + NAME: pgsync + CONTEXT: services/pgsync/ + +jobs: + build-pgsync: + runs-on: ubuntu-24.04 + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Login + run: | + docker login -u ${{ secrets.CLUSTER_REGISTRY_USER }} -p ${{ secrets.BUILD_TOKEN }} ${{ secrets.CLUSTER_REGISTRY }} + - name: Build n Tag + run: | + docker build -t ${{ env.NAME }}:${{ env.INITIAL_TAG }} ${{ env.CONTEXT }} -f ${{ env.CONTEXT }}Dockerfile + docker tag ${{ env.NAME }}:${{ env.INITIAL_TAG }} ${{ secrets.CLUSTER_REGISTRY }}/${{ secrets.NS_TOOLS }}/${{ env.NAME }}:${{ env.INITIAL_TAG }} + docker tag ${{ env.NAME }}:${{ env.INITIAL_TAG }} ${{ secrets.CLUSTER_REGISTRY }}/${{ secrets.NS_TOOLS }}/${{ env.NAME }}:${{ env.TAG }} + - name: Push + run: | + docker push --all-tags ${{ secrets.CLUSTER_REGISTRY }}/${{ secrets.NS_TOOLS }}/${{ env.NAME }} diff --git a/.github/workflows/pgsync.deploy.prod.yaml b/.github/workflows/pgsync.deploy.prod.yaml new file mode 100644 index 0000000000..a4e03d9f7c --- /dev/null +++ b/.github/workflows/pgsync.deploy.prod.yaml @@ -0,0 +1,65 @@ +name: PGSync - Promote PROD + +on: + workflow_dispatch: + +env: + ORIG_TAG: test + PROMOTE_TAG: prod + IMAGE: pgsync + +jobs: + promote-image: + name: promote-image + runs-on: ubuntu-24.04 + steps: + - name: Install oc + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4.7" + + - name: oc login + run: | + oc login --token=${{ secrets.BUILD_TOKEN }} --server=${{ secrets.CLUSTER_API }} + + - name: Promote from test to prod + run: | + oc -n ${{secrets.NS_TOOLS}} tag \ + ${{ secrets.NS_TOOLS }}/${{ env.IMAGE }}:${{ env.ORIG_TAG }} \ + ${{ secrets.NS_TOOLS }}/${{ env.IMAGE }}:${{ env.PROMOTE_TAG }} + + trigger-gitops: + runs-on: ubuntu-24.04 + timeout-minutes: 10 + needs: promote-image + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Setup SSH Agent + uses: ./.github/actions/setup-ssh + with: + ssh-private-key: ${{ secrets.GITOPS_REPO_DEPLOY_KEY }} + + - name: Git Ops Push + run: ./gitops/commit.sh ${{ env.IMAGE }} test prod ${{ github.actor }} + - name: Install oc + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4.7" + - name: Setup ArgoCD CLI + uses: imajeetyadav/argocd-cli@v1 + with: + version: v2.7.9 # optional + - name: oc login + run: oc login --token=${{ secrets.BUILD_TOKEN }} --server=${{ secrets.CLUSTER_API }} + - name: Notification + run: ./gitops/watch-deployment.sh pgsync prod ${{ github.sha }} ${{ secrets.DISCORD_DEPLOYMENT_WEBHOOK }} ${{ secrets.ARGOCD_SERVER }} ${{ secrets.ARGO_CD_CLI_JWT }} + + run-if-failed: + runs-on: ubuntu-24.04 + needs: [trigger-gitops] + if: always() && (needs.trigger-gitops.result == 'failure') + steps: + - name: Notification + run: ./gitops/watch-deployment.sh pgsync prod ${{ github.sha }} ${{ secrets.DISCORD_DEPLOYMENT_WEBHOOK }} 1 diff --git a/.github/workflows/pgsync.deploy.test.yaml b/.github/workflows/pgsync.deploy.test.yaml new file mode 100644 index 0000000000..e0c60fca6e --- /dev/null +++ b/.github/workflows/pgsync.deploy.test.yaml @@ -0,0 +1,65 @@ +name: PGSync - Promote Test + +on: + workflow_dispatch: + +env: + ORIG_TAG: dev + PROMOTE_TAG: test + 
IMAGE: pgsync + +jobs: + promote-image: + name: promote-image + runs-on: ubuntu-24.04 + steps: + - name: Install oc + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4.7" + + - name: oc login + run: | + oc login --token=${{ secrets.BUILD_TOKEN }} --server=${{ secrets.CLUSTER_API }} + + - name: Promote from dev to test + run: | + oc -n ${{secrets.NS_TOOLS}} tag \ + ${{ secrets.NS_TOOLS }}/${{ env.IMAGE }}:${{ env.ORIG_TAG }} \ + ${{ secrets.NS_TOOLS }}/${{ env.IMAGE }}:${{ env.PROMOTE_TAG }} + + trigger-gitops: + runs-on: ubuntu-24.04 + timeout-minutes: 10 + needs: promote-image + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Setup SSH Agent + uses: ./.github/actions/setup-ssh + with: + ssh-private-key: ${{ secrets.GITOPS_REPO_DEPLOY_KEY }} + + - name: Git Ops Push + run: ./gitops/commit.sh ${{ env.IMAGE }} dev test ${{ github.actor }} + - name: Install oc + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4.7" + - name: Setup ArgoCD CLI + uses: imajeetyadav/argocd-cli@v1 + with: + version: v2.7.9 # optional + - name: oc login + run: oc login --token=${{ secrets.BUILD_TOKEN }} --server=${{ secrets.CLUSTER_API }} + - name: Notification + run: ./gitops/watch-deployment.sh pgsync test ${{ github.sha }} ${{ secrets.DISCORD_DEPLOYMENT_WEBHOOK }} ${{ secrets.ARGOCD_SERVER }} ${{ secrets.ARGO_CD_CLI_JWT }} + + run-if-failed: + runs-on: ubuntu-24.04 + needs: [trigger-gitops] + if: always() && (needs.trigger-gitops.result == 'failure') + steps: + - name: Notification + run: ./gitops/watch-deployment.sh pgsync test ${{ github.sha }} ${{ secrets.DISCORD_DEPLOYMENT_WEBHOOK }} 1 diff --git a/bin/setenv.sh b/bin/setenv.sh index 51f0b60313..ba8cbf5ce7 100755 --- a/bin/setenv.sh +++ b/bin/setenv.sh @@ -33,6 +33,9 @@ AZURE_SEARCH_API_KEY ELASTICSEARCH_CA_CERT SYNCFUSION_LICENSE_KEY SYNCFUSION_FRONTEND_LICENSE_KEY +AZURE_STORAGE_CONNECTION_STRING +AZURE_STORAGE_CONTAINER +AZURE_STORAGE_BLOB_SERVICE_ENDPOINT " bold=$(tput bold) diff --git a/docker-compose.M1.yaml b/docker-compose.M1.yaml index 954d775d72..716f610aba 100644 --- a/docker-compose.M1.yaml +++ b/docker-compose.M1.yaml @@ -2,7 +2,6 @@ version: "3" include: - ./services/permits/docker-compose.yaml services: - ####################### Keycloak for Cypress ####################### keycloak: build: @@ -24,7 +23,7 @@ services: ####################### Open Telemetry ####################### otelcollector: image: otel/opentelemetry-collector - command: [ --config=/etc/otel-collector-config.yaml ] + command: [--config=/etc/otel-collector-config.yaml] volumes: - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml ports: @@ -67,10 +66,9 @@ services: - ./services/postgres/postgresql.conf:/config/postgresql.conf - ./services/postgres/pg_hba.conf:/config/pg_hba.conf - postgres_data:/var/lib/postgresql/data - command: - postgres -c 'config_file=/config/postgresql.conf' -c 'hba_file=/config/pg_hba.conf' + command: postgres -c 'config_file=/config/postgresql.conf' -c 'hba_file=/config/pg_hba.conf' healthcheck: - test: [ "CMD", "pg_isready" ] + test: ["CMD", "pg_isready"] interval: 5s timeout: 5s retries: 5 @@ -84,12 +82,12 @@ services: context: services/postgres dockerfile: Dockerfile_update environment: - - PGUSER=mds - - POSTGRES_PASSWORD=test - - POSTGRES_INITDB_ARGS=-U mds + - PGUSER=mds + - POSTGRES_PASSWORD=test + - POSTGRES_INITDB_ARGS=-U mds volumes: - - postgres_data_bkp:/var/lib/postgresql/13/data - - postgres_data:/var/lib/postgresql/16/data + - postgres_data_bkp:/var/lib/postgresql/13/data + - 
postgres_data:/var/lib/postgresql/16/data ####################### Flyway Migration Definition ####################### flyway: @@ -118,6 +116,40 @@ services: depends_on: - postgres ####################### Backend Definition ####################### + + pgsync: + build: + context: services/pgsync + container_name: mds_pgsync + platform: linux/amd64 + environment: + - PG_HOST=postgres + - PG_PORT=5432 + - PG_USER=mds + - PG_PASSWORD=test + - PG_DATABASE=mds + - ELASTICSEARCH_HOST=elasticsearch + - ELASTICSEARCH_PORT=9200 + - ELASTICSEARCH_SCHEME=https + - ELASTICSEARCH_USER=elastic + - ELASTICSEARCH_PASSWORD=elastic + - ELASTICSEARCH_VERIFY_CERTS=false + - ELASTICSEARCH_CA_CERTS=/certs/ca/ca.crt + - PGSYNC_CHECKPOINT_PATH=/tmp + - REDIS_HOST=redis + - REDIS_PORT=6379 + - REDIS_DB=0 + - REDIS_AUTH=redis-password + volumes: + - ./services/pgsync:/config + - certs:/certs + depends_on: + postgres: + condition: service_healthy + elasticsearch: + condition: service_healthy + redis: + condition: service_healthy backend: restart: always user: 1000:1000 @@ -131,6 +163,7 @@ services: - ./migrations:/migrations - ./services/core-api:/app - core_api_logs:/var/log/core-api + - certs:/certs depends_on: - flyway - nris_backend @@ -139,12 +172,13 @@ services: - filesystem_provider - postgres - redis + - pgsync - jaeger - otelcollector - keycloak - core_api_celery healthcheck: - test: [ "CMD", "curl", "localhost:5000/health" ] + test: ["CMD", "curl", "localhost:5000/health"] interval: 5s timeout: 5s retries: 5 @@ -159,6 +193,7 @@ services: - 5556:5555 volumes: - ./services/core-api:/app + - certs:/certs depends_on: - postgres - redis @@ -173,7 +208,7 @@ services: ports: - "6379:6379" healthcheck: - test: [ "CMD", "redis-cli", "ping" ] + test: ["CMD", "redis-cli", "ping"] interval: 5s timeout: 5s retries: 5 @@ -186,7 +221,7 @@ services: platform: linux/amd64 build: context: services/core-web - command: [ "npm", "run", "serve" ] + command: ["npm", "run", "serve"] volumes: - ./services/core-web/src:/app/src ports: @@ -195,7 +230,7 @@ services: - backend env_file: ./services/core-web/.env healthcheck: - test: [ "CMD", "curl", "localhost:3000/health" ] + test: ["CMD", "curl", "localhost:3000/health"] interval: 15s timeout: 5s retries: 5 @@ -209,7 +244,7 @@ services: platform: linux/amd64 build: context: services/minespace-web - command: [ "npm", "run", "serve" ] + command: ["npm", "run", "serve"] volumes: - ./services/minespace-web/src:/app/src ports: @@ -218,7 +253,7 @@ services: - backend env_file: ./services/minespace-web/.env healthcheck: - test: [ "CMD", "curl", "localhost:3020/health" ] + test: ["CMD", "curl", "localhost:3020/health"] interval: 5s timeout: 5s retries: 5 @@ -239,7 +274,7 @@ services: - nris_migrate env_file: ./services/nris-api/backend/.env healthcheck: - test: [ "CMD", "curl", "localhost:5500/health" ] + test: ["CMD", "curl", "localhost:5500/health"] interval: 5s timeout: 5s retries: 5 @@ -269,6 +304,7 @@ services: ####################### Syncfusion Filesystem Provider Definition ####################### filesystem_provider: container_name: filesystem_provider + platform: linux/amd64 build: context: services/filesystem-provider ports: @@ -329,7 +365,7 @@ services: restart: always container_name: docgen_api image: bcgovimages/common-document-generation-service:2.4.1 - command: [ "npm", "run", "start" ] + command: ["npm", "run", "start"] environment: - SERVER_PORT=3030 - APP_PORT=3030 diff --git a/docker-compose.yaml b/docker-compose.yaml index 1fb9897f17..124cf181c3 100644 --- 
a/docker-compose.yaml +++ b/docker-compose.yaml @@ -2,7 +2,6 @@ version: "3" include: - ./services/permits/docker-compose.yaml services: - ####################### Keycloak for Cypress ####################### keycloak: build: @@ -23,7 +22,7 @@ services: ####################### Open Telemetry ####################### otelcollector: image: otel/opentelemetry-collector - command: [ --config=/etc/otel-collector-config.yaml ] + command: [--config=/etc/otel-collector-config.yaml] volumes: - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml ports: @@ -66,14 +65,43 @@ services: - ./services/postgres/postgresql.conf:/config/postgresql.conf - ./services/postgres/pg_hba.conf:/config/pg_hba.conf - postgres_data:/var/lib/postgresql/data - command: - postgres -c 'config_file=/config/postgresql.conf' -c 'hba_file=/config/pg_hba.conf' + command: postgres -c 'config_file=/config/postgresql.conf' -c 'hba_file=/config/pg_hba.conf' healthcheck: - test: [ "CMD", "pg_isready" ] + test: ["CMD", "pg_isready"] interval: 5s timeout: 5s retries: 5 - + pgsync: + image: toluaina1/pgsync + container_name: mds_pgsync + environment: + - PG_HOST=postgres + - PG_PORT=5432 + - PG_USER=mds + - PG_PASSWORD=test + - PG_DATABASE=mds + - ELASTICSEARCH_HOST=elasticsearch + - ELASTICSEARCH_PORT=9200 + - ELASTICSEARCH_SCHEME=https + - ELASTICSEARCH_USER=elastic + - ELASTICSEARCH_PASSWORD=elastic + - ELASTICSEARCH_VERIFY_CERTS=false + - REDIS_HOST=redis + - REDIS_PORT=6379 + - REDIS_DB=0 + - REDIS_AUTH=redis-password + - REDIS_CHECKPOINT=true + - CONSOLE_LOGGING_HANDLER_MIN_LEVEL=DEBUG + volumes: + - ./services/pgsync/schema.json:/app/schema.json + command: ["-c", "schema.json", "--daemon"] + depends_on: + postgres: + condition: service_healthy + elasticsearch: + condition: service_healthy + redis: + condition: service_healthy postgres_update: container_name: mds_postgres_update user: postgres @@ -81,12 +109,12 @@ services: context: services/postgres dockerfile: Dockerfile_update environment: - - PGUSER=mds - - POSTGRES_PASSWORD=test - - POSTGRES_INITDB_ARGS=-U mds + - PGUSER=mds + - POSTGRES_PASSWORD=test + - POSTGRES_INITDB_ARGS=-U mds volumes: - - postgres_data_bkp:/var/lib/postgresql/13/data - - postgres_data:/var/lib/postgresql/16/data + - postgres_data_bkp:/var/lib/postgresql/13/data + - postgres_data:/var/lib/postgresql/16/data ####################### Flyway Migration Definition ####################### flyway: @@ -142,8 +170,9 @@ services: - otelcollector - keycloak - core_api_celery + - pgsync healthcheck: - test: [ "CMD", "curl", "localhost:5000/health" ] + test: ["CMD", "curl", "localhost:5000/health"] interval: 5s timeout: 5s retries: 5 @@ -171,7 +200,7 @@ services: ports: - "6379:6379" healthcheck: - test: [ "CMD", "redis-cli", "ping" ] + test: ["CMD", "redis-cli", "ping"] interval: 5s timeout: 5s retries: 5 @@ -194,7 +223,7 @@ services: env_file: ./services/core-web/.env network_mode: "host" healthcheck: - test: [ "CMD", "curl", "localhost:3000/health" ] + test: ["CMD", "curl", "localhost:3000/health"] interval: 15s timeout: 5s retries: 5 @@ -218,7 +247,7 @@ services: - 3020:3020 env_file: ./services/minespace-web/.env healthcheck: - test: [ "CMD", "curl", "localhost:3020/health" ] + test: ["CMD", "curl", "localhost:3020/health"] interval: 5s timeout: 5s retries: 5 @@ -240,7 +269,7 @@ services: - nris_migrate env_file: ./services/nris-api/backend/.env healthcheck: - test: [ "CMD", "curl", "localhost:5500/health" ] + test: ["CMD", "curl", "localhost:5500/health"] interval: 5s timeout: 5s retries: 5 @@ -270,6 
+299,7 @@ services: ####################### Syncfusion Filesystem Provider Definition ####################### filesystem_provider: container_name: filesystem_provider + platform: linux/amd64 build: context: services/filesystem-provider ports: @@ -330,7 +360,7 @@ services: restart: always container_name: docgen_api image: bcgovimages/common-document-generation-service:2.5.0 - command: [ "npm", "run", "start" ] + command: ["npm", "run", "start"] environment: - SERVER_PORT=3030 - APP_PORT=3030 diff --git a/services/common/src/components/tailings/BasicInformation.spec.tsx b/services/common/src/components/tailings/BasicInformation.spec.tsx index 08cef5cd1d..0c11210b36 100644 --- a/services/common/src/components/tailings/BasicInformation.spec.tsx +++ b/services/common/src/components/tailings/BasicInformation.spec.tsx @@ -17,7 +17,22 @@ const initialState = { }; + describe("Tailings BasicInformation", () => { + + beforeAll(() => { + // Set timezone to UTC for consistent CI results + // moment renders dates using the default zone set here rather than the machine's local zone + const moment = require('moment-timezone'); + moment.tz.setDefault('UTC'); + }); + + afterAll(() => { + // Clear the UTC default set in beforeAll + const moment = require('moment-timezone'); + moment.tz.setDefault(); // Unset default timezone + }); + it("renders properly", () => { const { container } = render( diff --git a/services/common/src/components/tailings/__snapshots__/ContactDetails.spec.tsx.snap b/services/common/src/components/tailings/__snapshots__/ContactDetails.spec.tsx.snap index 1558a7ab88..7ece71edfb 100644 --- a/services/common/src/components/tailings/__snapshots__/ContactDetails.spec.tsx.snap +++ b/services/common/src/components/tailings/__snapshots__/ContactDetails.spec.tsx.snap @@ -105,7 +105,9 @@ exports[`Tailings ContactDetails renders properly 1`] = `
+ > + mock@example.com +
+ > + 123-456-7890 +
- + N/A
diff --git a/services/common/src/interfaces/search/searchResult.interface.ts b/services/common/src/interfaces/search/searchResult.interface.ts index dfcd5d9e0f..d78f378b16 100644 --- a/services/common/src/interfaces/search/searchResult.interface.ts +++ b/services/common/src/interfaces/search/searchResult.interface.ts @@ -13,6 +13,38 @@ export interface ISearchResult { export interface ISimpleSearchResult { id: string; value: string; + description?: string; + highlight?: string; + mine_guid?: string; +} + +export interface IExplosivesPermitSearchResult { + explosives_permit_guid: string; + explosives_permit_id: string; + application_number: string; + application_status: string; + mine_guid: string; + mine_name: string; + is_closed: boolean; +} + +export interface INowApplicationSearchResult { + now_application_guid: string; + now_number: string; + mine_guid: string; + mine_name: string; + now_application_status_code: string; + notice_of_work_type_code: string; +} + +export interface INodSearchResult { + nod_guid: string; + nod_no: string; + nod_title: string; + mine_guid: string; + mine_name: string; + nod_type: string; + nod_status: string; } export interface ISearchResultList { @@ -21,4 +53,7 @@ export interface ISearchResultList { party: ISearchResult[], permit: ISearchResult[], permit_documents: ISearchResult[], + explosives_permit: ISearchResult[], + now_application: ISearchResult[], + notice_of_departure: ISearchResult[], } \ No newline at end of file diff --git a/services/common/src/redux/actionCreators/searchActionCreator.js b/services/common/src/redux/actionCreators/searchActionCreator.js deleted file mode 100644 index 0dc945a3ad..0000000000 --- a/services/common/src/redux/actionCreators/searchActionCreator.js +++ /dev/null @@ -1,71 +0,0 @@ -import { showLoading, hideLoading } from "react-redux-loading-bar"; -import { ENVIRONMENT } from "@mds/common/constants/environment"; -import { request, success, error, clear } from "../actions/genericActions"; -import { NetworkReducerTypes } from "@mds/common/constants/networkReducerTypes"; -import * as searchActions from "../actions/searchActions"; -import * as API from "@mds/common/constants/API"; -import { createRequestHeader } from "../utils/RequestHeaders"; -import CustomAxios from "../customAxios"; - -export const fetchSearchResults = (searchTerm, searchTypes) => (dispatch) => { - dispatch(request(NetworkReducerTypes.GET_SEARCH_RESULTS)); - dispatch(showLoading()); - return CustomAxios() - .get( - ENVIRONMENT.apiUrl + API.SEARCH({ search_term: searchTerm, search_types: searchTypes }), - createRequestHeader() - ) - .then((response) => { - dispatch(success(NetworkReducerTypes.GET_SEARCH_RESULTS)); - dispatch(searchActions.storeSearchResults(response.data)); - dispatch(hideLoading()); - return response; - }) - .catch(() => { - dispatch(error(NetworkReducerTypes.GET_SEARCH_RESULTS)); - }) - .finally(() => dispatch(hideLoading())); -}; - -export const fetchSearchBarResults = (searchTerm) => (dispatch) => { - dispatch(request(NetworkReducerTypes.GET_SEARCH_BAR_RESULTS)); - dispatch(showLoading()); - return CustomAxios() - .get( - `${ENVIRONMENT.apiUrl + API.SIMPLE_SEARCH}?search_term=${searchTerm}`, - createRequestHeader() - ) - .then((response) => { - dispatch(success(NetworkReducerTypes.GET_SEARCH_BAR_RESULTS)); - dispatch(searchActions.storeSearchBarResults(response.data)); - dispatch(hideLoading()); - }) - .catch(() => dispatch(error(NetworkReducerTypes.GET_SEARCH_BAR_RESULTS))) - .finally(() => dispatch(hideLoading())); -}; - -export 
const fetchSearchOptions = () => (dispatch) => { - dispatch(request(NetworkReducerTypes.GET_SEARCH_OPTIONS)); - dispatch(showLoading()); - return CustomAxios() - .get(ENVIRONMENT.apiUrl + API.SEARCH_OPTIONS, createRequestHeader()) - .then((response) => { - dispatch(success(NetworkReducerTypes.GET_SEARCH_OPTIONS)); - dispatch(searchActions.storeSearchOptions(response.data)); - dispatch(hideLoading()); - }) - .catch(() => dispatch(error(NetworkReducerTypes.GET_SEARCH_OPTIONS))) - .finally(() => dispatch(hideLoading())); -}; - -export const clearSearchBarResults = () => (dispatch) => { - dispatch(clear(NetworkReducerTypes.CLEAR_SEARCH_BAR_RESULTS)); - dispatch(searchActions.clearSearchBarResults()); - dispatch(success(NetworkReducerTypes.CLEAR_SEARCH_BAR_RESULTS)); -}; - -export const clearAllSearchResults = () => (dispatch) => { - dispatch(clear(NetworkReducerTypes.CLEAR_ALL_SEARCH_RESULTS)); - dispatch(searchActions.clearAllSearchResults()); - dispatch(success(NetworkReducerTypes.CLEAR_ALL_SEARCH_RESULTS)); -}; diff --git a/services/common/src/redux/actions/searchActions.js b/services/common/src/redux/actions/searchActions.js deleted file mode 100644 index 6e694f86cb..0000000000 --- a/services/common/src/redux/actions/searchActions.js +++ /dev/null @@ -1,30 +0,0 @@ -import * as ActionTypes from "@mds/common/constants/actionTypes"; - -export const storeSearchOptions = (payload) => ({ - type: ActionTypes.STORE_SEARCH_OPTIONS, - payload, -}); - -export const storeSearchResults = (payload) => ({ - type: ActionTypes.STORE_SEARCH_RESULTS, - payload, -}); - -export const storeSearchBarResults = (payload) => ({ - type: ActionTypes.STORE_SEARCH_BAR_RESULTS, - payload, -}); - -export const clearSearchBarResults = (payload) => ({ - type: ActionTypes.CLEAR_SEARCH_BAR_RESULTS, - payload, -}); - -export const clearAllSearchResults = () => ({ - type: ActionTypes.CLEAR_ALL_SEARCH_RESULTS, -}); - -export const storeSubsetSearchResults = (payload) => ({ - type: ActionTypes.STORE_SUBSET_SEARCH_RESULTS, - payload, -}); diff --git a/services/common/src/redux/reducers.ts b/services/common/src/redux/reducers.ts index a9f38b820c..a557e0cae5 100644 --- a/services/common/src/redux/reducers.ts +++ b/services/common/src/redux/reducers.ts @@ -13,7 +13,6 @@ import noticeOfWorkReducerObject from "./reducers/noticeOfWorkReducer"; import orgbookReducerObject from "./reducers/orgbookReducer"; import permitReducerObject from "./reducers/permitReducer"; import projectReducerObject from "./reducers/projectReducer"; -import searchReducerObject from "./reducers/searchReducer"; import securitiesReducerObject from "./reducers/securitiesReducer"; import staticContentReducerObject from "./reducers/staticContentReducer"; import varianceReducerObject from "./reducers/varianceReducer"; @@ -29,7 +28,6 @@ export const modalReducer = modalReducerObject; export const documentViewerReducer = documentViewerReducerObject; export const noticeOfWorkReducer = noticeOfWorkReducerObject; export const permitReducer = permitReducerObject; -export const searchReducer = searchReducerObject; export const staticContentReducer = staticContentReducerObject; export const varianceReducer = varianceReducerObject; export const securitiesReducer = securitiesReducerObject; diff --git a/services/common/src/redux/reducers/rootReducerShared.ts b/services/common/src/redux/reducers/rootReducerShared.ts index 107202ec39..6e526e0fa2 100644 --- a/services/common/src/redux/reducers/rootReducerShared.ts +++ b/services/common/src/redux/reducers/rootReducerShared.ts @@ 
-18,7 +18,6 @@ import { orgbookReducer, permitReducer, projectReducer, - searchReducer, securitiesReducer, staticContentReducer, varianceReducer, @@ -26,6 +25,7 @@ import { workInformationReducer, } from "../reducers"; import partiesReducer, { partiesReducerType } from "@mds/common/redux/slices/partiesSlice"; +import searchReducer, { searchReducerType } from "../slices/searchSlice"; import reportSubmissionReducer from "@mds/common/components/reports/reportSubmissionSlice"; import verifiableCredentialsReducer from "@mds/common/redux/slices/verifiableCredentialsSlice"; @@ -72,7 +72,6 @@ export const sharedReducer = { ...workInformationReducer, ...permitReducer, ...projectReducer, - ...searchReducer, ...securitiesReducer, ...staticContentReducer, form: formReducer, @@ -98,5 +97,6 @@ export const sharedReducer = { [minespaceReducerType]: minespaceReducer, [reportReducerType]: reportReducer, [partiesReducerType]: partiesReducer, + [searchReducerType]: searchReducer, ...networkReducers }; diff --git a/services/common/src/redux/reducers/searchReducer.ts b/services/common/src/redux/reducers/searchReducer.ts deleted file mode 100644 index 94f215e149..0000000000 --- a/services/common/src/redux/reducers/searchReducer.ts +++ /dev/null @@ -1,63 +0,0 @@ -import * as actionTypes from "@mds/common/constants/actionTypes"; -import { SEARCH } from "@mds/common/constants/reducerTypes"; -import { ISearchResult, ISearchResultList, ISimpleSearchResult } from "@mds/common/interfaces"; - -/** - * @file mineReducer.js - * all data associated with new mine/existing mine records is handled witnin this reducer. - */ - -const initialState = { - searchOptions: [], - searchResults: [], - searchBarResults: [], - searchTerms: [], - searchSubsetResults: [], -}; - -export const searchReducer = (state = initialState, action) => { - switch (action.type) { - case actionTypes.STORE_SEARCH_OPTIONS: - return { - ...state, - searchOptions: action.payload, - }; - case actionTypes.STORE_SEARCH_RESULTS: - return { - ...state, - searchResults: action.payload.search_results, - searchTerms: action.payload.search_terms, - }; - case actionTypes.STORE_SUBSET_SEARCH_RESULTS: - return { - ...state, - searchSubsetResults: action.payload, - }; - case actionTypes.STORE_SEARCH_BAR_RESULTS: - return { - ...state, - searchBarResults: action.payload.search_results, - }; - case actionTypes.CLEAR_SEARCH_BAR_RESULTS: - return { - ...state, - searchBarResults: [], - }; - case actionTypes.CLEAR_ALL_SEARCH_RESULTS: - return initialState; - default: - return state; - } -}; - -const searchReducerObject = { - [SEARCH]: searchReducer, -}; - -export const getSearchOptions = (state) => state[SEARCH].searchOptions; -export const getSearchResults = (state): ISearchResultList => state[SEARCH].searchResults; -export const getSearchBarResults = (state): ISearchResult[] => state[SEARCH].searchBarResults; -export const getSearchTerms = (state) => state[SEARCH].searchTerms; -export const getSearchSubsetResults = (state) => state[SEARCH].searchSubsetResults; - -export default searchReducerObject; diff --git a/services/common/src/redux/selectors/searchSelectors.js b/services/common/src/redux/selectors/searchSelectors.js index f537f48946..9e840ddd44 100644 --- a/services/common/src/redux/selectors/searchSelectors.js +++ b/services/common/src/redux/selectors/searchSelectors.js @@ -1,8 +1,17 @@ -import * as searchReducer from "../reducers/searchReducer"; +import { + getSearchResults, + getSearchFacets, + getSearchTerms, + getSearchBarResults, + getSearchBarFacets, + 
getSearchSubsetResults, +} from "../slices/searchSlice"; -export const { +export { getSearchResults, + getSearchFacets, getSearchTerms, getSearchBarResults, + getSearchBarFacets, getSearchSubsetResults, -} = searchReducer; +}; diff --git a/services/common/src/redux/slices/searchSlice.spec.ts b/services/common/src/redux/slices/searchSlice.spec.ts new file mode 100644 index 0000000000..b25c76f871 --- /dev/null +++ b/services/common/src/redux/slices/searchSlice.spec.ts @@ -0,0 +1,745 @@ +import MockAdapter from "axios-mock-adapter"; +import axios from "axios"; +import { getStore } from "@mds/common/redux/rootState"; +import { ENVIRONMENT } from "@mds/common/constants/environment"; +import * as API from "@mds/common/constants/API"; +import { + storeSearchOptions, + storeSearchResults, + storeSubsetSearchResults, + storeSearchBarResults, + clearSearchBarResults, + clearAllSearchResults, + fetchSearchResults, + fetchSearchBarResults, + fetchSearchOptions, + selectSearchOptions, + selectSearchResults, + selectSearchFacets, + selectSearchBarResults, + selectSearchBarFacets, + selectSearchTerms, + selectSearchSubsetResults, +} from "./searchSlice"; +import * as MOCK from "../../tests/mocks/dataMocks"; + +const mockAxios = new MockAdapter(axios); + +describe("searchSlice", () => { + beforeEach(() => { + mockAxios.reset(); + }); + + describe("initial state", () => { + it("should have correct initial state", () => { + const store = getStore(); + const state = store.getState(); + + expect(selectSearchOptions(state)).toEqual([]); + expect(selectSearchResults(state)).toEqual({ + mine: [], + mine_documents: [], + party: [], + permit: [], + notice_of_departure: [], + explosives_permit: [], + now_application: [], + permit_documents: [], + }); + expect(selectSearchFacets(state)).toEqual({ + mine_region: [], + mine_classification: [], + mine_operation_status: [], + mine_tenure: [], + mine_commodity: [], + has_tsf: [], + verified_status: [], + permit_status: [], + is_exploration: [], + party_type: [], + explosives_permit_status: [], + explosives_permit_closed: [], + nod_type: [], + nod_status: [], + now_application_status: [], + now_type: [], + type: [], + }); + expect(selectSearchBarResults(state)).toEqual([]); + expect(selectSearchBarFacets(state)).toEqual({ + mine: 0, + person: 0, + organization: 0, + permit: 0, + nod: 0, + explosives_permit: 0, + now_application: 0, + mine_documents: 0, + permit_documents: 0, + }); + expect(selectSearchTerms(state)).toEqual([]); + expect(selectSearchSubsetResults(state)).toEqual([]); + }); + }); + + describe("synchronous actions", () => { + describe("storeSearchOptions", () => { + it("should store search options", () => { + const store = getStore(); + const mockOptions = [ + { model_id: "mine", description: "Mines" }, + { model_id: "party", description: "Contacts" }, + ]; + + store.dispatch(storeSearchOptions(mockOptions)); + + expect(selectSearchOptions(store.getState())).toEqual(mockOptions); + }); + }); + + describe("storeSearchResults", () => { + it("should store search results with facets and terms", () => { + const store = getStore(); + const mockData = { + search_results: MOCK.SEARCH_RESULTS_V2 || [], + facets: MOCK.SEARCH_FACETS || {}, + search_terms: ["test", "mine"], + }; + + store.dispatch(storeSearchResults(mockData)); + + expect(selectSearchResults(store.getState())).toEqual(mockData.search_results); + expect(selectSearchFacets(store.getState())).toEqual(mockData.facets); + expect(selectSearchTerms(store.getState())).toEqual(mockData.search_terms); + }); + + 
it("should use default facets if not provided", () => { + const store = getStore(); + const mockData = { + search_results: [], + facets: null, + search_terms: [], + }; + + store.dispatch(storeSearchResults(mockData)); + + const facets = selectSearchFacets(store.getState()); + expect(facets).toHaveProperty("mine_region"); + expect(facets).toHaveProperty("mine_classification"); + }); + }); + + describe("storeSubsetSearchResults", () => { + it("should store subset search results", () => { + const store = getStore(); + const mockSubsetResults = [ + { id: "1", name: "Result 1" }, + { id: "2", name: "Result 2" }, + ]; + + store.dispatch(storeSubsetSearchResults(mockSubsetResults)); + + expect(selectSearchSubsetResults(store.getState())).toEqual(mockSubsetResults); + }); + }); + + describe("storeSearchBarResults", () => { + it("should store search bar results with facets", () => { + const store = getStore(); + const mockBarResults = [ + { + type: "mine", + score: 10, + result: { id: "mine-123", value: "Test Mine" }, + }, + { + type: "party", + score: 8, + result: { id: "party-456", value: "John Doe" }, + }, + ]; + const mockFacets = { + mine: 5, + person: 3, + organization: 2, + permit: 0, + nod: 0, + explosives_permit: 0, + now_application: 0, + mine_documents: 0, + permit_documents: 0, + }; + + store.dispatch( + storeSearchBarResults({ + search_results: mockBarResults, + facets: mockFacets, + }) + ); + + expect(selectSearchBarResults(store.getState())).toEqual(mockBarResults); + expect(selectSearchBarFacets(store.getState())).toEqual(mockFacets); + }); + + it("should use default facets if not provided", () => { + const store = getStore(); + const mockBarResults = [ + { + type: "mine", + score: 10, + result: { id: "mine-123", value: "Test Mine" }, + }, + ]; + + store.dispatch( + storeSearchBarResults({ + search_results: mockBarResults, + facets: null, + }) + ); + + const facets = selectSearchBarFacets(store.getState()); + expect(facets.mine).toBe(0); + expect(facets.person).toBe(0); + }); + }); + + describe("clearSearchBarResults", () => { + it("should clear search bar results and facets", () => { + const store = getStore(); + + // First, add some results + store.dispatch( + storeSearchBarResults({ + search_results: [ + { + type: "mine", + score: 10, + result: { id: "mine-123", value: "Test Mine" }, + }, + ], + facets: { mine: 5, person: 0, organization: 0, permit: 0, nod: 0, explosives_permit: 0, now_application: 0, mine_documents: 0, permit_documents: 0 }, + }) + ); + + // Then clear them + store.dispatch(clearSearchBarResults()); + + expect(selectSearchBarResults(store.getState())).toEqual([]); + expect(selectSearchBarFacets(store.getState())).toEqual({ + mine: 0, + person: 0, + organization: 0, + permit: 0, + nod: 0, + explosives_permit: 0, + now_application: 0, + mine_documents: 0, + permit_documents: 0, + }); + }); + }); + + describe("clearAllSearchResults", () => { + it("should reset all search state to initial values", () => { + const store = getStore(); + + // Add some data + store.dispatch(storeSearchOptions([{ model_id: "mine", description: "Mines" }])); + store.dispatch( + storeSearchResults({ + search_results: MOCK.SEARCH_RESULTS_V2 || [], + facets: {}, + search_terms: ["test"], + }) + ); + store.dispatch( + storeSearchBarResults({ + search_results: [ + { + type: "mine", + score: 10, + result: { id: "mine-123", value: "Test Mine" }, + }, + ], + facets: null, + }) + ); + + // Clear everything + store.dispatch(clearAllSearchResults()); + + 
expect(selectSearchOptions(store.getState())).toEqual([]); + expect(selectSearchResults(store.getState())).toEqual({ + mine: [], + mine_documents: [], + party: [], + permit: [], + notice_of_departure: [], + explosives_permit: [], + now_application: [], + permit_documents: [], + }); + expect(selectSearchBarResults(store.getState())).toEqual([]); + expect(selectSearchTerms(store.getState())).toEqual([]); + }); + }); + }); + + describe("async thunks", () => { + describe("fetchSearchResults", () => { + it("should fetch search results successfully", async () => { + const store = getStore(); + const searchTerm = "test"; + const searchTypes = ["mine", "party"]; + const mockResponse = { + search_results: MOCK.SEARCH_RESULTS_V2 || [], + facets: MOCK.SEARCH_FACETS || {}, + search_terms: ["test"], + }; + + mockAxios.onGet().reply(200, mockResponse); + + await store.dispatch( + fetchSearchResults({ + searchTerm, + searchTypes, + }) + ); + + // When API returns empty array, slice converts it to proper structure + const expectedResults = Array.isArray(mockResponse.search_results) && mockResponse.search_results.length === 0 + ? { + mine: [], + mine_documents: [], + party: [], + permit: [], + notice_of_departure: [], + explosives_permit: [], + now_application: [], + permit_documents: [], + } + : mockResponse.search_results; + + expect(selectSearchResults(store.getState())).toEqual(expectedResults); + expect(selectSearchFacets(store.getState())).toEqual(mockResponse.facets); + expect(selectSearchTerms(store.getState())).toEqual(mockResponse.search_terms); + }); + + it("should handle search with filters", async () => { + const store = getStore(); + const searchTerm = "mine"; + const searchTypes = ["mine"]; + const filters = { mine_region: "SW" }; + + mockAxios.onGet().reply(200, { + search_results: [], + facets: {}, + search_terms: ["mine"], + }); + + await store.dispatch( + fetchSearchResults({ + searchTerm, + searchTypes, + filters, + }) + ); + + // Verify the request was made with filters + expect(mockAxios.history.get[0].url).toBeDefined(); + }); + + it("should handle empty search term", async () => { + const store = getStore(); + + mockAxios.onGet().reply(200, { + search_results: [], + facets: {}, + search_terms: [], + }); + + await store.dispatch( + fetchSearchResults({ + searchTerm: "", + searchTypes: [], + }) + ); + + expect(selectSearchResults(store.getState())).toEqual({ + mine: [], + mine_documents: [], + party: [], + permit: [], + notice_of_departure: [], + explosives_permit: [], + now_application: [], + permit_documents: [], + }); + }); + + it("should handle API errors gracefully", async () => { + const store = getStore(); + + mockAxios.onGet().reply(500, { error: "Internal server error" }); + + try { + await store.dispatch( + fetchSearchResults({ + searchTerm: "test", + searchTypes: ["mine"], + }) + ); + } catch (error) { + // Error should be handled by rejectHandler + } + + // State should remain unchanged on error + expect(selectSearchResults(store.getState())).toEqual({ + mine: [], + mine_documents: [], + party: [], + permit: [], + notice_of_departure: [], + explosives_permit: [], + now_application: [], + permit_documents: [], + }); + }); + + it("should handle network errors", async () => { + const store = getStore(); + + mockAxios.onGet().networkError(); + + try { + await store.dispatch( + fetchSearchResults({ + searchTerm: "test", + searchTypes: ["mine"], + }) + ); + } catch (error) { + // Error should be handled + } + + expect(selectSearchResults(store.getState())).toEqual({ + mine: [], 
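+ // state should remain at its empty initial structure when the request never resolves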
+ mine_documents: [], + party: [], + permit: [], + notice_of_departure: [], + explosives_permit: [], + now_application: [], + permit_documents: [], + }); + }); + }); + + describe("fetchSearchBarResults", () => { + it("should fetch search bar results successfully", async () => { + const store = getStore(); + const searchTerm = "test"; + const mockResponse = { + search_results: [ + { + type: "mine", + score: 10, + result: { id: "mine-123", value: "Test Mine" }, + }, + ], + facets: { mine: 5, person: 0, organization: 0, permit: 0, nod: 0, explosives_permit: 0, now_application: 0, mine_documents: 0, permit_documents: 0 }, + }; + + mockAxios.onGet().reply(200, mockResponse); + + await store.dispatch( + fetchSearchBarResults({ + searchTerm, + }) + ); + + expect(selectSearchBarResults(store.getState())).toEqual(mockResponse.search_results); + expect(selectSearchBarFacets(store.getState())).toEqual(mockResponse.facets); + }); + + it("should fetch with search types filter", async () => { + const store = getStore(); + const searchTerm = "test"; + const searchTypes = ["mine", "party"]; + + mockAxios.onGet().reply(200, { + search_results: [], + facets: null, + }); + + await store.dispatch( + fetchSearchBarResults({ + searchTerm, + searchTypes, + }) + ); + + // Verify the URL includes search types + const requestUrl = mockAxios.history.get[0].url; + expect(requestUrl).toContain("search_term=test"); + expect(requestUrl).toContain("search_types=mine%2Cparty"); + }); + + it("should fetch with mine guid filter", async () => { + const store = getStore(); + const searchTerm = "test"; + const mineGuid = "mine-guid-123"; + + mockAxios.onGet().reply(200, { + search_results: [], + facets: null, + }); + + await store.dispatch( + fetchSearchBarResults({ + searchTerm, + mineGuid, + }) + ); + + // Verify the URL includes mine_guid + const requestUrl = mockAxios.history.get[0].url; + expect(requestUrl).toContain("search_term=test"); + expect(requestUrl).toContain("mine_guid=mine-guid-123"); + }); + + it("should handle special characters in search term", async () => { + const store = getStore(); + const searchTerm = "test & special / chars"; + + mockAxios.onGet().reply(200, { + search_results: [], + facets: null, + }); + + await store.dispatch( + fetchSearchBarResults({ + searchTerm, + }) + ); + + // Verify URL encoding + const requestUrl = mockAxios.history.get[0].url; + expect(requestUrl).toContain("search_term="); + }); + + it("should handle API errors gracefully", async () => { + const store = getStore(); + + mockAxios.onGet().reply(500, { error: "Internal server error" }); + + try { + await store.dispatch( + fetchSearchBarResults({ + searchTerm: "test", + }) + ); + } catch (error) { + // Error should be handled by rejectHandler + } + + expect(selectSearchBarResults(store.getState())).toEqual([]); + }); + }); + + describe("fetchSearchOptions", () => { + it("should fetch search options successfully", async () => { + const store = getStore(); + const mockOptions = [ + { model_id: "mine", description: "Mines" }, + { model_id: "party", description: "Contacts" }, + { model_id: "permit", description: "Permits" }, + ]; + + mockAxios.onGet(ENVIRONMENT.apiUrl + API.SEARCH_OPTIONS).reply(200, mockOptions); + + await store.dispatch(fetchSearchOptions()); + + expect(selectSearchOptions(store.getState())).toEqual(mockOptions); + }); + + it("should handle empty options", async () => { + const store = getStore(); + + mockAxios.onGet(ENVIRONMENT.apiUrl + API.SEARCH_OPTIONS).reply(200, []); + + await 
store.dispatch(fetchSearchOptions()); + + expect(selectSearchOptions(store.getState())).toEqual([]); + }); + + it("should handle API errors gracefully", async () => { + const store = getStore(); + + mockAxios.onGet(ENVIRONMENT.apiUrl + API.SEARCH_OPTIONS).reply(500, { error: "Internal server error" }); + + try { + await store.dispatch(fetchSearchOptions()); + } catch (error) { + // Error should be handled by rejectHandler + } + + expect(selectSearchOptions(store.getState())).toEqual([]); + }); + }); + }); + + describe("selectors", () => { + it("should select correct state slices", () => { + const store = getStore(); + + // Add some test data + const mockOptions = [{ model_id: "mine", description: "Mines" }]; + const mockResults = MOCK.SEARCH_RESULTS_V2 || []; + const mockFacets = MOCK.SEARCH_FACETS || {}; + const mockTerms = ["test", "search"]; + const mockBarResults = [ + { + type: "mine", + score: 10, + result: { id: "mine-123", value: "Test Mine" }, + }, + ]; + const mockBarFacets = { mine: 5, person: 0, organization: 0, permit: 0, nod: 0, explosives_permit: 0, now_application: 0, mine_documents: 0, permit_documents: 0 }; + + store.dispatch(storeSearchOptions(mockOptions)); + store.dispatch( + storeSearchResults({ + search_results: mockResults, + facets: mockFacets, + search_terms: mockTerms, + }) + ); + store.dispatch( + storeSearchBarResults({ + search_results: mockBarResults, + facets: mockBarFacets, + }) + ); + + const state = store.getState(); + + expect(selectSearchOptions(state)).toEqual(mockOptions); + expect(selectSearchResults(state)).toEqual(mockResults); + expect(selectSearchFacets(state)).toEqual(mockFacets); + expect(selectSearchTerms(state)).toEqual(mockTerms); + expect(selectSearchBarResults(state)).toEqual(mockBarResults); + expect(selectSearchBarFacets(state)).toEqual(mockBarFacets); + }); + + it("should handle undefined values gracefully", () => { + const store = getStore(); + const state = store.getState(); + + // Initial state should have default values, not undefined + expect(selectSearchOptions(state)).toBeDefined(); + expect(selectSearchResults(state)).toBeDefined(); + expect(selectSearchFacets(state)).toBeDefined(); + expect(selectSearchTerms(state)).toBeDefined(); + expect(selectSearchBarResults(state)).toBeDefined(); + expect(selectSearchBarFacets(state)).toBeDefined(); + }); + }); + + describe("integration tests", () => { + it("should handle complete search workflow", async () => { + const store = getStore(); + + // 1. Fetch search options + const mockOptions = [ + { model_id: "mine", description: "Mines" }, + { model_id: "party", description: "Contacts" }, + ]; + mockAxios.onGet(ENVIRONMENT.apiUrl + API.SEARCH_OPTIONS).reply(200, mockOptions); + + await store.dispatch(fetchSearchOptions()); + expect(selectSearchOptions(store.getState())).toEqual(mockOptions); + + // 2. Perform a search bar search + const searchTerm = "test"; + const barMockResponse = { + search_results: [ + { + type: "mine", + score: 10, + result: { id: "mine-123", value: "Test Mine" }, + }, + ], + facets: { mine: 1, person: 0, organization: 0, permit: 0, nod: 0, explosives_permit: 0, now_application: 0, mine_documents: 0, permit_documents: 0 }, + }; + mockAxios.onGet().reply(200, barMockResponse); + + await store.dispatch( + fetchSearchBarResults({ + searchTerm, + }) + ); + expect(selectSearchBarResults(store.getState())).toHaveLength(1); + + // 3. Clear search bar results + store.dispatch(clearSearchBarResults()); + expect(selectSearchBarResults(store.getState())).toEqual([]); + + // 4. 
Perform a full search + const fullMockResponse = { + search_results: MOCK.SEARCH_RESULTS_V2 || [], + facets: MOCK.SEARCH_FACETS || {}, + search_terms: ["test"], + }; + mockAxios.onGet().reply(200, fullMockResponse); + + await store.dispatch( + fetchSearchResults({ + searchTerm, + searchTypes: ["mine"], + }) + ); + expect(selectSearchResults(store.getState())).toBeDefined(); + + // 5. Clear all results + store.dispatch(clearAllSearchResults()); + expect(selectSearchOptions(store.getState())).toEqual([]); + expect(selectSearchResults(store.getState())).toEqual({ + mine: [], + mine_documents: [], + party: [], + permit: [], + notice_of_departure: [], + explosives_permit: [], + now_application: [], + permit_documents: [], + }); + }); + + it("should maintain state consistency across multiple operations", async () => { + const store = getStore(); + + // Perform multiple operations + store.dispatch(storeSearchOptions([{ model_id: "mine", description: "Mines" }])); + store.dispatch( + storeSearchResults({ + search_results: [], + facets: {}, + search_terms: ["first"], + }) + ); + store.dispatch( + storeSearchResults({ + search_results: [], + facets: {}, + search_terms: ["second"], + }) + ); + + // Verify state is consistent + expect(selectSearchOptions(store.getState())).toHaveLength(1); + expect(selectSearchTerms(store.getState())).toEqual(["second"]); + }); + }); +}); diff --git a/services/common/src/redux/slices/searchSlice.ts b/services/common/src/redux/slices/searchSlice.ts new file mode 100644 index 0000000000..67a228b6e0 --- /dev/null +++ b/services/common/src/redux/slices/searchSlice.ts @@ -0,0 +1,217 @@ +import { createAppSlice, rejectHandler } from "@mds/common/redux/createAppSlice"; +import { showLoading, hideLoading } from "react-redux-loading-bar"; +import { ENVIRONMENT } from "@mds/common/constants/environment"; +import { createRequestHeader } from "@mds/common/redux/utils/RequestHeaders"; +import CustomAxios from "@mds/common/redux/customAxios"; +import * as API from "@mds/common/constants/API"; +import { ISearchResult, ISearchResultList, ISimpleSearchResult } from "@mds/common/interfaces"; + +export const searchReducerType = "search"; + +export interface SearchState { + searchOptions: any[]; + searchResults: ISearchResultList; + searchFacets: any; + searchBarResults: ISearchResult[]; + searchBarFacets: { mine: number; person: number; organization: number; permit: number; nod: number; explosives_permit: number; now_application: number; mine_documents: number; permit_documents: number }; + searchTerms: any[]; + searchSubsetResults: any[]; +} + +const initialState: SearchState = { + searchOptions: [], + searchResults: { + mine: [], + mine_documents: [], + party: [], + permit: [], + notice_of_departure: [], + explosives_permit: [], + now_application: [], + permit_documents: [], + }, + searchFacets: { + mine_region: [], + mine_classification: [], + mine_operation_status: [], + mine_tenure: [], + mine_commodity: [], + has_tsf: [], + verified_status: [], + permit_status: [], + is_exploration: [], + party_type: [], + explosives_permit_status: [], + explosives_permit_closed: [], + nod_type: [], + nod_status: [], + now_application_status: [], + now_type: [], + type: [] + }, + searchBarResults: [], + searchBarFacets: { mine: 0, person: 0, organization: 0, permit: 0, nod: 0, explosives_permit: 0, now_application: 0, mine_documents: 0, permit_documents: 0 }, + searchTerms: [], + searchSubsetResults: [], +}; + +const searchSlice = createAppSlice({ + name: searchReducerType, + initialState, + 
reducers: (create) => ({ + storeSearchOptions: create.reducer((state, action: { payload: any[] }) => { + state.searchOptions = action.payload; + }), + storeSearchResults: create.reducer((state, action: { payload: { search_results: ISearchResultList; facets: any; search_terms: any[] } }) => { + state.searchResults = action.payload.search_results; + state.searchFacets = action.payload.facets ? JSON.parse(JSON.stringify(action.payload.facets)) : initialState.searchFacets; + state.searchTerms = action.payload.search_terms; + }), + storeSubsetSearchResults: create.reducer((state, action: { payload: any[] }) => { + state.searchSubsetResults = action.payload; + }), + storeSearchBarResults: create.reducer((state, action: { payload: { search_results: ISearchResult[]; facets: any } }) => { + state.searchBarResults = action.payload.search_results; + state.searchBarFacets = action.payload.facets ? { ...action.payload.facets } : initialState.searchBarFacets; + }), + clearSearchBarResults: create.reducer((state) => { + state.searchBarResults = []; + state.searchBarFacets = initialState.searchBarFacets; + }), + clearAllSearchResults: create.reducer(() => { + return initialState; + }), + fetchSearchResults: create.asyncThunk( + async ({ searchTerm, searchTypes, filters = {} }: { searchTerm: string; searchTypes: string[]; filters?: any }, thunkApi) => { + thunkApi.dispatch(showLoading()); + + const params: any = { + search_term: searchTerm, + search_types: searchTypes, + ...filters + }; + + // Remove undefined/null/empty values + Object.keys(params).forEach(key => { + if (params[key] === undefined || params[key] === null || params[key] === '') { + delete params[key]; + } + }); + + const headers = createRequestHeader(); + const response = await CustomAxios().get( + `${ENVIRONMENT.apiUrl}${API.SEARCH(params)}`, + headers + ); + + thunkApi.dispatch(hideLoading()); + return response.data; + }, + { + fulfilled: (state, action) => { + // If search_results is an empty array, use initial state structure + const results = action.payload.search_results; + state.searchResults = (Array.isArray(results) && results.length === 0) + ? initialState.searchResults + : results; + state.searchFacets = action.payload.facets ? JSON.parse(JSON.stringify(action.payload.facets)) : initialState.searchFacets; + state.searchTerms = action.payload.search_terms; + }, + rejected: (state, action) => { + rejectHandler(action); + }, + } + ), + fetchSearchBarResults: create.asyncThunk( + async ({ searchTerm, searchTypes = null, mineGuid = null }: { searchTerm: string; searchTypes?: string[] | null; mineGuid?: string | null }, thunkApi) => { + thunkApi.dispatch(showLoading()); + + let url = `${ENVIRONMENT.apiUrl}${API.SIMPLE_SEARCH}?search_term=${encodeURIComponent(searchTerm)}`; + if (searchTypes && searchTypes.length > 0) { + url += `&search_types=${encodeURIComponent(searchTypes.join(','))}`; + } + if (mineGuid) { + url += `&mine_guid=${encodeURIComponent(mineGuid)}`; + } + + const headers = createRequestHeader(); + const response = await CustomAxios().get(url, headers); + + thunkApi.dispatch(hideLoading()); + return response.data; + }, + { + fulfilled: (state, action) => { + state.searchBarResults = action.payload.search_results; + state.searchBarFacets = action.payload.facets ? 
{ ...action.payload.facets } : initialState.searchBarFacets; + }, + rejected: (state, action) => { + rejectHandler(action); + }, + } + ), + fetchSearchOptions: create.asyncThunk( + async (_, thunkApi) => { + thunkApi.dispatch(showLoading()); + + const headers = createRequestHeader(); + const response = await CustomAxios().get( + `${ENVIRONMENT.apiUrl}${API.SEARCH_OPTIONS}`, + headers + ); + + thunkApi.dispatch(hideLoading()); + return response.data; + }, + { + fulfilled: (state, action) => { + state.searchOptions = action.payload; + }, + rejected: (state, action) => { + rejectHandler(action); + }, + } + ), + }), + selectors: { + selectSearchOptions: (state: SearchState) => state.searchOptions, + selectSearchResults: (state: SearchState) => state.searchResults, + selectSearchFacets: (state: SearchState) => state.searchFacets, + selectSearchBarResults: (state: SearchState) => state.searchBarResults, + selectSearchBarFacets: (state: SearchState) => state.searchBarFacets, + selectSearchTerms: (state: SearchState) => state.searchTerms, + selectSearchSubsetResults: (state: SearchState) => state.searchSubsetResults, + }, +}); + +export const { + storeSearchOptions, + storeSearchResults, + storeSubsetSearchResults, + storeSearchBarResults, + clearSearchBarResults, + clearAllSearchResults, + fetchSearchResults, + fetchSearchBarResults, + fetchSearchOptions, +} = searchSlice.actions; + +export const { + selectSearchOptions, + selectSearchResults, + selectSearchFacets, + selectSearchBarResults, + selectSearchBarFacets, + selectSearchTerms, + selectSearchSubsetResults, +} = searchSlice.selectors; + +export const getSearchOptions = (state: { search: SearchState }) => state.search.searchOptions; +export const getSearchResults = (state: { search: SearchState }) => state.search.searchResults; +export const getSearchFacets = (state: { search: SearchState }) => state.search.searchFacets; +export const getSearchBarResults = (state: { search: SearchState }) => state.search.searchBarResults; +export const getSearchBarFacets = (state: { search: SearchState }) => state.search.searchBarFacets; +export const getSearchTerms = (state: { search: SearchState }) => state.search.searchTerms; +export const getSearchSubsetResults = (state: { search: SearchState }) => state.search.searchSubsetResults; + +export default searchSlice.reducer; diff --git a/services/common/src/tests/mocks/dataMocks.tsx b/services/common/src/tests/mocks/dataMocks.tsx index 3e3abaa549..c9c92b454e 100644 --- a/services/common/src/tests/mocks/dataMocks.tsx +++ b/services/common/src/tests/mocks/dataMocks.tsx @@ -859,39 +859,90 @@ export const PARTY = { party_guid: "18133c75-49ad-4101-85f3-a43e35ae989a", first_name: "First", party_name: "mock name", - party_type_code: "PER", - address: [{}], - business_role_appts: [{}], + name: "First mock name", + party_type_code: PartyTypeCodeEnum.PER, + email: "mock@example.com", + phone_no: "123-456-7890", + address: { + address_type_code: "CAN", + address_line_1: "123 Main St", + city: "Vancouver", + sub_division_code: "BC", + post_code: "V6B 1A1", + }, + business_role_appts: [], + mine_party_appt: [], + now_party_appt: [], + organization: null, + organization_guid: null, + party_orgbook_entity: null, }, "18145c75-49ad-0101-85f3-a43e45ae989a": { party_guid: "18145c75-49ad-0101-85f3-a43e45ae989a", + first_name: "Mock", party_name: "mock Two", - party_type_code: "PER", - address: [{}], - business_role_appts: [{}], + name: "Mock mock Two", + party_type_code: PartyTypeCodeEnum.PER, + email: "mocktwo@example.com", + 
phone_no: "123-456-7891", + address: { + address_type_code: "CAN", + address_line_1: "456 Oak Ave", + city: "Victoria", + sub_division_code: "BC", + post_code: "V8W 1A2", + }, + business_role_appts: [], + mine_party_appt: [], + now_party_appt: [], + organization: null, + organization_guid: null, + party_orgbook_entity: null, }, }, partiesWithAppointments: { "18133c75-49ad-4101-85f3-a43e35ae989a": { party_guid: "18133c75-49ad-4101-85f3-a43e35ae989a", party_name: "mock name", - party_type_code: "PER", - address: [{}], + party_type_code: PartyTypeCodeEnum.PER, + email: "mock@example.com", + phone_no: "123-456-7890", + address: { + address_type_code: "CAN", + address_line_1: "123 Main St", + city: "Vancouver", + sub_division_code: "BC", + post_code: "V6B 1A1", + }, mine_party_appt: [], - business_role_appts: [{}], + business_role_appts: [], first_name: "mock name", name: "other mock name", now_party_appt: [], + organization: null, + organization_guid: null, + party_orgbook_entity: null, }, "18145c75-49ad-0101-85f3-a43e45ae989a": { party_guid: "18145c75-49ad-0101-85f3-a43e45ae989a", party_name: "mock Two", - party_type_code: "PER", - address: [{}], + party_type_code: PartyTypeCodeEnum.PER, + email: "mocktwo@example.com", + phone_no: "123-456-7891", + address: { + address_type_code: "CAN", + address_line_1: "456 Oak Ave", + city: "Victoria", + sub_division_code: "BC", + post_code: "V8W 1A2", + }, mine_party_appt: [], first_name: "mock name", name: "other mock name", now_party_appt: [], + organization: null, + organization_guid: null, + party_orgbook_entity: null, business_role_appts: [ { end_date: "2020-08-29T00:00:00", @@ -5476,7 +5527,7 @@ export const PERMITTEE = { name: "Tiyudoveh Higesewawa", party_guid: "1c7da2c4-10d5-4c9f-994a-96427aa0c69b", party_name: "Higesewawa", - party_type_code: "PER", + party_type_code: PartyTypeCodeEnum.PER, phone_ext: null, phone_no: "123-123-1234", }, @@ -6440,7 +6491,7 @@ export const PARTYRELATIONSHIPS = [ end_date: "2019-01-03", party: { party_guid: "43f513af-1142-443b-a1e6-f14ef857f4ea", - party_type_code: "ORG", + party_type_code: PartyTypeCodeEnum.ORG, phone_no: "123-123-1234", phone_ext: null, email: "test@test.test", @@ -6462,7 +6513,7 @@ export const PARTYRELATIONSHIPS = [ end_date: "2019-01-03", party: { party_guid: "43f513af-1142-443b-a1e6-f14ef857f4ea", - party_type_code: "ORG", + party_type_code: PartyTypeCodeEnum.ORG, phone_no: "123-123-1234", phone_ext: null, email: "test@test.test", @@ -6483,7 +6534,7 @@ export const PARTYRELATIONSHIPS = [ end_date: "2019-01-03", party: { party_guid: "97b59b9c-8576-47cb-9a04-d7d0340730d5", - party_type_code: "PER", + party_type_code: PartyTypeCodeEnum.PER, phone_no: "123-123-1234", phone_ext: null, email: "test@test.test", @@ -6507,7 +6558,7 @@ export const PARTY_RELATIONSHIP = [ end_date: "2019-01-03", party: { party_guid: "43f513af-1142-443b-a1e6-f14ef857f4ea", - party_type_code: "ORG", + party_type_code: PartyTypeCodeEnum.ORG, phone_no: "123-123-1234", phone_ext: null, email: "test@test.test", diff --git a/services/common/src/tests/mocks/searchMockData.ts b/services/common/src/tests/mocks/searchMockData.ts new file mode 100644 index 0000000000..2371f0ac62 --- /dev/null +++ b/services/common/src/tests/mocks/searchMockData.ts @@ -0,0 +1,303 @@ +/** + * Mock data for V2 search functionality with facets and filters + */ + +import { ISearchResultList } from "@mds/common/interfaces/search/searchResult.interface"; +import { MINES, PARTY, PERMITS, MINEDOCUMENTS, EXPLOSIVES_PERMITS, NOW } from "./dataMocks"; + +export 
const SEARCH_RESULTS_V2: ISearchResultList = { + mine: [ + { + type: "mine", + score: 10.5, + result: MINES[0], + }, + { + type: "mine", + score: 8.3, + result: MINES[1], + }, + ], + party: [ + { + type: "party", + score: 9.2, + result: PARTY.parties["18133c75-49ad-4101-85f3-a43e35ae989a"], + }, + ], + permit: [ + { + type: "permit", + score: 8.7, + result: PERMITS[0], + }, + ], + mine_documents: [ + { + type: "mine_documents", + score: 6.5, + result: MINEDOCUMENTS[0], + }, + ], + permit_documents: [ + { + type: "permit_documents", + score: 6.3, + result: MINEDOCUMENTS[0], + }, + ], + explosives_permit: [ + { + type: "explosives_permit", + score: 7.2, + result: EXPLOSIVES_PERMITS[0], + }, + ], + now_application: [ + { + type: "now_application", + score: 6.8, + result: NOW[0], + } + ], + notice_of_departure: [ + { + type: "notice_of_departure", + score: 5.9, + result: { + nod_guid: "test-nod-guid-1", + nod_no: "NOD-001", + nod_title: "Test Notice of Departure", + mine_name: "Test Mine One", + nod_status: "pending_review", + mine_guid: "", + nod_type: "" + }, + }, + ], +}; + +export const SEARCH_FACETS = { + mine_region: [ + { key: "SW", count: 15 }, + { key: "NE", count: 10 }, + { key: "NW", count: 8 }, + { key: "SE", count: 5 }, + { key: "SC", count: 12 }, + ], + mine_classification: [ + { key: "Major Mine", count: 20 }, + { key: "Regional Mine", count: 30 }, + ], + mine_operation_status: [ + { key: "OP", count: 25 }, + { key: "CLD", count: 15 }, + { key: "NS", count: 10 }, + ], + mine_tenure: [ + { key: "PLR", count: 18 }, + { key: "MIN", count: 12 }, + { key: "BCL", count: 8 }, + ], + mine_commodity: [ + { key: "CU", count: 15 }, + { key: "AU", count: 12 }, + { key: "AG", count: 8 }, + { key: "ZN", count: 6 }, + ], + has_tsf: [ + { key: "Yes", count: 22 }, + { key: "No", count: 28 }, + ], + verified_status: [ + { key: "Verified", count: 35 }, + { key: "Unverified", count: 15 }, + ], + permit_status: [ + { key: "O", count: 28 }, + { key: "C", count: 12 }, + { key: "D", count: 5 }, + ], + is_exploration: [ + { key: "Yes", count: 18 }, + { key: "No", count: 27 }, + ], + party_type: [ + { key: "PER", count: 45 }, + { key: "ORG", count: 30 }, + ], + explosives_permit_status: [ + { key: "APP", count: 10 }, + { key: "REC", count: 15 }, + { key: "REJ", count: 3 }, + ], + explosives_permit_closed: [ + { key: "Yes", count: 8 }, + { key: "No", count: 20 }, + ], + nod_type: [ + { key: "temporary", count: 12 }, + { key: "permanent", count: 8 }, + ], + nod_status: [ + { key: "pending_review", count: 10 }, + { key: "approved", count: 15 }, + { key: "rejected", count: 3 }, + ], + now_application_status: [ + { key: "REC", count: 20 }, + { key: "REF", count: 10 }, + { key: "AIA", count: 8 }, + ], + now_type: [ + { key: "QIM", count: 15 }, + { key: "SAG", count: 12 }, + { key: "QCA", count: 8 }, + ], + type: [ + { key: "mine", count: 50 }, + { key: "party", count: 75 }, + { key: "permit", count: 45 }, + { key: "permit_documents", count: 22 }, + { key: "mine_documents", count: 35 }, + { key: "explosives_permit", count: 28 }, + { key: "now_application", count: 38 }, + { key: "notice_of_departure", count: 20 }, + ], +}; + +export const SEARCH_OPTIONS = [ + { model_id: "mine", description: "Mines" }, + { model_id: "party", description: "Contacts" }, + { model_id: "permit", description: "Permits" }, + { model_id: "permit_documents", description: "Permit Documents" }, + { model_id: "mine_documents", description: "Mine Documents" }, + { model_id: "explosives_permit", description: "Explosives Permits" }, + { 
model_id: "now_application", description: "NoW Applications" }, + { model_id: "notice_of_departure", description: "Notices of Departure" }, +]; + +export const SIMPLE_SEARCH_RESULTS = [ + { + type: "mine", + score: 10.0, + result: { + id: "mine-123", + value: "Test Mine", + description: "M-001", + mine_guid: "test-mine-guid-1", + }, + }, + { + type: "person", + score: 8.5, + result: { + id: "party-123", + value: "John Doe", + description: "john.doe@example.com | 555-1234", + mine_guid: null, + }, + }, + { + type: "organization", + score: 7.5, + result: { + id: "party-456", + value: "ACME Corporation", + description: "contact@acme.com", + mine_guid: null, + }, + }, + { + type: "permit", + score: 9.0, + result: { + id: "permit-789", + value: "P-001", + description: "John Doe | Status: O", + mine_guid: "test-mine-guid-1", + }, + }, +]; + +export const SIMPLE_SEARCH_FACETS = { + mine: 50, + person: 45, + organization: 30, + permit: 45, + nod: 20, + explosives_permit: 28, + now_application: 38, +}; + +export const EMPTY_SEARCH_RESULTS = { + mine: [], + party: [], + permit: [], + permit_documents: [], + mine_documents: [], + explosives_permit: [], + now_application: [], + notice_of_departure: [], +}; + +export const EMPTY_SEARCH_FACETS = { + mine_region: [], + mine_classification: [], + mine_operation_status: [], + mine_tenure: [], + mine_commodity: [], + has_tsf: [], + verified_status: [], + permit_status: [], + party_type: [], + type: [], +}; + +// Search parameters for testing +export const SEARCH_PARAMS = { + basic: { + search_term: "test", + }, + withTypes: { + search_term: "test", + search_types: ["mine", "party"], + }, + withFilters: { + search_term: "test", + mine_region: ["SW", "NE"], + permit_status: ["O"], + }, + withMultipleFilters: { + search_term: "test mine", + mine_region: ["SW"], + mine_classification: ["Major Mine"], + permit_status: ["O"], + party_type: ["PER"], + }, + empty: { + search_term: "nonexistent", + }, +}; + +// URL query string examples +export const SEARCH_URLS = { + basic: "?q=test", + withTypes: "?q=test&search_types=mine,party", + withFilters: "?q=test&mine_region=SW,NE&permit_status=O", + withPagination: "?q=test&page=2", + scoped: "?q=test&mine_guid=test-mine-guid-1", +}; + + +export default { + SEARCH_RESULTS_V2, + SEARCH_FACETS, + SEARCH_OPTIONS, + SIMPLE_SEARCH_RESULTS, + SIMPLE_SEARCH_FACETS, + EMPTY_SEARCH_RESULTS, + EMPTY_SEARCH_FACETS, + SEARCH_PARAMS, + SEARCH_URLS, +}; diff --git a/services/common/src/utils/featureFlag.ts b/services/common/src/utils/featureFlag.ts index 2696e90932..94a8d8cd4d 100644 --- a/services/common/src/utils/featureFlag.ts +++ b/services/common/src/utils/featureFlag.ts @@ -35,7 +35,8 @@ export enum Feature { STANDARD_PERMIT_CONDITIONS_EDITOR = "standard_permit_conditions_new_editor", NOW_PERMIT_CONDITIONS_EDITOR = "now_permit_conditions_new_editor", REPORT_MANAGEMENT_V2 = "report_management_v2", - MINESPACE_SIGNUP = "minespace_signup" + MINESPACE_SIGNUP = "minespace_signup", + GLOBAL_SEARCH_V2 = "global_search_v2", } export const initializeFlagsmith = async (flagsmithUrl, flagsmithKey) => { diff --git a/services/common/src/utils/feature_flags.json b/services/common/src/utils/feature_flags.json index 3484d900f3..f88bb7cfb0 100644 --- a/services/common/src/utils/feature_flags.json +++ b/services/common/src/utils/feature_flags.json @@ -30,5 +30,6 @@ "help_guide": true, "PERMIT_CONDITION_SEARCH": true, "report_management_v2": true, - "minespace_signup": true + "minespace_signup": true, + "global_search_v2": true } \ No newline at 
end of file diff --git a/services/core-api/.env-example b/services/core-api/.env-example index 23fe883351..e20709ff8a 100644 --- a/services/core-api/.env-example +++ b/services/core-api/.env-example @@ -149,4 +149,7 @@ JWT_OIDC_TEST_PRIVATE_KEY_PEM="-----BEGIN RSA PRIVATE KEY-----\nMIICXQIBAAKBgQDf AMS_BEARER_TOKEN=e3yC6nkR0XgdnPvIbbXdqiYas82fnXechxSFEt9CT8UfIyJQVpjMusDX4dbrljQsQAqdeEwWcCFvMhYskOI+Ks5tg0GzeruWXYTv37NM3dA= AMS_URL=https://test.j200.gov.bc.ca/ws/EMCORE -CORE_WEB_URL=http://localhost:3000 \ No newline at end of file +CORE_WEB_URL=http://localhost:3000 + +ELASTICSEARCH_USERNAME=elastic +ELASTICSEARCH_PASSWORD=elastic \ No newline at end of file diff --git a/services/core-api/app/api/mines/permits/permit_conditions/tasks.py b/services/core-api/app/api/mines/permits/permit_conditions/tasks.py index 62834354e8..392d5e3f01 100644 --- a/services/core-api/app/api/mines/permits/permit_conditions/tasks.py +++ b/services/core-api/app/api/mines/permits/permit_conditions/tasks.py @@ -2,11 +2,12 @@ import datetime import io -from app.api.tasks.celery_task_base import TaskBase -from app.cli_commands.export_permit_conditions import headers, export_permit_conditions from app.api.search.search.permit_search_service import PermitSearchService +from app.api.tasks.celery_task_base import TaskBase +from app.cli_commands.export_permit_conditions import export_permit_conditions, headers from app.tasks.celery import celery + @celery.task(base=TaskBase) def export_and_index_permit_amendments(permit_amendment_guids, is_manual=False): """ diff --git a/services/core-api/app/api/search/elasticsearch/elastic_search_service.py b/services/core-api/app/api/search/elasticsearch/elastic_search_service.py new file mode 100644 index 0000000000..2a297f0c38 --- /dev/null +++ b/services/core-api/app/api/search/elasticsearch/elastic_search_service.py @@ -0,0 +1,27 @@ +import logging + +from elasticsearch import Elasticsearch +from flask import current_app + + +class ElasticSearchService: + _client = None + + @classmethod + def get_client(cls): + if cls._client is None: + config = current_app.config + current_app.logger.info(f"Connecting to Elasticsearch at {config['ELASTICSEARCH_URL']}") + current_app.logger.info(f"Using CA certs at {config['ELASTICSEARCH_CA_CERTS']}") + cls._client = Elasticsearch( + config['ELASTICSEARCH_URL'], + basic_auth=(config['ELASTICSEARCH_USERNAME'], config['ELASTICSEARCH_PASSWORD']), + ca_certs=config['ELASTICSEARCH_CA_CERTS'], + verify_certs=False + ) + return cls._client + + @classmethod + def search(cls, index_name, query, size=10): + client = cls.get_client() + return client.search(index=index_name, body=query, size=size, ignore_unavailable=True) diff --git a/services/core-api/app/api/search/response_models.py b/services/core-api/app/api/search/response_models.py index ae36f53341..95904586e6 100644 --- a/services/core-api/app/api/search/response_models.py +++ b/services/core-api/app/api/search/response_models.py @@ -1,3 +1,9 @@ +from app.api.mines.response_models import ( + MINE_TSF_MODEL, + MINE_TYPE_MODEL, + MINE_VERIFIED_MODEL, + MINE_WORK_INFORMATION_MODEL, +) from app.api.parties.response_models import ( PARTY_BUSINESS_ROLE_APPT, PARTY_ORGBOOK_ENTITY, @@ -6,13 +12,16 @@ from flask_restx import fields SEARCH_RESULT_MODEL = api.model('SearchResult', { - 'score': fields.Integer, + 'score': fields.Float, 'type': fields.String, }) SIMPLE_SEARCH_MODEL = api.model('SimpleSearchResult', { 'id': fields.String, 'value': fields.String, + 'description': fields.String, + 'highlight': 
fields.String, + 'mine_guid': fields.String, }) MINE_MODEL = api.model('Mine_simple ', { @@ -23,7 +32,7 @@ PERMIT_SEARCH_MODEL = api.model( 'Permit', { 'permit_guid': fields.String, - 'mine': fields.List(fields.Nested(MINE_MODEL), attribute=lambda x: x._all_mines), + 'mine': fields.List(fields.Nested(MINE_MODEL), attribute=lambda x: x.get('mine', []) if isinstance(x, dict) else x._all_mines), 'permit_no': fields.String, 'current_permittee': fields.String, }) @@ -34,7 +43,7 @@ 'start_date': fields.Date, 'end_date': fields.Date, 'mine': fields.Nested(MINE_MODEL), - 'permit_no': fields.String(attribute='permit.permit_no'), + 'permit_no': fields.String(attribute=lambda x: x.get('permit_no') if isinstance(x, dict) else (x.permit.permit_no if hasattr(x, 'permit') and x.permit else None)), }) MINE_STATUS_MODEL = api.model('MineStatus', { @@ -50,6 +59,11 @@ 'mine_permit': fields.List(fields.Nested(PERMIT_SEARCH_MODEL)), 'mine_status': fields.Nested(MINE_STATUS_MODEL), 'mms_alias': fields.String, + 'major_mine_ind': fields.Boolean, + 'mine_type': fields.List(fields.Nested(MINE_TYPE_MODEL)), + 'mine_tailings_storage_facilities': fields.List(fields.Nested(MINE_TSF_MODEL)), + 'mine_work_information': fields.Nested(MINE_WORK_INFORMATION_MODEL), + 'verified_status': fields.Nested(MINE_VERIFIED_MODEL), }) PARTY_ADDRESS = api.model( @@ -123,6 +137,50 @@ 'result': fields.Nested(SIMPLE_SEARCH_MODEL), }) +EXPLOSIVES_PERMIT_SEARCH_MODEL = api.model( + 'ExplosivesPermit', { + 'explosives_permit_guid': fields.String, + 'explosives_permit_id': fields.String, + 'application_number': fields.String, + 'application_status': fields.String, + 'mine_guid': fields.String, + 'mine_name': fields.String, + 'is_closed': fields.Boolean, + }) + +NOW_APPLICATION_SEARCH_MODEL = api.model( + 'NowApplication', { + 'now_application_guid': fields.String, + 'now_number': fields.String, + 'mine_guid': fields.String, + 'mine_name': fields.String, + 'now_application_status_code': fields.String, + 'notice_of_work_type_code': fields.String, + }) + +NOD_SEARCH_MODEL = api.model( + 'NoticeOfDeparture', { + 'nod_guid': fields.String, + 'nod_no': fields.String, + 'nod_title': fields.String, + 'mine_guid': fields.String, + 'mine_name': fields.String, + 'nod_type': fields.String, + 'nod_status': fields.String, + }) + +EXPLOSIVES_PERMIT_SEARCH_RESULT_MODEL = api.inherit('ExplosivesPermitSearchResult', SEARCH_RESULT_MODEL, { + 'result': fields.Nested(EXPLOSIVES_PERMIT_SEARCH_MODEL), +}) + +NOW_APPLICATION_SEARCH_RESULT_MODEL = api.inherit('NowApplicationSearchResult', SEARCH_RESULT_MODEL, { + 'result': fields.Nested(NOW_APPLICATION_SEARCH_MODEL), +}) + +NOD_SEARCH_RESULT_MODEL = api.inherit('NodSearchResult', SEARCH_RESULT_MODEL, { + 'result': fields.Nested(NOD_SEARCH_MODEL), +}) + SEARCH_RESULTS_LIST_MODEL = api.model( 'SearchResultList', { 'mine': fields.List(fields.Nested(MINE_SEARCH_RESULT_MODEL)), @@ -130,16 +188,68 @@ 'permit': fields.List(fields.Nested(PERMIT_SEARCH_RESULT_MODEL)), 'mine_documents': fields.List(fields.Nested(MINE_DOCUMENT_SEARCH_RESULT_MODEL)), 'permit_documents': fields.List(fields.Nested(PERMIT_DOCUMENT_SEARCH_RESULT_MODEL)), + 'explosives_permit': fields.List(fields.Nested(EXPLOSIVES_PERMIT_SEARCH_RESULT_MODEL)), + 'now_application': fields.List(fields.Nested(NOW_APPLICATION_SEARCH_RESULT_MODEL)), + 'notice_of_departure': fields.List(fields.Nested(NOD_SEARCH_RESULT_MODEL)), + }) + +SEARCH_FACET_BUCKET_MODEL = api.model( + 'SearchFacetBucket', { + 'key': fields.String, + 'count': fields.Integer, + }) + 
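+# Each facet key below maps to a list of these buckets, matching the {'key': ..., 'count': ...} dicts built by search_facets.extract_facets().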
+SEARCH_FACETS_MODEL = api.model( + 'SearchFacets', { + # Mine facets + 'mine_region': fields.List(fields.Nested(SEARCH_FACET_BUCKET_MODEL)), + 'mine_classification': fields.List(fields.Nested(SEARCH_FACET_BUCKET_MODEL)), + 'mine_operation_status': fields.List(fields.Nested(SEARCH_FACET_BUCKET_MODEL)), + 'mine_tenure': fields.List(fields.Nested(SEARCH_FACET_BUCKET_MODEL)), + 'mine_commodity': fields.List(fields.Nested(SEARCH_FACET_BUCKET_MODEL)), + 'has_tsf': fields.List(fields.Nested(SEARCH_FACET_BUCKET_MODEL)), + 'verified_status': fields.List(fields.Nested(SEARCH_FACET_BUCKET_MODEL)), + # Permit facets + 'permit_status': fields.List(fields.Nested(SEARCH_FACET_BUCKET_MODEL)), + 'is_exploration': fields.List(fields.Nested(SEARCH_FACET_BUCKET_MODEL)), + # Party facets + 'party_type': fields.List(fields.Nested(SEARCH_FACET_BUCKET_MODEL)), + # Explosives permit facets + 'explosives_permit_status': fields.List(fields.Nested(SEARCH_FACET_BUCKET_MODEL)), + 'explosives_permit_closed': fields.List(fields.Nested(SEARCH_FACET_BUCKET_MODEL)), + # NOD facets + 'nod_type': fields.List(fields.Nested(SEARCH_FACET_BUCKET_MODEL)), + 'nod_status': fields.List(fields.Nested(SEARCH_FACET_BUCKET_MODEL)), + # NoW facets + 'now_application_status': fields.List(fields.Nested(SEARCH_FACET_BUCKET_MODEL)), + 'now_type': fields.List(fields.Nested(SEARCH_FACET_BUCKET_MODEL)), + # Type facet + 'type': fields.List(fields.Nested(SEARCH_FACET_BUCKET_MODEL)), }) SEARCH_RESULT_RETURN_MODEL = api.model( 'SearchResultReturn', { 'search_terms': fields.List(fields.String), 'search_results': fields.Nested(SEARCH_RESULTS_LIST_MODEL), + 'facets': fields.Nested(SEARCH_FACETS_MODEL), + }) + +SIMPLE_SEARCH_FACETS_MODEL = api.model( + 'SimpleSearchFacets', { + 'mine': fields.Integer, + 'person': fields.Integer, + 'organization': fields.Integer, + 'permit': fields.Integer, + 'nod': fields.Integer, + 'explosives_permit': fields.Integer, + 'now_application': fields.Integer, + 'mine_documents': fields.Integer, + 'permit_documents': fields.Integer, }) SIMPLE_SEARCH_RESULT_RETURN_MODEL = api.model( 'SimpleSearchResultReturn', { 'search_terms': fields.List(fields.String), 'search_results': fields.List(fields.Nested(SIMPLE_SEARCH_RESULT_MODEL)), + 'facets': fields.Nested(SIMPLE_SEARCH_FACETS_MODEL), }) diff --git a/services/core-api/app/api/search/search/global_search_service.py b/services/core-api/app/api/search/search/global_search_service.py new file mode 100644 index 0000000000..a94bcb2c0f --- /dev/null +++ b/services/core-api/app/api/search/search/global_search_service.py @@ -0,0 +1,159 @@ +"""Global search service for executing searches against Elasticsearch.""" + +import regex +from flask import current_app + +from app.api.search.elasticsearch.elastic_search_service import ElasticSearchService +from .search_constants import TYPE_TO_INDEX, ES_AGGREGATIONS, FACET_KEYS, FILTER_PARAMS, SEARCH_FIELDS +from .search_filters import build_filter_clauses +from .search_facets import extract_facets +from .search_transformers import transform_es_results + + +def parse_csv_param(value): + """Parse comma-separated parameter into list.""" + return [v.strip() for v in value.split(',')] if value else [] + + +def parse_search_terms(search_term): + """Parse search term into individual terms.""" + reg_exp = regex.compile(r'\'.*?\' | ".*?" 
| \S+ ', regex.VERBOSE) + return [term.replace('"', '') for term in reg_exp.findall(search_term)] + + +def parse_filters(request_args): + """Parse filter parameters from request args.""" + return {param: parse_csv_param(request_args.get(param)) for param in FILTER_PARAMS} + + +def build_search_query(search_term, filter_clauses): + """Build the complete ES search query.""" + if not search_term or search_term == "*": + return { + "query": { + "bool": { + "must": [{"match_all": {}}], + "filter": filter_clauses + } + }, + "sort": [{"_score": "desc"}], + "aggs": ES_AGGREGATIONS + } + + # Highlight configuration (optional usage for now) + highlight_config = { + "fields": { + "mine_name": {}, + "mine_no": {}, + "mms_alias": {}, + "mine.mine_name": {}, + "mine.mine_no": {}, + "party_name": {}, + "first_name": {}, + "email": {}, + "permit_no": {}, + "permit_number": {}, + "nod_no": {}, + "nod_title": {}, + "nod_description": {}, + "now_number": {}, + "application.property_name": {}, + "document_name": {}, + "description": {}, + "application_number": {}, + }, + "pre_tags": [""], + "post_tags": [""], + "fragment_size": 150, + "number_of_fragments": 1 + } + + should_clauses = [ + { + "multi_match": { + "query": search_term, + "fields": SEARCH_FIELDS, + "type": "phrase_prefix" + } + } + ] + + # If search term is longer, add fuzzy match + if len(search_term) >= 3: + should_clauses.append({ + "multi_match": { + "query": search_term, + "fields": SEARCH_FIELDS, + "fuzziness": "AUTO" + } + }) + + return { + "query": { + "bool": { + "should": should_clauses, + "minimum_should_match": 1, + "filter": filter_clauses + } + }, + "highlight": highlight_config, + "aggs": ES_AGGREGATIONS + } + + +def get_empty_results(search_types): + """Get empty results structure.""" + return { + 'results': {t: [] for t in search_types}, + 'facets': {k: [] for k in FACET_KEYS} + } + + +class GlobalSearchService: + """Service for executing global searches.""" + + @staticmethod + def search(search_term, search_types, filters, size=200): + """ + Execute a global search. 
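+ Falls back to empty results (get_empty_results) when no requested type maps to a known index or when Elasticsearch raises.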
+ + Args: + search_term: The search query string + search_types: List of types to search (e.g., ['mine', 'party', 'permit']) + filters: Dict of filter parameters + size: Maximum number of results to return + + Returns: + Dict with 'results' and 'facets' keys + """ + indices = [TYPE_TO_INDEX[t] for t in search_types if t in TYPE_TO_INDEX] + + if not indices: + return get_empty_results(search_types) + + try: + filter_clauses = build_filter_clauses(filters) + query = build_search_query(search_term, filter_clauses) + + current_app.logger.info(f"Searching ES indices: {','.join(indices)} for: {search_term}") + + es_results = ElasticSearchService.search(','.join(set(indices)), query, size=size) + hits = es_results['hits']['hits'] + + current_app.logger.info(f"ES returned {len(hits)} hits") + + facets = extract_facets(es_results.get('aggregations', {})) + results = transform_es_results(hits) + + # Ensure all requested types have entries + for t in search_types: + if t not in results: + results[t] = [] + + return {'results': results, 'facets': facets} + + except Exception as e: + current_app.logger.error(f"Elasticsearch error: {e}") + import traceback + current_app.logger.error(traceback.format_exc()) + return get_empty_results(search_types) diff --git a/services/core-api/app/api/search/search/resources/search.py b/services/core-api/app/api/search/search/resources/search.py index dfb5969bcb..1f265ca49f 100644 --- a/services/core-api/app/api/search/search/resources/search.py +++ b/services/core-api/app/api/search/search/resources/search.py @@ -1,29 +1,74 @@ +"""Search API resources.""" + import regex from concurrent.futures import ThreadPoolExecutor, as_completed -from flask_restx import Resource from flask import request, current_app +from flask_restx import Resource -from app.extensions import db, api -from app.api.utils.access_decorators import requires_role_view_all, requires_role_mine_edit -from app.api.utils.resources_mixins import UserMixin -from app.api.utils.search import search_targets, append_result, execute_search, SearchResult from app.api.search.response_models import SEARCH_RESULT_RETURN_MODEL +from app.api.utils.access_decorators import requires_role_view_all +from app.api.utils.resources_mixins import UserMixin +from app.api.utils.search import search_targets, execute_search +from app.api.utils.feature_flag import Feature, is_feature_enabled +from app.extensions import api, db +from ..global_search_service import GlobalSearchService, parse_search_terms, parse_filters class SearchOptionsResource(Resource, UserMixin): + """Resource for retrieving available search options.""" + @requires_role_view_all def get(self): - options = [] - for type, type_config in search_targets.items(): - options.append({'model_id': type, 'description': type_config['description']}) - - return options + """Get list of searchable types with descriptions.""" + return [ + {'model_id': type_key, 'description': config['description']} + for type_key, config in search_targets.items() + ] class SearchResource(Resource, UserMixin): + """Resource for executing global searches.""" + @requires_role_view_all @api.marshal_with(SEARCH_RESULT_RETURN_MODEL, 200) def get(self): + """ + Execute a global search across mines, parties, permits, and documents. + + Query Parameters: + search_term: The search query string + search_types: Comma-separated list of types to search (optional) + Various filter parameters (mine_region, permit_status, etc.) 
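+
+ Example (illustrative values):
+ ?search_term=copper&search_types=mine,party&mine_region=SW,NE&permit_status=O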
+ + Returns: + search_terms: List of parsed search terms + search_results: Dict of results grouped by type + facets: Dict of facet counts for filtering (v2 only) + """ + if is_feature_enabled(Feature.GLOBAL_SEARCH_V2): + return self._search_v2() + else: + return self._search_v1() + + def _search_v2(self): + """New Elasticsearch-based search implementation.""" + search_term = request.args.get('search_term', '', type=str) + search_types_param = request.args.get('search_types', None, type=str) + search_types = search_types_param.split(',') if search_types_param else list(search_targets.keys()) + + search_terms = parse_search_terms(search_term) + filters = parse_filters(request.args) + + search_result = GlobalSearchService.search(search_term, search_types, filters) + + return { + 'search_terms': search_terms, + 'search_results': search_result['results'], + 'facets': search_result['facets'] + } + + def _search_v1(self): + """Original ThreadPoolExecutor-based search implementation.""" search_results = [] app = current_app._get_current_object() diff --git a/services/core-api/app/api/search/search/resources/simple_search.py b/services/core-api/app/api/search/search/resources/simple_search.py index 8ab5d17245..9c3cccebe9 100644 --- a/services/core-api/app/api/search/search/resources/simple_search.py +++ b/services/core-api/app/api/search/search/resources/simple_search.py @@ -1,28 +1,72 @@ -import regex +""" +Simple Search Resource + +REST API endpoint for simple search functionality. +Delegates business logic to SimpleSearchService - thin resource layer. +""" + +import logging from concurrent.futures import ThreadPoolExecutor, as_completed + +from flask import current_app, request from flask_restx import Resource -from flask import request, current_app -from app.extensions import db, api -from app.api.utils.access_decorators import requires_role_view_all, requires_role_mine_edit -from app.api.utils.resources_mixins import UserMixin -from app.api.utils.search import simple_search_targets, append_result, execute_search, SearchResult from app.api.search.response_models import SIMPLE_SEARCH_RESULT_RETURN_MODEL +from app.api.utils.access_decorators import requires_role_view_all +from app.api.utils.resources_mixins import UserMixin +from app.api.utils.search import execute_search, simple_search_targets +from app.api.utils.feature_flag import Feature, is_feature_enabled +from app.extensions import api + +# Import services +from ..simple_search_service import SimpleSearchService +from ..global_search_service import parse_search_terms + +logger = logging.getLogger(__name__) class SimpleSearchResource(Resource, UserMixin): + """ + REST API resource for simple search. + + Responsibilities: + - Handle HTTP request/response + - Extract and validate request parameters + - Delegate to SimpleSearchService for business logic + - Return formatted response + """ + @requires_role_view_all @api.marshal_with(SIMPLE_SEARCH_RESULT_RETURN_MODEL, 200) def get(self): + """ + GET /search/simple + + Execute a simple search query. + + Query Parameters: + search_term: Text to search for + search_types: Optional comma-separated list of types to filter by + mine_guid: Optional mine GUID to scope the search + + Returns: + dict with search_terms, search_results, and facets (V2 only) + """ + if is_feature_enabled(Feature.GLOBAL_SEARCH_V2): + return self._search_v2() + else: + return self._search_v1() + + def _search_v1(self): + """ + V1 search implementation - ThreadPoolExecutor-based. 
+ Kept for backward compatibility when V2 feature flag is off. + """ search_results = [] app = current_app._get_current_object() search_term = request.args.get('search_term', None, type=str) - - # Split incoming search query by space to search by individual words - reg_exp = regex.compile(r'\'.*?\' | ".*?" | \S+ ', regex.VERBOSE) - search_terms = reg_exp.findall(search_term) - search_terms = [term.replace('"', '') for term in search_terms] + search_terms = parse_search_terms(search_term) if search_term else [] with ThreadPoolExecutor(max_workers=50) as executor: task_list = [] @@ -48,4 +92,24 @@ def get(self): search_results.sort(key=lambda x: x.score, reverse=True) search_results = search_results[0:4] - return {'search_terms': search_terms, 'search_results': search_results} \ No newline at end of file + return {'search_terms': search_terms, 'search_results': search_results} + + def _search_v2(self): + """ + V2 search implementation - delegates to SimpleSearchService. + + Thin resource method that only handles HTTP concerns: + - Extract request parameters + - Delegate to service layer + - Return response + """ + # Extract request parameters + search_term = request.args.get('search_term', None, type=str) + search_types = request.args.get('search_types', None, type=str) + mine_guid = request.args.get('mine_guid', None, type=str) + + # Instantiate service (allows for easier mocking in tests) + search_service = SimpleSearchService() + + # Delegate all business logic to service layer + return search_service.execute_search(search_term, search_types, mine_guid) diff --git a/services/core-api/app/api/search/search/search_constants.py b/services/core-api/app/api/search/search/search_constants.py new file mode 100644 index 0000000000..d0f3ba8870 --- /dev/null +++ b/services/core-api/app/api/search/search/search_constants.py @@ -0,0 +1,114 @@ +"""Constants and configuration for search functionality.""" + +TYPE_TO_INDEX = { + 'mine': 'mines', + 'party': 'parties', + 'permit': 'mine_permits', + 'mine_documents': 'documents', + 'notice_of_departure': 'notices_of_departure', + 'explosives_permit': 'explosives_permits', + 'now_application': 'now_applications', +} + +INDEX_TO_TYPE = {v: k for k, v in TYPE_TO_INDEX.items()} + +# Define searchable fields with boosting +SEARCH_FIELDS = [ + # Mine fields + "mine_name^3", + "mine_no^3", + "mms_alias^2", + "mine.mine_name^2", + "mine.mine_no^2", + # Party/contact fields + "party_name^3", + "first_name^2", + "email^2", + "phone_no", + # Permit fields + "permit_no^3", + "permit_number^3", + "application_number^2", + # NOD fields + "nod_no^3", + "nod_title^3", + "nod_description", + # NOW fields + "now_number^3", + "application.property_name^2", + # Document fields + "document_name^2", + # Description fields + "description", + # Catch-all + "*" +] + +FACET_KEYS = [ + 'mine_region', 'mine_classification', 'mine_operation_status', + 'mine_tenure', 'mine_commodity', 'has_tsf', 'verified_status', + 'permit_status', 'is_exploration', 'party_type', + 'explosives_permit_status', 'explosives_permit_closed', + 'nod_type', 'nod_status', 'now_application_status', 'now_type', 'type' +] + +FILTER_PARAMS = [ + 'mine_region', 'mine_classification', 'mine_operation_status', + 'mine_tenure', 'mine_commodity', 'has_tsf', 'verified_status', + 'permit_status', 'is_exploration', 'party_type', + 'explosives_permit_status', 'explosives_permit_closed', + 'nod_type', 'nod_status', 'now_application_status', 'now_type' +] + +ES_AGGREGATIONS = { + "by_index": {"terms": {"field": "_index", 
"size": 10}}, + "mine_region": {"terms": {"field": "mine_region.keyword", "size": 20, "missing": "Unknown"}}, + "major_mine_ind": {"terms": {"field": "major_mine_ind", "size": 10}}, + "mine_operation_status": { + "nested": {"path": "mine_status"}, + "aggs": { + "status_codes": { + "nested": {"path": "mine_status.status_xref"}, + "aggs": { + "codes": {"terms": {"field": "mine_status.status_xref.mine_operation_status_code.keyword", "size": 20}} + } + } + } + }, + "mine_tenure": { + "nested": {"path": "mine_types"}, + "aggs": {"tenure_codes": {"terms": {"field": "mine_types.mine_tenure_type_code.keyword", "size": 20}}} + }, + "mine_commodity": { + "nested": {"path": "mine_types"}, + "aggs": { + "details": { + "nested": {"path": "mine_types.mine_type_details"}, + "aggs": {"commodity_codes": {"terms": {"field": "mine_types.mine_type_details.mine_commodity_code.keyword", "size": 30}}} + } + } + }, + "has_tsf": { + "nested": {"path": "tailings_storage_facilities"}, + "aggs": {"count": {"value_count": {"field": "tailings_storage_facilities.mine_tailings_storage_facility_guid"}}} + }, + "verified_status": { + "nested": {"path": "verified_status"}, + "aggs": {"healthy": {"terms": {"field": "verified_status.healthy_ind", "size": 10}}} + }, + "permit_status": {"terms": {"field": "permit_status_code.keyword", "size": 20}}, + "is_exploration": {"terms": {"field": "is_exploration", "size": 10}}, + "party_type": {"terms": {"field": "party_type_code.keyword", "size": 10}}, + "explosives_permit_status": {"terms": {"field": "application_status.keyword", "size": 20}}, + "explosives_permit_closed": {"terms": {"field": "is_closed", "size": 10}}, + "nod_type": {"terms": {"field": "nod_type.keyword", "size": 20}}, + "nod_status": {"terms": {"field": "nod_status.keyword", "size": 20}}, + "now_application_status": { + "nested": {"path": "application"}, + "aggs": {"status_codes": {"terms": {"field": "application.now_application_status_code.keyword", "size": 20}}} + }, + "now_type": { + "nested": {"path": "application"}, + "aggs": {"type_codes": {"terms": {"field": "application.notice_of_work_type_code.keyword", "size": 20}}} + }, +} diff --git a/services/core-api/app/api/search/search/search_facets.py b/services/core-api/app/api/search/search/search_facets.py new file mode 100644 index 0000000000..fc3cea983f --- /dev/null +++ b/services/core-api/app/api/search/search/search_facets.py @@ -0,0 +1,225 @@ +"""Facet extraction from Elasticsearch aggregations.""" + +from .search_constants import FACET_KEYS, INDEX_TO_TYPE + + +def extract_simple_type_facets(aggs): + """ + Extract simple facet counts by document type from ES aggregations. + + Used by simple search to get counts for autocomplete/preview results. + Returns counts grouped by result type (mine, person, organization, etc.). 
+ + Args: + aggs: Elasticsearch aggregations response with by_index terms aggregation + + Returns: + Dict with counts per type: { + 'mine': 0, 'person': 0, 'organization': 0, 'permit': 0, + 'nod': 0, 'explosives_permit': 0, 'now_application': 0, + 'mine_documents': 0, 'permit_documents': 0 + } + """ + facets = { + 'mine': 0, 'person': 0, 'organization': 0, 'permit': 0, + 'nod': 0, 'explosives_permit': 0, 'now_application': 0, + 'mine_documents': 0, 'permit_documents': 0 + } + + # Map ES index names to facet keys + index_to_facet = { + 'mines': 'mine', + 'mine_permits': 'permit', + 'notices_of_departure': 'nod', + 'explosives_permits': 'explosives_permit', + 'now_applications': 'now_application', + 'documents': 'mine_documents' + } + + for bucket in _extract_buckets(aggs, 'by_index'): + index_name = bucket['key'] + doc_count = bucket['doc_count'] + + if index_name in index_to_facet: + facets[index_to_facet[index_name]] = doc_count + elif index_name == 'parties': + # Split parties by type (person vs organization) + for party_bucket in _extract_buckets(bucket, 'by_party_type'): + party_type = party_bucket['key'] + if party_type == 'PER': + facets['person'] = party_bucket['doc_count'] + elif party_type == 'ORG': + facets['organization'] = party_bucket['doc_count'] + + return facets + + +# Predefined values for facets that should always appear +PREDEFINED_FACETS = { + 'mine_classification': ['Major Mine', 'Regional Mine'], + 'has_tsf': ['Has TSF', 'No TSF'], + 'verified_status': ['Verified', 'Unverified'], + 'is_exploration': ['Exploration', 'Non-Exploration'], + 'party_type': ['Person', 'Organization'], + 'explosives_permit_closed': ['Open', 'Closed'], + 'type': ['mine', 'party', 'permit', 'mine_documents', 'notice_of_departure', 'explosives_permit', 'now_application'], +} + + +def _extract_buckets(aggs, key, nested_path=None): + """Extract buckets from aggregation, handling nested paths.""" + data = aggs.get(key, {}) + if nested_path: + for path in nested_path: + data = data.get(path, {}) + return data.get('buckets', []) + + +def _parse_boolean_bucket(bucket, true_label, false_label): + """Parse a boolean aggregation bucket.""" + key = bucket.get('key') + key_as_string = bucket.get('key_as_string', '') + is_true = key_as_string == 'true' or key == True or key == 1 + return {'key': true_label if is_true else false_label, 'count': bucket['doc_count']} + + +def _ensure_predefined_values(facet_list, facet_key): + """Ensure all predefined values exist in the facet list, adding 0 counts for missing ones.""" + if facet_key not in PREDEFINED_FACETS: + return facet_list + + existing_keys = {item['key'] for item in facet_list} + for predefined_key in PREDEFINED_FACETS[facet_key]: + if predefined_key not in existing_keys: + facet_list.append({'key': predefined_key, 'count': 0}) + + return facet_list + + +def _append_bucket_facets(facets, facet_key, aggs, agg_key, nested_path=None, transform=None, filter_fn=None): + """Append buckets from an aggregation to a facet list.""" + for bucket in _extract_buckets(aggs, agg_key, nested_path): + if filter_fn and not filter_fn(bucket): + continue + item = transform(bucket) if transform else {'key': bucket['key'], 'count': bucket['doc_count']} + facets[facet_key].append(item) + + +def _append_boolean_facets(facets, facet_key, aggs, agg_key, true_label, false_label, nested_path=None): + """Append boolean bucket facets with labels.""" + _append_bucket_facets( + facets, + facet_key, + aggs, + agg_key, + nested_path=nested_path, + transform=lambda b: 
_parse_boolean_bucket(b, true_label, false_label) + ) + + +def extract_facets(aggs): + """Extract facet data from ES aggregations.""" + facets = {k: [] for k in FACET_KEYS} + + # Type facets (by index) + for bucket in _extract_buckets(aggs, 'by_index'): + type_name = INDEX_TO_TYPE.get(bucket['key'], bucket['key']) + facets['type'].append({'key': type_name, 'count': bucket['doc_count']}) + facets['type'] = _ensure_predefined_values(facets['type'], 'type') + + # Mine region + _append_bucket_facets( + facets, + 'mine_region', + aggs, + 'mine_region', + filter_fn=lambda b: b['key'] != 'Unknown' + ) + + # Classification (major vs regional) + _append_boolean_facets(facets, 'mine_classification', aggs, 'major_mine_ind', 'Major Mine', 'Regional Mine') + facets['mine_classification'] = _ensure_predefined_values(facets['mine_classification'], 'mine_classification') + + # Operation status (nested) + _append_bucket_facets( + facets, + 'mine_operation_status', + aggs, + 'mine_operation_status', + nested_path=['status_codes', 'codes'] + ) + + # Tenure (nested) + _append_bucket_facets(facets, 'mine_tenure', aggs, 'mine_tenure', nested_path=['tenure_codes']) + + # Commodity (nested) + _append_bucket_facets( + facets, + 'mine_commodity', + aggs, + 'mine_commodity', + nested_path=['details', 'commodity_codes'], + filter_fn=lambda b: b['key'] + ) + + # TSF + tsf_count = aggs.get('has_tsf', {}).get('count', {}).get('value', 0) + total_mines = sum(b['doc_count'] for b in _extract_buckets(aggs, 'by_index') if b['key'] == 'mines') + facets['has_tsf'].append({'key': 'Has TSF', 'count': tsf_count}) + facets['has_tsf'].append({'key': 'No TSF', 'count': max(0, total_mines - tsf_count)}) + + # Verified status (nested) + _append_boolean_facets( + facets, + 'verified_status', + aggs, + 'verified_status', + 'Verified', + 'Unverified', + nested_path=['healthy'] + ) + facets['verified_status'] = _ensure_predefined_values(facets['verified_status'], 'verified_status') + + # Permit status + _append_bucket_facets(facets, 'permit_status', aggs, 'permit_status') + + # Is exploration + _append_boolean_facets(facets, 'is_exploration', aggs, 'is_exploration', 'Exploration', 'Non-Exploration') + facets['is_exploration'] = _ensure_predefined_values(facets['is_exploration'], 'is_exploration') + + # Party type + _append_bucket_facets( + facets, + 'party_type', + aggs, + 'party_type', + transform=lambda b: {'key': {'ORG': 'Organization', 'PER': 'Person'}.get(b['key'], b['key']), 'count': b['doc_count']} + ) + facets['party_type'] = _ensure_predefined_values(facets['party_type'], 'party_type') + + # Explosives permit status + _append_bucket_facets(facets, 'explosives_permit_status', aggs, 'explosives_permit_status') + + # Explosives permit closed + _append_boolean_facets(facets, 'explosives_permit_closed', aggs, 'explosives_permit_closed', 'Closed', 'Open') + facets['explosives_permit_closed'] = _ensure_predefined_values(facets['explosives_permit_closed'], 'explosives_permit_closed') + + # NOD type + _append_bucket_facets(facets, 'nod_type', aggs, 'nod_type') + + # NOD status + _append_bucket_facets(facets, 'nod_status', aggs, 'nod_status') + + # NoW application status (nested) + _append_bucket_facets( + facets, + 'now_application_status', + aggs, + 'now_application_status', + nested_path=['status_codes'] + ) + + # NoW type (nested) + _append_bucket_facets(facets, 'now_type', aggs, 'now_type', nested_path=['type_codes']) + + return facets diff --git a/services/core-api/app/api/search/search/search_filters.py 
b/services/core-api/app/api/search/search/search_filters.py new file mode 100644 index 0000000000..70b3b5c112 --- /dev/null +++ b/services/core-api/app/api/search/search/search_filters.py @@ -0,0 +1,189 @@ +"""Filter builders for Elasticsearch queries.""" + + +def build_deleted_filter(): + """Build filter for deleted_ind that handles missing field.""" + return { + "bool": { + "should": [ + {"term": {"deleted_ind": False}}, + {"bool": {"must_not": {"exists": {"field": "deleted_ind"}}}} + ], + "minimum_should_match": 1 + } + } + + +def build_mine_guid_filter(mine_guid): + """ + Build filter for mine_guid scoping across different indices. + + Handles different field locations: + - mines index: mine_guid direct field + - permits index: mine_guids array field (from mine_permit_xref) + - nod/explosives/now indices: mine_guid direct field or mine.mine_guid nested + + Uses both raw and .keyword variants for compatibility with different field mappings. + + Args: + mine_guid: The mine GUID to filter by + + Returns: + Elasticsearch bool filter with should clauses for all possible locations + """ + return { + "bool": { + "should": [ + {"term": {"mine_guid": mine_guid}}, + {"term": {"mine_guid.keyword": mine_guid}}, + {"term": {"mine_guids": mine_guid}}, + {"term": {"mine_guids.keyword": mine_guid}}, + {"term": {"mine.mine_guid": mine_guid}}, + {"term": {"mine.mine_guid.keyword": mine_guid}}, + ], + "minimum_should_match": 1 + } + } + + +def build_terms_filter(field, values): + """Build simple terms filter.""" + return {"terms": {field: values}} + + +def build_nested_filter(path, query): + """Build nested filter.""" + return {"nested": {"path": path, "query": query}} + + +def build_boolean_filter(field, value_map, values): + """Build filter for boolean fields with string mappings.""" + bool_values = [] + for v in values: + if v in value_map: + bool_values.append(value_map[v]) + return {"terms": {field: bool_values}} if bool_values else None + + +def _append_terms_filter(clauses, filters, filter_key, field): + if filters.get(filter_key): + clauses.append(build_terms_filter(field, filters[filter_key])) + + +def _append_nested_terms_filter(clauses, filters, filter_key, path, field): + if filters.get(filter_key): + clauses.append(build_nested_filter(path, {"terms": {field: filters[filter_key]}})) + + +def _append_boolean_terms_filter(clauses, filters, filter_key, field, value_map): + if filters.get(filter_key): + clause = build_boolean_filter(field, value_map, filters[filter_key]) + if clause: + clauses.append(clause) + + +def _append_has_tsf_filters(clauses, filters): + if not filters.get('has_tsf'): + return + exists_clause = build_nested_filter( + "tailings_storage_facilities", + {"exists": {"field": "tailings_storage_facilities.mine_tailings_storage_facility_guid"}} + ) + for tsf_filter in filters['has_tsf']: + if tsf_filter == 'Has TSF': + clauses.append(exists_clause) + elif tsf_filter == 'No TSF': + clauses.append({"bool": {"must_not": exists_clause}}) + + +def _append_verified_status_filters(clauses, filters): + if not filters.get('verified_status'): + return + for status in filters['verified_status']: + clauses.append(build_nested_filter( + "verified_status", + {"term": {"verified_status.healthy_ind": status == 'Verified'}} + )) + + +def _append_party_type_filters(clauses, filters): + if not filters.get('party_type'): + return + type_codes = [] + for pt in filters['party_type']: + type_codes.append({'Organization': 'ORG', 'Person': 'PER'}.get(pt, pt)) + 
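+ # Facet labels ('Person'/'Organization') are mapped back to party type codes before the terms filter is built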
clauses.append(build_terms_filter("party_type_code.keyword", type_codes)) + + +def build_filter_clauses(filters): + """Build ES filter clauses from filter parameters.""" + clauses = [build_deleted_filter()] + + _append_terms_filter(clauses, filters, 'mine_region', "mine_region.keyword") + _append_boolean_terms_filter( + clauses, + filters, + 'mine_classification', + "major_mine_ind", + {'Major Mine': True, 'Regional Mine': False} + ) + _append_nested_terms_filter( + clauses, + filters, + 'mine_tenure', + "mine_types", + "mine_types.mine_tenure_type_code.keyword" + ) + if filters.get('mine_commodity'): + clauses.append(build_nested_filter( + "mine_types", + build_nested_filter( + "mine_types.mine_type_details", + {"terms": {"mine_types.mine_type_details.mine_commodity_code.keyword": filters['mine_commodity']}} + ) + )) + _append_terms_filter(clauses, filters, 'permit_status', "permit_status_code.keyword") + if filters.get('mine_operation_status'): + clauses.append(build_nested_filter( + "mine_status", + build_nested_filter( + "mine_status.status_xref", + {"terms": {"mine_status.status_xref.mine_operation_status_code.keyword": filters['mine_operation_status']}} + ) + )) + _append_has_tsf_filters(clauses, filters) + _append_verified_status_filters(clauses, filters) + _append_boolean_terms_filter( + clauses, + filters, + 'is_exploration', + "is_exploration", + {'Exploration': True, 'Non-Exploration': False} + ) + _append_party_type_filters(clauses, filters) + _append_terms_filter(clauses, filters, 'explosives_permit_status', "application_status.keyword") + _append_boolean_terms_filter( + clauses, + filters, + 'explosives_permit_closed', + "is_closed", + {'Closed': True, 'Open': False} + ) + _append_terms_filter(clauses, filters, 'nod_type', "nod_type.keyword") + _append_terms_filter(clauses, filters, 'nod_status', "nod_status.keyword") + _append_nested_terms_filter( + clauses, + filters, + 'now_application_status', + "application", + "application.now_application_status_code.keyword" + ) + _append_nested_terms_filter( + clauses, + filters, + 'now_type', + "application", + "application.notice_of_work_type_code.keyword" + ) + + return clauses diff --git a/services/core-api/app/api/search/search/search_transformers.py b/services/core-api/app/api/search/search/search_transformers.py new file mode 100644 index 0000000000..0d544a3d3f --- /dev/null +++ b/services/core-api/app/api/search/search/search_transformers.py @@ -0,0 +1,192 @@ +"""Transformers for converting ES hits to API response format using Flask-RESTX marshalling.""" + +from flask_restx import marshal +from app.api.search.response_models import ( + MINE_SEARCH_RESULT_MODEL, + PARTY_SEARCH_RESULT_MODEL, + PERMIT_SEARCH_RESULT_MODEL, + MINE_DOCUMENT_SEARCH_RESULT_MODEL, + EXPLOSIVES_PERMIT_SEARCH_RESULT_MODEL, + NOW_APPLICATION_SEARCH_RESULT_MODEL, + NOD_SEARCH_RESULT_MODEL, +) +from .search_constants import INDEX_TO_TYPE + + +def prepare_mine_source(source): + """Prepare mine source data for marshalling.""" + # Extract status labels from nested structure + status_labels = [] + mine_status = source.get('mine_status', []) + if mine_status: + for status in mine_status if isinstance(mine_status, list) else [mine_status]: + xref = status.get('status_xref', {}) + if xref and xref.get('mine_operation_status_code'): + status_labels.append(xref['mine_operation_status_code']) + + # Prepare the source dict with correct field names for the model + prepared = dict(source) + if status_labels: + prepared['mine_status'] = {'status_labels': status_labels} + 
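+ # Rename indexed fields to the names the mine search response model expects (mine_types -> mine_type, etc.)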
prepared['mine_type'] = source.get('mine_types', []) + prepared['mine_tailings_storage_facilities'] = source.get('tailings_storage_facilities', []) + prepared['mine_work_information'] = source.get('work_information') + + return prepared + + +def prepare_party_source(source): + """Prepare party source data for marshalling.""" + first_name = source.get('first_name', '') + party_name = source.get('party_name', '') + + # Transform mine_party_appt relationships from indexed data + # Note: Mine and permit details are left empty to avoid DB queries during search + mine_party_appts = source.get('mine_party_appt', []) + transformed_appts = [] + + if mine_party_appts: + if not isinstance(mine_party_appts, list): + mine_party_appts = [mine_party_appts] + + for appt in mine_party_appts: + if not appt: + continue + + transformed_appts.append({ + 'mine_party_appt_guid': appt.get('mine_party_appt_guid'), + 'mine_party_appt_type_code': appt.get('mine_party_appt_type_code'), + 'start_date': appt.get('start_date'), + 'end_date': appt.get('end_date'), + 'mine': None, + 'permit_no': None, + }) + + prepared = dict(source) + prepared['name'] = f"{first_name} {party_name}".strip() if first_name else party_name + prepared['party_orgbook_entity'] = None + prepared['business_role_appts'] = [] + prepared['mine_party_appt'] = transformed_appts + prepared['address'] = [] + + return prepared + + +def prepare_permit_source(source): + """Prepare permit source data for marshalling.""" + permittees = source.get('permittees', []) + current_permittee = None + if permittees: + first_permittee = permittees[0] if isinstance(permittees, list) else permittees + if first_permittee: + first_name = first_permittee.get('first_name', '') + party_name = first_permittee.get('party_name', '') + current_permittee = f"{first_name} {party_name}".strip() if first_name else party_name + + mine_guids = source.get('mine_guids', []) + mines = [] + if mine_guids: + for guid in (mine_guids if isinstance(mine_guids, list) else [mine_guids]): + mines.append({'mine_guid': guid, 'mine_name': '', 'mine_no': ''}) + + prepared = dict(source) + prepared['current_permittee'] = current_permittee + prepared['mine'] = mines + + return prepared + + +def prepare_document_source(source): + """Prepare document source data for marshalling.""" + mine_info = source.get('mine', {}) + + prepared = dict(source) + prepared['mine_name'] = mine_info.get('mine_name') if mine_info else None + + return prepared + + +def prepare_explosives_permit_source(source): + """Prepare explosives permit source data for marshalling.""" + mine_info = source.get('mine', {}) + + prepared = dict(source) + prepared['mine_name'] = mine_info.get('mine_name') if mine_info else source.get('mine_name') + + return prepared + + +def prepare_now_application_source(source): + """Prepare NoW application source data for marshalling.""" + mine_info = source.get('mine', {}) + application = source.get('application', {}) + + prepared = dict(source) + prepared['mine_name'] = mine_info.get('mine_name') if mine_info else source.get('mine_name') + prepared['now_application_status_code'] = application.get('now_application_status_code') if application else source.get('now_application_status_code') + prepared['notice_of_work_type_code'] = application.get('notice_of_work_type_code') if application else source.get('notice_of_work_type_code') + + return prepared + + +def prepare_nod_source(source): + """Prepare NOD source data for marshalling.""" + mine_info = source.get('mine', {}) + + prepared = dict(source) + 
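+ # Prefer mine_name from the nested mine object, falling back to the document's own mine_name, so NOD_SEARCH_MODEL can resolve it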
prepared['mine_name'] = mine_info.get('mine_name') if mine_info else source.get('mine_name') + + return prepared + + +# Mapping of document types to their prepare functions +PREPARE_FUNCTIONS = { + 'mine': prepare_mine_source, + 'party': prepare_party_source, + 'permit': prepare_permit_source, + 'mine_documents': prepare_document_source, + 'explosives_permit': prepare_explosives_permit_source, + 'now_application': prepare_now_application_source, + 'notice_of_departure': prepare_nod_source, +} + +# Mapping of document types to their search result models +SEARCH_RESULT_MODELS = { + 'mine': MINE_SEARCH_RESULT_MODEL, + 'party': PARTY_SEARCH_RESULT_MODEL, + 'permit': PERMIT_SEARCH_RESULT_MODEL, + 'mine_documents': MINE_DOCUMENT_SEARCH_RESULT_MODEL, + 'explosives_permit': EXPLOSIVES_PERMIT_SEARCH_RESULT_MODEL, + 'now_application': NOW_APPLICATION_SEARCH_RESULT_MODEL, + 'notice_of_departure': NOD_SEARCH_RESULT_MODEL, +} + + +def transform_es_results(hits): + """Transform ES hits into grouped results by type using Flask-RESTX marshalling.""" + results = {} + + for hit in hits: + doc_type = INDEX_TO_TYPE.get(hit['_index']) + if not doc_type or doc_type not in SEARCH_RESULT_MODELS: + continue + + if doc_type not in results: + results[doc_type] = [] + + # Prepare the source data for the specific type + prepare_fn = PREPARE_FUNCTIONS.get(doc_type) + prepared_source = prepare_fn(hit['_source']) if prepare_fn else hit['_source'] + + # Create the search result dict with score, type, and result + search_result = { + 'score': hit['_score'], + 'type': doc_type, + 'result': prepared_source + } + + # Marshal using the appropriate search result model + marshalled_result = marshal(search_result, SEARCH_RESULT_MODELS[doc_type]) + results[doc_type].append(marshalled_result) + + return results diff --git a/services/core-api/app/api/search/search/simple_search_service.py b/services/core-api/app/api/search/search/simple_search_service.py new file mode 100644 index 0000000000..6125299f14 --- /dev/null +++ b/services/core-api/app/api/search/search/simple_search_service.py @@ -0,0 +1,411 @@ +""" +Simple Search Service + +Business logic for simple search functionality. +Separated from the REST resource layer for better testability and maintainability. +""" + +import logging + +from app.api.search.elasticsearch.elastic_search_service import ElasticSearchService +from app.api.utils.search import SearchResult, simple_search_targets +from flask import current_app + +from .global_search_service import parse_csv_param + +# Import shared constants and utilities +from .search_constants import INDEX_TO_TYPE, SEARCH_FIELDS, TYPE_TO_INDEX +from .search_facets import extract_simple_type_facets +from .search_filters import build_deleted_filter, build_mine_guid_filter + +logger = logging.getLogger(__name__) + +# Result type to internal type mapping +RESULT_TYPE_TO_INDEX = { + 'mine': 'mine', + 'person': 'party', + 'organization': 'party', + 'permit': 'permit', + 'nod': 'notice_of_departure', + 'explosives_permit': 'explosives_permit', + 'now_application': 'now_application' +} + +# Derive highlight configuration from search fields +HIGHLIGHT_FIELDS = { + field.split('^')[0]: {} for field in SEARCH_FIELDS if '^' in field or field not in ['*'] +} + + +class SimpleSearchService: + """ + Service class for simple search business logic. 
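+ Stateless: SimpleSearchResource._search_v2 creates a fresh instance per request and calls execute_search().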
+ + Handles: + - Search execution and coordination + - Query building + - Result processing + - Facet extraction + """ + + def execute_search(self, search_term, search_types=None, mine_guid=None): + """ + Execute a simple search with the given parameters. + + Args: + search_term: The search term to query + search_types: Optional comma-separated list of types to filter by + mine_guid: Optional mine GUID to scope the search + + Returns: + dict with search_terms, search_results, and facets + """ + # Parse allowed types + # If search_types is None (not provided), set allowed_types to None (implies all types) + # If search_types is empty string, parse_csv_param returns [], which implies NO types + allowed_types = parse_csv_param(search_types) if search_types else None + + # Determine indices to search + indices = self._determine_search_indices(allowed_types) + if not indices: + return {'search_results': [], 'facets': {}} + + indices_string = ",".join(list(set(indices))) + current_app.logger.info(f"Searching indices: {indices_string}") + + # Build filters and query + base_filters = self._build_base_filters(mine_guid) + query = self._build_search_query(search_term, base_filters) + + # Execute search and process results + search_results = self._execute_and_process_search( + indices_string, query, allowed_types + ) + + # Group and rank results + search_results = self._group_and_rank_results(search_results) + + # Get facets + facets = self._get_facet_counts(search_term) + + return { + 'search_results': search_results, + 'facets': facets + } + + def _determine_search_indices(self, allowed_types): + """Determine which Elasticsearch indices to search based on allowed types.""" + available_types = [t for t in simple_search_targets.keys() if t in TYPE_TO_INDEX] + + if allowed_types is None: + return [TYPE_TO_INDEX[t] for t in available_types] + + allowed_index_types = {RESULT_TYPE_TO_INDEX.get(t) for t in allowed_types} + allowed_index_types.discard(None) + return [TYPE_TO_INDEX[t] for t in available_types if t in allowed_index_types] + + def _build_base_filters(self, mine_guid=None): + """Build base Elasticsearch filters using shared filter builders.""" + filters = [build_deleted_filter()] + + if mine_guid: + filters.append(build_mine_guid_filter(mine_guid)) + + return filters + + def _build_search_query(self, search_term, base_filters, include_highlight=True): + """Build Elasticsearch search query based on search term length.""" + is_wildcard = search_term == "*" or not search_term + + if is_wildcard: + return { + "query": {"bool": {"must": [{"match_all": {}}], "filter": base_filters}}, + "sort": [{"_score": "desc"}] + } + + highlight_config = { + "fields": HIGHLIGHT_FIELDS, + "pre_tags": [""], + "post_tags": [""], + "fragment_size": 150, + "number_of_fragments": 1 + } if include_highlight else None + + if len(search_term) < 3: + query = { + "query": { + "bool": { + "should": [ + {"multi_match": {"query": search_term, "fields": SEARCH_FIELDS, "type": "phrase_prefix"}}, + {"multi_match": {"query": search_term, "fields": SEARCH_FIELDS}} + ], + "minimum_should_match": 1, + "filter": base_filters + } + } + } + else: + query = { + "query": { + "bool": { + "should": [ + {"multi_match": {"query": search_term, "fields": SEARCH_FIELDS, "type": "phrase_prefix"}}, + {"multi_match": {"query": search_term, "fields": SEARCH_FIELDS, "fuzziness": "AUTO"}} + ], + "minimum_should_match": 1, + "filter": base_filters + } + } + } + + if highlight_config: + query["highlight"] = highlight_config + + return query + + def 
_execute_and_process_search(self, indices_string, query, allowed_types): + """Execute Elasticsearch search and process results.""" + search_results = [] + + try: + current_app.logger.info(f"ES Query: {query}") + es_results = ElasticSearchService.search(indices_string, query, size=30) + hits = es_results['hits']['hits'] + current_app.logger.info(f"ES returned {len(hits)} hits") + + # Process each hit + for hit in hits: + result = self._process_hit(hit, allowed_types) + if result: + search_results.append(result) + + except Exception as e: + current_app.logger.error(f"Elasticsearch error: {e}") + + return search_results + + def _group_and_rank_results(self, search_results): + """Group results by ID and rank by score.""" + # Group results by ID (combine duplicates) + grouped_results = {} + for result in search_results: + result_id = result.result['id'] + if result_id in grouped_results: + grouped_results[result_id].score += result.score + else: + grouped_results[result_id] = result + + # Sort by score and limit to top 4 + results_list = list(grouped_results.values()) + results_list.sort(key=lambda x: x.score, reverse=True) + return results_list[:4] + + def _process_hit(self, hit, allowed_types): + """Process a single Elasticsearch hit into a SearchResult.""" + index = hit['_index'] + index_to_type = {v: k for k, v in TYPE_TO_INDEX.items()} + doc_type = index_to_type.get(index) + + if not doc_type or doc_type not in simple_search_targets: + return None + + source = hit['_source'] + score = hit['_score'] + type_config = simple_search_targets[doc_type] + + # Process based on type using processor dictionary + processors = { + 'mine': self._process_mine_result, + 'party': self._process_party_result, + 'permit': self._process_permit_result, + 'notice_of_departure': self._process_nod_result, + 'explosives_permit': self._process_explosives_permit_result, + 'now_application': self._process_now_application_result + } + + processor = processors.get(doc_type) + if not processor: + return None + + result_type, value, description = processor(source) + + # Filter by allowed types + if allowed_types and result_type not in allowed_types: + return None + + # Extract highlight + highlight_text = None + if highlights := hit.get('highlight', {}): + for fragments in highlights.values(): + if fragments: + highlight_text = fragments[0] + break + + mine_guid = self._extract_mine_guid(doc_type, source) + + return SearchResult( + score, + result_type, + { + 'id': source.get(type_config['id_field']), + 'value': value, + 'description': description, + 'highlight': highlight_text, + 'mine_guid': mine_guid + } + ) + + def _extract_mine_guid(self, doc_type, source): + """Extract mine_guid from source based on document type.""" + if doc_type == 'mine': + return source.get('mine_guid') + elif doc_type == 'permit': + mine_guids = source.get('mine_guids', []) + return mine_guids[0] if mine_guids and isinstance(mine_guids, list) else None + elif doc_type in ['notice_of_departure', 'explosives_permit', 'now_application']: + mine_info = source.get('mine') + return mine_info.get('mine_guid') if isinstance(mine_info, dict) else None + return None + + def _process_mine_result(self, source): + """Process mine search result.""" + value = source.get('mine_name', '') + mine_no = source.get('mine_no', '') + mms_alias = source.get('mms_alias', '') + + # Extract commodities + commodities = set() + for mt in source.get('mine_types', []): + for detail in mt.get('mine_type_details', []): + if commodity := detail.get('mine_commodity_code'): + 
commodities.add(commodity) + + desc_parts = [f"Mine #: {mine_no}"] if mine_no else [] + if commodities: + desc_parts.append(", ".join(sorted(commodities))) + if mms_alias: + desc_parts.append(f"Alias: {mms_alias}") + + return 'mine', value, " | ".join(desc_parts) + + def _process_party_result(self, source): + """Process party search result.""" + first_name = source.get('first_name', '') + party_name = source.get('party_name', '') + party_type_code = source.get('party_type_code', 'PER') + email = source.get('email', '') + phone_no = source.get('phone_no', '') + + result_type = 'person' if party_type_code == 'PER' else 'organization' + value = f"{first_name} {party_name}".strip() if first_name else party_name + + desc_parts = [email] if email else [] + if phone_no: + desc_parts.append(phone_no) + + return result_type, value, " | ".join(desc_parts) + + def _process_permit_result(self, source): + """Process permit search result.""" + value = source.get('permit_no') or source.get('permit_number', '') + permit_status = source.get('permit_status_code', '') + + # Get first permittee + current_permittee = '' + if permittees := source.get('permittees', []): + first_permittee = permittees[0] if isinstance(permittees, list) else permittees + if first_permittee: + first_name = first_permittee.get('first_name', '') + party_name = first_permittee.get('party_name', '') + current_permittee = f"{first_name} {party_name}".strip() if first_name else party_name + + desc_parts = [current_permittee] if current_permittee else [] + if permit_status: + desc_parts.append(f"Status: {permit_status}") + + return 'permit', value, " | ".join(desc_parts) + + def _process_nod_result(self, source): + """Process notice of departure search result.""" + value = source.get('nod_title', '') or source.get('nod_no', '') + nod_no = source.get('nod_no', '') + nod_status = source.get('nod_status', '') + mine_name = source.get('mine', {}).get('mine_name', '') if source.get('mine') else '' + + desc_parts = [nod_no] if nod_no else [] + if mine_name: + desc_parts.append(mine_name) + if nod_status: + desc_parts.append(nod_status.replace('_', ' ').title()) + + return 'nod', value, " | ".join(desc_parts) + + def _process_explosives_permit_result(self, source): + """Process explosives permit search result.""" + value = source.get('permit_number', '') or source.get('application_number', '') + app_status = source.get('application_status', '') + is_closed = source.get('is_closed', False) + mine_name = source.get('mine', {}).get('mine_name', '') if source.get('mine') else '' + + status_map = {'REC': 'Received', 'APP': 'Approved', 'REJ': 'Rejected'} + + desc_parts = [mine_name] if mine_name else [] + if is_closed: + desc_parts.append('Closed') + elif app_status: + desc_parts.append(status_map.get(app_status, app_status)) + + return 'explosives_permit', value, " | ".join(desc_parts) + + def _process_now_application_result(self, source): + """Process NOW application search result.""" + value = source.get('now_number', '') + application = source.get('application', {}) + property_name = application.get('property_name', '') if application else '' + status_code = application.get('now_application_status_code', '') if application else '' + mine_name = source.get('mine', {}).get('mine_name', '') if source.get('mine') else '' + + status_map = { + 'REC': 'Received', 'REF': 'Referred', 'CDI': 'Client Delay', 'GVD': 'Govt Delay', + 'CON': 'Consultation', 'AIA': 'Approved', 'REJ': 'Rejected', 'WDN': 'Withdrawn', 'NPR': 'No Permit Required' + } + + desc_parts = 
[property_name] if property_name else [] + if mine_name: + desc_parts.append(mine_name) + if status_code: + desc_parts.append(status_map.get(status_code, status_code)) + + return 'now_application', value, " | ".join(desc_parts) + + def _get_facet_counts(self, search_term): + """Get facet counts using Elasticsearch aggregations and shared extraction logic.""" + all_indices = ",".join([TYPE_TO_INDEX[t] for t in simple_search_targets.keys() if t in TYPE_TO_INDEX]) + if not all_indices or not search_term: + return extract_simple_type_facets({}) # Return empty facets with structure + + # Build query with aggregations using shared filter builder + facet_query = self._build_search_query( + search_term, + [build_deleted_filter()], + include_highlight=False + ) + + # Add aggregations for type counting + facet_query["aggs"] = { + "by_index": { + "terms": {"field": "_index", "size": 100}, # Increased size to ensure we get all indices + "aggs": { + "by_party_type": { + "terms": {"field": "party_type_code.keyword", "missing": "N/A"} + } + } + } + } + + try: + facet_results = ElasticSearchService.search(all_indices, facet_query, size=0) + return extract_simple_type_facets(facet_results.get('aggregations', {})) + except Exception as e: + current_app.logger.error(f"Elasticsearch facet error: {e}") + return extract_simple_type_facets({}) diff --git a/services/core-api/app/api/utils/feature_flag.py b/services/core-api/app/api/utils/feature_flag.py index ef6a0c6574..39cfb76414 100644 --- a/services/core-api/app/api/utils/feature_flag.py +++ b/services/core-api/app/api/utils/feature_flag.py @@ -1,6 +1,6 @@ -from enum import Enum -import os import json +import os +from enum import Enum from app.config import Config from flagsmith import Flagsmith @@ -21,6 +21,7 @@ class Feature(Enum): AMS_AGENT = 'ams_agent' RECURRING_REPORTS = 'recurring_reports' MINESPACE_SIGNUP = 'minespace_signup' + GLOBAL_SEARCH_V2 = 'global_search_v2' def __str__(self): return self.value diff --git a/services/core-api/app/api/utils/search.py b/services/core-api/app/api/utils/search.py index cdc0101460..b229aadfa4 100644 --- a/services/core-api/app/api/utils/search.py +++ b/services/core-api/app/api/utils/search.py @@ -1,16 +1,19 @@ import json from app.api.mines.documents.models.mine_document import MineDocument +from app.api.mines.explosives_permit.models.explosives_permit import ExplosivesPermit from app.api.mines.mine.models.mine import Mine from app.api.mines.permits.permit.models.permit import Permit from app.api.mines.permits.permit_amendment.models.permit_amendment_document import ( PermitAmendmentDocument, ) +from app.api.notice_of_departure.models.notice_of_departure import NoticeOfDeparture +from app.api.now_applications.models.now_application_identity import ( + NOWApplicationIdentity, +) from app.api.parties.party.models.party import Party -from app.api.search.search.permit_search_service import PermitSearchService -from app.api.utils.feature_flag import Feature, is_feature_enabled from app.extensions import db -from sqlalchemy import desc, func, or_ +from sqlalchemy import desc, func common_search_targets = { 'mine': { @@ -58,6 +61,39 @@ 'id_field': 'permit_guid', 'value_field': 'permit_no', 'score_multiplier': 1000 + }, + 'notice_of_departure': { + 'model': NoticeOfDeparture, + 'primary_column': NoticeOfDeparture.nod_guid, + 'description': 'Notices of Departure', + 'entities_to_return': [NoticeOfDeparture.nod_guid, NoticeOfDeparture.nod_no, NoticeOfDeparture.nod_title], + 'columns_to_search': [NoticeOfDeparture.nod_no, 
NoticeOfDeparture.nod_title], + 'has_deleted_ind': True, + 'id_field': 'nod_guid', + 'value_field': 'nod_title', + 'score_multiplier': 500 + }, + 'explosives_permit': { + 'model': ExplosivesPermit, + 'primary_column': ExplosivesPermit.explosives_permit_guid, + 'description': 'Explosives Permits', + 'entities_to_return': [ExplosivesPermit.explosives_permit_guid, ExplosivesPermit.permit_number], + 'columns_to_search': [ExplosivesPermit.permit_number, ExplosivesPermit.application_number], + 'has_deleted_ind': True, + 'id_field': 'explosives_permit_guid', + 'value_field': 'permit_number', + 'score_multiplier': 500 + }, + 'now_application': { + 'model': NOWApplicationIdentity, + 'primary_column': NOWApplicationIdentity.now_application_guid, + 'description': 'Notice of Work Applications', + 'entities_to_return': [NOWApplicationIdentity.now_application_guid, NOWApplicationIdentity.now_number], + 'columns_to_search': [NOWApplicationIdentity.now_number], + 'has_deleted_ind': False, + 'id_field': 'now_application_guid', + 'value_field': 'now_number', + 'score_multiplier': 500 } } @@ -104,6 +140,52 @@ 'document_name', 'score_multiplier': 250 + }, + 'notice_of_departure': { + 'model': NoticeOfDeparture, + 'primary_column': NoticeOfDeparture.nod_guid, + 'description': 'Notices of Departure', + 'entities_to_return': [ + NoticeOfDeparture.nod_guid, + NoticeOfDeparture.nod_no, + NoticeOfDeparture.nod_title, + NoticeOfDeparture.nod_status, + NoticeOfDeparture.nod_type + ], + 'columns_to_search': [NoticeOfDeparture.nod_no, NoticeOfDeparture.nod_title, NoticeOfDeparture.nod_description], + 'has_deleted_ind': True, + 'id_field': 'nod_guid', + 'value_field': 'nod_title', + 'score_multiplier': 500 + }, + 'explosives_permit': { + 'model': ExplosivesPermit, + 'primary_column': ExplosivesPermit.explosives_permit_guid, + 'description': 'Explosives Permits', + 'entities_to_return': [ + ExplosivesPermit.explosives_permit_guid, + ExplosivesPermit.permit_number, + ExplosivesPermit.application_number, + ExplosivesPermit.description, + ExplosivesPermit.application_status, + ExplosivesPermit.is_closed + ], + 'columns_to_search': [ExplosivesPermit.permit_number, ExplosivesPermit.application_number, ExplosivesPermit.description], + 'has_deleted_ind': True, + 'id_field': 'explosives_permit_guid', + 'value_field': 'permit_number', + 'score_multiplier': 500 + }, + 'now_application': { + 'model': NOWApplicationIdentity, + 'primary_column': NOWApplicationIdentity.now_application_guid, + 'description': 'Notice of Work Applications', + 'entities_to_return': [NOWApplicationIdentity.now_application_guid, NOWApplicationIdentity.now_number], + 'columns_to_search': [NOWApplicationIdentity.now_number], + 'has_deleted_ind': False, + 'id_field': 'now_application_guid', + 'value_field': 'now_number', + 'score_multiplier': 500 } } diff --git a/services/core-api/app/config.py b/services/core-api/app/config.py index 9245e52ae0..f1f6bb7bd1 100644 --- a/services/core-api/app/config.py +++ b/services/core-api/app/config.py @@ -2,11 +2,11 @@ import os import traceback -from dotenv import load_dotenv, find_dotenv +import requests from celery.schedules import crontab +from dotenv import find_dotenv, load_dotenv from flask import current_app, has_app_context, has_request_context from opentelemetry import trace -import requests ENV_FILE = find_dotenv() if ENV_FILE: @@ -73,6 +73,11 @@ class Config(object): 'CRITICAL') # ['DEBUG','INFO','WARN','ERROR','CRITICAL'] DISPLAY_WERKZEUG_LOG = os.environ.get('DISPLAY_WERKZEUG_LOG', True) + ELASTICSEARCH_URL = 
os.environ.get('ELASTICSEARCH_URL', 'https://elasticsearch:9200') + ELASTICSEARCH_USERNAME = os.environ.get('ELASTICSEARCH_USERNAME', 'elastic') + ELASTICSEARCH_PASSWORD = os.environ.get('ELASTICSEARCH_PASSWORD', 'changeme') + ELASTICSEARCH_CA_CERTS = os.environ.get('ELASTICSEARCH_CA_CERTS', '/usr/share/elasticsearch/config/certs/ca/ca.crt') + LOGGING_DICT_CONFIG = { 'version': 1, 'formatters': { @@ -142,6 +147,7 @@ class Config(object): SQLALCHEMY_DATABASE_URI = DB_URL SQLALCHEMY_TRACK_MODIFICATIONS = False SQLALCHEMY_ENGINE_OPTIONS = {"pool_pre_ping": True} + SQLALCHEMY_WARN_20 = True JWT_OIDC_WELL_KNOWN_CONFIG = os.environ.get( 'JWT_OIDC_WELL_KNOWN_CONFIG', diff --git a/services/core-api/feature_flags.json b/services/core-api/feature_flags.json index 675afb76ae..af8467d8d1 100644 --- a/services/core-api/feature_flags.json +++ b/services/core-api/feature_flags.json @@ -10,5 +10,6 @@ "vc_w3c": true, "ams_agent": true, "recurring_reports": true, - "minespace_signup": true + "minespace_signup": true, + "global_search_v2": true } \ No newline at end of file diff --git a/services/core-api/requirements.txt b/services/core-api/requirements.txt index 384de10582..295ae6f0ae 100644 --- a/services/core-api/requirements.txt +++ b/services/core-api/requirements.txt @@ -58,4 +58,5 @@ Pillow==10.3.0 setuptools==65.5.1 requests_toolbelt==1.0.0 untp_models==0.1.1 -urllib3==2.5.0 \ No newline at end of file +urllib3==2.5.0 +elasticsearch==8.12.0 \ No newline at end of file diff --git a/services/core-api/tests/cli_commands/__init__.py b/services/core-api/tests/cli_commands/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/services/core-api/tests/search/resource/test_search_resource.py b/services/core-api/tests/search/resource/test_search_resource.py index e8bbf3d1a3..57565f3062 100644 --- a/services/core-api/tests/search/resource/test_search_resource.py +++ b/services/core-api/tests/search/resource/test_search_resource.py @@ -1,7 +1,31 @@ import json import uuid +import pytest +from unittest.mock import patch from tests.factories import MineFactory, PartyFactory +from app.api.utils.feature_flag import Feature + + +# Feature Flag Fixtures +# These fixtures ensure that all tests in this module use the V1 (original) +# search implementation instead of the V2 (Elasticsearch) implementation. +# This maintains test stability and validates that the legacy code path works correctly. 
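# For orientation, a minimal sketch (not part of this PR; helper names below are
# illustrative only) of the resource-side pattern these fixtures assume: the
# resource branches on the GLOBAL_SEARCH_V2 flag added in feature_flag.py, which
# is why patching is_feature_enabled at the resource's import path forces the
# legacy code path.
#
#     from app.api.utils.feature_flag import Feature, is_feature_enabled
#
#     if is_feature_enabled(Feature.GLOBAL_SEARCH_V2):
#         results = GlobalSearchService.search(term, allowed_types, filters)  # V2: Elasticsearch
#     else:
#         results = legacy_database_search(term)                              # V1: SQLAlchemy search targets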
+ +@pytest.fixture(autouse=True) +def disable_search_v2_flag(): + """Mock is_feature_enabled to always return False for search.py""" + with patch('app.api.search.search.resources.search.is_feature_enabled') as mock_flag: + mock_flag.return_value = False + yield mock_flag + + +@pytest.fixture(autouse=True) +def disable_simple_search_v2_flag(): + """Mock is_feature_enabled to always return False for simple_search.py""" + with patch('app.api.search.search.resources.simple_search.is_feature_enabled') as mock_flag: + mock_flag.return_value = False + yield mock_flag # GET @@ -11,8 +35,12 @@ def test_get_no_search_results(test_client, db_session, auth_headers): get_data = json.loads(get_resp.data.decode()) assert get_resp.status_code == 200 assert get_data['search_terms'] == ['Abbo'] - assert len( - [key for key, value in get_data['search_results'].items() if len(value) is not 0]) == 0 + # Verify no search results in any category + non_empty_categories = [ + key for key, value in get_data['search_results'].items() if len(value) != 0 + ] + assert len(non_empty_categories) == 0, \ + f"Expected no results, but found results in: {non_empty_categories}" def test_search_party(test_client, db_session, auth_headers): @@ -24,10 +52,14 @@ def test_search_party(test_client, db_session, auth_headers): assert len(parties) == 1 assert party.first_name in parties[0]['result']['name'] assert uuid.UUID(parties[0]['result']['party_guid']) == party.party_guid - assert len([ + # Verify all other search result categories are empty (only party should have results) + empty_categories = [ key for key, value in get_data['search_results'].items() - if key is not 'party' and len(value) is 0 - ]) == 4 + if key != 'party' and len(value) == 0 + ] + total_categories = len(get_data['search_results']) + assert len(empty_categories) == total_categories - 1, \ + f"Expected {total_categories - 1} empty categories, got {len(empty_categories)}" assert get_resp.status_code == 200 @@ -53,7 +85,7 @@ def test_simple_search_no_results(test_client, db_session, auth_headers): f'/search/simple?search_term=Abbo', headers=auth_headers['full_auth_header']) get_data = json.loads(get_resp.data.decode()) assert get_resp.status_code == 200 - assert get_data['search_terms'] == ['Abbo'] + assert get_data['search_terms'] == ['Abbo'] # V1 returns search_terms assert len(get_data['search_results']) == 0 diff --git a/services/core-api/tests/search/resource/test_search_resource_v2.py b/services/core-api/tests/search/resource/test_search_resource_v2.py new file mode 100644 index 0000000000..f3237e42cf --- /dev/null +++ b/services/core-api/tests/search/resource/test_search_resource_v2.py @@ -0,0 +1,412 @@ +"""Tests for search resource with V2 (Elasticsearch) enabled.""" + +import json +import pytest +from unittest.mock import patch +from tests.factories import MineFactory, PartyFactory + + +@pytest.fixture +def enable_search_v2(): + """Enable V2 search for tests in this module.""" + with patch('app.api.search.search.resources.search.is_feature_enabled') as mock_flag: + mock_flag.return_value = True + yield mock_flag + + +@pytest.fixture +def enable_simple_search_v2(): + """Enable V2 simple search for tests in this module.""" + with patch('app.api.search.search.resources.simple_search.is_feature_enabled') as mock_flag: + mock_flag.return_value = True + yield mock_flag + + +@pytest.fixture +def mock_es_service(): + """Mock Elasticsearch service - mocks the class method 'search'.""" + with 
patch('app.api.search.elasticsearch.elastic_search_service.ElasticSearchService.search') as mock_search: + # Default return value to prevent errors + mock_search.return_value = { + 'hits': {'hits': []}, + 'aggregations': {'by_index': {'buckets': []}} + } + yield mock_search + + +class TestSearchResourceV2: + """Test search resource with V2 enabled.""" + + def test_search_v2_mine_results(self, test_client, db_session, auth_headers, enable_search_v2, mock_es_service): + """Test V2 search returns mine results.""" + mock_es_service.return_value = { + 'hits': { + 'hits': [ + { + '_index': 'mines', + '_score': 10.0, + '_source': { + 'mine_guid': 'test-mine-guid', + 'mine_name': 'Test Mine', + 'mine_no': 'M-001', + 'mine_region': 'SW', + 'major_mine_ind': True + } + } + ] + }, + 'aggregations': { + 'by_index': { + 'buckets': [{'key': 'mines', 'doc_count': 1}] + }, + 'mine_region': { + 'buckets': [{'key': 'SW', 'doc_count': 1}] + } + } + } + + response = test_client.get( + '/search?search_term=Test', + headers=auth_headers['full_auth_header'] + ) + + assert response.status_code == 200 + data = json.loads(response.data.decode()) + + assert 'search_terms' in data + assert 'search_results' in data + assert 'facets' in data + + # Check mine results + assert 'mine' in data['search_results'] + mines = data['search_results']['mine'] + assert len(mines) >= 1 + + # Check facets are present + assert 'mine_region' in data['facets'] + assert len(data['facets']['mine_region']) >= 1 + + def test_search_v2_party_results(self, test_client, db_session, auth_headers, enable_search_v2, mock_es_service): + """Test V2 search returns party results.""" + mock_es_service.return_value = { + 'hits': { + 'hits': [ + { + '_index': 'parties', + '_score': 8.5, + '_source': { + 'party_guid': 'test-party-guid', + 'first_name': 'John', + 'party_name': 'Doe', + 'party_type_code': 'PER', + 'email': 'john@example.com', + 'phone_no': '555-1234' + } + } + ] + }, + 'aggregations': { + 'by_index': { + 'buckets': [{'key': 'parties', 'doc_count': 1}] + }, + 'party_type': { + 'buckets': [{'key': 'PER', 'doc_count': 1}] + } + } + } + + response = test_client.get( + '/search?search_term=John&search_types=party', + headers=auth_headers['full_auth_header'] + ) + + assert response.status_code == 200 + data = json.loads(response.data.decode()) + + assert 'party' in data['search_results'] + parties = data['search_results']['party'] + assert len(parties) >= 1 + + party = parties[0] + assert party['result']['name'] == 'John Doe' + + def test_search_v2_with_filters(self, test_client, db_session, auth_headers, enable_search_v2, mock_es_service): + """Test V2 search with filter parameters.""" + mock_es_service.return_value = { + 'hits': {'hits': []}, + 'aggregations': {'by_index': {'buckets': []}} + } + + response = test_client.get( + '/search?search_term=test&mine_region=SW&permit_status=O', + headers=auth_headers['full_auth_header'] + ) + + assert response.status_code == 200 + + # Verify ES service was called with filters + assert mock_es_service.called + call_args = mock_es_service.call_args + query = call_args[0][1] + + # Should have filter clauses + assert 'filter' in query['query']['bool'] + + def test_search_v2_multiple_types(self, test_client, db_session, auth_headers, enable_search_v2, mock_es_service): + """Test V2 search with multiple result types.""" + mock_es_service.return_value = { + 'hits': { + 'hits': [ + { + '_index': 'mines', + '_score': 10.0, + '_source': {'mine_guid': 'mine-1', 'mine_name': 'Mine 1'} + }, + { + '_index': 
'parties', + '_score': 8.0, + '_source': { + 'party_guid': 'party-1', + 'first_name': 'John', + 'party_name': 'Doe', + 'party_type_code': 'PER' + } + }, + { + '_index': 'mine_permits', + '_score': 7.5, + '_source': { + 'permit_guid': 'permit-1', + 'permit_no': 'P-001', + 'mine_guids': ['mine-1'] + } + } + ] + }, + 'aggregations': { + 'by_index': { + 'buckets': [ + {'key': 'mines', 'doc_count': 1}, + {'key': 'parties', 'doc_count': 1}, + {'key': 'mine_permits', 'doc_count': 1} + ] + } + } + } + + response = test_client.get( + '/search?search_term=test&search_types=mine,party,permit', + headers=auth_headers['full_auth_header'] + ) + + assert response.status_code == 200 + data = json.loads(response.data.decode()) + + assert 'mine' in data['search_results'] + assert 'party' in data['search_results'] + assert 'permit' in data['search_results'] + + def test_search_v2_empty_results(self, test_client, db_session, auth_headers, enable_search_v2, mock_es_service): + """Test V2 search with no results.""" + mock_es_service.return_value = { + 'hits': {'hits': []}, + 'aggregations': {'by_index': {'buckets': []}} + } + + response = test_client.get( + '/search?search_term=nonexistent', + headers=auth_headers['full_auth_header'] + ) + + assert response.status_code == 200 + data = json.loads(response.data.decode()) + + # Should have structure but empty results + assert 'search_results' in data + for result_type in data['search_results'].values(): + assert len(result_type) == 0 + + def test_search_v2_handles_es_error(self, test_client, db_session, auth_headers, enable_search_v2, mock_es_service): + """Test V2 search handles Elasticsearch errors gracefully.""" + mock_es_service.side_effect = Exception('ES connection failed') + + response = test_client.get( + '/search?search_term=test', + headers=auth_headers['full_auth_header'] + ) + + assert response.status_code == 200 + data = json.loads(response.data.decode()) + + # Should return empty results instead of erroring + assert 'search_results' in data + assert len(data['search_results']) >= 0 + + def test_search_options_returns_available_types(self, test_client, db_session, auth_headers): + """Test search options endpoint returns available types.""" + response = test_client.get( + '/search/options', + headers=auth_headers['full_auth_header'] + ) + + assert response.status_code == 200 + data = json.loads(response.data.decode()) + + # Should return list of search types + assert isinstance(data, list) + assert len(data) > 0 + + # Each item should have model_id and description + for item in data: + assert 'model_id' in item + assert 'description' in item + + +class TestSimpleSearchResourceV2: + """Test simple search resource with V2 enabled.""" + + def test_simple_search_v2_basic(self, test_client, db_session, auth_headers, enable_simple_search_v2, mock_es_service): + """Test V2 simple search returns results.""" + mock_es_service.return_value = { + 'hits': { + 'hits': [ + { + '_index': 'mines', + '_score': 10.0, + '_source': { + 'mine_guid': 'mine-123', + 'mine_name': 'Test Mine', + 'mine_no': 'M-001' + } + } + ] + }, + 'aggregations': { + 'by_index': { + 'buckets': [{'key': 'mines', 'doc_count': 1}] + } + } + } + + response = test_client.get( + '/search/simple?search_term=Test', + headers=auth_headers['full_auth_header'] + ) + + assert response.status_code == 200 + data = json.loads(response.data.decode()) + + assert 'search_results' in data + assert 'facets' in data + assert len(data['search_results']) >= 1 + + def test_simple_search_v2_with_mine_guid(self, 
test_client, db_session, auth_headers, enable_simple_search_v2, mock_es_service): + """Test V2 simple search with mine_guid filter (scoped search).""" + mock_es_service.return_value = { + 'hits': {'hits': []}, + 'aggregations': {'by_index': {'buckets': []}} + } + + response = test_client.get( + '/search/simple?search_term=test&mine_guid=test-mine-guid', + headers=auth_headers['full_auth_header'] + ) + + assert response.status_code == 200 + + # Verify ES was called with mine_guid filter + assert mock_es_service.called + call_args = mock_es_service.call_args + query = call_args[0][1] + + # Should include mine_guid in filter + assert 'bool' in query['query'] + + def test_simple_search_v2_facets(self, test_client, db_session, auth_headers, enable_simple_search_v2, mock_es_service): + """Test V2 simple search returns facet counts.""" + mock_es_service.return_value = { + 'hits': { + 'hits': [ + { + '_index': 'mines', + '_score': 10.0, + '_source': {'mine_guid': 'mine-1', 'mine_name': 'Mine 1'} + }, + { + '_index': 'parties', + '_score': 8.0, + '_source': { + 'party_guid': 'party-1', + 'first_name': 'John', + 'party_name': 'Doe', + 'party_type_code': 'PER' + } + } + ] + }, + 'aggregations': { + 'by_index': { + 'buckets': [ + {'key': 'mines', 'doc_count': 10}, + {'key': 'parties', 'doc_count': 5} + ] + } + } + } + + response = test_client.get( + '/search/simple?search_term=test', + headers=auth_headers['full_auth_header'] + ) + + assert response.status_code == 200 + data = json.loads(response.data.decode()) + + assert 'facets' in data + # Simple search has different facet structure (counts by type) + assert 'mine' in data['facets'] or isinstance(data['facets'], dict) + + +class TestSearchV1V2Compatibility: + """Test compatibility between V1 and V2 search.""" + + def test_response_structure_compatibility(self, test_client, db_session, auth_headers, enable_search_v2, mock_es_service): + """Test V2 response structure matches V1 for basic fields.""" + mock_es_service.return_value = { + 'hits': {'hits': []}, + 'aggregations': {'by_index': {'buckets': []}} + } + + response = test_client.get( + '/search?search_term=test', + headers=auth_headers['full_auth_header'] + ) + + assert response.status_code == 200 + data = json.loads(response.data.decode()) + + # V1 and V2 should both have these keys + assert 'search_terms' in data + assert 'search_results' in data + + # V2 adds facets + assert 'facets' in data + + def test_search_terms_parsing_matches_v1(self, test_client, db_session, auth_headers, enable_search_v2, mock_es_service): + """Test search terms are parsed the same way as V1.""" + mock_es_service.return_value = { + 'hits': {'hits': []}, + 'aggregations': {'by_index': {'buckets': []}} + } + + response = test_client.get( + '/search?search_term=test mine', + headers=auth_headers['full_auth_header'] + ) + + assert response.status_code == 200 + data = json.loads(response.data.decode()) + + # Should parse into individual terms + assert isinstance(data['search_terms'], list) + assert len(data['search_terms']) >= 1 diff --git a/services/core-api/tests/search/resource/test_simple_search_resource_integration.py b/services/core-api/tests/search/resource/test_simple_search_resource_integration.py new file mode 100644 index 0000000000..53a9ce91f1 --- /dev/null +++ b/services/core-api/tests/search/resource/test_simple_search_resource_integration.py @@ -0,0 +1,517 @@ +""" +Integration tests for SimpleSearchResource API + +Tests the full HTTP request/response cycle including parameter extraction, +service 
delegation, and response formatting. +""" + +import json +import pytest +from unittest.mock import patch, Mock +from tests.factories import MineFactory, PartyFactory + + +@pytest.fixture +def enable_simple_search_v2(): + """Enable V2 simple search for tests.""" + with patch('app.api.search.search.resources.simple_search.is_feature_enabled') as mock_flag: + mock_flag.return_value = True + yield mock_flag + + +@pytest.fixture +def mock_simple_search_service(): + """Mock SimpleSearchService for integration testing.""" + with patch('app.api.search.search.resources.simple_search.SimpleSearchService') as mock_service_class: + mock_service = Mock() + mock_service_class.return_value = mock_service + yield mock_service + + +class TestSimpleSearchResourceIntegration: + """Integration tests for SimpleSearchResource API endpoints.""" + + # ==================== Basic API Tests ==================== + + def test_simple_search_endpoint_exists(self, test_client, auth_headers): + """Test that the simple search endpoint exists.""" + response = test_client.get( + '/search/simple?search_term=test', + headers=auth_headers['full_auth_header'] + ) + + # Should not return 404 + assert response.status_code != 404 + + def test_simple_search_requires_authentication(self, test_client): + """Test that simple search requires authentication.""" + response = test_client.get('/search/simple?search_term=test') + + # Should return 401 or 403 (depending on auth setup) + assert response.status_code in [401, 403] + + # ==================== V2 API Tests (with service delegation) ==================== + + def test_simple_search_v2_delegates_to_service( + self, test_client, db_session, auth_headers, + enable_simple_search_v2, mock_simple_search_service + ): + """Test that V2 endpoint delegates to SimpleSearchService.""" + # Configure mock service response + mock_simple_search_service.execute_search.return_value = { + 'search_results': [], + 'facets': {'mine': 0, 'person': 0, 'organization': 0} + } + + response = test_client.get( + '/search/simple?search_term=test', + headers=auth_headers['full_auth_header'] + ) + + assert response.status_code == 200 + + # Verify service was called with correct parameters + mock_simple_search_service.execute_search.assert_called_once_with( + 'test', # search_term + None, # search_types + None # mine_guid + ) + + def test_simple_search_v2_with_all_parameters( + self, test_client, db_session, auth_headers, + enable_simple_search_v2, mock_simple_search_service + ): + """Test V2 endpoint with all query parameters.""" + mock_simple_search_service.execute_search.return_value = { + 'search_results': [], + 'facets': {} + } + + response = test_client.get( + '/search/simple?search_term=mountain&search_types=mine,permit&mine_guid=abc-123', + headers=auth_headers['full_auth_header'] + ) + + assert response.status_code == 200 + + # Verify all parameters were passed to service + mock_simple_search_service.execute_search.assert_called_once_with( + 'mountain', # search_term + 'mine,permit', # search_types + 'abc-123' # mine_guid + ) + + def test_simple_search_v2_returns_correct_response_format( + self, test_client, db_session, auth_headers, + enable_simple_search_v2, mock_simple_search_service + ): + """Test that V2 endpoint returns correctly formatted response.""" + # Mock service response with sample data + mock_result = Mock() + mock_result.score = 10.0 + mock_result.type = 'mine' + mock_result.result = { + 'id': 'mine-123', + 'value': 'Test Mine', + 'description': 'Mine #: M-001', + 'highlight': None, + 
'mine_guid': 'mine-123' + } + + mock_simple_search_service.execute_search.return_value = { + 'search_results': [mock_result], + 'facets': { + 'mine': 1, + 'person': 0, + 'organization': 0, + 'permit': 0, + 'nod': 0, + 'explosives_permit': 0, + 'now_application': 0, + 'mine_documents': 0, + 'permit_documents': 0 + } + } + + response = test_client.get( + '/search/simple?search_term=test', + headers=auth_headers['full_auth_header'] + ) + + assert response.status_code == 200 + data = json.loads(response.data.decode()) + + # Verify response structure + assert 'search_results' in data + assert 'facets' in data + + assert len(data['search_results']) == 1 + assert data['facets']['mine'] == 1 + + # ==================== Query Parameter Tests ==================== + + def test_simple_search_with_wildcard( + self, test_client, db_session, auth_headers, + enable_simple_search_v2, mock_simple_search_service + ): + """Test search with wildcard (*) search term.""" + mock_simple_search_service.execute_search.return_value = { + 'search_results': [], + 'facets': {} + } + + response = test_client.get( + '/search/simple?search_term=*', + headers=auth_headers['full_auth_header'] + ) + + assert response.status_code == 200 + mock_simple_search_service.execute_search.assert_called_once() + + def test_simple_search_with_empty_search_term( + self, test_client, db_session, auth_headers, + enable_simple_search_v2, mock_simple_search_service + ): + """Test search with empty search term.""" + mock_simple_search_service.execute_search.return_value = { + 'search_results': [], + 'facets': {} + } + + response = test_client.get( + '/search/simple?search_term=', + headers=auth_headers['full_auth_header'] + ) + + assert response.status_code == 200 + + def test_simple_search_with_special_characters( + self, test_client, db_session, auth_headers, + enable_simple_search_v2, mock_simple_search_service + ): + """Test search with special characters in search term.""" + mock_simple_search_service.execute_search.return_value = { + 'search_results': [], + 'facets': {} + } + + response = test_client.get( + '/search/simple?search_term=test&special', + headers=auth_headers['full_auth_header'] + ) + + assert response.status_code == 200 + + def test_simple_search_with_unicode_characters( + self, test_client, db_session, auth_headers, + enable_simple_search_v2, mock_simple_search_service + ): + """Test search with unicode characters.""" + mock_simple_search_service.execute_search.return_value = { + 'search_results': [], + 'facets': {} + } + + response = test_client.get( + '/search/simple?search_term=café', + headers=auth_headers['full_auth_header'] + ) + + assert response.status_code == 200 + + # ==================== Type Filter Tests ==================== + + def test_simple_search_with_single_type_filter( + self, test_client, db_session, auth_headers, + enable_simple_search_v2, mock_simple_search_service + ): + """Test search with single type filter.""" + mock_simple_search_service.execute_search.return_value = { + 'search_results': [], + 'facets': {} + } + + response = test_client.get( + '/search/simple?search_term=test&search_types=mine', + headers=auth_headers['full_auth_header'] + ) + + assert response.status_code == 200 + mock_simple_search_service.execute_search.assert_called_with('test', 'mine', None) + + def test_simple_search_with_multiple_type_filters( + self, test_client, db_session, auth_headers, + enable_simple_search_v2, mock_simple_search_service + ): + """Test search with multiple type filters.""" + 
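        # The resource is expected to pass the raw comma-separated string through to
        # the service; SimpleSearchService.execute_search splits it with
        # parse_csv_param (tested later in this PR), e.g.
        # parse_csv_param('mine,permit,person') -> ['mine', 'permit', 'person'],
        # and RESULT_TYPE_TO_INDEX then collapses 'person'/'organization' onto the
        # shared 'party' index before ES index names are resolved.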
mock_simple_search_service.execute_search.return_value = { + 'search_results': [], + 'facets': {} + } + + response = test_client.get( + '/search/simple?search_term=test&search_types=mine,permit,person', + headers=auth_headers['full_auth_header'] + ) + + assert response.status_code == 200 + mock_simple_search_service.execute_search.assert_called_with( + 'test', 'mine,permit,person', None + ) + + # ==================== Mine GUID Scoping Tests ==================== + + def test_simple_search_with_mine_guid( + self, test_client, db_session, auth_headers, + enable_simple_search_v2, mock_simple_search_service + ): + """Test mine-scoped search.""" + mock_simple_search_service.execute_search.return_value = { + 'search_results': [], + 'facets': {} + } + + response = test_client.get( + '/search/simple?search_term=permit&mine_guid=test-mine-guid-123', + headers=auth_headers['full_auth_header'] + ) + + assert response.status_code == 200 + mock_simple_search_service.execute_search.assert_called_with( + 'permit', None, 'test-mine-guid-123' + ) + + def test_simple_search_with_mine_guid_and_types( + self, test_client, db_session, auth_headers, + enable_simple_search_v2, mock_simple_search_service + ): + """Test mine-scoped search with type filter.""" + mock_simple_search_service.execute_search.return_value = { + 'search_results': [], + 'facets': {} + } + + response = test_client.get( + '/search/simple?search_term=*&mine_guid=abc-123&search_types=permit', + headers=auth_headers['full_auth_header'] + ) + + assert response.status_code == 200 + mock_simple_search_service.execute_search.assert_called_with( + '*', 'permit', 'abc-123' + ) + + # ==================== Error Handling Tests ==================== + + def test_simple_search_handles_service_error( + self, test_client, db_session, auth_headers, + enable_simple_search_v2, mock_simple_search_service + ): + """Test that API handles service errors gracefully.""" + # Mock service error + mock_simple_search_service.execute_search.side_effect = Exception("Service error") + + response = test_client.get( + '/search/simple?search_term=test', + headers=auth_headers['full_auth_header'] + ) + + # Should handle error gracefully (might return 500 or empty results depending on implementation) + assert response.status_code in [200, 500] + + def test_simple_search_with_missing_search_term( + self, test_client, db_session, auth_headers, + enable_simple_search_v2, mock_simple_search_service + ): + """Test search without search_term parameter.""" + mock_simple_search_service.execute_search.return_value = { + 'search_results': [], + 'facets': {} + } + + response = test_client.get( + '/search/simple', + headers=auth_headers['full_auth_header'] + ) + + assert response.status_code == 200 + # Should call service with None + mock_simple_search_service.execute_search.assert_called_with(None, None, None) + + # ==================== Response Format Tests ==================== + + def test_simple_search_response_has_required_fields( + self, test_client, db_session, auth_headers, + enable_simple_search_v2, mock_simple_search_service + ): + """Test that response contains all required fields.""" + mock_simple_search_service.execute_search.return_value = { + 'search_results': [], + 'facets': { + 'mine': 0, + 'person': 0, + 'organization': 0, + 'permit': 0, + 'nod': 0, + 'explosives_permit': 0, + 'now_application': 0, + 'mine_documents': 0, + 'permit_documents': 0 + } + } + + response = test_client.get( + '/search/simple?search_term=test', + headers=auth_headers['full_auth_header'] + ) + + 
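        # Note on the facet keys asserted below: 'person' and 'organization' both
        # live in the single parties index (see RESULT_TYPE_TO_INDEX), but the facet
        # extraction is expected to report them separately via the by_party_type
        # sub-aggregation on party_type_code.keyword used in
        # SimpleSearchService._get_facet_counts.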
assert response.status_code == 200 + data = json.loads(response.data.decode()) + + # V2 should have search_results and facets + assert 'search_results' in data + assert 'facets' in data + + # Facets should have at least the basic types + facet_keys = data['facets'].keys() + basic_types = [ + 'mine', 'person', 'organization', 'permit', + 'nod', 'explosives_permit', 'now_application' + ] + for basic_type in basic_types: + assert basic_type in facet_keys + + def test_simple_search_result_structure( + self, test_client, db_session, auth_headers, + enable_simple_search_v2, mock_simple_search_service + ): + """Test the structure of individual search results.""" + mock_result = Mock() + mock_result.score = 15.5 + mock_result.type = 'mine' + mock_result.result = { + 'id': 'mine-123', + 'value': 'Test Mine', + 'description': 'Mine #: M-001 | Coal', + 'highlight': 'Test Mine', + 'mine_guid': 'mine-123' + } + + mock_simple_search_service.execute_search.return_value = { + 'search_results': [mock_result], + 'facets': {} + } + + response = test_client.get( + '/search/simple?search_term=test', + headers=auth_headers['full_auth_header'] + ) + + assert response.status_code == 200 + data = json.loads(response.data.decode()) + + result = data['search_results'][0] + # Verify result structure (depends on marshalling) + assert 'id' in result or 'result' in result + + # ==================== Performance Tests ==================== + + def test_simple_search_response_time( + self, test_client, db_session, auth_headers, + enable_simple_search_v2, mock_simple_search_service + ): + """Test that search responds within reasonable time.""" + import time + + mock_simple_search_service.execute_search.return_value = { + 'search_results': [], + 'facets': {} + } + + start_time = time.time() + response = test_client.get( + '/search/simple?search_term=test', + headers=auth_headers['full_auth_header'] + ) + end_time = time.time() + + assert response.status_code == 200 + # Should respond within 5 seconds (with mocked service this should be very fast) + assert (end_time - start_time) < 5.0 + + # ==================== V1 Backward Compatibility Tests ==================== + + def test_simple_search_v1_still_works( + self, test_client, db_session, auth_headers + ): + """Test that V1 endpoint still works when feature flag is off.""" + with patch('app.api.search.search.resources.simple_search.is_feature_enabled') as mock_flag: + mock_flag.return_value = False # V1 mode + + response = test_client.get( + '/search/simple?search_term=test', + headers=auth_headers['full_auth_header'] + ) + + assert response.status_code == 200 + data = json.loads(response.data.decode()) + + # V1 should have search_results + assert 'search_results' in data + + # ==================== Edge Cases ==================== + + def test_simple_search_with_very_long_search_term( + self, test_client, db_session, auth_headers, + enable_simple_search_v2, mock_simple_search_service + ): + """Test search with very long search term.""" + long_term = 'a' * 500 + mock_simple_search_service.execute_search.return_value = { + 'search_results': [], + 'facets': {} + } + + response = test_client.get( + f'/search/simple?search_term={long_term}', + headers=auth_headers['full_auth_header'] + ) + + assert response.status_code == 200 + + def test_simple_search_with_many_type_filters( + self, test_client, db_session, auth_headers, + enable_simple_search_v2, mock_simple_search_service + ): + """Test search with all possible type filters.""" + all_types = 
'mine,person,organization,permit,nod,explosives_permit,now_application' + mock_simple_search_service.execute_search.return_value = { + 'search_results': [], + 'facets': {} + } + + response = test_client.get( + f'/search/simple?search_term=test&search_types={all_types}', + headers=auth_headers['full_auth_header'] + ) + + assert response.status_code == 200 + mock_simple_search_service.execute_search.assert_called_with('test', all_types, None) + + def test_simple_search_with_invalid_type_filter( + self, test_client, db_session, auth_headers, + enable_simple_search_v2, mock_simple_search_service + ): + """Test search with invalid type filter.""" + mock_simple_search_service.execute_search.return_value = { + 'search_results': [], + 'facets': {} + } + + response = test_client.get( + '/search/simple?search_term=test&search_types=invalid_type', + headers=auth_headers['full_auth_header'] + ) + + # Should handle gracefully + assert response.status_code == 200 diff --git a/services/core-api/tests/search/test_global_search_service.py b/services/core-api/tests/search/test_global_search_service.py new file mode 100644 index 0000000000..23f617da9c --- /dev/null +++ b/services/core-api/tests/search/test_global_search_service.py @@ -0,0 +1,313 @@ +"""Tests for global search service.""" + +import pytest +from unittest.mock import patch, MagicMock +from app.api.search.search.global_search_service import ( + GlobalSearchService, + parse_csv_param, + parse_search_terms, + parse_filters, + build_search_query, +) + + +class TestParseHelpers: + """Test parsing helper functions.""" + + def test_parse_csv_param_with_values(self): + result = parse_csv_param('SW,NE,NW') + assert result == ['SW', 'NE', 'NW'] + + def test_parse_csv_param_empty(self): + result = parse_csv_param('') + assert result == [] + + def test_parse_csv_param_none(self): + result = parse_csv_param(None) + assert result == [] + + def test_parse_csv_param_with_spaces(self): + result = parse_csv_param(' SW , NE , NW ') + assert result == ['SW', 'NE', 'NW'] + + def test_parse_search_terms_basic(self): + result = parse_search_terms('test mine') + assert 'test' in result + assert 'mine' in result + + def test_parse_search_terms_with_quotes(self): + result = parse_search_terms('"test mine" another') + assert 'test mine' in result + assert 'another' in result + + def test_parse_search_terms_empty(self): + result = parse_search_terms('') + assert result == [] + + def test_parse_filters(self): + args = MagicMock() + args.get.side_effect = lambda key: { + 'mine_region': 'SW,NE', + 'permit_status': 'O', + 'other_param': 'ignored' + }.get(key) + + result = parse_filters(args) + + assert 'mine_region' in result + assert result['mine_region'] == ['SW', 'NE'] + assert 'permit_status' in result + assert result['permit_status'] == ['O'] + + +class TestBuildSearchQuery: + """Test search query building.""" + + def test_build_search_query_with_term(self): + query = build_search_query('test', []) + + assert 'query' in query + assert 'bool' in query['query'] + assert 'should' in query['query']['bool'] + assert 'aggs' in query + + def test_build_search_query_wildcard(self): + query = build_search_query('*', []) + + assert 'query' in query + assert 'match_all' in query['query']['bool']['must'][0] + + def test_build_search_query_empty(self): + query = build_search_query('', []) + + assert 'match_all' in query['query']['bool']['must'][0] + + def test_build_search_query_with_filters(self): + filter_clauses = [ + {'term': {'mine_region.keyword': 'SW'}} + ] + query = 
build_search_query('test', filter_clauses) + + assert 'filter' in query['query']['bool'] + assert len(query['query']['bool']['filter']) == 1 + + def test_build_search_query_includes_aggregations(self): + query = build_search_query('test', []) + + assert 'aggs' in query + # Should have various aggregations defined + assert len(query['aggs']) > 0 + + def test_build_search_query_phrase_prefix(self): + query = build_search_query('test mine', []) + + # Should include phrase_prefix match + should_clauses = query['query']['bool']['should'] + phrase_prefix = next((c for c in should_clauses if 'multi_match' in c and c['multi_match'].get('type') == 'phrase_prefix'), None) + assert phrase_prefix is not None + + def test_build_search_query_fuzzy_for_long_term(self): + query = build_search_query('testing', []) + + # Should include fuzzy match for terms >= 3 chars + should_clauses = query['query']['bool']['should'] + fuzzy_match = next((c for c in should_clauses if 'multi_match' in c and 'fuzziness' in c['multi_match']), None) + assert fuzzy_match is not None + + def test_build_search_query_no_fuzzy_for_short_term(self): + query = build_search_query('ab', []) + + # Should not include fuzzy match for short terms + should_clauses = query['query']['bool']['should'] + fuzzy_matches = [c for c in should_clauses if 'multi_match' in c and 'fuzziness' in c['multi_match']] + assert len(fuzzy_matches) == 0 + + +class TestGlobalSearchService: + """Test GlobalSearchService.""" + + @patch('app.api.search.search.global_search_service.ElasticSearchService') + def test_search_basic(self, mock_es_service): + # Mock ES response + mock_es_service.search.return_value = { + 'hits': { + 'hits': [ + { + '_index': 'mines', + '_score': 10.0, + '_source': { + 'mine_guid': 'mine-123', + 'mine_name': 'Test Mine', + 'mine_no': 'M-001' + } + } + ] + }, + 'aggregations': { + 'by_index': { + 'buckets': [{'key': 'mines', 'doc_count': 1}] + } + } + } + + result = GlobalSearchService.search('test', ['mine'], {}) + + assert 'results' in result + assert 'facets' in result + assert 'mine' in result['results'] + assert len(result['results']['mine']) == 1 + mock_es_service.search.assert_called_once() + + @patch('app.api.search.search.global_search_service.ElasticSearchService') + def test_search_multiple_types(self, mock_es_service): + mock_es_service.search.return_value = { + 'hits': { + 'hits': [ + { + '_index': 'mines', + '_score': 10.0, + '_source': {'mine_guid': 'mine-123', 'mine_name': 'Test Mine'} + }, + { + '_index': 'parties', + '_score': 8.0, + '_source': {'party_guid': 'party-123', 'first_name': 'John', 'party_name': 'Doe'} + } + ] + }, + 'aggregations': {} + } + + result = GlobalSearchService.search('test', ['mine', 'party'], {}) + + assert 'mine' in result['results'] + assert 'party' in result['results'] + assert len(result['results']['mine']) == 1 + assert len(result['results']['party']) == 1 + + @patch('app.api.search.search.global_search_service.ElasticSearchService') + def test_search_with_filters(self, mock_es_service): + mock_es_service.search.return_value = { + 'hits': {'hits': []}, + 'aggregations': {} + } + + filters = {'mine_region': ['SW'], 'permit_status': ['O']} + result = GlobalSearchService.search('test', ['mine'], filters) + + # Verify ES service was called + mock_es_service.search.assert_called_once() + call_args = mock_es_service.search.call_args + query = call_args[0][1] + + # Query should include filters + assert 'filter' in query['query']['bool'] + + 
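    # For reference, the rough query skeleton the TestBuildSearchQuery cases above
    # expect, mirroring SimpleSearchService._build_search_query earlier in this PR
    # (field lists and aggregation bodies are abbreviated; the exact contents come
    # from search_constants.py). A wildcard or empty term swaps the "should"
    # clauses for a single match_all under "must".
    #
    #     {
    #         "query": {
    #             "bool": {
    #                 "should": [
    #                     {"multi_match": {"query": term, "fields": SEARCH_FIELDS,
    #                                      "type": "phrase_prefix"}},
    #                     {"multi_match": {"query": term, "fields": SEARCH_FIELDS,
    #                                      "fuzziness": "AUTO"}}  # only when len(term) >= 3
    #                 ],
    #                 "minimum_should_match": 1,
    #                 "filter": [...filter_clauses...]
    #             }
    #         },
    #         "aggs": {"by_index": {"terms": {"field": "_index"}}, ...}
    #     }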
@patch('app.api.search.search.global_search_service.ElasticSearchService') + def test_search_ensures_all_types_present(self, mock_es_service): + # ES returns only mines + mock_es_service.search.return_value = { + 'hits': { + 'hits': [ + { + '_index': 'mines', + '_score': 10.0, + '_source': {'mine_guid': 'mine-123', 'mine_name': 'Test Mine'} + } + ] + }, + 'aggregations': {} + } + + # Request both mines and parties + result = GlobalSearchService.search('test', ['mine', 'party'], {}) + + # Both should be in results, party should be empty list + assert 'mine' in result['results'] + assert 'party' in result['results'] + assert len(result['results']['mine']) == 1 + assert len(result['results']['party']) == 0 + + @patch('app.api.search.search.global_search_service.ElasticSearchService') + def test_search_handles_es_error(self, mock_es_service): + # Simulate ES error + mock_es_service.search.side_effect = Exception('ES connection failed') + + result = GlobalSearchService.search('test', ['mine'], {}) + + # Should return empty results instead of raising + assert 'results' in result + assert 'facets' in result + assert result['results']['mine'] == [] + + @patch('app.api.search.search.global_search_service.ElasticSearchService') + def test_search_with_custom_size(self, mock_es_service): + mock_es_service.search.return_value = { + 'hits': {'hits': []}, + 'aggregations': {} + } + + GlobalSearchService.search('test', ['mine'], {}, size=100) + + # Verify size parameter was passed + call_args = mock_es_service.search.call_args + assert call_args[1]['size'] == 100 + + @patch('app.api.search.search.global_search_service.ElasticSearchService') + def test_search_builds_correct_indices_string(self, mock_es_service): + mock_es_service.search.return_value = { + 'hits': {'hits': []}, + 'aggregations': {} + } + + GlobalSearchService.search('test', ['mine', 'party', 'permit'], {}) + + # Verify correct indices were passed + call_args = mock_es_service.search.call_args + indices = call_args[0][0] + + # Should be comma-separated index names + assert 'mines' in indices + assert 'parties' in indices + assert 'permits' in indices or 'mine_permits' in indices + + @patch('app.api.search.search.global_search_service.ElasticSearchService') + def test_search_returns_facets(self, mock_es_service): + mock_es_service.search.return_value = { + 'hits': {'hits': []}, + 'aggregations': { + 'mine_region': { + 'buckets': [{'key': 'SW', 'doc_count': 10}] + } + } + } + + result = GlobalSearchService.search('test', ['mine'], {}) + + assert 'facets' in result + assert 'mine_region' in result['facets'] + assert len(result['facets']['mine_region']) == 1 + assert result['facets']['mine_region'][0]['key'] == 'SW' + + def test_search_with_no_indices(self): + result = GlobalSearchService.search('test', [], {}) + + # Should return empty results + assert 'results' in result + assert 'facets' in result + assert result['results'] == {} + + @patch('app.api.search.search.global_search_service.ElasticSearchService') + def test_search_logs_info(self, mock_es_service): + mock_es_service.search.return_value = { + 'hits': {'hits': []}, + 'aggregations': {} + } + + with patch('app.api.search.search.global_search_service.current_app') as mock_app: + GlobalSearchService.search('test query', ['mine'], {}) + + # Should log the search + assert mock_app.logger.info.called + call_args = str(mock_app.logger.info.call_args_list) + assert 'test query' in call_args diff --git a/services/core-api/tests/search/test_search_constants.py 
b/services/core-api/tests/search/test_search_constants.py new file mode 100644 index 0000000000..a93bf5e8a3 --- /dev/null +++ b/services/core-api/tests/search/test_search_constants.py @@ -0,0 +1,205 @@ +"""Tests for search constants and mappings.""" + +import pytest +from app.api.search.search.search_constants import ( + TYPE_TO_INDEX, + INDEX_TO_TYPE, + FACET_KEYS, + FILTER_PARAMS, + SEARCH_FIELDS, + ES_AGGREGATIONS, +) + + +class TestSearchConstants: + """Test search constant definitions.""" + + def test_type_to_index_mapping(self): + """Test TYPE_TO_INDEX has all expected mappings.""" + expected_types = [ + 'mine', 'party', 'permit', 'mine_documents', + 'explosives_permit', 'now_application', 'notice_of_departure' + ] + + for doc_type in expected_types: + assert doc_type in TYPE_TO_INDEX, f"Missing TYPE_TO_INDEX mapping for {doc_type}" + assert isinstance(TYPE_TO_INDEX[doc_type], str), f"TYPE_TO_INDEX[{doc_type}] should be a string" + + def test_index_to_type_mapping(self): + """Test INDEX_TO_TYPE is inverse of TYPE_TO_INDEX.""" + expected_indices = [ + 'mines', 'parties', 'mine_permits', 'documents', + 'explosives_permits', 'now_applications', 'notices_of_departure' + ] + + for index in expected_indices: + assert index in INDEX_TO_TYPE, f"Missing INDEX_TO_TYPE mapping for {index}" + + def test_type_index_mappings_are_inverse(self): + """Test TYPE_TO_INDEX and INDEX_TO_TYPE are inverses of each other.""" + for doc_type, index in TYPE_TO_INDEX.items(): + assert INDEX_TO_TYPE[index] == doc_type, \ + f"TYPE_TO_INDEX[{doc_type}] = {index} but INDEX_TO_TYPE[{index}] != {doc_type}" + + def test_facet_keys_defined(self): + """Test FACET_KEYS contains expected facet names.""" + expected_facets = [ + 'mine_region', 'mine_classification', 'mine_operation_status', + 'mine_tenure', 'mine_commodity', 'has_tsf', 'verified_status', + 'permit_status', 'party_type', 'type' + ] + + for facet in expected_facets: + assert facet in FACET_KEYS, f"Missing facet key: {facet}" + + def test_filter_params_defined(self): + """Test FILTER_PARAMS contains expected filter names.""" + expected_filters = [ + 'mine_region', 'mine_classification', 'mine_operation_status', + 'mine_tenure', 'mine_commodity', 'has_tsf', 'verified_status', + 'permit_status', 'is_exploration', 'party_type', + 'explosives_permit_status', 'explosives_permit_closed', + 'nod_type', 'nod_status', 'now_application_status', 'now_type' + ] + + for filter_param in expected_filters: + assert filter_param in FILTER_PARAMS, f"Missing filter param: {filter_param}" + + def test_search_fields_defined(self): + """Test SEARCH_FIELDS contains expected searchable fields.""" + # Should have various searchable fields + assert isinstance(SEARCH_FIELDS, list) + assert len(SEARCH_FIELDS) > 0 + + # Should include common fields + expected_fields = [ + 'mine_name', 'mine_no', 'party_name', 'first_name', + 'permit_no', 'document_name' + ] + + for field in expected_fields: + # Check if field or field with boost is present + field_present = any(field in search_field for search_field in SEARCH_FIELDS) + assert field_present, f"Expected search field containing '{field}'" + + def test_es_aggregations_structure(self): + """Test ES_AGGREGATIONS has proper structure.""" + assert isinstance(ES_AGGREGATIONS, dict) + assert len(ES_AGGREGATIONS) > 0 + + # Should have by_index aggregation + assert 'by_index' in ES_AGGREGATIONS + assert 'terms' in ES_AGGREGATIONS['by_index'] + + # Should have mine-related aggregations + assert 'mine_region' in ES_AGGREGATIONS + assert 
'major_mine_ind' in ES_AGGREGATIONS + + def test_es_aggregations_nested_properly(self): + """Test nested aggregations have correct structure.""" + # Check nested aggregations + nested_aggs = [ + 'mine_operation_status', + 'mine_tenure_type', + 'mine_commodity_code' + ] + + for agg_name in nested_aggs: + if agg_name in ES_AGGREGATIONS: + assert 'nested' in ES_AGGREGATIONS[agg_name], \ + f"{agg_name} should be a nested aggregation" + + def test_facet_keys_match_aggregations(self): + """Test FACET_KEYS and ES_AGGREGATIONS are consistent.""" + # Most facet keys should have corresponding aggregations + # (some may be derived from multiple aggregations) + core_facets = [ + 'mine_region', 'mine_operation_status', 'mine_tenure', + 'mine_commodity', 'has_tsf', 'permit_status' + ] + + for facet in core_facets: + # Check if there's a related aggregation + # (may not be exact match due to transformation) + agg_exists = facet in ES_AGGREGATIONS or \ + any(facet in agg_name for agg_name in ES_AGGREGATIONS.keys()) + assert agg_exists, f"No aggregation found for facet: {facet}" + + def test_search_fields_have_boosts(self): + """Test important search fields have boost values.""" + # Important fields should have boost notation (^N) + boosted_fields = [f for f in SEARCH_FIELDS if '^' in f] + assert len(boosted_fields) > 0, "Expected some fields to have boost values" + + def test_search_fields_cover_all_types(self): + """Test search fields cover all searchable entity types.""" + # Should have fields from all major entity types + field_str = ' '.join(SEARCH_FIELDS) + + assert 'mine' in field_str.lower() + assert 'party' in field_str.lower() or 'name' in field_str.lower() + assert 'permit' in field_str.lower() + assert 'document' in field_str.lower() + + def test_type_to_index_no_duplicates(self): + """Test TYPE_TO_INDEX has no duplicate index names.""" + indices = list(TYPE_TO_INDEX.values()) + assert len(indices) == len(set(indices)), "Duplicate index names found in TYPE_TO_INDEX" + + def test_index_to_type_no_duplicates(self): + """Test INDEX_TO_TYPE has no duplicate type names.""" + types = list(INDEX_TO_TYPE.values()) + assert len(types) == len(set(types)), "Duplicate type names found in INDEX_TO_TYPE" + + +class TestSearchConstantsUsage: + """Test search constants can be used correctly.""" + + def test_can_lookup_index_from_type(self): + """Test looking up ES index from document type.""" + index = TYPE_TO_INDEX['mine'] + assert index == 'mines' + + index = TYPE_TO_INDEX['party'] + assert index == 'parties' + + def test_can_lookup_type_from_index(self): + """Test looking up document type from ES index.""" + doc_type = INDEX_TO_TYPE['mines'] + assert doc_type == 'mine' + + doc_type = INDEX_TO_TYPE['parties'] + assert doc_type == 'party' + + def test_can_iterate_facet_keys(self): + """Test can iterate over FACET_KEYS.""" + count = 0 + for facet_key in FACET_KEYS: + assert isinstance(facet_key, str) + count += 1 + + assert count > 0, "FACET_KEYS should not be empty" + + def test_can_iterate_filter_params(self): + """Test can iterate over FILTER_PARAMS.""" + count = 0 + for filter_param in FILTER_PARAMS: + assert isinstance(filter_param, str) + count += 1 + + assert count > 0, "FILTER_PARAMS should not be empty" + + def test_can_use_search_fields_in_query(self): + """Test SEARCH_FIELDS format is valid for ES queries.""" + for field in SEARCH_FIELDS: + assert isinstance(field, str) + # Should not have invalid characters + assert not any(char in field for char in ['<', '>', '{', '}']) + # If has boost, should be 
in format field^number + if '^' in field: + parts = field.split('^') + assert len(parts) == 2, f"Invalid boost format: {field}" + try: + float(parts[1]) # Boost should be a number + except ValueError: + pytest.fail(f"Invalid boost value in field: {field}") diff --git a/services/core-api/tests/search/test_search_facets.py b/services/core-api/tests/search/test_search_facets.py new file mode 100644 index 0000000000..200a6402b8 --- /dev/null +++ b/services/core-api/tests/search/test_search_facets.py @@ -0,0 +1,180 @@ +"""Tests for search facets extraction.""" + +import pytest +from app.api.search.search.search_facets import extract_facets + + +class TestExtractFacets: + """Test facet extraction from ES aggregations.""" + + def test_extract_facets_basic(self): + aggregations = { + 'by_index': { + 'buckets': [ + {'key': 'mines', 'doc_count': 10}, + {'key': 'parties', 'doc_count': 5} + ] + } + } + + facets = extract_facets(aggregations) + + assert 'type' in facets + # extract_facets adds predefined values with 0 counts for missing types + assert len(facets['type']) == 7 + # Find the actual results (non-zero counts) + mine_facet = next(f for f in facets['type'] if f['key'] == 'mine') + party_facet = next(f for f in facets['type'] if f['key'] == 'party') + assert mine_facet == {'key': 'mine', 'count': 10} + assert party_facet == {'key': 'party', 'count': 5} + + def test_extract_facets_mine_region(self): + aggregations = { + 'mine_region': { + 'buckets': [ + {'key': 'SW', 'doc_count': 15}, + {'key': 'NE', 'doc_count': 8} + ] + } + } + + facets = extract_facets(aggregations) + + assert 'mine_region' in facets + assert len(facets['mine_region']) == 2 + assert facets['mine_region'][0] == {'key': 'SW', 'count': 15} + + def test_extract_facets_major_mine_ind(self): + aggregations = { + 'major_mine_ind': { + 'buckets': [ + {'key': 1, 'doc_count': 20}, + {'key': 0, 'doc_count': 30} + ] + } + } + + facets = extract_facets(aggregations) + + assert 'mine_classification' in facets + assert len(facets['mine_classification']) == 2 + # key=1 maps to 'Major Mine', key=0 maps to 'Regional Mine' + assert facets['mine_classification'][0] == {'key': 'Major Mine', 'count': 20} + assert facets['mine_classification'][1] == {'key': 'Regional Mine', 'count': 30} + + def test_extract_facets_nested_aggregation(self): + # Nested aggregation uses specific path structure: ['status_codes', 'codes'] + aggregations = { + 'mine_operation_status': { + 'status_codes': { + 'codes': { + 'buckets': [ + {'key': 'OP', 'doc_count': 25}, + {'key': 'CLD', 'doc_count': 10} + ] + } + } + } + } + + facets = extract_facets(aggregations) + + assert 'mine_operation_status' in facets + assert len(facets['mine_operation_status']) == 2 + assert facets['mine_operation_status'][0] == {'key': 'OP', 'count': 25} + assert facets['mine_operation_status'][1] == {'key': 'CLD', 'count': 10} + + def test_extract_facets_empty_aggregations(self): + aggregations = {} + + facets = extract_facets(aggregations) + + # Should return all expected facet keys + expected_keys = [ + 'mine_region', 'mine_classification', 'mine_operation_status', + 'mine_tenure', 'mine_commodity', 'has_tsf', 'verified_status', + 'permit_status', 'party_type', 'type' + ] + + for key in expected_keys: + assert key in facets + + # Some facets have predefined values with 0 counts + assert facets['mine_classification'] == [ + {'key': 'Major Mine', 'count': 0}, + {'key': 'Regional Mine', 'count': 0} + ] + assert facets['party_type'] == [ + {'key': 'Person', 'count': 0}, + {'key': 'Organization', 
'count': 0} + ] + assert len(facets['type']) == 7 # All 7 document types with 0 counts + + # Others should be empty + assert facets['mine_region'] == [] + assert facets['permit_status'] == [] + + def test_extract_facets_boolean_fields(self): + # has_tsf uses special count aggregation, not boolean buckets + aggregations = { + 'has_tsf': { + 'count': { + 'value': 12 + } + }, + 'by_index': { + 'buckets': [ + {'key': 'mines', 'doc_count': 100}, + ] + } + } + + facets = extract_facets(aggregations) + + assert 'has_tsf' in facets + assert len(facets['has_tsf']) == 2 + assert facets['has_tsf'][0] == {'key': 'Has TSF', 'count': 12} + assert facets['has_tsf'][1] == {'key': 'No TSF', 'count': 88} + + def test_extract_facets_party_type(self): + aggregations = { + 'party_type': { + 'buckets': [ + {'key': 'PER', 'doc_count': 50}, + {'key': 'ORG', 'doc_count': 30} + ] + } + } + + facets = extract_facets(aggregations) + + assert 'party_type' in facets + assert len(facets['party_type']) == 2 + # Party types are mapped to display names + assert facets['party_type'][0] == {'key': 'Person', 'count': 50} + assert facets['party_type'][1] == {'key': 'Organization', 'count': 30} + + def test_extract_facets_multiple_aggregations(self): + aggregations = { + 'mine_region': { + 'buckets': [{'key': 'SW', 'doc_count': 10}] + }, + 'permit_status': { + 'buckets': [{'key': 'O', 'doc_count': 5}] + }, + 'major_mine_ind': { + 'buckets': [{'key': 1, 'doc_count': 3}] + } + } + + facets = extract_facets(aggregations) + + assert 'mine_region' in facets + assert 'permit_status' in facets + assert 'mine_classification' in facets + assert len(facets['mine_region']) == 1 + assert len(facets['permit_status']) == 1 + # mine_classification adds predefined values, so we get 2 (Major Mine: 3, Regional Mine: 0) + assert len(facets['mine_classification']) == 2 + assert facets['mine_classification'][0] == {'key': 'Major Mine', 'count': 3} + assert facets['mine_classification'][1] == {'key': 'Regional Mine', 'count': 0} diff --git a/services/core-api/tests/search/test_search_filters.py b/services/core-api/tests/search/test_search_filters.py new file mode 100644 index 0000000000..0d1015882d --- /dev/null +++ b/services/core-api/tests/search/test_search_filters.py @@ -0,0 +1,154 @@ +"""Tests for search filters.""" + +import pytest +from app.api.search.search.search_filters import build_filter_clauses + + +class TestBuildFilterClauses: + """Test building ES filter clauses from filter parameters.""" + + def test_build_filter_clauses_empty_filters(self): + filters = { + 'mine_region': [], + 'mine_classification': [], + 'permit_status': [] + } + + clauses = build_filter_clauses(filters) + + # Should always include deleted_ind filter + assert len(clauses) >= 1 + deleted_filter = next((c for c in clauses if 'bool' in c and 'should' in c['bool']), None) + assert deleted_filter is not None + + def test_build_filter_clauses_mine_region(self): + filters = { + 'mine_region': ['SW', 'NE'] + } + + clauses = build_filter_clauses(filters) + + # Should have region filter + region_filter = next((c for c in clauses if 'terms' in c and 'mine_region.keyword' in c['terms']), None) + assert region_filter is not None + assert set(region_filter['terms']['mine_region.keyword']) == {'SW', 'NE'} + + def test_build_filter_clauses_major_mine(self): + filters = { + 'mine_classification': ['Major Mine'] + } + + clauses = build_filter_clauses(filters) + + # Should have major_mine_ind filter set to true (using terms, not term) + major_mine_filter = next((c for c in clauses 
if 'terms' in c and 'major_mine_ind' in c['terms']), None) + assert major_mine_filter is not None + assert major_mine_filter['terms']['major_mine_ind'] == [True] + + def test_build_filter_clauses_regional_mine(self): + filters = { + 'mine_classification': ['Regional Mine'] + } + + clauses = build_filter_clauses(filters) + + # Should have major_mine_ind filter set to false (using terms, not term) + regional_mine_filter = next((c for c in clauses if 'terms' in c and 'major_mine_ind' in c['terms']), None) + assert regional_mine_filter is not None + assert regional_mine_filter['terms']['major_mine_ind'] == [False] + + def test_build_filter_clauses_permit_status(self): + filters = { + 'permit_status': ['O', 'C'] + } + + clauses = build_filter_clauses(filters) + + # Should have permit status filter + permit_filter = next((c for c in clauses if 'terms' in c and 'permit_status_code.keyword' in c['terms']), None) + assert permit_filter is not None + assert set(permit_filter['terms']['permit_status_code.keyword']) == {'O', 'C'} + + def test_build_filter_clauses_has_tsf_yes(self): + filters = { + 'has_tsf': ['Has TSF'] # Uses 'Has TSF' label, not 'Yes' + } + + clauses = build_filter_clauses(filters) + + # Should have nested filter with exists for TSF + tsf_filter = next((c for c in clauses if 'nested' in c and c['nested'].get('path') == 'tailings_storage_facilities'), None) + assert tsf_filter is not None + # Check the nested query has an exists clause + assert 'exists' in tsf_filter['nested']['query'] + assert tsf_filter['nested']['query']['exists']['field'] == 'tailings_storage_facilities.mine_tailings_storage_facility_guid' + + def test_build_filter_clauses_has_tsf_no(self): + filters = { + 'has_tsf': ['No TSF'] # Uses 'No TSF' label, not 'No' + } + + clauses = build_filter_clauses(filters) + + # Should have bool must_not with nested filter + tsf_filter = next((c for c in clauses if 'bool' in c and 'must_not' in c['bool']), None) + assert tsf_filter is not None + # Check that must_not contains a nested filter + assert 'nested' in tsf_filter['bool']['must_not'] + assert tsf_filter['bool']['must_not']['nested']['path'] == 'tailings_storage_facilities' + + def test_build_filter_clauses_verified_status(self): + filters = { + 'verified_status': ['Verified'] + } + + clauses = build_filter_clauses(filters) + + # Should have nested filter for verified status + verified_filter = next((c for c in clauses if 'nested' in c and c['nested'].get('path') == 'verified_status'), None) + assert verified_filter is not None + + def test_build_filter_clauses_multiple_filters(self): + filters = { + 'mine_region': ['SW'], + 'permit_status': ['O'], + 'mine_classification': ['Major Mine'] + } + + clauses = build_filter_clauses(filters) + + # Should have multiple filters + assert len(clauses) >= 3 + + def test_build_filter_clauses_mine_operation_status(self): + filters = { + 'mine_operation_status': ['OP', 'CLD'] + } + + clauses = build_filter_clauses(filters) + + # Should have nested filter for operation status + op_status_filter = next((c for c in clauses if 'nested' in c and c['nested'].get('path') == 'mine_status'), None) + assert op_status_filter is not None + + def test_build_filter_clauses_mine_tenure(self): + filters = { + 'mine_tenure': ['PLR', 'MIN'] + } + + clauses = build_filter_clauses(filters) + + # Should have nested filter for tenure + tenure_filter = next((c for c in clauses if 'nested' in c and c['nested'].get('path') == 'mine_types'), None) + assert tenure_filter is not None + + def 
test_build_filter_clauses_mine_commodity(self): + filters = { + 'mine_commodity': ['CU', 'AU'] + } + + clauses = build_filter_clauses(filters) + + # Should have nested filter for commodity + commodity_filter = next((c for c in clauses if 'nested' in c), None) + assert commodity_filter is not None diff --git a/services/core-api/tests/search/test_search_transformers.py b/services/core-api/tests/search/test_search_transformers.py new file mode 100644 index 0000000000..0e62e58507 --- /dev/null +++ b/services/core-api/tests/search/test_search_transformers.py @@ -0,0 +1,357 @@ +"""Tests for search transformers.""" + +import pytest +from app.api.search.search.search_transformers import ( + prepare_mine_source, + prepare_party_source, + prepare_permit_source, + prepare_document_source, + prepare_explosives_permit_source, + prepare_now_application_source, + prepare_nod_source, + transform_es_results, + PREPARE_FUNCTIONS, + SEARCH_RESULT_MODELS, +) + + +class TestPrepareMineSources: + """Test mine source preparation.""" + + def test_prepare_mine_source_basic(self): + source = { + 'mine_guid': 'test-guid-123', + 'mine_name': 'Test Mine', + 'mine_no': 'M-001', + 'mine_region': 'SW', + 'major_mine_ind': True, + 'mms_alias': 'ALIAS001', + } + + result = prepare_mine_source(source) + + assert result['mine_guid'] == 'test-guid-123' + assert result['mine_name'] == 'Test Mine' + assert result['mine_no'] == 'M-001' + assert result['mine_region'] == 'SW' + assert result['major_mine_ind'] is True + + def test_prepare_mine_source_with_status(self): + source = { + 'mine_guid': 'test-guid-123', + 'mine_name': 'Test Mine', + 'mine_status': [{ + 'status_xref': { + 'mine_operation_status_code': 'OP' + } + }] + } + + result = prepare_mine_source(source) + + assert 'mine_status' in result + assert result['mine_status']['status_labels'] == ['OP'] + + def test_prepare_mine_source_with_nested_fields(self): + source = { + 'mine_guid': 'test-guid-123', + 'mine_types': [{'mine_type_guid': 'type-1'}], + 'tailings_storage_facilities': [{'tsf_guid': 'tsf-1'}], + 'work_information': {'work_start_date': '2024-01-01'}, + 'verified_status': {'healthy_ind': True} + } + + result = prepare_mine_source(source) + + assert result['mine_type'] == [{'mine_type_guid': 'type-1'}] + assert result['mine_tailings_storage_facilities'] == [{'tsf_guid': 'tsf-1'}] + assert result['mine_work_information'] == {'work_start_date': '2024-01-01'} + assert result['verified_status'] == {'healthy_ind': True} + + +class TestPreparePartySources: + """Test party source preparation.""" + + def test_prepare_party_source_person(self): + source = { + 'party_guid': 'party-123', + 'first_name': 'John', + 'party_name': 'Doe', + 'party_type_code': 'PER', + 'email': 'john.doe@example.com', + 'phone_no': '555-1234' + } + + result = prepare_party_source(source) + + assert result['party_guid'] == 'party-123' + assert result['name'] == 'John Doe' + assert result['first_name'] == 'John' + assert result['party_name'] == 'Doe' + assert result['email'] == 'john.doe@example.com' + assert result['party_orgbook_entity'] is None + assert result['business_role_appts'] == [] + assert result['address'] == [] + + def test_prepare_party_source_organization(self): + source = { + 'party_guid': 'party-456', + 'first_name': '', + 'party_name': 'ACME Corporation', + 'party_type_code': 'ORG' + } + + result = prepare_party_source(source) + + assert result['name'] == 'ACME Corporation' + assert result['first_name'] == '' + + def test_prepare_party_source_with_appointments(self): + source = { 
+ 'party_guid': 'party-123', + 'first_name': 'Jane', + 'party_name': 'Smith', + 'mine_party_appt': [{ + 'mine_party_appt_guid': 'appt-123', + 'mine_party_appt_type_code': 'PMT', + 'start_date': '2024-01-01', + 'end_date': None + }] + } + + result = prepare_party_source(source) + + assert len(result['mine_party_appt']) == 1 + appt = result['mine_party_appt'][0] + assert appt['mine_party_appt_guid'] == 'appt-123' + assert appt['mine_party_appt_type_code'] == 'PMT' + assert appt['start_date'] == '2024-01-01' + assert appt['end_date'] is None + assert appt['mine'] is None + assert appt['permit_no'] is None + + +class TestPreparePermitSources: + """Test permit source preparation.""" + + def test_prepare_permit_source_basic(self): + source = { + 'permit_guid': 'permit-123', + 'permit_no': 'P-001', + 'permittees': [{ + 'first_name': 'John', + 'party_name': 'Doe' + }], + 'mine_guids': ['mine-guid-1', 'mine-guid-2'] + } + + result = prepare_permit_source(source) + + assert result['permit_guid'] == 'permit-123' + assert result['permit_no'] == 'P-001' + assert result['current_permittee'] == 'John Doe' + assert len(result['mine']) == 2 + assert result['mine'][0]['mine_guid'] == 'mine-guid-1' + + def test_prepare_permit_source_organization_permittee(self): + source = { + 'permit_guid': 'permit-456', + 'permit_no': 'P-002', + 'permittees': [{ + 'first_name': '', + 'party_name': 'Mining Corp' + }] + } + + result = prepare_permit_source(source) + + assert result['current_permittee'] == 'Mining Corp' + + +class TestPrepareDocumentSources: + """Test document source preparation.""" + + def test_prepare_document_source_with_mine_info(self): + source = { + 'mine_document_guid': 'doc-123', + 'document_name': 'Test Document.pdf', + 'mine': { + 'mine_name': 'Test Mine', + 'mine_guid': 'mine-123' + } + } + + result = prepare_document_source(source) + + assert result['mine_name'] == 'Test Mine' + + def test_prepare_document_source_without_mine_info(self): + source = { + 'mine_document_guid': 'doc-123', + 'document_name': 'Test Document.pdf' + } + + result = prepare_document_source(source) + + assert result['mine_name'] is None + + +class TestPrepareExplosivesPermitSources: + """Test explosives permit source preparation.""" + + def test_prepare_explosives_permit_source(self): + source = { + 'explosives_permit_guid': 'exp-123', + 'application_number': 'APP-001', + 'mine': { + 'mine_name': 'Test Mine' + } + } + + result = prepare_explosives_permit_source(source) + + assert result['mine_name'] == 'Test Mine' + + +class TestPrepareNowApplicationSources: + """Test NoW application source preparation.""" + + def test_prepare_now_application_source(self): + source = { + 'now_application_guid': 'now-123', + 'now_number': 'NOW-001', + 'mine': { + 'mine_name': 'Test Mine' + }, + 'application': { + 'now_application_status_code': 'REC', + 'notice_of_work_type_code': 'QIM' + } + } + + result = prepare_now_application_source(source) + + assert result['mine_name'] == 'Test Mine' + assert result['now_application_status_code'] == 'REC' + assert result['notice_of_work_type_code'] == 'QIM' + + +class TestPrepareNodSources: + """Test NOD source preparation.""" + + def test_prepare_nod_source(self): + source = { + 'nod_guid': 'nod-123', + 'nod_no': 'NOD-001', + 'mine': { + 'mine_name': 'Test Mine' + } + } + + result = prepare_nod_source(source) + + assert result['mine_name'] == 'Test Mine' + + +class TestTransformESResults: + """Test ES results transformation.""" + + def test_transform_es_results_multiple_types(self): + hits = [ + { + 
'_index': 'mines', + '_score': 10.5, + '_source': { + 'mine_guid': 'mine-123', + 'mine_name': 'Test Mine', + 'mine_no': 'M-001' + } + }, + { + '_index': 'parties', + '_score': 8.3, + '_source': { + 'party_guid': 'party-123', + 'first_name': 'John', + 'party_name': 'Doe', + 'party_type_code': 'PER' + } + } + ] + + results = transform_es_results(hits) + + assert 'mine' in results + assert 'party' in results + assert len(results['mine']) == 1 + assert len(results['party']) == 1 + + mine_result = results['mine'][0] + assert mine_result['score'] == 10.5 + assert mine_result['type'] == 'mine' + assert mine_result['result']['mine_name'] == 'Test Mine' + + party_result = results['party'][0] + assert party_result['score'] == 8.3 + assert party_result['type'] == 'party' + assert party_result['result']['name'] == 'John Doe' + + def test_transform_es_results_unknown_index(self): + hits = [ + { + '_index': 'unknown_index', + '_score': 5.0, + '_source': {'test': 'data'} + } + ] + + results = transform_es_results(hits) + + assert results == {} + + def test_transform_es_results_groups_by_type(self): + hits = [ + { + '_index': 'mines', + '_score': 10.0, + '_source': {'mine_guid': 'mine-1', 'mine_name': 'Mine 1'} + }, + { + '_index': 'mines', + '_score': 9.0, + '_source': {'mine_guid': 'mine-2', 'mine_name': 'Mine 2'} + } + ] + + results = transform_es_results(hits) + + assert 'mine' in results + assert len(results['mine']) == 2 + assert results['mine'][0]['result']['mine_name'] == 'Mine 1' + assert results['mine'][1]['result']['mine_name'] == 'Mine 2' + + +class TestTransformerMappings: + """Test transformer configuration mappings.""" + + def test_prepare_functions_has_all_types(self): + expected_types = [ + 'mine', 'party', 'permit', 'mine_documents', + 'explosives_permit', 'now_application', 'notice_of_departure' + ] + + for doc_type in expected_types: + assert doc_type in PREPARE_FUNCTIONS, f"Missing prepare function for {doc_type}" + + def test_search_result_models_has_all_types(self): + expected_types = [ + 'mine', 'party', 'permit', 'mine_documents', + 'explosives_permit', 'now_application', 'notice_of_departure' + ] + + for doc_type in expected_types: + assert doc_type in SEARCH_RESULT_MODELS, f"Missing search result model for {doc_type}" + + def test_mappings_are_synchronized(self): + """Ensure both mappings have the same keys.""" + assert set(PREPARE_FUNCTIONS.keys()) == set(SEARCH_RESULT_MODELS.keys()) diff --git a/services/core-api/tests/search/test_simple_search_service.py b/services/core-api/tests/search/test_simple_search_service.py new file mode 100644 index 0000000000..59baed693d --- /dev/null +++ b/services/core-api/tests/search/test_simple_search_service.py @@ -0,0 +1,464 @@ +""" +Unit tests for SimpleSearchService + +Tests the business logic layer independently of HTTP/Flask concerns. 
+""" + +import pytest +from unittest.mock import Mock, patch, MagicMock +from app.api.search.search.simple_search_service import SimpleSearchService + + +class TestSimpleSearchService: + """Unit tests for SimpleSearchService business logic.""" + + @pytest.fixture + def service(self): + """Create a SimpleSearchService instance.""" + return SimpleSearchService() + + # ==================== Index Selection Tests ==================== + + def test_determine_search_indices_all_types(self, service): + """Test determining indices when no type filter specified.""" + indices = service._determine_search_indices(None) + + assert len(indices) > 0 + assert 'mines' in indices + assert 'parties' in indices + assert 'mine_permits' in indices + + def test_determine_search_indices_single_type(self, service): + """Test determining indices with single type filter.""" + indices = service._determine_search_indices(['mine']) + + assert len(indices) == 1 + assert 'mines' in indices + + def test_determine_search_indices_multiple_types(self, service): + """Test determining indices with multiple type filters.""" + indices = service._determine_search_indices(['mine', 'permit']) + + assert len(indices) == 2 + assert 'mines' in indices + assert 'mine_permits' in indices + + def test_determine_search_indices_empty_types(self, service): + """Test determining indices with empty type list.""" + indices = service._determine_search_indices([]) + + assert len(indices) == 0 + + # ==================== Filter Building Tests ==================== + + def test_build_base_filters_without_mine_guid(self, app, service): + """Test building base filters without mine_guid.""" + with app.app_context(): + # Configure app config + app.config['DELETED_DOCUMENTS_LOOKBACK_MONTHS'] = 12 + + filters = service._build_base_filters(None) + + assert len(filters) == 1 + assert 'bool' in filters[0] # Deleted filter + + def test_build_base_filters_with_mine_guid(self, app, service): + """Test building base filters with mine_guid.""" + with app.app_context(): + # Configure app config + app.config['DELETED_DOCUMENTS_LOOKBACK_MONTHS'] = 12 + + filters = service._build_base_filters('test-guid-123') + + assert len(filters) == 2 + assert 'bool' in filters[0] # Deleted filter + assert 'bool' in filters[1] # Mine GUID filter + + # ==================== Query Building Tests ==================== + + def test_build_search_query_wildcard(self, service): + """Test building wildcard search query.""" + filters = [{'term': {'deleted_ind': False}}] + query = service._build_search_query('*', filters) + + assert 'query' in query + assert 'match_all' in query['query']['bool']['must'][0] + assert 'sort' in query + + def test_build_search_query_short_term(self, service): + """Test building query for short search term (< 3 chars).""" + filters = [{'term': {'deleted_ind': False}}] + query = service._build_search_query('ab', filters) + + assert 'query' in query + assert 'bool' in query['query'] + assert 'should' in query['query']['bool'] + assert 'phrase_prefix' in str(query) + assert 'highlight' in query + + def test_build_search_query_long_term(self, service): + """Test building query for long search term (>= 3 chars).""" + filters = [{'term': {'deleted_ind': False}}] + query = service._build_search_query('mountain', filters) + + assert 'query' in query + assert 'bool' in query['query'] + assert 'should' in query['query']['bool'] + assert 'fuzziness' in str(query) + assert 'highlight' in query + + def test_build_search_query_without_highlight(self, service): + """Test building 
query without highlight configuration.""" + filters = [{'term': {'deleted_ind': False}}] + query = service._build_search_query('test', filters, include_highlight=False) + + assert 'query' in query + assert 'highlight' not in query + + # ==================== GUID Extraction Tests ==================== + + def test_extract_mine_guid_from_mine(self, service): + """Test extracting mine_guid from mine document.""" + source = {'mine_guid': 'abc-123', 'mine_name': 'Test Mine'} + + guid = service._extract_mine_guid('mine', source) + + assert guid == 'abc-123' + + def test_extract_mine_guid_from_permit(self, service): + """Test extracting mine_guid from permit document.""" + source = {'mine_guids': ['def-456', 'ghi-789'], 'permit_no': 'P-001'} + + guid = service._extract_mine_guid('permit', source) + + assert guid == 'def-456' # First GUID + + def test_extract_mine_guid_from_nod(self, service): + """Test extracting mine_guid from NOD document.""" + source = {'mine': {'mine_guid': 'jkl-012'}, 'nod_no': 'NOD-001'} + + guid = service._extract_mine_guid('notice_of_departure', source) + + assert guid == 'jkl-012' + + def test_extract_mine_guid_missing(self, service): + """Test extracting mine_guid when not present.""" + source = {'mine_name': 'Test Mine'} + + guid = service._extract_mine_guid('mine', source) + + assert guid is None + + # ==================== Result Processing Tests ==================== + + def test_process_mine_result(self, service): + """Test processing mine search result.""" + source = { + 'mine_name': 'Test Mine', + 'mine_no': 'M-123', + 'mms_alias': 'TM', + 'mine_types': [ + { + 'mine_type_details': [ + {'mine_commodity_code': 'Coal'}, + {'mine_commodity_code': 'Gold'} + ] + } + ] + } + + result_type, value, description = service._process_mine_result(source) + + assert result_type == 'mine' + assert value == 'Test Mine' + assert 'M-123' in description + assert ('Coal' in description or 'Gold' in description) + assert 'Alias: TM' in description + + def test_process_party_result_person(self, service): + """Test processing person party result.""" + source = { + 'first_name': 'John', + 'party_name': 'Doe', + 'party_type_code': 'PER', + 'email': 'john@example.com', + 'phone_no': '555-1234' + } + + result_type, value, description = service._process_party_result(source) + + assert result_type == 'person' + assert value == 'John Doe' + assert 'john@example.com' in description + assert '555-1234' in description + + def test_process_party_result_organization(self, service): + """Test processing organization party result.""" + source = { + 'party_name': 'ACME Corp', + 'party_type_code': 'ORG', + 'email': 'info@acme.com' + } + + result_type, value, description = service._process_party_result(source) + + assert result_type == 'organization' + assert value == 'ACME Corp' + assert 'info@acme.com' in description + + def test_process_permit_result(self, service): + """Test processing permit search result.""" + source = { + 'permit_no': 'P-001', + 'permit_status_code': 'APP', + 'permittees': [ + {'first_name': 'Jane', 'party_name': 'Smith'} + ] + } + + result_type, value, description = service._process_permit_result(source) + + assert result_type == 'permit' + assert value == 'P-001' + assert 'Jane Smith' in description + assert 'Status: APP' in description + + def test_process_nod_result(self, service): + """Test processing NOD search result.""" + source = { + 'nod_title': 'Test NOD', + 'nod_no': 'NOD-001', + 'nod_status': 'approved', + 'mine': {'mine_name': 'Test Mine'} + } + + result_type, value, 
description = service._process_nod_result(source) + + assert result_type == 'nod' + assert value == 'Test NOD' + assert 'NOD-001' in description + assert 'Test Mine' in description + assert 'Approved' in description + + def test_process_explosives_permit_result(self, service): + """Test processing explosives permit result.""" + source = { + 'permit_number': 'EP-001', + 'application_status': 'APP', + 'is_closed': False, + 'mine': {'mine_name': 'Test Mine'} + } + + result_type, value, description = service._process_explosives_permit_result(source) + + assert result_type == 'explosives_permit' + assert value == 'EP-001' + assert 'Test Mine' in description + assert 'Approved' in description + + def test_process_now_application_result(self, service): + """Test processing NOW application result.""" + source = { + 'now_number': 'NOW-001', + 'application': { + 'property_name': 'Test Property', + 'now_application_status_code': 'AIA' + }, + 'mine': {'mine_name': 'Test Mine'} + } + + result_type, value, description = service._process_now_application_result(source) + + assert result_type == 'now_application' + assert value == 'NOW-001' + assert 'Test Property' in description + assert 'Test Mine' in description + assert 'Approved' in description + + # ==================== Group and Rank Tests ==================== + + def test_group_and_rank_results(self, service): + """Test grouping and ranking search results.""" + # Create mock SearchResult objects + result1 = Mock(score=10.0, result={'id': 'id-1', 'value': 'Result 1'}) + result2 = Mock(score=20.0, result={'id': 'id-2', 'value': 'Result 2'}) + result3 = Mock(score=15.0, result={'id': 'id-1', 'value': 'Result 1'}) # Duplicate + + search_results = [result1, result2, result3] + + ranked = service._group_and_rank_results(search_results) + + assert len(ranked) == 2 # Duplicates merged + assert ranked[0].result['id'] == 'id-1' # Highest score (10+15=25) + assert ranked[0].score == 25.0 + assert ranked[1].result['id'] == 'id-2' + assert ranked[1].score == 20.0 + + def test_group_and_rank_results_limits_to_4(self, service): + """Test that ranking limits results to top 4.""" + # Create 6 mock results + results = [ + Mock(score=float(i), result={'id': f'id-{i}', 'value': f'Result {i}'}) + for i in range(6) + ] + + ranked = service._group_and_rank_results(results) + + assert len(ranked) == 4 + assert ranked[0].score == 5.0 # Highest score first + assert ranked[3].score == 2.0 + + # ==================== Integration Tests (with mocked ES) ==================== + + @patch('app.api.search.search.simple_search_service.ElasticSearchService.search') + def test_execute_search_basic(self, mock_es_search, app, service): + """Test basic search execution flow.""" + with app.app_context(): + # Mock the search method directly + mock_es_search.return_value = { + 'hits': { + 'hits': [ + { + '_index': 'mines', + '_score': 10.0, + '_source': { + 'mine_guid': 'test-guid', + 'mine_name': 'Test Mine', + 'mine_no': 'M-001', + 'mine_types': [] + }, + 'highlight': {} + } + ] + }, + 'aggregations': { + 'by_index': { + 'buckets': [{'key': 'mines', 'doc_count': 1}] + } + } + } + + result = service.execute_search('test', None, None) + + assert 'search_results' in result + assert 'facets' in result + assert len(result['search_results']) == 1 + assert result['search_results'][0].result['value'] == 'Test Mine' + + @patch('app.api.search.search.simple_search_service.ElasticSearchService.search') + def test_execute_search_with_mine_guid_filter(self, mock_es_search, app, service): + """Test 
search execution with mine_guid filter.""" + with app.app_context(): + mock_es_search.return_value = { + 'hits': {'hits': []}, + 'aggregations': {'by_index': {'buckets': []}} + } + + result = service.execute_search('test', None, 'test-mine-guid') + + assert 'search_results' in result + assert 'facets' in result + # Verify ElasticSearchService was called + assert mock_es_search.called + + @patch('app.api.search.search.simple_search_service.ElasticSearchService.search') + def test_execute_search_with_type_filter(self, mock_es_search, app, service): + """Test search execution with type filter.""" + with app.app_context(): + mock_es_search.return_value = { + 'hits': {'hits': []}, + 'aggregations': {'by_index': {'buckets': []}} + } + + result = service.execute_search('test', 'mine,permit', None) + + assert 'search_results' in result + assert 'facets' in result + + @patch('app.api.search.search.simple_search_service.ElasticSearchService.search') + def test_execute_search_no_indices(self, mock_es_search, app, service): + """Test search execution when no indices match.""" + with app.app_context(): + result = service.execute_search('test', 'invalid_type', None) + + assert result['search_results'] == [] + assert result['facets'] == {} + # ES should not be called + assert not mock_es_search.called + + @patch('app.api.search.search.simple_search_service.ElasticSearchService.search') + def test_execute_search_handles_es_error(self, mock_es_search, app, service): + """Test that search handles Elasticsearch errors gracefully.""" + with app.app_context(): + # Mock Elasticsearch error + mock_es_search.side_effect = Exception("ES connection error") + + result = service.execute_search('test', None, None) + + assert 'search_results' in result + assert 'facets' in result + assert len(result['search_results']) == 0 # Empty results on error + + # ==================== Edge Cases ==================== + + def test_process_mine_result_minimal_data(self, service): + """Test processing mine result with minimal data.""" + source = {'mine_name': 'Test Mine'} + + result_type, value, description = service._process_mine_result(source) + + assert result_type == 'mine' + assert value == 'Test Mine' + assert description == '' # No additional data + + def test_process_party_result_no_first_name(self, service): + """Test processing party result without first name.""" + source = { + 'party_name': 'Doe', + 'party_type_code': 'PER' + } + + result_type, value, description = service._process_party_result(source) + + assert result_type == 'person' + assert value == 'Doe' + + def test_process_permit_result_no_permittees(self, service): + """Test processing permit result without permittees.""" + source = { + 'permit_no': 'P-001', + 'permittees': [] + } + + result_type, value, description = service._process_permit_result(source) + + assert result_type == 'permit' + assert value == 'P-001' + + def test_extract_mine_guid_from_permit_empty_list(self, service): + """Test extracting mine_guid from permit with empty mine_guids list.""" + source = {'mine_guids': [], 'permit_no': 'P-001'} + + guid = service._extract_mine_guid('permit', source) + + assert guid is None + + def test_build_search_query_empty_term(self, service): + """Test building query with empty search term.""" + filters = [{'term': {'deleted_ind': False}}] + query = service._build_search_query('', filters) + + assert 'match_all' in query['query']['bool']['must'][0] + + @patch('app.api.search.search.simple_search_service.ElasticSearchService.search') + def 
test_get_facet_counts_no_search_term(self, mock_es_search, app, service): + """Test getting facet counts with no search term.""" + with app.app_context(): + facets = service._get_facet_counts('') + + # Should return empty facets structure + assert 'mine' in facets + assert 'person' in facets + assert all(v == 0 for v in facets.values()) + # ES should not be called + assert not mock_es_search.called diff --git a/services/core-web/rsbuild.config.ts b/services/core-web/rsbuild.config.ts index e802f9462a..3f33e29f08 100644 --- a/services/core-web/rsbuild.config.ts +++ b/services/core-web/rsbuild.config.ts @@ -79,7 +79,7 @@ export default defineConfig({ pluginTypeCheck(), ], source: { - include: [/\.(?:ts|tsx|jsx|mts|cts|js)$/], + include: [PATHS.commonPackage, PATHS.sharedPackage], assetsInclude: /\.(?:png|jpe?g|gif|svg|mp3|pdf|docx?|xlsx?|woff2?|ttf|eot)$/, define: { "process.env": JSON.stringify(envFile), diff --git a/services/core-web/src/components/Forms/noticeOfWork/VerifyNoWContacts.tsx b/services/core-web/src/components/Forms/noticeOfWork/VerifyNoWContacts.tsx index c186dfab8c..a50bb66835 100644 --- a/services/core-web/src/components/Forms/noticeOfWork/VerifyNoWContacts.tsx +++ b/services/core-web/src/components/Forms/noticeOfWork/VerifyNoWContacts.tsx @@ -15,16 +15,14 @@ import { required } from "@mds/common/redux/utils/Validate"; import { fetchSearchResults as fetchSearchResultsAction, clearAllSearchResults as clearAllSearchResultsAction, -} from "@mds/common/redux/actionCreators/searchActionCreator"; + storeSubsetSearchResults as storeSubsetSearchResultsAction, + getSearchResults, + getSearchSubsetResults, +} from "@mds/common/redux/slices/searchSlice"; import { fetchPartyById as fetchPartyByIdAction, updateParty as updatePartyAction } from "@mds/common/redux/slices/partiesSlice"; -import { storeSubsetSearchResults as storeSubsetSearchResultsAction } from "@mds/common/redux/actions/searchActions"; import { TRASHCAN, PROFILE_NOCIRCLE } from "@/constants/assets"; import AuthorizationWrapper from "@/components/common/wrappers/AuthorizationWrapper"; import * as Permission from "@/constants/permissions"; -import { - getSearchResults, - getSearchSubsetResults, -} from "@mds/common/redux/selectors/searchSelectors"; import * as Strings from "@mds/common/constants/strings"; import Address from "@/components/common/Address"; @@ -32,7 +30,7 @@ import AddButton from "@/components/common/buttons/AddButton"; import RenderSelect from "@mds/common/components/forms/RenderSelect"; import CoreTable from "@mds/common/components/common/CoreTable"; import LoadingWrapper from "@/components/common/wrappers/LoadingWrapper"; -import { IMinePartyApptType, IParty, IPartyRelationshipType } from "@mds/common/interfaces"; +import { IMinePartyApptType, IParty } from "@mds/common/interfaces"; export interface VerifyNoWContactValue { id?: string; @@ -305,7 +303,7 @@ export const VerifyNoWContacts: React.FC = (props) => { const openModal = (cfg: any) => dispatch(openModalAction(cfg)); const closeModal = () => dispatch(closeModalAction()); const fetchSearchResults = (term: string, category: string) => - dispatch(fetchSearchResultsAction(term, category)); + dispatch(fetchSearchResultsAction({ searchTerm: term, searchTypes: [category] })); const clearAllSearchResults = () => dispatch(clearAllSearchResultsAction()); const storeSubsetSearchResults = (r: any) => dispatch(storeSubsetSearchResultsAction(r)); const fetchPartyById = (pg: string) => dispatch(fetchPartyByIdAction(pg)); @@ -489,8 +487,8 @@ export const 
VerifyNoWContacts: React.FC = (props) => { } setIsLoading(true); - Promise.resolve(fetchSearchResults(newSearchTerm, "party")).then((response: any) => { - const data = response?.data ?? response; + Promise.resolve(fetchSearchResults(newSearchTerm || searchTerm, "party")).then((action: any) => { + const data = action?.payload ?? action; const partyResults: any[] = data?.search_results?.party ?? []; // Merge with any persisted selected results not present in latest search const merged = [ diff --git a/services/core-web/src/components/common/PartySelectField.tsx b/services/core-web/src/components/common/PartySelectField.tsx index 6d91742a3f..a3acd6ee19 100644 --- a/services/core-web/src/components/common/PartySelectField.tsx +++ b/services/core-web/src/components/common/PartySelectField.tsx @@ -8,9 +8,8 @@ import { Divider } from "antd"; import { MailOutlined, PhoneOutlined, PlusOutlined } from "@ant-design/icons"; import { change, Field } from "@mds/common/components/forms/form"; import { getSearchResults } from "@mds/common/redux/selectors/searchSelectors"; -import { getLastCreatedParty } from "@mds/common/redux/slices/partiesSlice"; -import { fetchSearchResults } from "@mds/common/redux/actionCreators/searchActionCreator"; -import { setAddPartyFormState } from "@mds/common/redux/slices/partiesSlice"; +import { getLastCreatedParty, setAddPartyFormState } from "@mds/common/redux/slices/partiesSlice"; +import { fetchSearchResults } from "@mds/common/redux/slices/searchSlice"; import { createItemMap } from "@common/utils/helpers"; import { Validate } from "@mds/common/redux/utils/Validate"; import LinkButton from "@mds/common/components/common/LinkButton"; @@ -121,7 +120,7 @@ export const PartySelectField: FC = ({ const handleFetchSearchResults = useCallback( (searchTerm: string, searchType: string) => { setSearching(true); - dispatch(fetchSearchResults(searchTerm, searchType)); + dispatch(fetchSearchResults({ searchTerm, searchTypes: [searchType] })); }, [dispatch] ); diff --git a/services/core-web/src/components/common/RenderMultiSelectPartySearch.js b/services/core-web/src/components/common/RenderMultiSelectPartySearch.js index a9a62c9f87..bafeee272f 100644 --- a/services/core-web/src/components/common/RenderMultiSelectPartySearch.js +++ b/services/core-web/src/components/common/RenderMultiSelectPartySearch.js @@ -9,7 +9,7 @@ import debounce from "lodash/debounce"; import { fetchSearchResults, clearAllSearchResults, -} from "@mds/common/redux/actionCreators/searchActionCreator"; +} from "@mds/common/redux/slices/searchSlice"; const debouncePropTypes = { fetchOptions: PropTypes.func.isRequired, @@ -80,8 +80,8 @@ export const RenderMultiSelectPartySearch = (props) => { }, [props.triggerSelectReset]); const getFetchOptions = (value) => - props.fetchSearchResults(value, "party").then((response) => { - const results = response?.data?.search_results || []; + props.fetchSearchResults({ searchTerm: value, searchTypes: ["party"] }).then((response) => { + const results = response?.payload?.search_results || response?.data?.search_results || []; props.onSearchResultsChanged(results); setSearchResults(results); return results?.party diff --git a/services/core-web/src/components/homepage/HomeBanner.tsx b/services/core-web/src/components/homepage/HomeBanner.tsx index 140c858dfa..a823d345d1 100644 --- a/services/core-web/src/components/homepage/HomeBanner.tsx +++ b/services/core-web/src/components/homepage/HomeBanner.tsx @@ -1,10 +1,14 @@ import React from "react"; import { Typography, Col, Row } from 
"antd"; +import GlobalSearch from "@/components/search/GlobalSearch/GlobalSearch"; import SearchBar from "@/components/search/SearchBar"; import { BACKGROUND } from "@/constants/assets"; +import { useFeatureFlag } from "@mds/common/providers/featureFlags/useFeatureFlag"; +import { Feature } from "@mds/common/utils"; const HomeBanner = () => { + const { isFeatureEnabled } = useFeatureFlag(); return (
{ } id="homepage-banner" > - - - Welcome back to CORE - - - - - - - + + + + Welcome back to CORE + + + {isFeatureEnabled(Feature.GLOBAL_SEARCH_V2) ? ( + + + + ) : ( + + + + )} + + - - Photo Credit: Dominic Yague - + + Photo Credit: Dominic Yague + +
); }; diff --git a/services/core-web/src/components/mine/Permit/Search/components/PermitDocumentsModal.tsx b/services/core-web/src/components/mine/Permit/Search/components/PermitDocumentsModal.tsx new file mode 100644 index 0000000000..6e884fdcc2 --- /dev/null +++ b/services/core-web/src/components/mine/Permit/Search/components/PermitDocumentsModal.tsx @@ -0,0 +1,72 @@ +import React, { useEffect } from "react"; +import { Button, Table } from "antd"; +import { fetchPermits } from "@mds/common/redux/actionCreators/permitActionCreator"; +import { getAmendment } from "@mds/common/redux/selectors/permitSelectors"; +import DocumentLink from "@mds/common/components/documents/DocumentLink"; +import { IPermitAmendment } from "@mds/common/interfaces/permits/permitAmendment.interface"; +import { useAppDispatch, useAppSelector } from "@mds/common/redux/rootState"; +import { formatDate } from "@common/utils/helpers"; + +interface Props { + onCancel: () => void; + permitAmendmentGuid: string; + mineGuid: string; + permitGuid: string; +} + +const PermitDocumentsModal: React.FC = ({ + onCancel, + permitAmendmentGuid, + mineGuid, + permitGuid, +}) => { + const dispatch = useAppDispatch(); + const currentAmendment: IPermitAmendment = useAppSelector((state) => + getAmendment(permitGuid, permitAmendmentGuid)(state) + ); + + useEffect(() => { + dispatch(fetchPermits(mineGuid)); + }, [mineGuid, dispatch]); + + const columns = [ + { + title: "Document Name", + dataIndex: "document_name", + key: "document_name", + render: (text, record) => ( + + ), + }, + { + title: "Category", + dataIndex: "category", + key: "category", + }, + { + title: "Upload Date", + dataIndex: "upload_date", + key: "upload_date", + render: (text) => formatDate(text), + }, + ]; + + const allDocuments = [ + ...(currentAmendment?.related_documents || []).map(doc => ({ ...doc, category: 'Permit Document' })), + ...(currentAmendment?.now_application_documents || []).filter(d => d.is_final_package).map(doc => ({ ...doc, document_name: doc.mine_document.document_name, document_manager_guid: doc.mine_document.document_manager_guid, upload_date: doc.mine_document.upload_date, category: 'Final Application Package' })) + ]; + + return ( +
+ +
+ +
+ + ); +}; + +export default PermitDocumentsModal; diff --git a/services/core-web/src/components/mine/Permit/Search/components/ResultItem.tsx b/services/core-web/src/components/mine/Permit/Search/components/ResultItem.tsx index f00a33488d..fcfb423457 100644 --- a/services/core-web/src/components/mine/Permit/Search/components/ResultItem.tsx +++ b/services/core-web/src/components/mine/Permit/Search/components/ResultItem.tsx @@ -16,6 +16,7 @@ import { faChevronUp } from '@fortawesome/pro-solid-svg-icons'; import PermitAmendmentPreviewModal from './PermitAmendmentPreviewModal'; +import PermitDocumentsModal from './PermitDocumentsModal'; import DocumentLink from '@mds/common/components/documents/DocumentLink'; import { ActionMenuButton } from '@mds/common/components/common/ActionMenu'; import { WarningOutlined } from '@ant-design/icons'; @@ -59,6 +60,19 @@ const ResultItem: React.FC = ({ result, onFilterClick }) => { })); }; + const handleViewPermitDocuments = () => { + dispatch(openModal({ + props: { + title: 'Permit Documents', + permitAmendmentGuid: meta.permit_amendment_guid, + mineGuid: meta.mine_guid, + permitGuid: meta.permit_guid, + }, + width: '50%', + content: PermitDocumentsModal, + })); + }; + useEffect(() => { // Highlight conditions when url changes. // E.g. /conditions/#condition-123 will highlight condition with id 123 for 5 seconds @@ -315,12 +329,18 @@ const ResultItem: React.FC = ({ result, onFilterClick }) => { - + {meta.permit_type === 'Notice of Work' ? ( + + ) : ( + + )} diff --git a/services/core-web/src/components/navigation/NavBar.tsx b/services/core-web/src/components/navigation/NavBar.tsx index 537130132c..c2940d2b87 100644 --- a/services/core-web/src/components/navigation/NavBar.tsx +++ b/services/core-web/src/components/navigation/NavBar.tsx @@ -15,6 +15,7 @@ import * as Strings from "@mds/common/constants/strings"; import * as router from "@/constants/routes"; import * as Permission from "@/constants/permissions"; import AuthorizationWrapper from "@/components/common/wrappers/AuthorizationWrapper"; +import GlobalSearch from "@/components/search/GlobalSearch/GlobalSearch"; import SearchBar from "@/components/search/SearchBar"; import { LOGO, HAMBURGER, CLOSE, SUCCESS_CHECKMARK, YELLOW_HAZARD } from "@/constants/assets"; import NotificationDrawer from "@/components/navigation/NotificationDrawer"; @@ -408,7 +409,11 @@ export const NavBar: FC = ({ activeButton, isMenuOpen, toggleHambur Home
- + {isFeatureEnabled(Feature.GLOBAL_SEARCH_V2) ? ( + + ) : ( + + )}
+ + ); +}; diff --git a/services/core-web/src/components/search/GlobalSearch/GlobalSearch.tsx b/services/core-web/src/components/search/GlobalSearch/GlobalSearch.tsx new file mode 100644 index 0000000000..4e4fb31e93 --- /dev/null +++ b/services/core-web/src/components/search/GlobalSearch/GlobalSearch.tsx @@ -0,0 +1,413 @@ +import React, { useState, useEffect, useRef, useMemo, useCallback } from "react"; +import { useDispatch, useSelector } from "react-redux"; +import { useHistory, useLocation } from "react-router-dom"; +import { Modal, Input, Typography, Button, List, Space, Row, Divider, Tag } from "antd"; +import { SearchOutlined } from "@ant-design/icons"; +import { fetchSearchBarResults, selectSearchBarResults, selectSearchBarFacets } from "@mds/common/redux/slices/searchSlice"; +import * as router from "@/constants/routes"; +import { ISearchResult, ISimpleSearchResult } from "@mds/common/interfaces"; +import { SearchTriggerButton } from "./components/SearchTriggerButton"; +import { SearchFilters } from "./components/SearchFilters"; +import { SearchResultItem } from "./components/SearchResultItem"; +import { RecentSearches } from "./components/RecentSearches"; +import { EmptySearchState } from "./components/EmptySearchState"; +import { useRecentSearches } from "./hooks/useRecentSearches"; +import { SEARCH_TYPE_CONFIG, RESULT_TYPE_MAP } from "./utils/searchConfig"; +import { getSearchTypes, extractMineGuidFromPath } from "./utils/searchHelpers"; + +const { Text } = Typography; + +interface GlobalSearchProps { + placeholder?: string; + size?: "small" | "middle" | "large"; + enableShortcut?: boolean; +} + +const GlobalSearch: React.FC = ({ + placeholder = "Search Core...", + enableShortcut = true, + size, +}) => { + const [isModalVisible, setIsModalVisible] = useState(false); + const [searchTerm, setSearchTerm] = useState(""); + const [selectedIndex, setSelectedIndex] = useState(0); + const [activeFilters, setActiveFilters] = useState([]); + const [scopeToMine, setScopeToMine] = useState(false); + const [quickFilter, setQuickFilter] = useState(null); + + const dispatch = useDispatch(); + const searchResults = useSelector(selectSearchBarResults); + const facets = useSelector(selectSearchBarFacets); + const history = useHistory(); + const location = useLocation(); + const inputRef = useRef(null); + + const currentMineGuid = useMemo(() => extractMineGuidFromPath(location.pathname), [location.pathname]); + const isOnMinePage = !!currentMineGuid; + + const { recentSearches, saveRecentSearch, removeRecentSearch } = useRecentSearches(); + + const handleSearch = useCallback( + (term: string, filters: string[], mineGuid: string | null) => { + const effectiveTerm = term || "*"; + if (effectiveTerm.length > 0) { + const derivedTypes = getSearchTypes(filters, quickFilter); + // If no filters are selected, explicitly send all search types to allow 1-char search + const allTypes = Object.values(SEARCH_TYPE_CONFIG).flatMap((c) => c.types); + + dispatch(fetchSearchBarResults({ + searchTerm: effectiveTerm, + searchTypes: derivedTypes || allTypes, + mineGuid + })); + } + }, + [dispatch, quickFilter] + ); + + const handleOpen = () => { + setIsModalVisible(true); + setTimeout(() => inputRef.current?.focus(), 50); + }; + + const handleClose = useCallback(() => { + setIsModalVisible(false); + setSearchTerm(""); + setSelectedIndex(0); + setActiveFilters([]); + setScopeToMine(false); + setQuickFilter(null); + }, []); + + useEffect(() => { + if (!enableShortcut) return; + + const handleKeyDown = (e: 
KeyboardEvent) => { + if ((e.metaKey || e.ctrlKey) && e.key === "k") { + e.preventDefault(); + const isAnyModalOpen = document.querySelector(".global-search-modal"); + + if (isModalVisible) { + handleClose(); + } else if (!isAnyModalOpen) { + handleOpen(); + } + } + }; + globalThis.addEventListener("keydown", handleKeyDown); + return () => globalThis.removeEventListener("keydown", handleKeyDown); + }, [isModalVisible, handleClose, enableShortcut]); + + const getMineGuidForSearch = () => (scopeToMine && currentMineGuid ? currentMineGuid : null); + + const handleSearchChange = (e: React.ChangeEvent) => { + const value = e.target.value; + + setSearchTerm(value); + setSelectedIndex(0); + if (value.length > 0) { + handleSearch(value, activeFilters, getMineGuidForSearch()); + } + }; + + const toggleFilter = (filterKey: string) => { + const newFilters = activeFilters.includes(filterKey) + ? activeFilters.filter((f) => f !== filterKey) + : [...activeFilters, filterKey]; + setActiveFilters(newFilters); + setSelectedIndex(0); + if (searchTerm.length > 0 || newFilters.length > 0) { + handleSearch(searchTerm, newFilters, getMineGuidForSearch()); + } + }; + + const toggleScopeToMine = (checked: boolean) => { + setScopeToMine(checked); + const mineGuid = checked && currentMineGuid ? currentMineGuid : null; + const term = searchTerm || "*"; + handleSearch(term, activeFilters, mineGuid); + }; + + const navigateToResult = (item: ISearchResult) => { + saveRecentSearch(item.result.value); + let routeUrl = ""; + switch (item.type) { + case "mine": + routeUrl = router.MINE_GENERAL.dynamicRoute(item.result.id); + break; + case "person": + case "organization": + case "party": + routeUrl = router.PARTY_PROFILE.dynamicRoute(item.result.id); + break; + case "now_application": + routeUrl = router.NOTICE_OF_WORK_APPLICATION.dynamicRoute(item.result.id, "verification"); + break; + case "permit": + routeUrl = router.VIEW_MINE_PERMIT.dynamicRoute(item.result.mine_guid, item.result.id); + break; + case "explosives_permit": + routeUrl = router.MINE_PERMITS.dynamicRoute(item.result.mine_guid); + break; + case "nod": + routeUrl = router.NOTICE_OF_DEPARTURE.dynamicRoute(item.result.mine_guid, item.result.id); + break; + } + if (routeUrl) { + handleClose(); + history.push(routeUrl); + } + }; + + const handleEnter = () => { + if (searchResults?.length > 0) { + navigateToResult(searchResults[selectedIndex]); + } else if (searchTerm.length > 0) { + saveRecentSearch(searchTerm); + handleClose(); + history.push(router.SEARCH_RESULTS.dynamicRoute({ q: searchTerm })); + } + }; + + const handleRecentSearchClick = (term: string) => { + setSearchTerm(term); + handleSearch(term, activeFilters, getMineGuidForSearch()); + }; + + const handleKeyDown = (e: React.KeyboardEvent) => { + const totalItems = searchResults?.length || recentSearches.length || 0; + switch (e.key) { + case "ArrowDown": + e.preventDefault(); + setSelectedIndex((prev) => (prev + 1) % (totalItems || 1)); + break; + case "ArrowUp": + e.preventDefault(); + setSelectedIndex((prev) => (prev - 1 + (totalItems || 1)) % (totalItems || 1)); + break; + case "Enter": + e.preventDefault(); + if (!searchTerm && recentSearches.length > 0) { + handleRecentSearchClick(recentSearches[selectedIndex]); + } else { + handleEnter(); + } + break; + case "Escape": + handleClose(); + break; + case "Backspace": + if (searchTerm === "" && quickFilter) { + e.preventDefault(); + setQuickFilter(null); + } + break; + } + }; + + const groupedResults = useMemo(() => { + if (!searchResults?.length) 
return null; + const groups: Record[]> = {}; + searchResults.forEach((result) => { + if (!groups[result.type]) groups[result.type] = []; + groups[result.type].push(result); + }); + return groups; + }, [searchResults]); + + const handleViewAll = () => { + saveRecentSearch(searchTerm); + handleClose(); + history.push(router.SEARCH_RESULTS.dynamicRoute({ q: searchTerm })); + }; + + const handleQuickAction = (route: string) => { + handleClose(); + history.push(route); + }; + + const renderResults = () => { + const hasActiveSearch = searchTerm || scopeToMine || activeFilters.length > 0; + + if (hasActiveSearch && groupedResults) { + let globalIndex = 0; + return ( +
+ {Object.entries(groupedResults).map(([type, results]) => { + const configKey = RESULT_TYPE_MAP[type] || "document"; + const config = SEARCH_TYPE_CONFIG[configKey]; + return ( +
+ + {config.pluralLabel} + + ( + + )} + split={false} + /> +
+ ); + })} +
+ +
+
+ ); + } + + if (hasActiveSearch && searchResults?.length === 0) { + return ( + + ); + } + + if (!hasActiveSearch && recentSearches.length > 0) { + return ( + { + e.stopPropagation(); + removeRecentSearch(term); + }} + onSetSelectedIndex={setSelectedIndex} + /> + ); + } + + return ( + , + label: "Browse Mines", + color: "#2e7d32", + route: router.MINE_HOME_PAGE.dynamicRoute({ page: "1", per_page: "25" }), + }, + { + icon: , + label: "Browse Contacts", + color: "#1565c0", + route: router.CONTACT_HOME_PAGE.dynamicRoute({ page: "1", per_page: "25" }), + }, + { + icon: , + label: "Reports", + color: "#7b1fa2", + route: router.REPORTING_DASHBOARD.route, + }, + ]} + /> + ); + }; + + return ( + <> + + + + + + select + + + ↑↓ navigate + + + esc close + + + + } + closable={false} + maskClosable + keyboard + width={580} + style={{ top: 80 }} + bodyStyle={{ padding: 0 }} + className="global-search-modal" + destroyOnClose + > + + + {quickFilter && ( + { + e.preventDefault(); + setQuickFilter(null); + }} + className="search-tag-icon" + > + {SEARCH_TYPE_CONFIG[quickFilter]?.icon} + {SEARCH_TYPE_CONFIG[quickFilter]?.pluralLabel || quickFilter} + + )} + + } + placeholder={ + quickFilter + ? "Search within filter..." + : "Search for mines, contacts, permits..." + } + value={searchTerm} + onChange={handleSearchChange} + onKeyDown={handleKeyDown} + bordered={false} + allowClear + size="large" + className="global-search-modal__input" + /> + + {renderResults()} + + + ); +}; + +export default GlobalSearch; diff --git a/services/core-web/src/components/search/GlobalSearch/components/EmptySearchState.tsx b/services/core-web/src/components/search/GlobalSearch/components/EmptySearchState.tsx new file mode 100644 index 0000000000..c60cd215dd --- /dev/null +++ b/services/core-web/src/components/search/GlobalSearch/components/EmptySearchState.tsx @@ -0,0 +1,103 @@ +import React from "react"; +import { Space, Typography, Button, Row, Col, Avatar } from "antd"; +import { + SearchOutlined, + EnvironmentOutlined, + TeamOutlined, + FileSearchOutlined, +} from "@ant-design/icons"; + +const { Text, Title } = Typography; + +interface EmptySearchStateProps { + hasSearchTerm: boolean; + scopeToMine: boolean; + activeFiltersCount: number; + searchTerm?: string; + onViewAll?: () => void; + onQuickAction?: (route: string) => void; + quickActions?: Array<{ icon: React.ReactNode; label: string; color: string; route: string }>; +} + +export const EmptySearchState: React.FC = ({ + hasSearchTerm, + scopeToMine, + activeFiltersCount, + searchTerm, + onViewAll, + onQuickAction, + quickActions, +}) => { + if (hasSearchTerm) { + return ( +
+ + + No results found + + {scopeToMine && !searchTerm + ? "No items found for this mine" + : activeFiltersCount > 0 + ? "Try removing some filters or adjusting your search" + : "Try adjusting your search or browse all results"} + + {searchTerm && onViewAll && ( + + )} + +
+ ); + } + + // Default state with quick actions + const defaultQuickActions = quickActions || [ + { + icon: , + label: "Browse Mines", + color: "#2e7d32", + route: "/mine-home-page", + }, + { + icon: , + label: "Browse Contacts", + color: "#1565c0", + route: "/contact-home-page", + }, + { + icon: , + label: "Reports", + color: "#7b1fa2", + route: "/reports", + }, + ]; + + return ( + + + Quick Actions + + + {defaultQuickActions.map((action) => ( +
+ + + ))} + + + ); +}; diff --git a/services/core-web/src/components/search/GlobalSearch/components/RecentSearches.tsx b/services/core-web/src/components/search/GlobalSearch/components/RecentSearches.tsx new file mode 100644 index 0000000000..9161f8fe0b --- /dev/null +++ b/services/core-web/src/components/search/GlobalSearch/components/RecentSearches.tsx @@ -0,0 +1,63 @@ +import React from "react"; +import { List, Space, Divider } from "antd"; +import { HistoryOutlined, ClockCircleOutlined, DeleteOutlined } from "@ant-design/icons"; +import classNames from "classnames"; + + +interface RecentSearchesProps { + recentSearches: string[]; + selectedIndex: number; + onSearchClick: (term: string) => void; + onRemoveSearch: (term: string, e: React.MouseEvent) => void; + onSetSelectedIndex: (index: number) => void; +} + +export const RecentSearches: React.FC = ({ + recentSearches, + selectedIndex, + onSearchClick, + onRemoveSearch, + onSetSelectedIndex, +}) => { + const [hoveredIndex, setHoveredIndex] = React.useState(null); + return ( +
+ + + + Recent Searches + + + { + const isSelected = index === selectedIndex; + const isHovered = hoveredIndex === index; + return ( + onSearchClick(term)} + onMouseEnter={() => { + onSetSelectedIndex(index); + setHoveredIndex(index); + }} + onMouseLeave={() => setHoveredIndex(null)} + className={classNames("recent-searches__item", { + "recent-searches__item--selected": isSelected, + "recent-searches__item--hovered": isHovered && !isSelected + })} + extra={ + onRemoveSearch(term, e)} + className="recent-searches__delete-icon" + /> + } + > + } title={term} /> + + ); + }} + split={false} + /> +
+ ); +}; diff --git a/services/core-web/src/components/search/GlobalSearch/components/SearchFilters.tsx b/services/core-web/src/components/search/GlobalSearch/components/SearchFilters.tsx new file mode 100644 index 0000000000..deac6f4f86 --- /dev/null +++ b/services/core-web/src/components/search/GlobalSearch/components/SearchFilters.tsx @@ -0,0 +1,84 @@ +import React from "react"; +import { Tag, Space, Divider } from "antd"; +import { AimOutlined } from "@ant-design/icons"; +import classNames from "classnames"; +import { SEARCH_TYPE_CONFIG } from "../utils/searchConfig"; + +interface SearchFiltersProps { + activeFilters: string[]; + onToggleFilter: (filterKey: string) => void; + facets: Record; + isOnMinePage: boolean; + scopeToMine: boolean; + onToggleScopeToMine: (checked: boolean) => void; + searchTerm: string; +} + +export const SearchFilters: React.FC = ({ + activeFilters, + onToggleFilter, + facets, + isOnMinePage, + scopeToMine, + onToggleScopeToMine, + searchTerm, +}) => { + const facetCountMap: Record = { + mine: facets.mine ?? 0, + contact: facets.person ?? 0, + organization: facets.organization ?? 0, + permit: facets.permit ?? 0, + explosives_permit: facets.explosives_permit ?? 0, + now_application: facets.now_application ?? 0, + nod: facets.nod ?? 0, + document: (facets.mine_documents ?? 0) + (facets.permit_documents ?? 0), + }; + + const getFacetCount = (filterKey: string): number => facetCountMap[filterKey] ?? 0; + + return ( +
+ + {isOnMinePage && ( + onToggleScopeToMine(checked)} + className={classNames("search-filters__tag search-filters__tag--mine-scope", { + "checked": scopeToMine + })} + > + + + This Mine + + + )} + {isOnMinePage && } + {Object.entries(SEARCH_TYPE_CONFIG).map(([key, config]) => { + const isActive = activeFilters.includes(key); + const count = getFacetCount(key); + + return ( + onToggleFilter(key)} + className="search-filters__tag" + style={{ + backgroundColor: isActive ? config.color : undefined, + border: `1px solid ${isActive ? config.color : "#d9d9d9"}`, + color: isActive ? "#fff" : "#595959", + }} + > + + {config.icon} + {config.pluralLabel} + {count > 0 && ({count})} + + + ); + })} + +
+ ); +}; diff --git a/services/core-web/src/components/search/GlobalSearch/components/SearchResultItem.tsx b/services/core-web/src/components/search/GlobalSearch/components/SearchResultItem.tsx new file mode 100644 index 0000000000..18d4d1f189 --- /dev/null +++ b/services/core-web/src/components/search/GlobalSearch/components/SearchResultItem.tsx @@ -0,0 +1,70 @@ +import React, { useState } from "react"; +import { List, Avatar, Typography } from "antd"; +import { EnterOutlined } from "@ant-design/icons"; +import classNames from "classnames"; +import { ISearchResult, ISimpleSearchResult } from "@mds/common/interfaces"; +import { SEARCH_TYPE_CONFIG, RESULT_TYPE_MAP } from "../utils/searchConfig"; +import { highlightMatch } from "../utils/searchHelpers"; + +const { Text } = Typography; + +interface SearchResultItemProps { + item: ISearchResult; + index: number; + selectedIndex: number; + searchTerm: string; + onClick: (item: ISearchResult) => void; + onMouseEnter: (index: number) => void; +} + +export const SearchResultItem: React.FC = ({ + item, + index, + selectedIndex, + searchTerm, + onClick, + onMouseEnter, +}) => { + const [isHovered, setIsHovered] = useState(false); + const configKey = RESULT_TYPE_MAP[item.type] || "document"; + const config = SEARCH_TYPE_CONFIG[configKey]; + const isSelected = index === selectedIndex; + + return ( + onClick(item)} + onMouseEnter={() => { + onMouseEnter(index); + setIsHovered(true); + }} + onMouseLeave={() => setIsHovered(false)} + className={classNames("search-result-item", { + "search-result-item--selected": isSelected, + "search-result-item--hovered": isHovered && !isSelected + })} + > + + } + title={{highlightMatch(item.result.value, searchTerm)}} + description={ + + {config.label} + {item.result.description && • {item.result.description}} + {item.result.highlight && ( + + )} + + } + /> + + + ); +}; diff --git a/services/core-web/src/components/search/GlobalSearch/components/SearchTriggerButton.tsx b/services/core-web/src/components/search/GlobalSearch/components/SearchTriggerButton.tsx new file mode 100644 index 0000000000..1931380153 --- /dev/null +++ b/services/core-web/src/components/search/GlobalSearch/components/SearchTriggerButton.tsx @@ -0,0 +1,49 @@ +import React, { useState } from "react"; +import { Button, Input } from "antd"; +import { SearchOutlined } from "@ant-design/icons"; +import classNames from "classnames"; + +interface SearchTriggerButtonProps { + onClick: () => void; + placeholder?: string; + size?: "small" | "middle" | "large"; + enableShortcut?: boolean; +} + +export const SearchTriggerButton: React.FC = ({ + onClick, + placeholder = "Search Core...", + size = "middle", + enableShortcut = true +}) => { + const [isFocussed, setIsFocussed] = useState(false); + const platform: string = window.navigator.platform.toLowerCase(); + const isMac = platform.includes("mac"); + let buttonText = isMac ? "⌘ + K" : "CTRL + K"; + + const suffix = enableShortcut ? ( + + ) : null; + + return ( +
+ : undefined} + suffix={suffix || } + placeholder={placeholder} + className={classNames( + "searchbar", + isFocussed ? "search-focussed" : "search-not-focussed", + size === "large" ? "searchbar-large" : "" + )} + readOnly + onFocus={() => setIsFocussed(true)} + onBlur={() => setIsFocussed(false)} + onClick={onClick} + /> +
+ ); +}; diff --git a/services/core-web/src/components/search/GlobalSearch/hooks/useRecentSearches.ts b/services/core-web/src/components/search/GlobalSearch/hooks/useRecentSearches.ts new file mode 100644 index 0000000000..2a5dfc3a06 --- /dev/null +++ b/services/core-web/src/components/search/GlobalSearch/hooks/useRecentSearches.ts @@ -0,0 +1,32 @@ +import { useState, useEffect } from "react"; +import { RECENT_SEARCHES_KEY, MAX_RECENT_SEARCHES } from "../utils/searchConfig"; + +export const useRecentSearches = () => { + const [recentSearches, setRecentSearches] = useState([]); + + useEffect(() => { + const stored = localStorage.getItem(RECENT_SEARCHES_KEY); + if (stored) { + try { + setRecentSearches(JSON.parse(stored)); + } catch { + setRecentSearches([]); + } + } + }, []); + + const saveRecentSearch = (term: string) => { + if (!term.trim()) return; + const updated = [term, ...recentSearches.filter((s) => s !== term)].slice(0, MAX_RECENT_SEARCHES); + setRecentSearches(updated); + localStorage.setItem(RECENT_SEARCHES_KEY, JSON.stringify(updated)); + }; + + const removeRecentSearch = (term: string) => { + const updated = recentSearches.filter((s) => s !== term); + setRecentSearches(updated); + localStorage.setItem(RECENT_SEARCHES_KEY, JSON.stringify(updated)); + }; + + return { recentSearches, saveRecentSearch, removeRecentSearch }; +}; diff --git a/services/core-web/src/components/search/GlobalSearch/utils/searchConfig.tsx b/services/core-web/src/components/search/GlobalSearch/utils/searchConfig.tsx new file mode 100644 index 0000000000..7cce95486c --- /dev/null +++ b/services/core-web/src/components/search/GlobalSearch/utils/searchConfig.tsx @@ -0,0 +1,95 @@ +import React from "react"; +import { + EnvironmentOutlined, + UserOutlined, + BankOutlined, + FileProtectOutlined, + AlertOutlined, + FileSearchOutlined, + ExceptionOutlined, +} from "@ant-design/icons"; + +export interface SearchTypeConfig { + icon: React.ReactNode; + label: string; + pluralLabel: string; + color: string; + types: string[]; +} + +export const SEARCH_TYPE_CONFIG: Record = { + mine: { + icon: , + label: "Mine", + pluralLabel: "Mines", + color: "#2e7d32", + types: ["mine"], + }, + contact: { + icon: , + label: "Person", + pluralLabel: "People", + color: "#1565c0", + types: ["person", "party"], + }, + organization: { + icon: , + label: "Organization", + pluralLabel: "Organizations", + color: "#f57c00", + types: ["organization"], + }, + permit: { + icon: , + label: "Permit", + pluralLabel: "Permits", + color: "#e65100", + types: ["permit"], + }, + explosives_permit: { + icon: , + label: "Explosives Permit", + pluralLabel: "Explosives", + color: "#d32f2f", + types: ["explosives_permit"], + }, + now_application: { + icon: , + label: "Notice of Work", + pluralLabel: "NoW", + color: "#0288d1", + types: ["now_application"], + }, + nod: { + icon: , + label: "NOD", + pluralLabel: "NODs", + color: "#7b1fa2", + types: ["nod", "notice_of_departure"], + }, + document: { + icon: , + label: "Document", + pluralLabel: "Documents", + color: "#455a64", + types: ["mine_documents", "permit_documents"], + }, +}; + +// Mapping for individual result types (more specific than filter types) +export const RESULT_TYPE_MAP: Record = { + mine: "mine", + person: "contact", + party: "contact", + organization: "organization", + permit: "permit", + explosives_permit: "explosives_permit", + now_application: "now_application", + nod: "nod", + notice_of_departure: "nod", + mine_documents: "document", + permit_documents: "document", +}; + +export const 
RECENT_SEARCHES_KEY = "mds_recent_searches"; +export const MAX_RECENT_SEARCHES = 5; diff --git a/services/core-web/src/components/search/GlobalSearch/utils/searchHelpers.tsx b/services/core-web/src/components/search/GlobalSearch/utils/searchHelpers.tsx new file mode 100644 index 0000000000..fb6cf604fe --- /dev/null +++ b/services/core-web/src/components/search/GlobalSearch/utils/searchHelpers.tsx @@ -0,0 +1,22 @@ +import React from "react"; +import { SEARCH_TYPE_CONFIG } from "./searchConfig"; + +export const getSearchTypes = (filters: string[], includeQuickFilter?: string | null): string[] | null => { + const allFilters = includeQuickFilter ? [...filters, includeQuickFilter] : filters; + const uniqueFilters = [...new Set(allFilters)]; + if (uniqueFilters.length === 0) return null; + return uniqueFilters.flatMap((f) => SEARCH_TYPE_CONFIG[f]?.types || []); +}; + +export const highlightMatch = (text: string, search: string): React.ReactNode => { + if (!search || !text) return text; + const escapedSearch = search.replace(/[.*+?^${}()|[\]\\]/g, String.raw`\$&`); + const regex = new RegExp(`(${escapedSearch})`, "gi"); + const parts = text.split(regex); + return parts.map((part, i) => (regex.test(part) ? {part} : part)); +}; + +export const extractMineGuidFromPath = (pathname: string): string | null => { + const match = pathname.match(/\/mine-dashboard\/([a-f0-9-]+)/i); + return match ? match[1] : null; +}; diff --git a/services/core-web/src/components/search/PermitResultsTable.js b/services/core-web/src/components/search/PermitResultsTable.js index f0e2b06c47..3b782dbb86 100644 --- a/services/core-web/src/components/search/PermitResultsTable.js +++ b/services/core-web/src/components/search/PermitResultsTable.js @@ -32,7 +32,8 @@ export const PermitResultsTable = (props) => { title: "Permit No.", key: "permit_no", render: (record) => { - if (isFeatureEnabled(Feature.DIGITIZED_PERMITS)) { + const hasMine = record.mine && record.mine.length > 0 && record.mine[0]?.mine_guid; + if (isFeatureEnabled(Feature.DIGITIZED_PERMITS) && hasMine) { return ( { title: "Mine(s)", key: "mine_guid", render: (record) => { + if (!record.mine || record.mine.length === 0) { + return "-"; + } return record.mine.map((mine) => ( - {mine.mine_name} + {mine.mine_name || mine.mine_guid} )); }, diff --git a/services/core-web/src/components/search/SearchBar.tsx b/services/core-web/src/components/search/SearchBar.tsx index 7f0daa7b16..70ac605530 100644 --- a/services/core-web/src/components/search/SearchBar.tsx +++ b/services/core-web/src/components/search/SearchBar.tsx @@ -1,40 +1,35 @@ import React, { useState, FC, useRef } from "react"; -import { bindActionCreators } from "redux"; -import { connect } from "react-redux"; -import { withRouter, useHistory, RouteComponentProps } from "react-router-dom"; +import { useHistory } from "react-router-dom"; +import { useDispatch, useSelector } from "react-redux"; import { Input, InputProps, Button } from "antd"; import { fetchSearchBarResults, - clearSearchBarResults, -} from "@mds/common/redux/actionCreators/searchActionCreator"; + selectSearchBarResults, +} from "@mds/common/redux/slices/searchSlice"; import * as router from "@/constants/routes"; -import { getSearchBarResults } from "@mds/common/redux/reducers/searchReducer"; import { SearchOutlined } from "@ant-design/icons"; import { useKey } from "@/App"; -import { ISearchResult, ISimpleSearchResult } from "@mds/common/interfaces/search/searchResult.interface"; import { SearchBarDropdown } from 
"@/components/search/SearchBarDropdown"; import { throttle } from "lodash"; -import { ActionCreator } from "@mds/common/interfaces/actionCreator"; -// any attribute that can be passed to antd Input can be passed in here without being explicitly named interface SearchBarProps extends InputProps { iconPlacement: "prefix" | "suffix" | false; placeholderText: string; showFocusButton: boolean; - searchBarResults: ISearchResult[]; - fetchSearchBarResults: ActionCreator; } -const SearchBar: FC = ({ +const SearchBar: FC = ({ iconPlacement = "suffix", placeholderText = "Search...", showFocusButton = false, ...props }) => { + const dispatch = useDispatch(); + const searchBarResults = useSelector(selectSearchBarResults); const [searchTerm, setSearchTerm] = useState(""); const [searchTermHistory, setSearchTermHistory] = useState([]); const [isFocussed, setIsFocussed] = useState(false); @@ -42,7 +37,9 @@ const SearchBar: FC = ({ const history = useHistory(); const hotKeyRef = useRef(); - const fetchSearchBarResultsThrottled = throttle(props.fetchSearchBarResults, 2000, { + const fetchSearchBarResultsThrottled = throttle((term: string) => { + dispatch(fetchSearchBarResults({ searchTerm: term })); + }, 2000, { leading: true, trailing: true, }); @@ -106,7 +103,7 @@ const SearchBar: FC = ({ history={history} searchTerm={searchTerm} searchTermHistory={searchTermHistory} - searchBarResults={props.searchBarResults} + searchBarResults={searchBarResults} > = ({ setIsFocussed(false); }} ref={hotKeyRef} + {...iconProps} {...(showFocusButton ? getFocusButton() : null)} {...props} - {...iconProps} /> ); }; -const mapStateToProps = (state) => ({ - searchBarResults: getSearchBarResults(state), -}); - -const mapDispatchToProps = (dispatch) => - bindActionCreators( - { - fetchSearchBarResults, - clearSearchBarResults, - }, - dispatch - ); - -export default connect(mapStateToProps, mapDispatchToProps)(withRouter(SearchBar)); +export default SearchBar; diff --git a/services/core-web/src/components/search/SearchFiltersPanel.tsx b/services/core-web/src/components/search/SearchFiltersPanel.tsx new file mode 100644 index 0000000000..31f7668530 --- /dev/null +++ b/services/core-web/src/components/search/SearchFiltersPanel.tsx @@ -0,0 +1,119 @@ +import React, { useMemo } from "react"; +import { Card, Typography, Tag, Button, Space, Checkbox, Collapse } from "antd"; +import { FilterOutlined, ClearOutlined } from "@ant-design/icons"; +import { FACET_GROUPS, FACET_LABELS, FacetBucket, SearchFacets } from "./searchResultsConfig"; + +const { Text } = Typography; +const { Panel } = Collapse; + +interface SearchFiltersPanelProps { + searchFacets: SearchFacets | null; + selectedFilters: Record; + hasActiveFilters: boolean; + onFilterChange: (category: string, value: string, checked: boolean) => void; + onClearAllFilters: () => void; +} + +export const SearchFiltersPanel: React.FC = ({ + searchFacets, + selectedFilters, + hasActiveFilters, + onFilterChange, + onClearAllFilters, +}) => { + const groupedFacets = useMemo(() => { + return FACET_GROUPS.map((group) => { + const facets = group.facets + .map((facetKey) => ({ + key: facetKey, + label: FACET_LABELS[facetKey] || facetKey, + data: searchFacets?.[facetKey as keyof SearchFacets] || [], + })) + .filter((f) => f.data.length > 0); + + return { + ...group, + facets: facets, + }; + }).filter((group) => group.facets.length > 0); + }, [searchFacets]); + + return ( + +
+ + + Filters + + {hasActiveFilters && ( + + )} +
+ + {hasActiveFilters && ( +
+ + {Object.entries(selectedFilters).map(([category, values]) => + values.map((value) => ( + onFilterChange(category, value, false)} + color="blue" + className="search-results-v2__filters-card-tag" + > + {value} + + )) + )} + +
+ )} + + {groupedFacets.length > 0 ? ( + g.key)}> + {groupedFacets.map((group) => ( + + {group.icon} + {group.label} + + } + key={group.key} + > + {group.facets.map((facet) => ( +
+ + {facet.label} + +
+ {[...facet.data] + .sort((a: FacetBucket, b: FacetBucket) => b.count - a.count) + .map((bucket: FacetBucket) => ( +
+ onFilterChange(facet.key, bucket.key, e.target.checked)} + > + + {bucket.key}{" "} + ({bucket.count}) + + +
+ ))} +
+
+ ))} +
+ ))} +
+ ) : ( + No filters available + )} +
+ ); +}; diff --git a/services/core-web/src/components/search/SearchHeader.tsx b/services/core-web/src/components/search/SearchHeader.tsx new file mode 100644 index 0000000000..b92da23b97 --- /dev/null +++ b/services/core-web/src/components/search/SearchHeader.tsx @@ -0,0 +1,36 @@ +import React from "react"; +import { Input } from "antd"; +import { SearchOutlined } from "@ant-design/icons"; + +interface SearchHeaderProps { + searchInputValue: string; + onSearchInputChange: (value: string) => void; + onSearch: (value: string) => void; +} + +export const SearchHeader: React.FC = ({ + searchInputValue, + onSearchInputChange, + onSearch, +}) => { + return ( +
+
+
+

Search Results

+
+
+
+ } + size="large" + value={searchInputValue} + onChange={(e) => onSearchInputChange(e.target.value)} + onSearch={onSearch} + /> +
+
+ ); +}; diff --git a/services/core-web/src/components/search/SearchResults.tsx b/services/core-web/src/components/search/SearchResults.tsx index e49082285c..3f9e196347 100644 --- a/services/core-web/src/components/search/SearchResults.tsx +++ b/services/core-web/src/components/search/SearchResults.tsx @@ -1,259 +1,17 @@ -import React, { useEffect, useMemo, useState } from "react"; -import { bindActionCreators } from "redux"; -import { ConnectedProps, connect } from "react-redux"; -import queryString from "query-string"; -import { Row, Col } from "antd"; -import { ArrowLeftOutlined } from "@ant-design/icons"; -import { Link } from "react-router-dom"; -import { sumBy, map, mapValues, keyBy } from "lodash"; -import { getSearchResults, getSearchTerms } from "@mds/common/redux/selectors/searchSelectors"; -import { getPartyRelationshipTypeHash } from "@mds/common/redux/selectors/staticContentSelectors"; -import { - fetchSearchOptions, - fetchSearchResults, -} from "@mds/common/redux/actionCreators/searchActionCreator"; -import { getSearchOptions } from "@mds/common/redux/reducers/searchReducer"; -import { MineResultsTable } from "@/components/search/MineResultsTable"; -import { PermitResultsTable } from "@/components/search/PermitResultsTable"; -import { ContactResultsTable } from "@/components/search/ContactResultsTable"; -import { DocumentResultsTable } from "@/components/search/DocumentResultsTable"; -import Loading from "@/components/common/Loading"; -import * as router from "@/constants/routes"; -import { ISearchResultList } from "@mds/common/interfaces"; +import React from "react"; +import { useFeatureFlag } from "@mds/common/providers/featureFlags/useFeatureFlag"; +import { Feature } from "@mds/common/utils/featureFlag"; +import SearchResultsV2 from "./SearchResultsV2"; +import SearchResultsLegacy from "./SearchResultsLegacy"; -interface SearchResultsProps { - location: { search: string }; - history: { push: (path: string) => void }; - fetchSearchOptions: () => Promise; - fetchSearchResults: (query, tab) => Promise; - searchOptions: any[]; - searchOptionsHash: { [key: string]: any }; - searchTerms: string[]; - searchResults: ISearchResultList; - partyRelationshipTypeHash: { [key: string]: string }; - hideLoadingIndicator?: boolean; -} +const SearchResults: React.FC = (props) => { + const { isFeatureEnabled } = useFeatureFlag(); -const TableForGroup = ( - group: any, - highlightRegex: RegExp, - partyRelationshipTypeHash: { [key: string]: string }, - query: { q?: string }, - showAdvancedLookup: boolean -) => -({ - mine: ( - - ), - party: ( - - ), - permit: ( - - ), - mine_documents: ( - - ), - permit_documents: ( - - ), -}[group.type]); + if (isFeatureEnabled(Feature.GLOBAL_SEARCH_V2)) { + return ; + } -const NoResults = (searchTerms: string[]) => { - const searchTooShort = !searchTerms.find((term) => term.length > 2); - return ( - -
-

No Results Found.

- {searchTooShort && ( -

At least one word in your search needs to be a minimum of three characters.

- )} -

Please try another search.

- - - ); + return ; }; -const CantFindIt = () => ( - - -

Can't find it?

-

- Try clicking to see more results, or select the advanced lookup if available. Also, double - check your spelling to ensure it is correct. If you feel there is a problem, contact the - Core administrator to ask for assistance. -

- - -); - -export const SearchResults: React.FC = (props) => { - const [isSearching, setIsSearching] = useState(false); - const [hasSearchTerm, setHasSearchTerm] = useState(false); - const [params, setParams] = useState<{ [key: string]: string }>({}); - - const handleSearch = (location: { search: string }) => { - const parsedParams = queryString.parse(location.search); - const { q, t } = parsedParams; - - if (q) { - props.fetchSearchResults(q, t); - setParams(parsedParams); - setIsSearching(true); - setHasSearchTerm(true); - } - }; - - useEffect(() => { - if (!props.searchOptions.length) { - props.fetchSearchOptions(); - } - handleSearch(props.location); - }, []); - - useEffect(() => { - handleSearch(props.location); - }, [props.location]); - - const groupedSearchResults: any[] = useMemo(() => { - const results: any[] = []; - Object.entries(props.searchResults).forEach((entry) => { - const resultGroup = { - type: entry[0], - score: sumBy(entry[1], "score"), - results: map(entry[1], "result"), - }; - if (resultGroup.score > 0) results.push(resultGroup); - }); - - results.sort((a, b) => a.score - b.score); - results.reverse(); - - setIsSearching(false); - - return results; - }, [props.searchResults]); - - const results = useMemo(() => props.searchTerms.map((t) => `"${t}"`).join(", "), [ - props.searchTerms, - ]); - - const type_filter = params.t; - - if (isSearching && !props.hideLoadingIndicator) return ; - - return hasSearchTerm ? ( -
-
-
-

- {`${type_filter ? props.searchOptionsHash[type_filter] : "Search results" - } for ${results}`} -

-
- {type_filter ? ( - - - {`Back to all search results for ${results}`} - - ) : ( -

- Just show me: - {props.searchOptions.map((o) => ( - - - {o.description} - - - ))} -

- )} -
-
-
-
- {groupedSearchResults.length === 0 && NoResults(props.searchTerms)} - {groupedSearchResults.map((group) => ( -
- {TableForGroup( - group, - RegExp(`${props.searchTerms.join("|")}`, "i"), - props.partyRelationshipTypeHash, - params, - !!type_filter - )} - {!type_filter && ( - - See more search results for {props.searchOptionsHash[group.type]} - - )} -
- ))} - -
-
-
-
- ) : ( - <> - ); -}; - -const mapStateToProps = (state: any) => ({ - searchOptions: getSearchOptions(state), - searchOptionsHash: mapValues(keyBy(getSearchOptions(state), "model_id"), "description"), - searchResults: getSearchResults(state), - searchTerms: getSearchTerms(state), - partyRelationshipTypeHash: getPartyRelationshipTypeHash(state), -}); - -const mapDispatchToProps = (dispatch) => - bindActionCreators( - { - fetchSearchOptions, - fetchSearchResults, - }, - dispatch - ); - - -const connector = connect(mapStateToProps, mapDispatchToProps); -type PropsFromRedux = ConnectedProps; - -export default connector(SearchResults); +export default SearchResults; diff --git a/services/core-web/src/components/search/SearchResultsLegacy.tsx b/services/core-web/src/components/search/SearchResultsLegacy.tsx new file mode 100644 index 0000000000..a532b0b503 --- /dev/null +++ b/services/core-web/src/components/search/SearchResultsLegacy.tsx @@ -0,0 +1,260 @@ +import React, { useEffect, useMemo, useState } from "react"; +import { bindActionCreators } from "redux"; +import { ConnectedProps, connect } from "react-redux"; +import queryString from "query-string"; +import { Row, Col } from "antd"; +import { ArrowLeftOutlined } from "@ant-design/icons"; +import { Link } from "react-router-dom"; +import { sumBy, map, mapValues, keyBy } from "lodash"; +import { + selectSearchResults, + selectSearchTerms, + selectSearchOptions, + fetchSearchOptions, + fetchSearchResults, +} from "@mds/common/redux/slices/searchSlice"; +import { getPartyRelationshipTypeHash } from "@mds/common/redux/selectors/staticContentSelectors"; +import { MineResultsTable } from "@/components/search/MineResultsTable"; +import { PermitResultsTable } from "@/components/search/PermitResultsTable"; +import { ContactResultsTable } from "@/components/search/ContactResultsTable"; +import { DocumentResultsTable } from "@/components/search/DocumentResultsTable"; +import Loading from "@/components/common/Loading"; +import * as router from "@/constants/routes"; +import { ISearchResultList } from "@mds/common/interfaces"; + +interface SearchResultsProps { + location: { search: string }; + history: { push: (path: string) => void }; + fetchSearchOptions: any; + fetchSearchResults: any; + searchOptions: any[]; + searchOptionsHash: { [key: string]: any }; + searchTerms: string[]; + searchResults: ISearchResultList; + partyRelationshipTypeHash: { [key: string]: string }; + hideLoadingIndicator?: boolean; +} + +const TableForGroup = ( + group: any, + highlightRegex: RegExp, + partyRelationshipTypeHash: { [key: string]: string }, + query: { q?: string }, + showAdvancedLookup: boolean +) => +({ + mine: ( + + ), + party: ( + + ), + permit: ( + + ), + mine_documents: ( + + ), + permit_documents: ( + + ), +}[group.type]); + +const NoResults = (searchTerms: string[]) => { + const searchTooShort = !searchTerms.find((term) => term.length > 2); + return ( + +
+

No Results Found.

+ {searchTooShort && ( +

At least one word in your search needs to be a minimum of three characters.

+ )} +

Please try another search.

+ + + ); +}; + +const CantFindIt = () => ( + + +

Can't find it?

+

+ Try clicking to see more results, or select the advanced lookup if available. Also, double + check your spelling to ensure it is correct. If you feel there is a problem, contact the + Core administrator to ask for assistance. +

+ + +); + +export const SearchResultsLegacy: React.FC = (props) => { + const [isSearching, setIsSearching] = useState(false); + const [hasSearchTerm, setHasSearchTerm] = useState(false); + const [params, setParams] = useState<{ [key: string]: string }>({}); + + const handleSearch = (location: { search: string }) => { + const parsedParams = queryString.parse(location.search); + const { q, t } = parsedParams; + + if (q) { + props.fetchSearchResults({ searchTerm: q as string, searchTypes: t ? [t as string] : [] }); + setParams(parsedParams); + setIsSearching(true); + setHasSearchTerm(true); + } + }; + + useEffect(() => { + if (!props.searchOptions.length) { + props.fetchSearchOptions(); + } + handleSearch(props.location); + }, []); + + useEffect(() => { + handleSearch(props.location); + }, [props.location]); + + const groupedSearchResults: any[] = useMemo(() => { + const results: any[] = []; + Object.entries(props.searchResults).forEach((entry) => { + const resultGroup = { + type: entry[0], + score: sumBy(entry[1], "score"), + results: map(entry[1], "result"), + }; + if (resultGroup.score > 0) results.push(resultGroup); + }); + + results.sort((a, b) => a.score - b.score); + results.reverse(); + + setIsSearching(false); + + return results; + }, [props.searchResults]); + + const results = useMemo(() => props.searchTerms.map((t) => `"${t}"`).join(", "), [ + props.searchTerms, + ]); + + const type_filter = params.t; + + if (isSearching && !props.hideLoadingIndicator) return ; + + return hasSearchTerm ? ( +
+
+
+

+ {`${type_filter ? props.searchOptionsHash[type_filter] : "Search results" + } for ${results}`} +

+
+ {type_filter ? ( + + + {`Back to all search results for ${results}`} + + ) : ( +

+ Just show me: + {props.searchOptions.map((o) => ( + + + {o.description} + + + ))} +

+ )} +
+
+
+
+ {groupedSearchResults.length === 0 && NoResults(props.searchTerms)} + {groupedSearchResults.map((group) => ( +
+ {TableForGroup( + group, + RegExp(`${props.searchTerms.join("|")}`, "i"), + props.partyRelationshipTypeHash, + params, + !!type_filter + )} + {!type_filter && ( + + See more search results for {props.searchOptionsHash[group.type]} + + )} +
+ ))} + +
+
+
+
+ ) : ( + <> + ); +}; + +const mapStateToProps = (state: any) => ({ + searchOptions: selectSearchOptions(state), + searchOptionsHash: mapValues(keyBy(selectSearchOptions(state), "model_id"), "description"), + searchResults: selectSearchResults(state), + searchTerms: selectSearchTerms(state), + partyRelationshipTypeHash: getPartyRelationshipTypeHash(state), +}); + +const mapDispatchToProps = (dispatch) => + bindActionCreators( + { + fetchSearchOptions, + fetchSearchResults, + }, + dispatch + ); + + +const connector = connect(mapStateToProps, mapDispatchToProps); +type PropsFromRedux = ConnectedProps; + +export default connector(SearchResultsLegacy); diff --git a/services/core-web/src/components/search/SearchResultsTabs.tsx b/services/core-web/src/components/search/SearchResultsTabs.tsx new file mode 100644 index 0000000000..627c1c622f --- /dev/null +++ b/services/core-web/src/components/search/SearchResultsTabs.tsx @@ -0,0 +1,280 @@ +import React from "react"; +import { Tabs, Empty } from "antd"; +import { MineResultsTable } from "./MineResultsTable"; +import { PermitResultsTable } from "./PermitResultsTable"; +import { ContactResultsTable } from "./ContactResultsTable"; +import { DocumentResultsTable } from "./DocumentResultsTable"; +import { GenericResultsTable } from "./GenericResultsTable"; +import * as router from "@/constants/routes"; + +interface SearchResultsTabsProps { + activeTab: string; + onTabChange: (key: string) => void; + query: string; + highlightRegex: RegExp | null; + partyRelationshipTypeHash: any; + results: { + mines: any[]; + mineResults: any[]; + peopleResults: any[]; + organizationResults: any[]; + permitResults: any[]; + documentResults: any[]; + explosivesPermitResults: any[]; + explosivesPermits: any[]; + nowApplicationResults: any[]; + nowApplications: any[]; + nodResults: any[]; + nods: any[]; + totalResults: number; + }; +} + +const renderEmptyState = () => ( + No results in this category} + image={Empty.PRESENTED_IMAGE_SIMPLE} + className="search-results-v2__empty-state" + /> +); + +export const SearchResultsTabs: React.FC = ({ + activeTab, + onTabChange, + query, + highlightRegex, + partyRelationshipTypeHash, + results, +}) => { + const { + mines, + mineResults, + peopleResults, + organizationResults, + permitResults, + documentResults, + explosivesPermitResults, + explosivesPermits, + nowApplicationResults, + nowApplications, + nodResults, + nods, + totalResults, + } = results; + + const explosivesColumns = [ + { title: "Application #", dataIndex: "application_number", key: "application_number" }, + { title: "Status", dataIndex: "application_status", key: "application_status" }, + { title: "Mine", dataIndex: "mine_name", key: "mine_name", link: (record: any) => router.MINE_PERMITS.dynamicRoute(record.mine_guid) }, + { title: "Closed", dataIndex: "is_closed", key: "is_closed", customRender: (text: boolean) => text ? 
"Yes" : "No" }, + ]; + + const nowColumns = [ + { title: "NoW #", dataIndex: "now_number", key: "now_number", link: (record: any) => router.NOTICE_OF_WORK_APPLICATION.dynamicRoute(record.now_application_guid, "verification") }, + { title: "Status", dataIndex: "now_application_status_code", key: "status" }, + { title: "Type", dataIndex: "notice_of_work_type_code", key: "type" }, + { title: "Mine", dataIndex: "mine_name", key: "mine_name", link: (record: any) => router.MINE_GENERAL.dynamicRoute(record.mine_guid) }, + ]; + + const nodColumns = [ + { title: "NOD #", dataIndex: "nod_no", key: "nod_no", link: (record: any) => router.NOTICE_OF_DEPARTURE.dynamicRoute(record.mine_guid, record.nod_guid) }, + { title: "Title", dataIndex: "nod_title", key: "nod_title" }, + { title: "Type", dataIndex: "nod_type", key: "nod_type" }, + { title: "Status", dataIndex: "nod_status", key: "nod_status" }, + { title: "Mine", dataIndex: "mine_name", key: "mine_name", link: (record: any) => router.MINE_GENERAL.dynamicRoute(record.mine_guid) }, + ]; + + const tabItems = [ + { + key: "all", + label: `All (${totalResults})`, + children: totalResults === 0 ? renderEmptyState() : ( + <> + {mineResults.length > 0 && ( + + )} + {peopleResults.length > 0 && ( + + )} + {organizationResults.length > 0 && ( + + )} + {permitResults.length > 0 && ( + + )} + {explosivesPermitResults.length > 0 && ( + record.explosives_permit_guid} + columns={explosivesColumns} + /> + )} + {nowApplicationResults.length > 0 && ( + record.now_application_guid} + columns={nowColumns} + /> + )} + {nodResults.length > 0 && ( + record.nod_guid} + columns={nodColumns} + /> + )} + {documentResults.length > 0 && ( + + )} + + ), + }, + { + key: "mine", + label: `Mines (${mines.length})`, + children: mineResults.length === 0 ? renderEmptyState() : ( + + ), + }, + { + key: "people", + label: `People (${peopleResults.length})`, + children: peopleResults.length === 0 ? renderEmptyState() : ( + + ), + }, + { + key: "organization", + label: `Organizations (${organizationResults.length})`, + children: organizationResults.length === 0 ? renderEmptyState() : ( + + ), + }, + { + key: "permit", + label: `Permits (${results.permitResults.length})`, + children: permitResults.length === 0 ? renderEmptyState() : ( + + ), + }, + { + key: "explosives_permit", + label: `Explosives (${explosivesPermits.length})`, + children: explosivesPermitResults.length === 0 ? renderEmptyState() : ( + record.explosives_permit_guid} + columns={explosivesColumns} + /> + ), + }, + { + key: "now_application", + label: `NoW (${nowApplications.length})`, + children: nowApplicationResults.length === 0 ? renderEmptyState() : ( + record.now_application_guid} + columns={nowColumns} + /> + ), + }, + { + key: "notice_of_departure", + label: `NODs (${nods.length})`, + children: nodResults.length === 0 ? renderEmptyState() : ( + record.nod_guid} + columns={nodColumns} + /> + ), + }, + { + key: "document", + label: `Documents (${documentResults.length})`, + children: documentResults.length === 0 ? 
renderEmptyState() : ( + + ), + }, + ]; + + return ( + + ); +}; diff --git a/services/core-web/src/components/search/SearchResultsV2.tsx b/services/core-web/src/components/search/SearchResultsV2.tsx new file mode 100644 index 0000000000..de5401ccf6 --- /dev/null +++ b/services/core-web/src/components/search/SearchResultsV2.tsx @@ -0,0 +1,87 @@ +import React from "react"; +import { Row, Col, Spin, Typography } from "antd"; +import { useSearchResults } from "./useSearchResults"; +import { SearchHeader } from "./SearchHeader"; +import { SearchFiltersPanel } from "./SearchFiltersPanel"; +import { SearchResultsTabs } from "./SearchResultsTabs"; +import { PageTracker } from "@common/utils/trackers"; + +const { Text } = Typography; + +export const SearchResults: React.FC = () => { + const { + isSearching, + params, + searchInputValue, + setSearchInputValue, + selectedFilters, + searchFacets, + partyRelationshipTypeHash, + highlightRegex, + onSearch, + onTabChange, + handleFilterChange, + clearAllFilters, + hasActiveFilters, + results, + } = useSearchResults(); + + const activeTab = params.t || "all"; + + return ( +
+ + + +
+
+ {isSearching ? ( +
+ +
+ Searching... +
+
+ ) : ( + +
+ + + +
+ + {results.totalResults === 0 ? ( + <>No results for "{params.q}"{hasActiveFilters && " (filtered)"} + ) : ( + <>Showing {results.totalResults} results for "{params.q}"{hasActiveFilters && " (filtered)"} + )} + +
+ + + + )} + + + + ); +}; + +export default SearchResults; diff --git a/services/core-web/src/components/search/searchResultsConfig.ts b/services/core-web/src/components/search/searchResultsConfig.ts new file mode 100644 index 0000000000..86e0414979 --- /dev/null +++ b/services/core-web/src/components/search/searchResultsConfig.ts @@ -0,0 +1,106 @@ +import React from "react"; +import { + EnvironmentOutlined, + UserOutlined, + FileProtectOutlined, + AlertOutlined, + FileSearchOutlined, + ExceptionOutlined, +} from "@ant-design/icons"; + +export interface FacetBucket { + key: string; + count: number; +} + +export interface FacetGroup { + key: string; + label: string; + icon: React.ReactNode; + color: string; + facets: string[]; +} + +export const FACET_GROUPS: FacetGroup[] = [ + { + key: "mine", + label: "Mine Filters", + icon: React.createElement(EnvironmentOutlined), + color: "#2e7d32", + facets: ["mine_region", "mine_classification", "mine_operation_status", "mine_tenure", "mine_commodity", "has_tsf", "verified_status"] + }, + { + key: "permit", + label: "Permit Filters", + icon: React.createElement(FileProtectOutlined), + color: "#e65100", + facets: ["permit_status", "is_exploration"] + }, + { + key: "party", + label: "Contact Filters", + icon: React.createElement(UserOutlined), + color: "#1565c0", + facets: ["party_type"] + }, + { + key: "explosives_permit", + label: "Explosives Filters", + icon: React.createElement(AlertOutlined), + color: "#d32f2f", + facets: ["explosives_permit_status", "explosives_permit_closed"] + }, + { + key: "now_application", + label: "NoW Filters", + icon: React.createElement(FileSearchOutlined), + color: "#0288d1", + facets: ["now_application_status", "now_type"] + }, + { + key: "nod", + label: "NOD Filters", + icon: React.createElement(ExceptionOutlined), + color: "#7b1fa2", + facets: ["nod_type", "nod_status"] + }, +]; + +export const FACET_LABELS: Record = { + mine_region: "Region", + mine_classification: "Classification", + mine_operation_status: "Operation Status", + mine_tenure: "Tenure Type", + mine_commodity: "Commodity", + has_tsf: "Tailings Storage Facility", + verified_status: "Verification Status", + permit_status: "Status", + is_exploration: "Exploration", + party_type: "Type", + explosives_permit_status: "Status", + explosives_permit_closed: "Status", + nod_type: "Type", + nod_status: "Status", + now_application_status: "Status", + now_type: "Type", +}; + +export interface SearchFacets { + mine_region?: FacetBucket[]; + mine_classification?: FacetBucket[]; + mine_operation_status?: FacetBucket[]; + mine_tenure?: FacetBucket[]; + mine_commodity?: FacetBucket[]; + has_tsf?: FacetBucket[]; + verified_status?: FacetBucket[]; + permit_status?: FacetBucket[]; + is_exploration?: FacetBucket[]; + party_type?: FacetBucket[]; + explosives_permit_status?: FacetBucket[]; + explosives_permit_closed?: FacetBucket[]; + nod_type?: FacetBucket[]; + nod_status?: FacetBucket[]; + now_application_status?: FacetBucket[]; + now_type?: FacetBucket[]; + type?: FacetBucket[]; +} diff --git a/services/core-web/src/components/search/useSearchResults.ts b/services/core-web/src/components/search/useSearchResults.ts new file mode 100644 index 0000000000..09690285fb --- /dev/null +++ b/services/core-web/src/components/search/useSearchResults.ts @@ -0,0 +1,215 @@ +import { useEffect, useState, useMemo, useCallback } from "react"; +import { useSelector, useDispatch } from "react-redux"; +import { useLocation, useHistory } from "react-router-dom"; +import queryString from 
"query-string"; +import { + selectSearchResults, + selectSearchFacets, + selectSearchOptions, + fetchSearchOptions, + fetchSearchResults, +} from "@mds/common/redux/slices/searchSlice"; +import { getPartyRelationshipTypeHash } from "@mds/common/redux/selectors/staticContentSelectors"; +import * as router from "@/constants/routes"; + +export interface SearchParams { + q?: string; + t?: string; +} + +export const useSearchResults = () => { + const [isSearching, setIsSearching] = useState(false); + const [params, setParams] = useState({}); + const [searchInputValue, setSearchInputValue] = useState(""); + const [selectedFilters, setSelectedFilters] = useState>({}); + const history = useHistory(); + const location = useLocation(); + const dispatch = useDispatch(); + + const searchOptions = useSelector(selectSearchOptions); + const searchResults = useSelector(selectSearchResults); + const searchFacets = useSelector(selectSearchFacets); + const partyRelationshipTypeHash = useSelector(getPartyRelationshipTypeHash); + + const highlightRegex = useMemo(() => { + if (!params.q) return null; + const escapedTerm = params.q.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); + try { + return new RegExp(escapedTerm, "i"); + } catch { + return null; + } + }, [params.q]); + + const getFiltersForApi = useCallback((filters: Record): Record => { + const apiFilters: Record = {}; + Object.entries(filters).forEach(([key, values]) => { + if (values.length > 0) { + apiFilters[key] = values.join(","); + } + }); + return apiFilters; + }, []); + + const mapTabToSearchType = useCallback((tabKey: string | undefined, currentFilters: Record): { types: string[] | undefined; filters: Record } => { + if (!tabKey || tabKey === "all") return { types: undefined, filters: currentFilters }; + + const tabToTypeMap: Record = { + "mine": "mine", + "people": "party", + "organization": "party", + "permit": "permit", + "explosives_permit": "explosives_permit", + "now_application": "now_application", + "notice_of_departure": "notice_of_departure", + "document": "mine_documents,permit_documents", + }; + + const newFilters = { ...currentFilters }; + + if (tabKey === "people") { + newFilters.party_type = ["Person"]; + } else if (tabKey === "organization") { + newFilters.party_type = ["Organization"]; + } + + const typeString = tabToTypeMap[tabKey]; + return { + types: typeString ? typeString.split(",") : undefined, + filters: newFilters + }; + }, []); + + const triggerSearch = useCallback((searchTerm: string, searchTypes?: string, filters?: Record) => { + if (!searchTerm) return; + setIsSearching(true); + const { types, filters: enhancedFilters } = mapTabToSearchType(searchTypes, filters || {}); + const apiFilters = getFiltersForApi(enhancedFilters); + dispatch(fetchSearchResults({ searchTerm, searchTypes: types, filters: apiFilters })); + }, [dispatch, getFiltersForApi, mapTabToSearchType]); + + const onSearch = useCallback((value: string) => { + if (value) { + setSelectedFilters({}); + const newParams: Record = { q: value }; + if (params.t) { + newParams.t = params.t; + } + history.push(router.SEARCH_RESULTS.dynamicRoute(newParams)); + } + }, [history, params.t]); + + const onTabChange = useCallback((key: string) => { + const newParams = { q: params.q || "", t: key === "all" ? 
null : key }; + history.push(router.SEARCH_RESULTS.dynamicRoute(newParams)); + }, [history, params.q]); + + const handleFilterChange = useCallback((category: string, value: string, checked: boolean) => { + const newFilters = { ...selectedFilters }; + const current = newFilters[category] || []; + + if (checked) { + newFilters[category] = [...current, value]; + } else { + const updated = current.filter((v) => v !== value); + if (updated.length === 0) { + delete newFilters[category]; + } else { + newFilters[category] = updated; + } + } + + setSelectedFilters(newFilters); + + if (params.q) { + triggerSearch(params.q, params.t, newFilters); + } + }, [selectedFilters, params.q, params.t, triggerSearch]); + + const clearAllFilters = useCallback(() => { + setSelectedFilters({}); + if (params.q) { + triggerSearch(params.q, params.t, {}); + } + }, [params.q, params.t, triggerSearch]); + + useEffect(() => { + if (!searchOptions.length) { + dispatch(fetchSearchOptions(undefined)); + } + }, [searchOptions.length, dispatch]); + + useEffect(() => { + const parsedParams = queryString.parse(location.search); + const { q, t } = parsedParams; + if (q) { + setParams({ q: q as string, t: t as string }); + setSearchInputValue(q as string); + setIsSearching(true); + // Always fetch all types to populate all tab counts + dispatch(fetchSearchResults({ searchTerm: q as string, searchTypes: undefined, filters: {} })); + } + }, [location.search, dispatch]); + + useEffect(() => { + if (searchResults) { + setIsSearching(false); + } + }, [searchResults]); + + // Process results + const mines = searchResults?.mine || []; + const parties = searchResults?.party || []; + const permits = searchResults?.permit || []; + const mineDocuments = searchResults?.mine_documents || []; + const permitDocuments = searchResults?.permit_documents || []; + const explosivesPermits = searchResults?.explosives_permit || []; + const nowApplications = searchResults?.now_application || []; + const nods = searchResults?.notice_of_departure || []; + + const mineResults = mines.map((item: any) => item.result).filter(Boolean); + const partyResults = parties.map((item: any) => item.result).filter(Boolean); + const peopleResults = partyResults.filter((p: any) => p?.party_type_code === "PER"); + const organizationResults = partyResults.filter((p: any) => p?.party_type_code === "ORG"); + const permitResults = permits.map((item: any) => item.result).filter(Boolean); + const documentResults = [...mineDocuments, ...permitDocuments].map((item: any) => item.result).filter(Boolean); + const explosivesPermitResults = explosivesPermits.map((item: any) => item.result).filter(Boolean); + const nowApplicationResults = nowApplications.map((item: any) => item.result).filter(Boolean); + const nodResults = nods.map((item: any) => item.result).filter(Boolean); + + const totalResults = mines.length + parties.length + permits.length + mineDocuments.length + permitDocuments.length + + explosivesPermits.length + nowApplications.length + nods.length; + + const hasActiveFilters = Object.keys(selectedFilters).length > 0; + + return { + isSearching, + params, + searchInputValue, + setSearchInputValue, + selectedFilters, + searchFacets, + partyRelationshipTypeHash, + highlightRegex, + onSearch, + onTabChange, + handleFilterChange, + clearAllFilters, + hasActiveFilters, + results: { + mines, + mineResults, + peopleResults, + organizationResults, + permitResults, + documentResults, + explosivesPermitResults, + explosivesPermits, + nowApplicationResults, + nowApplications, + 
nodResults, + nods, + totalResults, + }, + }; +}; diff --git a/services/core-web/src/styles/components/EmptySearchState.scss b/services/core-web/src/styles/components/EmptySearchState.scss new file mode 100644 index 0000000000..011dda7068 --- /dev/null +++ b/services/core-web/src/styles/components/EmptySearchState.scss @@ -0,0 +1,27 @@ +@use "@/styles/base.scss" as *; + +.empty-search-state { + &__container { + width: 100%; + padding: 32px; + } + + &__icon { + font-size: 48px; + color: #d9d9d9; + } + + &__quick-actions { + width: 100%; + padding: 16px 20px; + } + + &__action-btn { + height: auto; + padding: 12px 8px; + } + + &__action-label { + font-size: 12px; + } +} diff --git a/services/core-web/src/styles/components/GlobalSearch.scss b/services/core-web/src/styles/components/GlobalSearch.scss new file mode 100644 index 0000000000..efb8f646f1 --- /dev/null +++ b/services/core-web/src/styles/components/GlobalSearch.scss @@ -0,0 +1,51 @@ +@use "@/styles/base.scss" as *; + +.global-search-modal { + top: 80px; + + .ant-modal-content { + border-radius: 8px; + padding: 0; + } + + &__input { + border-bottom: 1px solid #f0f0f0; + border-radius: 0; + height: 60px; // Increased height to prevent "squished" look + padding: 0 16px; // Horizontal padding to match other elements + display: flex; + align-items: center; + + input { + font-size: 16px; + height: 100%; + } + + .ant-input-prefix { + margin-right: 12px; + } + } + + &__footer { + font-size: 12px; + color: #8c8c8c; + padding: 12px 16px; // Added horizontal padding to align with content + background-color: #fcfcfc; // Optional: slight background distinction + border-top: 1px solid #f0f0f0; + border-radius: 0 0 8px 8px; + } +} + +.search-icon { + color: $violet; + font-size: 20px; // Slightly larger icon +} + +.search-tag-icon { + margin: 0; + margin-left: 4px; +} + +.search-type-label { + margin-left: 4px; +} diff --git a/services/core-web/src/styles/components/GlobalSearchResults.scss b/services/core-web/src/styles/components/GlobalSearchResults.scss new file mode 100644 index 0000000000..a670fff641 --- /dev/null +++ b/services/core-web/src/styles/components/GlobalSearchResults.scss @@ -0,0 +1,30 @@ +@use "@/styles/base.scss" as *; + +.global-search-results { + max-height: 400px; + overflow-y: auto; + + .ant-list-items { + padding: 4px 0; + } + + .ant-list-item { + border-bottom: none; + } + + &__section { + &-divider { + margin: 8px 0; + font-size: 12px; + } + } + + &__view-all { + padding: 12px 20px; + border-top: 1px solid #f0f0f0; + + &-btn { + color: $violet; + } + } +} diff --git a/services/core-web/src/styles/components/RecentSearches.scss b/services/core-web/src/styles/components/RecentSearches.scss new file mode 100644 index 0000000000..23cb3151e3 --- /dev/null +++ b/services/core-web/src/styles/components/RecentSearches.scss @@ -0,0 +1,54 @@ +@use "@/styles/base.scss" as *; + +.recent-searches { + &__divider { + background-color: #fafafa; + margin: 0 !important; + padding: 8px 16px; + border-bottom: 1px solid #f0f0f0; + width: 100%; + + .ant-divider-inner-text { + padding: 0; + font-size: 12px; + font-weight: 600; + color: #8c8c8c; + } + + &::before, &::after { + display: none !important; + } + } + + &__item { + cursor: pointer; + padding: 10px 16px; // Slightly increased vertical padding + transition: all 0.2s; + border-left: 3px solid transparent; // Increased border width for better visibility + background: transparent; + + &--selected { + border-left: 3px solid $violet; + background: rgba($violet, 0.08); + } + + &--hovered { + 
background: rgba($violet, 0.04); + } + } + + &__delete-icon { + color: #bfbfbf; + cursor: pointer; + padding: 4px; + + &:hover { + color: $danger-btn-color; + } + } + + &__icon { + color: #bfbfbf; + margin-right: 8px; + } +} \ No newline at end of file diff --git a/services/core-web/src/styles/components/SearchBar.scss b/services/core-web/src/styles/components/SearchBar.scss index 10e27a45cc..26aeb5793b 100644 --- a/services/core-web/src/styles/components/SearchBar.scss +++ b/services/core-web/src/styles/components/SearchBar.scss @@ -21,4 +21,12 @@ background-color: $lightest-grey; } } +} + +.search-trigger-container { + .searchbar { + .anticon { + color: inherit; + } + } } \ No newline at end of file diff --git a/services/core-web/src/styles/components/SearchFilters.scss b/services/core-web/src/styles/components/SearchFilters.scss new file mode 100644 index 0000000000..71fc5f9251 --- /dev/null +++ b/services/core-web/src/styles/components/SearchFilters.scss @@ -0,0 +1,33 @@ +@use "@/styles/base.scss" as *; + +.search-filters { + padding: 12px 16px; + border-bottom: 1px solid #f0f0f0; + + &__container { + padding: 0; + } + + &__tag { + border-radius: 4px; + padding: 0 8px; + font-size: 13px; + + &--mine-scope { + border: 1px solid #d9d9d9; + + &.checked { + border: 1px solid $violet; + } + } + } + + &__divider { + margin: 0 4px; + height: 20px; + } + + &__count { + opacity: 0.6; + } +} diff --git a/services/core-web/src/styles/components/SearchResultItem.scss b/services/core-web/src/styles/components/SearchResultItem.scss new file mode 100644 index 0000000000..e3f5d35f72 --- /dev/null +++ b/services/core-web/src/styles/components/SearchResultItem.scss @@ -0,0 +1,67 @@ +@use "@/styles/base.scss" as *; + +.search-result-item { + cursor: pointer; + padding: 8px 16px; + transition: background-color 0.15s ease; + border-left: 2px solid transparent; + background: transparent; + min-height: 56px; + height: auto; + + &.ant-list-item { + padding: 8px 16px; + } + + .ant-list-item-meta { + align-items: center; + flex: 1; + min-width: 0; + } + + .ant-list-item-meta-content { + flex: 1; + min-width: 0; + overflow: hidden; + } + + .ant-list-item-meta-title { + margin-bottom: 2px; + white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; + } + + .ant-list-item-meta-description { + white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; + font-size: 12px; + line-height: 1.4; + } + + &--selected { + border-left: 2px solid $violet; + background: rgba($violet, 0.08); + } + + &--hovered { + background: rgba($violet, 0.04); + } + + &__title { + font-weight: 500; + display: inline; + } + + &__highlight { + margin-left: 8px; + font-style: italic; + } + + &__enter-icon { + color: $violet; + flex-shrink: 0; + margin-left: 8px; + } +} diff --git a/services/core-web/src/styles/components/SearchResults.scss b/services/core-web/src/styles/components/SearchResults.scss new file mode 100644 index 0000000000..ec736a80c4 --- /dev/null +++ b/services/core-web/src/styles/components/SearchResults.scss @@ -0,0 +1,41 @@ +@use "../base.scss" as *; + +.search-results-tabs { + .ant-tabs-nav { + &::before { + border-bottom: 1px solid #f0f0f0; + } + } + + .ant-tabs-nav-list { + flex-wrap: wrap !important; + gap: 0; + } + + .ant-tabs-tab { + flex-shrink: 0; + margin: 0 16px 0 0 !important; + padding: 8px 0 !important; + + &:last-child { + margin-right: 0 !important; + } + } + + // Ensure tabs can wrap to next line + .ant-tabs-nav-wrap { + overflow: visible !important; + } + + .ant-tabs-nav-operations { + 
display: none !important; // Hide the dropdown arrows that Ant Design adds + } +} + +.search-results-page { + // Inherits from .landing-page styles + // Explicitly set background to match other pages + background-color: $secondary-background-colour !important; + + // tab__content padding comes from layout.scss +} diff --git a/services/core-web/src/styles/components/SearchResultsV2.scss b/services/core-web/src/styles/components/SearchResultsV2.scss new file mode 100644 index 0000000000..8c45a26437 --- /dev/null +++ b/services/core-web/src/styles/components/SearchResultsV2.scss @@ -0,0 +1,81 @@ +@use "@/styles/base.scss" as *; + +.search-results-v2 { + + &__loading { + text-align: center; + padding: 48px; + + &-text { + margin-top: 16px; + } + } + + &__filters-card { + margin-bottom: 16px; + + &-header { + display: flex; + justify-content: space-between; + align-items: center; + margin-bottom: 12px; + } + + &-icon { + margin-right: 8px; + } + + &-tags { + margin-bottom: 12px; + } + + &-tag { + margin: 0; + } + + &-group { + margin-bottom: 8px; + } + + &-label { + font-size: 12px; + display: block; + margin-bottom: 4px; + } + + &-list { + max-height: 200px; + overflow-y: auto; + padding-left: 8px; + } + + &-item { + margin-bottom: 4px; + + &-text { + font-size: 13px; + } + + &-count { + font-size: 11px; + } + } + + &-empty { + font-size: 12px; + } + } + + &__empty-state { + padding: 40px 0; + } + + &__header-search { + margin-top: 16px; + max-width: 600px; + } + + &__result-count { + margin-bottom: 16px; + } +} diff --git a/services/core-web/src/styles/index.scss b/services/core-web/src/styles/index.scss index cdebff2a0d..e4af9013d6 100755 --- a/services/core-web/src/styles/index.scss +++ b/services/core-web/src/styles/index.scss @@ -23,6 +23,13 @@ @forward "./components/HomePage.scss"; @forward "./components/NoticeOfWork.scss"; @forward "./components/SearchBar.scss"; +@forward "./components/GlobalSearch.scss"; +@forward "./components/SearchFilters.scss"; +@forward "./components/SearchResultItem.scss"; +@forward "./components/RecentSearches.scss"; +@forward "./components/EmptySearchState.scss"; +@forward "./components/GlobalSearchResults.scss"; +@forward "./components/SearchResultsV2.scss"; @forward "./components/CoreTooltip.scss"; @forward "./components/MergeContacts.scss"; @forward "./components/NoticeOfDeparture.scss"; diff --git a/services/core-web/src/tests/actionCreators/searchActionCreator.spec.js b/services/core-web/src/tests/actionCreators/searchActionCreator.spec.js deleted file mode 100644 index 78e88a751f..0000000000 --- a/services/core-web/src/tests/actionCreators/searchActionCreator.spec.js +++ /dev/null @@ -1,99 +0,0 @@ -import MockAdapter from "axios-mock-adapter"; -import axios from "axios"; -import { - fetchSearchResults, - fetchSearchBarResults, - fetchSearchOptions, -} from "@mds/common/redux/actionCreators/searchActionCreator"; -import * as genericActions from "@mds/common/redux/actions/genericActions"; -import { ENVIRONMENT } from "@mds/common/constants/environment"; -import * as API from "@mds/common/constants/API"; -import * as MOCK from "@mds/common/tests/mocks/dataMocks"; - -const dispatch = jest.fn(); -const requestSpy = jest.spyOn(genericActions, "request"); -const successSpy = jest.spyOn(genericActions, "success"); -const errorSpy = jest.spyOn(genericActions, "error"); -const mockAxios = new MockAdapter(axios); - -beforeEach(() => { - mockAxios.reset(); - dispatch.mockClear(); - requestSpy.mockClear(); - successSpy.mockClear(); - errorSpy.mockClear(); -}); - 
-describe("`fetchSearchResults` action creator", () => { - const searchTerm = "abb"; - const url = ENVIRONMENT.apiUrl + API.SEARCH({ search_term: searchTerm, search_types: null }); - it("Request successful, dispatches `success` with correct response", () => { - const mockResponse = { data: { success: true } }; - mockAxios.onGet(url).reply(200, mockResponse); - return fetchSearchResults( - searchTerm, - null - )(dispatch).then(() => { - expect(requestSpy).toHaveBeenCalledTimes(1); - expect(successSpy).toHaveBeenCalledTimes(1); - expect(dispatch).toHaveBeenCalledTimes(6); - }); - }); - - it("Request failure, dispatches `error` with correct response", () => { - mockAxios.onGet(url).reply(418, MOCK.ERROR); - return fetchSearchResults( - searchTerm, - null - )(dispatch).catch(() => { - expect(requestSpy).toHaveBeenCalledTimes(1); - expect(errorSpy).toHaveBeenCalledTimes(1); - expect(dispatch).toHaveBeenCalledTimes(4); - }); - }); -}); - -describe("`fetchSearchBarResults` action creator", () => { - const searchTerm = "abb"; - const url = `${ENVIRONMENT.apiUrl + API.SIMPLE_SEARCH}?search_term=${searchTerm}`; - it("Request successful, dispatches `success` with correct response", () => { - const mockResponse = { data: { success: true } }; - mockAxios.onGet(url).reply(200, mockResponse); - return fetchSearchBarResults(searchTerm)(dispatch).then(() => { - expect(requestSpy).toHaveBeenCalledTimes(1); - expect(successSpy).toHaveBeenCalledTimes(1); - expect(dispatch).toHaveBeenCalledTimes(6); - }); - }); - - it("Request failure, dispatches `error` with correct response", () => { - mockAxios.onGet(url).reply(418, MOCK.ERROR); - return fetchSearchBarResults(searchTerm)(dispatch).then(() => { - expect(requestSpy).toHaveBeenCalledTimes(1); - expect(errorSpy).toHaveBeenCalledTimes(1); - expect(dispatch).toHaveBeenCalledTimes(4); - }); - }); -}); - -describe("`fetchSearchOptions` action creator", () => { - const url = ENVIRONMENT.apiUrl + API.SEARCH_OPTIONS; - it("Request successful, dispatches `success` with correct response", () => { - const mockResponse = { data: { success: true } }; - mockAxios.onGet(url).reply(200, mockResponse); - return fetchSearchOptions()(dispatch).then(() => { - expect(requestSpy).toHaveBeenCalledTimes(1); - expect(successSpy).toHaveBeenCalledTimes(1); - expect(dispatch).toHaveBeenCalledTimes(6); - }); - }); - - it("Request failure, dispatches `error` with correct response", () => { - mockAxios.onGet(url).reply(418, MOCK.ERROR); - return fetchSearchOptions()(dispatch).then(() => { - expect(requestSpy).toHaveBeenCalledTimes(1); - expect(errorSpy).toHaveBeenCalledTimes(1); - expect(dispatch).toHaveBeenCalledTimes(4); - }); - }); -}); diff --git a/services/core-web/src/tests/components/Forms/noticeOfWork/VerifyNoWContacts.spec.tsx b/services/core-web/src/tests/components/Forms/noticeOfWork/VerifyNoWContacts.spec.tsx index 8bb872df7d..f6378fd124 100644 --- a/services/core-web/src/tests/components/Forms/noticeOfWork/VerifyNoWContacts.spec.tsx +++ b/services/core-web/src/tests/components/Forms/noticeOfWork/VerifyNoWContacts.spec.tsx @@ -1,10 +1,11 @@ import React from "react"; -import { render, screen, fireEvent, waitFor, within } from "@testing-library/react"; +import { render, screen, fireEvent, waitFor, within, act } from "@testing-library/react"; import VerifyNoWContacts from "@/components/Forms/noticeOfWork/VerifyNoWContacts"; import * as FORM from "@/constants/forms"; import { ReduxWrapper } from "@/tests/utils/ReduxWrapper"; import FormWrapper from 
"@mds/common/components/forms/FormWrapper"; -import { STATIC_CONTENT, SEARCH } from "@mds/common/constants/reducerTypes"; +import { STATIC_CONTENT } from "@mds/common/constants/reducerTypes"; +import { searchReducerType } from "@mds/common/redux/slices/searchSlice"; jest.mock("@/components/common/wrappers/AuthorizationWrapper", () => ({ children }: any) => <>{children}); @@ -38,7 +39,7 @@ const initialState = { { value: "AGT", label: "Agent", isActive: true, subType: null }, ], }, - [SEARCH]: { + [searchReducerType]: { searchResults: { party: [] }, searchSubsetResults: [], }, @@ -132,7 +133,9 @@ describe("VerifyNoWContacts (add new contact flow)", () => { expect(typeof capturedAfterSubmit).toBe("function"); // Simulate modal afterSubmit callback supplying new party guid - capturedAfterSubmit("new-1", { party_guid: "new-1", name: "New Person", email: "new@test.ca", phone_no: "333", address: [{}] }); + await act(async () => { + capturedAfterSubmit("new-1", { party_guid: "new-1", name: "New Person", email: "new@test.ca", phone_no: "333", address: [{}] }); + }); // The component will re-search using the new party name; MSW handler returns results including New Person await waitFor(() => { const coreDetailHeading = screen.getByRole("heading", { name: /Core Contact Detail/i }); diff --git a/services/core-web/src/tests/components/Forms/permits/conditions/Condition.spec.tsx b/services/core-web/src/tests/components/Forms/permits/conditions/Condition.spec.tsx index c03919c542..2d2640f64c 100644 --- a/services/core-web/src/tests/components/Forms/permits/conditions/Condition.spec.tsx +++ b/services/core-web/src/tests/components/Forms/permits/conditions/Condition.spec.tsx @@ -3,6 +3,18 @@ import { render } from "@testing-library/react"; import { Condition } from "@/components/Forms/permits/conditions/Condition"; import { ReduxWrapper } from "@/tests/utils/ReduxWrapper"; +// Mock ConditionForm to avoid SIGSEGV crash with FormWrapper/AUTO_SIZE_FIELD +jest.mock("@/components/Forms/permits/conditions/ConditionForm", () => ({ + __esModule: true, + default: ({ layer, onCancel, onSubmit, initialValues }: any) => ( +
+