diff --git a/data-loading/setup-and-load-solr.sh b/data-loading/setup-and-load-solr.sh index 7b47fc82..2b0a9bec 100755 --- a/data-loading/setup-and-load-solr.sh +++ b/data-loading/setup-and-load-solr.sh @@ -1,162 +1,35 @@ #!/usr/bin/env bash -SOLR_PORT=8983 - -is_solr_up(){ - echo "Checking if solr is up on http://localhost:$SOLR_PORT/solr/admin/cores" - http_code=`echo $(curl -s -o /dev/null -w "%{http_code}" "http://localhost:$SOLR_PORT/solr/admin/cores")` - echo $http_code - return `test $http_code = "200"` -} - -wait_for_solr(){ - while ! is_solr_up; do - sleep 3 - done -} - -wait_for_solr - -# add collection -curl -X POST 'http://localhost:8983/solr/admin/collections?action=CREATE&name=name_lookup&numShards=1&replicationFactor=1' - -# do not autocreate fields -curl 'http://localhost:8983/solr/name_lookup/config' -d '{"set-user-property": {"update.autoCreateFields": "false"}}' - -# add lowercase text type -curl -X POST -H 'Content-type:application/json' --data-binary '{ - "add-field-type" : { - "name": "LowerTextField", - "class": "solr.TextField", - "positionIncrementGap": "100", - "analyzer": { - "tokenizer": { - "class": "solr.StandardTokenizerFactory" - }, - "filters": [{ - "class": "solr.LowerCaseFilterFactory" - }] - } - } -}' 'http://localhost:8983/solr/name_lookup/schema' - -# add exactish text type (as described at https://stackoverflow.com/a/29105025/27310) -curl -X POST -H 'Content-type:application/json' --data-binary '{ - "add-field-type" : { - "name": "exactish", - "class": "solr.TextField", - "positionIncrementGap": "100", - "analyzer": { - "tokenizer": { - "class": "solr.KeywordTokenizerFactory" - }, - "filters": [{ - "class": "solr.LowerCaseFilterFactory" - }] - } - } -}' 'http://localhost:8983/solr/name_lookup/schema' - - - -# add fields -curl -X POST -H 'Content-type:application/json' --data-binary '{ - "add-field": [ - { - "name":"names", - "type":"LowerTextField", - "indexed":true, - "stored":true, - "multiValued":true - }, - { - 
"name":"names_exactish", - "type":"exactish", - "indexed":true, - "stored":false, - "multiValued":true - }, - { - "name":"curie", - "type":"string", - "stored":true - }, - { - "name":"preferred_name", - "type":"LowerTextField", - "stored":true - }, - { - "name":"preferred_name_exactish", - "type":"exactish", - "indexed":true, - "stored":false, - "multiValued":false - }, - { - "name":"types", - "type":"string", - "stored":true - "multiValued":true - }, - { - "name":"shortest_name_length", - "type":"pint", - "stored":true - }, - { - "name":"curie_suffix", - "type":"plong", - "docValues":true, - "stored":true, - "required":false, - "sortMissingLast":true - }, - { - "name":"taxa", - "type":"string", - "stored":true, - "multiValued":true - }, - { - "name":"taxon_specific", - "type":"boolean", - "stored":true, - "multiValued":false, - "sortMissingLast":true - }, - { - "name":"clique_identifier_count", - "type":"pint", - "stored":true - } - ] }' 'http://localhost:8983/solr/name_lookup/schema' - -# Add a copy field to copy names into names_exactish. -curl -X POST -H 'Content-type:application/json' --data-binary '{ - "add-copy-field": { - "source": "names", - "dest": "names_exactish" - } -}' 'http://localhost:8983/solr/name_lookup/schema' +# We don't use set -e because the loop test relies on failures being ignored. +set -uo pipefail + +# Configuration options +SOLR_SERVER="http://localhost:8983" + +# Step 1. Make sure the Solr service is up and running. +HEALTH_ENDPOINT="${SOLR_SERVER}/solr/admin/cores?action=STATUS" +response=$(wget --spider --server-response ${HEALTH_ENDPOINT} 2>&1 | grep "HTTP/" | awk '{ print $2 }') >&2 +until [ "$response" = "200" ]; do + response=$(wget --spider --server-response ${HEALTH_ENDPOINT} 2>&1 | grep "HTTP/" | awk '{ print $2 }') >&2 + echo " -- SOLR is unavailable - sleeping" + sleep 3 +done +echo "SOLR is up and running at ${SOLR_SERVER}." -# Add a copy field to copy preferred_name into preferred_name_exactish. 
-curl -X POST -H 'Content-type:application/json' --data-binary '{ - "add-copy-field": { - "source": "preferred_name", - "dest": "preferred_name_exactish" - } -}' 'http://localhost:8983/solr/name_lookup/schema' +# Step 2. Create fields for search. +SCRIPT_DIR="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)" +source "$SCRIPT_DIR/setup_solr.sh" +echo Solr database has been set up. -# add data +# Step 3. Load specified files. for f in $1; do echo "Loading $f..." # curl -d @$f needs to load the entire file into memory before uploading it, whereas # curl -X POST -T $f will stream it. See https://github.com/TranslatorSRI/NameResolution/issues/194 curl -H 'Content-Type: application/json' -X POST -T $f \ - 'http://localhost:8983/solr/name_lookup/update/json/docs?processor=uuid&uuid.fieldName=id&commit=true' - sleep 30 + "$SOLR_SERVER/solr/name_lookup/update/json/docs?processor=uuid&uuid.fieldName=id&commit=true" + sleep 60 done echo "Check solr" -curl -s --negotiate -u: 'localhost:8983/solr/name_lookup/query?q=*:*&rows=0' +curl -s --negotiate -u: "$SOLR_SERVER/solr/name_lookup/query?q=*:*&rows=0" diff --git a/data-loading/setup_solr.sh b/data-loading/setup_solr.sh new file mode 100644 index 00000000..0ea2842f --- /dev/null +++ b/data-loading/setup_solr.sh @@ -0,0 +1,147 @@ +#!/usr/bin/env bash +# +# Set up the fields and types needed by NameRes. +# +# This file should be sourced, not called directly. 
+ +# require sourcing +[[ "${BASH_SOURCE[0]}" != "$0" ]] || { + echo "Must be sourced: source $0" >&2 + exit 1 +} + +# require SOLR_SERVER +: "${SOLR_SERVER:?SOLR_SERVER must be set}" + +echo "Setting up Solr database with SOLR_SERVER='$SOLR_SERVER'" + +# add collection +curl -X POST "$SOLR_SERVER/solr/admin/collections?action=CREATE&name=name_lookup&numShards=1&replicationFactor=1" + +# do not autocreate fields +curl "$SOLR_SERVER/solr/name_lookup/config" -d '{"set-user-property": {"update.autoCreateFields": "false"}}' + +# add lowercase text type +curl -X POST -H 'Content-type:application/json' --data-binary '{ + "add-field-type" : { + "name": "LowerTextField", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "class": "solr.StandardTokenizerFactory" + }, + "filters": [{ + "class": "solr.LowerCaseFilterFactory" + }] + } + } +}' "$SOLR_SERVER/solr/name_lookup/schema" + +# add exactish text type (as described at https://stackoverflow.com/a/29105025/27310) +curl -X POST -H 'Content-type:application/json' --data-binary '{ + "add-field-type" : { + "name": "exactish", + "class": "solr.TextField", + "positionIncrementGap": "100", + "analyzer": { + "tokenizer": { + "class": "solr.KeywordTokenizerFactory" + }, + "filters": [{ + "class": "solr.LowerCaseFilterFactory" + }] + } + } +}' "$SOLR_SERVER/solr/name_lookup/schema" + + + +# add fields +curl -X POST -H 'Content-type:application/json' --data-binary '{ + "add-field": [ + { + "name":"names", + "type":"LowerTextField", + "indexed":true, + "stored":true, + "multiValued":true + }, + { + "name":"names_exactish", + "type":"exactish", + "indexed":true, + "stored":false, + "multiValued":true + }, + { + "name":"curie", + "type":"string", + "stored":true + }, + { + "name":"preferred_name", + "type":"LowerTextField", + "stored":true + }, + { + "name":"preferred_name_exactish", + "type":"exactish", + "indexed":true, + "stored":false, + "multiValued":false + }, + { + "name":"types", + 
"type":"string", + "stored":true + "multiValued":true + }, + { + "name":"shortest_name_length", + "type":"pint", + "stored":true + }, + { + "name":"curie_suffix", + "type":"plong", + "docValues":true, + "stored":true, + "required":false, + "sortMissingLast":true + }, + { + "name":"taxa", + "type":"string", + "stored":true, + "multiValued":true + }, + { + "name":"taxon_specific", + "type":"boolean", + "stored":true, + "multiValued":false, + "sortMissingLast":true + }, + { + "name":"clique_identifier_count", + "type":"pint", + "stored":true + } + ] }' "$SOLR_SERVER/solr/name_lookup/schema" + +# Add a copy field to copy names into names_exactish. +curl -X POST -H 'Content-type:application/json' --data-binary '{ + "add-copy-field": { + "source": "names", + "dest": "names_exactish" + } +}' "$SOLR_SERVER/solr/name_lookup/schema" + +# Add a copy field to copy preferred_name into preferred_name_exactish. +curl -X POST -H 'Content-type:application/json' --data-binary '{ + "add-copy-field": { + "source": "preferred_name", + "dest": "preferred_name_exactish" + } +}' "$SOLR_SERVER/solr/name_lookup/schema" diff --git a/docker-compose.yml b/docker-compose.yml index 78a38e74..1286253c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,11 +1,12 @@ services: - solr: - container_name: name_solr + nameres_solr: + container_name: nameres_solr image: solr:9.1 + mem_limit: 18G environment: # Change this setting to control how much memory you would like your Solr setup to have. # Note that your Docker will need to be configured to allow this amount of memory. - SOLR_JAVA_MEM: '-Xms25G -Xmx25G' + SOLR_JAVA_MEM: '-Xmx16G' ports: - '8983:8983' command: ['-DzkRun'] @@ -17,13 +18,14 @@ services: source: ./data/solr target: /var/solr/data - nameres: - container_name: nameres + nameres_web: + container_name: nameres_web + platform: linux/amd64 environment: - - SOLR_HOST=name_solr + - SOLR_HOST=nameres_solr - BABEL_VERSION= # e.g. 
2025mar31 - BABEL_VERSION_URL= # The URL of the Babel version URL - - LOCATION_VALUE=RENCI + - LOCATION_VALUE=localhost - MATURITY_VALUE=development ports: - '2433:2433' diff --git a/documentation/Deployment.md b/documentation/Deployment.md index 5fc144d4..d4ba344e 100644 --- a/documentation/Deployment.md +++ b/documentation/Deployment.md @@ -8,7 +8,7 @@ file, although you will need either (1) a set of synonyms files generated by Bab to load into Solr, or (2) a Solr database backup to load into Solr. The following instructions will work whichever of the two approaches you need to follow. -### Starting NameRes locally with loading from a Solr backup +### Starting NameRes locally by loading a Solr backup The simplest way to run NameRes locally is by using a Solr backup from another NameRes instance or from Translator. @@ -21,33 +21,46 @@ instance or from Translator. storage of approx 400G: 104G of the downloaded file (which can be deleted once decompressed), 147G of uncompressed backup (both of which can be deleted once restored) and 147G of Apache Solr databases. -3. Download the Solr backup URL you want to use into your Solr data directory. It should be +3. Download the Solr backup URL you want to use and save it in `./data/solr`. It should be approximately 104G in size. 4. Uncompress the Solr backup file. It should produce a `var/solr/data/snapshot.backup` directory in the Solr data (by default, `./data/solr/var/solr/data/snapshot.backup`). You can delete the downloaded file (`snapshot.backup.tar.gz`) once it has been decompressed. 5. Check the [docker-compose.yml](./docker-compose.yml) file to ensure that it is as you expect. - * By default, the Docker Compose file will use the latest released version of NameRes + * The Docker Compose file will use the latest released version of NameRes as the frontend. To use the source code in this repository, you will need to change the build instructions for the `nameres` service in the Docker Compose file. -6. 
Start the Solr and NameRes pods by running `docker-compose up`. By default, Docker Compose + * Solr will be given 16G of memory, which seems sufficient for testing. + If you want to run many Solr queries, you might want to increase this. To do this, + you will need to change BOTH the `mem_limit` setting in the `nameres_solr` service in + `docker-compose.yml` and the `SOLR_JAVA_MEM` setting. + * The `docker-compose.yml` file also mounts the local `data/` directory into the Solr + container as `/var/solr`. This will allow you to start a new NameRes from the same + directory in the future. If you want to use a different directory, please change + the `volumes` setting in the `nameres_solr` service in `docker-compose.yml`. Removing + the binding will cause the Solr data to be stored in the Docker instance, and the + data will be lost when the container is stopped. +6. Start the Solr and NameRes pods by running `docker compose up`. By default, Docker Compose will download and start the relevant pods and show you logs from both sources. You may press `Ctrl+C` to stop the pods. -7. Look for a line similar to `Uvicorn running on http://0.0.0.0:2433 (Press CTRL+C to quit)`, - which tells you where NameRes is running. - * By default, the web frontend (http://0.0.0.0:2433/docs) defaults to using the - [NameRes RENCI Dev](https://name-resolution-sri.renci.org/docs) — you will need to - change the "Servers" setting to use your local NameRes instance. - * Note that looking up http://0.0.0.0:2433/status will give you an error (`Expected core not found.`). - This is because the Solr database and indexes have not yet been loaded. -8. Run the Solr restore script using `bash`, i.e. `bash solr-restore/restore.sh`. This script - assumes that the Solr pod is available on `localhost:8983` and contains a - `var/solr/data/snapshot.backup` directory with the data to restore. -9. Look for the script to end properly (`Solr restore complete!`). 
Look up http://localhost:2433/status - to ensure that the database has been loaded as expected, and use http://localhost:2433/docs (after - changing the server) to try some test queries to make sure NameRes is working properly. -10. You can now delete the uncompressed database backup in `$SOLR_DATA/var` to save disk space. +7. Trigger the Solr restore by running the restore script using `bash`, i.e. + `bash solr-restore/restore.sh`. This script assumes that the Solr pod is available on `localhost:8983` + and contains a `var/solr/data/snapshot.backup` directory with the data to restore. It will set up + some data types needed by NameRes and then trigger a restore of a backup. It will then go into a + sleep loop until the restore is complete, which should take 15-20 minutes. +8. Check that the script ended properly (`Solr restore complete!`). Look up http://localhost:2433/status + to ensure that the database has been loaded as expected. You can now delete the uncompressed database + backup in `$SOLR_DATA/var` to save disk space. +9. With the default settings, NameRes should be running on localhost on port 2433 (i.e. http://localhost:2433/). + You should see a message in the NameRes pod log saying something like + `Uvicorn running on http://0.0.0.0:2433 (Press CTRL+C to quit)` to confirm this. + * By default, the web frontend (http://0.0.0.0:2433/docs) defaults to using the + [NameRes RENCI Dev](https://name-resolution-sri.renci.org/docs) — you will need to + change the "Servers" setting to use your local NameRes instance. + * If you try this before the restore has finished, looking up http://0.0.0.0:2433/status will give you an error + (`Expected core not found.`). This is because the Solr database and indexes have not yet been loaded. + Once this is finished, the NameRes instance should be ready to use. 
#### Loading from synonyms files diff --git a/solr-restore/restore.sh b/solr-restore/restore.sh index 69386fb5..4bc6133c 100644 --- a/solr-restore/restore.sh +++ b/solr-restore/restore.sh @@ -12,15 +12,23 @@ # This script should only require the `wget` program. # # TODO: This script does not currently implement any Blocklists. -set -xa + +# We don't use set -e because the loop test relies on failures being ignored. +set -uo pipefail # Configuration options SOLR_SERVER="http://localhost:8983" +SLEEP_INTERVAL=60 # Please don't change these values unless you change NameRes appropriately! COLLECTION_NAME="name_lookup" BACKUP_NAME="backup" +# Step 0. Make sure the Solr data directory looks like it contains the uncompressed backup. +if [ ! -d "./data/solr/var" ]; then + echo 'WARNING: No ./data/solr/var directory found; are you sure you uncompressed the NameRes backup into the Solr data directory?' >&2 +fi + # Step 1. Make sure the Solr service is up and running. HEALTH_ENDPOINT="${SOLR_SERVER}/solr/admin/cores?action=STATUS" response=$(wget --spider --server-response ${HEALTH_ENDPOINT} 2>&1 | grep "HTTP/" | awk '{ print $2 }') >&2 @@ -31,156 +39,26 @@ until [ "$response" = "200" ]; do done echo "SOLR is up and running at ${SOLR_SERVER}." -# Step 2. Create the COLLECTION_NAME if it doesn't exist. - -EXISTS=$(wget -O - ${SOLR_SERVER}/solr/admin/collections?action=LIST | grep ${COLLECTION_NAME}) - -# create collection / shard if it doesn't exist. -if [ -z "$EXISTS" ] -then - wget -O- ${SOLR_SERVER}/solr/admin/collections?action=CREATE'&'name=${COLLECTION_NAME}'&'numShards=1'&'replicationFactor=1 - sleep 3 -fi - -# Step 3. Begin restoring the data. - -# Setup fields for search -wget --post-data '{"set-user-property": {"update.autoCreateFields": "false"}}' \ - --header='Content-Type:application/json' \ - -O- ${SOLR_SERVER}/solr/${COLLECTION_NAME}/config -sleep 1 +# Step 2. Create fields for search. 
+SCRIPT_DIR="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)" +source "$SCRIPT_DIR/../data-loading/setup_solr.sh" +echo Solr database has been set up. -# Restore data -CORE_NAME=${COLLECTION_NAME}_shard1_replica_n1 +# Step 3. Restore the data +CORE_NAME="${COLLECTION_NAME}_shard1_replica_n1" +echo "Starting Solr restore on core ${CORE_NAME}, with status at ${SOLR_SERVER}/solr/${CORE_NAME}/replication?command=restorestatus" RESTORE_URL="${SOLR_SERVER}/solr/${CORE_NAME}/replication?command=restore&location=/var/solr/data/var/solr/data/&name=${BACKUP_NAME}" wget -O - "$RESTORE_URL" -sleep 10 -RESTORE_STATUS=$(wget -q -O - ${SOLR_SERVER}/solr/${CORE_NAME}/replication?command=restorestatus 2>&1 | grep "success") >&2 -echo "Restore status: ${RESTORE_STATUS}" -until [ ! -z "$RESTORE_STATUS" ] ; do - echo "Solr restore in progress. Note: if this takes too long please check solr health." - RESTORE_STATUS=$(wget -O - ${SOLR_SERVER}/solr/${CORE_NAME}/replication?command=restorestatus 2>&1 | grep "success") >&2 - sleep 10 +sleep "$SLEEP_INTERVAL" +RESTORE_STATUS_URL="${SOLR_SERVER}/solr/${CORE_NAME}/replication?command=restorestatus" +RESTORE_STATUS=$(wget -q -O - "$RESTORE_STATUS_URL" 2>&1 | grep "success") +RESTORE_STATUS="" +until [ -n "$RESTORE_STATUS" ] ; do + echo "Solr restore in progress. If this takes longer than 30 minutes, please visit ${SOLR_SERVER} with your browser to check Solr." + RESTORE_STATUS=$(wget -q -O - "$RESTORE_STATUS_URL" 2>&1 | grep "success") + sleep "$SLEEP_INTERVAL" done -echo "Solr restore complete" - -# Step 4. Create fields for search. -# (It might be possible to do this before the restore, but I'm going to follow the existing code for now.) 
-wget --post-data '{ - "add-field-type" : { - "name": "LowerTextField", - "class": "solr.TextField", - "positionIncrementGap": "100", - "analyzer": { - "tokenizer": { - "class": "solr.StandardTokenizerFactory" - }, - "filters": [{ - "class": "solr.LowerCaseFilterFactory" - }] - } - }}' \ - --header='Content-Type:application/json' \ - -O- ${SOLR_SERVER}/solr/${COLLECTION_NAME}/schema -sleep 1 -# exactish type taken from https://stackoverflow.com/a/29105025/27310 -wget --post-data '{ - "add-field-type" : { - "name": "exactish", - "class": "solr.TextField", - "analyzer": { - "tokenizer": { - "class": "solr.KeywordTokenizerFactory" - }, - "filters": [{ - "class": "solr.LowerCaseFilterFactory" - }] - } - }}' \ - --header='Content-Type:application/json' \ - -O- ${SOLR_SERVER}/solr/${COLLECTION_NAME}/schema -sleep 1 -wget --post-data '{ - "add-field": [ - { - "name":"names", - "type":"LowerTextField", - "stored": true, - "multiValued": true - }, - { - "name":"names_exactish", - "type":"exactish", - "indexed":true, - "stored":true, - "multiValued":true - }, - { - "name":"curie", - "type":"string", - "stored":true - }, - { - "name": "preferred_name", - "type": "LowerTextField", - "stored": true - }, - { - "name": "preferred_name_exactish", - "type": "exactish", - "indexed": true, - "stored": false, - "multiValued": false - }, - { - "name": "types", - "type": "string", - "stored": true, - "multiValued": true - }, - { - "name": "shortest_name_length", - "type": "pint", - "stored": true - }, - { - "name": "curie_suffix", - "type": "plong", - "docValues": true, - "stored": true, - "required": false, - "sortMissingLast": true - }, - { - "name": "taxa", - "type": "string", - "stored": true, - "multiValued": true - }, - { - "name": "clique_identifier_count", - "type": "pint", - "stored": true - } - ] - }' \ - --header='Content-Type:application/json' \ - -O- ${SOLR_SERVER}/solr/${COLLECTION_NAME}/schema -sleep 1 -wget --post-data '{ - "add-copy-field" : { - "source": "names", - 
"dest": "names_exactish" - }}' \ - --header='Content-Type:application/json' \ - -O- ${SOLR_SERVER}/solr/${COLLECTION_NAME}/schema -wget --post-data '{ - "add-copy-field" : { - "source": "preferred_name", - "dest": "preferred_name_exactish" - }}' \ - --header='Content-Type:application/json' \ - -O- ${SOLR_SERVER}/solr/${COLLECTION_NAME}/schema -sleep 1 - echo "Solr restore complete!" + +echo "Solr contents:" +curl -s --negotiate -u: "$SOLR_SERVER/solr/name_lookup/query?q=*:*&rows=0"