diff --git a/docker/main/dataservice/Dockerfile b/docker/main/dataservice/Dockerfile index 804e4e2c1..113bd53b2 100644 --- a/docker/main/dataservice/Dockerfile +++ b/docker/main/dataservice/Dockerfile @@ -1,6 +1,6 @@ ARG docker_internal_registry ################################################################################################################ -FROM ${docker_internal_registry}/dmod-py-sources as sources +FROM ${docker_internal_registry}/dmod-py-sources:latest as sources ################################################################################################################ FROM python:3.8-alpine3.15 diff --git a/docker/main/dataservice/entrypoint.sh b/docker/main/dataservice/entrypoint.sh index e42ae462d..433ecc801 100644 --- a/docker/main/dataservice/entrypoint.sh +++ b/docker/main/dataservice/entrypoint.sh @@ -62,7 +62,7 @@ if [ -d ${UPDATED_PACKAGES_DIR:=/updated_packages} ]; then for srv in $(pip -qq freeze | grep dmod | awk -F= '{print $1}' | awk -F- '{print $2}'); do if [ $(ls ${UPDATED_PACKAGES_DIR} | grep dmod.${srv}- | wc -l) -eq 1 ]; then pip uninstall -y --no-input $(pip -qq freeze | grep dmod.${srv} | awk -F= '{print $1}') - pip install $(ls ${UPDATED_PACKAGES_DIR}/*.whl | grep dmod.${srv}-) + pip install --no-deps $(ls ${UPDATED_PACKAGES_DIR}/*.whl | grep dmod.${srv}-) fi done #pip install ${UPDATED_PACKAGES_DIR}/*.whl diff --git a/docker/main/docker-deploy.yml b/docker/main/docker-deploy.yml index 665668e58..13c93963d 100644 --- a/docker/main/docker-deploy.yml +++ b/docker/main/docker-deploy.yml @@ -88,6 +88,9 @@ services: - DATA_SERVICE_ENDPOINT_HOST=${DOCKER_REQUESTS_DATASERVICE_ENDPOINT_HOST:-data-service} - DATA_SERVICE_ENDPOINT_PORT=${DOCKER_DATASERVICE_CONTAINER_PORT:-3015} - DATA_SERVICE_CLIENT_SSL_DIR=${DOCKER_REQUESTS_CONTAINER_DATASERVICE_CLIENT_SSL_DIR:-/ssl/dataservice} + - EVALUATION_SERVICE_ENDPOINT_HOST=${DOCKER_REQUESTS_EVALUATIONSERVICE_ENDPOINT_HOST:-evaluation-service} + - 
EVALUATION_SERVICE_ENDPOINT_PORT=${DOCKER_EVALUATIONSERVICE_CONTAINER_PORT:-3015} + - EVALUATION_SERVICE_CLIENT_SSL_DIR=${DOCKER_REQUESTS_CONTAINER_EVALUATIONSERVICE_CLIENT_SSL_DIR:-/ssl/evaluationservice} - PARTITIONER_SERVICE_ENDPOINT_HOST=${DOCKER_REQUESTS_PARTITIONERSERVICE_ENDPOINT_HOST:-partitioner-service} - PARTITIONER_SERVICE_ENDPOINT_PORT=${DOCKER_PARTITIONER_SERVICE_CONTAINER_PORT:-3014} - PARTITIONER_SERVICE_CLIENT_SSL_DIR=${DOCKER_REQUESTS_CONTAINER_PARTITIONERSERVICE_CLIENT_SSL_DIR:-/ssl/partitionerservice} @@ -114,7 +117,9 @@ services: ports: - ${DOCKER_SUBSET_API_PORT:-5001}:${DOCKER_SUBSET_CONTAINER_PORT:-5001} volumes: - - ${HYDROFABRIC_DATA_DIR:?Please set HYDROFABRIC_DATA_DIR for Docker environment in .env config file}:/hydrofabric_data + # TODO: even if this works, need to make it configurable + - hydrofabric:/hydrofabric_data/${DMOD_HYDROFABRIC_VOLUME:?} + #- ${HYDROFABRIC_DATA_DIR:?Please set HYDROFABRIC_DATA_DIR for Docker environment in .env config file}:/hydrofabric_data # This typically needs to be commented out; intended for development use (see related 'environment' config above) #- updated_packages:${UPDATED_PACKAGES_CONTAINER_DIR:?Check if updated packages directory should be used} deploy: @@ -128,6 +133,9 @@ services: - LISTEN_PORT=${DOCKER_SUBSET_CONTAINER_PORT:-5000} - SERVICE_PACKAGE_NAME=${PYTHON_PACKAGE_NAME_SUBSET_SERVICE:?} - FILES_DIRECTORY=/hydrofabric_data + - PYCHARM_REMOTE_DEBUG_ACTIVE=${PYCHARM_REMOTE_DEBUG_SUBSET_SERVICE_ACTIVE:-false} + - PYCHARM_REMOTE_DEBUG_SERVER_HOST=${PYCHARM_REMOTE_DEBUG_SERVER_HOST:-host.docker.internal} + - PYCHARM_REMOTE_DEBUG_SERVER_PORT=${PYCHARM_REMOTE_DEBUG_SERVER_PORT_SUBSET_SERVICE:-55874} # This typically needs to be commented out; intended for development use (see related 'volume' config below) #- UPDATED_PACKAGES_DIR=${UPDATED_PACKAGES_CONTAINER_DIR:?Updated packages directory not set, make sure this should be active} entrypoint: ["python3", "-m", 
"${PYTHON_PACKAGE_NAME_SUBSET_SERVICE:?}"] @@ -226,6 +234,9 @@ volumes: # configs above for several services) updated_packages: external: true + hydrofabric: + name: ${DMOD_HYDROFABRIC_VOLUME:?} + external: true secrets: myredis_pass: diff --git a/docker/main/ngen-calibration/Dockerfile b/docker/main/ngen-calibration/Dockerfile new file mode 100644 index 000000000..e84be87f6 --- /dev/null +++ b/docker/main/ngen-calibration/Dockerfile @@ -0,0 +1,38 @@ +ARG DOCKER_INTERNAL_REGISTRY + +FROM ${DOCKER_INTERNAL_REGISTRY}/ngen:latest + +ARG WORKDIR=/ngen +ARG USER=mpi +ARG NGEN_CAL_BRANCH=master +ARG NGEN_CAL_COMMIT + +ENV USER=${USER} USER_HOME=/home/${USER} + +WORKDIR ${WORKDIR} +USER ${USER} + +# try NGEN_CAL_COMMIT, if not set or empty, use NGEN_CAL_BRANCH +RUN pip install "git+https://github.com/noaa-owp/ngen-cal@${NGEN_CAL_COMMIT:-${NGEN_CAL_BRANCH}}#egg=ngen_cal&subdirectory=python/ngen_cal" + +COPY --chown=${USER} entrypoint.sh ${WORKDIR} + +# Change permissions for entrypoint and make sure dataset volume mount parent directories exists +RUN chmod +x ${WORKDIR}/entrypoint.sh \ + && for d in ${DATASET_DIRECTORIES}; do mkdir -p /dmod/datasets/${d}; done \ + && for d in noah-owp-modular topmodel cfe sloth 'evapotranspiration/evapotranspiration'; do \ + if [ -d ${WORKDIR}/ngen/extern/${d}/cmake_build ]; then \ + cp -a ${WORKDIR}/ngen/extern/${d}/cmake_build/*.so* /dmod/shared_libs/.; \ + fi; \ + done \ + && ( cp -a ${WORKDIR}/ngen/cmake_build_parallel/ngen /dmod/bin/ngen-parallel || true ) \ + && ( cp -a ${WORKDIR}/ngen/cmake_build_serial/ngen /dmod/bin/ngen-serial || true ) \ + && ( cp -a ${WORKDIR}/ngen/cmake_build/partitionGenerator /dmod/bin/partitionGenerator || true ) \ + && pushd /dmod/bin \ + # NOTE use of `ln -sf`. 
\ + && ( ( stat ngen-parallel && ln -sf ngen-parallel ngen ) || ( stat ngen-serial && ln -sf ngen-serial ngen ) ) \ + && popd + +ENV PATH=${WORKDIR}:$PATH +ENTRYPOINT ["entrypoint.sh"] +CMD [""] diff --git a/docker/main/ngen-calibration/entrypoint.sh b/docker/main/ngen-calibration/entrypoint.sh new file mode 100755 index 000000000..c4d054610 --- /dev/null +++ b/docker/main/ngen-calibration/entrypoint.sh @@ -0,0 +1,119 @@ +#!/bin/sh +# Managed by the _generate_docker_cmd_args function in scheduler.py of dmod.scheduler +# +# $1 will have the number of nodes associated with this run +# $2 will have comma-delimited host strings in MPI form; e.g., hostname:N,hostname:M +# $3 will have the unique job id +# $4 is the worker index +# $5 will be the name of the output dataset (which will imply a directory location) +# $6 will be the name of the hydrofabric dataset (which will imply a directory location) +# $7 will be the name of the realization configuration dataset (which will imply a directory location) +# $8 will be the name of the BMI configuration dataset (which will imply a directory location) +# $9 will be the name of the partition configuration dataset (which will imply a directory location) +# TODO: wire up $10 +# $10 will be the name of the calibration configuration dataset (which will imply a directory location) + +# Not yet supported +# no-op +MPI_NODE_COUNT="${1:?No MPI node count given}" +# no-op +MPI_HOST_STRING="${2:?No MPI host string given}" +# no-op +PARTITION_DATASET_NAME="${9:?}" + +JOB_ID=${3:?No Job id given} +WORKER_INDEX=${4:?No worker index given} + +OUTPUT_DATASET_NAME="${5:?}" +HYDROFABRIC_DATASET_NAME="${6:?}" +REALIZATION_CONFIG_DATASET_NAME="${7:?}" +BMI_CONFIG_DATASET_NAME="${8:?}" +CALIBRATION_CONFIG_DATASET_NAME="${10:?}" + +ACCESS_KEY_SECRET="object_store_exec_user_name" +SECRET_KEY_SECRET="object_store_exec_user_passwd" +DOCKER_SECRETS_DIR="/run/secrets" +ACCESS_KEY_FILE="${DOCKER_SECRETS_DIR}/${ACCESS_KEY_SECRET}" 
+SECRET_KEY_FILE="${DOCKER_SECRETS_DIR}/${SECRET_KEY_SECRET}" + +NGEN_EXECUTABLE="/ngen/ngen/cmake_build/ngen" + +ALL_DATASET_DIR="/dmod/datasets" +OUTPUT_DATASET_DIR="${ALL_DATASET_DIR}/output/${OUTPUT_DATASET_NAME}" +HYDROFABRIC_DATASET_DIR="${ALL_DATASET_DIR}/hydrofabric/${HYDROFABRIC_DATASET_NAME}" +REALIZATION_CONFIG_DATASET_DIR="${ALL_DATASET_DIR}/config/${REALIZATION_CONFIG_DATASET_NAME}" +BMI_CONFIG_DATASET_DIR="${ALL_DATASET_DIR}/config/${BMI_CONFIG_DATASET_NAME}" +PARTITION_DATASET_DIR="${ALL_DATASET_DIR}/config/${PARTITION_DATASET_NAME}" +CALIBRATION_CONFIG_DATASET_DIR="${ALL_DATASET_DIR}/config/${CALIBRATION_CONFIG_DATASET_NAME}" + +print_date() { + date "+%Y-%m-%d,%H:%M:%S" +} + +check_for_dataset_dir() { + # Dataset dir is $1 + _CATEG="$(echo "${1}" | sed "s|${ALL_DATASET_DIR}/\([^/]*\)/.*|\1|" | awk '{print toupper($0)}')" + if [ ! -d "${1}" ]; then + echo "Error: expected ${_CATEG} dataset directory ${1} not found." 2>&1 + exit 1 + fi +} + +load_object_store_keys_from_docker_secrets() { + # Read Docker Secrets files for Object Store access, if they exist + if [ -z "${ACCESS_KEY_FILE:-}" ]; then + echo "WARN: Cannot load object store access key when Docker secret file name not set" + elif [ -e "${ACCESS_KEY_FILE}" ]; then + ACCESS_KEY="$(cat "${ACCESS_KEY_FILE}")" + else + echo "WARN: Cannot load object store access key when Docker secret file does not exist" + fi + + if [ -z "${SECRET_KEY_FILE:-}" ]; then + echo "WARN: Cannot load object store secret key when Docker secret file name not set" + elif [ -e "${SECRET_KEY_FILE}" ]; then + SECRET_KEY="$(cat "${SECRET_KEY_FILE}")" + else + echo "WARN: Cannot load object store secret key when Docker secret file does not exist" + fi + + test -n "${ACCESS_KEY:-}" && test -n "${SECRET_KEY:-}" +} + +start_calibration() { + # Start ngen calibration + echo "$(print_date) Starting serial ngen calibration" + # CALIBRATION_CONFIG_FILE=${CALIBRATION_CONFIG_DATASET_DIR}/$(basename $(find 
${CALIBRATION_CONFIG_DATASET_DIR} -name "*.yaml" -maxdepth 1 | head -1)) + + # TODO: move this to CALIBRATION_CONFIG_DATASET_DIR + # NOTE: assumes that calibration dataset will be in realization config dataset AND that it is + # the only yaml file at the top level of that dataset. + CALIBRATION_CONFIG_FILE=${REALIZATION_CONFIG_DATASET_DIR}/$(basename $(find ${REALIZATION_CONFIG_DATASET_DIR} -name "*.yaml" -maxdepth 1 | head -1)) + + if [ -z "${CALIBRATION_CONFIG_FILE}" ]; then + echo "Error: NGEN calibration yaml file not found" 2>&1 + exit 1 + fi + python3 -m ngen.cal "${CALIBRATION_CONFIG_FILE}" + + #Capture the return value to use as service exit code + NGEN_RETURN=$? + + echo "$(print_date) ngen calibration finished with return value: ${NGEN_RETURN}" + + # Exit with the model's exit code + return ${NGEN_RETURN} +} + +# Sanity check that the output, hydrofabric, and config datasets are available (i.e., their directories are in place) +check_for_dataset_dir "${REALIZATION_CONFIG_DATASET_DIR}" +check_for_dataset_dir "${BMI_CONFIG_DATASET_DIR}" +check_for_dataset_dir "${PARTITION_DATASET_DIR}" +check_for_dataset_dir "${HYDROFABRIC_DATASET_DIR}" +check_for_dataset_dir "${OUTPUT_DATASET_DIR}" +# check_for_dataset_dir "${CALIBRATION_CONFIG_DATASET_DIR}" + +# Move to the output dataset mounted directory +cd ${OUTPUT_DATASET_DIR} + +start_calibration diff --git a/docker/main/ngen/entrypoint.sh b/docker/main/ngen/entrypoint.sh index 239b7fd88..b9f568935 100755 --- a/docker/main/ngen/entrypoint.sh +++ b/docker/main/ngen/entrypoint.sh @@ -19,7 +19,10 @@ OUTPUT_DATASET_NAME="${5:?}" HYDROFABRIC_DATASET_NAME="${6:?}" REALIZATION_CONFIG_DATASET_NAME="${7:?}" BMI_CONFIG_DATASET_NAME="${8:?}" -PARTITION_DATASET_NAME="${9:?}" +# Don't require a partitioning config when only using a single node +if [ ${MPI_NODE_COUNT:?} -gt 1 ]; then + PARTITION_DATASET_NAME="${9:?No argument for partition config dataset when expecting one for MPI-based job}" +fi 
ACCESS_KEY_SECRET="object_store_exec_user_name" SECRET_KEY_SECRET="object_store_exec_user_passwd" @@ -36,6 +39,9 @@ fi MPI_RUN="mpirun" #NGEN_EXECUTABLE="ngen" +NGEN_SERIAL_EXECUTABLE="/ngen/ngen/cmake_build_serial/ngen" +NGEN_PARALLEL_EXECUTABLE="/ngen/ngen/cmake_build_parallel/ngen" +# This will be symlinked to the parallel one currently NGEN_EXECUTABLE="/ngen/ngen/cmake_build/ngen" ALL_DATASET_DIR="/dmod/datasets" @@ -43,7 +49,10 @@ OUTPUT_DATASET_DIR="${ALL_DATASET_DIR}/output/${OUTPUT_DATASET_NAME}" HYDROFABRIC_DATASET_DIR="${ALL_DATASET_DIR}/hydrofabric/${HYDROFABRIC_DATASET_NAME}" REALIZATION_CONFIG_DATASET_DIR="${ALL_DATASET_DIR}/config/${REALIZATION_CONFIG_DATASET_NAME}" BMI_CONFIG_DATASET_DIR="${ALL_DATASET_DIR}/config/${BMI_CONFIG_DATASET_NAME}" -PARTITION_DATASET_DIR="${ALL_DATASET_DIR}/config/${PARTITION_DATASET_NAME}" +# Don't require a partitioning dataset when only using a single node +if [ ${MPI_NODE_COUNT:?} -gt 1 ]; then + PARTITION_DATASET_DIR="${ALL_DATASET_DIR}/config/${PARTITION_DATASET_NAME:?No partition config dataset name for directory}" +fi RUN_SENTINEL="/home/${MPI_USER}/.run_sentinel" @@ -127,10 +136,32 @@ exec_main_worker_ngen_run() return ${NGEN_RETURN} } +exec_serial_ngen_run() +{ + echo "$(print_date) Skipping host checks since job uses ${MPI_NODE_COUNT} worker hosts and framework will run serially" + + # Execute the model on the linked data + echo "$(print_date) Executing serial build of ngen" + ${NGEN_SERIAL_EXECUTABLE:?} ${HYDROFABRIC_DATASET_DIR}/catchment_data.geojson "" \ + ${HYDROFABRIC_DATASET_DIR}/nexus_data.geojson "" \ + ${REALIZATION_CONFIG_DATASET_DIR}/realization_config.json + + #Capture the return value to use as service exit code + NGEN_RETURN=$? 
+ + echo "$(print_date) serial ngen command finished with return value: ${NGEN_RETURN}" + + # Exit with the model's exit code + return ${NGEN_RETURN} +} + # Sanity check that the output, hydrofabric, and config datasets are available (i.e., their directories are in place) check_for_dataset_dir "${REALIZATION_CONFIG_DATASET_DIR}" check_for_dataset_dir "${BMI_CONFIG_DATASET_DIR}" -check_for_dataset_dir "${PARTITION_DATASET_DIR}" +# Don't require a partitioning dataset when only using a single node +if [ ${MPI_NODE_COUNT:?} -gt 1 ]; then + check_for_dataset_dir "${PARTITION_DATASET_DIR:?No partition dataset directory defined}" +fi check_for_dataset_dir "${HYDROFABRIC_DATASET_DIR}" check_for_dataset_dir "${OUTPUT_DATASET_DIR}" @@ -139,7 +170,11 @@ cd ${OUTPUT_DATASET_DIR} if [ "${WORKER_INDEX}" = "0" ]; then if [ "$(whoami)" = "${MPI_USER}" ]; then - exec_main_worker_ngen_run + if [ ${MPI_NODE_COUNT:-1} -gt 1 ]; then + exec_main_worker_ngen_run + else + exec_serial_ngen_run + fi else echo "$(print_date) Starting SSH daemon on main worker" /usr/sbin/sshd -D & diff --git a/docker/main/requestservice/entrypoint.sh b/docker/main/requestservice/entrypoint.sh index da38e9ec7..2dcae593a 100755 --- a/docker/main/requestservice/entrypoint.sh +++ b/docker/main/requestservice/entrypoint.sh @@ -54,6 +54,9 @@ python -m ${SERVICE_PACKAGE_NAME:?} \ --data-service-host ${DATA_SERVICE_ENDPOINT_HOST:?} \ --data-service-port ${DATA_SERVICE_ENDPOINT_PORT:?} \ --data-service-ssl-dir ${DATA_SERVICE_CLIENT_SSL_DIR:?} \ + --evaluation-service-host ${EVALUATION_SERVICE_ENDPOINT_HOST:?} \ + --evaluation-service-port ${EVALUATION_SERVICE_ENDPOINT_PORT:?} \ + --evaluation-service-ssl-dir ${EVALUATION_SERVICE_CLIENT_SSL_DIR:?} \ --partitioner-service-host ${PARTITIONER_SERVICE_ENDPOINT_HOST:?} \ --partitioner-service-port ${PARTITIONER_SERVICE_ENDPOINT_PORT:?} \ --partitioner-service-ssl-dir ${PARTITIONER_SERVICE_CLIENT_SSL_DIR:?} diff --git a/docker/main/s3fs-volume-helper/Dockerfile 
b/docker/main/s3fs-volume-helper/Dockerfile index b8a261aeb..39100f924 100644 --- a/docker/main/s3fs-volume-helper/Dockerfile +++ b/docker/main/s3fs-volume-helper/Dockerfile @@ -1,8 +1,8 @@ FROM alpine:3.15 -RUN apk update && apk upgrade && apk add docker bash && mkdir -p /dmod/scripts +RUN apk update && apk upgrade && apk add docker bash && mkdir -p /dmod -COPY ./scripts/* /dmod/scripts/. +COPY ./scripts /dmod/scripts ENV OUT_OF_GIT_REPO='true' diff --git a/docker/main/subsetservice/entrypoint.sh b/docker/main/subsetservice/entrypoint.sh index cfc3728de..65eae6335 100755 --- a/docker/main/subsetservice/entrypoint.sh +++ b/docker/main/subsetservice/entrypoint.sh @@ -12,6 +12,23 @@ if [ -n "${VENV_DIR:-}" ]; then pip install --update -r /code/requirements.txt fi +# Install for debugging when appropriate +if [ "$(echo "${PYCHARM_REMOTE_DEBUG_ACTIVE:-false}" | tr '[:upper:]' '[:lower:]' | tr -d '[:space:]')" = "true" ]; then + _DEBUG_ARG="--pycharm-remote-debug" +fi + +# Handle some things in any cases when there is debugging +if [ -n "${_DEBUG_ARG:-}" ]; then + # Append these as well if appropriate, though defaults are coded (and they are somewhat agnostic to the debug setup) + if [ -n "${PYCHARM_REMOTE_DEBUG_SERVER_HOST:-}" ]; then + _DEBUG_ARG="${_DEBUG_ARG:-} --remote-debug-host ${PYCHARM_REMOTE_DEBUG_SERVER_HOST}" + fi + + if [ -n "${PYCHARM_REMOTE_DEBUG_SERVER_PORT:-}" ]; then + _DEBUG_ARG="${_DEBUG_ARG:-} --remote-debug-port ${PYCHARM_REMOTE_DEBUG_SERVER_PORT}" + fi +fi + # If we find this directory, and if there are wheels in it, then install those if [ -d ${UPDATED_PACKAGES_DIR:=/updated_packages} ]; then if [ $(ls ${UPDATED_PACKAGES_DIR}/*.whl | wc -l) -gt 0 ]; then @@ -44,4 +61,4 @@ fi #set +e #export PYTHONASYNCIODEBUG=1 -python3 -m ${SERVICE_PACKAGE_NAME:?} ${args} \ No newline at end of file +python3 -m ${SERVICE_PACKAGE_NAME:?} ${_DEBUG_ARG:-} ${args} \ No newline at end of file diff --git a/docker/nwm_gui/app_server/Dockerfile 
b/docker/nwm_gui/app_server/Dockerfile index 09720c9e0..070a0e28f 100644 --- a/docker/nwm_gui/app_server/Dockerfile +++ b/docker/nwm_gui/app_server/Dockerfile @@ -20,12 +20,14 @@ RUN pip install -r dependencies.txt ENV PYTHONUNBUFFERED 1 # Slurp (or set default) wheel package names ... +ARG core_package_name=dmod-core ARG comms_package_name=dmod-communication ARG client_package_name=dmod-client # Copy custom built packages from external sources image COPY --from=sources /DIST /DIST -RUN pip install --upgrade --find-links=/DIST ${comms_package_name} \ +RUN pip install --upgrade --find-links=/DIST ${core_package_name} \ + && pip install --upgrade --find-links=/DIST ${comms_package_name} \ && pip install --upgrade --find-links=/DIST ${client_package_name} \ # After eventually installing all dist files like this, clean up ... \ && rm -rf /DIST diff --git a/docker/nwm_gui/app_server/entrypoint.sh b/docker/nwm_gui/app_server/entrypoint.sh index a2d8a531c..62c90eac2 100755 --- a/docker/nwm_gui/app_server/entrypoint.sh +++ b/docker/nwm_gui/app_server/entrypoint.sh @@ -34,6 +34,10 @@ echo "Starting dmod app" #Extract the DB secrets into correct ENV variables POSTGRES_SECRET_FILE="/run/secrets/${DOCKER_SECRET_POSTGRES_PASS:?}" export SQL_PASSWORD="$(cat ${POSTGRES_SECRET_FILE})" +export DMOD_SU_PASSWORD="$(cat ${POSTGRES_SECRET_FILE})" + +# Execute the migration scripts on the designated database +python manage.py migrate # Handle for debugging when appropriate if [ "$(echo "${PYCHARM_REMOTE_DEBUG_ACTIVE:-false}" | tr '[:upper:]' '[:lower:]' | tr -d '[:space:]')" == "true" ]; then diff --git a/docker/nwm_gui/docker-compose.yml b/docker/nwm_gui/docker-compose.yml index 509441649..403c6a92b 100644 --- a/docker/nwm_gui/docker-compose.yml +++ b/docker/nwm_gui/docker-compose.yml @@ -34,6 +34,7 @@ services: args: docker_internal_registry: ${DOCKER_INTERNAL_REGISTRY:?Missing DOCKER_INTERNAL_REGISTRY value (see 'Private Docker Registry ' section in example.env)} comms_package_name: 
${PYTHON_PACKAGE_DIST_NAME_COMMS:?} + client_package_name: ${PYTHON_PACKAGE_DIST_NAME_CLIENT:?} networks: - request-listener-net # Call this when starting the container @@ -42,6 +43,8 @@ services: restart: on-failure secrets: - postgres_password + - object_store_exec_user_name + - object_store_exec_user_passwd environment: #- VENV_DIR=${DOCKER_GUI_CONTAINER_VENV_DIR:-} #- CERT_PATH # used by dispatch.py @@ -50,18 +53,29 @@ services: - MAAS_ENDPOINT_PORT=${DOCKER_REQUESTS_HOST_PORT:-3012} - MAAS_PORTAL_DEBUG_HOST=${PYCHARM_REMOTE_DEBUG_SERVER_HOST:-host.docker.internal} - MAAS_PORTAL_DEBUG_PORT=${PYCHARM_REMOTE_DEBUG_SERVER_PORT_GUI:-55875} + - GUI_SUBSET_SERVICE_API_URL=http://${DMOD_GUI_SUBSET_SERVICE_API_HOST:?}:${DOCKER_SUBSET_API_PORT:-5001} - PYCHARM_REMOTE_DEBUG_ACTIVE=${PYCHARM_REMOTE_DEBUG_GUI_ACTIVE:-false} - PYCHARM_REMOTE_DEBUG_VERSION=${PYCHARM_REMOTE_DEBUG_VERSION:-~=211.7628.24} + - DMOD_SU_NAME=dmod_db_admin + # TODO: this needs to be changed eventually + - DMOD_SU_EMAIL=robert.bartel@noaa.gov - SQL_ENGINE=django.db.backends.postgresql - SQL_DATABASE=${DMOD_GUI_POSTGRES_DB:-dmod_dev} - SQL_USER=${DMOD_GUI_POSTGRES_USER:?} - SQL_HOST=db - SQL_PORT=5432 + - DMOD_SU_NAME=dmod_super_user + - DMOD_SU_EMAIL=none@noaa.gov - DATABASE=postgres - DOCKER_SECRET_POSTGRES_PASS=postgres_password + - OBJECT_STORE_HOSTNAME=${EXTERNAL_OBJECT_STORE_HOSTNAME:?} + - OBJECT_STORE_PORT=${EXTERNAL_OBJECT_STORE_PORT:?} + - DMOD_GUI_CSRF_TRUSTED_ORIGINS=${DMOD_GUI_CSRF_TRUSTED_ORIGINS:?No CSRF trusted origins configured (provide '' at least)} volumes: - ${DMOD_APP_STATIC:?}:/usr/maas_portal/static - ${DMOD_SSL_DIR}/requestservice:/usr/maas_portal/ssl + # Needed only for speeding debugging + #- ${DOCKER_GUI_HOST_SRC:?GUI sources path not configured in environment}/MaaS:/usr/maas_portal/MaaS #- ${DOCKER_GUI_HOST_VENV_DIR:-/tmp/blah}:${DOCKER_GUI_CONTAINER_VENV_DIR:-/tmp/blah} # Expose Django's port to the internal network so that the web server may access it expose: @@ -76,8 
+90,10 @@ services: networks: - request-listener-net volumes: - #- ${DMOD_GUI_POSTGRES_DATA:?}:/var/lib/postgresql/data - - dmod_db_volume:/var/lib/postgresql/data + # TODO: look back later at why this was done during dev work, and whether it is still needed + # TODO: for now, implement by just defaulting to same Docker volume name (i.e., without bind mount) as before + - ${DMOD_GUI_POSTGRES_DATA_VOLUME_HOST_DIR:-dmod_db_volume}:/var/lib/postgresql/data + #- ${DMOD_GUI_POSTGRES_DATA_VOLUME_HOST_DIR:?}:/var/lib/postgresql/data secrets: - postgres_password environment: @@ -98,6 +114,10 @@ networks: secrets: postgres_password: file: ../secrets/postgres_password.txt + object_store_exec_user_passwd: + file: ${DMOD_OBJECT_STORE_EXEC_USER_PASSWD_SECRET_FILE:?} + object_store_exec_user_name: + file: ${DMOD_OBJECT_STORE_EXEC_USER_NAME_SECRET_FILE:?} # Define persistent volumes that may be shared and persisted between containers volumes: dmod_db_volume: diff --git a/docker/nwm_gui/web_server/nginx/default.conf b/docker/nwm_gui/web_server/nginx/default.conf index 33b1f88ce..7d71e51c1 100644 --- a/docker/nwm_gui/web_server/nginx/default.conf +++ b/docker/nwm_gui/web_server/nginx/default.conf @@ -4,6 +4,7 @@ upstream wresgui { server { listen 80; + client_max_body_size 0; # Restrict verbs to GET, HEAD, and POST if ($request_method !~ ^(GET|HEAD|POST)$ ) diff --git a/docker/py-sources/py-deps.Dockerfile b/docker/py-sources/py-deps.Dockerfile index 9a0ff75fe..da6d7e35b 100644 --- a/docker/py-sources/py-deps.Dockerfile +++ b/docker/py-sources/py-deps.Dockerfile @@ -1,4 +1,4 @@ -ARG REQUIRE="gcc g++ musl-dev gdal-dev libffi-dev openssl-dev rust cargo git proj proj-dev proj-util openblas openblas-dev lapack lapack-dev" +ARG REQUIRE="gcc g++ musl-dev gdal-dev libffi-dev openssl-dev rust cargo git proj proj-dev proj-util openblas openblas-dev lapack lapack-dev geos-dev" ################################################################################################################ 
################################################################################################################ ##### Create foundational level build stage with initial structure diff --git a/docker/py-sources/py-sources.Dockerfile b/docker/py-sources/py-sources.Dockerfile index 9e015ad6e..3d190d092 100644 --- a/docker/py-sources/py-sources.Dockerfile +++ b/docker/py-sources/py-sources.Dockerfile @@ -1,5 +1,5 @@ ARG docker_internal_registry -FROM ${docker_internal_registry}/dmod-py-deps as basis +FROM ${docker_internal_registry}/dmod-py-deps:latest as basis # Copy these needed for sourced functions used by build scripts in later stages RUN mkdir -p /dmod/scripts/shared COPY ./scripts/dist_package.sh /dmod/scripts diff --git a/example.env b/example.env index 637e9e5ce..bafbc8e63 100644 --- a/example.env +++ b/example.env @@ -108,6 +108,11 @@ TROUTE_BRANCH=ngen ## Python Packages Settings ## ######################################################################## +## The "name" of the built client Python distribution package, for purposes of installing (e.g., via pip) +PYTHON_PACKAGE_DIST_NAME_CLIENT=dmod-client +## The name of the actual Python communication package (i.e., for importing or specifying as a module on the command line) +PYTHON_PACKAGE_NAME_CLIENT=dmod.client + ## The "name" of the built communication Python distribution package, for purposes of installing (e.g., via pip) PYTHON_PACKAGE_DIST_NAME_COMMS=dmod-communication ## The name of the actual Python communication package (i.e., for importing or specifying as a module on the command line) diff --git a/python/gui/MaaS/cbv/AbstractDatasetView.py b/python/gui/MaaS/cbv/AbstractDatasetView.py new file mode 100644 index 000000000..5917d5035 --- /dev/null +++ b/python/gui/MaaS/cbv/AbstractDatasetView.py @@ -0,0 +1,47 @@ +from abc import ABC +from django.views.generic.base import View +from dmod.client.request_clients import DatasetExternalClient +import logging +logger = logging.getLogger("gui_log") +from 
.DMODProxy import DMODMixin, GUI_STATIC_SSL_DIR +from typing import Dict, Optional +from pathlib import Path +from django.conf import settings +import minio + +MINIO_HOST_STRING = settings.MINIO_HOST_STRING +MINIO_ACCESS = Path(settings.MINIO_ACCESS_FILE).read_text().strip() +MINIO_SECRET = Path(settings.MINIO_SECRET_FILE).read_text().strip() +MINIO_SECURE_CONNECT = settings.MINIO_SECURE_CONNECT + + +class AbstractDatasetView(View, DMODMixin, ABC): + + @classmethod + def factory_minio_client(cls, endpoint: Optional[str] = None, access: Optional[str] = None, + secret: Optional[str] = None, is_secure: Optional[bool] = False) -> minio.Minio: + client = minio.Minio(endpoint=MINIO_HOST_STRING if endpoint is None else endpoint, + access_key=MINIO_ACCESS if access is None else access, + secret_key=MINIO_SECRET if secret is None else secret, + secure=MINIO_SECURE_CONNECT if is_secure is None else is_secure) + + return client + + def __init__(self, *args, **kwargs): + super(AbstractDatasetView, self).__init__(*args, **kwargs) + self._dataset_client = None + + async def get_dataset(self, dataset_name: str) -> Dict[str, dict]: + serial_dataset = await self.dataset_client.get_serialized_datasets(dataset_name=dataset_name) + return serial_dataset + + async def get_datasets(self) -> Dict[str, dict]: + serial_datasets = await self.dataset_client.get_serialized_datasets() + return serial_datasets + + @property + def dataset_client(self) -> DatasetExternalClient: + if self._dataset_client is None: + self._dataset_client = DatasetExternalClient(endpoint_uri=self.maas_endpoint_uri, + ssl_directory=GUI_STATIC_SSL_DIR) + return self._dataset_client diff --git a/python/gui/MaaS/cbv/DMODProxy.py b/python/gui/MaaS/cbv/DMODProxy.py index fc0fcb7a1..99bb725dd 100644 --- a/python/gui/MaaS/cbv/DMODProxy.py +++ b/python/gui/MaaS/cbv/DMODProxy.py @@ -7,6 +7,9 @@ from django.http import HttpRequest, HttpResponse from django.views.generic.base import View from django.shortcuts import render +from 
django.conf import settings + +DEFAULT_MAAS_URI = settings.DEFAULT_MAAS_ENDPOINT_URI import logging logger = logging.getLogger("gui_log") @@ -16,6 +19,8 @@ from pathlib import Path from typing import List, Optional, Tuple, Type +GUI_STATIC_SSL_DIR = Path(settings.GUI_SSL_DIR) + class RequestFormProcessor(ABC): @@ -209,7 +214,7 @@ class PostFormRequestClient(ModelExecRequestClient): def _bootstrap_ssl_dir(cls, ssl_dir: Optional[Path] = None): if ssl_dir is None: ssl_dir = Path(__file__).resolve().parent.parent.parent.joinpath('ssl') - ssl_dir = Path('/usr/maas_portal/ssl') #Fixme + ssl_dir = GUI_STATIC_SSL_DIR #Fixme return ssl_dir def __init__(self, endpoint_uri: str, http_request: HttpRequest, ssl_dir: Optional[Path] = None): @@ -289,8 +294,7 @@ class DMODMixin: @property def maas_endpoint_uri(self): if not hasattr(self, '_maas_endpoint_uri') or self._maas_endpoint_uri is None: - self._maas_endpoint_uri = 'wss://' + os.environ.get('MAAS_ENDPOINT_HOST') + ':' - self._maas_endpoint_uri += os.environ.get('MAAS_ENDPOINT_PORT') + self._maas_endpoint_uri = DEFAULT_MAAS_URI return self._maas_endpoint_uri def forward_request(self, request: HttpRequest, event_type: MessageEventType) -> Tuple[ @@ -315,6 +319,7 @@ def forward_request(self, request: HttpRequest, event_type: MessageEventType) -> client = PostFormRequestClient(endpoint_uri=self.maas_endpoint_uri, http_request=request) if event_type == MessageEventType.MODEL_EXEC_REQUEST: form_processor_type = ModelExecRequestFormProcessor + # TODO: need a new type of form processor here (or 3 more, for management, uploading, and downloading) else: raise RuntimeError("{} got unsupported event type: {}".format(self.__class__.__name__, str(event_type))) diff --git a/python/gui/MaaS/cbv/DatasetApiView.py b/python/gui/MaaS/cbv/DatasetApiView.py new file mode 100644 index 000000000..45b689b6d --- /dev/null +++ b/python/gui/MaaS/cbv/DatasetApiView.py @@ -0,0 +1,163 @@ +import asyncio +import zipfile + +from django.http import 
JsonResponse +from .AbstractDatasetView import AbstractDatasetView +from pathlib import Path +from django.conf import settings +from typing import Optional, Set +import logging + +logger = logging.getLogger("gui_log") + +CACHE_DIR: Path = Path(settings.DATA_CACHE_DIR) +DOWNLOADS_DIR: Path = Path(settings.DATA_DOWNLOADS_DIR) +UPLOADS_DIR: Path = Path(settings.DATA_UPLOADS_DIR) + + +class DatasetApiView(AbstractDatasetView): + + @classmethod + def _cleanup_dir(cls, dir_path: Path) -> bool: + """ + Cleanup contents and remove a given directory, returning whether this was done or nothing exists at the path. + + Parameters + ---------- + dir_path : Path + Path to an expected directory. + + Returns + ------- + bool + ``True`` if nothing exists at this path, either because a directory was deleted or because nothing was + there; or ``False`` if there is an existing non-directory file at this path. + """ + # TODO: implement and then use in caching method and after zip file is created + if not dir_path.exists(): + return True + elif not dir_path.is_dir(): + return False + else: + results = True + for p in dir_path.glob('*'): + if p.is_dir(): + results = results and cls._cleanup_dir(p) + else: + p.unlink() + dir_path.rmdir() + return results + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def _cache_dataset_downloads(self, dataset_name: str, files: Optional[Set[str]] = None) -> Path: + """ + Cache contents (files) of the dataset to files in the local downloads cache. + + Parameters + ---------- + dataset_name : str + The name of the dataset of interest. + files : Optional[Set[str]] + An optional subset of the files in the dataset to be cached locally, with the default of ``None`` implying + all files within the dataset. + + Returns + ---------- + Path + The cache directory path containing the downloaded dataset data. 
+ """ + #returned_json = asyncio.get_event_loop().run_until_complete(self.get_dataset(dataset_name=dataset_name)) + #dataset_json = returned_json[dataset_name] + # TODO: maybe check to make sure dataset exists? + local_copy_dir = CACHE_DIR.joinpath(dataset_name) + if local_copy_dir.is_dir(): + self._cleanup_dir(local_copy_dir) + elif local_copy_dir.exists(): + local_copy_dir.unlink() + local_copy_dir.mkdir(parents=True) + # TODO: later devise something better for dealing with prefixes for emulated directory structure + #for minio_object in self.minio_client.list_objects(dataset_name): + logger.info("Retrieving a list of dataset files for {}".format(dataset_name)) + minio_client = self.factory_minio_client() + file_list = [obj.object_name for obj in minio_client.list_objects(dataset_name)] + logger.info("Downloading {} dataset files to GUI app server".format(len(file_list))) + for filename in file_list: + minio_client.fget_object(bucket_name=dataset_name, object_name=filename, + file_path=str(local_copy_dir.joinpath(filename))) + logger.info("Dataset {} locally cached".format(dataset_name)) + return local_copy_dir + + def _get_dataset_content_details(self, dataset_name: str): + result = asyncio.get_event_loop().run_until_complete(self.dataset_client.get_dataset_content_details(name=dataset_name)) + logger.info(result) + return JsonResponse({"contents": result}, status=200) + + def _delete_dataset(self, dataset_name: str) -> JsonResponse: + result = asyncio.get_event_loop().run_until_complete(self.dataset_client.delete_dataset(name=dataset_name)) + return JsonResponse({"successful": result}, status=200) + + def _get_dataset_download(self, request, *args, **kwargs): + dataset_name = request.GET.get("dataset_name", None) + local_dir = self._cache_dataset_downloads(dataset_name).resolve(strict=True) + logger.info("Caching data to {}".format(local_dir)) + zip_path = DOWNLOADS_DIR.joinpath('{}.zip'.format(dataset_name)) + if not DOWNLOADS_DIR.is_dir(): + 
DOWNLOADS_DIR.mkdir(parents=True) + logger.info("Creating zip file for dataset contents at {}".format(zip_path)) + with zipfile.ZipFile(zip_path, mode='w', compression=zipfile.ZIP_STORED) as zip_file: + for file in local_dir.glob('*'): + logger.info("Writing {} to zip file {}".format(file, zip_path)) + zip_file.write(file, file.relative_to(local_dir.parent)) + + logger.info("Dataset zip file {} fully created".format(zip_path)) + self._cleanup_dir(local_dir) + + # TODO: make sure downloading actually works + + #response = HttpResponse(zip_path.open(), mimetype='application/zip') + #return response + # TODO: later, figure out something to clean up these zip files + return JsonResponse({"zip_file": str(zip_path.relative_to(DOWNLOADS_DIR))}, status=200) + + def _get_datasets_json(self) -> JsonResponse: + serial_dataset_map = asyncio.get_event_loop().run_until_complete(self.get_datasets()) + return JsonResponse({"datasets": serial_dataset_map}, status=200) + + def _get_dataset_json(self, dataset_name: str) -> JsonResponse: + serial_dataset = asyncio.get_event_loop().run_until_complete(self.get_dataset(dataset_name=dataset_name)) + return JsonResponse({"dataset": serial_dataset[dataset_name]}, status=200) + + def _get_download(self, request, *args, **kwargs): + dataset_name = request.GET.get("dataset_name", None) + item_name = request.GET.get("item_name", None) + local_dir = self._cache_dataset_downloads(dataset_name=dataset_name, files={item_name}).resolve(strict=True) + download_subdir = DOWNLOADS_DIR.joinpath(dataset_name) + if not DOWNLOADS_DIR.is_dir(): + DOWNLOADS_DIR.mkdir(parents=True) + elif download_subdir.is_dir(): + self._cleanup_dir(download_subdir) + elif download_subdir.exists(): + download_subdir.unlink() + local_dir.rename(download_subdir) + + return JsonResponse({"dataset": dataset_name}, status=200) + + def get(self, request, *args, **kwargs): + request_type = request.GET.get("request_type", None) + if request_type == 'download_dataset': + return 
self._get_dataset_download(request) + if request_type == 'download_file': + return self._get_download(request) + elif request_type == 'datasets': + return self._get_datasets_json() + elif request_type == 'dataset': + return self._get_dataset_json(dataset_name=request.GET.get("name", None)) + elif request_type == 'contents': + return self._get_dataset_content_details(dataset_name=request.GET.get("name", None)) + if request_type == 'delete': + return self._delete_dataset(dataset_name=request.GET.get("name", None)) + + # TODO: finish + return JsonResponse({}, status=400) diff --git a/python/gui/MaaS/cbv/DatasetFileWebsocketFilelike.py b/python/gui/MaaS/cbv/DatasetFileWebsocketFilelike.py new file mode 100644 index 000000000..9e69409ad --- /dev/null +++ b/python/gui/MaaS/cbv/DatasetFileWebsocketFilelike.py @@ -0,0 +1,20 @@ +import asyncio +from typing import AnyStr +from dmod.client.request_clients import DatasetExternalClient + + +class DatasetFileWebsocketFilelike: + + def __init__(self, client: DatasetExternalClient, dataset_name: str, file_name: str): + self._client = client + self._dataset_name = dataset_name + self._file_name = file_name + self._read_index: int = 0 + + def read(self, blksize: int) -> AnyStr: + + result = asyncio.get_event_loop().run_until_complete( + self._client.download_item_block(dataset_name=self._dataset_name, item_name=self._file_name, + blk_start=self._read_index, blk_size=blksize)) + self._read_index += blksize + return result diff --git a/python/gui/MaaS/cbv/DatasetManagementForms.py b/python/gui/MaaS/cbv/DatasetManagementForms.py new file mode 100644 index 000000000..7566d0bee --- /dev/null +++ b/python/gui/MaaS/cbv/DatasetManagementForms.py @@ -0,0 +1,242 @@ +from django import forms +from enum import Enum +from functools import partial + +from dmod.core.meta_data import DataCategory, DataFormat +from django.conf import settings + +from .js_utils import start_end_time_validation + +# typing imports +from typing import Optional + +# 
form field type alias +# correspond to `dmod.core.meta_data.StandardDatasetIndex`` +def _time(start_time_id: str, end_time_id: str): + return partial( + forms.DateTimeField, + widget=forms.DateTimeInput( + attrs={ + "type": "datetime-local", + "onchange": start_end_time_validation(start_time_id, end_time_id), + } + ), + # TODO: this should be removed once we upgrade django versions >= 3.1 (tracked by #209) + input_formats=[settings.DATE_TIME_FORMAT], + ) + + +_Unknown = forms.CharField +_CatchmentId = forms.CharField +_DataId = forms.CharField +_HydrofabricId = forms.CharField +_Length = forms.IntegerField +_GlobalChecksum = forms.CharField +_ElementId = forms.CharField +_Files = partial( + forms.FileField, + widget=forms.ClearableFileInput( + attrs={ + 'multiple': True, + # filename cannot contain underscore (_) + "oninput": """((el) => { + const files = el.files; + + for (let {name} of files){ + // filenames cannot include _'s. + //if (name.includes('_')){ + + // see constraint validation API for more detail (https://developer.mozilla.org/en-US/docs/Web/API/Constraint_validation) + // el.setCustomValidity('Filename cannot contain underscores \"_\"'); + // return; + //} + + // valid input + el.setCustomValidity(''); + } + + })(this)""" + } + ), +) + + +class FormNameMixIn: + def form_name(self) -> str: + """returns class name of form""" + return type(self).__name__ + + +class DynamicFormMixIn: + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + for visible in self.visible_fields(): + # input field have id's of form: `id_{{field instance var name}}_{{form name}} + visible.field.widget.attrs["id"] = f"{visible.auto_id}_{self.form_name()}" + visible.field.widget.attrs["class"] = self.form_name() + visible.field.widget.attrs["style"] = "display: none;" + visible.field.widget.attrs["disabled"] = "true" + + +class DatasetForm(FormNameMixIn, forms.Form): + name = forms.CharField(max_length=100, label="Dataset Name") + category = 
forms.ChoiceField( + choices=[(f.name, f.name.title()) for f in DataCategory], + label="Dataset Category", + ) + data_format = forms.ChoiceField( + choices=[("---", "---")] + [(f.name, f.name) for f in DataFormat], + label="Data Format", + widget=forms.Select( + attrs={ + # when selection changes, unhide and enable the form fields and labels for the + # corresponding DataFormat. form fields and labels have an html class name of their + # DataFormat. i.e. + "onchange": """((name) => { + // remove previously active fields, if any + const active_fields = document.querySelectorAll('.active_field') + active_fields.forEach(el => { + + // disable field, hide it, and remove flag class, 'active_field' + el.setAttribute('disabled', true) + el.style.display = 'none' + el.classList.remove('active_field') + }) + + const els_with_class = document.querySelectorAll(`.${name}`) + els_with_class.forEach(el => { + + // enable field, hide it, and remove flag class, 'active_field' + el.removeAttribute('disabled') + el.style.display = 'block' + el.classList.add('active_field') + }) + })(this.value)""" + } + ), + ) + + +class AORC_CSV(DynamicFormMixIn, FormNameMixIn, forms.Form): + catchment_id = _CatchmentId() + start_time = _time("id_start_time_AORC_CSV", "id_end_time_AORC_CSV")( + label="Start Datetime" + ) + end_time = _time("id_start_time_AORC_CSV", "id_end_time_AORC_CSV")( + label="End Datetime" + ) + # TODO: note if end times are inclusive. + # TODO: note that all datetimes are naive UTC time. 
+ # help_text="", + # ) + files = _Files() + + +class NETCDF_FORCING_CANONICAL(DynamicFormMixIn, FormNameMixIn, forms.Form): + catchment_id = _CatchmentId() + start_time = _time( + "id_start_time_NETCDF_FORCING_CANONICAL", "id_end_time_NETCDF_FORCING_CANONICAL" + )(label="Start Datetime") + end_time = _time( + "id_start_time_NETCDF_FORCING_CANONICAL", "id_end_time_NETCDF_FORCING_CANONICAL" + )(label="End Datetime") + files = _Files() + + +class NETCDF_AORC_DEFAULT(DynamicFormMixIn, FormNameMixIn, forms.Form): + catchment_id = _CatchmentId() + start_time = _time( + "id_start_time_NETCDF_AORC_DEFAULT", "id_end_time_NETCDF_AORC_DEFAULT" + )(label="Start Datetime") + end_time = _time( + "id_start_time_NETCDF_AORC_DEFAULT", "id_end_time_NETCDF_AORC_DEFAULT" + )(label="End Datetime") + files = _Files() + + +class NGEN_OUTPUT(DynamicFormMixIn, FormNameMixIn, forms.Form): + catchment_id = _CatchmentId() + start_time = _time("id_start_time_NGEN_OUTPUT", "id_end_time_NGEN_OUTPUT")( + label="Start Datetime" + ) + end_time = _time("id_start_time_NGEN_OUTPUT", "id_end_time_NGEN_OUTPUT")( + label="End Datetime" + ) + data_id = _DataId() + files = _Files() + + +class NGEN_REALIZATION_CONFIG(DynamicFormMixIn, FormNameMixIn, forms.Form): + catchment_id = _CatchmentId() + start_time = _time( + "id_start_time_NGEN_REALIZATION_CONFIG", "id_end_time_NGEN_REALIZATION_CONFIG" + )(label="Start Datetime") + end_time = _time( + "id_start_time_NGEN_REALIZATION_CONFIG", "id_end_time_NGEN_REALIZATION_CONFIG" + )(label="End Datetime") + data_id = _DataId() + files = _Files() + + +class NGEN_GEOJSON_HYDROFABRIC(DynamicFormMixIn, FormNameMixIn, forms.Form): + catchment_id = _CatchmentId() + hydrofabric_id = _HydrofabricId() + data_id = _DataId() + files = _Files() + + +class NGEN_PARTITION_CONFIG(DynamicFormMixIn, FormNameMixIn, forms.Form): + data_id = _DataId() + hydrofabric_id = _HydrofabricId() + length = _Length() + files = _Files() + + +class BMI_CONFIG(DynamicFormMixIn, FormNameMixIn,
forms.Form): + global_checksum = _GlobalChecksum() + data_id = _DataId() + files = _Files() + + +class NWM_OUTPUT(DynamicFormMixIn, FormNameMixIn, forms.Form): + catchment_id = _CatchmentId() + start_time = _time("id_start_time_NWM_OUTPUT", "id_end_time_NWM_OUTPUT")( + label="Start Datetime" + ) + end_time = _time("id_start_time_NWM_OUTPUT", "id_end_time_NWM_OUTPUT")( + label="End Datetime" + ) + data_id = _DataId() + files = _Files() + + +class NWM_CONFIG(DynamicFormMixIn, FormNameMixIn, forms.Form): + element_id = _ElementId() + start_time = _time("id_start_time_NWM_CONFIG", "id_end_time_NWM_CONFIG")( + label="Start Datetime" + ) + end_time = _time("id_start_time_NWM_CONFIG", "id_end_time_NWM_CONFIG")( + label="End Datetime" + ) + data_id = _DataId() + files = _Files() + + +class DatasetFormatForm(Enum): + AORC_CSV = AORC_CSV + NETCDF_FORCING_CANONICAL = NETCDF_FORCING_CANONICAL + NETCDF_AORC_DEFAULT = NETCDF_AORC_DEFAULT + NGEN_OUTPUT = NGEN_OUTPUT + NGEN_REALIZATION_CONFIG = NGEN_REALIZATION_CONFIG + NGEN_GEOJSON_HYDROFABRIC = NGEN_GEOJSON_HYDROFABRIC + NGEN_PARTITION_CONFIG = NGEN_PARTITION_CONFIG + BMI_CONFIG = BMI_CONFIG + NWM_OUTPUT = NWM_OUTPUT + NWM_CONFIG = NWM_CONFIG + + @staticmethod + def get_form_from_name(name: str) -> Optional[forms.Form]: + try: + return DatasetFormatForm[name].value + except KeyError: + return None diff --git a/python/gui/MaaS/cbv/DatasetManagementView.py b/python/gui/MaaS/cbv/DatasetManagementView.py new file mode 100644 index 000000000..5087ceb36 --- /dev/null +++ b/python/gui/MaaS/cbv/DatasetManagementView.py @@ -0,0 +1,192 @@ +""" +Defines a view that may be used to configure a MaaS request +""" +import asyncio +import os.path + +from django.http import HttpRequest, HttpResponse +from django.shortcuts import render +from django.core.files.uploadedfile import UploadedFile, InMemoryUploadedFile, TemporaryUploadedFile +from django.conf import settings +from datetime import datetime + +import dmod.communication as communication 
+from dmod.core.meta_data import DataCategory, DataDomain, DataFormat, Serializable + +import logging +logger = logging.getLogger("gui_log") + +from .utils import extract_log_data +from .AbstractDatasetView import AbstractDatasetView +from .DatasetManagementForms import DatasetForm, DatasetFormatForm +from typing import List + +DT_FORMAT = settings.DATE_TIME_FORMAT + + +class DatasetManagementView(AbstractDatasetView): + + """ + A view used to configure a dataset management request or requests for transmitting dataset data. + """ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def _process_event_type(self, http_request: HttpRequest) -> communication.MessageEventType: + """ + Determine and return whether this request is for a ``DATASET_MANAGEMENT`` or ``DATA_TRANSMISSION`` event. + + Parameters + ---------- + http_request : HttpRequest + The raw HTTP request in question. + + Returns + ------- + communication.MessageEventType + Either ``communication.MessageEventType.DATASET_MANAGEMENT`` or + ``communication.MessageEventType.DATA_TRANSMISSION``. 
+ """ + # TODO: + raise NotImplementedError("{}._process_event_type not implemented".format(self.__class__.__name__)) + + def _create_dataset(self, name: str, category: str, data_format: str, *args, **kwargs) -> bool: + d_format = DataFormat.get_for_name(data_format) + if format is None: + return False + else: + try: + domain = DataDomain.factory_init_from_restriction_collections(d_format, **kwargs) + except Exception as e: + msg = 'Failed to create dataset {}: {} creating domain ({})'.format(name, e.__class__.__name__, str(e)) + logger.error(msg) + raise RuntimeError(msg) + return asyncio.get_event_loop().run_until_complete( + self.dataset_client.create_dataset(name=name, category=DataCategory.get_for_name(category), domain=domain)) + + def _upload_files_to_dataset(self, dataset_name: str, files: List[UploadedFile]) -> bool: + # TODO: (later) consider modifying files to account for DMOD-specific characteristics (e.g., file paths for + # inside worker containers) + minio_client = self.factory_minio_client() + result = True + for f in files: + if isinstance(f, TemporaryUploadedFile): + length = os.path.getsize(f.file.name) + else: + length = f.file.getbuffer().nbytes + result_obj = minio_client.put_object(bucket_name=dataset_name, object_name=f.name, data=f.file, + length=length) + # TODO: (later) try to do something based on result_obj.last_modified + result = result and result_obj.bucket_name == dataset_name and result_obj.object_name == f.name + return result + + def get(self, http_request: HttpRequest, *args, **kwargs) -> HttpResponse: + """ + The handler for 'get' requests. + + This will render the 'maas/dataset_management.html' template after retrieving necessary information to initially + populate the forms it displays. + + Parameters + ---------- + http_request : HttpRequest + The request asking to render this page. + args + kwargs + + Returns + ------- + A rendered page. 
+ """ + errors, warnings, info = extract_log_data(kwargs) + + # Gather map of serialized datasets, keyed by dataset name + serial_dataset_map = asyncio.get_event_loop().run_until_complete(self.get_datasets()) + serial_dataset_list = [serial_dataset_map[d] for d in serial_dataset_map] + + dataset_categories = [c.name.title() for c in DataCategory] + dataset_formats = [f.name for f in DataFormat] + + form = DatasetForm() + + payload = { + 'form': form, + 'dynamic_forms': [f.value() for f in DatasetFormatForm], + 'datasets': serial_dataset_list, + 'dataset_categories': dataset_categories, + 'dataset_formats': dataset_formats, + 'errors': errors, + 'info': info, + 'warnings': warnings + } + + return render(http_request, 'maas/dataset_management.html', payload) + + def post(self, http_request: HttpRequest, *args, **kwargs) -> HttpResponse: + """ + The handler for 'post' requests. + + This will attempt to submit the request and rerender the page like a 'get' request. + + Parameters + ---------- + http_request : HttpRequest + The request asking to render this page. + args + kwargs + + Returns + ------- + A rendered page. + """ + # Should get a list of file-type objects, with a ``name`` property and a ``file`` BytesIO property + files = http_request.FILES.getlist('files') + + csrf_token_key = 'csrfmiddlewaretoken' + + # name (dataset name), category, data_format, and any other applicable dynamic form items + # e.g., catchment_id, hydrofabric_id, data_id, etc. 
+ dataset_details = dict([(k, v) for k, v in http_request.POST.items() if k != csrf_token_key]) + dataset_name = dataset_details['name'] + + # TODO: consider reading files to validate/replace domain details from form + + # If present, parse catchment ids string to list of individual ids + if 'catchment_id' in dataset_details: + dataset_details['catchment_id'] = [s.strip() for s in dataset_details.pop('catchment_id').split(',')] + + # Fix keys for start and end times + if 'start_time' in dataset_details and 'end_time' in dataset_details: + start = datetime.strptime(dataset_details.pop('start_time'), settings.DATE_TIME_FORMAT) + end = datetime.strptime(dataset_details.pop('end_time'), settings.DATE_TIME_FORMAT) + dataset_details['time'] = {'start': start.strftime(Serializable.get_datetime_str_format()), + 'end': end.strftime(Serializable.get_datetime_str_format())} + elif 'start_time' in dataset_details or 'end_time' in dataset_details: + # TODO: figure out best way to handle this; for now ... 
+ raise RuntimeError('Cannot create a dataset of this format unless both a start and end time are given') + + was_created = self._create_dataset(**dataset_details) + + if not was_created: + err_msg = 'Could not create dataset {}'.format(dataset_name) + logger.error(err_msg) + http_response = self.get(http_request=http_request, errors=[err_msg], *args, **kwargs) + elif files is None or len(files) == 0: + info_msg = 'Created empty dataset {}'.format(dataset_name) + logger.info(info_msg) + http_response = self.get(http_request=http_request, info_msg=[info_msg], *args, **kwargs) + # With this condition test (if we get here), put files in dataset + elif not self._upload_files_to_dataset(dataset_name=dataset_name, files=files): + err_msg = 'Could not upload requested files to dataset {}'.format(dataset_name) + logger.error(err_msg) + http_response = self.get(http_request=http_request, errors=[err_msg], *args, **kwargs) + else: + info_msg = 'Created dataset {} with {} files uploaded'.format(dataset_name, len(files)) + logger.info(info_msg) + http_response = self.get(http_request=http_request, info_msg=[info_msg], *args, **kwargs) + + #for k, v in session_data.items(): + # http_response.set_cookie(k, v) + + return http_response diff --git a/python/gui/MaaS/cbv/MapView.py b/python/gui/MaaS/cbv/MapView.py index 398c95ea9..f96d6c85d 100644 --- a/python/gui/MaaS/cbv/MapView.py +++ b/python/gui/MaaS/cbv/MapView.py @@ -12,18 +12,22 @@ from django.shortcuts import render from django.conf import settings from rest_framework.views import APIView -PROJECT_ROOT = settings.BASE_DIR +#PROJECT_ROOT = settings.BASE_DIR +HYDROFABRICS_DIR = settings.HYDROFABRIC_ROOT +SUBSET_SERVICE_URL = settings.SUBSET_SERVICE_URL import json from pathlib import Path from .. import datapane from ..
import configuration +import requests import logging logger = logging.getLogger("gui_log") _resolution_regex = re.compile("(.+) \((.+)\)") -def _build_fabric_path(fabric, type): + +def _build_fabric_path(fabric, fabric_type): """ build a qualified path from the hydrofabric name and type """ @@ -36,28 +40,46 @@ def _build_fabric_path(fabric, type): name = fabric resolution='' - path = Path(PROJECT_ROOT, 'static', 'ngen', 'hydrofabric', name, resolution, type+'_data.geojson') + path = Path(HYDROFABRICS_DIR, name, resolution, fabric_type + '_data.geojson') + #path = Path(HYDROFABRICS_DIR, name, fabric_type + '_data.geojson') return path + class Fabrics(APIView): + + def _get_geojson_in_bounds(self, fabric_name: str, feature_type:str, min_x: float, min_y: float, max_x: float, + max_y: float) -> dict: + url_path = '{}/subset/bounds'.format(SUBSET_SERVICE_URL) + request_data = {'fabric_name': fabric_name, 'feature_type': feature_type, 'min_x': min_x, 'min_y': min_y, + 'max_x': max_x, 'max_y': max_y} + subset_response = requests.post(url=url_path, data=request_data) + return subset_response.json() + def get(self, request: HttpRequest, fabric: str = None) -> typing.Optional[JsonResponse]: if fabric is None: fabric = 'example' - type = request.GET.get('fabric_type', 'catchment') - if not type: - type="catchment" + + fabric_type = request.GET.get('fabric_type', 'catchment') + min_x = request.GET.get('min_x', None) + min_y = request.GET.get('min_y', None) + max_x = request.GET.get('max_x', None) + max_y = request.GET.get('max_y', None) + + if not fabric_type: + fabric_type = "catchment" - path = _build_fabric_path(fabric, type) + path = _build_fabric_path(fabric, fabric_type) if path is None: - return None - - with open(path) as fp: - data = json.load(fp) - return JsonResponse(data) + return JsonResponse(self._get_geojson_in_bounds(fabric_name=fabric, feature_type=fabric_type, min_x=min_x, + min_y=min_y, max_x=max_x, max_y=max_y)) + else: + with open(path) as fp: + data = 
json.load(fp) + return JsonResponse(data) class FabricNames(APIView): - _fabric_dir = Path(PROJECT_ROOT, 'static', 'ngen', 'hydrofabric') + _fabric_dir = Path(HYDROFABRICS_DIR) def get(self, request: HttpRequest) -> JsonResponse: names = [] @@ -93,6 +115,9 @@ def get(self, request: HttpRequest) -> JsonResponse: class MapView(View): + # TODO: update view/template to only do things for low enough zoom levels. + # TODO: update view/template to get features inside bounding box + """ A view used to render the map """ diff --git a/python/gui/MaaS/cbv/crosswalk.py b/python/gui/MaaS/cbv/crosswalk.py index 272cffbf5..51acf2da9 100644 --- a/python/gui/MaaS/cbv/crosswalk.py +++ b/python/gui/MaaS/cbv/crosswalk.py @@ -15,6 +15,7 @@ import json import re from pathlib import Path +HYDROFABRICS_DIR = settings.HYDROFABRIC_ROOT from .. import datapane from .. import configuration @@ -39,6 +40,10 @@ def _build_fabric_path(fabric, type=""): logger.debug("fabric path:", fabric, name, resolution) path = Path(PROJECT_ROOT, 'static', 'ngen', 'hydrofabric', name, resolution, type+'crosswalk.json') + + #path = Path(HYDROFABRICS_DIR, name, resolution, type+'crosswalk.json') + #path = Path(HYDROFABRICS_DIR, name, 'crosswalk.json') + if (path == None): return JsonResponse({}) return path @@ -46,11 +51,11 @@ def _build_fabric_path(fabric, type=""): class Crosswalk(APIView): def get(self, request: HttpRequest, crosswalk: str = None) -> typing.Optional[JsonResponse]: - logger.debug("crosswalk path:", crosswalk) + #logger.debug("crosswalk path:", crosswalk) if crosswalk is None: return JsonResponse({}) - logger.debug("crosswalk path:", crosswalk) + #logger.debug("crosswalk path:", crosswalk) path = _build_fabric_path(crosswalk) if path is None: diff --git a/python/gui/MaaS/cbv/execution.py b/python/gui/MaaS/cbv/execution.py index 49293e0dd..2968e789a 100644 --- a/python/gui/MaaS/cbv/execution.py +++ b/python/gui/MaaS/cbv/execution.py @@ -10,7 +10,7 @@ from django.conf import settings -from 
django.http import HttpRequest +from django.http import HttpRequest, QueryDict from django.http import JsonResponse from rest_framework.views import APIView @@ -19,6 +19,8 @@ from .. import processors from ..client import JobRequestClient +from datetime import datetime +import re LOGGER = logging.getLogger("gui_log") @@ -64,6 +66,77 @@ class Execute(APIView): """ API view for executing a configured model on a specified framework """ + + def _parse_post_keys(self, data: dict, feature_key: str, formulation_key: str) -> dict: + applicable = dict() + + applicable['formulation-type'] = formulation_key + applicable['forcing-pattern'] = data['{}-forcing-pattern'.format(feature_key)] + + properties_to_retype = dict() + + #property_key_pattern = re.compile(r'(' + feature_key + '):::([^:].+[^:]):::([^:].+[^:])(::([^:].+))?') + property_key_pattern = re.compile(r'(' + feature_key + '):::(.+?):::(.+)') + meta_property_subpattern = re.compile(r'(.+):::(.+)') + + for k, value in data.items(): + if value == '': + continue + + match_obj = property_key_pattern.match(k) + if match_obj is None: + continue + + matched_feature = match_obj.group(1) + matched_form = match_obj.group(2) + + # Skip if no match, or if either the matched feature or formulation is not of interest + if matched_feature != feature_key or matched_form != formulation_key: + continue + + prop_meta_match_obj = meta_property_subpattern.match(match_obj.group(3)) + if prop_meta_match_obj is None: + applicable[match_obj.group(3)] = value + elif prop_meta_match_obj.group(2) == 'config-type': + properties_to_retype[prop_meta_match_obj.group(1)] = value + + for prop_key, type_str in properties_to_retype.items(): + if type_str.lower() == 'text' or prop_key not in applicable: + continue + if type_str.lower() == 'number': + applicable[prop_key] = float(applicable[prop_key]) + elif type_str.lower() == 'numbers': + applicable[prop_key] = [float(s.strip()) for s in applicable[prop_key].split(',')] + elif type_str.lower() == 
'list': + applicable[prop_key] = [s.strip() for s in applicable[prop_key].split(',')] + + return applicable + + def _parse_config_request(self, post_data: QueryDict) -> dict: + features = post_data['features'].split('|') + formulations_map = json.loads(post_data['formulations']) + + global_formulation_key = formulations_map[post_data['global-formulation-type']] + + # TODO: add other properties besides formulations configs (e.g., list of features) + config_properties = dict() + config_properties['features'] = features + config_properties['cpu_count'] = post_data['requested-cpu-count'] + config_properties['start'] = datetime.strptime(post_data['start-time'], settings.DATE_TIME_FORMAT) + config_properties['end'] = datetime.strptime(post_data['end-time'], settings.DATE_TIME_FORMAT) + feature_configs = dict() + feature_configs['global'] = self._parse_post_keys(data=post_data, feature_key='global', formulation_key=global_formulation_key) + + for feature in features: + formulation_type_key = post_data['{}-formulation-type'.format(feature)] + if formulation_type_key == 'global': + continue + formulation_type = formulations_map[formulation_type_key] + feature_configs[feature] = self._parse_post_keys(data=post_data, feature_key=feature, formulation_key=formulation_type) + + config_properties['formulations'] = feature_configs + return config_properties + def post(self, request: HttpRequest): """ The post handler @@ -100,7 +173,17 @@ def post(self, request: HttpRequest): # Allow the caller to determine whether or not a new session should be created force_new_session = request.POST.get("force_new_session", False) + framework_name = request.POST.get('framework', None) + + if framework_name == 'ngen': + parsed_config = self._parse_config_request(request.POST) + # TODO: implement to process config details from GUI, creating any necessary realization config datasets + required_datasets_names = self._process_ngen_configuration_into_datasets(parsed_config) + else: + raise 
RuntimeError('Unsupported framework {}'.format(framework_name)) + # Issue the request + # TODO: modify the way client makes requests to be regular client response: ExternalRequestResponse = client.make_maas_request(request, force_new_session) # Throw an error if the request could not be successfully issued @@ -113,6 +196,7 @@ def post(self, request: HttpRequest): # Set a cookie if a job was started and we have the id (rely on client to manage multiple job ids) if response is not None and 'job_id' in response.data: + # TODO: make sure that the client displays this job id somehow http_response.set_cookie('new_job_id', response.data['job_id']) # Set cookies if a new session was acquired diff --git a/python/gui/MaaS/cbv/js_utils.py b/python/gui/MaaS/cbv/js_utils.py new file mode 100644 index 000000000..27ece4f7b --- /dev/null +++ b/python/gui/MaaS/cbv/js_utils.py @@ -0,0 +1,36 @@ +def start_end_time_validation(start_time_id: str, end_time_id: str) -> str: + """Applies validity testing to start and end time input DOM elements of type `datetime-local`. If + start time is after end time or end time is prior to start, an input validity message is tagged + on the `start_time_id` element.
+ """ + return f"""((start_time_id, end_time_id) => {{ + + let start_time_el = document.getElementById(start_time_id); + let end_time_el = document.getElementById(end_time_id); + + if (start_time_el == null){{ + console.error(`invalid start_time_id: ${{start_time_id}}`) + return; + }} + + if (end_time_el == null){{ + console.error(`invalid end_time_id: ${{end_time_id}}`) + return; + }} + + if (start_time_el.value === '' || end_time_el.value === ''){{ + // missing time value + return; + }} + + const start_time = new Date(start_time_el.value); + const end_time = new Date(end_time_el.value); + + if (start_time.getTime() > end_time.getTime()){{ + start_time_el.setCustomValidity('Start time after end time'); + return; + }} + + // reset + start_time_el.setCustomValidity(''); + }})('{start_time_id}', '{end_time_id}')""" diff --git a/python/gui/MaaS/migrations/0001_initial.py b/python/gui/MaaS/migrations/0001_initial.py index a78db615f..2d20a8a93 100644 --- a/python/gui/MaaS/migrations/0001_initial.py +++ b/python/gui/MaaS/migrations/0001_initial.py @@ -10,9 +10,9 @@ class Migration(migrations.Migration): def create_superuser(apps, schema_editor): from django.contrib.auth.models import User - SU_NAME = os.environ.get('DMOD_SU_NAME') - SU_EMAIL = os.environ.get('DMOD_SU_EMAIL') - SU_PASSWORD = os.environ.get('DMOD_SU_PASSWORD') + SU_NAME = os.environ.get('DMOD_SU_NAME').strip() + SU_EMAIL = os.environ.get('DMOD_SU_EMAIL').strip() + SU_PASSWORD = os.environ.get('DMOD_SU_PASSWORD').strip() superuser = User.objects.create_superuser( username=SU_NAME, diff --git a/python/gui/MaaS/migrations/0002_formulation_formulationparameter.py b/python/gui/MaaS/migrations/0002_formulation_formulationparameter.py index d9fe516b6..b795729a1 100644 --- a/python/gui/MaaS/migrations/0002_formulation_formulationparameter.py +++ b/python/gui/MaaS/migrations/0002_formulation_formulationparameter.py @@ -28,6 +28,7 @@ class Migration(migrations.Migration): ('name', models.CharField(help_text='The 
name of the parameter for the formulation', max_length=50)), ('description', models.CharField(blank=True, help_text='How this parameter affects the formulation', max_length=200, null=True)), ('value_type', models.CharField(choices=[('number', 'Number'), ('text', 'Text'), ('date', 'Date'), ('datetime-local', 'Date and Time')], help_text='The type of the ', max_length=50)), + ('config_type', models.CharField(choices=[('number', 'Number'), ('text', 'Text'), ('date', 'Date'), ('datetime-local', 'Date and Time'), ('numbers', 'Numbers'), ('list', 'List'), ('dataset', 'Dataset')], help_text='The type in realization configurations of the ', max_length=50)), ('group', models.CharField(blank=True, help_text='A sub-group to which this parameter belongs', max_length=50, null=True)), ('is_list', models.BooleanField(default=False, help_text='Whether or not this variable should be a list')), ('minimum', models.FloatField(blank=True, help_text='The minimum possible numerical value for this parameter', null=True)), diff --git a/python/gui/MaaS/migrations/0003_formulation_records.py b/python/gui/MaaS/migrations/0003_formulation_records.py new file mode 100644 index 000000000..bcf936907 --- /dev/null +++ b/python/gui/MaaS/migrations/0003_formulation_records.py @@ -0,0 +1,82 @@ +from django.db import migrations + + +def create_premade_formulations(apps, schema_editor): + + Formulation = apps.get_model('MaaS', 'Formulation') + FormulationParameter = apps.get_model('MaaS', 'FormulationParameter') + + raw_form_details = [ + ('CFE', 'External BMI module implementation of CFE.'), + ('Multi::Noah_OWP::CFE', 'Combination of external Noah OWP Modular and CFE BMI modules.'), + ('Multi::Noah_OWP::PET::CFE', 'Combination of external Noah OWP Modular, PET and CFE BMI modules.') + ] + formulations = dict([(n, Formulation.objects.create(name=n, description=d)) for n, d in raw_form_details]) + + opt_param_desc = 'Optional value to use for {} module {} parameter' + + 
FormulationParameter.objects.bulk_create([ + #FormulationParameter(name='surface_partitioning_scheme', description='Scheme for surface runoff partitioning', value_type='text', default_value='Schaake', formulation=formulations['CFE']), + + # CFE params + FormulationParameter(name='BMI Config Dataset', group='CFE', description='Name of dataset containing required BMI initialization files', value_type='text', config_type='dataset', formulation=formulations['CFE']), + FormulationParameter(name='BMI Init File Pattern', group='CFE', description='The name or pattern for BMI initialization files', value_type='text', config_type='text', default_value='{{id}}_config.ini', formulation=formulations['CFE']), + FormulationParameter(name='CFE::satdk', group='CFE', description=opt_param_desc.format('CFE', 'satdk'), value_type='number', config_type='number', formulation=formulations['CFE']), + FormulationParameter(name='CFE::maxsmc', group='CFE', description=opt_param_desc.format('CFE', 'maxsmc'), value_type='number', config_type='number', formulation=formulations['CFE']), + FormulationParameter(name='CFE::slope', group='CFE', description=opt_param_desc.format('CFE', 'slope'), value_type='number', config_type='number', formulation=formulations['CFE']), + FormulationParameter(name='CFE::b', group='CFE', description=opt_param_desc.format('CFE', 'b'), value_type='number', config_type='number', formulation=formulations['CFE']), + FormulationParameter(name='CFE::multiplier', group='CFE', description=opt_param_desc.format('CFE', 'multiplier'), value_type='number', config_type='number', formulation=formulations['CFE']), + FormulationParameter(name='CFE::Klf', group='CFE', description=opt_param_desc.format('CFE', 'Klf'), value_type='number', config_type='number', formulation=formulations['CFE']), + FormulationParameter(name='CFE::Kn', group='CFE', description=opt_param_desc.format('CFE', 'Kn'), value_type='number', config_type='number', formulation=formulations['CFE']), + 
FormulationParameter(name='CFE::Cgw', group='CFE', description=opt_param_desc.format('CFE', 'Cgw'), value_type='number', config_type='number', formulation=formulations['CFE']), + FormulationParameter(name='CFE::expon', group='CFE', description=opt_param_desc.format('CFE', 'expon'), value_type='number', config_type='number', formulation=formulations['CFE']), + FormulationParameter(name='CFE::max_gw_storage', group='CFE', description=opt_param_desc.format('CFE', 'max_gw_storage'), value_type='number', config_type='number', formulation=formulations['CFE']), + + # Multi::Noah_OWP::CFE params + FormulationParameter(name='Noah_OWP::BMI Config Dataset', group='Noah_OWP', description='Name of dataset containing required BMI initialization files for Noah OWP', value_type='text', config_type='dataset', formulation=formulations['Multi::Noah_OWP::CFE']), + FormulationParameter(name='Noah_OWP::BMI Init File Pattern', group='Noah_OWP', description='The name or pattern for Noah OWP BMI initialization files', value_type='text', config_type='text', default_value='noah-owp-modular-init-{{id}}.namelist.input', formulation=formulations['Multi::Noah_OWP::CFE']), + FormulationParameter(name='CFE::BMI Config Dataset', group='CFE', description='Name of dataset containing required BMI initialization files for CFE', value_type='text', config_type='dataset', formulation=formulations['Multi::Noah_OWP::CFE']), + FormulationParameter(name='CFE::BMI Init File Pattern', group='CFE', description='The name or pattern for CFE BMI initialization files', value_type='text', config_type='text', default_value='{{id}}_config.ini', formulation=formulations['Multi::Noah_OWP::CFE']), + FormulationParameter(name='CFE::satdk', group='CFE', description=opt_param_desc.format('CFE', 'satdk'), value_type='number', config_type='number', formulation=formulations['Multi::Noah_OWP::CFE']), + FormulationParameter(name='CFE::maxsmc', group='CFE', description=opt_param_desc.format('CFE', 'maxsmc'), value_type='number', 
config_type='number', formulation=formulations['Multi::Noah_OWP::CFE']), + FormulationParameter(name='CFE::slope', group='CFE', description=opt_param_desc.format('CFE', 'slope'), value_type='number', config_type='number', formulation=formulations['Multi::Noah_OWP::CFE']), + FormulationParameter(name='CFE::b', group='CFE', description=opt_param_desc.format('CFE', 'b'), value_type='number', config_type='number', formulation=formulations['Multi::Noah_OWP::CFE']), + FormulationParameter(name='CFE::multiplier', group='CFE', description=opt_param_desc.format('CFE', 'multiplier'), value_type='number', config_type='number', formulation=formulations['Multi::Noah_OWP::CFE']), + FormulationParameter(name='CFE::Klf', group='CFE', description=opt_param_desc.format('CFE', 'Klf'), value_type='number', config_type='number', formulation=formulations['Multi::Noah_OWP::CFE']), + FormulationParameter(name='CFE::Kn', group='CFE', description=opt_param_desc.format('CFE', 'Kn'), value_type='number', config_type='number', formulation=formulations['Multi::Noah_OWP::CFE']), + FormulationParameter(name='CFE::Cgw', group='CFE', description=opt_param_desc.format('CFE', 'Cgw'), value_type='number', config_type='number', formulation=formulations['Multi::Noah_OWP::CFE']), + FormulationParameter(name='CFE::expon', group='CFE', description=opt_param_desc.format('CFE', 'expon'), value_type='number', config_type='number', formulation=formulations['Multi::Noah_OWP::CFE']), + FormulationParameter(name='CFE::max_gw_storage', group='CFE', description=opt_param_desc.format('CFE', 'max_gw_storage'), value_type='number', config_type='number', formulation=formulations['Multi::Noah_OWP::CFE']), + + # Multi::Noah_OWP::PET::CFE params + FormulationParameter(name='Noah_OWP::BMI Config Dataset', group='Noah_OWP', description='Name of dataset containing required BMI initialization files for Noah OWP', value_type='text', config_type='dataset', formulation=formulations['Multi::Noah_OWP::PET::CFE']), + 
FormulationParameter(name='Noah_OWP::BMI Init File Pattern', group='Noah_OWP', description='The name or pattern for Noah OWP BMI initialization files', value_type='text', config_type='text', default_value='noah-owp-modular-init-{{id}}.namelist.input', formulation=formulations['Multi::Noah_OWP::PET::CFE']), + FormulationParameter(name='PET::BMI Config Dataset', group='PET', description='Name of dataset containing required BMI initialization files for PET', value_type='text', config_type='dataset', formulation=formulations['Multi::Noah_OWP::PET::CFE']), + FormulationParameter(name='PET::BMI Init File Pattern', group='PET', description='The name or pattern for PET BMI initialization files', value_type='text', config_type='text', default_value='{{id}}_bmi_config.ini', formulation=formulations['Multi::Noah_OWP::PET::CFE']), + FormulationParameter(name='CFE::BMI Config Dataset', group='CFE', description='Name of dataset containing required BMI initialization files for CFE', value_type='text', config_type='dataset', formulation=formulations['Multi::Noah_OWP::PET::CFE']), + FormulationParameter(name='CFE::BMI Init File Pattern', group='CFE', description='The name or pattern for CFE BMI initialization files', value_type='text', config_type='text', default_value='{{id}}_bmi_config.ini', formulation=formulations['Multi::Noah_OWP::PET::CFE']), + FormulationParameter(name='CFE::satdk', group='CFE', description=opt_param_desc.format('CFE', 'satdk'), value_type='number', config_type='number', formulation=formulations['Multi::Noah_OWP::PET::CFE']), + FormulationParameter(name='CFE::maxsmc', group='CFE', description=opt_param_desc.format('CFE', 'maxsmc'), value_type='number', config_type='number', formulation=formulations['Multi::Noah_OWP::PET::CFE']), + FormulationParameter(name='CFE::slope', group='CFE', description=opt_param_desc.format('CFE', 'slope'), value_type='number', config_type='number', formulation=formulations['Multi::Noah_OWP::PET::CFE']), + 
FormulationParameter(name='CFE::b', group='CFE', description=opt_param_desc.format('CFE', 'b'), value_type='number', config_type='number', formulation=formulations['Multi::Noah_OWP::PET::CFE']), + FormulationParameter(name='CFE::multiplier', group='CFE', description=opt_param_desc.format('CFE', 'multiplier'), value_type='number', config_type='number', formulation=formulations['Multi::Noah_OWP::PET::CFE']), + FormulationParameter(name='CFE::Klf', group='CFE', description=opt_param_desc.format('CFE', 'Klf'), value_type='number', config_type='number', formulation=formulations['Multi::Noah_OWP::PET::CFE']), + FormulationParameter(name='CFE::Kn', group='CFE', description=opt_param_desc.format('CFE', 'Kn'), value_type='number', config_type='number', formulation=formulations['Multi::Noah_OWP::PET::CFE']), + FormulationParameter(name='CFE::Cgw', group='CFE', description=opt_param_desc.format('CFE', 'Cgw'), value_type='number', config_type='number', formulation=formulations['Multi::Noah_OWP::PET::CFE']), + FormulationParameter(name='CFE::expon', group='CFE', description=opt_param_desc.format('CFE', 'expon'), value_type='number', config_type='number', formulation=formulations['Multi::Noah_OWP::PET::CFE']), + FormulationParameter(name='CFE::max_gw_storage', group='CFE', description=opt_param_desc.format('CFE', 'max_gw_storage'), value_type='number', config_type='number', formulation=formulations['Multi::Noah_OWP::PET::CFE']) + + ]) + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('MaaS', '0002_formulation_formulationparameter'), + ] + + operations = [ + migrations.RunPython(create_premade_formulations) + ] diff --git a/python/gui/MaaS/migrations/0004_formulation_records_2.py b/python/gui/MaaS/migrations/0004_formulation_records_2.py new file mode 100644 index 000000000..753bb9a4e --- /dev/null +++ b/python/gui/MaaS/migrations/0004_formulation_records_2.py @@ -0,0 +1,48 @@ +from django.db import migrations + + +def 
create_premade_formulations(apps, schema_editor): + + Formulation = apps.get_model('MaaS', 'Formulation') + FormulationParameter = apps.get_model('MaaS', 'FormulationParameter') + + raw_form_details = [ + ('Multi::PET::CFE', 'Combination of external PET and CFE BMI modules.') + ] + formulations = dict([(n, Formulation.objects.create(name=n, description=d)) for n, d in raw_form_details]) + + opt_param_desc = 'Optional value to use for {} module {} parameter' + + FormulationParameter.objects.bulk_create([ + #FormulationParameter(name='surface_partitioning_scheme', description='Scheme for surface runoff partitioning', value_type='text', default_value='Schaake', formulation=formulations['CFE']), + + # Multi::PET::CFE params + FormulationParameter(name='PET::BMI Config Dataset', group='PET', description='Name of dataset containing required BMI initialization files for PET', value_type='text', config_type='dataset', formulation=formulations['Multi::PET::CFE']), + FormulationParameter(name='PET::BMI Init File Pattern', group='PET', description='The name or pattern for PET BMI initialization files', value_type='text', config_type='text', default_value='{{id}}_bmi_config.ini', formulation=formulations['Multi::PET::CFE']), + FormulationParameter(name='CFE::BMI Config Dataset', group='CFE', description='Name of dataset containing required BMI initialization files for CFE', value_type='text', config_type='dataset', formulation=formulations['Multi::PET::CFE']), + FormulationParameter(name='CFE::BMI Init File Pattern', group='CFE', description='The name or pattern for CFE BMI initialization files', value_type='text', config_type='text', default_value='{{id}}_bmi_config.ini', formulation=formulations['Multi::PET::CFE']), + FormulationParameter(name='CFE::satdk', group='CFE', description=opt_param_desc.format('CFE', 'satdk'), value_type='number', config_type='number', formulation=formulations['Multi::PET::CFE']), + FormulationParameter(name='CFE::maxsmc', group='CFE', 
description=opt_param_desc.format('CFE', 'maxsmc'), value_type='number', config_type='number', formulation=formulations['Multi::PET::CFE']), + FormulationParameter(name='CFE::slope', group='CFE', description=opt_param_desc.format('CFE', 'slope'), value_type='number', config_type='number', formulation=formulations['Multi::PET::CFE']), + FormulationParameter(name='CFE::b', group='CFE', description=opt_param_desc.format('CFE', 'b'), value_type='number', config_type='number', formulation=formulations['Multi::PET::CFE']), + FormulationParameter(name='CFE::multiplier', group='CFE', description=opt_param_desc.format('CFE', 'multiplier'), value_type='number', config_type='number', formulation=formulations['Multi::PET::CFE']), + FormulationParameter(name='CFE::Klf', group='CFE', description=opt_param_desc.format('CFE', 'Klf'), value_type='number', config_type='number', formulation=formulations['Multi::PET::CFE']), + FormulationParameter(name='CFE::Kn', group='CFE', description=opt_param_desc.format('CFE', 'Kn'), value_type='number', config_type='number', formulation=formulations['Multi::PET::CFE']), + FormulationParameter(name='CFE::Cgw', group='CFE', description=opt_param_desc.format('CFE', 'Cgw'), value_type='number', config_type='number', formulation=formulations['Multi::PET::CFE']), + FormulationParameter(name='CFE::expon', group='CFE', description=opt_param_desc.format('CFE', 'expon'), value_type='number', config_type='number', formulation=formulations['Multi::PET::CFE']), + FormulationParameter(name='CFE::max_gw_storage', group='CFE', description=opt_param_desc.format('CFE', 'max_gw_storage'), value_type='number', config_type='number', formulation=formulations['Multi::PET::CFE']) + + ]) + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('MaaS', '0003_formulation_records'), + ] + + operations = [ + migrations.RunPython(create_premade_formulations) + ] diff --git a/python/gui/MaaS/static/common/css/base.css 
b/python/gui/MaaS/static/common/css/base.css index f8e2fff1f..b4311fd95 100644 --- a/python/gui/MaaS/static/common/css/base.css +++ b/python/gui/MaaS/static/common/css/base.css @@ -4,7 +4,7 @@ background-size: 100px 100px; background-position-x: 10px; height: fit-content; - background-color: #33ade0; + background-color: #0a5190; } #base-banner h1 { @@ -38,7 +38,7 @@ input[type="checkbox"] + label { .MaaS-btn { margin: 10px 0px; color: white; - background-color: #E06633; + background-color: #900f0f; border: none; font-weight: bold; height: 40px; @@ -47,7 +47,7 @@ input[type="checkbox"] + label { } .MaaS-btn:hover { - background-color: #e7875f; + background-color: #c92e2e; } fieldset { @@ -58,7 +58,7 @@ fieldset { legend { padding: 3px; color: white; - background-color: #33ade0; + background-color: #0a5190; font-weight: bold; border: none; width: fit-content; @@ -67,12 +67,12 @@ legend { details summary { font-weight: bold; padding: 10px; - background-color: #33ade0; + background-color: #0a5190; color: white; border: none; cursor: pointer; } summary:hover, summary.summary-hover { - background-color: #E06633; + background-color: #900f0f; } diff --git a/python/gui/MaaS/static/common/js/map.js b/python/gui/MaaS/static/common/js/map.js index 1bb3e0508..0bc1f1eab 100644 --- a/python/gui/MaaS/static/common/js/map.js +++ b/python/gui/MaaS/static/common/js/map.js @@ -51,6 +51,10 @@ startup_scripts.push( function(){ mymap = L.map('mapid').setView(centerLine, zoom); + //mymap.on('zoomend', function() { + // loadFabric('zoomend'); + //}); + L.tileLayer(mapUrl, { maxZoom: maxZoom, attribution: attribution, @@ -152,6 +156,11 @@ function plotMapLayers(featureDocuments, map) { function propertiesToHTML(geojson, xwalk) { var properties = geojson.properties; var markup = ""; + + if (!("id" in geojson) && "id" in properties) { + geojson.id = properties.id; + } + if ("Name" in properties) { markup += "