From 3aedebc9a9fc79bfcb73484069c17a7bdc738a44 Mon Sep 17 00:00:00 2001 From: Nicole Tregoning Date: Tue, 21 Oct 2025 14:18:55 -0400 Subject: [PATCH 1/3] mkdocs-macros-plugin used to replace DANDI and https://dandiarchive.org with variables. Enables vendorization of docs --- docs/api/dandi-client.md | 18 +++++----- docs/api/rest-api.md | 8 ++--- .../creating-dandi-instance/dandi-archive.md | 30 ++++++++-------- .../dandi-authentication.md | 10 +++--- .../creating-dandi-instance/dandi-cli.md | 14 ++++---- .../creating-dandi-instance/dandi-hub.md | 4 +-- .../dandi-infrastructure.md | 24 ++++++------- .../creating-dandi-instance/index.md | 6 ++-- .../initialize-vendors.md | 14 ++++---- docs/developer-guide/developer-notes.md | 34 +++++++++---------- .../integrate-external-services.md | 22 ++++++------ docs/developer-guide/system-architecture.md | 8 ++--- docs/getting-started/creating-account.md | 14 ++++---- docs/getting-started/dandi-ecosystem.md | 20 +++++------ docs/getting-started/data-standards/nwb.md | 2 +- docs/index.md | 12 +++---- docs/introduction.md | 18 +++++----- docs/terms-policies/policies.md | 22 ++++++------ docs/terms-policies/terms.md | 28 +++++++-------- .../contributing-notebook.md | 12 +++---- .../converting-data/index.md | 4 +-- .../converting-data/nwb/nwb-guide.md | 4 +-- docs/user-guide-sharing/creating-dandiset.md | 12 +++---- docs/user-guide-sharing/dandiset-metadata.md | 2 +- docs/user-guide-sharing/data-licenses.md | 2 +- .../publishing-dandisets.md | 8 ++--- docs/user-guide-sharing/uploading-data.md | 14 ++++---- docs/user-guide-sharing/validating-files.md | 2 +- .../accessing-data/downloading.md | 32 ++++++++--------- .../accessing-data/external-services.md | 8 ++--- docs/user-guide-using/accessing-data/index.md | 16 ++++----- .../accessing-data/streaming.md | 4 +-- docs/user-guide-using/citing-dandisets.md | 6 ++-- docs/user-guide-using/dandi-hub.md | 20 +++++------ docs/user-guide-using/exploring-dandisets.md | 4 +-- mkdocs.yml | 4 +++ 
requirements.txt | 1 + 37 files changed, 234 insertions(+), 229 deletions(-) diff --git a/docs/api/dandi-client.md b/docs/api/dandi-client.md index a84fc612..4b9875f3 100644 --- a/docs/api/dandi-client.md +++ b/docs/api/dandi-client.md @@ -1,10 +1,10 @@ -# DANDI Client +# {{ instance.name }} Client -The DANDI Client is a Python library and command-line tool for interacting with the DANDI Archive. It provides functionality for downloading, validating, organizing, and uploading data to and from the DANDI Archive. +The {{ instance.name }} Client is a Python library and command-line tool for interacting with the {{ instance.name }} Archive. It provides functionality for downloading, validating, organizing, and uploading data to and from the {{ instance.name }} Archive. ## Installation -You can install the DANDI Client using pip: +You can install the {{ instance.name }} Client using pip: ```bash pip install dandi @@ -12,20 +12,20 @@ pip install dandi ## Documentation -The full documentation for the DANDI Client is available at [https://dandi.readthedocs.io/](https://dandi.readthedocs.io/). +The full documentation for the {{ instance.name }} Client is available at [https://dandi.readthedocs.io/](https://dandi.readthedocs.io/). ## Key Features -- Download data from the DANDI Archive +- Download data from the {{ instance.name }} Archive - Validate NWB files -- Organize data for upload to the DANDI Archive -- Upload data to the DANDI Archive +- Organize data for upload to the {{ instance.name }} Archive +- Upload data to the {{ instance.name }} Archive - Search for Dandisets - Manage Dandisets and their metadata ## Python API -The DANDI Client provides a Python API for programmatic interaction with the DANDI Archive. Here's a simple example of using the API to download a Dandiset: +The {{ instance.name }} Client provides a Python API for programmatic interaction with the {{ instance.name }} Archive. 
Here's a simple example of using the API to download a Dandiset: ```python from dandi.dandiapi import DandiAPIClient @@ -44,7 +44,7 @@ For more information on the Python API, see the [API documentation](https://dand ## Command-Line Interface -The DANDI Client also provides a command-line interface for interacting with the DANDI Archive. Here are some common commands: +The {{ instance.name }} Client also provides a command-line interface for interacting with the {{ instance.name }} Archive. Here are some common commands: ```bash # Download a Dandiset diff --git a/docs/api/rest-api.md b/docs/api/rest-api.md index bda01a66..367c6523 100644 --- a/docs/api/rest-api.md +++ b/docs/api/rest-api.md @@ -1,10 +1,10 @@ # REST API -The DANDI Archive provides a RESTful API that allows programmatic access to the archive. The API is documented using both Swagger and ReDoc, which provide interactive interfaces for exploring and testing the API. +The {{ instance.name }} Archive provides a RESTful API that allows programmatic access to the archive. The API is documented using both Swagger and ReDoc, which provide interactive interfaces for exploring and testing the API. ## Accessing the API -The DANDI API is available at: +The {{ instance.name }} API is available at: - Production: [https://api.dandiarchive.org/](https://api.dandiarchive.org/) - Sandbox: [https://api.sandbox.dandiarchive.org/](https://api.sandbox.dandiarchive.org/) @@ -43,9 +43,9 @@ The ReDoc interface provides: ## Authentication -Some API endpoints require authentication. You can authenticate using an API key, which you can obtain from your DANDI account. To use the API key: +Some API endpoints require authentication. You can authenticate using an API key, which you can obtain from your {{ instance.name }} account. To use the API key: -1. Log in to the DANDI Archive +1. Log in to the {{ instance.name }} Archive 2. Click on your user initials in the top-right corner 3. Copy your API key 4. 
Use the API key in the `Authorization` header of your requests: diff --git a/docs/developer-guide/creating-dandi-instance/dandi-archive.md b/docs/developer-guide/creating-dandi-instance/dandi-archive.md index ba6f7934..91c92fe6 100644 --- a/docs/developer-guide/creating-dandi-instance/dandi-archive.md +++ b/docs/developer-guide/creating-dandi-instance/dandi-archive.md @@ -4,11 +4,11 @@ This step assumes that you have completed all steps in: [Initialize Vendors](./i ### Running "terraform apply" upon dandi-infrastructure for the first time -Resources (e.g. the servers and environment ) for DANDI Archive are provisioned upon applying the Terraform definitions in +Resources (e.g. the servers and environment ) for {{ instance.name }} Archive are provisioned upon applying the Terraform definitions in `dandi-infrastructure`, specifically in the [api.tf definition](https://github.com/dandi/dandi-infrastructure/blob/master/terraform/api.tf) The resources won't be running anything until your first Heroku `release` upon the Heroku app. -To see how your code would translate into a new `Heroku` release, [see the GitHub actions workflow used by DANDI Archive here](https://github.com/dandi/dandi-archive/blob/master/.github/workflows/backend-production-deploy.yml). +To see how your code would translate into a new `Heroku` release, [see the GitHub actions workflow used by {{ instance.name }} Archive here](https://github.com/dandi/dandi-archive/blob/master/.github/workflows/backend-production-deploy.yml). ## Understanding the concept of the Procfile for Heroku @@ -18,15 +18,15 @@ Which process, the resources allocated to that process, and how that process is DANDI Archive defines a [Procfile](https://github.com/dandi/dandi-archive/blob/master/Procfile). In this `Procfile`, you'll see several entries: -- `release`: a command that is run each time a new version of DANDI API is pushed to Heroku. 
-- `web`: runs `gunicorn`, a persistent server that handles HTTP requests for the DANDI API. +- `release`: a command that is run each time a new version of {{ instance.name }} API is pushed to Heroku. +- `web`: runs `gunicorn`, a persistent server that handles HTTP requests for the {{ instance.name }} API. - `worker`: a worker process that runs `celery` behind-the-scenes. `celery` handles tasks that would otherwise cause the API to timeout. -- `checksum-worker`: another worker, also using `celery`, that specifically calculates if a new file pushed to DANDI Archive is new/updated, and determines what exactly has been changed. +- `checksum-worker`: another worker, also using `celery`, that specifically calculates if a new file pushed to {{ instance.name }} Archive is new/updated, and determines what exactly has been changed. - `analytics-worker`: another `celery` worker that handles all tasks related to processing of S3-related logs. -This `Procfile` shouldn't need to be changed or reconfigured much for a DANDI-clone; however, it is important to note so that one may understand how DANDI Archive is working. +This `Procfile` shouldn't need to be changed or reconfigured much for a DANDI-clone; however, it is important to note so that one may understand how {{ instance.name }} Archive is working. -For information on the resource allocation of `dynos` in DANDI Archive, please reference the [DANDI Infrastructure Docs](./dandi-infrastructure.md). +For information on the resource allocation of `dynos` in {{ instance.name }} Archive, please reference the [DANDI Infrastructure Docs](./dandi-infrastructure.md). ## Understanding metrics and logging via Heroku @@ -54,7 +54,7 @@ style="width: 60%; height: auto; display: block; margin-left: auto; margin-righ ## Creating a Django "superuser" (Admin) Account -Django has the concept of a `superuser` -- essentially an `administrator` user type. 
For steps such as [setting up authentication for DANDI Archive](./dandi-authentication.md#creating-and-updating-objects-in-the-dandi-archive-admin-panel) +Django has the concept of a `superuser` -- essentially an `administrator` user type. For steps such as [setting up authentication for {{ instance.name }} Archive](./dandi-authentication.md#creating-and-updating-objects-in-the-dandi-archive-admin-panel) you'll need to set up a `superuser` account. Go into your Heroku app, and identify the `Run Console` option: @@ -86,7 +86,7 @@ alt="heroku_user" style="width: 60%; height: auto; display: block; margin-left: auto; margin-right: auto;"/>

-You'll be prompted to create a user -- **Note: use an email that is not associated with your GitHub account, as GitHub is the default authentication provider for DANDI Archive**. +You'll be prompted to create a user -- **Note: use an email that is not associated with your GitHub account, as GitHub is the default authentication provider for {{ instance.name }} Archive**. To do one final test, try using your credentials to log into the Django Admin panel -- it should be located at `/admin` for your API, such as `your-apps-domain.com/admin`. @@ -96,11 +96,11 @@ You are all set here! A majority of the necessary setup steps here are defined already [during the vendor account setup for Netlify](./initialize-vendors.md#netlify). -The only other major initial setup step for the DANDI Archive frontend is regarding authentication -- [see here for more details](./dandi-authentication.md#populating-appropriate-values-for-the-frontend-to-handle-authentication) +The only other major initial setup step for the {{ instance.name }} Archive frontend is regarding authentication -- [see here for more details](./dandi-authentication.md#populating-appropriate-values-for-the-frontend-to-handle-authentication) ## API Deployment with GitHub CI/CD -Within the DANDI Archive repository, GitHub actions workflows exist for deployments to [production](https://github.com/dandi/dandi-archive/blob/master/.github/workflows/backend-production-deploy.yml) and [sandbox](https://github.com/dandi/dandi-archive/blob/master/.github/workflows/backend-staging-deploy.yml) environments +Within the {{ instance.name }} Archive repository, GitHub actions workflows exist for deployments to [production](https://github.com/dandi/dandi-archive/blob/master/.github/workflows/backend-production-deploy.yml) and [sandbox](https://github.com/dandi/dandi-archive/blob/master/.github/workflows/backend-staging-deploy.yml) environments - **Sandbox**: by default, releases are manual via the `workflow_dispatch` clause in the 
workflow @@ -137,7 +137,7 @@ style="width: 60%; height: auto; display: block; margin-left: auto; margin-righ ## Updating Allowed Hosts -For the Django-based DANDI Archive API to receive and send HTTP requests without CORS errors, you'll need to add `ALLOWED_HOSTS` within the `dandiapi/settings.py` file. +For the Django-based {{ instance.name }} Archive API to receive and send HTTP requests without CORS errors, you'll need to add `ALLOWED_HOSTS` within the `dandiapi/settings.py` file. The `settings.py` file, in general, can be understood as the configuration file for the Django app -- [see the Django docs for more info here](https://docs.djangoproject.com/en/5.0/topics/settings/) @@ -188,7 +188,7 @@ Additionally, we are configured to take a backup image of the Postgres database ## Addition of the Cache Table for Metrics -On the DANDI homepage, metrics exist for how many users, and how much data is stored in the Archive. +On the {{ instance.name }} homepage, metrics exist for how many users, and how much data is stored in the Archive.

dandi_stats

-In order to not constantly query for those values, DANDI uses a [Django cache table](https://docs.djangoproject.com/en/5.1/topics/cache/#creating-the-cache-table). This table must be separately initialized. +In order to not constantly query for those values, {{ instance.name }} uses a [Django cache table](https://docs.djangoproject.com/en/5.1/topics/cache/#creating-the-cache-table). This table must be separately initialized. This can be done via the CLI command of: @@ -209,7 +209,7 @@ You may not see updated stats immediately, as the stats are cached for 12 hours ## Customizing Logos on the UI -If you'd like to provide your own logo specific to your DANDI clone, you'll need to simply replace the referenced SVG logo in `web/src/assets/logo.svg` +If you'd like to provide your own logo specific to your {{ instance.name }} clone, you'll need to simply replace the referenced SVG logo in `web/src/assets/logo.svg` For reference, [see here](https://github.com/dandi/dandi-archive/blob/master/web/src/assets/logo.svg) diff --git a/docs/developer-guide/creating-dandi-instance/dandi-authentication.md b/docs/developer-guide/creating-dandi-instance/dandi-authentication.md index aaf2f3ee..d4ff7c89 100644 --- a/docs/developer-guide/creating-dandi-instance/dandi-authentication.md +++ b/docs/developer-guide/creating-dandi-instance/dandi-authentication.md @@ -5,13 +5,13 @@ Please follow the steps in [Initialize Vendors - GitHub](./initialize-vendors.md For the next steps in setting up authentication, you'll want to record the values created during [Obtaining your Oauth App creds](./initialize-vendors.md#obtaining-your-oauth-app-credentials). -## Creating and Updating Objects in the DANDI Archive Admin Panel +## Creating and Updating Objects in the {{ instance.name }} Archive Admin Panel ### Creating "Sites" and "Social App" Objects -**In order to complete this step, you will need to have deployed an initial DANDI Archive API**. 
See [Creating the DANDI Archive API](./dandi-archive.md) for more details. +**In order to complete this step, you will need to have deployed an initial {{ instance.name }} Archive API**. See [Creating the {{ instance.name }} Archive API](./dandi-archive.md) for more details. -First, sign in via the Django admin panel with your credentials created via the [Creating an admin user account for the DANDI Archive API step](./dandi-archive.md#creating-a-django-superuser-admin-account) +First, sign in via the Django admin panel with your credentials created via the [Creating an admin user account for the {{ instance.name }} Archive API step](./dandi-archive.md#creating-a-django-superuser-admin-account) The Django Admin panel should be located at: `` ## Access credentials Users will be prompted for a `DANDI_API_KEY` -environment variable. This variable does not need to be unique to your DANDI clone. A user can just set their `DANDI_API_KEY` to the value that your DANDI API clone issues. See docs on [storing access credentials](https://www.dandiarchive.org/handbook/13_upload/#storing-access-credentials). +environment variable. This variable does not need to be unique to your {{ instance.name }} clone. A user can just set their `DANDI_API_KEY` to the value that your {{ instance.name }} API clone issues. See docs on [storing access credentials](https://www.dandiarchive.org/handbook/13_upload/#storing-access-credentials). ## Versioning -The DANDI Client leverages a tool called [versioneer](https://pypi.org/project/versioneer/) for semantic versioning in PyPI. +The {{ instance.name }} Client leverages a tool called [versioneer](https://pypi.org/project/versioneer/) for semantic versioning in PyPI. Upon merging of a PR into `master`, if the `release` label is attached to the PR `versioneer` will generate a human-readable CHANGELOG entry, and then push to PyPI the new semantic version. 
For more details on labeling `dandi-cli` pull requests, see [here](https://github.com/dandi/dandi-cli/blob/master/DEVELOPMENT.md#releasing-with-github-actions-auto-and-pull-requests). diff --git a/docs/developer-guide/creating-dandi-instance/dandi-hub.md b/docs/developer-guide/creating-dandi-instance/dandi-hub.md index a9ab22ef..cae55de0 100644 --- a/docs/developer-guide/creating-dandi-instance/dandi-hub.md +++ b/docs/developer-guide/creating-dandi-instance/dandi-hub.md @@ -1,5 +1,5 @@ -The DANDI ecosystem includes a self-hosted Jupyter notebook service. This service is hosted on AWS and orchestrated with a Kubernetes (k8s) cluster -that provides different instance types for users to efficiently interact with data in the DANDI Archive. +The {{ instance.name }} ecosystem includes a self-hosted Jupyter notebook service. This service is hosted on AWS and orchestrated with a Kubernetes (k8s) cluster +that provides different instance types for users to efficiently interact with data in the {{ instance.name }} Archive. The instructions for configuring and deploying your own JupyterHub instance are available in the [dandi-hub repository](https://github.com/dandi/dandi-hub) (see [README](https://github.com/dandi/dandi-hub/blob/main/README.md#dandihub)). For example configurations that have been previously generated for the DANDI, LINC, and BICAN projects see the [envs directory](https://github.com/dandi/dandi-hub/tree/main/envs). diff --git a/docs/developer-guide/creating-dandi-instance/dandi-infrastructure.md b/docs/developer-guide/creating-dandi-instance/dandi-infrastructure.md index 5b97e4ce..d5eeec29 100644 --- a/docs/developer-guide/creating-dandi-instance/dandi-infrastructure.md +++ b/docs/developer-guide/creating-dandi-instance/dandi-infrastructure.md @@ -13,7 +13,7 @@ alt="terraform_config" style="width: 60%; height: auto; display: block; margin-left: auto; margin-right: auto;"/>

-As described in the [Understanding the DANDI Infrastructure](#understanding-the-dandi-infrastructure) section, the [dandi-infrastructure](https://github.com/dandi/dandi-infrastructure) +As described in the [Understanding the {{ instance.name }} Infrastructure](#understanding-the-dandi-infrastructure) section, the [dandi-infrastructure](https://github.com/dandi/dandi-infrastructure) repository includes many components that may not be needed for your use case. You will need to define the infrastructure components in stepwise fashion, starting with the `api.tf` and `sponsored_bucket.tf`. ## Applying Terraform @@ -47,19 +47,19 @@ alt="terraform_manual" style="width: 60%; height: auto; display: block; margin-left: auto; margin-right: auto;"/>

-## Understanding the DANDI Infrastructure +## Understanding the {{ instance.name }} Infrastructure ### Resonant In the [api.tf definition](https://github.com/dandi/dandi-infrastructure/blob/master/terraform/api.tf), there is reference to a `source` keyword, where a Terraform module called [Resonant](https://github.com/kitware-resonant/terraform-heroku-resonant) is defined. -In the Resonant submodule, AWS and Heroku resources are defined that facilitate base resources for compute and networking to work with DANDI Archive. -Within the DANDI Infrastructure downstream, Resonant is used by declaring values that the Terraform module expects. **Resources declared by Resonant cannot be overwritten** +In the Resonant submodule, AWS and Heroku resources are defined that facilitate base resources for compute and networking to work with {{ instance.name }} Archive. +Within the {{ instance.name }} Infrastructure downstream, Resonant is used by declaring values that the Terraform module expects. **Resources declared by Resonant cannot be overwritten** #### Sponsored Bucket -**This is DANDI Archive specific** in which the code in `main.tf` and the presence of downstream-related files should be commented out for any clones. +**This is {{ instance.name }} Archive specific** in which the code in `main.tf` and the presence of downstream-related files should be commented out for any clones. A `sponsored bucket` is also declared in the `main.tf`, with downstream, related files called `sponsored_iam.tf` and `sponsored_bucket.tf`. 
@@ -75,7 +75,7 @@ DANDI Infrastructure connects domains from three different vendors: ### Netlify -Although Netlify prescribes mapping of Netlify-issued DNS records directly, DANDI Infrastructure relies on mapping Netlify's Load Balancer IP to the respective A Name Record in AWS Route 53, +Although Netlify prescribes mapping of Netlify-issued DNS records directly, {{ instance.name }} Infrastructure relies on mapping Netlify's Load Balancer IP to the respective A Name Record in AWS Route 53, as prescribed in [Netlify's docs](https://docs.netlify.com/domains-https/custom-domains/configure-external-dns/#configure-an-apex-domain) ``` @@ -88,11 +88,11 @@ resource "aws_route53_record" "gui" { } ``` -Note the code snippet above is from the DANDI Infrastructure [domain.tf](https://github.com/dandi/dandi-infrastructure/blob/master/terraform/domain.tf#L13). +Note the code snippet above is from the {{ instance.name }} Infrastructure [domain.tf](https://github.com/dandi/dandi-infrastructure/blob/master/terraform/domain.tf#L13). ### AWS Route 53 and ACM -A manual step is necessary to set up an SSL Certificate for your DNS records throughout your DANDI clone. +A manual step is necessary to set up an SSL Certificate for your DNS records throughout your {{ instance.name }} clone. Proceed to AWS `Certificate Manager`. Begin by requesting a certificate -- **Note: Ensure you are in the same region as the default you have provided in your Terraform `main.tf` template. @@ -129,7 +129,7 @@ expressed by the certificate and linking them as records in your DNS Hosted Zone `dandi-infrastructure` defines "dyno" (a.k.a process) sizes for each service being run. For specific reference, [see here in api.tf](https://github.com/dandi/dandi-infrastructure/blob/master/terraform/api.tf#L14-L18). 
-While your DANDI Archive clone may differ in traffic and activity, the defaults set in `dandi-infrastructure` rarely +While your {{ instance.name }} Archive clone may differ in traffic and activity, the defaults set in `dandi-infrastructure` rarely exceed 75% usage. Keep in mind the different [pricing structures](https://www.heroku.com/pricing) that come with choosing different Heroku dyno sizes @@ -144,7 +144,7 @@ In addition to the Heroku 'dynos' that are added for compute, multiple Heroku 'a ### Heroku API Domain -Heroku will provision an API endpoint for your DANDI Archive. In order to properly map and configure that domain, first proceed to the +Heroku will provision an API endpoint for your {{ instance.name }} Archive. In order to properly map and configure that domain, first proceed to the `Settings` tab in Heroku.

@@ -177,7 +177,7 @@ As long as the CNAME is covered by a valid SSL certificate, should be fully set ## AWS Buckets -While [Resonant](https://github.com/kitware-resonant/terraform-heroku-resonant) does declare S3-based resources, configuration is still needed within DANDI Infrastructure. +While [Resonant](https://github.com/kitware-resonant/terraform-heroku-resonant) does declare S3-based resources, configuration is still needed within {{ instance.name }} Infrastructure. Find your AWS Account ID. This value will be referenced in the `main.tf` Terraform template.

@@ -196,7 +196,7 @@ and [staging_pipeline.tf](https://github.com/dandi/dandi-infrastructure/blob/mas Setting up sandbox will require unique AWS Route 53 Domains, as well a different Heroku app with different compute. -**Note -- ensure you review your `web/netlify.toml` file in DANDI Archive -- this will define different environment variables that correspond with staging vs. production** +**Note -- ensure you review your `web/netlify.toml` file in {{ instance.name }} Archive -- this will define different environment variables that correspond with staging vs. production** ### Email Setup diff --git a/docs/developer-guide/creating-dandi-instance/index.md b/docs/developer-guide/creating-dandi-instance/index.md index 98ba7952..c7dab9c5 100644 --- a/docs/developer-guide/creating-dandi-instance/index.md +++ b/docs/developer-guide/creating-dandi-instance/index.md @@ -1,6 +1,6 @@ -# Creating a DANDI Instance +# Creating a {{ instance.name }} Instance -The series of docs in this directory define how to create your own DANDI ecosystem (i.e. a clone of the entire DANDI ecosystem). +The series of docs in this directory define how to create your own {{ instance.name }} ecosystem (i.e. a clone of the entire {{ instance.name }} ecosystem). It is suggested that you briefly read through each of the documents in this guide before starting. -For an overview of the DANDI system architecture and how its components fit together, please see [System Architecture](../system-architecture.md). +For an overview of the {{ instance.name }} system architecture and how its components fit together, please see [System Architecture](../system-architecture.md). 
diff --git a/docs/developer-guide/creating-dandi-instance/initialize-vendors.md b/docs/developer-guide/creating-dandi-instance/initialize-vendors.md index 10208c93..532ff067 100644 --- a/docs/developer-guide/creating-dandi-instance/initialize-vendors.md +++ b/docs/developer-guide/creating-dandi-instance/initialize-vendors.md @@ -1,10 +1,10 @@ # Initialize Vendor Accounts -The DANDI ecosystem relies on vendor services to operate. So first you will need to set up accounts with the following vendors: +The {{ instance.name }} ecosystem relies on vendor services to operate. So first you will need to set up accounts with the following vendors: - **Heroku**: Provisions the API components. -- **AWS**: Provides storage buckets (S3), as well as domain management (Route53), for resources across the DANDI ecosystem. As well as the services (EC2, Kubernetes, etc.) for deploying the JupyterHub. -- **GitHub**: Serves as the authentication provider for accounts across the DANDI ecosystem. +- **AWS**: Provides storage buckets (S3), as well as domain management (Route53), for resources across the {{ instance.name }} ecosystem. As well as the services (EC2, Kubernetes, etc.) for deploying the JupyterHub. +- **GitHub**: Serves as the authentication provider for accounts across the {{ instance.name }} ecosystem. - **Terraform Cloud**: Manages provisioned resources across cloud vendors in a version-controlled manner. - **Netlify**: Deploys production frontend build, as well as staging previews to assist with frontend development. - **Sentry**: Provides observability and monitoring for API events. @@ -181,7 +181,7 @@ style="width: 60%; height: auto; display: block; margin-left: auto; margin-righ ## GitHub -You'll need to create a GitHub Organization where you can fork the DANDI repositories. 
[See here for documentation to create a GitHub organization](https://docs.github.com/en/organizations/collaborating-with-groups-in-organizations/creating-a-new-organization-from-scratch) +You'll need to create a GitHub Organization where you can fork the {{ instance.name }} repositories. [See here for documentation to create a GitHub organization](https://docs.github.com/en/organizations/collaborating-with-groups-in-organizations/creating-a-new-organization-from-scratch) ##### Initialize your OAuth App @@ -358,7 +358,7 @@ alt="retrieve" style="width: 60%; height: auto; display: block; margin-left: auto; margin-right: auto;"/>

-For usage of Netlify, one could refer to declaring a `netlify.toml` configuration file [like the one referenced in DANDI Archive](https://github.com/dandi/dandi-archive/blob/master/web/netlify.toml) +For usage of Netlify, one could refer to declaring a `netlify.toml` configuration file [like the one referenced in {{ instance.name }} Archive](https://github.com/dandi/dandi-archive/blob/master/web/netlify.toml) These values can also be replicated in the settings. @@ -369,10 +369,10 @@ alt="retrieve" style="width: 60%; height: auto; display: block; margin-left: auto; margin-right: auto;"/>

-Your frontend should be able to deploy to an auto-generated URL via Netlify now! Steps for domain management and configuration are described further in the [Frontend Deployment](./dandi-archive.md#frontend-deployment) section within the DANDI Archive setup. +Your frontend should be able to deploy to an auto-generated URL via Netlify now! Steps for domain management and configuration are described further in the [Frontend Deployment](./dandi-archive.md#frontend-deployment) section within the {{ instance.name }} Archive setup. ## Sentry -[Sentry](https://sentry.io/) is a monitoring tool used for the DANDI Archive API. It is integral in order to notify engineers if a system is down, experiencing poor performance, or may have unwanted users. +[Sentry](https://sentry.io/) is a monitoring tool used for the {{ instance.name }} Archive API. It is integral in order to notify engineers if a system is down, experiencing poor performance, or may have unwanted users. Begin by creating a Sentry account -- once successful, you'll start by creating a new Project: diff --git a/docs/developer-guide/developer-notes.md b/docs/developer-guide/developer-notes.md index 49deaf95..1955a922 100644 --- a/docs/developer-guide/developer-notes.md +++ b/docs/developer-guide/developer-notes.md @@ -5,18 +5,18 @@ project. ## Overview -The DANDI archive dev environment comprises three major pieces of software: +The {{ instance.name }} archive dev environment comprises three major pieces of software: `dandi-archive`, `dandi-cli`, and `dandi-schema`. ### `dandi-archive` [`dandi-archive`](https://github.com/dandi/dandi-archive) is the web frontend -application; it connects to `dandi-api` and provides a user interface to all the DANDI functionality. +application; it connects to `dandi-api` and provides a user interface to all the {{ instance.name }} functionality. `dandi-archive` is a standard web application built with `yarn`. 
See the [`dandi-archive` README](https://github.com/dandi/dandi-archive#readme) for instructions on how to build it locally. The Django application makes use of several services -to provide essential function for the DANDI REST API, including Postgres (to hold +to provide essential function for the {{ instance.name }} REST API, including Postgres (to hold administrative data about the web application itself), Celery (to run asynchronous compute tasks as needed to implement API semantics), and RabbitMQ (to act as a message broker between Celery and the rest of the application). @@ -37,7 +37,7 @@ following the instructions in the [`dandi-cli` README](https://github.com/dandi/ ### `dandi-schema` [`dandi-schema`](https://github.com/dandi/dandi-schema) is a Python library for -creating, maintaining, and validating the DANDI metadata models for dandisets +creating, maintaining, and validating the {{ instance.name }} metadata models for dandisets and assets. You may need to make use of this tool when improving models, or migrating metadata. You can install `dandi-schema` with a command like `pip install dandi-schema`. When releases are published through dandi-schema, @@ -47,10 +47,10 @@ viewing the schemas. ## Technologies Used This section details some foundational technologies used in `dandi-archive`. Some basic understanding of these technologies is the bare minimum -requirement for contributing meaningfully, but keep in mind that the DANDI team +requirement for contributing meaningfully, but keep in mind that the {{ instance.name }} team can help you get spun up as well. -**JavaScript/TypeScript.** The DANDI archive code is a standard JavaScript web +**JavaScript/TypeScript.** The {{ instance.name }} archive code is a standard JavaScript web application, but we try to implement new functionality using TypeScript. **Vue/VueX.** The application's components are written in Vue, and global @@ -70,14 +70,14 @@ For general help with `dandi-archive`, contact @waxlamp. 
## Deployment -The DANDI project uses automated services to continuously deploy both the +The {{ instance.name }} project uses automated services to continuously deploy both the `dandi-api` backend and the `dandi-archive` frontend. Heroku manages backend deployment automatically from the `master` branch of the `dandi-api` repository. For this reason it is important that pull requests pass all CI tests before they are merged. Heroku configuration is in turn managed by Terraform code stored in the `dandi-infrastructure` repository. If you need -access to the Heroku DANDI organization, talk to @satra. +access to the Heroku {{ instance.name }} organization, talk to @satra. Netlify manages the frontend deployment process. Similarly to `dandi-api`, these deployments are based on the `master` branch of `dandi-archive`. The @@ -90,10 +90,10 @@ speak to @satra. ### Service(s) status -The DANDI project uses [upptime](https://upptime.js.org/) to monitor the status of DANDI provided and third-party services. +The {{ instance.name }} project uses [upptime](https://upptime.js.org/) to monitor the status of {{ instance.name }} provided and third-party services. The configuration is available in [.upptimerc.yml](https://github.com/dandi/upptime/blob/master/.upptimerc.yml) of the https://github.com/dandi/upptime repository, which is automatically updated by the upptime project pipelines. Upptime automatically opens new issues if any service becomes unresponsive, and closes issues whenever service comes back online. -https://www.dandiarchive.org/upptime/ is the public dashboard for the status of DANDI services. +https://www.dandiarchive.org/upptime/ is the public dashboard for the status of {{ instance.name }} services. ## Logging @@ -112,7 +112,7 @@ Moreover, `heroku logs` processes per app dump logs to `/mnt/backup/dandi/heroku ### Continuous Integration (CI) Jobs -The DANDI project uses GitHub Actions for continuous integration. 
+The {{ instance.name }} project uses GitHub Actions for continuous integration. Logs for many of the repositories are archived on `drogon` server at `/mnt/backup/dandi/tinuous-logs/`. ## Code Hosting @@ -172,15 +172,15 @@ regulations. Our major use case for mass email is to notify the userbase of upcoming downtime (as is needed for, e.g., a major data migration or maintenance windows). -If you need to mass email the DANDI Archive userbase, speak to Roni Choudhury +If you need to mass email the {{ instance.name }} Archive userbase, speak to Roni Choudhury (). -#### Updating the DANDI userbase audience in Mailchimp +#### Updating the {{ instance.name }} userbase audience in Mailchimp Follow these steps before sending a mass email through Mailchimp to ensure that -the Mailchimp-maintained DANDI userbase audience is up to date. +the Mailchimp-maintained {{ instance.name }} userbase audience is up to date. -1. Log into the DANDI admin panel and navigate to the dashboard page (at, e.g., +1. Log into the {{ instance.name }} admin panel and navigate to the dashboard page (at, e.g., `api.dandiarchive.org/dashboard`). 2. Click on the `Mailchimp CSV` link in the navbar to download the CSV file to disk. @@ -225,10 +225,10 @@ you are setting up the superuser during initial setup. ### Refresh GitHub login to log into prod Django admin panel To log into the production Django admin panel, you must simply be logged into -the DANDI Archive production instance using an admin account. +the {{ instance.name }} Archive production instance using an admin account. However, at times the Django admin panel login seems to expire while the login -to DANDI Archive proper is still live. In this case, simply log out of DANDI, +to {{ instance.name }} Archive proper is still live. In this case, simply log out of DANDI, log back in, and then go to the Django admin panel URL (e.g. https://api.dandiarchive.org/admin) and you should be logged back in there. 
diff --git a/docs/developer-guide/integrate-external-services.md b/docs/developer-guide/integrate-external-services.md index 8713ff5e..4fe8e4f9 100644 --- a/docs/developer-guide/integrate-external-services.md +++ b/docs/developer-guide/integrate-external-services.md @@ -1,14 +1,14 @@ # Integrate External Services with DANDI -This page provides guidance on how to integrate external services with the DANDI Archive, including how to work with DANDI metadata models and APIs. +This page provides guidance on how to integrate external services with the {{ instance.name }} Archive, including how to work with {{ instance.name }} metadata models and APIs. -## DANDI Metadata Models Integration +## {{ instance.name }} Metadata Models Integration **DANDI metadata models** are defined as [Pydantic models](https://github.com/dandi/dandi-schema/blob/master/dandischema/models.py) in [**dandischema**](https://github.com/dandi/dandi-schema) and transformed into [JSON schemas](https://github.com/dandi/schema). **Both** representations — -the original Pydantic definitions and JSON schemas — are used across the DANDI ecosystem. +the original Pydantic definitions and JSON schemas — are used across the {{ instance.name }} ecosystem. The diagram below outlines how these two representations are integrated into various components, including the CLI, the backend/API, and the web interface. @@ -70,7 +70,7 @@ There are several ways to integrate external services with DANDI: ### 1. REST API Integration -The DANDI Archive provides a comprehensive REST API that allows external services to interact with the archive programmatically. The API documentation is available at: +The {{ instance.name }} Archive provides a comprehensive REST API that allows external services to interact with the archive programmatically. 
The API documentation is available at: - [Swagger UI](https://api.dandiarchive.org/swagger) - [ReDoc](https://api.dandiarchive.org/redoc) @@ -86,7 +86,7 @@ Authentication is required for write operations and is handled via API keys. Rea ### 2. Python Client Integration -For Python applications, the [DANDI Python client](https://github.com/dandi/dandi-cli) provides a convenient way to interact with the DANDI Archive: +For Python applications, the [DANDI Python client](https://github.com/dandi/dandi-cli) provides a convenient way to interact with the {{ instance.name }} Archive: ```python from dandi.dandiapi import DandiAPIClient @@ -109,7 +109,7 @@ asset.download("local_file.nwb") ### 3. WebDAV Integration -DANDI provides a [WebDAV](https://en.wikipedia.org/wiki/WebDAV) service at https://webdav.dandiarchive.org/ that allows external services to access DANDI data using standard WebDAV clients: +DANDI provides a [WebDAV](https://en.wikipedia.org/wiki/WebDAV) service at https://webdav.dandiarchive.org/ that allows external services to access {{ instance.name }} data using standard WebDAV clients: ```python import requests @@ -123,15 +123,15 @@ response = requests.get("https://webdav.dandiarchive.org/dandisets/000123/draft/ To integrate a custom visualization service with DANDI: 1. Create a service that can accept a URL to an NWB file -2. Register your service with the DANDI team -3. DANDI will add a link to your service next to compatible files in the web interface +2. Register your service with the {{ instance.name }} team +3. {{ instance.name }} will add a link to your service next to compatible files in the web interface -For example, NWB Explorer is integrated this way, allowing users to visualize NWB files directly from the DANDI web interface. +For example, NWB Explorer is integrated this way, allowing users to visualize NWB files directly from the {{ instance.name }} web interface. 
## Getting Help If you need assistance integrating your service with DANDI, you can: 1. Open an issue on the [DANDI helpdesk](https://github.com/dandi/helpdesk/issues) -2. Contact the DANDI team at help@dandiarchive.org -3. Join the DANDI Slack workspace (available to registered DANDI users) +2. Contact the {{ instance.name }} team at help@dandiarchive.org +3. Join the {{ instance.name }} Slack workspace (available to registered {{ instance.name }} users) diff --git a/docs/developer-guide/system-architecture.md b/docs/developer-guide/system-architecture.md index ad6c422f..edd74799 100644 --- a/docs/developer-guide/system-architecture.md +++ b/docs/developer-guide/system-architecture.md @@ -4,10 +4,10 @@ This page provides a high-level view of how DANDI's core components fit together ## The Big Picture -The DANDI platform is essentially composed of: +The {{ instance.name }} platform is essentially composed of: 1. **Storage**: S3 buckets (AWS) where data actually resides. -2. **API**: A Django/Resonant-based backend application (hosted on Heroku) that handles the DANDI data model, user authentication, and orchestrates S3 interactions. +2. **API**: A Django/Resonant-based backend application (hosted on Heroku) that handles the {{ instance.name }} data model, user authentication, and orchestrates S3 interactions. 3. **Frontend**: A Vue-based web application (hosted on Netlify) for users to browse, search, and manage data in the archive. 4. **Workers**: Celery workers (also on Heroku) for asynchronous tasks such as file checksum calculations, analytics, and housekeeping. 5. **Observability**: Log aggregation and alerting (Heroku logs), plus Sentry for error-tracking and notifications. @@ -23,7 +23,7 @@ style="width: 90%; height: auto; display: block; margin-left: auto; margin-righ * The user (or script) interacts with the **Web UI** or the **DANDI CLI**. * The **Web UI** calls into the **API** (over HTTPS). 
* The **API** queries or updates metadata in its Postgres DB (hosted on Heroku). -* The **API** calls AWS S3 to read/write DANDI assets. +* The **API** calls AWS S3 to read/write {{ instance.name }} assets. * Certain heavy-lift or background tasks get queued into Celery tasks, handled by the **Workers**. * Domain names, certificates, and load-balancing records are handled by AWS Route 53 or Netlify's DNS, depending on whether it's the API subdomain or the apex domain for the UI. * Large chunks of data can be streamed from S3 directly to the Client via presigned URLs. @@ -51,7 +51,7 @@ Provisions the servers, worker processes, and the database for the API. ### 3. Netlify (UI) -* **Frontend server**: Serves a static build of the DANDI Archive frontend (Vue.js). +* **Frontend server**: Serves a static build of the {{ instance.name }} Archive frontend (Vue.js). * **Autodeployment**: On each push or merge to `main` (or whichever branch is configured), Netlify automatically builds and deploys. * **Configuration**: - **`netlify.toml`**: Describes build commands, environment variables for sandbox vs. production. diff --git a/docs/getting-started/creating-account.md b/docs/getting-started/creating-account.md index e1657585..05fed10a 100644 --- a/docs/getting-started/creating-account.md +++ b/docs/getting-started/creating-account.md @@ -1,12 +1,12 @@ -# Create a DANDI Account +# Create a {{ instance.name }} Account -A DANDI account enhances your capabilities within the DANDI Archive. +A {{ instance.name }} account enhances your capabilities within the {{ instance.name }} Archive. Without an account, users can freely search, view, and download available datasets. -With an account, users can create and edit Dandisets, and use the DANDI Hub to analyze data. +With an account, users can create and edit Dandisets, and use the {{ instance.name }} Hub to analyze data. 
DANDI provides two servers:

-- **Main server**: [https://dandiarchive.org/](https://dandiarchive.org/) - This is the primary platform for most users.
+- **Main server**: [{{ instance.uri }}/]({{ instance.uri }}/) - This is the primary platform for most users.
- **Sandbox server**: [https://sandbox.dandiarchive.org/](https://sandbox.dandiarchive.org/) - Ideal for training and testing purposes.

Accounts are independently managed on each server, allowing users to register on one or both, depending on their testing and deployment needs.
@@ -18,12 +18,12 @@ If your registration is denied:

- With an academic email not linked to your GitHub, please contact [help@dandiarchive.org](mailto:help@dandiarchive.org) for assistance using this email address.
- Without an academic email, account approval is still possible under specific circumstances. Appeal decisions at [help@dandiarchive.org](mailto:help@dandiarchive.org).

-## How to Register for a DANDI Account
+## How to Register for a {{ instance.name }} Account

1. **Create a GitHub Account**: If not already a GitHub user, [sign up here](https://github.com/).
-2. **Register on DANDI**: Navigate to the [DANDI homepage](https://dandiarchive.org) and click the `LOG IN WITH GITHUB` button to register using your GitHub account.
+2. **Register on {{ instance.name }}**: Navigate to the [{{ instance.name }} homepage]({{ instance.uri }}) and click the `LOG IN WITH GITHUB` button to register using your GitHub account.
3. **Confirmation of Review**: Post-registration, you will receive an email confirming that your account is under review. Your request will be reviewed within 24 hours.
    - **Note**: Reviews may extend beyond 24 hours for new GitHub accounts or non-.edu email addresses, particularly if the registration does not describe immediate plans to contribute data.
-4. **Accessing DANDI**: Upon approval, access DANDI by logging in through the `LOG IN WITH GITHUB` button.
+4. 
**Accessing DANDI**: Upon approval, access {{ instance.name }} by logging in through the `LOG IN WITH GITHUB` button. For support or further inquiries, reach out to [help@dandiarchive.org](mailto:help@dandiarchive.org). diff --git a/docs/getting-started/dandi-ecosystem.md b/docs/getting-started/dandi-ecosystem.md index 592c5515..36e5fbc4 100644 --- a/docs/getting-started/dandi-ecosystem.md +++ b/docs/getting-started/dandi-ecosystem.md @@ -1,26 +1,26 @@ -# DANDI Ecosystem +# {{ instance.name }} Ecosystem -The DANDI project can be represented schematically: +The {{ instance.name }} project can be represented schematically: dandi_structure -The **Client side** contains the DANDI Python CLI and DANDI Web application. +The **Client side** contains the {{ instance.name }} Python CLI and {{ instance.name }} Web application. -The **Server side** contains a RESTful API and DANDI JupyterHub. +The **Server side** contains a RESTful API and {{ instance.name }} JupyterHub. The **Dandiset** is a file organization to store data together with metadata. -The DANDI project is organized around several **GitHub** repositories: +The {{ instance.name }} project is organized around several **GitHub** repositories: | Repository | Description | |----------|----------| -| [DANDI Archive](https://github.com/dandi/dandi-archive) | Contains the code for deploying the client-side Web application frontend based on the [Vue.js](https://vuejs.org/) framework as well as a Django-based backend to run the DANDI REST API. -| [DANDI JupyterHub](https://github.com/dandi/dandi-hub) | Contains the code for deploying a JupyterHub instance to support interaction with the DANDI archive. +| [DANDI Archive](https://github.com/dandi/dandi-archive) | Contains the code for deploying the client-side Web application frontend based on the [Vue.js](https://vuejs.org/) framework as well as a Django-based backend to run the {{ instance.name }} REST API. 
+| [DANDI JupyterHub](https://github.com/dandi/dandi-hub) | Contains the code for deploying a JupyterHub instance to support interaction with the {{ instance.name }} archive. | [DANDI Python client](https://github.com/dandi/dandi-cli) | Contains the code for the command line tool used to interact with the archive. It allows you to download data from the archive. It also allows you to locally organize and validate your data before uploading to the archive. | [DANDI Docs](https://github.com/dandi/dandi-docs) | Provides the contents of this website. | [helpdesk](https://github.com/dandi/helpdesk) | Contains our community help platform where you can submit [issues](https://github.com/dandi/helpdesk/issues/new/choose). -| [schema](https://github.com/dandi/schema) | Provides the details and some supporting code for the DANDI metadata schema. -| [schema Python library](https://github.com/dandi/dandi-schema) | Provides a Python library for updating the schema and for creating and validating DANDI objects. -| [DANDI About website](https://github.com/dandi/dandi-about) | Provides an overview of the DANDI project and the team members and collaborators. | +| [schema](https://github.com/dandi/schema) | Provides the details and some supporting code for the {{ instance.name }} metadata schema. +| [schema Python library](https://github.com/dandi/dandi-schema) | Provides a Python library for updating the schema and for creating and validating {{ instance.name }} objects. +| [DANDI About website](https://github.com/dandi/dandi-about) | Provides an overview of the {{ instance.name }} project and the team members and collaborators. | diff --git a/docs/getting-started/data-standards/nwb.md b/docs/getting-started/data-standards/nwb.md index 48ea4e98..9f33ff98 100644 --- a/docs/getting-started/data-standards/nwb.md +++ b/docs/getting-started/data-standards/nwb.md @@ -18,6 +18,6 @@ DANDI is designed to work seamlessly with NWB files. When you upload NWB files t 1. 
The files are validated to ensure they conform to the NWB standard 2. Metadata is automatically extracted to make your data more discoverable -3. The data can be accessed programmatically through the DANDI API +3. The data can be accessed programmatically through the {{ instance.name }} API For more information on validating NWB files for DANDI, see the [Validating NWB Files](../../user-guide-sharing/validating-files.md) section. diff --git a/docs/index.md b/docs/index.md index 43d19fbc..46998b49 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,16 +1,16 @@ -# Welcome to the DANDI Archive Documentation +# Welcome to the {{ instance.name }} Archive Documentation dandi_banner -The Web interface to the DANDI archive is located at https://dandiarchive.org. +The Web interface to the {{ instance.name }} archive is located at {{ instance.uri }}. This documentation explains how to interact with the archive. ## How to Use This Documentation -If you want to know more about the DANDI project, its goals, and the problems +If you want to know more about the {{ instance.name }} project, its goals, and the problems it tries to solve, check out the [Introduction](./introduction.md). To start using the archive, head over to the User Guide sections for [Sharing Data](./user-guide-sharing/creating-dandiset.md) or [Using Data](./user-guide-using/exploring-dandisets.md). @@ -21,14 +21,14 @@ page in the Getting Started section. ## Where to Get Help -You can communicate with the DANDI team in a variety of ways, depending on your needs: +You can communicate with the {{ instance.name }} team in a variety of ways, depending on your needs: - You can ask questions, report bugs, or request features [at our helpdesk](https://github.com/dandi/helpdesk/issues/new/choose). - For interacting with the global neuroscience community, post on https://neurostars.org and use the tag [dandi](https://neurostars.org/tag/dandi). 
-- You can use the DANDI Slack workspace, which we will invite you to after approving your [registration on - DANDI using GitHub](https://dandiarchive.org/) (this registration is required to upload data or to use the DANDI +- You can use the {{ instance.name }} Slack workspace, which we will invite you to after approving your [registration on + {{ instance.name }} using GitHub]({{ instance.uri }}/) (this registration is required to upload data or to use the {{ instance.name }} JupyterHub). See [here for details on how to register](./getting-started/creating-account.md). - Email us: [info@dandiarchive.org](mailto: info@dandiarchive.org) diff --git a/docs/introduction.md b/docs/introduction.md index 407f6e38..6ac67dd9 100644 --- a/docs/introduction.md +++ b/docs/introduction.md @@ -21,7 +21,7 @@ DANDI provides significant benefits: - The data can be accessed programmatically allowing for software to work directly with data in the cloud. - The infrastructure is built on a software stack of open source products, thus enriching the ecosystem. -### Properties of DANDI +### Properties of {{ instance.name }} **Data identifiers:** The archive provides persistent identifiers for versioned datasets and assets, thus improving reproducibility of neurophysiology research. @@ -32,14 +32,14 @@ DANDI provides significant benefits: **Accepted Standards and Data File Formats:** NWB (HDF5), BIDS (NIfTI, JSON, PNG, TIF, OME.TIF, OME.BTF, OME.ZARR) (see [Data Standards](./getting-started/data-standards/index.md) for more details) -## Neurophysiology Informatics Challenges and DANDI Solutions +## Neurophysiology Informatics Challenges and {{ instance.name }} Solutions | Challenges | Solutions | |---|---| -| Most raw data stays in laboratories. | DANDI provides a public archive for dissemination of raw and derived data. | -| Non-standardized datasets lead to significant resource needs to understand and adapt code to these datasets. 
| DANDI standardizes all data using NWB and BIDS standards. | -| The multitude of different hardware platforms and custom binary formats requires significant effort to consolidate into reusable datasets. | The DANDI ecosystem provides tools for converting data from different instruments into NWB and BIDS. | -| There are many domain general places to house data (e.g. Open Science Framework, G-Node, Dropbox, Google drive), but it is difficult to find relevant scientific metadata. | DANDI is focused on neurophysiology data and related metadata. | -| Datasets are growing larger, requiring compute services to be closer to data. | DANDI provides Dandihub, a JupyterHub instance close to the data. | -| Neurotechnology is evolving and requires changes to metadata and data storage. | DANDI works with community members to improve data standards and formats. | -| Consolidating and creating robust algorithms (e.g. spike sorting) requires varied data sources. | DANDI provides access to many different datasets. | +| Most raw data stays in laboratories. | {{ instance.name }} provides a public archive for dissemination of raw and derived data. | +| Non-standardized datasets lead to significant resource needs to understand and adapt code to these datasets. | {{ instance.name }} standardizes all data using NWB and BIDS standards. | +| The multitude of different hardware platforms and custom binary formats requires significant effort to consolidate into reusable datasets. | The {{ instance.name }} ecosystem provides tools for converting data from different instruments into NWB and BIDS. | +| There are many domain general places to house data (e.g. Open Science Framework, G-Node, Dropbox, Google drive), but it is difficult to find relevant scientific metadata. | {{ instance.name }} is focused on neurophysiology data and related metadata. | +| Datasets are growing larger, requiring compute services to be closer to data. 
| {{ instance.name }} provides Dandihub, a JupyterHub instance close to the data. | +| Neurotechnology is evolving and requires changes to metadata and data storage. | {{ instance.name }} works with community members to improve data standards and formats. | +| Consolidating and creating robust algorithms (e.g. spike sorting) requires varied data sources. | {{ instance.name }} provides access to many different datasets. | diff --git a/docs/terms-policies/policies.md b/docs/terms-policies/policies.md index 28334efa..94589293 100644 --- a/docs/terms-policies/policies.md +++ b/docs/terms-policies/policies.md @@ -6,26 +6,26 @@ must not violate privacy or copyright, or breach confidentiality or non-disclosure agreements for data collected from human subjects. - **Status of research data:** Empirical (not simulated) data and associated metadata from any stage of the - research study's life cycle is accepted. Simulated data is handled on a case-by-case basis, contact the DANDI team + research study's life cycle is accepted. Simulated data is handled on a case-by-case basis, contact the {{ instance.name }} team - **Eligible users:** Anyone working with the data in the scope of the archive may register as a user of DANDI. All users are allowed to deposit content for which they possess the appropriate rights and which falls within the **scope** of the archive. - **Ownership:** By uploading content, no change of ownership is implied and no - property rights are transferred to the DANDI team. All uploaded content remains + property rights are transferred to the {{ instance.name }} team. All uploaded content remains the property of the parties prior to submission and must be accompanied by a license allowing - DANDI project data access, archival, and re-distribution (see **License** below). -- **Data file formats:** DANDI only accepts data using standardized formats such + {{ instance.name }} project data access, archival, and re-distribution (see **License** below). 
+- **Data file formats:** {{ instance.name }} only accepts data using standardized formats such as [Neurodata Without Borders](https://nwb.org), [Brain Imaging Data Structure](https://bids.neuroimaging.io/), [Neuroimaging Data Model](https://nidm.nidash.org/), and other [BRAIN Initiative](https://braininitiative.nih.gov/) standards. We are working with the community to improve these standards and to - make DANDI archive FAIR. + make {{ instance.name }} archive FAIR. - **Data quality:** All data are provided "as-is", and the user shall hold - DANDI and data providers supplying data to the DANDI Archive free and harmless in + {{ instance.name }} and data providers supplying data to the {{ instance.name }} Archive free and harmless in connection with the use of such data. - **Metadata types and sources:** All metadata is stored internally in JSON format according to a defined JSON schema. Metadata records violating the schema are not allowed. - **Language:** Textual items must be in English. Latin names could be used in exceptional cases where appropriate. -- **Licenses:** Users must specify a license for each dataset chosen from the list of the DANDI archive approved licenses. Users allow for the DANDI archive to extract metadata records and make them available under permissive CC0 license. +- **Licenses:** Users must specify a license for each dataset chosen from the list of the {{ instance.name }} archive approved licenses. Users allow for the {{ instance.name }} archive to extract metadata records and make them available under permissive CC0 license. ## Access and Reuse @@ -50,10 +50,10 @@ ## Removal - **Revocation:** Content not considered to fall under the scope of the repository - can be removed and associated DOIs issued by DANDI revoked. Inform the DANDI team + can be removed and associated DOIs issued by {{ instance.name }} revoked. Inform the {{ instance.name }} team promptly, ideally no later than 24 hours from upload, about any suspected policy violation. 
Alternatively, content found to already have an external DOI will - have the DANDI DOI invalidated and the record updated to indicate the original + have the {{ instance.name }} DOI invalidated and the record updated to indicate the original external DOI. User access may be revoked on violation of Terms of Use. - **Withdrawal:** If the uploaded research object must later be withdrawn, the @@ -80,7 +80,7 @@ - **Retention period:** Versioned items will be retained for the lifetime of the repository. This is currently the lifetime of the NIH award, which currently expires in April 2029. -- **Functional preservation:** DANDI makes no promises of usability and +- **Functional preservation:** {{ instance.name }} makes no promises of usability and understandability of deposited objects. - **File preservation:** Data files and metadata are backed up nightly and replicated into multiple copies in different storage services. @@ -89,6 +89,6 @@ checksums to assure that file content remains constant. - **Succession plans:** In case of a repository shutdown, our best efforts will be made to integrate all content into suitable alternative institutional and/or - other repositories overlapping in the scope of the DANDI archive. + other repositories overlapping in the scope of the {{ instance.name }} archive. This policy document is derived from the [Zenodo General Policies v1.0](https://about.zenodo.org/policies/). diff --git a/docs/terms-policies/terms.md b/docs/terms-policies/terms.md index 61fdd54e..51150f00 100644 --- a/docs/terms-policies/terms.md +++ b/docs/terms-policies/terms.md @@ -1,12 +1,12 @@ # Terms of Use v1.0.1 -The DANDI data archive ("DANDI") is offered by the DANDI project as part of its +The {{ instance.name }} data archive ("DANDI") is offered by the {{ instance.name }} project as part of its mission to make available the results of its work. Use of DANDI, both the uploading and downloading of data, denotes agreement with the following terms: -1. 
DANDI is an open dissemination research data repository for the preservation +1. {{ instance.name }} is an open dissemination research data repository for the preservation and making available of research, educational and informational content. Access to DANDI's content is open to all. @@ -15,7 +15,7 @@ the following terms: access to an organized data center. 1. The uploader is exclusively responsible for the content that they upload to - DANDI and shall indemnify and hold the DANDI team free and harmless in + {{ instance.name }} and shall indemnify and hold the {{ instance.name }} team free and harmless in connection with their use of the service. The uploader shall ensure that their content is suitable for open dissemination, and that it complies with these terms and applicable laws, including, but not limited to, privacy, data @@ -26,30 +26,30 @@ the following terms: 1. Access to DANDI, and all content, is provided on an "as-is" basis. Users of content ("Users") shall respect applicable license conditions. Download and - use of content from DANDI does not transfer any intellectual property rights + use of content from {{ instance.name }} does not transfer any intellectual property rights in the content to the User. 1. Users are exclusively responsible for their use of content, and shall indemnify - and hold the DANDI team free and harmless in connection with their download - and/or use. Hosting and making content available through DANDI does not - represent any approval or endorsement of such content by the DANDI team. + and hold the {{ instance.name }} team free and harmless in connection with their download + and/or use. Hosting and making content available through {{ instance.name }} does not + represent any approval or endorsement of such content by the {{ instance.name }} team. -1. The DANDI team reserves the right, without notice, at its sole discretion and +1. 
The {{ instance.name }} team reserves the right, without notice, at its sole discretion and without liability, (i) to alter, delete or block access to content that it deems to be inappropriate or insufficiently protected, and (ii) to restrict - or remove User access where it considers that use of DANDI interferes with + or remove User access where it considers that use of {{ instance.name }} interferes with its operations or violates these Terms of Use or applicable laws. -1. Unless specified otherwise, DANDI metadata may be freely reused under the +1. Unless specified otherwise, {{ instance.name }} metadata may be freely reused under the [CC0 waiver](https://creativecommons.org/publicdomain/zero/1.0/). -1. These Terms of Use are subject to change by the DANDI team at any time and +1. These Terms of Use are subject to change by the {{ instance.name }} team at any time and without notice, other than through posting the updated Terms of Use on the - DANDI website. + {{ instance.name }} website. -* Uploaders considering DANDI for the storage of unanonymized or encrypted/unencrypted +* Uploaders considering {{ instance.name }} for the storage of unanonymized or encrypted/unencrypted sensitive personal data are advised to use bespoke platforms rather than open - dissemination services like DANDI for sharing their data. + dissemination services like {{ instance.name }} for sharing their data. [1] [2] See further the user pages regarding uploading for information on anonymization of datasets that contain sensitive personal information. diff --git a/docs/user-guide-sharing/contributing-notebook.md b/docs/user-guide-sharing/contributing-notebook.md index 72a23c70..f0f70b5f 100644 --- a/docs/user-guide-sharing/contributing-notebook.md +++ b/docs/user-guide-sharing/contributing-notebook.md @@ -1,6 +1,6 @@ # Contributing an Example Notebook -Example notebooks are a great way to showcase how to use your data and help others understand and reproduce your analyses. 
This page explains how to contribute an example notebook to the DANDI Archive. +Example notebooks are a great way to showcase how to use your data and help others understand and reproduce your analyses. This page explains how to contribute an example notebook to the {{ instance.name }} Archive. ## What are Example Notebooks? @@ -11,7 +11,7 @@ Example notebooks are Jupyter notebooks that demonstrate how to: - Reproduce figures from associated publications - Showcase the potential uses of your data -These notebooks are maintained in the [dandi/example-notebooks](https://github.com/dandi/example-notebooks) repository and are available to all DANDI Hub users. +These notebooks are maintained in the [dandi/example-notebooks](https://github.com/dandi/example-notebooks) repository and are available to all {{ instance.name }} Hub users. ## Why Contribute an Example Notebook? @@ -30,7 +30,7 @@ To contribute an example notebook: 1. **Create a Jupyter notebook** that demonstrates how to use your data. Your notebook should: - Include clear documentation and comments - - Load data directly from your published Dandiset using the DANDI API + - Load data directly from your published Dandiset using the {{ instance.name }} API - Include examples of basic analyses or visualizations - Be well-organized and easy to follow @@ -39,7 +39,7 @@ To contribute an example notebook: - Add your notebook to the appropriate directory - Submit a pull request with a clear description of your notebook -3. **Wait for review and approval** from the DANDI team. Once approved, your notebook will be merged into the repository and made available to all DANDI Hub users. +3. **Wait for review and approval** from the {{ instance.name }} team. Once approved, your notebook will be merged into the repository and made available to all {{ instance.name }} Hub users. 
## Best Practices for Example Notebooks @@ -50,14 +50,14 @@ To create effective example notebooks: - **Use relative paths** when accessing data to ensure portability - **Include visualizations** to help users understand the data - **Document any assumptions or limitations** of your analyses -- **Test your notebook** in the DANDI Hub environment before submitting +- **Test your notebook** in the {{ instance.name }} Hub environment before submitting - **Keep the notebook focused** on demonstrating how to use the data rather than complex analyses ## Example Notebook Organization The [dandi/example-notebooks](https://github.com/dandi/example-notebooks) repository is organized into several directories: -- **[dandi/](https://github.com/dandi/example-notebooks/tree/master/dandi)**: Notebooks that demonstrate general DANDI functionality +- **[dandi/](https://github.com/dandi/example-notebooks/tree/master/dandi)**: Notebooks that demonstrate general {{ instance.name }} functionality - **[tutorials/](https://github.com/dandi/example-notebooks/tree/master/tutorials)**: Notebooks that provide step-by-step tutorials for specific tasks - **[demos/](https://github.com/dandi/example-notebooks/tree/master/demos)**: Notebooks that showcase specific features or use cases - **`{dandiset_id}`/**: Notebooks that demonstrate how to use a specific Dandiset diff --git a/docs/user-guide-sharing/converting-data/index.md b/docs/user-guide-sharing/converting-data/index.md index fc9c2d88..3f0202d2 100644 --- a/docs/user-guide-sharing/converting-data/index.md +++ b/docs/user-guide-sharing/converting-data/index.md @@ -1,8 +1,8 @@ # Standardizing Data -Data contributed to DANDI must be standardized into one of the formats accepted by DANDI. +Data contributed to {{ instance.name }} must be standardized into one of the formats accepted by DANDI. -Most of the data on DANDI is in Neurodata Without Borders (NWB), a data standard designed for sharing data from neurophysiology experiments. 
+Most of the data on {{ instance.name }} is in Neurodata Without Borders (NWB), a data standard designed for sharing data from neurophysiology experiments. See [Converting data to NWB](./nwb/index.md) for guidance in how to convert your data to NWB and publish on DANDI. DANDI also supports the [Brain Imaging Data Structure (BIDS)](https://bids.neuroimaging.io/). For more information, see: diff --git a/docs/user-guide-sharing/converting-data/nwb/nwb-guide.md b/docs/user-guide-sharing/converting-data/nwb/nwb-guide.md index b58b2001..e790eeef 100644 --- a/docs/user-guide-sharing/converting-data/nwb/nwb-guide.md +++ b/docs/user-guide-sharing/converting-data/nwb/nwb-guide.md @@ -19,14 +19,14 @@ This tool is particularly useful for users who prefer a graphical interface over 2. **Converting Data**: NWB GUIDE supports conversion from various formats. See the [NWB GUIDE tutorials](https://nwb-guide.readthedocs.io/en/stable/tutorials/index.html) for details. -3. **Dataset Publication**: The [Dataset Publication Tutorial](https://nwb-guide.readthedocs.io/en/latest/tutorials/dataset_publication.html) provides step-by-step instructions for publishing data to DANDI using NWB GUIDE. +3. **Dataset Publication**: The [Dataset Publication Tutorial](https://nwb-guide.readthedocs.io/en/latest/tutorials/dataset_publication.html) provides step-by-step instructions for publishing data to {{ instance.name }} using NWB GUIDE. ## Key Features - **Intuitive Interface**: Easy-to-use graphical interface for managing the entire workflow from conversion to publication. - **Format Support**: Supports conversion from many common neurophysiology data formats. - **Validation**: Built-in validation to ensure your NWB files meet DANDI's requirements. -- **DANDI Integration**: Direct upload to DANDI without needing to use the command line. +- **DANDI Integration**: Direct upload to {{ instance.name }} without needing to use the command line. 
## When to Use NWB GUIDE diff --git a/docs/user-guide-sharing/creating-dandiset.md b/docs/user-guide-sharing/creating-dandiset.md index 8c84e8cb..5d9d9884 100644 --- a/docs/user-guide-sharing/creating-dandiset.md +++ b/docs/user-guide-sharing/creating-dandiset.md @@ -6,18 +6,18 @@ This page provides instructions for creating a new Dandiset on DANDI. Before creating a Dandiset, you should: -1. **Register for DANDI and obtain an API key.** To create a new Dandiset, you need to have a DANDI account. - * If you do not already have an account, see [Create a DANDI Account](../getting-started/creating-account.md) page for instructions. +1. **Register for {{ instance.name }} and obtain an API key.** To create a new Dandiset, you need to have a {{ instance.name }} account. + * If you do not already have an account, see [Create a {{ instance.name }} Account](../getting-started/creating-account.md) page for instructions. * Once you are logged in, copy your API key by clicking on your user initials in the top-right corner after logging in. - * Production (https://dandiarchive.org) and sandbox (https://sandbox.dandiarchive.org) servers have different API keys and different logins. + * Production ({{ instance.uri }}) and sandbox (https://sandbox.dandiarchive.org) servers have different API keys and different logins. 2. **Choose a server.** - * **Production server**: https://dandiarchive.org. This is the main server for DANDI and should be used for sharing neuroscience data. + * **Production server**: {{ instance.uri }}. This is the main server for {{ instance.name }} and should be used for sharing neuroscience data. When you create a Dandiset, a permanent ID is automatically assigned to it. This Dandiset can be fully public or embargoed according to NIH policy. All data are uploaded as draft and can be adjusted before publishing on the production server. * **Development server**: https://sandbox.dandiarchive.org. This server is for testing and learning how to use DANDI. 
-    It is not recommended for sharing data, but is recommended for testing the DANDI CLI and GUI or as a testing platform for developers.
+    It is not recommended for sharing data, but is recommended for testing the {{ instance.name }} CLI and GUI or as a testing platform for developers.
     Note that the development server should not be used to stage your data.
 
 ## Creating a New Dandiset
 
@@ -28,7 +28,7 @@ Before creating a Dandiset, you should:
    * After you provide a name and description, the dataset identifier will be created; we will call this `<dandiset_id>`.
 
 2. **Add metadata to the Dandiset.**
-   * Visit your Dandiset landing page: `https://dandiarchive.org/dandiset/<dandiset_id>/draft` and click on the `METADATA` link.
+   * Visit your Dandiset landing page: `{{ instance.uri }}/dandiset/<dandiset_id>/draft` and click on the `METADATA` link.
    * Fill in the required metadata fields. For more information on Dandiset metadata, see the [Dandiset Metadata](./dandiset-metadata.md) page.
 
 ## Next Steps
diff --git a/docs/user-guide-sharing/dandiset-metadata.md b/docs/user-guide-sharing/dandiset-metadata.md
index 76535c4b..8456a84b 100644
--- a/docs/user-guide-sharing/dandiset-metadata.md
+++ b/docs/user-guide-sharing/dandiset-metadata.md
@@ -65,4 +65,4 @@ It is highly recommended to add links to the following resources (if they exist)
    * The public code repository used to convert the data
    * A data analysis library associated with the publication that can take this data as input
    * An example notebook submitted to http://github.com/dandi/example-notebooks that demonstrates how to use the data
-   * Associated datasets published on DANDI or on other archives.
+   * Associated datasets published on {{ instance.name }} or on other archives.
diff --git a/docs/user-guide-sharing/data-licenses.md b/docs/user-guide-sharing/data-licenses.md index 1ba554e3..a3710d2d 100644 --- a/docs/user-guide-sharing/data-licenses.md +++ b/docs/user-guide-sharing/data-licenses.md @@ -1,7 +1,7 @@ # Data Licenses To create a Dandiset, you must select a license under which to share the data. -Because the DANDI Archive provides a platform for open data sharing, the +Because the {{ instance.name }} Archive provides a platform for open data sharing, the licenses come from [Creative Commons](https://creativecommons.org/), an international nonprofit organization dedicated to establishing, growing, and maintaining a shared commons in the spirit of open source. diff --git a/docs/user-guide-sharing/publishing-dandisets.md b/docs/user-guide-sharing/publishing-dandisets.md index 7f32322b..939b8fde 100644 --- a/docs/user-guide-sharing/publishing-dandisets.md +++ b/docs/user-guide-sharing/publishing-dandisets.md @@ -1,6 +1,6 @@ # Publishing Dandisets -Once you create a Dandiset, DANDI will automatically create a `draft` version +Once you create a Dandiset, {{ instance.name }} will automatically create a `draft` version of the Dandiset that can be changed as many times as needed by editing the metadata or uploading new files. @@ -38,13 +38,13 @@ actively working on enabling this feature. You can add the following statement to the methods section of your manuscript. -> Data and associated metadata were uploaded to the DANDI archive [RRID:SCR_017571] using +> Data and associated metadata were uploaded to the {{ instance.name }} archive [RRID:SCR_017571] using the Python command line tool (https://doi.org/10.5281/zenodo.3692138). The data were first converted into the NWB format (https://doi.org/10.1101/2021.03.13.435173) and organized into a BIDS-like (https://doi.org/10.1038/sdata.2016.44) structure. 
-You can refer to DANDI using any of the following options:
+You can refer to {{ instance.name }} using any of the following options:
 
 * Using an RRID [RRID:SCR_017571](https://scicrunch.org/scicrunch/Resources/record/nlx_144509-1/SCR_017571/resolver).
-* Using the DANDI CLI reference: https://doi.org/10.5281/zenodo.3692138
+* Using the {{ instance.name }} CLI reference: https://doi.org/10.5281/zenodo.3692138
diff --git a/docs/user-guide-sharing/uploading-data.md b/docs/user-guide-sharing/uploading-data.md
index cf461b51..f2cf3cd0 100644
--- a/docs/user-guide-sharing/uploading-data.md
+++ b/docs/user-guide-sharing/uploading-data.md
@@ -1,6 +1,6 @@
 # Uploading Data
 
-This page provides instructions for uploading data to DANDI after you have [created a Dandiset](./creating-dandiset.md) and [converted your data to NWB format](./converting-data/index.md).
+This page provides instructions for uploading data to {{ instance.name }} after you have [created a Dandiset](./creating-dandiset.md) and [converted your data to NWB format](./converting-data/index.md).
 
 ## Prerequisites
 
@@ -30,13 +30,13 @@ DANDI provides two main methods for uploading data:
 The NWB GUIDE provides a graphical interface for uploading data to DANDI.
 See the [NWB GUIDE Dataset Publication Tutorial](https://nwb-guide.readthedocs.io/en/latest/tutorials/dataset_publication.html) for more information.
 
-### 2. Using the DANDI CLI
+### 2. Using the {{ instance.name }} CLI
 
-For command-line users or those with larger datasets, the DANDI CLI provides a powerful way to upload data:
+For command-line users or those with larger datasets, the {{ instance.name }} CLI provides a powerful way to upload data:
 
 1. **Download the Dandiset locally**
    ```bash
-   dandi download https://dandiarchive.org/dandiset/<dandiset_id>/draft
+   dandi download {{ instance.uri }}/dandiset/<dandiset_id>/draft
    cd <dandiset_id>
    ```
2. **Organize your data** (skip this step if you are preparing a proper [BIDS dataset](https://bids.neuroimaging.io/) with e.g.
OME-Zarr, NWB and other files): @@ -59,11 +59,11 @@ For command-line users or those with larger datasets, the DANDI CLI provides a p ## Storing Access Credentials -There are two options for storing your DANDI access credentials: +There are two options for storing your {{ instance.name }} access credentials: ### 1. `DANDI_API_KEY` Environment Variable -- By default, the DANDI CLI looks for an API key in the `DANDI_API_KEY` environment variable. To set this on Linux or macOS, run: +- By default, the {{ instance.name }} CLI looks for an API key in the `DANDI_API_KEY` environment variable. To set this on Linux or macOS, run: ```bash export DANDI_API_KEY=personal-key-value @@ -94,7 +94,7 @@ If you encounter issues during the upload process: If you continue to have issues, please reach out via the [DANDI Help Desk](https://github.com/dandi/helpdesk/discussions). -## Debugging the DANDI CLI +## Debugging the {{ instance.name }} CLI If something goes wrong while using the Python CLI client, the first place to check for more information so that you can [file a quality bug diff --git a/docs/user-guide-sharing/validating-files.md b/docs/user-guide-sharing/validating-files.md index 035f1fe1..aa6dfa10 100644 --- a/docs/user-guide-sharing/validating-files.md +++ b/docs/user-guide-sharing/validating-files.md @@ -18,7 +18,7 @@ NWB files. The NWB Inspector will print out all warnings, but only `CRITICAL` wa uploaded to DANDI. Errors in NWB Inspector will be block upload as well, but reflect a problem with the NWB Inspector software as opposed to the NWB file. -## Missing DANDI Metadata +## Missing {{ instance.name }} Metadata DANDI has requirements for metadata beyond what is strictly required for NWB validation. 
The following metadata must be present in the NWB file for a successful upload to DANDI:
diff --git a/docs/user-guide-using/accessing-data/downloading.md b/docs/user-guide-using/accessing-data/downloading.md
index b6b0c20c..5885a139 100644
--- a/docs/user-guide-using/accessing-data/downloading.md
+++ b/docs/user-guide-using/accessing-data/downloading.md
@@ -1,13 +1,13 @@
 # Downloading Data
 
-You can download the content of a Dandiset using the DANDI Web application (such a specific file) or entire
-Dandisets using the DANDI Python CLI.
+You can download the content of a Dandiset using the {{ instance.name }} Web application (such as a specific file) or entire
+Dandisets using the {{ instance.name }} Python CLI.
 
-## Using the DANDI Web Application
+## Using the {{ instance.name }} Web Application
 
 Once you have the Dandiset you are interested in (see more in [Exploring Dandisets](../exploring-dandisets.md)), you
 can download the content of the Dandiset. On the landing page of each Dandiset, you can find `Download` button on the right-hand panel. After clicking the
-button, you will see the specific command you can use with DANDI Python CLI (as well as the information on how to download the CLI).
+button, you will see the specific command you can use with {{ instance.name }} Python CLI (as well as the information on how to download the CLI).
 `<dandiset_id>`, `<version>`, and asset `<asset_path>`.
-The `<asset_path>` can be found by selecting the `View asset metadata` icon next to an asset on https://dandiarchive.org and locating the `path` key.
+The `<asset_path>` can be found by selecting the `View asset metadata` icon next to an asset on {{ instance.uri }} and locating the `path` key.
 
     dandi download --preserve-tree dandi://dandi/<dandiset_id>@<version>/<asset_path>
 
@@ -99,13 +99,13 @@ With DataLad, you can:
 Learn more about DataLad from its handbook at <https://handbook.datalad.org>.
-**Developers' note:** DataLad datasets are created using the [dandi/backups2datalad](https://github.com/dandi/backups2datalad/) tool which is also available for use by the community to similarly maintain mirrors of independent DANDI deployments as DataLad datasets. +**Developers' note:** DataLad datasets are created using the [dandi/backups2datalad](https://github.com/dandi/backups2datalad/) tool which is also available for use by the community to similarly maintain mirrors of independent {{ instance.name }} deployments as DataLad datasets. ## Using WebDAV -DANDI provides a [WebDAV](https://en.wikipedia.org/wiki/WebDAV) service at https://webdav.dandiarchive.org/ for accessing the data in the DANDI archive. +DANDI provides a [WebDAV](https://en.wikipedia.org/wiki/WebDAV) service at https://webdav.dandiarchive.org/ for accessing the data in the {{ instance.name }} archive. You can use any WebDAV client or even a web browser to access the data - any dandiset, any version, any file or collection of files. -You can use any web download tool to download the data from the DANDI archive, e.g. +You can use any web download tool to download the data from the {{ instance.name }} archive, e.g. ````commandline wget -r -np -nH --cut-dirs=3 https://webdav.dandiarchive.org/dandisets/000027/releases/0.210831.2033/ @@ -116,4 +116,4 @@ for a download of a specific release `0.210831.2033` of the `000027` dandiset. **Note:** The WebDAV service does not directly serve any file contents; it instead relies on redirects to AWS S3 storage where the contents are stored. You might need to configure your WebDAV client to follow redirects; e.g., for the [davfs2](https://savannah.nongnu.org/projects/davfs2) WebDAV client, set `follow_redirect` to `1` in `/etc/davfs2/davfs2.conf`. -**Developers' note:** The WebDAV service's code is available at https://github.com/dandi/dandidav/ and can also be used for independent DANDI deployments. 
+**Developers' note:** The WebDAV service's code is available at https://github.com/dandi/dandidav/ and can also be used for independent {{ instance.name }} deployments. diff --git a/docs/user-guide-using/accessing-data/external-services.md b/docs/user-guide-using/accessing-data/external-services.md index dea07ffa..47e26b3c 100644 --- a/docs/user-guide-using/accessing-data/external-services.md +++ b/docs/user-guide-using/accessing-data/external-services.md @@ -1,6 +1,6 @@ # External Services -DANDI integrates with various external services to enhance data accessibility and analysis capabilities. This page describes the external services that can be used with DANDI data. +DANDI integrates with various external services to enhance data accessibility and analysis capabilities. This page describes the external services that can be used with {{ instance.name }} data. ## NWB Explorer @@ -8,7 +8,7 @@ DANDI integrates with various external services to enhance data accessibility an ### Using NWB Explorer with DANDI -1. Navigate to a Dandiset on the DANDI Archive +1. Navigate to a Dandiset on the {{ instance.name }} Archive 2. Browse to an NWB file 3. Look for the "Open with NWB Explorer" link next to the file 4. Click the link to open the file in NWB Explorer @@ -26,7 +26,7 @@ NWB Explorer allows you to: ### Using Neurosift with DANDI -1. Navigate to a Dandiset on the DANDI Archive +1. Navigate to a Dandiset on the {{ instance.name }} Archive 2. Browse to an NWB file 3. Look for the "Open with Neurosift" link next to the file 4. Click the link to open the file in Neurosift @@ -47,4 +47,4 @@ DANDI provides a [REST API](https://api.dandiarchive.org/swagger) that can be us - Download assets - And more -For more information on the DANDI API, see the [API documentation](../../api/rest-api.md). +For more information on the {{ instance.name }} API, see the [API documentation](../../api/rest-api.md). 
diff --git a/docs/user-guide-using/accessing-data/index.md b/docs/user-guide-using/accessing-data/index.md index eda21e91..6d859b95 100644 --- a/docs/user-guide-using/accessing-data/index.md +++ b/docs/user-guide-using/accessing-data/index.md @@ -6,12 +6,12 @@ DANDI provides multiple ways to access data stored in the archive. This page pro DANDI offers several methods for accessing data, each suited to different use cases: -1. **Web Interface**: Browse and download individual files directly from the DANDI web application. +1. **Web Interface**: Browse and download individual files directly from the {{ instance.name }} web application. 2. **DANDI CLI**: Command-line tool for downloading entire Dandisets or specific files. 3. **DataLad**: Access Dandisets as Git repositories with DataLad for version control and reproducibility. -4. **WebDAV**: Access DANDI data using standard WebDAV clients. +4. **WebDAV**: Access {{ instance.name }} data using standard WebDAV clients. 5. **DANDI Hub**: Analyze data directly in the cloud using Jupyter notebooks. -6. **Programmatic Access**: Access data programmatically using the DANDI API through Python or other languages. +6. **Programmatic Access**: Access data programmatically using the {{ instance.name }} API through Python or other languages. ## Choosing the Right Access Method @@ -28,8 +28,8 @@ The best method for accessing data depends on your specific needs: When accessing data from DANDI, consider the following: -- **Data Size**: Large datasets may be better accessed using the DANDI CLI or DataLad rather than the web interface. -- **Bandwidth**: For users with limited bandwidth, consider using DANDI Hub to analyze data in the cloud. +- **Data Size**: Large datasets may be better accessed using the {{ instance.name }} CLI or DataLad rather than the web interface. +- **Bandwidth**: For users with limited bandwidth, consider using {{ instance.name }} Hub to analyze data in the cloud. 
- **Reproducibility**: DataLad provides version control and reproducibility features that are valuable for scientific workflows. - **Streaming**: For large files, streaming access may be more efficient than downloading entire files. @@ -37,7 +37,7 @@ When accessing data from DANDI, consider the following: Explore the following pages for detailed information on each access method: -- [Downloading Data](./downloading.md): Learn how to download data using the web interface, DANDI CLI, DataLad, or WebDAV. +- [Downloading Data](./downloading.md): Learn how to download data using the web interface, {{ instance.name }} CLI, DataLad, or WebDAV. - [Streaming Data](./streaming.md): Learn how to stream data without downloading entire files. -- [External Services](./external-services.md): Learn about external services that can be used to access and analyze DANDI data. -- [DANDI Hub](../dandi-hub.md): Learn how to use DANDI Hub for cloud-based analysis. +- [External Services](./external-services.md): Learn about external services that can be used to access and analyze {{ instance.name }} data. +- [DANDI Hub](../dandi-hub.md): Learn how to use {{ instance.name }} Hub for cloud-based analysis. diff --git a/docs/user-guide-using/accessing-data/streaming.md b/docs/user-guide-using/accessing-data/streaming.md index c0ba5620..f5aeece4 100644 --- a/docs/user-guide-using/accessing-data/streaming.md +++ b/docs/user-guide-using/accessing-data/streaming.md @@ -1,6 +1,6 @@ # Streaming Data -Streaming data allows you to access and analyze DANDI data without downloading entire files. This is particularly useful for large datasets where downloading the complete files would be impractical. +Streaming data allows you to access and analyze {{ instance.name }} data without downloading entire files. This is particularly useful for large datasets where downloading the complete files would be impractical. 
## Streaming Methods @@ -12,7 +12,7 @@ Using Python, you can set up data streaming using remfile, fsspec, or ros3. See ### 2. DataLad FUSE Mount -[DataLad FUSE](https://github.com/datalad/datalad-fuse/) allows you to mount DANDI datasets as if they were local files, with data being streamed on-demand when accessed. +[DataLad FUSE](https://github.com/datalad/datalad-fuse/) allows you to mount {{ instance.name }} datasets as if they were local files, with data being streamed on-demand when accessed. ```bash # Install DataLad and DataLad FUSE diff --git a/docs/user-guide-using/citing-dandisets.md b/docs/user-guide-using/citing-dandisets.md index 67630fd7..19e04bc5 100644 --- a/docs/user-guide-using/citing-dandisets.md +++ b/docs/user-guide-using/citing-dandisets.md @@ -3,7 +3,7 @@ If you use a Dandiset in your research, please acknowledge the Dandiset by citing it, just as you would a publication, including the DOI. -The DOI can be found in the Dandiset's landing page on the DANDI Archive website. +The DOI can be found in the Dandiset's landing page on the {{ instance.name }} Archive website. An example formatted citation can also be found on the Dandiset's landing page at the "CITE AS" button. This citation uses the DataCite citation style, which is a widely accepted standard for citing datasets, but you may need to adapt it to the citation style required by the journal you are submitting to. @@ -16,7 +16,7 @@ Citing the Dandiset and other datasets is important because it provides a direct - allows others to better understand and verify your results, and facilitates reproducibility, - connects your work to other research using the same dataset, - provides credit to the data collectors and maintainers, -- helps track the impact of DANDI and other data archives. +- helps track the impact of {{ instance.name }} and other data archives. 
## Data availability statement @@ -24,7 +24,7 @@ It is common for journals to require a Data Availability Statement in the manusc DANDI Archive RRID and the DOI of the Dandiset used in the research. Here is an example of a well formatted Data Availability Statement: -> The data that support the findings of this study are openly available on the DANDI Archive (RRID:SCR_017571) at [DOI of Dandiset] (citation of Dandiset). +> The data that support the findings of this study are openly available on the {{ instance.name }} Archive (RRID:SCR_017571) at [DOI of Dandiset] (citation of Dandiset). **It is important to note that a Data Availability Statement does not replace the need for a full citation in the manuscript's references section.** diff --git a/docs/user-guide-using/dandi-hub.md b/docs/user-guide-using/dandi-hub.md index 8b61f10b..41bd0af0 100644 --- a/docs/user-guide-using/dandi-hub.md +++ b/docs/user-guide-using/dandi-hub.md @@ -1,21 +1,21 @@ -# Using the DANDI Hub +# Using the {{ instance.name }} Hub [DANDI Hub](http://hub.dandiarchive.org) is a [JupyterHub](https://jupyterhub.readthedocs.io) instance in the cloud to interact with the data stored in DANDI, and is free to use for exploratory analysis of data on DANDI. For instructions on how to navigate JupyterHub see this [YouTube tutorial](https://www.youtube.com/watch?v=5pf0_bpNbkw&t=09m20s). -Note that DANDI Hub is not intended for significant computation, but provides a place to introspect Dandisets and to perform some analysis and visualization of data. +Note that {{ instance.name }} Hub is not intended for significant computation, but provides a place to introspect Dandisets and to perform some analysis and visualization of data. ## Registration To use the [DANDI Hub](http://hub.dandiarchive.org), you must first register for an account using the [DANDI website](http://dandiarchive.org). -See the [Create a DANDI Account](../getting-started/creating-account.md) page. 
+See the [Create a {{ instance.name }} Account](../getting-started/creating-account.md) page. ## Choosing a server option -When you start up the DANDI Hub, you will be asked to select across a number of server options. +When you start up the {{ instance.name }} Hub, you will be asked to select across a number of server options. For basic exploration, Tiny or Base would most likely be appropriate. -The DANDI Hub also currently offers Medium and Large options, which have more available memory and compute power. +The {{ instance.name }} Hub also currently offers Medium and Large options, which have more available memory and compute power. The "T4 GPU inference" server comes with an associated T4 GPU, and is intended to be used for applications that require GPU for inference. -We request that users of this server be considerate of their usage of the DANDI Hub as a free community resource. +We request that users of this server be considerate of their usage of the {{ instance.name }} Hub as a free community resource. Training large deep neural networks is not appropriate. A "Base (MATLAB)" server is also available, which provides a MATLAB cloud installation but you would be required to provide your own license. @@ -23,7 +23,7 @@ A "Base (MATLAB)" server is also available, which provides a MATLAB cloud instal DANDI Hub provides two ways to work with Python environments: shared environments managed through conda-store, and individual environments you create with conda in your home directory. -**Shared environments** are managed through conda-store and are available to all DANDI Hub users. +**Shared environments** are managed through conda-store and are available to all {{ instance.name }} Hub users. These environments contain commonly used packages for neurophysiology analysis and are maintained by administrators. 
Use shared environments when: - You need standard analysis tools and packages @@ -111,7 +111,7 @@ pip install package-name ## Custom server image -If you need additional software installed in the image, you can add a server image that will be made available for all users in the `Server Options` menu. Add a server image by following the instructions below and submitting a pull request to the [dandi-hub repository](https://github.com/dandi/dandi-hub). Once the pull request is merged, the DANDI team will redeploy JupyterHub and the image will be available. +If you need additional software installed in the image, you can add a server image that will be made available for all users in the `Server Options` menu. Add a server image by following the instructions below and submitting a pull request to the [dandi-hub repository](https://github.com/dandi/dandi-hub). Once the pull request is merged, the {{ instance.name }} team will redeploy JupyterHub and the image will be available. 1. Fork and clone the [dandi-hub](https://github.com/dandi/dandi-hub) repository. @@ -126,9 +126,9 @@ docker run --rm -p 8888:8888 --name dev_jupyterlab dandihub-dev:latest start-not ## Example notebooks -The best way to share analyses on DANDI data is through the DANDI example notebooks. +The best way to share analyses on {{ instance.name }} data is through the {{ instance.name }} example notebooks. These notebooks are maintained in the [dandi/example-notebooks](https://github.com/dandi/example-notebooks) repository which provides more information about their organization. Dandiset contributors are encouraged to use these notebooks to demonstrate how to read, analyze, and visualize the data, and how to produce figures from associated scientific publications. Notebooks can be added and updated through a pull request to the [dandi/example-notebooks](https://github.com/dandi/example-notebooks) repository. 
-Once the pull request is merged, your contributed notebook will be available to all DANDI Hub users. +Once the pull request is merged, your contributed notebook will be available to all {{ instance.name }} Hub users. diff --git a/docs/user-guide-using/exploring-dandisets.md b/docs/user-guide-using/exploring-dandisets.md index 16b6fe4f..fbecfdfe 100644 --- a/docs/user-guide-using/exploring-dandisets.md +++ b/docs/user-guide-using/exploring-dandisets.md @@ -1,10 +1,10 @@ # Exploring Dandisets -This page explains how to browse, search, and view Dandisets in the DANDI Archive. +This page explains how to browse, search, and view Dandisets in the {{ instance.name }} Archive. ## Browse Dandisets -When you go to the [DANDI Web application](https://dandiarchive.org/), you can click +When you go to the [{{ instance.name }} Web application]({{ instance.uri }}/), you can click on `PUBLIC DANDISET` to access all Dandisets currently available in the archive, and you can sort them by name, identifier, size, or date of modification. 
diff --git a/mkdocs.yml b/mkdocs.yml index 71d4474d..6cd7c7e6 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -112,6 +112,7 @@ plugins: - mkdocs-jupyter: ignore_h1_titles: True include: ["*.ipynb"] + - macros # Customize theme extra: @@ -135,3 +136,6 @@ extra: - icon: fontawesome/brands/bluesky link: https://bsky.app/profile/dandiarchive.org name: Bluesky + instance: + name: DANDI + uri: https://dandiarchive.org diff --git a/requirements.txt b/requirements.txt index 79343af8..9e0c7b44 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,3 +2,4 @@ mkdocs-material>=9.6.3 pymdown-extensions mkdocs-open-in-new-tab mkdocs-jupyter +mkdocs-macros-plugin From 230477f8704c787ee96d5a51f09bf224e39b5907 Mon Sep 17 00:00:00 2001 From: Nicole Tregoning Date: Tue, 21 Oct 2025 14:49:16 -0400 Subject: [PATCH 2/3] Additional vendorization --- docs/api/dandi-client.md | 2 +- docs/api/rest-api.md | 12 +++++----- .../creating-dandi-instance/dandi-archive.md | 10 ++++---- .../creating-dandi-instance/dandi-cli.md | 4 ++-- .../creating-dandi-instance/dandi-hub.md | 4 ++-- .../dandi-infrastructure.md | 4 ++-- .../initialize-vendors.md | 2 +- docs/developer-guide/developer-notes.md | 16 ++++++------- .../integrate-external-services.md | 24 +++++++++---------- docs/developer-guide/system-architecture.md | 4 ++-- docs/getting-started/creating-account.md | 16 ++++++------- docs/getting-started/dandi-ecosystem.md | 10 ++++---- docs/getting-started/data-standards/index.md | 8 +++---- docs/getting-started/data-standards/nwb.md | 6 ++--- docs/index.md | 4 ++-- docs/introduction.md | 6 ++--- docs/terms-policies/policies.md | 2 +- docs/terms-policies/terms.md | 12 +++++----- .../converting-data/index.md | 6 ++--- .../converting-data/nwb/index.md | 8 +++---- .../converting-data/nwb/nwb-guide.md | 10 ++++---- docs/user-guide-sharing/creating-dandiset.md | 8 +++---- docs/user-guide-sharing/dandiset-metadata.md | 2 +- docs/user-guide-sharing/data-licenses.md | 2 +- .../publishing-dandisets.md | 2 +- 
docs/user-guide-sharing/uploading-data.md | 14 +++++------ docs/user-guide-sharing/validating-files.md | 12 +++++----- .../accessing-data/downloading.md | 10 ++++---- .../accessing-data/external-services.md | 8 +++---- docs/user-guide-using/accessing-data/index.md | 18 +++++++------- .../accessing-data/streaming.md | 4 ++-- docs/user-guide-using/citing-dandisets.md | 2 +- docs/user-guide-using/dandi-hub.md | 8 +++---- docs/user-guide-using/exploring-dandisets.md | 2 +- mkdocs.yml | 4 ++++ 35 files changed, 135 insertions(+), 131 deletions(-) diff --git a/docs/api/dandi-client.md b/docs/api/dandi-client.md index 4b9875f3..9a0672fa 100644 --- a/docs/api/dandi-client.md +++ b/docs/api/dandi-client.md @@ -56,7 +56,7 @@ dandi validate path/to/files # Organize data for upload dandi organize path/to/files -# Upload data to DANDI +# Upload data to {{ instance.name }} dandi upload ``` diff --git a/docs/api/rest-api.md b/docs/api/rest-api.md index 367c6523..fabcdad9 100644 --- a/docs/api/rest-api.md +++ b/docs/api/rest-api.md @@ -6,8 +6,8 @@ The {{ instance.name }} Archive provides a RESTful API that allows programmatic The {{ instance.name }} API is available at: -- Production: [https://api.dandiarchive.org/](https://api.dandiarchive.org/) -- Sandbox: [https://api.sandbox.dandiarchive.org/](https://api.sandbox.dandiarchive.org/) +- Production: [{{ instance.api }}/]({{ instance.api }}/) +- Sandbox: [{{ instance.sandbox_api }}/]({{ instance.sandbox_api }}/) ## API Documentation @@ -15,8 +15,8 @@ The API documentation is available in three formats: ### Swagger UI -- Production: [https://api.dandiarchive.org/swagger/](https://api.dandiarchive.org/swagger/) -- Sandbox: [https://api.sandbox.dandiarchive.org/swagger/](https://api.sandbox.dandiarchive.org/swagger/) +- Production: [{{ instance.api }}/swagger/]({{ instance.api }}/swagger/) +- Sandbox: [{{ instance.sandbox_api }}/swagger/]({{ instance.sandbox_api }}/swagger/) The Swagger UI allows you to: @@ -27,8 +27,8 @@ The Swagger 
UI allows you to: ### ReDoc -- Production: [https://api.dandiarchive.org/redoc/](https://api.dandiarchive.org/redoc/) -- Sandbox: [https://api.sandbox.dandiarchive.org/redoc/](https://api.sandbox.dandiarchive.org/redoc/) +- Production: [{{ instance.api }}/redoc/]({{ instance.api }}/redoc/) +- Sandbox: [{{ instance.sandbox_api }}/redoc/]({{ instance.sandbox_api }}/redoc/) The ReDoc interface provides: diff --git a/docs/developer-guide/creating-dandi-instance/dandi-archive.md b/docs/developer-guide/creating-dandi-instance/dandi-archive.md index 91c92fe6..9160a954 100644 --- a/docs/developer-guide/creating-dandi-instance/dandi-archive.md +++ b/docs/developer-guide/creating-dandi-instance/dandi-archive.md @@ -1,4 +1,4 @@ -This step assumes that you have completed all steps in: [Initialize Vendors](./initialize-vendors.md) & [DANDI Infrastructure](./dandi-infrastructure.md). +This step assumes that you have completed all steps in: [Initialize Vendors](./initialize-vendors.md) & [{{ instance.name }} Infrastructure](./dandi-infrastructure.md). ## Initial Steps @@ -15,7 +15,7 @@ To see how your code would translate into a new `Heroku` release, [see the GitHu Heroku initializes compute on servers (known as `dynos` in Heroku land). Each `dyno` that you have runs a process. Which process, the resources allocated to that process, and how that process is run, is defined in a `Procfile`. -DANDI Archive defines a [Procfile](https://github.com/dandi/dandi-archive/blob/master/Procfile). In this `Procfile`, +{{ instance.name }} Archive defines a [Procfile](https://github.com/dandi/dandi-archive/blob/master/Procfile). In this `Procfile`, you'll see several entries: - `release`: a command that is run each time a new version of {{ instance.name }} API is pushed to Heroku. 
@@ -24,9 +24,9 @@ you'll see several entries: - `checksum-worker`: another worker, also using `celery`, that specifically calculates if a new file pushed to {{ instance.name }} Archive is new/updated, and determines what exactly has been changed. - `analytics-worker`: another `celery` worker that handles all tasks related to processing of S3-related logs. -This `Procfile` shouldn't need to be changed or reconfigured much for a DANDI-clone; however, it is important to note so that one may understand how {{ instance.name }} Archive is working. +This `Procfile` shouldn't need to be changed or reconfigured much for a {{ instance.name }}-clone; however, it is important to note so that one may understand how {{ instance.name }} Archive is working. -For information on the resource allocation of `dynos` in {{ instance.name }} Archive, please reference the [DANDI Infrastructure Docs](./dandi-infrastructure.md). +For information on the resource allocation of `dynos` in {{ instance.name }} Archive, please reference the [{{ instance.name }} Infrastructure Docs](./dandi-infrastructure.md). ## Understanding metrics and logging via Heroku @@ -157,7 +157,7 @@ class HerokuProductionConfiguration(DandiMixin, HerokuProductionBaseConfiguratio ## Approval of Users -By default in DANDI, only users whose emails end in `.edu` are automatically approved -- [for code reference, see here](https://github.com/dandi/dandi-archive/blob/6e72653688a6b45066c04b94a44f830b734887dd/dandiapi/api/views/auth.py#L127). +By default in {{ instance.name }}, only users whose emails end in `.edu` are automatically approved -- [for code reference, see here](https://github.com/dandi/dandi-archive/blob/6e72653688a6b45066c04b94a44f830b734887dd/dandiapi/api/views/auth.py#L127). For all other users, proceed into the Django Admin panel. 
diff --git a/docs/developer-guide/creating-dandi-instance/dandi-cli.md b/docs/developer-guide/creating-dandi-instance/dandi-cli.md index 259f42cb..57892f79 100644 --- a/docs/developer-guide/creating-dandi-instance/dandi-cli.md +++ b/docs/developer-guide/creating-dandi-instance/dandi-cli.md @@ -3,7 +3,7 @@ The {{ instance.name }} Client provides both a command line interface (CLI) and ## Update the {{ instance.name }} Client to reference your API -To reference your DANDI-clone API, [update the URLs reference per each CLI action](https://github.com/dandi/dandi-cli/blob/15196a93310618f8897c7b43444e216bbb094549/dandi/consts.py#L119-L135) and push a PR to the [dandi-cli GitHub repository](https://github.com/dandi/dandi-cli). +To reference your {{ instance.name }}-clone API, [update the URLs reference per each CLI action](https://github.com/dandi/dandi-cli/blob/15196a93310618f8897c7b43444e216bbb094549/dandi/consts.py#L119-L135) and push a PR to the [dandi-cli GitHub repository](https://github.com/dandi/dandi-cli). Here is an [example PR](https://github.com/dandi/dandi-cli/pull/1527) of another clone adding to the available instances of `DandiInstance` objects in the `dandi-cli`. @@ -14,7 +14,7 @@ known_instances = { "dandi": DandiInstance( "dandi", "{{ instance.uri }}", - "https://api.dandiarchive.org/api", + "{{ instance.api }}/api", ), "dandi-sandbox": DandiInstance( "dandi-sandbox", diff --git a/docs/developer-guide/creating-dandi-instance/dandi-hub.md b/docs/developer-guide/creating-dandi-instance/dandi-hub.md index cae55de0..69ee9bbd 100644 --- a/docs/developer-guide/creating-dandi-instance/dandi-hub.md +++ b/docs/developer-guide/creating-dandi-instance/dandi-hub.md @@ -2,13 +2,13 @@ The {{ instance.name }} ecosystem includes a self-hosted Jupyter notebook servic that provides different instance types for users to efficiently interact with data in the {{ instance.name }} Archive. 
The instructions for configuring and deploying your own JupyterHub instance are available in the [dandi-hub repository](https://github.com/dandi/dandi-hub) (see [README](https://github.com/dandi/dandi-hub/blob/main/README.md#dandihub)). -For example configurations that have been previously generated for the DANDI, LINC, and BICAN projects see the [envs directory](https://github.com/dandi/dandi-hub/tree/main/envs). +For example configurations that have been previously generated for the DANDI, LINC, and BICAN projects see the [envs directory](https://github.com/dandi/dandi-hub/tree/main/envs). **Note: it is important that your k8s cluster is in the same region as your data.** Resources • [Source code and instructions](https://github.com/dandi/dandi-hub) -• [DANDI Hub](https://hub.dandiarchive.org/) +• [DANDI Hub](https://hub.dandiarchive.org/) • [LINC Hub](https://hub.lincbrain.org/) \ No newline at end of file diff --git a/docs/developer-guide/creating-dandi-instance/dandi-infrastructure.md b/docs/developer-guide/creating-dandi-instance/dandi-infrastructure.md index d5eeec29..257b51c8 100644 --- a/docs/developer-guide/creating-dandi-instance/dandi-infrastructure.md +++ b/docs/developer-guide/creating-dandi-instance/dandi-infrastructure.md @@ -65,9 +65,9 @@ A `sponsored bucket` is also declared in the `main.tf`, with downstream, related ## Domain Management -**DANDI Infrastructure assumes that you 1. own a domain, and 2. have purchased that domain (or have that domain managed) via AWS Route 53** +**{{ instance.name }} Infrastructure assumes that you 1. own a domain, and 2. have purchased that domain (or have that domain managed) via AWS Route 53** -DANDI Infrastructure connects domains from three different vendors: +{{ instance.name }} Infrastructure connects domains from three different vendors: - **Netlify**: Manages load balancer IPs and custom domains for the UI. 
- **AWS Route 53**: Manages CNAME records for SSL certificates and links Heroku API URLs to domains. diff --git a/docs/developer-guide/creating-dandi-instance/initialize-vendors.md b/docs/developer-guide/creating-dandi-instance/initialize-vendors.md index 532ff067..98b1a092 100644 --- a/docs/developer-guide/creating-dandi-instance/initialize-vendors.md +++ b/docs/developer-guide/creating-dandi-instance/initialize-vendors.md @@ -387,7 +387,7 @@ For your account, the free `Developer` account should be sufficient, as defined ##### Select Django as an App Type -DANDI Archive API is built as a Django app -- so proceed to select `Django` on the following screen: +{{ instance.name }} Archive API is built as a Django app -- so proceed to select `Django` on the following screen:

dandi/dandi-archive"] dandi_archive_db[("PostgresDB")] - dandi_archive_backend@{ label: "api.dandiarchive.org
(Backend/API)" } + dandi_archive_backend@{ label: "api.dandiarchive.org
(Backend/API)" } dandi_archive_frontend@{ label: "www.dandiarchive.org
(Frontend/Web UI)" } meditor["Meditor
(vjsf-based web form)"] dandi_archive_validate[/"Celery task to validate
dandiset and asset metadata"/] @@ -66,14 +66,14 @@ flowchart TD ## Integration Methods -There are several ways to integrate external services with DANDI: +There are several ways to integrate external services with {{ instance.name }}: ### 1. REST API Integration The {{ instance.name }} Archive provides a comprehensive REST API that allows external services to interact with the archive programmatically. The API documentation is available at: -- [Swagger UI](https://api.dandiarchive.org/swagger) -- [ReDoc](https://api.dandiarchive.org/redoc) +- [Swagger UI]({{ instance.api }}/swagger) +- [ReDoc]({{ instance.api }}/redoc) Key API endpoints include: @@ -86,7 +86,7 @@ Authentication is required for write operations and is handled via API keys. Rea ### 2. Python Client Integration -For Python applications, the [DANDI Python client](https://github.com/dandi/dandi-cli) provides a convenient way to interact with the {{ instance.name }} Archive: +For Python applications, the [{{ instance.name }} Python client](https://github.com/dandi/dandi-cli) provides a convenient way to interact with the {{ instance.name }} Archive: ```python from dandi.dandiapi import DandiAPIClient @@ -109,7 +109,7 @@ asset.download("local_file.nwb") ### 3. WebDAV Integration -DANDI provides a [WebDAV](https://en.wikipedia.org/wiki/WebDAV) service at https://webdav.dandiarchive.org/ that allows external services to access {{ instance.name }} data using standard WebDAV clients: +{{ instance.name }} provides a [WebDAV](https://en.wikipedia.org/wiki/WebDAV) service at https://webdav.dandiarchive.org/ that allows external services to access {{ instance.name }} data using standard WebDAV clients: ```python import requests @@ -120,7 +120,7 @@ response = requests.get("https://webdav.dandiarchive.org/dandisets/000123/draft/ ### 4. Custom Visualization Services -To integrate a custom visualization service with DANDI: +To integrate a custom visualization service with {{ instance.name }}: 1. 
Create a service that can accept a URL to an NWB file 2. Register your service with the {{ instance.name }} team @@ -130,8 +130,8 @@ For example, NWB Explorer is integrated this way, allowing users to visualize NW ## Getting Help -If you need assistance integrating your service with DANDI, you can: +If you need assistance integrating your service with {{ instance.name }}, you can: -1. Open an issue on the [DANDI helpdesk](https://github.com/dandi/helpdesk/issues) -2. Contact the {{ instance.name }} team at help@dandiarchive.org +1. Open an issue on the [{{ instance.name }} helpdesk](https://github.com/dandi/helpdesk/issues) +2. Contact the {{ instance.name }} team at help@{{ instance.domain }} 3. Join the {{ instance.name }} Slack workspace (available to registered {{ instance.name }} users) diff --git a/docs/developer-guide/system-architecture.md b/docs/developer-guide/system-architecture.md index edd74799..e41245a7 100644 --- a/docs/developer-guide/system-architecture.md +++ b/docs/developer-guide/system-architecture.md @@ -1,6 +1,6 @@ # System Architecture -This page provides a high-level view of how DANDI's core components fit together in a typical "full stack" deployment. +This page provides a high-level view of how {{ instance.name }}'s core components fit together in a typical "full stack" deployment. ## The Big Picture @@ -20,7 +20,7 @@ src="../../img/client_requests.jpg" alt="client_requests" style="width: 90%; height: auto; display: block; margin-left: auto; margin-right: auto;"/> -* The user (or script) interacts with the **Web UI** or the **DANDI CLI**. +* The user (or script) interacts with the **Web UI** or the **{{ instance.name }} CLI**. * The **Web UI** calls into the **API** (over HTTPS). * The **API** queries or updates metadata in its Postgres DB (hosted on Heroku). * The **API** calls AWS S3 to read/write {{ instance.name }} assets. 
diff --git a/docs/getting-started/creating-account.md b/docs/getting-started/creating-account.md index 05fed10a..9cdf0487 100644 --- a/docs/getting-started/creating-account.md +++ b/docs/getting-started/creating-account.md @@ -4,26 +4,26 @@ A {{ instance.name }} account enhances your capabilities within the {{ instance. Without an account, users can freely search, view, and download available datasets. With an account, users can create and edit Dandisets, and use the {{ instance.name }} Hub to analyze data. -DANDI provides two servers: +{{ instance.name }} provides two servers: - **Main server**: [{{ instance.uri }}/]({{ instance.uri }}/) - This is the primary platform for most users. -- **Sandbox server**: [https://sandbox.dandiarchive.org/](https://sandbox.dandiarchive.org/) - Ideal for training and testing purposes. +- **Sandbox server**: [{{ instance.sandbox_uri }}/]({{ instance.sandbox_uri }}/) - Ideal for training and testing purposes. Accounts are independently managed on each server, allowing users to register on one or both, depending on their testing and deployment needs. -DANDI is freely accessible to the neuroscience research community. +{{ instance.name }} is freely accessible to the neuroscience research community. Membership is usually granted automatically to GitHub accounts with a `.edu` or similar academic email. If your registration is denied: -- With an academic email not linked to your GitHub, please contact [help@dandiarchive.org](mailto:help@dandiarchive.org) for assistance using this email address. -- Without an academic email, account approval is still possible under specific circumstances. Appeal decisions at [help@dandiarchive.org](mailto:help@dandiarchive.org). +- With an academic email not linked to your GitHub, please contact [help@{{ instance.domain }}](mailto:help@{{ instance.domain }}) for assistance using this email address. +- Without an academic email, account approval is still possible under specific circumstances. 
Appeal decisions at [help@{{ instance.domain }}](mailto:help@{{ instance.domain }}). ## How to Register for a {{ instance.name }} Account 1. **Create a GitHub Account**: If not already a GitHub user, [sign up here](https://github.com/). -2. **Register on DANDI**: Navigate to the [DANDI homepage]({{ instance.uri }}) and click the `LOG IN WITH GITHUB` button to register using your GitHub account. +2. **Register on {{ instance.name }}**: Navigate to the [{{ instance.name }} homepage]({{ instance.uri }}) and click the `LOG IN WITH GITHUB` button to register using your GitHub account. 3. **Confirmation of Review**: Post-registration, you will receive an email confirming that your account is under review. Your request will be reviewed within 24 hours. - **Note**: Reviews may extend beyond 24 hours for new GitHub accounts or non-.edu email addresses, particularly if the registration does not describe immediate plans to contribute data. -4. **Accessing DANDI**: Upon approval, access {{ instance.name }} by logging in through the `LOG IN WITH GITHUB` button. +4. **Accessing {{ instance.name }}**: Upon approval, access {{ instance.name }} by logging in through the `LOG IN WITH GITHUB` button. -For support or further inquiries, reach out to [help@dandiarchive.org](mailto:help@dandiarchive.org). +For support or further inquiries, reach out to [help@{{ instance.domain }}](mailto:help@{{ instance.domain }}). 
diff --git a/docs/getting-started/dandi-ecosystem.md b/docs/getting-started/dandi-ecosystem.md index 36e5fbc4..c314eb80 100644 --- a/docs/getting-started/dandi-ecosystem.md +++ b/docs/getting-started/dandi-ecosystem.md @@ -16,11 +16,11 @@ The {{ instance.name }} project is organized around several **GitHub** repositor | Repository | Description | |----------|----------| -| [DANDI Archive](https://github.com/dandi/dandi-archive) | Contains the code for deploying the client-side Web application frontend based on the [Vue.js](https://vuejs.org/) framework as well as a Django-based backend to run the {{ instance.name }} REST API. -| [DANDI JupyterHub](https://github.com/dandi/dandi-hub) | Contains the code for deploying a JupyterHub instance to support interaction with the {{ instance.name }} archive. -| [DANDI Python client](https://github.com/dandi/dandi-cli) | Contains the code for the command line tool used to interact with the archive. It allows you to download data from the archive. It also allows you to locally organize and validate your data before uploading to the archive. -| [DANDI Docs](https://github.com/dandi/dandi-docs) | Provides the contents of this website. +| [{{ instance.name }} Archive](https://github.com/dandi/dandi-archive) | Contains the code for deploying the client-side Web application frontend based on the [Vue.js](https://vuejs.org/) framework as well as a Django-based backend to run the {{ instance.name }} REST API. +| [{{ instance.name }} JupyterHub](https://github.com/dandi/dandi-hub) | Contains the code for deploying a JupyterHub instance to support interaction with the {{ instance.name }} archive. +| [{{ instance.name }} Python client](https://github.com/dandi/dandi-cli) | Contains the code for the command line tool used to interact with the archive. It allows you to download data from the archive. It also allows you to locally organize and validate your data before uploading to the archive. 
+| [{{ instance.name }} Docs](https://github.com/dandi/dandi-docs) | Provides the contents of this website. | [helpdesk](https://github.com/dandi/helpdesk) | Contains our community help platform where you can submit [issues](https://github.com/dandi/helpdesk/issues/new/choose). | [schema](https://github.com/dandi/schema) | Provides the details and some supporting code for the {{ instance.name }} metadata schema. | [schema Python library](https://github.com/dandi/dandi-schema) | Provides a Python library for updating the schema and for creating and validating {{ instance.name }} objects. -| [DANDI About website](https://github.com/dandi/dandi-about) | Provides an overview of the {{ instance.name }} project and the team members and collaborators. | +| [{{ instance.name }} About website](https://github.com/dandi/dandi-about) | Provides an overview of the {{ instance.name }} project and the team members and collaborators. | diff --git a/docs/getting-started/data-standards/index.md b/docs/getting-started/data-standards/index.md index 22e9b6e3..500e02a2 100644 --- a/docs/getting-started/data-standards/index.md +++ b/docs/getting-started/data-standards/index.md @@ -1,15 +1,15 @@ # Data Standards -DANDI requires uploaded data to adhere to community data standards. +{{ instance.name }} requires uploaded data to adhere to community data standards. These standards help data curators package all the necessary metadata and provide a uniform structure so that data can be more easily understood and reused by future users. -DANDI also leverages these standards to provide features like data validation and automatic metadata extraction and search. -DANDI currently supports two data standards: +{{ instance.name }} also leverages these standards to provide features like data validation and automatic metadata extraction and search. 
+{{ instance.name }} currently supports two data standards: * For cellular neurophysiology, such as electrophysiology and optical physiology, use [Neurodata Without Borders (NWB)](https://www.nwb.org/nwb-neurophysiology/) * For neuroimaging data, such as MRI, use [Brain Imaging Data Structure (BIDS)](https://bids.neuroimaging.io/) For microscopy data from immunostaining, we are using the [BIDS extension for microscopy](https://bids-specification.readthedocs.io/en/stable/04-modality-specific-files/10-microscopy.html). -To share data on DANDI, you will first need to convert your data to an appropriate standard. +To share data on {{ instance.name }}, you will first need to convert your data to an appropriate standard. If you would like help determining which standard is most appropriate for your data, do not hesitate to reach out using the [dandi helpdesk](https://github.com/dandi/helpdesk/discussions/new) and we would be happy to assist. ## Neurodata Without Borders (NWB) diff --git a/docs/getting-started/data-standards/nwb.md b/docs/getting-started/data-standards/nwb.md index 9f33ff98..376efe5d 100644 --- a/docs/getting-started/data-standards/nwb.md +++ b/docs/getting-started/data-standards/nwb.md @@ -12,12 +12,12 @@ The NWB team supports APIs in Python ([PyNWB](https://pynwb.readthedocs.io/)) an The best way to get help from the NWB community is through the [NWB user Slack channel](https://nwb-users.slack.com/). -## Using NWB with DANDI +## Using NWB with {{ instance.name }} -DANDI is designed to work seamlessly with NWB files. When you upload NWB files to DANDI: +{{ instance.name }} is designed to work seamlessly with NWB files. When you upload NWB files to {{ instance.name }}: 1. The files are validated to ensure they conform to the NWB standard 2. Metadata is automatically extracted to make your data more discoverable 3. 
The data can be accessed programmatically through the {{ instance.name }} API -For more information on validating NWB files for DANDI, see the [Validating NWB Files](../../user-guide-sharing/validating-files.md) section. +For more information on validating NWB files for {{ instance.name }}, see the [Validating NWB Files](../../user-guide-sharing/validating-files.md) section. diff --git a/docs/index.md b/docs/index.md index 46998b49..6568dc5a 100644 --- a/docs/index.md +++ b/docs/index.md @@ -15,7 +15,7 @@ it tries to solve, check out the [Introduction](./introduction.md). To start using the archive, head over to the User Guide sections for [Sharing Data](./user-guide-sharing/creating-dandiset.md) or [Using Data](./user-guide-using/exploring-dandisets.md). -If are a developer and want to know how the project is organized, check out the [DANDI Ecosystem](./getting-started/dandi-ecosystem.md) +If you are a developer and want to know how the project is organized, check out the [{{ instance.name }} Ecosystem](./getting-started/dandi-ecosystem.md) page in the Getting Started section. @@ -31,7 +31,7 @@ and use the tag [dandi](https://neurostars.org/tag/dandi). {{ instance.name }} using GitHub]({{ instance.uri }}/) (this registration is required to upload data or to use the {{ instance.name }} JupyterHub). See [here for details on how to register](./getting-started/creating-account.md). -- Email us: [info@dandiarchive.org](mailto: info@dandiarchive.org) +- Email us: [info@{{ instance.domain }}](mailto:info@{{ instance.domain }}) ## Contributing and Feedback diff --git a/docs/introduction.md b/docs/introduction.md index 6ac67dd9..bd4cf939 100644 --- a/docs/introduction.md +++ b/docs/introduction.md @@ -1,8 +1,8 @@ # Introduction -## What is DANDI? +## What is {{ instance.name }}? -DANDI is: +{{ instance.name }} is: - An open data archive to submit neurophysiology data for electrophysiology, optophysiology, and behavioral time-series, and images from immunostaining experiments. 
@@ -10,7 +10,7 @@ optophysiology, and behavioral time-series, and images from immunostaining exper - A place to house data to collaborate across research sites. - Supported by the BRAIN Initiative and the AWS Public dataset programs. -DANDI provides significant benefits: +{{ instance.name }} provides significant benefits: - A [FAIR (Findable, Accessible, Interoperable, Reusable)](https://www.force11.org/group/fairgroup/fairprinciples) data archive to house standardized neurophysiology and associated data. - Rich metadata to support search across data. diff --git a/docs/terms-policies/policies.md b/docs/terms-policies/policies.md index 94589293..e1dc6132 100644 --- a/docs/terms-policies/policies.md +++ b/docs/terms-policies/policies.md @@ -7,7 +7,7 @@ agreements for data collected from human subjects. - **Status of research data:** Empirical (not simulated) data and associated metadata from any stage of the research study's life cycle is accepted. Simulated data is handled on a case-by-case basis, contact the {{ instance.name }} team -- **Eligible users:** Anyone working with the data in the scope of the archive may register as a user of DANDI. All users are +- **Eligible users:** Anyone working with the data in the scope of the archive may register as a user of {{ instance.name }}. All users are allowed to deposit content for which they possess the appropriate rights and which falls within the **scope** of the archive. 
- **Ownership:** By uploading content, no change of ownership is implied and no diff --git a/docs/terms-policies/terms.md b/docs/terms-policies/terms.md index 51150f00..d9933d4a 100644 --- a/docs/terms-policies/terms.md +++ b/docs/terms-policies/terms.md @@ -1,14 +1,14 @@ # Terms of Use v1.0.1 -The {{ instance.name }} data archive ("DANDI") is offered by the {{ instance.name }} project as part of its +The {{ instance.name }} data archive ("{{ instance.name }}") is offered by the {{ instance.name }} project as part of its mission to make available the results of its work. -Use of DANDI, both the uploading and downloading of data, denotes agreement with +Use of {{ instance.name }}, both the uploading and downloading of data, denotes agreement with the following terms: 1. {{ instance.name }} is an open dissemination research data repository for the preservation and making available of research, educational and informational content. Access - to DANDI's content is open to all. + to {{ instance.name }}'s content is open to all. 1. Content may be uploaded free of charge by the US BRAIN Initiative and other projects required to submit data to a public archive and those without ready @@ -21,10 +21,10 @@ the following terms: terms and applicable laws, including, but not limited to, privacy, data protection and intellectual property rights [1]. In addition, where data that was originally sensitive personal data is being uploaded for open dissemination - through DANDI, the uploader shall ensure that such data is either anonymized + through {{ instance.name }}, the uploader shall ensure that such data is either anonymized to an appropriate degree or fully consent cleared [2]. -1. Access to DANDI, and all content, is provided on an "as-is" basis. Users of +1. Access to {{ instance.name }}, and all content, is provided on an "as-is" basis. Users of content ("Users") shall respect applicable license conditions. 
Download and use of content from {{ instance.name }} does not transfer any intellectual property rights in the content to the User. @@ -54,7 +54,7 @@ the following terms: [1] [2] See further the user pages regarding uploading for information on anonymization of datasets that contain sensitive personal information. -If you have any questions or comments with respect to DANDI, or if you are unsure +If you have any questions or comments with respect to {{ instance.name }}, or if you are unsure whether your intended use is in line with these Terms of Use, or if you seek permission for a use that does not fall within these Terms of Use, please [contact us](https://github.com/dandi/helpdesk/issues/new/choose). diff --git a/docs/user-guide-sharing/converting-data/index.md b/docs/user-guide-sharing/converting-data/index.md index 3f0202d2..681abc77 100644 --- a/docs/user-guide-sharing/converting-data/index.md +++ b/docs/user-guide-sharing/converting-data/index.md @@ -1,11 +1,11 @@ # Standardizing Data -Data contributed to {{ instance.name }} must be standardized into one of the formats accepted by DANDI. +Data contributed to {{ instance.name }} must be standardized into one of the formats accepted by {{ instance.name }}. Most of the data on {{ instance.name }} is in Neurodata Without Borders (NWB), a data standard designed for sharing data from neurophysiology experiments. -See [Converting data to NWB](./nwb/index.md) for guidance in how to convert your data to NWB and publish on DANDI. +See [Converting data to NWB](./nwb/index.md) for guidance in how to convert your data to NWB and publish on {{ instance.name }}. -DANDI also supports the [Brain Imaging Data Structure (BIDS)](https://bids.neuroimaging.io/). For more information, see: +{{ instance.name }} also supports the [Brain Imaging Data Structure (BIDS)](https://bids.neuroimaging.io/). 
For more information, see: - [Getting Started with BIDS](https://bids.neuroimaging.io/getting_started/index.html) - [BIDS converters](https://bids.neuroimaging.io/tools/converters.html) for conversion of raw data to a BIDS dataset - [BIDS specification](https://bids-specification.readthedocs.io) diff --git a/docs/user-guide-sharing/converting-data/nwb/index.md b/docs/user-guide-sharing/converting-data/nwb/index.md index 9ecc7f13..f3be4add 100644 --- a/docs/user-guide-sharing/converting-data/nwb/index.md +++ b/docs/user-guide-sharing/converting-data/nwb/index.md @@ -1,6 +1,6 @@ # Converting Data to NWB -Before uploading data to DANDI, you need to convert it to the NWB (Neurodata Without Borders) format. This page provides an overview of the conversion process and available tools. +Before uploading data to {{ instance.name }}, you need to convert it to the NWB (Neurodata Without Borders) format. This page provides an overview of the conversion process and available tools. ## Why Convert to NWB? @@ -19,7 +19,7 @@ We suggest beginning the conversion process using only a small amount of data so Several tools are available to help you convert your data to NWB format: -1. **[NWB Graphical User Interface for Data Entry (GUIDE)](https://nwb-guide.readthedocs.io/en/stable/)** is a cross-platform desktop application for converting data from common proprietary formats to NWB and uploading it to DANDI. +1. **[NWB Graphical User Interface for Data Entry (GUIDE)](https://nwb-guide.readthedocs.io/en/stable/)** is a cross-platform desktop application for converting data from common proprietary formats to NWB and uploading it to {{ instance.name }}. 2. **[NeuroConv](https://neuroconv.readthedocs.io/)** is a Python library that automates conversion to NWB from a variety of popular formats. See the [Conversion Gallery](https://neuroconv.readthedocs.io/en/main/conversion_examples_gallery/index.html) for example conversion scripts. 
@@ -35,5 +35,5 @@ Converting data to NWB can be challenging, especially for complex datasets or fo After converting your data to NWB, you should: -1. [Validate your NWB files](../../validating-files.md) to ensure they meet DANDI's requirements -2. [Upload your data to DANDI](../../uploading-data.md) +1. [Validate your NWB files](../../validating-files.md) to ensure they meet {{ instance.name }}'s requirements +2. [Upload your data to {{ instance.name }}](../../uploading-data.md) diff --git a/docs/user-guide-sharing/converting-data/nwb/nwb-guide.md b/docs/user-guide-sharing/converting-data/nwb/nwb-guide.md index e790eeef..85ede8e8 100644 --- a/docs/user-guide-sharing/converting-data/nwb/nwb-guide.md +++ b/docs/user-guide-sharing/converting-data/nwb/nwb-guide.md @@ -1,6 +1,6 @@ # NWB GUIDE -The [NWB GUIDE](https://nwb-guide.readthedocs.io/en/stable/) (Graphical User Interface for Data Entry) is a cross-platform desktop application for converting data from common proprietary formats to NWB and uploading it to DANDI. +The [NWB GUIDE](https://nwb-guide.readthedocs.io/en/stable/) (Graphical User Interface for Data Entry) is a cross-platform desktop application for converting data from common proprietary formats to NWB and uploading it to {{ instance.name }}. ## Overview @@ -8,7 +8,7 @@ NWB GUIDE provides a user-friendly interface for: - Converting data to NWB format - Validating NWB files -- Uploading data to DANDI +- Uploading data to {{ instance.name }} - Managing Dandisets This tool is particularly useful for users who prefer a graphical interface over command-line tools. @@ -25,8 +25,8 @@ This tool is particularly useful for users who prefer a graphical interface over - **Intuitive Interface**: Easy-to-use graphical interface for managing the entire workflow from conversion to publication. - **Format Support**: Supports conversion from many common neurophysiology data formats. -- **Validation**: Built-in validation to ensure your NWB files meet DANDI's requirements. 
-- **DANDI Integration**: Direct upload to {{ instance.name }} without needing to use the command line. +- **Validation**: Built-in validation to ensure your NWB files meet {{ instance.name }}'s requirements. +- **{{ instance.name }} Integration**: Direct upload to {{ instance.name }} without needing to use the command line. ## When to Use NWB GUIDE @@ -35,6 +35,6 @@ NWB GUIDE is ideal for: - Users who prefer graphical interfaces over command-line tools - Automatically applying best practices for archival storage - Quick validation and inspection of NWB files -- Straightforward uploads to DANDI +- Straightforward uploads to {{ instance.name }} For more complex conversion needs, you might consider [NeuroConv](./neuroconv.md) or direct use of [PyNWB and MatNWB](./pynwb-matnwb.md). diff --git a/docs/user-guide-sharing/creating-dandiset.md b/docs/user-guide-sharing/creating-dandiset.md index 5d9d9884..5f86c072 100644 --- a/docs/user-guide-sharing/creating-dandiset.md +++ b/docs/user-guide-sharing/creating-dandiset.md @@ -1,6 +1,6 @@ # Creating a Dandiset -This page provides instructions for creating a new Dandiset on DANDI. +This page provides instructions for creating a new Dandiset on {{ instance.name }}. ## Prerequisites @@ -9,14 +9,14 @@ Before creating a Dandiset, you should: 1. **Register for {{ instance.name }} and obtain an API key.** To create a new Dandiset, you need to have a {{ instance.name }} account. * If you do not already have an account, see [Create a {{ instance.name }} Account](../getting-started/creating-account.md) page for instructions. * Once you are logged in, copy your API key by clicking on your user initials in the top-right corner after logging in. - * Production ({{ instance.uri }}) and sandbox (https://sandbox.dandiarchive.org) servers have different API keys and different logins. + * Production ({{ instance.uri }}) and sandbox ({{ instance.sandbox_uri }}) servers have different API keys and different logins. 2. 
**Choose a server.** * **Production server**: {{ instance.uri }}. This is the main server for {{ instance.name }} and should be used for sharing neuroscience data. When you create a Dandiset, a permanent ID is automatically assigned to it. This Dandiset can be fully public or embargoed according to NIH policy. All data are uploaded as draft and can be adjusted before publishing on the production server. - * **Development server**: https://sandbox.dandiarchive.org. This server is for testing and learning how to use DANDI. + * **Development server**: {{ instance.sandbox_uri }}. This server is for testing and learning how to use {{ instance.name }}. It is not recommended for sharing data, but is recommended for testing the {{ instance.name }} CLI and GUI or as a testing platform for developers. Note that the development server should not be used to stage your data. @@ -37,5 +37,5 @@ After creating your Dandiset, you'll need to: 1. [Convert your data to NWB format](./converting-data/index.md) 2. [Validate your NWB files](./validating-files.md) -3. [Upload your data to DANDI](./uploading-data.md) +3. [Upload your data to {{ instance.name }}](./uploading-data.md) 4. [Publish your Dandiset](./publishing-dandisets.md) when you're ready to share it with the community diff --git a/docs/user-guide-sharing/dandiset-metadata.md b/docs/user-guide-sharing/dandiset-metadata.md index 8456a84b..38ca815c 100644 --- a/docs/user-guide-sharing/dandiset-metadata.md +++ b/docs/user-guide-sharing/dandiset-metadata.md @@ -8,7 +8,7 @@ This metadata will automatically be re-computed if you make any changes to the d ## Metadata editor -DANDI also has metadata that you must set manually using the `METADATA` button on the right panel. +{{ instance.name }} also has metadata that you must set manually using the `METADATA` button on the right panel. Any `Owner` of a Dandiset has the ability to edit the manual Dandiset metadata through this editor. 
Several fields here are essential for publication, and the rest provide opportunities to make your Dandiset more FAIR, enabling secondary analysis. diff --git a/docs/user-guide-sharing/data-licenses.md b/docs/user-guide-sharing/data-licenses.md index a3710d2d..bc5b9ffa 100644 --- a/docs/user-guide-sharing/data-licenses.md +++ b/docs/user-guide-sharing/data-licenses.md @@ -26,4 +26,4 @@ restriction. You can learn more about the theory of how the Creative Commons licenses operate at [their website](https://creativecommons.org/licenses/). If you have any -questions or concerns, send a message to help@dandiarchive.org. +questions or concerns, send a message to help@{{ instance.domain }}. diff --git a/docs/user-guide-sharing/publishing-dandisets.md b/docs/user-guide-sharing/publishing-dandisets.md index 939b8fde..0e320508 100644 --- a/docs/user-guide-sharing/publishing-dandisets.md +++ b/docs/user-guide-sharing/publishing-dandisets.md @@ -34,7 +34,7 @@ Follow these steps to publish your Dandiset: **NOTE:** Dandisets with Zarr assets currently cannot be published. We are actively working on enabling this feature. -## Citing DANDI +## Citing {{ instance.name }} You can add the following statement to the methods section of your manuscript. diff --git a/docs/user-guide-sharing/uploading-data.md b/docs/user-guide-sharing/uploading-data.md index f2cf3cd0..0412d8da 100644 --- a/docs/user-guide-sharing/uploading-data.md +++ b/docs/user-guide-sharing/uploading-data.md @@ -4,12 +4,12 @@ This page provides instructions for uploading data to {{ instance.name }} after ## Prerequisites -Before uploading data to DANDI, ensure you have: +Before uploading data to {{ instance.name }}, ensure you have: -1. [Created a Dandiset](./creating-dandiset.md) on DANDI +1. [Created a Dandiset](./creating-dandiset.md) on {{ instance.name }} 2. [Converted your data to NWB format](./converting-data/index.md) -3. 
[Validated your NWB files](./validating-files.md) to ensure they meet DANDI's requirements -4. Installed the [DANDI Client](https://pypi.org/project/dandi/): +3. [Validated your NWB files](./validating-files.md) to ensure they meet {{ instance.name }}'s requirements +4. Installed the [{{ instance.name }} Client](https://pypi.org/project/dandi/): ```bash pip install -U dandi ``` @@ -24,11 +24,11 @@ Before uploading data to DANDI, ensure you have: ## Upload Methods -DANDI provides two main methods for uploading data: +{{ instance.name }} provides two main methods for uploading data: ### 1. Using NWB GUIDE -The NWB GUIDE provides a graphical interface for uploading data to DANDI. See the [NWB GUIDE Dataset Publication Tutorial](https://nwb-guide.readthedocs.io/en/latest/tutorials/dataset_publication.html) for more information. +The NWB GUIDE provides a graphical interface for uploading data to {{ instance.name }}. See the [NWB GUIDE Dataset Publication Tutorial](https://nwb-guide.readthedocs.io/en/latest/tutorials/dataset_publication.html) for more information. ### 2. Using the {{ instance.name }} CLI @@ -91,7 +91,7 @@ If you encounter issues during the upload process: - Ensure your NWB files pass validation (see [Validating NWB Files](./validating-files.md)) - Check that you're using the latest versions of `dandi`, `PyNWB`, and `MatNWB` -If you continue to have issues, please reach out via the [DANDI Help Desk](https://github.com/dandi/helpdesk/discussions). +If you continue to have issues, please reach out via the [{{ instance.name }} Help Desk](https://github.com/dandi/helpdesk/discussions). 
## Debugging the {{ instance.name }} CLI diff --git a/docs/user-guide-sharing/validating-files.md b/docs/user-guide-sharing/validating-files.md index aa6dfa10..5b495e1c 100644 --- a/docs/user-guide-sharing/validating-files.md +++ b/docs/user-guide-sharing/validating-files.md @@ -2,7 +2,7 @@ ## Validating NWB Files -To be accepted by DANDI, NWB files must conform to criteria that are enforced via three levels of validation: +To be accepted by {{ instance.name }}, NWB files must conform to criteria that are enforced via three levels of validation: ## NWB File Validation [PyNWB validation](https://pynwb.readthedocs.io/en/stable/validation.html) is used to validate the NWB files, @@ -15,12 +15,12 @@ The [NWB Inspector](https://nwbinspector.readthedocs.io/en/dev/) scans NWB files or areas for improvements in NWB files. There are three levels of importance for checks: `CRITICAL`, `BEST PRACTICE VIOLATIONS`, and `BEST PRACTICE SUGGESTIONS`. `CRITICAL` warnings indicate some internal inconsistency in the data of the NWB files. The NWB Inspector will print out all warnings, but only `CRITICAL` warnings will prevent a file from being -uploaded to DANDI. Errors in NWB Inspector will be block upload as well, but reflect a problem with the NWB +uploaded to {{ instance.name }}. Errors in NWB Inspector will block upload as well, but reflect a problem with the NWB Inspector software as opposed to the NWB file. ## Missing {{ instance.name }} Metadata -DANDI has requirements for metadata beyond what is strictly required for NWB validation. The following metadata must -be present in the NWB file for a successful upload to DANDI: +{{ instance.name }} has requirements for metadata beyond what is strictly required for NWB validation. The following metadata must -be present in the NWB file for a successful upload to DANDI: +be present in the NWB file for a successful upload to {{ instance.name }}: - You must define a `Subject` object. - The `Subject` object must have a `subject_id` attribute. 
@@ -31,10 +31,10 @@ be present in the NWB file for a successful upload to DANDI: for 70 days, or, if it is a range, must be "[lower]/[upper]", e.g. "P10W/P12W", which means "between 10 and 12 weeks" These requirements are specified in the -[DANDI configuration file of NWB Inspector](https://github.com/NeurodataWithoutBorders/nwbinspector/blob/dev/src/nwbinspector/internal_configs/dandi.inspector_config.yaml). +[{{ instance.name }} configuration file of NWB Inspector](https://github.com/NeurodataWithoutBorders/nwbinspector/blob/dev/src/nwbinspector/internal_configs/dandi.inspector_config.yaml). Passing all of these levels of validation can sometimes be tricky. If you have any questions, please ask them via the -[DANDI Help Desk](https://github.com/dandi/helpdesk/discussions) and we would be happy to assist you. +[{{ instance.name }} Help Desk](https://github.com/dandi/helpdesk/discussions) and we would be happy to assist you. ## Validating BIDS Files diff --git a/docs/user-guide-using/accessing-data/downloading.md b/docs/user-guide-using/accessing-data/downloading.md index 5885a139..1a2c9fd8 100644 --- a/docs/user-guide-using/accessing-data/downloading.md +++ b/docs/user-guide-using/accessing-data/downloading.md @@ -31,11 +31,11 @@ Each file in the Dandiset has a download icon next to it, clicking the icon will ## Using the Python CLI Client -The [DANDI Python client](https://pypi.org/project/dandi/) gives you more options, such as downloading entire +The [{{ instance.name }} Python client](https://pypi.org/project/dandi/) gives you more options, such as downloading entire Dandisets. **Before You Begin**: You need to have Python 3.9+ and install the {{ instance.name }} Python Client using `pip install dandi`. -If you have an issue using the {{ instance.name }} Client, see the [DANDI Client docs](https://dandi.readthedocs.io). +If you have an issue using the {{ instance.name }} Client, see the [{{ instance.name }} Client docs](https://dandi.readthedocs.io). 
### Download a Dandiset To download an entire Dandiset, you can use the same command as suggested by {{ instance.name }} web application, e.g.: @@ -48,7 +48,7 @@ Names of the subjects can be found on {{ instance.name }} web application or by DANDI:000023`. Once you have the subject ID, you can download the data, e.g.: - dandi download "https://api.dandiarchive.org/api/dandisets/000023/versions/draft/assets/?path=sub-811677083" + dandi download "{{ instance.api }}/api/dandisets/000023/versions/draft/assets/?path=sub-811677083" You could replace `draft` with a specific non-draft version you are interested in (e.g. `0.210914.1900` in the case of this Dandiset), if you are not interested in the latest, possibly different state of the Dandiset. @@ -61,7 +61,7 @@ You can also use the link from {{ instance.name }} web application, e.g.: You can download a specific file from a Dandiset when the link for the specific file can be found on the {{ instance.name }} web application, e.g.: - dandi download https://api.dandiarchive.org/api/dandisets/000023/versions/0.210914.1900/assets/1a93dc97-327d-4f9c-992d-c2149e7810ae/download/ + dandi download {{ instance.api }}/api/dandisets/000023/versions/0.210914.1900/assets/1a93dc97-327d-4f9c-992d-c2149e7810ae/download/ **Hint:** `dandi download` supports a number of [Resource Identifiers](https://dandi.readthedocs.io/en/latest/ref/urls.html#resource-ids) to point to a Dandiset, folder, or file. Providing @@ -103,7 +103,7 @@ Learn more about DataLad from its handbook at . ## Using WebDAV -DANDI provides a [WebDAV](https://en.wikipedia.org/wiki/WebDAV) service at https://webdav.dandiarchive.org/ for accessing the data in the {{ instance.name }} archive. +{{ instance.name }} provides a [WebDAV](https://en.wikipedia.org/wiki/WebDAV) service at https://webdav.dandiarchive.org/ for accessing the data in the {{ instance.name }} archive. 
You can use any WebDAV client or even a web browser to access the data - any dandiset, any version, any file or collection of files. You can use any web download tool to download the data from the {{ instance.name }} archive, e.g. diff --git a/docs/user-guide-using/accessing-data/external-services.md b/docs/user-guide-using/accessing-data/external-services.md index 47e26b3c..2ca31df4 100644 --- a/docs/user-guide-using/accessing-data/external-services.md +++ b/docs/user-guide-using/accessing-data/external-services.md @@ -1,12 +1,12 @@ # External Services -DANDI integrates with various external services to enhance data accessibility and analysis capabilities. This page describes the external services that can be used with {{ instance.name }} data. +{{ instance.name }} integrates with various external services to enhance data accessibility and analysis capabilities. This page describes the external services that can be used with {{ instance.name }} data. ## NWB Explorer [NWB Explorer](https://nwbexplorer.opensourcebrain.org/) is a web-based tool for visualizing and exploring NWB files. It provides an interactive interface for browsing NWB file structure and visualizing data without requiring any programming. -### Using NWB Explorer with DANDI +### Using NWB Explorer with {{ instance.name }} 1. Navigate to a Dandiset on the {{ instance.name }} Archive 2. Browse to an NWB file @@ -24,7 +24,7 @@ NWB Explorer allows you to: [Neurosift](https://neurosift.app/) is a web-based visualization platform for neurophysiology data. It provides interactive visualizations for various types of neural data, including spike trains, LFP, and tracking data. -### Using Neurosift with DANDI +### Using Neurosift with {{ instance.name }} 1. Navigate to a Dandiset on the {{ instance.name }} Archive 2. 
Browse to an NWB file @@ -40,7 +40,7 @@ Neurosift allows you to: ## Custom Integrations -DANDI provides a [REST API](https://api.dandiarchive.org/swagger) that can be used to build custom integrations with other services. The API allows you to: +{{ instance.name }} provides a [REST API]({{ instance.api }}/swagger) that can be used to build custom integrations with other services. The API allows you to: - Search for Dandisets and assets - Get metadata for Dandisets and assets diff --git a/docs/user-guide-using/accessing-data/index.md b/docs/user-guide-using/accessing-data/index.md index 6d859b95..3020d473 100644 --- a/docs/user-guide-using/accessing-data/index.md +++ b/docs/user-guide-using/accessing-data/index.md @@ -1,16 +1,16 @@ # Accessing Data -DANDI provides multiple ways to access data stored in the archive. This page provides an overview of the different methods available for accessing data from Dandisets. +{{ instance.name }} provides multiple ways to access data stored in the archive. This page provides an overview of the different methods available for accessing data from Dandisets. ## Access Methods Overview -DANDI offers several methods for accessing data, each suited to different use cases: +{{ instance.name }} offers several methods for accessing data, each suited to different use cases: 1. **Web Interface**: Browse and download individual files directly from the {{ instance.name }} web application. -2. **DANDI CLI**: Command-line tool for downloading entire Dandisets or specific files. +2. **{{ instance.name }} CLI**: Command-line tool for downloading entire Dandisets or specific files. 3. **DataLad**: Access Dandisets as Git repositories with DataLad for version control and reproducibility. 4. **WebDAV**: Access {{ instance.name }} data using standard WebDAV clients. -5. **DANDI Hub**: Analyze data directly in the cloud using Jupyter notebooks. +5. **{{ instance.name }} Hub**: Analyze data directly in the cloud using Jupyter notebooks. 6. 
**Programmatic Access**: Access data programmatically using the {{ instance.name }} API through Python or other languages. ## Choosing the Right Access Method @@ -18,15 +18,15 @@ DANDI offers several methods for accessing data, each suited to different use ca The best method for accessing data depends on your specific needs: - **For browsing and exploring data**: Use the [Web Interface](./downloading.md#using-the-dandi-web-application) -- **For downloading entire Dandisets**: Use the [DANDI CLI](./downloading.md#using-the-python-cli-client) +- **For downloading entire Dandisets**: Use the [{{ instance.name }} CLI](./downloading.md#using-the-python-cli-client) - **For version control and reproducibility**: Use [DataLad](./downloading.md#using-datalad) - **For integration with existing tools**: Use [WebDAV](./downloading.md#using-webdav) -- **For cloud-based analysis**: Use [DANDI Hub](../dandi-hub.md) -- **For programmatic access**: Use the [DANDI Python Client](https://dandi.readthedocs.io/) +- **For cloud-based analysis**: Use [{{ instance.name }} Hub](../dandi-hub.md) +- **For programmatic access**: Use the [{{ instance.name }} Python Client](https://dandi.readthedocs.io/) ## Data Access Considerations -When accessing data from DANDI, consider the following: +When accessing data from {{ instance.name }}, consider the following: - **Data Size**: Large datasets may be better accessed using the {{ instance.name }} CLI or DataLad rather than the web interface. - **Bandwidth**: For users with limited bandwidth, consider using {{ instance.name }} Hub to analyze data in the cloud. @@ -40,4 +40,4 @@ Explore the following pages for detailed information on each access method: - [Downloading Data](./downloading.md): Learn how to download data using the web interface, {{ instance.name }} CLI, DataLad, or WebDAV. - [Streaming Data](./streaming.md): Learn how to stream data without downloading entire files. 
- [External Services](./external-services.md): Learn about external services that can be used to access and analyze {{ instance.name }} data. -- [DANDI Hub](../dandi-hub.md): Learn how to use {{ instance.name }} Hub for cloud-based analysis. +- [{{ instance.name }} Hub](../dandi-hub.md): Learn how to use {{ instance.name }} Hub for cloud-based analysis. diff --git a/docs/user-guide-using/accessing-data/streaming.md b/docs/user-guide-using/accessing-data/streaming.md index f5aeece4..5414cd0f 100644 --- a/docs/user-guide-using/accessing-data/streaming.md +++ b/docs/user-guide-using/accessing-data/streaming.md @@ -4,11 +4,11 @@ Streaming data allows you to access and analyze {{ instance.name }} data without ## Streaming Methods -DANDI provides several methods for streaming data: +{{ instance.name }} provides several methods for streaming data: ### 1. Python-based streaming methods -Using Python, you can set up data streaming using remfile, fsspec, or ros3. See the [PyNWB streaming tutorial](https://pynwb.readthedocs.io/en/stable/tutorials/advanced_io/streaming.html) for details. Note that these streaming methods tend to work better on [DANDI Hub](../dandi-hub.md), where data access is faster. +Using Python, you can set up data streaming using remfile, fsspec, or ros3. See the [PyNWB streaming tutorial](https://pynwb.readthedocs.io/en/stable/tutorials/advanced_io/streaming.html) for details. Note that these streaming methods tend to work better on [{{ instance.name }} Hub](../dandi-hub.md), where data access is faster. ### 2. 
DataLad FUSE Mount diff --git a/docs/user-guide-using/citing-dandisets.md b/docs/user-guide-using/citing-dandisets.md index 19e04bc5..aab9cea1 100644 --- a/docs/user-guide-using/citing-dandisets.md +++ b/docs/user-guide-using/citing-dandisets.md @@ -21,7 +21,7 @@ Citing the Dandiset and other datasets is important because it provides a direct ## Data availability statement It is common for journals to require a Data Availability Statement in the manuscript, which should include the -DANDI Archive RRID and the DOI of the Dandiset used in the research. +{{ instance.name }} Archive RRID and the DOI of the Dandiset used in the research. Here is an example of a well formatted Data Availability Statement: > The data that support the findings of this study are openly available on the {{ instance.name }} Archive (RRID:SCR_017571) at [DOI of Dandiset] (citation of Dandiset). diff --git a/docs/user-guide-using/dandi-hub.md b/docs/user-guide-using/dandi-hub.md index 41bd0af0..72e134c3 100644 --- a/docs/user-guide-using/dandi-hub.md +++ b/docs/user-guide-using/dandi-hub.md @@ -1,12 +1,12 @@ # Using the {{ instance.name }} Hub -[DANDI Hub](http://hub.dandiarchive.org) is a [JupyterHub](https://jupyterhub.readthedocs.io) instance in the cloud to interact with the data stored in DANDI, and is free to use for exploratory analysis of data on DANDI. +[{{ instance.name }} Hub](http://hub.{{ instance.domain }}) is a [JupyterHub](https://jupyterhub.readthedocs.io) instance in the cloud to interact with the data stored in {{ instance.name }}, and is free to use for exploratory analysis of data on {{ instance.name }}. For instructions on how to navigate JupyterHub see this [YouTube tutorial](https://www.youtube.com/watch?v=5pf0_bpNbkw&t=09m20s). Note that {{ instance.name }} Hub is not intended for significant computation, but provides a place to introspect Dandisets and to perform some analysis and visualization of data. 
## Registration -To use the [DANDI Hub](http://hub.dandiarchive.org), you must first register for an account using the [DANDI website](http://dandiarchive.org). +To use the [{{ instance.name }} Hub](http://hub.{{ instance.domain }}), you must first register for an account using the [{{ instance.name }} website](http://{{ instance.domain }}). See the [Create a {{ instance.name }} Account](../getting-started/creating-account.md) page. ## Choosing a server option @@ -21,7 +21,7 @@ A "Base (MATLAB)" server is also available, which provides a MATLAB cloud instal ## Using conda environments -DANDI Hub provides two ways to work with Python environments: shared environments managed through conda-store, and individual environments you create with conda in your home directory. +{{ instance.name }} Hub provides two ways to work with Python environments: shared environments managed through conda-store, and individual environments you create with conda in your home directory. **Shared environments** are managed through conda-store and are available to all {{ instance.name }} Hub users. These environments contain commonly used packages for neurophysiology analysis and are maintained by administrators. @@ -121,7 +121,7 @@ If you need additional software installed in the image, you can add a server ima docker build -f "$(CONTAINERFILE)" -t dandihub-dev:latest . docker run --rm -p 8888:8888 --name dev_jupyterlab dandihub-dev:latest start-notebook.sh --NotebookApp.token="" ``` -4. Add the Dockerfile to the `include` matrix of both the [docker-push.yaml](https://github.com/dandi/dandi-hub/blob/main/.github/workflows/docker-push.yaml) and [docker-test.yaml](https://github.com/dandi/dandi-hub/blob/main/.github/workflows/docker-test.yaml) files. This will allow the image to be built when new pull requests are opened and pushed to the [DANDI Archive Docker Hub](https://hub.docker.com/u/dandiarchive) when the pull requests are merged. +4. 
Add the Dockerfile to the `include` matrix of both the [docker-push.yaml](https://github.com/dandi/dandi-hub/blob/main/.github/workflows/docker-push.yaml) and [docker-test.yaml](https://github.com/dandi/dandi-hub/blob/main/.github/workflows/docker-test.yaml) files. This will allow the image to be built when new pull requests are opened and pushed to the [{{ instance.name }} Archive Docker Hub](https://hub.docker.com/u/dandiarchive) when the pull requests are merged. 5. Add the image to the server options by updating the [jupyterhub.yaml](https://github.com/dandi/dandi-hub/blob/main/envs/shared/jupyterhub.yaml) file. ## Example notebooks diff --git a/docs/user-guide-using/exploring-dandisets.md b/docs/user-guide-using/exploring-dandisets.md index fbecfdfe..b3d6ef50 100644 --- a/docs/user-guide-using/exploring-dandisets.md +++ b/docs/user-guide-using/exploring-dandisets.md @@ -4,7 +4,7 @@ This page explains how to browse, search, and view Dandisets in the {{ instance. ## Browse Dandisets -When you go to the [DANDI Web application]({{ instance.uri }}/), you can click +When you go to the [{{ instance.name }} Web application]({{ instance.uri }}/), you can click on `PUBLIC DANDISET` to access all Dandisets currently available in the archive, and you can sort them by name, identifier, size, or date of modification. 
diff --git a/mkdocs.yml b/mkdocs.yml index 6cd7c7e6..cb20ec4f 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -139,3 +139,7 @@ extra: instance: name: DANDI uri: https://dandiarchive.org + sandbox_uri: https://sandbox.dandiarchive.org + domain: dandiarchive.org + api: https://api.dandiarchive.org + sandbox_api: https://api.sandbox.dandiarchive.org From 078de195c6aa32443dcfcbd55244bd02501d0667 Mon Sep 17 00:00:00 2001 From: Nicole Tregoning Date: Tue, 21 Oct 2025 15:39:57 -0400 Subject: [PATCH 3/3] EMBER-ify --- docs/img/ember-logo.png | Bin 0 -> 52972 bytes mkdocs.yml | 65 +++++++++++++++++++++------------------- requirements.txt | 1 + 3 files changed, 35 insertions(+), 31 deletions(-) create mode 100644 docs/img/ember-logo.png diff --git a/docs/img/ember-logo.png b/docs/img/ember-logo.png new file mode 100644 index 0000000000000000000000000000000000000000..1a3dac35a2a373b6848b7f239efceab3ddec303d GIT binary patch literal 52972 zcmcG$g2R4IqZ)=U}y1Lj<2U2d*`pb zth(eJtw;Sh+_S;#hSbGK_x7=IP4#x=HRm7c6{S@?+Ar^yaT*R}y|4Rl-Tm4_!d3L7 z#S7V{9_yfOjWS0e<1(AxDkVj2iL_FLF2lGr*30n=+`)1*pJW8eetNi~npmwt47HqQm+_WkK)e03KT-H>PChlG z3EbJqVa(ZNzVQ_mV#varcN}aij`0Lni_Q%C%b(1dt7FIjK{P=&;}DeQmJ#?_?dEP) zE+dW`f>{28*}>hn#J*^h$ooq^u;r}V&D#!wB&Fk>NFbDU_^D-4PB;G;IbSj-+(P^S z54c3PQpq`H6)Fz3P{WMYeD^C*?$4$5#tO>+NFc>;236D$6zTNG93Stt&pGC6$EpAt zIx=>EEFvBM?CrUWjy|Y&N;wCigkg!H?lYg9`1eQqMDx=ftCU+ZKpYQYsmy392L?{R z;=+o5D!pb<{zl<4gtAWRDf=M}r2MXBe{<7ed7t?eYy0Yx<3027+XrARuH23cVE=!` zPLGbl#%R|#4C<&M3Yy+3z-Fh2vtEBbjj2&tmn2dMGIj$x(Sq(gTWCUOZhk#%mCD`6 z6G0^c((v458D|YghdhKs9te8*k5Vk-%+LJE@BA{pWbZ{rh?WJW*qp!gP{TYo}4yhmXi$(d&{13m%5$ffC6$ z6Rqa3OY=I92=$Z$0=^Fm_?B{@hy-8mt>srQVy|x%As|J~C3x(XC6`4H^Y!^JuJJb` zPHl^9Qi%S~DwUl7NFdds&Ghy+o#hMS;eC3PIx<73hYP(G`gGHe$H*(kHOQeB7I^s= znND_1(p6_CL?gAi%=FJe6lAc3-u+lA#>aCuUY`ywzsWfSEOrFeH`K%MQy+M@qvPd$ z)$jsosbIakE_j$lHhOkXqjE67SEzC?EIZi7)9z(#{@_!a6VnZWS@J(p%J3!;iNbD&uZxA{ zO7B2ti%59JSjRspwUhWHp5&ZqDqK9hYWcdavF*s^SaRF99SJHz^bGmdGy+;KA0#@F`f(j8meP4?i z$aGyiShR@0w# 
zX)3@LE{P&EC#}A3l_KSx1e{2NX<5UY`kZjwpCA64H58t;TIlqzi5eZ|3v82u zu$yUN>zqcfJ~X-#jC`JWpaBr?_5-+C12h}c7N=7(P69hxfbpOPvp){jmU=N5`5k_a zvu+avD1(atkPV=tq_`ajIV}Kw^+9y%^QxLr1ty3jp&9_n*VJ6z-U6@y3)_YOXQSU` z+{xC+jrL`I{X$s3UL!!C$~byVE@+R1b#&l7kXBlJ(Vb-PD|*QzqEwJ34;sj50JGmy z8<}}URbJiMmw5QvxQdf%8w`X>5VH)aeVCGzanNum$q&~Z*p59GK3_*{& z0ED587|i8|uLPG%!0gg}3)s@qNLZ*X65UOAr=jC7g_nB)mi#bRa5;Cff8xIlz`hw} zOous*5WsQ{0kGL^DRY8QYB-m^-J7OhqrcakiOC_wTv*@}jDZeC0+jf=|FIsn!I?*9 zk^)9}MVm{os`$Wuyyot#iCQ0_deO_~3UG0x*kA+yD2d z1sl+54=1!#;?XvhO7_~zFzzAoKW zS?KEje8&Dg7&`a`lFchHL=yjY7YJ`Qn>$(SRRSP38oCB{SOSpT!uF3CB_h`L?(Z^p zVAAaH&bXA#lUCogGA^jXZ1sa};p@b{172E3+dhS)CgpF5?)hQeUYm( zv&!AZAK_$8~2Uh6vt<&yd@@Q?!=TIp?JK9D1&V(%P$5FK!96Pdh~w?c{?oHhI48k$Hfo~(``juEYDALvW-b?g zReu{v3MqkbQn8{lPOm_|!h6mGPQ=Wb>GzB!MLV&d^;Bo+Fn}ZaA)2 z^j+rb6_1Zw?1#B8LM#-(?JC(E4?Z@pzKXfjQC>^Xm+4PzhzQ6!;Y>JUM3ujxpxwqGd7 zjtl-~7SCA;l-yG=M&T*syUcnT_#Rj6@pPN{B}7XHfZ*GBv1`0rZaVi!+kSqE&E&xj z(Y;@lG|)2W@2qN6-`4!nO#2ZLHHRn&>o^Q>3Q`~}5zt7P#+z&OK6%|3el~BSpEzfV zdDrnOlXI2!L##&O3MOA~Ec!PgHCgTP@r7b%y;w?U;~ho>#jy~K%u^+|Qjpzrv^c(n zzcjgDC&=L@0!Hony)KwZzVGRA{<-u|=#?UYibE2oATLp#@Y4@P0}SaT8M*#vwD@|)MC{~kB>Qg&7iL|z!GgEjV0CUbWNBhF#C7#($R;Y z&#O|maRak`XEs9MV{#_N1z01~(8_^sYpsS>-5$%^*jxdRp|=XAs=Y(kR``?FjD?Wx zXWquz(uWzy?(VaXG@__&5H@JmzM@2g&ec-hkv_$fzWFM5c*A3orsV||SAR~F^*NLU z*kSRd*osm(41X!&bP2d{?^PDX(D(w9v=777z=*z*GQTDII#V8mq1h6fJD{!Xs6DS33WTPcKJ#y5Jlp&~!$vr$c7ej8?qTuo4|d5QN)-ZC znj0q_FeaqoQ@1}UEPd$ z^m|IB{N392&jK>k;+Bq4E51(hQ23lE$_m&XIHUFA>!SJN35e9=R5OjymZ{+R0(4#bN|PqIRz;mgmm%O@ciW|Uu`))Xp@&Q19HW13|F3y>Az6HQ z9TWO^R?iIi?D?N8Oni?FdE>$Sh2pKDCUnhoHs2?4dJJ|fmqG5hR zq>Ti>vGU`$k?im3cbMxSS6&b{P{w%*=|4IZa=iL9xe0LNgF&5B;o8n2;KqMYBtT(B zOXW|}DSo(5w%yFfnS&>V$4sUxe`1&YAZBqPhHK$F8EUNY&c@vpM&A(0k^EYUB$Cnq z8G^sxO3~vxW7XXfF<~}cxp~`8M1sSLcd|<4>kLAaGb=+a<>@(v^Qik^4Uv2T$vS!! 
z$9wa~D`{b=xIuRbX=vhINE4iL)`U_j9`wL45r*1_x zRxUqE94lDDSBNbu0tl0gtp~pB-8lF9AI?7g{Tt+0PI*0*?GT280cMJh@k-=l%Gp7A z<1Hn8c7~30WpfxVo@1nVV>g^o<8}kZ|Em19miSm?+xzy^<(-0Yo1e^|B zZ)Q~V$7x)PXFC3|6!H79TFkIEQB8S*)|hI+{egj&83h00un06l4^Ww9Y$@*X_2mZ{ z>tQn@*1#&gkF^o4vK(wKRJL`5DToQUtOE1eSFYgr# z)k?hqZA!oIfVERJ9<-l0_^;Bsiv(mfa@Ok!-&aimMAYE{@~9SItgam1v&lV0uJHzC z{@e$M&ZFd@49hMj=J-i`c4pl1#D7pV2(4TN6XN4?3)O{L;$!mmpBZUoXA=;e7VXkw zIDsm|W$j9Jl2xoJW6~Ys5oUm%{t?Y18@Y%2+UqtJtQ=Pl5aKQ=B=1ayr4^~E3rHSH zE1X)@yRZ@~dI2k(__hCjy$BAMUy`gsLJWbWDcs3es&|Gkb#PhhbD`!_q82eAac76) zj>jsAM)BEn-zjXguM`mIW%_^XDxk1o-?ju3aRS>kGxi!z=eBA$Bk6Aq-vyDs7gcwnbqDJQZb_MgvG7N1{lcR(?NyD-c`PZ1sXMSRp7}pp#FffRFTr>gfKW}iN>Vc(xbsl$C zkRsferl@Qo*Srp5Gttx5#{NV)%(u0Ih?9j><7sJf&C5Fn8p^Q(^Q{%RsziNcR*KXB zwLV-Q;aF>r^1D%qp-rzeh6>=Y4+g{BLMe4D$dHs$KQ>XI!ylnsxHC1{zswghB!WQ#RzbCw|6`a_E5P9 z(00?SX=?wxQ@jI-cw+R3U>?)`Rfv3Re*gS>I}p$c5RjPIY2)7t;VhztQ=A1!arXdy zah8=}0hwBL{s%iT6!wtA+UD5T2^~mg4^aAR@n><0^WHn3A4LQfPsX`{;8D|hov``K zsa;HiAWo4c&W#RYVhU`U)X6@8Hb&Upw*0hA`38fbWH4OvSzL=>5pYK|b1{Ffda?4I+%9Qah4Rmml)clvY+K@(r^ z1PG|bbQsD?h}B{|3SL9pDZ7|HAbq=Z=JuFJf-{Zl?oSn_vjP!l5EDk z5$OQ2T0Z_MAmMn9-&qwwy#oS`3=hcZkFIb`$*?e!zCd2e>oH7bB^Fc6a`y$0CuLQ# zc@B|Mn7$qnddoiJc|<^c%tLVCCz>&c7xk9A81|v|4%*xQ3au1i?&XYWk0Ybod%aaE z&3!MOT0#$wl^I-R){lLtIquP6Q6w4D8*cNsO5JM7gXV%E%hA8lBQw-n=?<(d7mM*{ z?+@IW6`%bthdjfe+&qYmk9}m%tisdL&2F4jhSALa67cH6mhgjU}VhY_LqwCJw19x>A! 
zG*K(PnPYsX*<^C`%AJNqQ$lVP`iQJ0UKR(I>T6{pHJG^nR-sc{=!M^f?X1l@1ZwD73)_b78(fHT}&#V?1P$ z4k{bJK3|-s=P;ndDv~M-RLAfvx5j2BQq%YDf~TCE%mpa&*6VL2j3PJcuqu~Y5C|du za5LKjaoX!)2o9RwA);`vQNe;cPpKjI;c^<(*MZ%-3#1Wqj1&{kvjBE^Kv>IBstdvz zN$e_QuPxkk z=LmP~-DsoAx=O2<0^%29;ll`zC%?mimMJbTsx;rdbur{O2LB%v4qQbL!-;Y|r(L%+ zIg}<0#QF}SEpHB4plrq!xy!;eS{r%{JSi-m>|_M`y6nw2?<-GBKP-_9oO1NsDLJ>w zp~wnPAb424#}$nMVuW;yU^z;qVwoXb5D`<{&2AD7ebK+wdMGK(5dNX-$I|fTI;F4l zfk4wovMt77$-^$GA}I3h9HXH4;jZteb}mau6Y*~$>hgh*=Cv?H%g;gcW>4uDpT80% zZ7gO6u=Fk-f3AJBcTh#V?{|;2F~2!r^UsS&K2#fsJRM6_86d^(A+!$e?i5XB>%OG%2eC5y!YiTe6PdNm+bt*OT78S21U zP6KgVSJMw%pIR6`ahV<|qq#(d`9l8d(||Ars!_o+ejOP!#|`R2sB!zSzP6&;Lx;Vp z0ud#v#g*bxYZ*=PFhtrt{lMKRJDZhrCEp&hw4GnYeYX&_Vx})Tz5O8LCXdb?6!u1Z zso$=?w=Em@#?6#u}1C zw53fe$CTGGsVbGcz%`K0Py=WCOh0g8N(ngaNQQ+j8fe84>|%@ZI?I@M(HBJmP~7d^ z0+N;qxJaU9+;n)S;T1*55MYtleF9bNyCRX7`9Rl|BJ^ozxfhq^Njl~d5NYa+p8nU+ zqa>Pdvhth#T+ZCjjwHy2?%wV=*s1wvSSV(IpV1~K3%>Q~p#H$EY-)aYpp=HT_8`Am zeUFhFl#MjeXgCvi=e-#38?T|G{X;N~04UbQ&a4Pb_Q#1@r;r4F2b!HGC^u-w$dAWf z|6y{H5}Oaz_O?#VSe^p-p8-VyO~E=$@OGJr z07&4p#M!`hqb{#NV%5rKXCemP+pbIdoCNm`)FMka505uARtxl3D&NZtrJKSJ+&7w zi$2{u>veH9B zyEiz$+4Tt!-*!^|(A}{1$}5Tw;Y2sIU*gD`_E10Y3)yBV8SNF|gS5P|uXsjxPnW-g z?ui8MnosxpPoC}eHMxJd_e_4O25NzG>#sb(3*5pL-lBMIVv3>d!9f40oRjMi2Lm|y z-0QkY3*sOio>EX@Vgq=pC2q;pD!qwZ&p@y!HRR3jiQpuECkW3~EuROCT1fj3(sel~w#>@cU^7 zzv94RcPi+Vx{!n}c=a~wCm^A%gLVGhyC}}_%MDIMikbjc(xg8DF)AvJ4G|?j0=z2) zwI5A}p-*v{)~sogekxEbMhl;S-Lu3-|PD>5JWNA z0xB(DpvcHg8D(p!kdTA8NJQGxrBN5|DN90e0VsnITm~ZFIyu{us8uD0I02Z_lF)A+ zKGQy*_6!^{&TYT!r$gNUTnK9|TNj48$kpzyp;8MDi{uhvrRAHy^6Rt4>P78YJ}LN-_tT6FjtN?K#@3^b+`{y|YQ& z@>?EA#cL5P6)XE`#rJGeu7CRM%unPr)N;`p4Es>t8Drykm5f6j-ePO-v!1T`tBUX7 z5EGvN15mREUv|P@f1glyCL&!t3Y<$KS)~dF`{U4rdt5K2(Il0KF^0YvmeXi*z{&+c zkmD+vO9#zW3$@PJDV&2F+smBNI10x2QImZ*bz5!`81FuB& zEZ7Qh%h|~}!#ssotKp*D192!*hIppODme4@$VPL|~HJ^1CVkW)-U+(Nx zcMWeBcpkz4Oz$9wdjA)fqk3hU9(q71s0VPH3gV}N<=87bdGkRiOgsxnjY=^=KA^r3V%XO>7N# z|J|`Y7h71q(>f`)O8b>Q4Dor?dv+rdPEOEXc@lH`NsK8}Ws>}4+ndOhf&O4B?GYWe 
zWT2V`weMB?u+B@DNRi@g4%>eGr>?iMh5Yy20WfQ_gDTc#8SM0AOfmoU;=H`aYLDx9 zd;Xn{$WUJoB-tNncUqfBz8PLLT~EPpnn~}*yYMk*-_`i(t-RoYZm!SjF2}TJ)hx;4 zVa_T6P}WQX=BJeZ2`dn!ttH!b&s@7-nlyb7usdx)8bU3B91Go78xX13hb@eMg?g*B za#Qdd8$GM5-Ud~saq;e`)@PCN#>6Fac-d&F6!tWhQNEp^13+UYxyw;pe9wDHHbYnj zJ1NI!SB%#h;UEo}wR^dM_mJ(s+BVNSMehegf#j%W#u=7%A+vw~EtM3A7)_dVWEj4{CIu?3#U#pP zPh!dupXS$7FeQu2ua=%~pHH(wW7yG{)skThK2`AxH(Dz%1%Z96ChPmE6~+J%A443E zbJ*(Ea=jM0WWt7e@m~}z4XPqu+88}enOgplx1Lv%@GQk=Y1;yyP=0@Kd5=`_;)LVX zs+}K(b(lfe1V5ihBs;kgzv=IZ+idr7>?p2)P>IKp7y?Zb@G$$-b9e1a-G)+1z_W-6JA=Be_H^J@uEUpNd zU3lYol2L_4=84SM7cm`se_2KlX>U8kMpdbue${Tvu`ga)uZw zLDBqcLf!P&lxc4e#m+8`2vVKviR^dx*#3chy7yjtaDon-6L7e_0!Dc|H>v=6R@t(q z86+g_c->5%n8!w2A4eT+?{?DYf|3(u>CT@7bY_C}@kJBa7j#okrCoQ4?>)wftpckjM7GyYz(lvYb zh*%Qs&itChU;K;A>5C;m-r1d}q6;d*50*k@x}r06=!-Ik80|r!_0jV z`Pj5ZxCEp50e7`YL)9(y#dF!6aO+V+`Tl6PFDQlEJTq7da866}m{4y5q|qC^sil!b z93~Dq@kNr}wtCT@hIP|@I{ZBO!JFk&qnF70L|4*w!f`8x25i(=g^0jwMH6oHi)S*uh)b#f5sBNgpA6b>^49w}gPSk^BTt`4Qs_EZu%;dG%XM7ANU*?$j{3#+3!#q>u|jhi@Q4p!?qM%{=HpzmH(W>*arC3`4M} z26WYYUwDcZXJH1?`vQPm>oH&HG7C!Xz-Maom$%Gyuq4I4@=L_z9d9iq6CLwhSg#q3 z4zx($qXzm;8K+@-2}?{|qh-&Ura1~K&Me)S7(`B1Zu;2>x}lF#lTsQhS&?!J>%rL< zyM^a^aF+0u+9lZ59>hSd{}bl6qZ;Rr8zsdc&Uh930%s$iI!)}ULvkLR`yP(F3c_L;j+~;qf&V^IC0f{?0K)Q3U!r7TDyLTy5+UNnraptcwQ-NjxnTc zr~73cM(T+SSB6h0aIwigT?=}I9ucV6P-Zu>7mfD(i!8rW%iz+P9D24whQJ=eA^&(fbur6vcxTodABa}8Ib zuaiG+#BD!?)XG;R8z%e`EDjW2fAez6d})QhjL3=0Wo;2EoZhKcqZKPKnq3+ug21D7Rxxdu!_<|Dwu4AJrhp5wGwA z_mv6G&Rx29FE{3PH&pANM%{Bpb{n`VPsv597mG>A?)jNt-tZCV@ z>s{I%k2aZguDTpQ@6+z&*bR}tN_a-kSo5XZT$h!UGS(In)P&e*Rb`uztc$I>O ztx->Wj}OU7N(lXEVjm&@}H$?Kn}m9t*)d#?v~?->wRb zb;(%>T@!d+5guh-eB8`=PlXW9?5GfsednmY%}m^PyIlSDV_<);4ieQ#YGUA>dQFXL z;wJUASE4PF2OhYpCTy+YeNc@j^nTR^zJ$CDch+cvVbmu_f;H#AW zBm!I^{7T}|jGw}^mA=rGCEqbH2S@d_t-RY0qpK8r#I+=4Oshe#(%H?$dJ)_Tju4@{ zh1_GgiDE7k3)t9hkyiTR^&qYiU|Q=)WA=*e&N#bVl4!ujOM6z*!Hk`7Q;?Dx{%-+e zbrC4C7(Ng8YrnSFINGcVh)NWs1Rso@E{(>onen~(?@0EAis;=mzYTGDGPFco9M71>yeFIk<*`0XUi^geDE*v{%F>|`>$?1xGpx6 
z;f2BHT{E2UUQ^#3@SSkl2=V{v{g&?Ou=l6;8zQ#`Iciqqdq?M)TErY^iW#Rc&83V! z&z0sukn{H^$bYVA$qr9|*}>ImBmBsHaLx?^kOa6DQ|))X{kKy~YLh$a63}aV%!6Sm zdMh29YpE(3${_yr)lnPz&pE>eoJw`_DIE;`CM!JIWX7d;*`6eAsqc1xzRddhk3869 z>NzyT3XuaQz?(?w@Zlg|yGKTsSeJ~z;Y;&99(bo95RuQU9A)Zk);3(X0~sbo%OStX znV9*t2IgGAZQ|e&@AwhP0*@ux;So#*qCRm#?7S9OtEI4hZ?;t>hl|>EkwWetD*O^(5r;JLLqrm63z* zqnYjZH4D`phPq;A9sZjI)WzKWg8$4ZQo>ArcgsdV=K7OoMietOL;eNpXLmcUrIs79 zV`U<$@}=%0v}TehC}L_x=aGwrscDK`I?$Nyslk!gVEWTw{^= zoro;2$SkQ~Yl~5j$!DXlTK(k)`#@hm+d4wionO;&E)9FzLw`fCj4^ z_~V~U0R16Vqh8Zj^gDLIYxVb>2awP9l6o#@Xd@%Q9J=M@3XOO~Jv zJbn71IO$K##SUa+`;dG5Ts_X~7J7rU>xVVs*@GBUU!r!)Lc-}CP!|H${WHOi@HAA@ zE71kyeOT^>?>Mp0b|5w8=_^5AUgakcbZqBQF$hRGR1*mv`Zb`V@pdgK-ByqJ@F+f_ zn_FxuutSHyg4^}Emg?DdEewCSWJE5R?hQZ;Sstn~4-oCq%nM-}ulbEhj8qt2&}*FJ zMkn=B;@sXhK0y7>2uvch$Q@<9?Y0dDpJ9sf3_b#SG4aa?M^c1VnOq?FG35gf z5oDEmM(~}b?I=f%MA6ezuyuUB@leqTbC1vRH8`RC%xl^`5YBmJ1wU)Wsj=Ux;xJaB~{aC3PzcRFo;#N zOW}%8S0@o1x{h>j%ivJ|D$dq&*cvAo!i_#bmOguSbWtB00ggbf5%uqlaNt6iBMaG^ z=UFd)Y3Xl?nXW=LTp+NDT~|q;;$8Cr&)io#%B{V>F&>%Ln*yIl48{aq=cMq?cHA?l z`svoE0zC_^3#2KUC35W?Z2kvouzw33;OEEb_9hKwdEMJ`BM#0AFS8w3(@Fga ztR(l!;!}?rUzFDMg<67D1i8N>u1(kroxZHkjh5>jptdM~lm0d*=4&hu^~`5st~S%X z+82bLcwylAVCFolS(R3co}2zK(Eo7npkgkX=f9cHSH&j!L{`G*MswGv7%wmfn-(`t zA}g(7Wf)HO{5v_efGneFM~p0)E*pn%k9lmWUk9aS@*-F(IGeeA51sqwcV0@mg34g* zO=RUWUwsubewwKb+OFa|hz{EfPaF56xdgTCYGXNCW_^6TgC9o33Pd~H>WlWNNv7ww zYsUlDe|md72UK>d_(EhXY~it;H)Qu%J_99*hHIm)N4^hl@v*e|nau_ltL$=J-8qoD zu0mM*Sd2(1W>@_f(#?XE3lSX~XWy!3w#9wC<8L!YHM1|w;La)_!S;HYSUNgAKt+XUvQR`QY5VDfXgnb4U?hjKo`l(Jlocc$szk6_-Lm(%oli@j#J=0 zupw7JIxJ>>Qlxm(#L+o$COn5&8r)f9_DZ4hNogSYV(j3z^~jzz{rQ1x8!p3A2%H$` zI!0wTZFku8YX_-2$`ycSa3Yb~PJSQoXcCd~6QaO+OL2KG8@1nR;I#_y=QxZI_~{z~`qu-b1j;>&t8pXr6kO827a{;}ns6SEj{epU0@rzhLmruw?m zx|+APkHRj|;iJut*tEFybATMHx2lNYZ1Bf~*`vpviR`w^9^n8_05pHqgijZDqWKL% zTBz$@R?zW3tLFJi|HnwEFM_kcZu`VwHanh2+Gh5PSERoF@#6<}|L&V|w}6SOb))b~ z9*tiPEG4jV!-Spp-&U8Ok1{uHx~au*I){2yJEk-gs9JvTksj{4Z9e%o%$;V!V&r)1 
z@XzU)5z996#IpdY`@H;Mb~oOgf`u&wLB7qeF-cjhbQ&^6eCnIS($}hO{q2*v7 za8;yj5B)fieU`D8b|LtwdCa}KtZvfG{KLZ5&|jJ{Sm)YG*)ZVe*66@WOfKQK@22mTao=54yZKUE5K!)zexaqSC zS9jh^U;hP$oCZHPmXk9-i7vlv#ejv)n(X}AebXw%-xlJq%k<;3SCYWMi*pM_MeYy_ z_+ajagx==m$JbS`a!)k)-_jQ%XRey6%HgOaGmjq!cplC6_xr5Z3WuRq9PPJxk)bY&ONH5vO5a9;fdUrZH9QMJKD$STTb-R_b`CF5qEW3`ClP zWe1gt+FwYcKloO#cOnW8T>fq3)I$RDy^kvetRBrd&eoo+54twyD7)zF?PA5s0j;m^l}Op~ zF3EP~p#@^5-#0sJWnTe#G9lOZO6aFjGh`86L*&tQ;|iWWMlne*!~v9c}KhmtcX+z&+=)f%5d%1 zK}B4~s&PbA(H=9qsf%izc?`cpN@HgCUUM8fJ@{hgIZ%^3%NmKvXS6gfJFmfMMX{mJ zX40y9ox-X4=ubuFrV*avyG}&cu&G>9<;lj_rYvKa3R6_^UcvpArvVt#-}BivtaZPV z5@q#AU1Xc&OCpHXhW+=vBhni!aW4=q4BmUoO$K(Jjl&Xbcru$Mu=CJ*9f@r(*@j8e z$UaV0mo(S6H^tb@r|o5%rX0Y}?@VUTZb@xJepMeG9AWqA=^m>A`K!Xo&8AG`_EBS0 zaF_R8z)C?jeEEhZ7|pzhxzDkAd{X&5n?SS>!y_MG(CbYrk*}R>Q`2v+<1sSpCp@Rr zoo^G5<)%IaZc6RVTm}2l-`PAFzjEd&|FkGTqI$Zov)UJYbz~99J0AJ-Z@t0q`jBb~ z^wzHW?NsCfzv_xOI_-JqH>c)RFq~TSSmz5GYmJybyQfS~O4Mn|t51sh`7|hNKR0cb zGK-^KtDYU1=6r&0W!@0nkVixT{jmit^h$Rt>7ypS^gR>`2EDBO^9k)f z&EQ8RhQ^r+Z7*9o)s57V#Az};DY&q`d30mQ(7qk#2S9~Gv2Wco<#ECN84Nc%BZgC( ztCP<<;K;NbvoU0IhloXg(^{toBZi(CwWYf@PB|mv|KaYpaOi0-q2@p1+kOv*YDAVJaFy&6$$n zN`|rmZ%Yfh?-Uvc>GA3(eOUzS3xa3<%Z;{<*5K^PZm(H?P7^pZtbD#N%Bg@miHXeB zQ7&A}MyGJG%OE=STtF)uFFWxW|sh|+KwE||rl%{0Xasb)sr%F2qr zo=EeGR09e{%=SbQ*gQ?c!12CM&cO?ht7Ir)F@>h4LGY{+LN$%GvmoAMD9t zN{0aH40o5DZdD4P8=XZ}07UayT~gazC=4 z70&2c3HyZ2`WJtqJfD%o<}$auh;YBDB00wmj?eo*K`+7quhCBTuAXWV zsC)>;R{ZpfZe@h52K-+7bAX;xMgb{%o#rAL(m)y$@8+c3+|W@XV{M$~YSo;eN|QA? 
ztfj$arpT}sC_p2RQWQ-!F~3q=I>y@DrI2pP$e*$SA7#75hki-*yioDxMz?kCV7hPw zXT-ELs`N`{X6@SchAcGjI6(eM0TSFm!8dV%SpFk8L!y|qzy@VXyI3dN*#DG>eHyNg zUCx3c3kMCr2ZA5g0wutNEux=>_2ZWN7Mqzptg~v@wuCafk?TWU;)~yIPzQbSqA>@z z#$T?_2KTdlY5?aQ25;m}?rszKKv0pey>m2Ph-4sCKRY6>ccgE@*}wS;rUZrP8AWhz z`IY#jFeh)i(^5c7k_6?_moxN5|$e)e#D1Zg%M`D*85}eNzJULZvwbfruUCE9Es$q{AO1y=ZW&r@^*5hMgCL z1~k%rOmHU7bm9%JK6iOX<%=p8b!bvwN3nDusX&&|kgKz_GkUknpkh&H)mUubnJUia zyooIBxufY%uP<=89*FS^otg3k{xLPfl{sERSu-C!EL2dQ2%D3l0oi%-1`qy5{@t<4 z0Y9HM9MS^(FB0?kz)j|Nwe^t~0)0G;0`R7KFRQPABMHM(h^9^%{AZDWJfec!uQK$! zwY}KSoZsWdgTfhidgUJcQEvW@I~Ut?>bW>W@=j@IIl{)qXY|-JJNX58*_rkocjZm@ zYHlJC%k<~25=|2nD+SR4-AxS|2cv)D`*)-+Ow=n1|`a)?D$Uw$v}#jrc3*5N6` z!$J}Z7f(a-mY^(fGcn}d#KG*iZBA0^nCq^i){YMI33&9~t`t%k9>>$Ag9n-D?42{8 zlfb4FJ=OZu-rK1c8yK3LJd9$diR`w0OQyMEj1!OLAO3#2qa$mkoxPYNUbQ`FU2KHl z`DqKr+lXJFnHC?vv@>|U=Exbg=}=xAjqcsLmtHv%y#t@zeo3p`osvJHy8J8Mv2oW( z>%&9)1?#3X1>;1w*JOzkGt3W})D?8FS+?K{Tj*%iXVo#idsFciJKq&o91zHdOtqfC z3!(=p1}On=lhhwP53sj(Uo9+;`i`x{OqwV~mxUK0?`p3?BjdWAgYrzqE-9?v^*w*J zj{Ol~-Y_$cqklMEiP1u-^<%-;qt~pec85mexzrXt&fDfAJ2^F^k;{KXPV4S~h;P;L z$qV&0_Jjc23I#gpJu1J8_Bu~j7<#PA9iNr`(9+wG#bu@ zUwohnhVsyf;Y?WmQ@&eMs++lt5SKQ-U}~&|+&No+XwTFs^Yu#9aD8Ejtqh3s=*r=P zY}A|-|DCW5NlH_Fd5H|8r(V-hVvSF^i^GU(1&uHCaV7eFqNnS6M(~qIPk`YyVm9-8 z|GSwNN3T8U*Z%Wk@<+G3G0bZBHHy}Ir<_;fc@$hS4+0x!$=cn6cXw}8Y(Bt(1cWT6 zkQYut`GQF5VZhz364Bu?*Td0>6LICR-(a7)Yju408>>WLxF3%C$nP0tAG4HXs(zeV z*f>>-_F=30Wqjd5h2zoa{wF8A=_0zd-p)%mDRDw~1*wzkbwJ&vR4n(~%Dtbku#AwH zkfTpFPX!mnY@L5*r%rw@!vM#;!}1Zlssc=G%yf-U@5rm&{psj=X!vQ9c0<)Ct2NbA zwU|#K(?L7hk7pwKnC`z}>#%H?*iP)SI{-TOHGQ^WJjH^59o z_vBLGC+1)tw4UY{++alzaQ{Dav#)5$(to)VvZ;a&@VLP$FIk_vT^C)?JY67~iZg61 zqh?RmxVf9XpAywdcfR)a!m*dYBO?}hNncg%;scgD-eK|6Tc&XpvVL1*i!bZ3y`S_* zi@Z;GbA;UsMI=9hV{8gA^jiQy+mE-tjXvFx`sBIm8*RqBg&m)q$e#9#+<1BA?FEd7 z=<;JtQIcJ8f#tY{cN@};wh0dM)umb8?#>VDJZLuZ_B9wxJyJq0V5-`8^SDL1i1++~ zfF!*M!WEcVC4kP1iCCvUVmw|^)?d5JzwSw-jz1Sm9S5F)e;Q0Y1b&(_^*fYETO1ZS 
z{So^!jtZO6=}LWg@uxehW@^8aL~GgDJU1#6%v0s8GD@r*U%uw`b33}GMwN_PqfNP{qRS)d?YQUgj!D+5D=ihzL3NOy}v*U&ZJ9`E~K z->k*D*XMao@3YT7JAQk>MyhtKDEMr`E>lL#n7&1p@bo=5ba7&0^+B`OuXzV#KLEl8 zlHdflAh8=l-Yt2R?1ErM*fd=EkeNX$zP4-5xvsSo)E?GX2QS zN~Cv-Z6orb;jNUBUTdk^HV3KNhzFi8>3c$keR$&~>PmmP)|ce+0gIo5t#UtJD_Z{D zo% z6(P8r)kyEhIJY^ow|A|iYHvJ+<}@>7FZQ&x`bgzY{v|0HOUHr;vG^h`f@5W>4&3Bk zB?#Ku%IjhAV?VaWsIU+k%v(MbS)&*^-05wS)&+klekm#+;8imG<$_km@&IFXFto27 z$ng7hfgb5~Nd7LeKXTRLQ9+?7YxC@V6Nb;KI?n}ea0XM=J69P9kZ+7XOAFrR*=_`tEl@s)=z z4y3ooZ(moqB700{w zh$`mE=GPLsPL+4ShUf!^a2H@WuwkxCFh?z&zZJ;v%QaDObAT^*>wANY!QijSsCODJ zi3g$`ZWw=07F%$q5vLKw5Y} zHBHSthuTW%q5y+A-xCxFWS7125RLrjSLvPC{^txGQSIV0E<5lC5Rw z(r~Y4CJWW3K%?+K+Hz=^ms5{7V%z`0s{1`;qN$T^bZ5@$H#+oqz4Pv7#uY8lf{e!~ zKcqlBAW9?DWG3D}y~!Nx;r)aUbD?KfdqfF%i;&RnkyzZ=(#jDlKyE-RJT7{bc<`KD zI3diPEnBSaZ|yd@ud;OE0iIN~)03CsY+J-+ap@LqhyRndgHL25bV2I%L zH4csEhXT($r23RoiP_mAcjuHm(Z6r3K4p(`8zKZ$AHGu;V8W4J!95*`J}!5f2MyE4 zR_TZAQqd9<`t(K@WYC*~ixbU9r=*jukMBw6mHc2zpV5(X3@C8+pF-_~>(Dw{cy1_X z0h$G4z>swEIjwct@b713_|}g;6>0h*v*OfNhNcHQ5ddEy3H0fG7E}a%qNBxa?`#u< zRZc+>NrOpTNh#;_#?Ft-81}3l5&UyRx=V5k)cm!r&BOKaCfRUSzP|`R_A6t*Hqlxq zP^Nl~JftCaH6FoOumz7ysrcCVIT>2@pju(h+6y478l~&yd~@9yF2G0l(!Nn3D`)B{ zpfhL|)j7eLWFvg16a2}v+r*}c-CPyhI*_Z*f-hqMy*imv*f1EBpu@MuPKUK8?0Q@$+}v%n1bS9q|2(P zojV)l?sp^4Jd|mdpaN4EJ%Ud@j<%uK^7gMHeyWKzel>kg4dH5;wsAagU!F)K&HY>|DLLizMDuLJ4&f01MSRsFi)Y$Uz4@N8 zyF<^^(kMC6EzZ_|uBL%WlRM3Q{i3GY%lQpSqvW*RbEAs2lEQf!L+7d(_KY}mM(!?% zYCH9q*0!$Be{w=;>awWgsKCy`z}ful-NpdwZJ3Vs!yX#OFsHYH4PRzTr;%~sBj+Dw z(ux($^AhNI;V#CMyuYiV_^_ve3k1f9G2giPXMVocR-4;;rtF7Knc)m|T>}f9i)}$; zB_ZVPlP@?OfTqu~Al*3{F-5>uiuiv*lNN8^`zS3fe5nF&4$eBG5Fh9Rnpm-JXi5S7 zgX^G7)%zNDPCQzG!O#*suOt;pG+xp%RT*LTq%(KeUhZ=$mqgDzqZ@X1GWg$^#Cy#5 z7wWmmm7@ADH>-3b{bI|mSNbnUd*yj{7WJRp&7DF>G&>jxC%h|o% z&F%OMsI1LLfRr##W>OwAYzdt@Q~1s68m5J6*6ZiqU;f2}1#+8)`MS;VthT(lw&LZkHDl~PXFFP8tN+}7m`(MCT?I-DHOtYt z|60%@Gh`tnGr=S;^XL(ABx)(cjQy*4!$hbGYO#Y{mmPo(c!7rFU2dZoou$N&2__iQrFpC z(HDAV6}b7y=WCLGe4RDj(42e6?l2b7-OL|R#uVm{k&AK9UX481%a~awa}6I%7~e8< 
zjs%{e@Kqz?S?fC4f5CZb+wXb_7trX}R14DLqeE9YYk$F(#!ratMiy0E2FGqMwUTJ- zPvjl^QSQ(@{+)i!oZ-=0#?D9NjjVIyO_h89*SZ&ayOyvzpPDffd^X73c<7rS_gW8m zFS~qq1XLQ>{Y7QED_%k8#l0eL?poR?M!M!Zi*6iw0sM{oNl{@V&q?_VD=^_LlQkzu5N$0S7a((J#szYi&$iiGA4`?JG6pp`i5r1LShi6=4pM9!fiC zH%zkiTPM@AgCR^EtCDCnMS5~b*q!C1K4X;fB1)pmah-bPlyQ)(K07S7vdxu9vdM^0 zTHaybC23Kf5be05i10+8pMJh!$(C8``xCLN_{gl9qVqf7$axWk`Mk2!KR%p}-jg(6 z3^Ro#XSRZ#oUL|C6225fd}`=9>W7KiovV^uDJgjZZ?388Fq><`b?en734+o*`jut$ zsc&AgH~fAc_?qHW?e=hGRP159SJ1p>wZ6^}c|KhmfaKmN1(XV2iH zK6}X}=}NsBl?3ivVBGJD`Jd=viKs$ zz8>&XR5`!krm?<%zpA5a!SeN5PSFcwMD+2U1Yw(KC$EVq`FeQ zl&G|lqwtabwBPIOflEJJ<}b|nnOiIMSz0O0J*8Xh{hFgU=ukCMR&@W{TU8B^$@54xmWmbji>rk-US)Z?tN+xFRGOmT(7;^)j}2!ekasrB!w@xPQKs>1$o_d zmQk1<^|m*aB=5}wBm1YnS<;qZV<+9U0?A3!{ejcuVGYH;jz%g`-vfhbC-tr^h|-7_ z*zP)KkiFD$P~J?%AyE7idjq`bOge?II6*M*Ep z8_!-IFrbp_!O^Q|?~No{ulYIfN4Tgn;{iJ?G0p`GHOlY*W}ywX=C^-t|7XHZl;%^Q zL6>6K3A^P%KZ~3X8XXE5cLn?$0yKidHbtGQ$(|;ggg(Qa+k2LodhmB5qas@aE4(S` z^xE6wu@6J}h=GAzxkb#31vx%DoqSw&^<@Cr`j_czsX9ZJSR2;LdjIiGwY4LPGELCwfO|s zaSMkiqyvrB#cdlgse8ZbNnEvmmiL8fPJd${$CcEi&w{G4D0~DvwU!cpZb4e8gl`(H zdW#C-9Urqgu`i?^d;{G^9~I_@qh@+3f|Nsi9J!iCp3EMD zm$A|8;`L7I2>&z3v6?;7`T)rHcCu15(MNcY}wNqVgt#hl3(aevdNWE&dqrK^rtj55t-db#=q~~ z-vR^9nxES~@JowiI9)x#dMkwZS(43G!1pxO<#nD=~T|s=vY2c8ahk_=tYCO-`dc zrBQm9WmYmnui)EuIMS#Gx>W5Jxx<>XA9pcu$2h@&$tJNprv~-8CUDR}px|a<3luF_ zt-W=2^40>3M>%@tKAkTEy2`$cO_aW7*kdsvWQ@dQxSTgJPa`@^ZclIY*o7@6-1UY2 zF}GjX2G1i_xdVvQ&`jR7q|hU@OnXMx833P1rFcIs6lLzEoIA=_a?)(sdr;t6pEn_E3Zw+Xrgwzc~Csaz3N8)W2D=dYEBo zBSFC4F$$awXvu786Fus+WUa|rfpj`WxRjzWTHua{!>IG?%Fp2Cq_Owp4=I#*XMZZv zjZ?q3+^xEha&(|~Wws**N7W&>Ha~01ME3{Tv^sl2FH)`-3yBJg78U=sHuH~Q>8X@( zRu2yH!t%Lw*mGD|+UHzJhG7N8Vv3X)Sr$ZR%zfb*d=4U`+S}F6kK;z`C6oCa;XD~x zK~I@+M>o%woO(p`u(Y?n-)Ov8WnAf>qAJ(M97emeMeXjNT%L`a5~D(>1YFq~(cQQo z+g%Zf1I0gR)A{6?>PegUL5(#36m`WmW&;1h<)0bJ<(I=~{oO!wA2j~$_CBrT->e^i z7do$ZmAd$hL`5V^Rou~Sr~DVbO7lO|QCSI>9bY2cY*OT6yFC@q)ijBMItD}M7fh1p z4H6Pmb8LjmdJGI`jsKHfS+orc>XQ}Kg;oSKOKCRx$19{@T!Mc%x|^Crj}G{i@GBX| 
zy9W_XQ~b*$7RA9q=JDwEPr5*3|D8F51nUP1?;zNeF_>Gv3C|lRbZX?rGqe0l6rr1H zp#op-`dm6!bCi%1!xu!Xaz7)uB&S4_?udzCwRt-}Ao&G|>!4zgmyj3&kt32W#$fgV14rFBHQ!vL%YIa)_%*$4BzV$-2z!;Jp?Z=R9- z0zDG8i@O8a;u#eNFL!>`uTA|=;2zJJKzk5Jl*-wJYA#?(2$YH|WQTBi@?8RBzU2lR zoN~(s_^2?aM&p2jl*omN^^tOtLsr1##x%e+b5H;Ei>rpU{PcjLqFeFId^hQnf5JCY ze50O(cL0I0%UxbM)%!Fd`?SrHOW~Xi`cG})+R;i&bd z!@#$R$l&ZPb(R{zWosDrD2{^d)C5-;1+{gmWUVIY+GqQ4015FbPdwG$2+PL*G|pd( zwl1gXc+y;^CAZ%^9X)T`EZI>~{{8=t!uYO&&uNnjev_Uv>*&UbhS!mchPf4f4~2JG z2$Raj$-`Ts>I<1Xe<;mFf}2?#zBb+(@U$si=a?y)a7t^@^?BB>NoWk2;H`aExv@=~ zM->f2x~PAuy7L8_(g+8rBqEbX5)5K%*B~wqk@27-6n%)x8dN1*0)q~(Y*gv9u>6nW z;Fc7s`gt;f{DtD0Bb$J`;vVw>$|arM;+(y5s!EHu$_w`5YBn`Tlyw1vu=7sl-mGS4Erp?{O4?Jj>VOV8L1rsObcvxKf2%+c4oQ#M$ewU zbEi|(gT}>--nEeacc*ZA?SlTJnv+hmf%|0B;?PrGJdsUbp?7L8mw&9%y}(uB*PYje znkAwCh4}^HNQIXM`2AV|ZJZB3hp%1l@rf;T(t?G0o0F$Mg$jI3G*+h1?SEm~KOL8P zf1l{7-5!e$eE|k$h2In;QbcGT1Zf|l#75XoP8hqbcxJSp%l_WOpFA+G+x4h#*stWu zR;liTuIz8C@lndPTPd^E-xot*N|J0Zce&FoTx02J5@7g`6<;Z~aV3iS2pd5|+d*pc z-J`>p)fr}A=b4YY6WPOV9-b*X*D!XjHehzm3}yi*jITSVhur#@p- z_R&|k`}>}7&U`#BQ7hgT?$R`xG4ycA)k{M&Az?4_^q*iQS{yu|T7XJX$>T&IZV z5dT&Fxdm+Lg&dSrJ|IbpP=dC>Cn4!;!JpM9vG2NiBgV_|xZ3ODS1Zw}69dNYT|8{N zrh0mIj&3|iA8IR`RIy{$mEFzr-W-&R<-(NZSZ4VCn1 zxmFd_`=GH_?_0)o*Of(4Zinv9OI7kGk!Ww-1?Vuu*S8+FRaf_g*&zI0XQxd*NEm_gz!aP%!7&LBu(Nxj}!K&9uYx1}Cv1ilqSC za3e1<@wH>QNpJR!wT`6>2^cVx&w2Yk+tlb~bwUX2+{Q%x zpD#dig~PvHAvTfYE`^pZ8}CE+Y<2GGb@B)VSr#S}k-N|Sx=AgPdUGOb>56vP`r|fD z^uUK2A@d=?Rcg_C#+88l%7k87&X+EcY-KkRS-_MK7%M^2@88)7tR( z!x}1bOwmU&nPw2|n)d>B==}cvd6Gqps5Mk}l@hgLH&fBSFrvedfB9C~w(SE;NxLe> z1tdWtCAZ6T6^HgOdgp!N(*h|6s2Ae!5u?fwVDSxN4UT^TqP{ATk_bp;A~%P(2A{B7 z(5Aep#XC!OODvup*_wOe^=ERe03Ckwowr#vmekjTOLv;R8a8Ti4v)9IWXb%Xl=BU} zjbm>89{6=gq$JzEtEw8t(yP7MnaSQa`*E#s!bQH_r)%CNHsGMK!gK*CZ@V?Cu(bmd z3Cf$}6fa^*q&5KSCs5*fU#sw-fg!H>||% zk%XL)9adP%EQnB%L38?*FCZ7aU7snf&F(@NLsE3D)qvHtl&3C#Mq5`@vXPMmx{YG@ z4l7I1suMOu5pU`8avJsml%eX^@-d)cVV_{Eu?);C+1(JPl+MMI@Rk@NC*>Vsah}zuLWF1q<=y|{T$*#_qQ-J 
zA2eomC}#HO^1^_}##Sc!-OG18N}<^bb$)uZW%n-?!<}?f_yX-t$-6-VWmmlt$$}hM z42?B$%#06Mo%17}NPfXP85F>1H_oqaCC5%IIInYma^C2B`QMCHAj2#rb?D38y`rmz z-jIxoXTod-x z>l@&l>yTqVfRxEz$YP&89QOM&z2gZNOs;S#$>ve%tj>L-KAi}cA!H+!L7p2PW(NR6 z+Pi#EUd)oPE@e_H&G!k&d%CW^`F$$Ym3XyLq05wE*Isma(1-VBpJ1D8>Xz!6mvQBs zsm|m?g#5O4svXm=gXo@_HSwVsGCOXdn~8?e#QQt`wBDVJ4}5szZff4V~U@wK)B4?wD~57`!!N2nA|zukvkCk9XCSg(rDyR*9~w&QD$XUi&E5?z5VXk)Pz@spbx)9-!yLZO^0x)~>#X7#ef-(Xw3^%wmeQj-1#> zYL67DXXMTo>51Jp49fhPkq7+c#+)hleigs9%_P@<=W$b$$+X4;L(9vfkCstr3@bq4 zJ@qCwHQ~^2ZiaZcM9yjr`XD0gJCU+AQumKbXW3!33m$k%_ZE;aT(hDq$(FCt54&!IaMLWZ zoxH{?-$Eleblv?C5M`93*F`64`**&L8y8ImQs^sZlWcTp#pt0-hKW>puch~ndQ@^7 zdBV%{ty&51UfH;|oyM-`kH>D}^j56xUNqa}dIGBYp1W?;Cj40*&D2 z?hl75TQEnES7r8=#q&>Zuzsqwmd?^o8$nA2tACue{iR6s=m#L5(>Uqrs-4}TwVLz2 zKY(IK59yG+UEqW@Z<##=>T~&o5_(6rZ!83SP;2!9)+TMlP?={4(ltcx0$8~J;dj1n zLyZZ<8CQ)OjwYSA1=uf3S#~3CUxqzuq3;g`LThjfJ8vg^N@DUSuI(KAO4xa6rt9xH zA@m>Fb-n9I>*cZCIT=MKubt=2Yz6<9-`4ZpbBf8E*!O`Owjy_PtTfwH^zO};Q&sn4 zTp`*67=axnz*(-{x~cHXXw7u=v<|I*;JCP^V)mc8gpvK2ru!c@vIFiL>lNJPubXKO3G(zSuYL$8RsYdpJt`XKvMk1cE9J%?f&g|YCf8Mn|V`SB`A1O z+U^(qeBL%}pCj$E~tUr>UT4?pTTK?fa(e=?~LJ^o{%W-AOr0H34SY?cb+n zS%;#saLst=&^Oiwjtu>6+6#L#PHav8%1?K@DnA9Ys~r zut4YBy6nvh$<|h@JAq0gz5@N< zb=4jiUxgjxUL~qXFSNZEf=efItoYR+xA#6}+N8LMuBhCCcs<$Q$;6)`$I6TLD2spp z5Ot>ynx}PoD4V}BQZLQn9_jU#qf<5U`RAprJlLmF?B?@@iqPXJd+FghdA;^Z>OV76 ztPu};e5?9?FN>(&rM|O>0$D@DwhdB6xv3dtK-uMBY7;dn~!`!UL$%|G0F=d_ztyTQ3F?v8jfN}$juNvRZ|G^!<2;)T{R^$4~{X#dnqyTDM@qf^DH5(KMrT8Xuf<4UCm=R)`SLg|>p<<&Et51?a; zo@;{GfdG^PM=USoCkXri!b;lVY7*Pd$ppU8g702{VuTpghKMQt9#HUI{e&LA?S>K0 zyn`K3Z(X12hX9AeIXAY`LSp8#Ga;&!cFaKyPBF))E_P{_$p^anFxI$JBW)VDj{(%C zWY=+PCy%$M%UNJ0WW-jMC9n^uPTDT#eZ>h2G9?DqTmH)}!#7xjMrZ^*`u+^scsx_# zm}TdDuiiJVG_iGIiY75ocC5zpsIAXoM(TY5ih7SrWuSFR^oW%yFj84Y5dx}K@qiXtmjLRX%RDGk&R0!#wWXyY)Z-^6H-7VYUc z-|kzC&X$rfbaJ80_0n`u`s?||gkx#a!&7>Jw{I^91(44F!3(hlurI8vo3%o)QqnK@ zZ#jG|uZu-en={-Vy;qiO*^Q)|X}W;~?5jBI z^s&snc#olb^|n?K$ozs7DH8Zjr$KbF*mf9eQjMzF0H-ycPrr^{4m=tusRbBH_dZzY 
zt10)fTm=tpg!_AL^NH6n-n;(Wpr(>p{yvYwc2DzVmJ&oI(xm&~s{XAKM-bI-?iN-C zMYYPIe>46K#WRc!J3e|2@8Q4<-Sot$9Q@SpB?I_z3^ef@}dL;=sysT0-7b~N3p zGRp0Uq~rfclZL~}puMCNd$59}+^dmy%fV885(rF13LMuGd=sZp8SXpY*yZ)DzmY|) zUn#5RU!p57IwCbPSJRqlYCipDx?dHd5_)2)B!UiB`P7K-!nYscTLE22S%z@`T9wXO z3!U>wd{lolDlAJ-pC}^?UQOMTEu=py(<+k7^^hKGW-%yzcpxuO>PDtbBShxe17Mbg5DN=x4GulG)nr;{wpWhxsQ-NX*WKmbBCI> znf~LI*g081IP$U}W&afWDp-_OZNzLG4F_)_m|*jIf@&9AVljwC=S}BqlaV z?=%76O?buTR%In$Q6}}*51W-5l-oqk6=_1|MZsNcitTECU1vRkAZi&suNC^B&7jwByxc@FrpK{axm2!1GdI&@o|1*yi^_m)G zzvweJs>z0ppQVi_d*H;iqF1CD-|CD&3b5-1c~tCn2lhy$R$P%13c`Nh1y299G5mN3 zETo7fsk=eN{&;e08=?6gK)o`Y99q_-G>l+4Wq6?4VM3=NAe@3A1RGrAH3SVHMk796 zNj=r(ekQCzRa5LCv$}@w{juYzh+RW)Ok%w*#B+J8Z|mt=>Kp)2M_bH2POG}K* zDOA|9Q42$sfItX@%FM|o?q^S|5j?ad-y42VrH9v_zz%R#T5`np?KGrnLztH*g_Tx8Sx5f& zL%6#}q`|0LHsCA;j=a_NE_MiBGdg=pPwdbm$dRP8J5BemQ@5qI+a~P-$p^mBy~6p7 zSZ)cvSCcnt)o~J$4UpUhCO}jGz+dm3U2(dpUV7p5v*mZ=9`i^7Y+87!S-#S}Wl@K` zM+wA!95{YK&doyL!!Jw^uzs^;@;d~gHvA&MjLFvz&)%YG zdQ;s4n15@$z1;@j{D@DlowA0AaNAaa>3%>TY$@*D47`N#6R6NAKq+iTENE;$vj!yB zR7VU-_7sPX$-<5Cfg3?sAAo36P3Md?VdpVM_f--T@`XY(C9?1hgjoFG@RYvg8I7rA zA|-5L4zpZ3?C}l2IKiBdW(Vcz8wpx3a)>YHQHp8_r$AprJZ$iW zE@&kFz3WFbaK3NL43ndx-Ld7~dQ5cUDb;PIF@_rV*mP{EXhoz}zVo4{SAK}jbNBxz z4N+$6;~(|V_yb-pEgpNPlTt;G4+xH+<}%TtU&}o8VilYr`_qqLP{Y;l(Y&C5GN#{R zRmNKNwjGkNfi#G^k$7`rgJCJ2>5OPS$u83t_VBC6+pez2AQn)-f30#97&WT_wNE+q z4fHtQ3!e#&RKRkCAJy_L{m+){TwImSB3|^dx#L>~INwsU>|#aK3d6$^qa<&kb)W~#OvBH`XSl`MI}KthKpa5<_;Az3ZO-U7cLeP4IhMiE~a zzB2j%N*CF3pMpLL5G%s}Io;4Y`pK!5`<(f3b2=;#Y$?LP`T!yUo}+xWfa-rh$5Lf5 z>rDK0XhC+*xO(S;3z&oKnU?VMpZW$Nc0#!|EVXAj)>b>zg1Yh=hF~5TD+uPdFL1*< z2n-RL8GV2{MR37z7M$F^XtLu2*kAGZq2a0BiE<15B(fKysKfY9dofUOnFGZ=)Pc+0 zzQMtpQ?hd=u&6a@$Z^19I130Z(*VW2BPVhUsyy5-z1?@~SAAKMLqS23Bi#CI+oeN%LA+enc&ES(K&b+;DIRJLZBmV+U?v9o(L5=G9Va>$2K@P zxbWEN5@p8z;R3&Esu#oCd~koL?=du~cB|eQKe-BPf#8z)#kEQqz(dtjgo#qoSe|Qe z=6~31r-qkvLucH|SLRXGseIUTUrafwl1?MO(AY zdacEVd2DOJx#K?W0%pgG(mq0^2%62grwg0+C14@LZ>n2gXB^5S#BGLs_t$Sk 
z;pUGx+*^=#`70f0QM|_P;L4w^_FkXkZl9ZOPU=PFt+&s;ThN919&e!0`o}_@)!&Je zrTR5-**EVIw?14fx@5u*HNv?9WMf|Qz7~g*&eW5*FWNg?ZC?Cq zQU`1kkb+_HPB%ELm7=~~G+Dg8y!+IMrO|5!MFH*yaf63~CgWi!HB~~76bT~Az5gk~ z;PnkDcVsYc67y4pZh(>r8F+`SNBq$$kAi6{5u&f)!_V@)JD*K@5$(3~&ibK8mE!E*MQ1cEZg6)}* z(STTw?N36$_ZD~$+QymC@hx{aPLBThYo)uN{;TCBNXZBXt;C0Y1y?d{82rb#&Ls53 zTWaS!GpEB!Jq8&Ieqt1tg>O~N1ntD6X%u4+aYA3il}2w$4{8F`75L)o^TCIG_!3y) z$HK<3K?h*MZwNa$to9H@1|seCEXBD6m4ES=V-I!{L`r!=^dZP3E}A`$pK?%Pksggb z78+JLx`2iERG;?nk!0e@_f=SkqWOkwJVKYBVpqYRI=PsA5$@Mh6kF@Fo4z$jkHG1E zx!cR$&)8o}`&9n_&bo(zZt?|mCF%FfAMnQFJXKFR0vVg9uIx_JQ%;%YlvHrTfKK~^ z@0dv-7VHSGU{3D*NFW}3INbN38!uuU?$U>Bxj`Avwbo2~aO6iYGQ^H*0ZThnKNMM^ zQSl^s>`;Nyme+gLBS=F1G~b|5Kp+%^Nadq{7ZlF{b5#LqJx?uVZU#Z#<$RxX7Udq;VI%XE-|bHb;M(E5J%4lIq42gc5oKx5ydVtNYF^Vq4;Yca8;-9 zYGo6D3vU|kfZE|$tn$mlPTOGMlsOVWQ5Pfb33T2eUH5@xHlI;YaU1g96d$JLM%Gg1 zao9IO#jd!%@LP7^?Tp}h*0ELgE9!T}bfsy9D>olT@jBWvEz6ebOZcmG-X%WnLRlc2 zr5^)jSHI9Z-kYzI`gTN94@KsiOP&sWGJ|h06hYlh#522C^W_fmC%SGdz>Z-p4cn?VVx#GXk&s>ywVSwZ_xgs91aO9Q z&SX3K;L8-`c=V0LFaqkx2d*KtdmSZyE;#WERv&)#e5!EA#B}Cw3b%TT&HfEoN{2Xk zmCuX|V5*jRSQSD5a-e#@^BLQh5vH;o@bt6|#M)n zRq3)WSofIdE zVe7Stb9(m@a=+1M z{IWmNcS`yDK-^6zz%txmg=Uqjh01C(-fP@6^a?{SqxR~stxq%U72D+&Y_ER0Kbs;w z{`A6gP7BtyE;~IGG<5T>N7nDx38!-4s9*}=1P=*~<>lh;dMppx_e@V8L2bP=pPnc! z2y*ZH(yVwWG3?G-*!AJ^=^-Ss$~SM<@8#B5vX<0DaWkG|k~QmR z2jq&7zjs?hPTg=j@D1KKHr+U4#?ATl(O|Hme>khC=W8m(MwRBK98dnfR;Y&JTF8O^iS0e zzff(nZ}6;G#~@D=CW0Kd*X0&%U3d)(mv|*5v#tN@Rs=xkF@Mq?v#Ew;mS3hn(Iis! 
zd~uIGpHy>Ap0gU;riErVQC7}HNtS~)8?pv0>lc3P(cPa)rW0-SrsFR>20EYc8GV!@ zlhO#jL<3T=VUPq&jW=)w(^0G4?%Yvf9P`-pO>}%gb(WHxrsPwHZC4FtZ7EO7H1hp* zm&vtA;~ec3Lg7QbmzUXK$nyV0gG6wgjy_rw2GGZn9DOtAnwt>T{Ww)I3edH`^!6mha$rBr$e}9n;b|N3Kj0wOU(F zD#AwTg_-IcTx*0m%)5JLdTO(yM8Ce?y%5p{tEG38I@`s3rrMYQJ|;_kcmeeVrprGb zKIY-w?bVdUZFFEn@ghVGEcEHlWlV;9mcvHOe4cha{UA3b6*GeIVuKUyKul+l)635< zGHothQhxTCKbRP`ztX5#=dq~yv+~C^RT-e*uVGW?%&6Yr9PwygI^96`_xxcw{HB3r zihh1L@r@QYk=Ub~y0<-c@eR!*DR%!!t4eAh|8*XcyF`(&Qt2P>Z4?&LZlY9TB z^Kt%8JPEU4{i;!0zTE{!Wvtj9>%YvtOMzgQvX}^un zPyfMn;6dflQJtKZ?(cEcmYz@-SIT2%b9h&fPK5F_TNXZVC|P$PVLO5_IkEP1a!v1Z z7PSXIW&6zZ1-BLW#SULiHU3PIei5VW#u<(^Vwa}DsX1y%3pR`!R2hIitk~gZf9*kq zeRA4)xTV8SELNv`&nTtF`O3`MA`hQ#;ZP@2j$mT-8bLL&#yL0t?$N0(EK7A#*bpQZ zf(>Bi7$YIE~6$9t3>Hh zGU~$R>{}{F)GirUAig)QYf{*Oz$!2)?ih~l=nAR3V|g)Hmdc~<$s}J634)hE;k}xD z-N+S&IdBFzQGauY!Z;J*!hS+kOi@qV>r4JiRuA*bXxfb&2|^39|LoICT|Rqr_1%q& zgFzKv6*6Lr1W5arr09aL$|_#Qz%EIqxW+tR@)>N3cr?8e_S`UrdF{B4yr*O$e!)7w zl!(Vhx8^R^ELi$1e(~PL=hqY+^z@^xFgcO=c&pZ~2QFsdqWA;^_UG$8=W-+Uw$2Fa zdF*w?yhP)7JPb)yBgI9RIP4PR3IoGn+~ncD;A_>jRP0q%I|h^QXb|rIr(v3fo0Y=0 zy}Grg+N}0@LCSe|)KXhG@J}tN%5vY!Qt-YsdR%*W;JJN#z-28;Qg)zh$^}B@E;D}d z=WGGUBh8G8aJ&$R$vvOuucS)mpITpafF*we$W@?uffPN0tV_8_6@y@u;E2Kc}ArG8@Uf{hClS=#7 zzjkDvgdSk>8-{bi*tJ^uU?N(W6H-UJr(NwmR+{?D4aHeU3wm&zQR zx7Ta|Qwh=&{d-$z7~5a|CH=4nyzHuXE*$S+?aY974T|ZXM{WHW()W??fxtr8S~ARd z`YIT?c6%N=^ksB4rlx&-3wpYi<6V8BVz)m&*lw0l>3>Pb)GogF$2OMRswnr{GoN?OIC#+CR z8sbRF{va2}^Iae3mvFf; zkGO$;U&E?2W^kk%B|TVNG+lf6WUU0$=~ZcJaJs1AyLe=d_WCi%CHVH64M&gn@V9%9 zrQ+x3@Lx3HBV6nhGVpTYOHi(gy)fz%lp$`l{jF_b()uv-@0$3gw;-Jwrl?{OQ(l(( zCM@0aOc3##L7G3k^01HZH_E)or0dMGrB*E*S^ksIK4SB5fMamn4STJhDK@m%1Dq36 ztsz!Z#c1)BXRPyd7P8HQV=%mkz3g%-RiyO%9OSZw4e{%sA@TDcw32CZ8vbe!F@h)B1C2u0j!va|!- zofb^6P=9)tO#SGJ#4T^U7%fe-g-r^PCj?BEqbmRD?NVd4q|qDTEPaS3Zu@dlT&_D) zF5da)EM}u})vu_oj>%uyJW*@uGsp?4Y<9DVGX_SD?tDpC%rsFO?~F>1lbC;xZEA>w 
zOhFJYbknkC`!X2IFBBR>m?rb5$PJEq=No)5FUps4o>)_+_K>|dzr@g?1CBakilT(ulb8jjdY)E419LnGS_-)*vPK_j7|3#@XL{qNC*HaZg>M*-N3JD|D-Rr8BZyrJFT)BFfz=aC(kru zcrw(^TXoN@UBRuo!7lWTp*v;0*x}|-LSFbK+eAFQk5KsWpHv%Dxu467zr9&m1{vq^ zSc$ulk+l-eP7TPz9P`@K6xnW0Zj9~C;bYJ?9&5gwbfyMJeRRtIj~`bJ1s(zMwbz*$ zZ}O(tF?I@7DSLoD+_>7yMz!yTR?Pe;ESAd2%I2fQ0yHB^@5gH_^IhVW_Fl@`?sYHf z)g;QpecH5tq6=f#HTi2tiP!HLiRCu?Wc(629+iIUkz35Es4r(DSI0}0l~Ef%s#^=t zXV@0B7l-w>)~DYFr1REeo7nv?D5&E;=bYGJ4r`6qBx9qdCdQYE;1>elE zs?p%t?>bkhY}_)k^ojDpdP-SoU!+Udzx9?ckb;Ol*)cYnqm}?Z)660?kD874{21jn zKVW%r^@kzyH@ExvumutRbwb`yegdrGoz0j3MaEOf_D9~@_%YPl%c}ovb3OhoPD$1n zmO<1kUxFYx(Nh>~Ga@C_23f7Yp`6@=B5qxnrk>yfD!;V$BK_=9qT?Sl)obU>cHI*V zHkh;&T=htaLo()fRNiy??7&cn5xn~72N3r_HuB07@Akuo{8EJvE)mtTgmS@ z$*w9U{i)rvz1aOPl~=0G2vQQ_;-05@K&^6WciYheJkUV~w4vHWlUHJJ_ie{p9zKrs zNVy@DM`8aY4URQHK2pis(sPSN(<`gM{AQ@~f0$D-ECpKASU@`UVh zEQS58_eHMOUs@cpRuuzYOv{(VptW`4F$wRSbBl?+A~2r(mqG#MSz-=?V#>pnU3(U| z54|tmTTqq71+nMl{Me(RFS%l++J4^b=XD>_>W|Mwt)DYP(mHk3){b?5ApN4=;OPK7ozo5cw(Je_V_+>t3OP_U+S*M|MjB~ok0$!q4XN#S;pkKfxi+ZdsUQ&J4}587u(P?Bwc z%W<-~=dK{1K0~!_GVH`dX1eWWc{c0ZoINAVuSu$_SB;{_#SMD+Qhk2*+%4ttQDw6O ziv@?hVqeAI)3XUan=rxD@;!OZhxop@s|FE28}7gp$dS!Fk?obJ2m`U@OMFqU2It0S zOYL^;l#0^6$A(zWEwOz_5&e8dx9X{35SYmQ{agJ=tM}rP%l3YT;jH;2tSOeS`3i6_ z0pe15=YYYS*@v3{Utw<@7G?K54&Q`;(vM0A2ndLPgrZVQC?G7TNavDDNP|nqf~16q zfI+GtT_W8jii85v9Re<;bicEHzQ4cU=eqWK@IEzjX6BqZb7to7KrEy_v1N}BIFCih zEWj9Fb2EAKEQ0Kedph7R1GSQs)?lAQjs2fX;SZFCJSUJxOLbV>7pHpqfQM*GwTY1j zAs@I3T|!(r^JEyY@P0_%CMw$oEl~LJbh!W&-%07}mzU+0;ezo^ReKoWQP-h&PFpU{4^pup!G5_^*$)_ROHvFLSgIaqANbwW-xBpOz@5nH(IyZB7X!O!1L^4Aa^A z100e18WVu9f>1r-S;-Jnp3#}^{qLSa6YF-|za|KimjaVchk>o_GRA*z+>&b|lk{G# zmzTLXyNRr0G7oY1T!bd7&F1K5?`eVE+hk7s$A=&L;UFR98d>h+y}6+vuYXg_F_9Y{ z^w=)4(aV7N-a+M#(i2zgVP962Z$CQU{MJYI?b+hGO8yUt!RSrN9F^EOOGi1k54oRX z(g#c=r2qUh`Q~O9a=P$=TIH?4N5{M(M*MNDeKEY%#NvgBuJPA?G10r#2ER(; zY=3$Zu1YR!90+9cRuzime>q*%1&ET2St%JyNMAXCMm*}`hG`x$xZS(qeKqN`@pfZm z#J2|^GTMD!<&zmuhO1EDjOD`htcA$Shb#P_-`OYcf(XeON&tDg(q(iuCUqu$P 
zxMR32P7D4Uh&}9`pYQ_3G)0+Rt*&sC3$03uyqgDAMf-2c5lDsr z4lP3LZ5PeoxQzk9w(g^5xIggld>w2F#K!2&VXP9wK($NRJy;c|nw=R?So(nR&M*I6 z!meXKfGQs!UYcM{)I00AP{k6vanQN+t^V+J`4X-0GjzMAZQ2dJKtr&eBrPaEVzvrv z?<@{CduW>|%&gRCs*_K$qzgy+ma6#}qu9Rcuc!OkN7La6W4Q%OW-0A?%Q8MawKH9u z5UsYPyG@Wl92ax!u>y%*KaSIG3>xp#;g4%Ow>-^ZEmiZ(lz#%%zl5OG+yL<4a)7U;1LB< z`r>0S!Kv5DatkWtch_x#&eHPaWt&jPCEII7f1C#4(t|b!J1;>wgD>&qA0&Nty`UnUMdi^XSUl`?xanO#er)On5XF5W4>c!anJK*t^+mmN?ex=Wcds zK!!OXn9W;Ihm0VN6-wK?6bm=1;Z{&wd0C9=Tarr42eb2Yn1%4x?z+;UVj4{=;b(WL zR5KxS1j70&?%ysnrK6PR@0b3v?V@B4-&4zaz>oZ7#(K>IHY5Y+1oIRg|5Ac+PlS^2 zF0~xhXOr`Z)`p>3xzNJW_~P$CoFcp5Z+VFq7KftM0IO^`>^~}g0{{Z~N zz(jtR3WhG`Ui_d}$M6DpshLz0b(~v%!*-vPIOFxpK5xDitSmTTAz{5S7yNKXJs>3M zDe0=8)$iwz`ET|E&8N)*`BGdVsn-2UPV@i(qD%k=Qh5AFpxrx0%=Ru|`ugQ((5|~9 z$Zkc#yoH`LZ!VI~?%{dZ#=m&iAocYD$9mL@2x^&)Nv&PYfZ?-YtJT2 z_wQIxfh61vG9YX_roiU+1OF!m-hdzvI5RXD(RKcqBpkT9>o;9r{Z`bkHWqal?_1GD?w@Q+Cr$Y*#*`CIjgucvTW`vIO!f?j;seHoI&o(Lp3c`;5GMzjRx!Ex3I* zGIZCcEO(UKCX-yO!6wgh!)*dkp^=Hu0IX7knBs;O9c59KCr0Zvys}!!*mr*j(Yt&Z zc&dtqf2Vz-PQJA0vG+wQ5(a?ydy2C4B#SZEpt?zGOviEijZ=yL3@3rEi~hTkW&E#J zxK5qbl4kV!lcYy3HwSC*8M`im;{!!PqH_!5xL1iV!P3QFx*u%E=O7X&l(J4cM2%<7 ziYw2I1#1Qv-SyyqC)RItHBSZdCuKB%6P=S78*dNzwZ!@4{QN|)YS87`d~o10ZT#tc zrkjHBl%Jf#2Ln*8LQs`sAGL$U%yW!~oWNj0@F@X*dr0FVjtP{jcc zFcwZr44M#=G0dLM3H{$iqr=+$^PDHk99)adUd7!VJHpc%S8(2-S?^Gs-z@|f^dN~B z4bp0UoI^0yY%?801*W4;xy+`OmzK%%0^lG1hJ~|UO&Xh$flD>#;773m0*V$s0dxn) z%VQWb8FMPAFNY-sp&*3(#(AGObb z^TbPvFkLD47&Ysw0!E2R3d9oW?{IB1u%+7wZEgB!Obtcs*^s%swZ=fPGEK5Q1!hAc zYAsT4?Ytw+8w^qul>>t+Uq$UFAj8t|6=~4HKPx!85$+JEX22+N zi4FTG_MYJYk=@4ed5?5c3<<7rAoC)o2%CC?xGZuU(3qt=vFla7@Yd#iAl*I3UgGY)J!{qZG9o#?I1YU}Mv)kx!`3;s{U&VJ`UuDTgDLTr402p0Gdf=&d zFO6taWl;XVemmMuCwKF``Y6P~HQEQr%lTY&o)ooirG*fG76sWHm>c9>HAnfQoX)Pi;7jjyFn8I?{Pz1 z%VB`h)BfwG1F>vp`r{*Xwe9k--4M;a#Kmuwth!pqiULrLye3 zPKl7|cbC%jrOt5mqhgf{DE}_$=QCEYa6ia6ln|TXJ#hk0TqLmE&4ap>C*VFcbZl<+ z*DXoIW+dT1Pf5Tz@H>G$|5XAO3gLt*s(T#XO|gTXTj%=0;(&XQzHz_K6oa_&Fz)id zoZ5BO6$bNmQ2~js#XK`CR1Va5;;ULkLWpnTvC;ICrYaXPk-Xd)1IlRqfq=R*d6ZvN 
zc=Hokkt?&7g8(=-)!i`;eTz50Gqxs6pHbv;<)~*udUrL+GL`+)Z|NWnyERhY^$##$ z6bUnaG#1Cze!L-Q1T9^RzNO8n>1gi6FBZ{?7?uxgOC_E$t3saj?q81Uu#Tics(aP z6I`}FaT6&5?g=eVfP|(OO0lF<5<+KT76rGbc6UlusWZf^IUi;pYUKZ03EpX!2ys|s z=dWwr@INq+F;**8UuiDU1#qQVUd!$%H-hZ#=;S6~0VxhiRlZWY{Y|zPWi!unZ<=9( ztk7+Rn4ECr^oza}m0y2ff;=xnD=Ne0 zzCL3`8^*ex8xpa~qAY-N?-^l5>$g@R0!ygE+^RG=upBE(}47WoFm8&<=>Exuu{%f z2m(|2{Y53|Gs=zN05kWsC(R%7@eR=`DoelAtK72xbTuk5#910j#-Pabs2L~3Tpmpo zlV-BIQ*;++=`or{CC^Ktehr&ir&3S+a39@$)%SmW=Kv>^x{vNBORTci?O7D4iVaY7 zC&GxhlsMqcVT&YR*sT&(T?X`*B-<_wSS43OIY)!SdjJskf1U6dGKz3~wGZL8o+{o5 z>03T!l~Vp)J@aB$N{G-`ld@-+>b{-M##@-ATJm>2Q)gE0|MBIr>XL=b3ZI0`=_G%j z$+vBY=I=id6qr0Oi1N$Ve?eFSM$YX*qgWz?p1e_lsv=ic8gK3K2VX&tmn_N;ZYu;2 zUB#flwVK+eu>PpWPVN4&?Y#bH9V8=Nzg|dLJr-Zl0uWN1obqH@l2PIuGO2oIVyn|S zU(O!-`~Ub=x@IB5A~Vw*-_ZfmUYh8*P(fwuAy}98YZz%LckqQFK?YrAxhNl5%g>hOYG} zzJohwwovxYgT2D^;Y(t)r!KSFVa1v2&zVV!-2wr&1CeV;h(6@ctnS7f zeZI>A_NL|^2^-eqPu2?uu{0Q9BBtW?qyXW(Yb=iJmpW7>9WazPwBv2fy7J-M{J0xQ z-=pdIGfWh2w?biE4#-h`BV8PuyDjFJGtHJZ+HdHk1sXQ&v*>tk&nM&+|SFSmRyi9A3sC(r{S#ow) zUV3agJzmA>C4HsIEXrp6UfwkUNA_2F?WLt^h65xm&u&joP49irsKF&-huxrO?|2xgD#@($@`WJMYeT-Gc2CF?<9){mhF9 zW1Et&II_9d3XHcnI6{8WW^BAEgbow)_%@1y7@0LxUGdWcoZs+VbL=cEj5Zhu5RA{K zR3@78L=H5Qc?jau4$UZGf$b=F)m6*;bjqc-awv)nnN>7gsoP!%kJo9OptZq{g_!7S zM}BMQXSkR5>0G~JIxfKMiKxIL27jR_8(v9Sj>U3bS*y1LYvNYG*VxhR&!#9uPGvau z7k4MX+NsZY`t2J%#-Kbqj2u5jH;D@%=ds@~Ta!N1Uuvq^HX$P-;ccGL%Df=6bD}kg zyVlqAPtJgY9@*mm+*X;HELQITN1>t^3iGzFo!PnPIX$g^hLQcreI<LA{tBM}y z(=v4oI?HM9q`Y|CU&UbeNTHxJ%i8evxs)odJE}c~EAeOD{jBlC2$hN<%dL+__bqij zoqo0WN83Op7#)aj{LDAg|#FPeNX3$r%-272NW4G3NcerlCi2dGG`}_YPrXs&%`AyQ7CCQRpX$%d8 z+)WV~GmM<^4xQ6DFdu#}v7q%l(K==BQ)jSF+tDX+t^Er4nzKMmiHWzhaf1G}pL?GR za!TG5+@g3@&v#iLHj^(c#1V&UyPH332s?d0BXgkC{h)cyzwckVPh2v7pYLnn+I%sW;=0J8C2o zJBfsiu+ru_A!$v1@rt=Ubg4lti*2!H=hk?--rAtE=rBYnQ?&8QcJS+3urq$^2C zz?xm=g^JbCjX?t0*V?9rfp{fzlhANvojl zuGGIe{?ERgpBnm(cF*!}tfJYN-C&uiY2v>R=M|8UrqQR!?q=5~EZYM5aCTFtxhf3B zi5d}z*jm;z;*blSPiMKNSzbVbe1ExzCJR2%R_IDNQl*G+cOX+kgJ}1P)Upf@y0m63 
zSYDtKsMHl~jS`Y2DBL-jCRgNMmHZJLCDW`(7aDdFc*sJD!&!V{vjm|S{5=N^(S1PKFLB;+Vju_&gC^1P@x$Y z4Ad-Kr=SFm)-eZB4}b-chO%F3#`PT%oygASH%b6iSe3WXq(G1HxmD0SZYL2v1e0d} z24RZ?+z_1oPuRZlCp++2)0$;M9IF)F0TM8Re2p9L?Ow-7wQB$$Q>hSl_dVl!6tD3i z8S?hg?nJN1-_JWO3oJ9IsR5?PUF_>YoMa&<7W_x0MpR;N=h@ygiAF!Fk&#jNd6RHW z^mxYYI+!g24j`cwdIj3QvBJw zdpj+0SWb!0XLJz?&CV)Z&0l=1B8;XDMi!bWiTi5RWfw!(TWu72ybDFcM_PIvoCX8o zw1gY!knIdIJ?$s`H)X*(^5<^mSn)kl+!|}yNAY^*63RkRlR*XP|D!1a!ailO?d z>*JWA!O2~$qI^xxm9tRq=^E?Fi8^1P_=D-J(Q7MDVeI<^o^^@F%oDBS!?zU@-T1q9 zsS(n!HP)9;NBgAHr!H52e=yJyW7MZ_3pVzm|kQ zUu=(UH!*(r(L6zt?dL3m6O~${99FXb>@4Mk`Gv_HOGXgtMeaoRO@ z{A{CyBBhCOZHEC5@e)cY$Mspei{{y(yQ^%HX{?8bc+i}NJK9TyqTyG zmr=h_z~gm9qyUNq@+F)AM1{D_6@hHN=-(Ly9k+rPeXdv`Bi2+7 zh1KU1C#EV$5UPIS>Mn0rXP$VuVJfW(_w`ZMOE(se)j%P6^@K;7u(_YCRQkHGKrU`K3YkEqWk1pz+@7Z zjVULJv&zNU&b+TGEv%!RdLGO}nLJ&R+e_1fEm`&cVv5#X??qA8Pft%t(08xx<6CQIts_L03&@Xb?Zhy znsweb^}<%_uuEy2nNKB3W8bXl>d$({#vE^GbyiXJhTCU}`5?NK)7NXOM)RDHn5C+( zTi=O&WTBM6eZ4BxEZi+~CwDp&m5Px;B5g0$0q~ zW^18z3IHa71L{f-bywx9yyaE*wmiICpNfRn_c(!|`J_wL+wI9@mlh-uH7}rdOWc+No{)BDqcRHM0&R6iDGhQdvnrAla zTP=;p?(Cu=GC)GKS;1_nb19g0Sw^*|V~^&VCZ1@vH8$ORJg}s`K0Y50`O;(CMU!0b zou85j^!u7*Ym7| zzkThGpY#2ZB-`M1w@MWw^io%LtSsM?l#ZTp(fFRw_etmiB^%8Yjdtey52M|*Qo-H> zv09Q?u6ik}(ai!ay0*eqoBDKj-~CcPFvfe7wz7gs7^=vtk9*nuJlFCU9Utskt-W;XvAe` ziX{x(8`Fa~ukRSi9zIg_T7n9($84la?IHNJ$O@YMpS3jKj6!$oQq6RSV=LTT!qy^J zgbY9_QLq{=Kb#-VX8Go7zMacHHfGrs$|;)Za9LMTOj-8#v}7E2E#RrXB>CQd4&`n; z%RI##>u>_++K=Us)W#FvDl*%~t2|^r+7_lhWnsTBKBGJGC6*rR{6)FJpo!JqrqNnvd}Kh|BTO35Ih;4cJTJ zcqi4Px>olMp#Zi6GBHcWPb zKBM_u>7f!o!L;?`iL0ltaCSY7s!Jo^5dMA-Yk;S%U(|XEmM!eIp~my;G~A1zpK{!R zi4_LE`I46oCoYb2*DGzx2ZKz$=*$g|M!4LR+&Wok4{PkBr1uI`2UxuqTB-|@aicZ= z6~KE5U_IA*l^10===r(Kc+9`zow%C@U5IP&;^FV71`0T69Q;S2cvtkzvgbO zkCuoP6jjeDZtTb6ACnX``-Cq@Dl&DUyk|~X%{Ws|F-v)2kEh=BcwBupwbN|nXf$=w zK6OQ}f$?nr^YDbxE71~YYKGs55!UMWJm6f9FIqUJT9jOJ^SY*BIv%gP&He$ya+oFS zJ)<&IMG+KSX1}yB#G>0?ShaprDc|(BZ3`A;HP5Z=cEIj*)a#Mp zR(gKS0)uQ>C*q6t=S2#eSL6i>nma}{OA3%5dL9X1hX+Zgm}~jl$68K)AN=|Q2Fpez 
z_68{E|Dl{y+||vl`Ax=lcegvMKhtkzi!HU&*-qQG;J>EIvCl2 z%FZi0;a^eF>mOSGqH)(#!G6T#t-ZHt1x@qJ@x5^{wyRsK^7{l^&<_$%G5d1Ly{8pK zYmVY#7=x@hP_pjxgNf#2fyt#jXX~3r8SG|G1Kov=7~AuJ7n;&N*eKw7@h+*=#)msj zpYKt0FO0yb+2}W*0I7=~tsjPZ4iY0wk`vVb_2S!1t$qoJ6?Et`2CUJi6bs164Z!+$ z#&~TRf+wkhW#?I&ubdi`S{*2Dh<;TTh1qY95}4E7VoUHKzvkBO@o-|QN!QWkhMv@UZgvQG?slZ>EGL3{yu06#Bt9RR zEVzTc{9Uw36doodIwvK?f81pb*$&eA$D!pPWsympLR=Ic{kJyx3d(SK0DB-JbztS=-F%aP!+(%{!~1XhQ8N#k z5%#aO;n=4pwaytf7>a2{kJ}%7aKETddR^H*RakV(zZqX6ER4e~)YPmxscUFDpRoG+b7W(a zfo63_~Ek#a725_M) z7AU?i{0K{TH*r~|A`cNFw;z^z8^!YBVD%Vf@??Rnky-xc$c>KgO;IvQ`+x!PQ4Fnw zeed{5`%$KxmmZqrUP?}%ztg3d{mSqSRhly>)=PWGh((u}%xgnfzqnVypLl6U%ipFYLxDH0_k_?7xUE;%mE%Y*wa#u~wRI{o)}65k8r093b#67F{D ztCpxcyXd;>f26Y5Rb8sT8-;!eVyBbft&;i95-55{M1<jOZd}6sEzKzcsRXF&F9l2SJ^TMl~O7BL=m4`Z|5ur=1$txR0)m3dmX zGa>fL#_GM0O#+_o@b7@x-5%!|4Z3H0u3{6yhKvCR7f@W6j&4ixF22GFhTT@xSo~kR zl$wfFI{V=v!&vW_R%{sIpZC>QB$GbnlY&*bnRSZ!rkC@?+c$&sx8UyCef{t$mY!lH zVxLBvVEN!Ovr4Jnm01n`TvkC-Oi5}*8n6~=N}pCtN$FB;au-A*fpMFS|I4z=kDpgP zj6w&`VAuzF0!0HsY`+_`^2oh+v`5F!_YWQhu1_z#)kmMb5^4Rag2tJpKz>Q%u=0AK zXcJUjqxe3^Q@#)Rb#;wv#l5Um94;|cce+*3xOSO@UN0VrJWBR=SoR&y3rM3&p+7-< zZEJGW)ix=(>jk3{ZIsHJvsV1_P1skW0Kognqtyunq0u4a1$aH*KVGQMNHiM07HM50 z?v=&A2!^IG4ONHjTr&&>8NT(0Jfujoytoq*iWlJp6qNwQh_%QpLgmYgV4VnK5KIsE za*hiyrJs_#b7p|^D#+4zitezkZUDM^J(hn$&)dJVif*a1+sM{hn-w2MKVWvDg68@2 z*N${a^x{pT?-X#i$%b7bYWX9!cMj|hQFD2(##uxQn!}vgE^vV)I_A~ZTVp7SPD9;I zQL)AnPKUw4#In(*T5}AA+h=oW{h);~C$cvv_l+Jqwj+awYZpOaS#f{n897@^^C;Z3m&z3MtEIM|cDc!YuU5Anz zw-))}FwXs^g~zWsx2AKKC(E}q-TgjZV))Gzxk8dJQP7-kPtl=>Zo3{L2?-nvt|JMGGOqec}&jc{8!K28*ur|cBw zdr@!f{*bNmBOg)nYBO1VlXQ1kBln{I{^bF)zqF-(({3*#SY%KYO+~PguRGOpnox9q zIZ+?;>a^OZSuV>5#`rH6j(8ocDlcWTygB)eV(ha>?M^4DTk8$fr{70ty^su^%E=BC z9e3zjx-ycN`TbC`j=)!wCgnMHRxoP{E2Twqv;n3Ldgz`yn50T z@C(q3y{70yM$z5yp>B@M^bAiY%S$jin`XfEY5o>DtY0{~?`70Wlc`GQ3zQ{o2?I3H zCGYC&KY{b8oH9kX9Br2?%{c^_l{eH_ykfQt%+#gV2ea;Apy$Bs+Fw+FwP%@qmhoWbg5C?S#|qf_zBc#$joXhW*{tY6(SB z{n&4Iim&p<3L0+!J_K%NXpP-Z-rp#2Y(?u-OR@Do?t~BMgV}edZ>~%1bW*_i;&rMA 
zlLud#OJWIrBH%+yE}A*Q-j^4Fn9F;RUA;xP&ZIC>UAeBcbc3W?ZsbM`{cr`iay0zn zxl5lEFa};fOvt}49K*4eHoat9(Q8yxVU9C!w0&d*{!U+bKiH5kzq4myqVs>x-+s3Q zGC?q1Bzz+@4g7sb8_%Q76U9WB$)5fMt(3#V)n9W|s15(9|1P$ajFD~{i)S0cWn6>y_QmsyoQBwBww2G)7G^K>ah7Q&;)_t8QMqIbX> z3f7ctpOCkv4X}L&AYe2I|H;i(k`D-wI^5I2~>IFrCTIO3=0q--~x)|xhW%D?#~vFP?1KiQ7QatyE6;n z8P}ndo&!ZfzxT(NmZ3!fWJF`qEm3x_2ozG>G7KAbxKl?y6byCm*`rJR|NaQQj=zO$ zgg_rFO53vH{NpEX(-o=)rtonMR52T)KV3?CI%$>F54s; z6T?2XJ(*?1vTZY?0qnlqh0gKY=W)8FXJ{ ziF%4zdGHw%;d^|Uku@A*i8)vRS!XyA^V|C-*mH49FiO(fcmlNPF1(OeRl{k+=aGHrNwdo~XoFcgaJ#^S(K^M2P{aStcm?AWjRsF#m=K-fkfo z?3W$eHU*w?g~+rRMnUwP?2KAwl#elq6 zbo}vps%|3a-eDvFPRcGFCjRk1hIhLvY?JVvD$OLw3VFTTrY3toZY2EIrVfJpe)-Gt zKdRF4Hi_ao=-&|`M4Z=1zQFOfp@9zm6#_%SRWX@}Q;;<*YxHl3s0d}>31uV-)?_00 zp-*|GFf!yp*mo+^OE7;Ogfedt;cOKsz6`~`{3gdwW1e5LH#~`O5CfdD^U|G22=Fm5 zI`n!2#<&9_Ogc}V>SqcD`r}BZmn2A0Z-Q35*5UebO_e2v3g-i}N|8YO<6$M8w2dJ( zVj5-u+;H2PjP4YGe(7Mcmo{$h)&{5|2=b(agYlIu%tS*n0QBxJiP&3j9fGclAVaTT z+IE7;nHVwTM3IE`giM+9I18Oa%=;3wUQl-49nnBctpI5C{iE8X2r}H4PqXL!x(-|0 zlB~$+XknxWKF8zaDlVLRpd%x=>JHT$DAb(?$-s>Pb(OoeSAEks7~g$+m%)Nu9(&Yz z6!FZalZ;M`&~`UrWyRr|W)+dC2dEvcpz)jyD!QXVuIz$eTQCR`BA)@=XwU=$f{tg7_e0h`-vqFJDe+=~5Rlu!hE z3@J)Jfi7KjN??|wfx4zoRx8nCaNe!`flQ8nGx|S78$e)?vb9=o_Dk)-*27gqvv=T~ztRya!_BFd0uy>9(aDS=_>&NYc58&lrf=Pz3y z`dsU+B1$(!mk)m|ydubgl*xIJ;UJ^dCWo{@ml8>ss+dUk;auCmo3acEt#mq_C@71^KYCYKw(c`N14&bZ`TsKLE~^+rMi*%quv>T((LjZG--I5CE+~_T z6me&B5{n`aV5vpq|Bk6$v7>5e7?o^3&^>NtwnUgn0WPLNy7Y+YO z8Uce4tuK!$=C9r<5ui@b7Zh8HaI-@ws9^Fg!02q3A4&f!2^f2bgdv2lbKNS8rF8Qq5BtGfLflry06}?E@*Bb3c{Hx+Ya^RXu!@6{JvzmrRZD1^gQFukgGwaTKb zlGYNPV|ns1otjf$=#rQqB{7ozgitKWo>SdhM`=Ji$YrU>$zWe;I=Yb>l3rSG8N-N} zHV_6hwX;4JvX;K@ek>Y@^&4f5sZMRJ|7CO)X`q0~ql>EF?`wQ_+$MEA>w`6_n$mNT zYAZjQMO{U}1<(qSyLz-mOY_)0hw_LkpB$b5I3+_RScx&Wk!BJ*v|4sN@($j5gviAa zMj9QW`tTyat@Uv+TQ8vW(} zA+3?@Z`4h9(WtveC5n~zM@zTFjyu-`A%-020@@W~N}dXy&V>JP-+^v~KL&{aaKZ;R e5Sjm<4+s84<-C(04F5UPBWT5Y3I(@J1OGpQwt^D? 
literal 0 HcmV?d00001 diff --git a/mkdocs.yml b/mkdocs.yml index cb20ec4f..00621621 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -1,29 +1,29 @@ -site_name: DANDI Documentation -repo_name: "dandi/dandi-docs" -repo_url: "https://github.com/dandi/dandi-docs" +site_name: EMBER-DANDI Documentation +repo_name: "aplbrain/dandi-docs" +repo_url: "https://github.com/aplbrain/dandi-docs" copyright: "CC-BY 4.0" use_directory_urls: true -site_url: https://docs.dandiarchive.org +site_url: https://docs-dandi.emberarchive.org # Material theme theme: name: "material" language: "en" favicon: img/favicon.ico - logo: img/dandi-logo-square_sw.png + logo: img/ember-logo.png palette: - scheme: default toggle: icon: material/weather-night name: Dark mode - primary: "dark blue" - accent: "light blue" + primary: deep orange + accent: red - scheme: slate toggle: icon: material/weather-sunny name: Light mode - primary: "dark blue" - accent: "light blue" + primary: amber + accent: red features: - toc.integrate @@ -68,22 +68,22 @@ nav: - Advanced Asset Search: "example-notebooks/tutorials/cosyne_2023/advanced_asset_search.ipynb" - Streaming and interacting with NWB data: "example-notebooks/tutorials/bcm_2024/analysis-demo.ipynb" - NWB Widget Demo: "example-notebooks/demos/NWBWidget-demo.ipynb" - - Developer Guide: - - System Architecture: "developer-guide/system-architecture.md" - - Developer Notes: "developer-guide/developer-notes.md" - - Integrate External Services with DANDI: "developer-guide/integrate-external-services.md" - - Contributing Documentation: "developer-guide/contributing-documentation.md" - - Creating a DANDI Instance: - - Overview: "developer-guide/creating-dandi-instance/index.md" - - Initialize Vendor Accounts: "developer-guide/creating-dandi-instance/initialize-vendors.md" - - DANDI Infrastructure: "developer-guide/creating-dandi-instance/dandi-infrastructure.md" - - DANDI Archive: "developer-guide/creating-dandi-instance/dandi-archive.md" - - DANDI Authentication: 
"developer-guide/creating-dandi-instance/dandi-authentication.md" - - DANDI Client: "developer-guide/creating-dandi-instance/dandi-cli.md" - - DANDI Hub: "developer-guide/creating-dandi-instance/dandi-hub.md" - - Health Status: - - Dandisets: https://github.com/dandi/dandisets-healthstatus - - Services: https://status.dandiarchive.org + # - Developer Guide: + # - System Architecture: "developer-guide/system-architecture.md" + # - Developer Notes: "developer-guide/developer-notes.md" + # - Integrate External Services with DANDI: "developer-guide/integrate-external-services.md" + # - Contributing Documentation: "developer-guide/contributing-documentation.md" + # - Creating a DANDI Instance: + # - Overview: "developer-guide/creating-dandi-instance/index.md" + # - Initialize Vendor Accounts: "developer-guide/creating-dandi-instance/initialize-vendors.md" + # - DANDI Infrastructure: "developer-guide/creating-dandi-instance/dandi-infrastructure.md" + # - DANDI Archive: "developer-guide/creating-dandi-instance/dandi-archive.md" + # - DANDI Authentication: "developer-guide/creating-dandi-instance/dandi-authentication.md" + # - DANDI Client: "developer-guide/creating-dandi-instance/dandi-cli.md" + # - DANDI Hub: "developer-guide/creating-dandi-instance/dandi-hub.md" + # - Health Status: + # - Dandisets: https://github.com/dandi/dandisets-healthstatus + # - Services: https://status.dandiarchive.org - Terms and Policies: - Terms: "terms-policies/terms.md" - Policies: "terms-policies/policies.md" @@ -113,6 +113,9 @@ plugins: ignore_h1_titles: True include: ["*.ipynb"] - macros + - exclude: + glob: + - developer-guide/* # Customize theme extra: @@ -137,9 +140,9 @@ extra: link: https://bsky.app/profile/dandiarchive.org name: Bluesky instance: - name: DANDI - uri: https://dandiarchive.org - sandbox_uri: https://sandbox.dandiarchive.org - domain: dandiarchive.org - api: https://api.dandiarchive.org - sandbox_api: https://api.sandbox.dandiarchive.org + name: EMBER-DANDI + uri: 
https://dandi.emberarchive.org + sandbox_uri: https://apl-setup--ember-dandi-archive.netlify.app + domain: emberarchive.org + api: https://api-dandi.emberarchive.org + sandbox_api: https://api-dandi.sandbox.emberarchive.org diff --git a/requirements.txt b/requirements.txt index 9e0c7b44..abd504eb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,3 +3,4 @@ pymdown-extensions mkdocs-open-in-new-tab mkdocs-jupyter mkdocs-macros-plugin +mkdocs-exclude \ No newline at end of file