17 changes: 17 additions & 0 deletions .markdownlintrc
@@ -0,0 +1,17 @@
# Markdownlint configuration for PFS Target Database documentation
default: true

# MD013: Line length - disable for auto-generated CLI docs and long URLs
MD013: false

# MD014: Dollar signs in shell commands - disable for CLI documentation
MD014: false

# MD033: Inline HTML - allow for documentation formatting (e.g., details/summary)
MD033: false

# MD034: Bare URLs - allow for reference links
MD034: false

# MD041: First line in file should be top-level heading - not always needed
MD041: false
32 changes: 32 additions & 0 deletions docs/reference/cli.md
@@ -28,9 +28,17 @@
host = "<hostname of uploader>"
user = "<user name of uploader>"
data_dir = "<path to the data directory on the uploader>"

# Optional section for Web API access
# The following parameters are used to download data via Web API instead of rsync.
[webapi]
url = "<base URL of the Web API endpoint>" # e.g., "https://example.com/get-upload/"
api_key = "<API key for authentication>" # Optional: leave empty ("") for no authentication
verify_ssl = true # Optional: set to false to disable SSL certificate verification
```

The `schemacrawler` section is required only if you want to draw an ER diagram of the database schema with SchemaCrawler.
The `webapi` section is used by the `transfer-targets-api` command to download data via Web API.
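
For orientation, the sketch below shows how an HTTP client typically consumes these three settings. The route (`<url>/<upload_id>`) and the bearer-token header are illustrative assumptions, not the uploader's documented API contract.

```python
import requests


def download_upload(webapi: dict, upload_id: str, dest: str) -> None:
    """Fetch one upload using the settings from the [webapi] section."""
    # Hypothetical route: "<base URL>/<upload_id>".
    url = webapi["url"].rstrip("/") + f"/{upload_id}"
    # Hypothetical auth scheme; no header when api_key is empty ("").
    headers = (
        {"Authorization": f"Bearer {webapi['api_key']}"}
        if webapi.get("api_key")
        else {}
    )
    resp = requests.get(
        url, headers=headers, verify=webapi.get("verify_ssl", True), timeout=60
    )
    resp.raise_for_status()
    with open(dest, "wb") as f:
        f.write(resp.content)
```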

## `pfs-targetdb-cli`

@@ -60,6 +68,7 @@
- `update`: Update rows in a table in the PFS Target...
- `parse-alloc`: Parse an Excel file containing time...
- `transfer-targets`: Download target lists from the uploader to...
- `transfer-targets-api`: Download target lists from the uploader to...
- `insert-targets`: Insert targets using a list of input...
- `insert-pointings`: Insert user-defined pointings using a list...
- `update-catalog-active`: Update active flag in the input_catalog...
@@ -329,6 +338,29 @@

---

### `transfer-targets-api`

Download target lists from the uploader to the local machine via Web API.

**Usage**:

```console
$ pfs-targetdb-cli transfer-targets-api [OPTIONS] INPUT_FILE
```

**Arguments**:

- `INPUT_FILE`: Input catalog list file (csv). [required]

**Options**:

- `-c, --config TEXT`: Database configuration file in the TOML format. [required]
- `--local-dir PATH`: Path to the data directory on the local machine. [default: .]
- `--force / --no-force`: Force download. [default: no-force]
- `--help`: Show this message and exit.
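
The command is a thin wrapper around library functions; the equivalent programmatic call, following the calls made in `cli_main.py` in this PR, is roughly as follows (the import path is an assumption for illustration):

```python
from pathlib import Path

# Hypothetical import path; the function names are the ones cli_main.py uses.
from targetdb.utils import (
    load_config,
    load_input_data,
    transfer_data_from_uploader_via_webapi,
)

config = load_config("config.toml")  # TOML file with the [webapi] section
df = load_input_data("input_catalogs.csv")  # the INPUT_FILE catalog list
transfer_data_from_uploader_via_webapi(df, config, local_dir=Path("."), force=False)
```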

---

### `insert-targets`

Insert targets using a list of input catalogs and upload IDs.
1 change: 1 addition & 0 deletions pyproject.toml
@@ -25,6 +25,7 @@ dependencies = [
"pandas",
"psycopg2-binary",
"pyarrow",
"requests",
"sqlalchemy",
# "setuptools",
"sqlalchemy-utils",
81 changes: 65 additions & 16 deletions src/targetdb/cli/cli_main.py
@@ -25,6 +25,7 @@
parse_allocation_file,
prep_fluxstd_data,
transfer_data_from_uploader,
transfer_data_from_uploader_via_webapi,
update_input_catalog_active,
)

@@ -196,7 +197,7 @@ def checkdups(
typer.Option("--skip-save-merged", help="Do not save the merged DataFrame."),
] = False,
additional_columns: Annotated[
List[str],
List[str] | None,
typer.Option(
"--additional-columns",
help="Additional columns to output for the merged file. (e.g., 'psf_mag_g' 'psf_mag_r'). "
@@ -217,7 +218,7 @@
"--format",
help="File format of the merged data file.",
),
] = "parquet",
] = PyArrowFileFormat.parquet,
):
if additional_columns is None:
additional_columns = []
@@ -260,23 +261,23 @@ def prep_fluxstd(
),
],
input_catalog_id: Annotated[
int,
int | None,
typer.Option(
"--input_catalog_id",
show_default=False,
help="Input catalog ID for the flux standard star catalog.",
),
] = None,
input_catalog_name: Annotated[
str,
str | None,
typer.Option(
"--input_catalog_name",
show_default=False,
help="Input catalog name for the flux standard star catalog.",
),
] = None,
rename_cols: Annotated[
str,
str | None,
typer.Option(
"--rename-cols",
help='Dictionary to rename columns (e.g., \'{"fstar_gaia": "is_fstar_gaia"}\').',
@@ -288,7 +289,7 @@
"--format",
help="File format of the output data file.",
),
] = "parquet",
] = PyArrowFileFormat.parquet,
):

if input_catalog_id is None and input_catalog_name is None:
@@ -419,15 +420,15 @@ def insert(
),
] = FluxType.total,
upload_id: Annotated[
str,
str | None,
typer.Option(
"--upload_id",
show_default=False,
help="Upload ID issued by the PFS Target Uploader. Only required for the `target` table.",
),
] = None,
proposal_id: Annotated[
str,
str | None,
typer.Option(
"--proposal_id",
show_default=False,
@@ -502,15 +503,15 @@ def update(
),
] = False,
upload_id: Annotated[
str,
str | None,
typer.Option(
"--upload_id",
show_default=False,
help="Upload ID issued by the PFS Target Uploader. Only required for the `target` table",
),
] = None,
proposal_id: Annotated[
str,
str | None,
typer.Option(
"--proposal_id",
show_default=False,
@@ -564,9 +565,9 @@ def parse_alloc(
writable=True,
help="Directory path to save output files.",
),
] = ".",
] = Path("."),
outfile_prefix: Annotated[
str,
str | None,
typer.Option(
show_default=False,
help="Prefix to the output files.",
@@ -603,12 +604,11 @@ def transfer_targets(
local_dir: Annotated[
Path,
typer.Option(
exists=True,
dir_okay=True,
writable=True,
help="Path to the data directory in the local machine",
),
] = ".",
] = Path("."),
force: Annotated[bool, typer.Option(help="Force download.")] = False,
):

@@ -627,6 +627,55 @@
)


@app.command(
help="Download target lists from the uploader to the local machine via Web API."
)
def transfer_targets_api(
input_file: Annotated[
Path,
typer.Argument(
exists=True,
file_okay=True,
dir_okay=False,
readable=True,
show_default=False,
help="Input catalog list file (csv).",
),
],
config_file: Annotated[
str,
typer.Option(
"-c",
"--config",
show_default=False,
help=config_help_msg,
),
],
local_dir: Annotated[
Path,
typer.Option(
dir_okay=True,
writable=True,
help="Path to the data directory in the local machine",
),
] = Path("."),
force: Annotated[bool, typer.Option(help="Force download.")] = False,
):

logger.info(f"Loading config file: {config_file}")
config = load_config(config_file)

logger.info(f"Loading input data from {input_file} into a DataFrame")
df = load_input_data(input_file)

transfer_data_from_uploader_via_webapi(
df,
config,
local_dir=local_dir,
force=force,
)


@app.command(help="Insert targets using a list of input catalogs and upload IDs.")
def insert_targets(
input_catalogs: Annotated[
@@ -657,7 +706,7 @@ def insert_targets(
readable=True,
help="Path to the data directory.",
),
] = ".",
] = Path("."),
flux_type: Annotated[
FluxType,
typer.Option(
@@ -724,7 +773,7 @@ def insert_pointings(
readable=True,
help="Path to the data directory.",
),
] = ".",
] = Path("."),
commit: Annotated[
bool,
typer.Option("--commit", help="Commit changes to the database."),