Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 6 additions & 10 deletions .ci/run
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@ if ! command -v sudo; then
}
fi

# --parallel-live to show outputs while it's running
tox_cmd='run-parallel --parallel-live'
if [ -n "${CI-}" ]; then
# install OS specific stuff here
case "$OSTYPE" in
Expand All @@ -20,7 +22,8 @@ if [ -n "${CI-}" ]; then
;;
cygwin* | msys* | win*)
# windows
:
# parallel runs are flaky under windows: random failures like "file used by another process"
tox_cmd='run'
;;
*)
# must be linux?
Expand All @@ -29,12 +32,5 @@ if [ -n "${CI-}" ]; then
esac
fi


PY_BIN="python3"
# some systems might have python pointing to python3
if ! command -v python3 &> /dev/null; then
PY_BIN="python"
fi

"$PY_BIN" -m pip install --user tox
"$PY_BIN" -m tox
# NOTE: expects uv installed
uv tool run --with tox-uv tox $tox_cmd "$@"
101 changes: 81 additions & 20 deletions .github/workflows/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,51 +6,112 @@ on:
branches: '*'
tags: 'v[0-9]+.*' # only trigger on 'release' tags for PyPi
# Ideally I would put this in the pypi job... but github syntax doesn't allow for regexes there :shrug:
# P.S. these ad-hoc YAML DSLs are rather painful to work with.
pull_request: # needed to trigger on others' PRs

# Needed to trigger on others' PRs.
# Note that people who fork it need to go to "Actions" tab on their fork and click "I understand my workflows, go ahead and enable them".
workflow_dispatch: # needed to trigger workflows manually
# todo cron?
pull_request:

# Needed to trigger workflows manually.
workflow_dispatch:
inputs:
debug_enabled:
type: boolean
description: 'Run the build with tmate debugging enabled (https://github.com/marketplace/actions/debugging-with-tmate)'
required: false
default: false

schedule:
- cron: '31 18 * * 5' # run every Friday


jobs:
build:
strategy:
fail-fast: false
matrix:
platform: [ubuntu-latest, macos-latest, windows-latest]
python-version: ['3.7', '3.8', '3.9', '3.10', '3.11']
# 3.11 on windows has this bug, lxml setup fails
#https://bugs.launchpad.net/lxml/+bug/1977998
exclude: [{platform: windows-latest, python-version: '3.11'}]
python-version: ['3.9', '3.10', '3.11', '3.12', '3.13']
# vvv just an example of excluding stuff from matrix
# exclude: [{platform: macos-latest, python-version: '3.6'}]

runs-on: ${{ matrix.platform }}

# useful for 'optional' pipelines
# continue-on-error: ${{ matrix.platform == 'windows-latest' }}

steps:
# ugh https://github.com/actions/toolkit/blob/main/docs/commands.md#path-manipulation
- run: echo "$HOME/.local/bin" >> $GITHUB_PATH
- if: ${{ matrix.platform == 'macos-latest' && matrix.python-version == '3.11' }}
# hmm somehow only seems necessary for 3.11 on osx??
run: echo "$HOME/Library/Python/${{ matrix.python-version }}/bin" >> $GITHUB_PATH

- uses: actions/setup-python@v4
- uses: actions/checkout@v4
with:
python-version: ${{ matrix.python-version }}
submodules: recursive
fetch-depth: 0 # nicer to have all git history when debugging/for tests

- uses: actions/checkout@v3
- uses: actions/setup-python@v5
with:
submodules: recursive
python-version: ${{ matrix.python-version }}

- uses: astral-sh/setup-uv@v5
with:
enable-cache: false # we don't have lock files, so can't use them as cache key

# uncomment for SSH debugging
# - uses: mxschmitt/action-tmate@v3
- uses: mxschmitt/action-tmate@v3
if: ${{ github.event_name == 'workflow_dispatch' && inputs.debug_enabled }}

# explicit bash command is necessary for Windows CI runner, otherwise it thinks it's cmd...
- run: bash .ci/run
env:
# only compute lxml coverage on ubuntu; it crashes on windows
CI_MYPY_COVERAGE: ${{ matrix.platform == 'ubuntu-latest' && '--cobertura-xml-report .coverage.mypy' || '' }}

- if: matrix.platform == 'ubuntu-latest' # no need to compute coverage for other platforms
uses: actions/upload-artifact@v3
uses: codecov/codecov-action@v5
with:
fail_ci_if_error: true # default false
token: ${{ secrets.CODECOV_TOKEN }}
flags: mypy-${{ matrix.python-version }}
files: .coverage.mypy/cobertura.xml


pypi:
# Do not run it for PRs/cron schedule etc.
# NOTE: release tags are guarded by on: push: tags on the top.
if: github.event_name == 'push' && (startsWith(github.event.ref, 'refs/tags/') || (github.event.ref == format('refs/heads/{0}', github.event.repository.master_branch)))
# Ugh, I tried using matrix or something to explicitly generate only test pypi or prod pypi pipelines.
# But github actions is so shit, it's impossible to do any logic at all, e.g. doesn't support conditional matrix, if/else statements for variables etc.

needs: [build] # add all other jobs here

runs-on: ubuntu-latest

permissions:
# necessary for Trusted Publishing
id-token: write

steps:
# ugh https://github.com/actions/toolkit/blob/main/docs/commands.md#path-manipulation
- run: echo "$HOME/.local/bin" >> $GITHUB_PATH

- uses: actions/checkout@v4
with:
name: .coverage.mypy_${{ matrix.platform }}_${{ matrix.python-version }}
path: .coverage.mypy/
submodules: recursive
fetch-depth: 0 # pull all commits to correctly infer vcs version

- uses: actions/setup-python@v5
with:
python-version: '3.12'

- uses: astral-sh/setup-uv@v5
with:
enable-cache: false # we don't have lock files, so can't use them as cache key

- name: 'release to test pypi'
# always deploy merged master to test pypi
if: github.event.ref == format('refs/heads/{0}', github.event.repository.master_branch)
run: .ci/release-uv --use-test-pypi

- name: 'release to prod pypi'
# always deploy tags to release pypi
if: startsWith(github.event.ref, 'refs/tags/')
run: .ci/release-uv
58 changes: 58 additions & 0 deletions conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
# this is a hack to monkey patch pytest so it handles tests inside namespace packages without __init__.py properly
# without it, pytest can't discover the package root for some reason
# also see https://github.com/karlicoss/pytest_namespace_pkgs for more

import os
import pathlib
from typing import Optional

import _pytest.main
import _pytest.pathlib

# We consider every directory under src/ to be a namespace package
# (namespace packages have no __init__.py, which is what confuses pytest).
root_dir = pathlib.Path(__file__).absolute().parent.resolve() / 'src'
assert root_dir.exists(), root_dir

# TODO assert it contains package name?? maybe get it via setuptools..

# Absolute paths (as strings) of the candidate namespace package roots.
namespace_pkg_dirs = [str(d) for d in root_dir.iterdir() if d.is_dir()]

# resolve_package_path is called from _pytest.pathlib.import_path:
# it takes a full abs path to the test file and needs to return the path
# to the 'root' package on the filesystem.
# Keep a reference to the original so we can delegate to it when needed.
resolve_pkg_path_orig = _pytest.pathlib.resolve_package_path


def resolve_package_path(path: pathlib.Path) -> Optional[pathlib.Path]:
    """Map an absolute test-file path to its namespace-package root under src/.

    Intended as a replacement for _pytest.pathlib.resolve_package_path:
    walks the file's ancestors and returns the first one that is a known
    namespace package directory.

    Raises RuntimeError if no known package root is found (except for the
    Windows conftest.py quirk handled below).
    """
    # search from the test file upwards
    for parent in path.parents:
        if str(parent) in namespace_pkg_dirs:
            return parent
    if os.name == 'nt':
        # ??? for some reason on windows it is trying to call this against conftest? but not on linux/osx
        if path.name == 'conftest.py':
            return resolve_pkg_path_orig(path)
    # NOTE: original raised RuntimeError("...", path) with the path as a
    # second positional arg, which renders as an awkward tuple; format it in.
    raise RuntimeError(f"Couldn't determine package path for {path}")


# NOTE: patching resolve_package_path seems unnecessary these days?
# Keeping the function around for now just in case.
# After https://github.com/pytest-dev/pytest/pull/13426 we should be able to remove the whole conftest.
# _pytest.pathlib.resolve_package_path = resolve_package_path


# Without patching, the original search_pypath returns just a package name
# (I think it's used as a sort of fallback), so we need to point it at the
# absolute path under src/ instead.
# Not sure about the consequences — maybe it wouldn't work against installed
# packages? TODO confirm.
# Saved for reference; not currently used elsewhere in this file.
search_pypath_orig = _pytest.main.search_pypath


def search_pypath(module_name: str) -> str:
    """Resolve a dotted module name to its filesystem path under src/.

    Returns the package directory if one exists, otherwise the
    corresponding .py module file.
    """
    candidate = root_dir / module_name.replace('.', os.sep)
    if not candidate.is_dir():
        # not a package directory — must be a plain module file
        candidate = candidate.with_suffix('.py')
    assert candidate.exists(), candidate  # just in case
    return str(candidate)


_pytest.main.search_pypath = search_pypath # ty: ignore[invalid-assignment]
12 changes: 10 additions & 2 deletions mypy.ini
Original file line number Diff line number Diff line change
@@ -1,9 +1,17 @@
[mypy]
pretty = True
show_error_context = True
show_error_codes = True
show_column_numbers = True
show_error_end = True

check_untyped_defs = True
namespace_packages = True

# see https://mypy.readthedocs.io/en/stable/error_code_list2.html
warn_redundant_casts = True
strict_equality = True
warn_unused_ignores = True
enable_error_code = deprecated,redundant-expr,possibly-undefined,truthy-bool,truthy-iterable,ignore-without-code,unused-awaitable


# an example of suppressing
# [mypy-my.config.repos.pdfannots.pdfannots]
Expand Down
File renamed without changes.
69 changes: 69 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
# see https://github.com/karlicoss/pymplate for up-to-date reference
[project]
dynamic = ["version"] # version is managed by build backend
name = "endoexport"
dependencies = []
requires-python = ">=3.9"

## these need to be set if you're planning to upload to pypi
# description = "TODO"
# license = {file = "LICENSE"}
# authors = [
# {name = "Dima Gerasimov (@karlicoss)", email = "karlicoss@gmail.com"},
# ]
# maintainers = [
# {name = "Dima Gerasimov (@karlicoss)", email = "karlicoss@gmail.com"},
# ]
#
# [project.urls]
# Homepage = "https://github.com/karlicoss/pymplate"
##


[project.optional-dependencies]
dal = [
# I'm using some upstream unmerged changes, so unfortunately need my own fork
"endoapi @ git+https://github.com/karlicoss/endoapi.git",
]
optional = [
"orjson", # faster json processing
"colorlog",
"ijson", # faster iterative json processing
]
[dependency-groups]
# TODO: not sure, on the one hand could just use 'standard' dev dependency group
# On the other hand, it's a bit annoying that it's always included by default?
# To make sure it's not included, need to use `uv run --exact --no-default-groups ...`
testing = [
"pytest",
"ruff",
"mypy",
"lxml", # for mypy html coverage
"ty>=0.0.1a16",

"numpy", # for fake data generation
"matplotlib", # for DAL demo
"types-pytz",
"pandas-stubs",
]


[build-system]
requires = ["hatchling", "hatch-vcs"]
build-backend = "hatchling.build"

# unfortunately have to duplicate project name here atm, see https://github.com/pypa/hatch/issues/1894
[tool.hatch.build.targets.wheel]
packages = ["src/endoexport"]

[tool.hatch.version]
source = "vcs"

[tool.hatch.version.raw-options]
version_scheme = "python-simplified-semver"
local_scheme = "dirty-tag"

[tool.hatch.metadata]
# needed to allow direct git url for "endoapi" dependency
# NOTE: tool.uv.sources also works, but it would only work with uv, not pip
allow-direct-references = true
7 changes: 7 additions & 0 deletions pytest.ini
Original file line number Diff line number Diff line change
@@ -1,7 +1,14 @@
[pytest]
# discover files that don't follow test_ naming. Useful to keep tests along with the source code
python_files = *.py

# this setting only impacts package/module naming under pytest, not the discovery
consider_namespace_packages = true

addopts =
# prevent pytest cache from being created... it craps into project dir and I never use it anyway
-p no:cacheprovider

# -rap to print tests summary even when they are successful
-rap
--verbose
Expand Down
Loading
Loading