diff --git a/.gitignore b/.gitignore
index b28e93d..fdd86a8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,3 +7,9 @@ __pycache__
 /dist
 /gcloud_requests.egg-info
 /htmlcov
+cloudbuild_pypirc
+netrc
+pip.conf
+gcloud
+build
+dist
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..967adb3
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,105 @@
+DOCKER ?= docker
+MKDIR ?= mkdir
+RM ?= rm
+
+PROJECT=gcloud-requests
+
+LATEST_SUPPORTED_PY_VERSION=3.11
+
+SUPPORTED_PY_VERSIONS=2.7 3.6 3.7 3.8 3.9 $(LATEST_SUPPORTED_PY_VERSION)
+
+# Determine the platform flag (only needed for Python 2.7).
+# grpcio has no pre-built wheels for arm64 on Python 2, so force the platform to linux/amd64.
+# grpcio would also have to be rebuilt from source on the Alpine image, so for Python 2.7 we use the full Debian image instead.
+define PLATFORM_ARG
+$(if $(filter 2.7,$(PY_VERSION)),--platform=linux/amd64,)
+endef
+PY_IMAGE = python:$(PY_VERSION)-alpine
+ifeq ($(PY_VERSION),2.7)
+    PY_IMAGE = python:2.7
+endif
+# Additionally, the Python 2.7 Debian image needs bash to run runtests.sh and buildwheel.sh.
+SHELL_CMD = sh
+ifeq ($(PY_VERSION),2.7)
+    SHELL_CMD = /bin/bash
+endif
+
+# The tests are not fully mocked, so they run against the lp-infra-lab project by default.
+GOOGLE_CLOUD_PROJECT ?= lp-infra-lab
+DATASTORE_PROJECT_ID ?= $(GOOGLE_CLOUD_PROJECT)
+DATASTORE_DATASET ?= $(GOOGLE_CLOUD_PROJECT)
+GCLOUD_PROJECT ?= $(GOOGLE_CLOUD_PROJECT)
+
+.PHONY: run lint test ci-tests bdist_wheel clean
+
+netrc:
+	cp ${HOME}/.netrc netrc
+
+pip.conf:
+	cp ${HOME}/.pip/pip.conf pip.conf
+
+cloudbuild_pypirc:
+	cp ${HOME}/.pypirc cloudbuild_pypirc
+
+${HOME}/.config/gcloud/application_default_credentials.json:
+	gcloud auth application-default login
+
+run: netrc pip.conf cloudbuild_pypirc ${HOME}/.config/gcloud/application_default_credentials.json
+	$(DOCKER) run $(PLATFORM_ARG) -it --rm=true --name=$(PROJECT)_python$(PY_VERSION) \
+		-v $(CURDIR):/workspace \
+		-v ${HOME}/.config/gcloud:/root/.config/gcloud:ro \
+		-e GOOGLE_APPLICATION_CREDENTIALS=/root/.config/gcloud/application_default_credentials.json \
+		-e DATASTORE_PROJECT_ID=$(DATASTORE_PROJECT_ID) \
+		-e DATASTORE_DATASET=$(DATASTORE_DATASET) \
+		-e GOOGLE_CLOUD_PROJECT=$(GOOGLE_CLOUD_PROJECT) \
+		-e GCLOUD_PROJECT=$(GCLOUD_PROJECT) \
+		$(PY_IMAGE) \
+		$(SHELL_CMD) -c "/workspace/ci/runtests.sh onlysetup; exec /bin/sh"
+
+lint:
+	$(DOCKER) run --rm \
+		-v $(CURDIR):/workspace \
+		python:$(LATEST_SUPPORTED_PY_VERSION)-alpine sh -c "pip install flake8 && flake8 /workspace/gcloud_requests /workspace/tests"
+
+test: netrc pip.conf cloudbuild_pypirc ${HOME}/.config/gcloud/application_default_credentials.json
+	$(DOCKER) run $(PLATFORM_ARG) -it --rm=true --name=$(PROJECT)_$@ \
+		-v $(CURDIR):/workspace \
+		-v ${HOME}/.config/gcloud:/root/.config/gcloud:ro \
+		-e GOOGLE_APPLICATION_CREDENTIALS=/root/.config/gcloud/application_default_credentials.json \
+		-e DATASTORE_PROJECT_ID=$(DATASTORE_PROJECT_ID) \
+		-e DATASTORE_DATASET=$(DATASTORE_DATASET) \
+		-e GOOGLE_CLOUD_PROJECT=$(GOOGLE_CLOUD_PROJECT) \
+		-e GCLOUD_PROJECT=$(GCLOUD_PROJECT) \
+		$(PY_IMAGE) $(SHELL_CMD) /workspace/ci/runtests.sh
+
+ci-tests:
+	$(foreach pyversion, $(SUPPORTED_PY_VERSIONS), $(MAKE) PY_VERSION=$(pyversion) test;)
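The `run` and `test` targets above mount the local gcloud Application Default Credentials read-only into the container and export the project variables. As a rough illustration of how those settings are picked up by the code under test, here is a minimal Python sketch; it assumes google-auth's standard ADC lookup, and the datastore client construction is illustrative rather than code from this repository.

```python
# Minimal sketch of how the variables exported by the run/test targets are consumed
# inside the container. Assumes google-auth's standard Application Default
# Credentials lookup; the datastore client below is illustrative only.
import os

import google.auth
from google.cloud import datastore

# GOOGLE_APPLICATION_CREDENTIALS points at the mounted ADC file; the project is
# resolved from GOOGLE_CLOUD_PROJECT (or the legacy GCLOUD_PROJECT) when the
# credentials themselves don't carry one.
credentials, project = google.auth.default()
print("Using project:", project or os.environ.get("DATASTORE_PROJECT_ID"))

client = datastore.Client(project=project, credentials=credentials)
```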
+bdist_wheel: netrc pip.conf cloudbuild_pypirc ${HOME}/.config/gcloud/application_default_credentials.json
+	# THIS WILL PUBLISH THE LIBRARY! Make sure __version__ in gcloud_requests/__init__.py is set to a dev version.
+	@if [ x"$(BUILD_TYPE)" = x"release" ]; then \
+	    if grep __version__ gcloud_requests/__init__.py | grep -q '[0-9]*\.[0-9]*[\.\-]dev'; then \
+	        $(DOCKER) run $(PLATFORM_ARG) -it --rm=true --name=$(PROJECT)_$@ \
+	            -v $(CURDIR):/workspace \
+	            -v ${HOME}/.config/gcloud:/root/.config/gcloud:ro \
+	            -e GOOGLE_APPLICATION_CREDENTIALS=/root/.config/gcloud/application_default_credentials.json \
+	            --env BUILD_TYPE=$(BUILD_TYPE) \
+	            python:$(LATEST_SUPPORTED_PY_VERSION)-alpine /bin/sh /workspace/ci/buildwheel.sh; \
+	    else \
+	        echo "The __version__ in gcloud_requests/__init__.py does not contain a 'dev' designator; refusing to publish"; \
+	    fi \
+	else \
+	    $(DOCKER) run $(PLATFORM_ARG) -it --rm=true --name=$(PROJECT)_$@ \
+	        -v $(CURDIR):/workspace \
+	        -v ${HOME}/.config/gcloud:/root/.config/gcloud:ro \
+	        -e GOOGLE_APPLICATION_CREDENTIALS=/root/.config/gcloud/application_default_credentials.json \
+	        --env BUILD_TYPE=local \
+	        python:$(LATEST_SUPPORTED_PY_VERSION)-alpine /bin/sh /workspace/ci/buildwheel.sh; \
+	fi
+
+clean:
+	-$(RM) netrc
+	-$(RM) pip.conf
+	-$(RM) cloudbuild_pypirc
+	-$(RM) -rf build
+	-$(RM) -rf dist
\ No newline at end of file
diff --git a/README.md b/README.md
index 45b3752..ea3cde6 100644
--- a/README.md
+++ b/README.md
@@ -44,6 +44,35 @@ bucket = client.get_bucket("my-bucket")
 *Note*: This will run the tests against whatever GCP project you're currently logged into via the gcloud tool.
+
+## Running comprehensive tests
+
+`gcloud_requests` is released for multiple versions of Python. When built in CI,
+the tests are run against every version of Python the library is supposed to
+support. This can be simulated during development by running the tests inside
+Docker containers, which boils down to running `make ci-tests`. To run the test
+suite locally against one specific version of Python, run e.g. `make PY_VERSION=3.11 test`.
+
+*Note*: This will run the tests against the `lp-infra-lab` GCP project.
+
+## Adding new Python versions to the supported roster
+
+If you would like to add a new version of Python to the supported versions,
+it must be handled in three locations:
+1. For local runs, the variable `SUPPORTED_PY_VERSIONS` in the Makefile
+   must be adjusted to include (or exclude) the versions in question.
+2. In `ci/cloudbuild.yaml`, steps need to be added (or removed) to cover
+   the different versions of Python as well.
+3. In `ci/cloudbuild.yaml`, update `_LATEST_SUPPORTED_PY_VERSION` to the
+   latest supported Python version.
+
+## Building a pre-release version
+
+Running `make bdist_wheel` builds the library inside a container and copies the
+results into `./build/` and `./dist/`. It does not actually publish the wheel.
+To publish the wheel from a local setup, run `make BUILD_TYPE=release bdist_wheel`.
 
 ## Authors
 
 `gcloud_requests` was authored at [Leadpages][leadpages].
 You can
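The `bdist_wheel` guard in the Makefile relies on a plain `grep` for a dev designator in `gcloud_requests/__init__.py` before it will publish anything. The hypothetical snippet below just mirrors that check in Python to make the accepted version format concrete; the regex is taken from the Makefile, while the helper itself is illustrative and not part of the repository.

```python
# Illustrative only: mirrors the Makefile's
#   grep -q '[0-9]*\.[0-9]*[\.\-]dev'
# guard, which refuses to publish unless __version__ looks like a dev release.
import re

DEV_VERSION_RE = re.compile(r"[0-9]*\.[0-9]*[\.\-]dev")


def is_dev_version(version):
    """Return True for versions such as '2.0.0.dev3' or '2.0-dev1'."""
    return DEV_VERSION_RE.search(version) is not None


assert is_dev_version("2.0.0.dev3")
assert is_dev_version("2.0-dev1")
assert not is_dev_version("2.0.0")
```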
diff --git a/ci/buildwheel.sh b/ci/buildwheel.sh
new file mode 100755
index 0000000..90f068d
--- /dev/null
+++ b/ci/buildwheel.sh
@@ -0,0 +1,64 @@
+#!/bin/sh
+
+setup_root() {
+    mkdir /root/.pip && ln -s /workspace/pip.conf /root/.pip/pip.conf
+    ln -s /workspace/netrc /root/.netrc
+    ln -s /workspace/cloudbuild_pypirc /root/.pypirc
+}
+
+setup_app() {
+    mkdir /app && cd /app
+    # Copy (don't link), so we can throw away __pycache__ and *.pyc files.
+    cp -ar /workspace/gcloud_requests /app/gcloud_requests
+    cp -ar /workspace/tests /app/tests
+    # Copy, don't link, because we change it with `sed` below.
+    cp -v /workspace/requirements.txt /app/requirements.txt
+    ln -s /workspace/requirements-dev.txt /app/requirements-dev.txt
+    ln -s /workspace/setup.cfg /app/setup.cfg
+    ln -s /workspace/setup.py /app/setup.py
+}
+
+build_wheel() {
+    cd /app
+    export HOME=/root  # Cloud Build presets HOME to /home/builder/home.
+    sed -i -e "s:^six$:six==1.15:" requirements.txt
+    pip install -r requirements.txt
+    pip install wheel
+
+    python setup.py bdist_wheel
+
+    ls -la build
+    ls -la dist
+    ls -la .
+}
+
+upload_lib() {
+    # Installing twine on Alpine needs a compiler.
+    cd /app
+    apk update && apk upgrade && apk add build-base libffi-dev
+    pip install twine
+    # "local" refers to the [local] section in .pypirc, which specifies the upload URL.
+    echo "Uploading library to Artifactory"
+    twine upload \
+        --repository local \
+        dist/* \
+        --config-file /root/.pypirc
+}
+
+main() {
+    setup_root
+    setup_app
+    build_wheel
+    # Additional safeguard against accidental uploads.
+    if [ x"${BUILD_TYPE}" = x"release" ]; then
+        upload_lib
+    elif [ x"${BUILD_TYPE}" = x"local" ]; then
+        echo "Copying build artifacts into /workspace, which is a Docker volume mount"
+        rm -rf /workspace/build /workspace/dist
+        cp -avr /app/build /app/dist /workspace/
+    else
+        echo "Discarding the built library!"
+    fi
+}
+
+main "$@"
\ No newline at end of file
diff --git a/ci/cloudbuild.yaml b/ci/cloudbuild.yaml
new file mode 100644
index 0000000..eca0a73
--- /dev/null
+++ b/ci/cloudbuild.yaml
@@ -0,0 +1,136 @@
+substitutions:
+  _LATEST_SUPPORTED_PY_VERSION: "3.11"
+  _DATASTORE_PROJECT_ID: lp-infra-lab
+  _DATASTORE_DATASET: lp-infra-lab
+  _GOOGLE_CLOUD_PROJECT: lp-infra-lab
+  _GCLOUD_PROJECT: lp-infra-lab
+
+steps:
+  # Pull down the netrc and pip config files we need to access our private PyPI registry.
+  # These files are stored within the context of this build and become accessible to the
+  # CI images via a volume mount.
+  - name: gcr.io/google.com/cloudsdktool/cloud-sdk:alpine
+    id: "Prepare: Copy pip/netrc/pypirc config files from bucket"
+    entrypoint: "gsutil"
+    args:
+      - cp
+      - gs://center-builds/pypi/netrc
+      - gs://center-builds/pypi/pip.conf
+      - gs://center-builds/pypi/cloudbuild_pypirc
+      - .
+
+  - name: gcr.io/google.com/cloudsdktool/cloud-sdk:alpine
+    id: "Test the git checkout"
+    entrypoint: "bash"
+    args:
+      - -c
+      - |
+        ls -la /
+        ls -la /workspace
+        git log
+
+  - name: python:${_LATEST_SUPPORTED_PY_VERSION}-alpine
+    id: "Lint"
+    entrypoint: sh
+    args:
+      - -c
+      - |
+        # Create pip and auth dirs
+        mkdir -p /root/.pip
+
+        # Link workspace configs into expected locations
+        ln -sf /workspace/pip.conf /root/.pip/pip.conf
+        ln -sf /workspace/netrc /root/.netrc
+        ln -sf /workspace/cloudbuild_pypirc /root/.pypirc
+
+        # Install flake8 using the correct pip config
+        pip install flake8
+
+        # Run linting
+        flake8 gcloud_requests tests
+
+  # grpcio would have to be rebuilt from source on the Alpine image, so for Python 2.7 we don't use Alpine.
+  - name: python:2.7
+    id: "Test: Inside Python-2.7"
+    entrypoint: bash
+    args:
+      - /workspace/ci/runtests.sh
+    env:
+      - DATASTORE_PROJECT_ID=${_DATASTORE_PROJECT_ID}
+      - DATASTORE_DATASET=${_DATASTORE_DATASET}
+      - GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}
+      - GCLOUD_PROJECT=${_GCLOUD_PROJECT}
+
+  - name: python:3.6-alpine
+    id: "Test: Inside Python-3.6"
+    entrypoint: sh
+    args:
+      - /workspace/ci/runtests.sh
+    env:
+      - DATASTORE_PROJECT_ID=${_DATASTORE_PROJECT_ID}
+      - DATASTORE_DATASET=${_DATASTORE_DATASET}
+      - GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}
+      - GCLOUD_PROJECT=${_GCLOUD_PROJECT}
+
+  - name: python:3.7-alpine
+    id: "Test: Inside Python-3.7"
+    entrypoint: sh
+    args:
+      - /workspace/ci/runtests.sh
+    env:
+      - DATASTORE_PROJECT_ID=${_DATASTORE_PROJECT_ID}
+      - DATASTORE_DATASET=${_DATASTORE_DATASET}
+      - GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}
+      - GCLOUD_PROJECT=${_GCLOUD_PROJECT}
+
+  - name: python:3.8-alpine
+    id: "Test: Inside Python-3.8"
+    entrypoint: sh
+    args:
+      - /workspace/ci/runtests.sh
+    env:
+      - DATASTORE_PROJECT_ID=${_DATASTORE_PROJECT_ID}
+      - DATASTORE_DATASET=${_DATASTORE_DATASET}
+      - GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}
+      - GCLOUD_PROJECT=${_GCLOUD_PROJECT}
+
+  - name: python:3.9-alpine
+    id: "Test: Inside Python-3.9"
+    entrypoint: sh
+    args:
+      - /workspace/ci/runtests.sh
+    env:
+      - DATASTORE_PROJECT_ID=${_DATASTORE_PROJECT_ID}
+      - DATASTORE_DATASET=${_DATASTORE_DATASET}
+      - GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}
+      - GCLOUD_PROJECT=${_GCLOUD_PROJECT}
+
+  - name: python:${_LATEST_SUPPORTED_PY_VERSION}-alpine
+    id: "Test: Inside Python-${_LATEST_SUPPORTED_PY_VERSION}"
+    entrypoint: sh
+    args:
+      - /workspace/ci/runtests.sh
+    env:
+      - DATASTORE_PROJECT_ID=${_DATASTORE_PROJECT_ID}
+      - DATASTORE_DATASET=${_DATASTORE_DATASET}
+      - GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}
+      - GCLOUD_PROJECT=${_GCLOUD_PROJECT}
+
+  - name: python:${_LATEST_SUPPORTED_PY_VERSION}-alpine
+    id: "Build and Publish: create and upload library"
+    entrypoint: sh
+    env:
+      - 'BUILD_TYPE=$_BUILD_TYPE'  # Safeguard inside buildwheel.sh
+    args:
+      - -c
+      - |
+        if [ x"$_BUILD_TYPE" = x"release" ]; then
+          /workspace/ci/buildwheel.sh
+        else
+          echo "_BUILD_TYPE is <$_BUILD_TYPE> -> not building or publishing a wheel."
+        fi
+
+logsBucket: gs://leadpage-dev_cloudbuild/cloudbuild_logs
+options:
+  machineType: 'E2_HIGHCPU_8'  # The extra cores are needed for the Python 2 tests; otherwise they take ~20 minutes.
+  logging: GCS_ONLY
diff --git a/ci/runtests.sh b/ci/runtests.sh
new file mode 100755
index 0000000..59e2d9a
--- /dev/null
+++ b/ci/runtests.sh
@@ -0,0 +1,44 @@
+#!/bin/sh
+
+setup_root() {
+    mkdir /root/.pip && ln -s /workspace/pip.conf /root/.pip/pip.conf
+    ln -s /workspace/netrc /root/.netrc
+    ln -s /workspace/cloudbuild_pypirc /root/.pypirc
+}
+
+setup_app() {
+    mkdir /app && cd /app
+    # Link the sources straight from the workspace; the test run doesn't modify them.
+    ln -s /workspace/gcloud_requests /app/gcloud_requests
+    ln -s /workspace/tests /app/tests
+
+    ln -s /workspace/requirements.txt /app/requirements.txt
+    ln -s /workspace/requirements-dev.txt /app/requirements-dev.txt
+    ln -s /workspace/setup.cfg /app/setup.cfg
+    ln -s /workspace/setup.py /app/setup.py
+}
+
+setup_env() {
+    cd /app
+    # This is a throwaway container, so no virtualenv is needed.
+
+    ls -la
+    cat requirements.txt requirements-dev.txt
+    pip install -r requirements-dev.txt
+}
+
+main() {
+    local onlysetup=$1
+    export HOME=/root  # Cloud Build presets HOME to /home/builder/home.
+    setup_root
+    setup_app
+    setup_env
+    if [ x"${onlysetup}" != x"onlysetup" ]; then
+        echo "RUNNING TESTS: onlysetup=<${onlysetup}>"
+        py.test -sv tests
+    else
+        echo "ONLY PREPARE ENV: onlysetup=<${onlysetup}>"
+    fi
+}
+
+main "$@"
\ No newline at end of file
diff --git a/gcloud_requests/credentials_watcher.py b/gcloud_requests/credentials_watcher.py
index 1a236fd..8d62031 100644
--- a/gcloud_requests/credentials_watcher.py
+++ b/gcloud_requests/credentials_watcher.py
@@ -17,7 +17,7 @@ class CredentialsWatcher(Thread):
 
     def __init__(self):
         super(CredentialsWatcher, self).__init__()
-        self.setDaemon(True)
+        self.daemon = True
         self.watch_list_updated = Condition()
         self.watch_list = []
         self.logger = logging.getLogger("gcloud_requests.CredentialsWatcher")
diff --git a/gcloud_requests/proxy.py b/gcloud_requests/proxy.py
index bd4b5c9..f2762ba 100644
--- a/gcloud_requests/proxy.py
+++ b/gcloud_requests/proxy.py
@@ -34,10 +34,17 @@ class RequestsProxy(object):
     TIMEOUT_CONFIG = (3.05, 30)
 
     #: Determines how retries should be handled by this proxy.
-    RETRY_CONFIG = Retry(
-        total=10, connect=10, read=5,
-        method_whitelist=Retry.DEFAULT_METHOD_WHITELIST | frozenset(["POST"])
-    )
+    # Handle compatibility between old urllib3 (method_whitelist) and new urllib3 (allowed_methods).
+    _retry_kwargs = {
+        "total": 10,
+        "connect": 10,
+        "read": 5,
+    }
+    if hasattr(Retry, "DEFAULT_METHOD_WHITELIST"):
+        _retry_kwargs["method_whitelist"] = Retry.DEFAULT_METHOD_WHITELIST | frozenset(["POST"])
+    elif hasattr(Retry, "DEFAULT_ALLOWED_METHODS"):
+        _retry_kwargs["allowed_methods"] = Retry.DEFAULT_ALLOWED_METHODS | frozenset(["POST"])
+    RETRY_CONFIG = Retry(**_retry_kwargs)
 
     #: The number of connections to pool per Session.
     CONNECTION_POOL_SIZE = 32
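For context on the `Retry` compatibility shim above: whichever keyword the shim ends up passing, the resulting object is consumed the same way. The sketch below shows the standard requests/urllib3 pattern for mounting such a retry policy on a session; it assumes a urllib3 release that provides `allowed_methods` (1.26+) and is a generic illustration, not the proxy's actual session wiring.

```python
# Generic illustration of how a Retry object such as RETRY_CONFIG is consumed.
# Assumes urllib3 >= 1.26 (allowed_methods); older releases use method_whitelist,
# which is exactly what the shim in proxy.py papers over.
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

retry = Retry(
    total=10, connect=10, read=5,
    allowed_methods=Retry.DEFAULT_ALLOWED_METHODS | frozenset(["POST"]),
)
adapter = HTTPAdapter(max_retries=retry, pool_maxsize=32)

session = requests.Session()
session.mount("https://", adapter)
session.mount("http://", adapter)
# Requests made through this session now retry the default idempotent methods and POST.
```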
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 91adf33..db7ddd3 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -11,7 +11,7 @@ google-cloud-datastore>=1.6,<2.0
 google-cloud-storage>=1.1.1,<2.0
 
 # Testing
-futures
+futures; python_version < "3"
 httmock
 mock
 pytest>=3
diff --git a/setup.py b/setup.py
index f2a45f5..3613932 100644
--- a/setup.py
+++ b/setup.py
@@ -46,5 +46,7 @@ def parse_dependencies(filename):
         "Programming Language :: Python :: 3.6",
         "Programming Language :: Python :: 3.7",
         "Programming Language :: Python :: 3.8",
+        "Programming Language :: Python :: 3.9",
+        "Programming Language :: Python :: 3.11",
     ]
 )
diff --git a/tests/test_datastore_proxy.py b/tests/test_datastore_proxy.py
index e9c7985..d54beec 100644
--- a/tests/test_datastore_proxy.py
+++ b/tests/test_datastore_proxy.py
@@ -170,7 +170,13 @@ def downstream(netloc, request):
     # RefreshError to be raised
     refresh_calls = []
 
-    @urlmatch(netloc=r"^(oauth2\.googleapis\.com|accounts\.google\.com)$", path=r"^/(o/oauth2/token|token)$")
+    # Match BOTH:
+    # - OAuth2 token endpoints (local/docker)
+    # - GCE metadata token endpoint (Cloud Build / GCE)
+    @urlmatch(
+        netloc=r"^(oauth2\.googleapis\.com|accounts\.google\.com|metadata\.google\.internal)$",
+        path=r".*token.*",
+    )
     def refresh(netloc, request):
         refresh_calls.append(1)
         if sum(refresh_calls) == 1:
diff --git a/tox.ini b/tox.ini
index 6277e4d..0f4ad77 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,6 +1,6 @@
 [tox]
 envlist=
-    py{27,35,36,37,38}-cpython
+    py{27,35,36,37,38,39,311}-cpython
    flake8
 
 [testenv]
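The broadened `@urlmatch` patterns in the test above are meant to intercept token refreshes both locally (OAuth2 endpoints) and on Cloud Build / GCE (metadata server). A quick standalone check of those regexes, independent of httmock's own matching logic, is sketched below; the metadata path used here is the conventional GCE service-account token path and is an assumption for illustration.

```python
# Standalone sanity check of the regexes used in the broadened @urlmatch decorator.
# The metadata token path is the conventional GCE endpoint and is assumed here for
# illustration only.
import re

NETLOC_RE = re.compile(
    r"^(oauth2\.googleapis\.com|accounts\.google\.com|metadata\.google\.internal)$"
)
PATH_RE = re.compile(r".*token.*")

cases = [
    ("oauth2.googleapis.com", "/token"),
    ("accounts.google.com", "/o/oauth2/token"),
    ("metadata.google.internal", "/computeMetadata/v1/instance/service-accounts/default/token"),
]
for netloc, path in cases:
    assert NETLOC_RE.match(netloc), netloc
    assert PATH_RE.match(path), path
print("all token endpoints matched")
```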