49 changes: 36 additions & 13 deletions .github/workflows/build_test.yml
@@ -19,6 +19,14 @@ on:
required: true
tags:
description: "Test scenario tags"
required: false
type: string

permissions:
contents: read
actions: read
checks: write
pull-requests: write

concurrency:
# older builds for the same pull request number or branch should be cancelled
@@ -223,7 +231,6 @@ jobs:
# To run a single test on GHA use the below command:
# run: python -m tox run -e `echo py${PYTHON_VERSION/\./}-single-ci | sed 's/ /,/g'`
run: python -m tox run -e `echo py${PYTHON_VERSION/\./}-{extras,unit-parallel,integ-parallel,pandas-parallel,sso}-ci | sed 's/ /,/g'`

env:
PYTHON_VERSION: ${{ matrix.python-version }}
cloud_provider: ${{ matrix.cloud-provider }}
@@ -232,17 +239,16 @@
# To specify the test name (in single test mode) pass this env variable:
# SINGLE_TEST_NAME: test/path/filename.py::test_name
shell: bash
- name: Combine coverages
run: python -m tox run -e coverage --skip-missing-interpreters false
shell: bash
- uses: actions/upload-artifact@v4
if: always()
with:
include-hidden-files: true
name: coverage_${{ matrix.os.download_name }}-${{ matrix.python-version }}-${{ matrix.cloud-provider }}
path: |
.tox/.coverage
.tox/coverage.xml
- uses: actions/upload-artifact@v4
if: always()
with:
include-hidden-files: true
name: junit_${{ matrix.os.download_name }}-${{ matrix.python-version }}-${{ matrix.cloud-provider }}
@@ -370,13 +376,15 @@ jobs:
TOX_PARALLEL_NO_SPINNER: 1
shell: bash
- uses: actions/upload-artifact@v4
if: always()
with:
include-hidden-files: true
name: coverage_linux-fips-3.9-${{ matrix.cloud-provider }}
path: |
.coverage
coverage.xml
- uses: actions/upload-artifact@v4
if: always()
with:
include-hidden-files: true
name: junit_linux-fips-3.9-${{ matrix.cloud-provider }}
@@ -432,13 +440,15 @@ jobs:
TOX_PARALLEL_NO_SPINNER: 1
shell: bash
- uses: actions/upload-artifact@v4
if: always()
with:
include-hidden-files: true
name: coverage_linux-lambda-${{ matrix.python-version }}-${{ matrix.cloud-provider }}
path: |
.coverage.py${{ env.shortver }}-lambda-ci
junit.py${{ env.shortver }}-lambda-ci-dev.xml
- uses: actions/upload-artifact@v4
if: always()
with:
include-hidden-files: true
name: junit_linux-lambda-${{ matrix.python-version }}-${{ matrix.cloud-provider }}
@@ -518,7 +528,9 @@ jobs:
run: python -m tox run -e coverage --skip-missing-interpreters false
shell: bash
- uses: actions/upload-artifact@v4
if: always()
with:
include-hidden-files: true
name: coverage_aio_${{ matrix.os.download_name }}-${{ matrix.python-version }}-${{ matrix.cloud-provider }}
path: |
.tox/.coverage
@@ -555,7 +567,7 @@ jobs:
shell: bash

combine-coverage:
if: ${{ success() || failure() }}
if: always()
name: Combine coverage
needs: [lint, test, test-fips, test-lambda, test-aio]
runs-on: ubuntu-latest
@@ -587,6 +599,7 @@ jobs:
dst_file = dst_dir / ".coverage.{}".format(src_file.parent.name[9:])
print("{} copy to {}".format(src_file, dst_file))
shutil.copy(str(src_file), str(dst_file))'

- name: Collect all JUnit XML files to one dir
run: |
python -c '
@@ -596,33 +609,43 @@ jobs:
src_dir = Path("artifacts")
dst_dir = Path(".") / "junit_results"
dst_dir.mkdir()
# Collect all JUnit XML files with different naming patterns
for pattern in ["*/junit.*.xml", "*/junit.py*-lambda-ci-dev.xml"]:
for src_file in src_dir.glob(pattern):
dst_file = dst_dir / src_file.name
print("{} copy to {}".format(src_file, dst_file))
shutil.copy(str(src_file), str(dst_file))'
for src_file in src_dir.glob("*/junit*.xml"):
artifact_name = src_file.parent.name
dst_file = dst_dir / f"{artifact_name}_{src_file.name}"
print("{} copy to {}".format(src_file, dst_file))
shutil.copy(str(src_file), str(dst_file))'
- name: Combine coverages
run: python -m tox run -e coverage
run: |
if [ -d ".tox" ] && [ "$(find .tox -name ".coverage*" -type f | wc -l)" -gt 0 ]; then
python -m tox run -e coverage
else
echo "No coverage files found, skipping coverage combination"
fi
- name: Publish html coverage
if: ${{ success() }}
uses: actions/upload-artifact@v4
with:
include-hidden-files: true
name: overall_cov_html
path: .tox/htmlcov
- name: Publish xml coverage
if: ${{ success() }}
uses: actions/upload-artifact@v4
with:
include-hidden-files: true
name: overall_cov_xml
path: .tox/coverage.xml
- uses: codecov/codecov-action@v4
if: ${{ success() }}
with:
files: .tox/coverage.xml
token: ${{ secrets.CODECOV_TOKEN }}
name: coverage-${{ github.run_id }}
fail_ci_if_error: false
verbose: true
- name: Upload test results to Codecov
if: ${{ !cancelled() }}
uses: codecov/test-results-action@v1
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: junit_results/junit.*.xml
files: junit_results/*.xml
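
The collection step above replaces two hard-coded glob patterns with a single `*/junit*.xml` glob and prefixes each copied report with the name of the artifact directory it came from, so identically named files from different matrix jobs no longer overwrite one another. A standalone sketch of the same idea, assuming the `artifacts/` layout that `actions/download-artifact` produces (paths are illustrative):

```python
import shutil
from pathlib import Path

src_dir = Path("artifacts")      # one subdirectory per downloaded artifact
dst_dir = Path("junit_results")
dst_dir.mkdir(exist_ok=True)

for src_file in src_dir.glob("*/junit*.xml"):
    # Prefix with the artifact name so two jobs that both emit, say,
    # junit.py39-ci-dev.xml land as distinct files instead of clobbering.
    dst_file = dst_dir / f"{src_file.parent.name}_{src_file.name}"
    print(f"{src_file} copy to {dst_file}")
    shutil.copy(str(src_file), str(dst_file))
```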
24 changes: 23 additions & 1 deletion DESCRIPTION.md
@@ -7,11 +7,33 @@ https://docs.snowflake.com/
Source code is also available at: https://github.com/snowflakedb/snowflake-connector-python

# Release Notes
- v3.18.0(TBD)
- v4.1.0(TBD)

- v4.0.0(October 09,2025)
- Added support for checking certificate revocation using certificate revocation lists (CRLs)
- Added `CERT_REVOCATION_CHECK_MODE` to `CLIENT_ENVIRONMENT`
- Added the `workload_identity_impersonation_path` parameter to support service account impersonation for Workload Identity Federation on GCP and AWS workloads only
- Fixed `get_results_from_sfqid` when using `DictCursor` and executing multiple statements at once
- Added the `oauth_credentials_in_body` parameter supporting an option to send the oauth client credentials in the request body
- Fixed retry behavior for the `ECONNRESET` error
- Added an option to exclude `botocore` and `boto3` dependencies by setting `SNOWFLAKE_NO_BOTO` environment variable during installation
- Reverted the exception type for the token-expired scenario with the `Oauth` authenticator back to `DatabaseError`
- Enhanced configuration file security checks with stricter permission validation.
- Configuration files writable by group or others now raise a `ConfigSourceError` with detailed permission information, preventing potential credential tampering.
- Fixed the return type of `SnowflakeConnection.cursor(cursor_class)` to match the type of `cursor_class`
- Constrained the types of `fetchone`, `fetchmany`, `fetchall`
- As part of this fix, `DictCursor` is no longer a subclass of `SnowflakeCursor`; use `SnowflakeCursorBase` as a superclass of both.
- Fixed the "No AWS region was found" error when the AWS region was set in the `AWS_DEFAULT_REGION` variable instead of `AWS_REGION` for the `WORKLOAD_IDENTITY` authenticator
- Added the `ocsp_root_certs_dict_lock_timeout` connection parameter to set the timeout (in seconds) for acquiring the lock on the OCSP root certs dictionary. The default value is -1, which indicates no timeout; a usage sketch follows these notes.
- Fixed the behaviour of probing the S3 Transfer Accelerate endpoint by default for internal stages, which always failed with HTTP 403 because the required permission is deliberately not granted. The /accelerate call is no longer attempted.

- v3.18.0(October 03,2025)
- Added support for pandas conversion for Day-time and Year-Month Interval types

- v3.17.4(September 22,2025)
- Added support for intermediate certificates as roots when they are stored in the trust store
- Bumped vendored `urllib3` to `v2.5.0` and `requests` to `v2.32.5`
- Dropped support for OpenSSL versions older than 1.1.1

- v3.17.3(September 02,2025)
- Enhanced configuration file permission warning messages.
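
A minimal usage sketch for the `ocsp_root_certs_dict_lock_timeout` parameter added in v4.0.0 (account and credential values below are placeholders; the parameter is passed straight through `snowflake.connector.connect` like any other connection option):

```python
import snowflake.connector

# Placeholder credentials; ocsp_root_certs_dict_lock_timeout is the new
# v4.0.0 parameter. The default of -1 waits indefinitely for the OCSP
# root certs dictionary lock; a positive value is a timeout in seconds.
conn = snowflake.connector.connect(
    account="my_account",
    user="my_user",
    password="my_password",
    ocsp_root_certs_dict_lock_timeout=5,
)
```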
8 changes: 7 additions & 1 deletion src/snowflake/connector/aio/_ocsp_snowflake.py
@@ -25,7 +25,11 @@
)
from snowflake.connector.errors import RevocationCheckError
from snowflake.connector.network import PYTHON_CONNECTOR_USER_AGENT
from snowflake.connector.ocsp_snowflake import OCSPCache, OCSPResponseValidationResult
from snowflake.connector.ocsp_snowflake import (
OCSP_ROOT_CERTS_DICT_LOCK_TIMEOUT_DEFAULT_NO_TIMEOUT,
OCSPCache,
OCSPResponseValidationResult,
)
from snowflake.connector.ocsp_snowflake import OCSPServer as OCSPServerSync
from snowflake.connector.ocsp_snowflake import OCSPTelemetryData
from snowflake.connector.ocsp_snowflake import SnowflakeOCSP as SnowflakeOCSPSync
@@ -143,6 +147,7 @@ def __init__(
use_ocsp_cache_server=None,
use_post_method: bool = True,
use_fail_open: bool = True,
root_certs_dict_lock_timeout: int = OCSP_ROOT_CERTS_DICT_LOCK_TIMEOUT_DEFAULT_NO_TIMEOUT,
**kwargs,
) -> None:
self.test_mode = os.getenv("SF_OCSP_TEST_MODE", None)
@@ -151,6 +156,7 @@
logger.debug("WARNING - DRIVER CONFIGURED IN TEST MODE")

self._use_post_method = use_post_method
self._root_certs_dict_lock_timeout = root_certs_dict_lock_timeout
self.OCSP_CACHE_SERVER = OCSPServer(
top_level_domain=extract_top_level_domain_from_hostname(
kwargs.pop("hostname", None)
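
The `-1` default (`OCSP_ROOT_CERTS_DICT_LOCK_TIMEOUT_DEFAULT_NO_TIMEOUT`) matches the sentinel that `threading.Lock.acquire` already uses, where `timeout=-1` blocks indefinitely. A sketch of how such a timeout can guard a shared dictionary; this is illustrative only, not the connector's actual locking code:

```python
import threading

OCSP_ROOT_CERTS_DICT_LOCK_TIMEOUT_DEFAULT_NO_TIMEOUT = -1
_root_certs_lock = threading.Lock()

def update_root_certs(timeout: int = OCSP_ROOT_CERTS_DICT_LOCK_TIMEOUT_DEFAULT_NO_TIMEOUT) -> None:
    # threading.Lock.acquire treats timeout=-1 as "block forever", so the
    # connector's default preserves the old blocking behaviour.
    if not _root_certs_lock.acquire(timeout=timeout):
        raise TimeoutError("could not acquire the OCSP root certs dict lock")
    try:
        pass  # read or update the shared root-certs dictionary here
    finally:
        _root_certs_lock.release()
```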
15 changes: 12 additions & 3 deletions src/snowflake/connector/aio/_s3_storage_client.py
@@ -401,9 +401,17 @@ async def transfer_accelerate_config(
return False
else:
if use_accelerate_endpoint is None:
use_accelerate_endpoint = await self._get_bucket_accelerate_config(
self.s3location.bucket_name
)
if str(self.s3location.bucket_name).lower().startswith("sfc-"):
# SNOW-2324060: no s3:GetAccelerateConfiguration and no intention to add either
# for internal stage, thus previously the client got HTTP403 on /accelerate call
logger.debug(
"Not attempting to get bucket transfer accelerate endpoint for internal stage."
)
use_accelerate_endpoint = False
else:
use_accelerate_endpoint = await self._get_bucket_accelerate_config(
self.s3location.bucket_name
)

if use_accelerate_endpoint:
self.endpoint = (
@@ -413,6 +421,7 @@
self.endpoint = (
f"https://{self.s3location.bucket_name}.s3.amazonaws.com"
)
logger.debug(f"Using {self.endpoint} as storage endpoint.")
return use_accelerate_endpoint

async def _has_expired_token(self, response: aiohttp.ClientResponse) -> bool:
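
The change above boils down to a bucket-name check: internal stage buckets are named `sfc-*`, the `s3:GetAccelerateConfiguration` permission is deliberately not granted on them, and the `/accelerate` probe therefore always failed with HTTP 403 (SNOW-2324060). A condensed sketch of the decision, with the probe stubbed out as a callable:

```python
from typing import Callable

def resolve_accelerate(
    bucket_name: str, probe_bucket_config: Callable[[str], bool]
) -> bool:
    # Internal stages (sfc-* buckets) never get the /accelerate probe;
    # it would only ever return HTTP 403.
    if str(bucket_name).lower().startswith("sfc-"):
        return False
    return probe_bucket_config(bucket_name)
```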
9 changes: 8 additions & 1 deletion src/snowflake/connector/aio/_session_manager.py
@@ -102,6 +102,8 @@ async def validate_ocsp(
ocsp_response_cache_uri=FEATURE_OCSP_RESPONSE_CACHE_FILE_NAME,
use_fail_open=self._snowflake_ocsp_mode == OCSPMode.FAIL_OPEN,
hostname=hostname,
# TODO: uncomment when issues with ocsp revoked certs in tests are fixed (reapply #2559)
# root_certs_dict_lock_timeout=FEATURE_ROOT_CERTS_DICT_LOCK_TIMEOUT,
).validate(hostname, protocol, session_manager=session_manager)
if not v:
raise OperationalError(
@@ -223,8 +225,13 @@ async def get(
use_pooling: bool | None = None,
**kwargs,
) -> aiohttp.ClientResponse:
async with self.use_session(url, use_pooling) as session:
if isinstance(timeout, tuple):
connect, total = timeout
timeout_obj = aiohttp.ClientTimeout(total=total, connect=connect)
else:
timeout_obj = aiohttp.ClientTimeout(total=timeout) if timeout else None

async with self.use_session(url, use_pooling) as session:
return await session.get(
url, headers=headers, timeout=timeout_obj, **kwargs
)
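
The fix accepts the requests-style `(connect, total)` tuple that callers pass for `timeout` and translates it into an `aiohttp.ClientTimeout` before the session is used. A standalone sketch of that translation, assuming the same tuple convention as the code above:

```python
from __future__ import annotations

import aiohttp

def to_client_timeout(timeout: float | tuple[float, float] | None) -> aiohttp.ClientTimeout | None:
    # requests-style tuple: (connect timeout, total timeout)
    if isinstance(timeout, tuple):
        connect, total = timeout
        return aiohttp.ClientTimeout(total=total, connect=connect)
    # A bare number becomes the total timeout; falsy means "no explicit timeout".
    return aiohttp.ClientTimeout(total=timeout) if timeout else None
```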
7 changes: 4 additions & 3 deletions src/snowflake/connector/aio/_wif_util.py
@@ -34,9 +34,10 @@

async def get_aws_region() -> str:
"""Get the current AWS workload's region."""
if "AWS_REGION" in os.environ: # Lambda
region = os.environ["AWS_REGION"]
else: # EC2
region = os.environ.get("AWS_REGION") or os.environ.get("AWS_DEFAULT_REGION")

if not region:
# Fallback for EC2 environments
region = (
await aiobotocore.utils.AioInstanceMetadataRegionFetcher().retrieve_region()
)
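
Region detection now falls back from `AWS_REGION` (set in Lambda) to `AWS_DEFAULT_REGION` before finally querying the instance metadata service. A synchronous sketch of the same resolution order, using botocore's fetcher (the async code above uses its `aiobotocore` counterpart):

```python
from __future__ import annotations

import os

from botocore.utils import InstanceMetadataRegionFetcher

def get_aws_region() -> str | None:
    # Lambda sets AWS_REGION; other environments may only set AWS_DEFAULT_REGION.
    region = os.environ.get("AWS_REGION") or os.environ.get("AWS_DEFAULT_REGION")
    if not region:
        # Fallback for EC2: ask the instance metadata service (IMDS).
        region = InstanceMetadataRegionFetcher().retrieve_region()
    return region
```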
14 changes: 13 additions & 1 deletion src/snowflake/connector/config_manager.py
@@ -27,7 +27,7 @@

LOGGER = logging.getLogger(__name__)
READABLE_BY_OTHERS = stat.S_IRGRP | stat.S_IROTH

WRITABLE_BY_OTHERS = stat.S_IWGRP | stat.S_IWOTH

SKIP_WARNING_ENV_VAR = "SF_SKIP_WARNING_FOR_READ_PERMISSIONS_ON_CONFIG_FILE"

@@ -337,6 +337,18 @@ def read_config(
)
continue

# Check for writable by others - this should raise an error
if (
not IS_WINDOWS # Skip checking on Windows
and sliceoptions.check_permissions # Skip checking if this file couldn't hold sensitive information
and filep.stat().st_mode & WRITABLE_BY_OTHERS != 0
):
file_stat = filep.stat()
file_permissions = oct(file_stat.st_mode)[-3:]
raise ConfigSourceError(
f"file '{str(filep)}' is writable by group or others — this poses a security risk because it allows unauthorized users to modify sensitive settings. Your Permission: {file_permissions}"
)

# Check for readable by others or wrong ownership - this should warn
if (
not IS_WINDOWS # Skip checking on Windows
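
The new check is a plain mode-bit test: if either the group-write or other-write bit is set on a config file that may hold sensitive values, reading it raises instead of warning. A minimal reproduction of the test against a hypothetical file:

```python
import stat
from pathlib import Path

WRITABLE_BY_OTHERS = stat.S_IWGRP | stat.S_IWOTH  # group- or world-writable

config = Path("connections.toml")  # hypothetical config file
mode = config.stat().st_mode
if mode & WRITABLE_BY_OTHERS != 0:
    # oct(mode)[-3:] yields the familiar three-digit permission string, e.g. "664".
    raise PermissionError(f"'{config}' is writable by group or others: {oct(mode)[-3:]}")
```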
8 changes: 8 additions & 0 deletions src/snowflake/connector/connection.py
@@ -70,6 +70,7 @@
_DOMAIN_NAME_MAP,
_OAUTH_DEFAULT_SCOPE,
ENV_VAR_PARTNER,
OCSP_ROOT_CERTS_DICT_LOCK_TIMEOUT_DEFAULT_NO_TIMEOUT,
PARAMETER_AUTOCOMMIT,
PARAMETER_CLIENT_PREFETCH_THREADS,
PARAMETER_CLIENT_REQUEST_MFA_TOKEN,
@@ -242,6 +243,10 @@ def _get_private_bytes_from_file(
"internal_application_version": (CLIENT_VERSION, (type(None), str)),
"disable_ocsp_checks": (False, bool),
"ocsp_fail_open": (True, bool), # fail open on ocsp issues, default true
"ocsp_root_certs_dict_lock_timeout": (
OCSP_ROOT_CERTS_DICT_LOCK_TIMEOUT_DEFAULT_NO_TIMEOUT, # no timeout
int,
),
"inject_client_pause": (0, int), # snowflake internal
"session_parameters": (None, (type(None), dict)), # snowflake session parameters
"autocommit": (None, (type(None), bool)), # snowflake
@@ -443,6 +448,7 @@ class SnowflakeConnection:
validates the TLS certificate but doesn't check revocation status with OCSP provider.
ocsp_fail_open: Whether or not the connection is in fail open mode. Fail open mode decides if TLS certificates
continue to be validated. Revoked certificates are blocked. Any other exceptions are disregarded.
ocsp_root_certs_dict_lock_timeout: Timeout for the OCSP root certs dict lock in seconds. Default value is -1, which means no timeout.
session_id: The session ID of the connection.
user: The user name used in the connection.
host: The host name the connection attempts to connect to.
@@ -1545,6 +1551,8 @@ def __config(self, **kwargs):
WORKLOAD_IDENTITY_AUTHENTICATOR,
PROGRAMMATIC_ACCESS_TOKEN,
PAT_WITH_EXTERNAL_SESSION,
OAUTH_AUTHORIZATION_CODE,
OAUTH_CLIENT_CREDENTIALS,
}

if not (self._master_token and self._session_token):
3 changes: 3 additions & 0 deletions src/snowflake/connector/constants.py
@@ -354,6 +354,9 @@ class FileHeader(NamedTuple):

HTTP_HEADER_VALUE_OCTET_STREAM = "application/octet-stream"

# OCSP
OCSP_ROOT_CERTS_DICT_LOCK_TIMEOUT_DEFAULT_NO_TIMEOUT: int = -1


@unique
class OCSPMode(Enum):
7 changes: 7 additions & 0 deletions src/snowflake/connector/network.py
@@ -41,6 +41,7 @@
HTTP_HEADER_CONTENT_TYPE,
HTTP_HEADER_SERVICE_NAME,
HTTP_HEADER_USER_AGENT,
OCSP_ROOT_CERTS_DICT_LOCK_TIMEOUT_DEFAULT_NO_TIMEOUT,
)
from .description import (
CLIENT_NAME,
@@ -337,6 +338,12 @@ def __init__(
ssl_wrap_socket.FEATURE_OCSP_RESPONSE_CACHE_FILE_NAME = (
self._connection._ocsp_response_cache_filename if self._connection else None
)
# OCSP root timeout
ssl_wrap_socket.FEATURE_ROOT_CERTS_DICT_LOCK_TIMEOUT = (
self._connection._ocsp_root_certs_dict_lock_timeout
if self._connection
else OCSP_ROOT_CERTS_DICT_LOCK_TIMEOUT_DEFAULT_NO_TIMEOUT
)

# This is to address the issue where requests hangs
_ = "dummy".encode("idna").decode("utf-8")