
Commit 5d3dbc4

Merge branch 'main' into up037
2 parents 1fcf3e9 + ff0fa55 · commit 5d3dbc4

File tree: 12 files changed (+49, −35 lines)


Makefile

Lines changed: 12 additions & 12 deletions
@@ -32,9 +32,9 @@ else
 endif
 
 ifeq ($(COVERAGE),1)
-TEST_RUNNER = uv run python -m coverage run --parallel-mode --source=pyiceberg -m
+TEST_RUNNER = uv run $(PYTHON_ARG) python -m coverage run --parallel-mode --source=pyiceberg -m
 else
-TEST_RUNNER = uv run python -m
+TEST_RUNNER = uv run $(PYTHON_ARG) python -m
 endif
 
 ifeq ($(KEEP_COMPOSE),1)
@@ -70,7 +70,7 @@ setup-venv: ## Create virtual environment
 	uv venv $(PYTHON_ARG)
 
 install-dependencies: setup-venv ## Install all dependencies including extras
-	uv sync --all-extras
+	uv sync $(PYTHON_ARG) --all-extras
 
 install: install-uv install-dependencies ## Install uv and dependencies
 
@@ -84,7 +84,7 @@ check-license: ## Check license headers
 	./dev/check-license
 
 lint: ## Run code linters via prek (pre-commit hooks)
-	uv run prek run -a
+	uv run $(PYTHON_ARG) prek run -a
 
 # ===============
 # Testing Section
@@ -101,7 +101,7 @@ test-integration-setup: ## Start Docker services for integration tests
 	docker compose -f dev/docker-compose-integration.yml kill
 	docker compose -f dev/docker-compose-integration.yml rm -f
 	docker compose -f dev/docker-compose-integration.yml up -d --wait
-	uv run python dev/provision.py
+	uv run $(PYTHON_ARG) python dev/provision.py
 
 test-integration-exec: ## Run integration tests (excluding provision)
 	$(TEST_RUNNER) pytest tests/ -m integration $(PYTEST_ARGS)
@@ -133,10 +133,10 @@ test-coverage: COVERAGE=1
 test-coverage: test test-integration test-s3 test-adls test-gcs coverage-report ## Run all tests with coverage and report
 
 coverage-report: ## Combine and report coverage
-	uv run coverage combine
-	uv run coverage report -m --fail-under=$(COVERAGE_FAIL_UNDER)
-	uv run coverage html
-	uv run coverage xml
+	uv run $(PYTHON_ARG) coverage combine
+	uv run $(PYTHON_ARG) coverage report -m --fail-under=$(COVERAGE_FAIL_UNDER)
+	uv run $(PYTHON_ARG) coverage html
+	uv run $(PYTHON_ARG) coverage xml
 
 # ================
 # Documentation
@@ -145,13 +145,13 @@ coverage-report: ## Combine and report coverage
 ##@ Documentation
 
 docs-install: ## Install docs dependencies (included in default groups)
-	uv sync --group docs
+	uv sync $(PYTHON_ARG) --group docs
 
 docs-serve: ## Serve local docs preview (hot reload)
-	uv run mkdocs serve -f mkdocs/mkdocs.yml
+	uv run $(PYTHON_ARG) mkdocs serve -f mkdocs/mkdocs.yml
 
 docs-build: ## Build the static documentation site
-	uv run mkdocs build -f mkdocs/mkdocs.yml --strict
+	uv run $(PYTHON_ARG) mkdocs build -f mkdocs/mkdocs.yml --strict
 
 # ===================
# Project Maintenance
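The recurring edit above threads `$(PYTHON_ARG)` through every `uv run` and `uv sync` invocation, so the interpreter chosen for `uv venv` is also the one that runs tests, lint, and docs. `PYTHON_ARG` itself is defined outside these hunks; presumably it expands to something like `--python $(PYTHON)` when a version is requested and to nothing otherwise. A quick way to sanity-check that the pin takes effect (hypothetical helper script, not part of the repo):

```python
# check_interpreter.py -- hypothetical sanity check, not part of the repo.
# Invoke as: uv run --python 3.12 python check_interpreter.py
import sys

# If uv honored the --python pin, this prints the requested version.
print(f"{sys.version_info.major}.{sys.version_info.minor}")  # e.g. 3.12
```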

dev/.rat-excludes

Lines changed: 0 additions & 2 deletions
@@ -5,5 +5,3 @@ build
 .gitignore
 uv.lock
 mkdocs/*
-setup.cfg
-(^|.*/)[^/]*\.egg-info(/.*)?$

pyiceberg/avro/decoder.py

Lines changed: 1 addition & 1 deletion
@@ -181,6 +181,6 @@ def new_decoder(b: bytes) -> BinaryDecoder:
     except ModuleNotFoundError:
         import warnings
 
-        warnings.warn("Falling back to pure Python Avro decoder, missing Cython implementation")
+        warnings.warn("Falling back to pure Python Avro decoder, missing Cython implementation", stacklevel=2)
 
         return StreamingBinaryDecoder(b)
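The `stacklevel=2` additions are the substance of the warning changes throughout this commit (see ruff.toml below, where B028 leaves the ignore list): they attribute each warning to the caller of the library function rather than to the `warnings.warn` line itself, which makes the message actionable for users. A minimal sketch of the difference, with a hypothetical function name:

```python
import warnings

def new_decoder_fallback() -> None:
    # With the default stacklevel=1 the warning points at this line;
    # with stacklevel=2 it points one frame up, at our caller.
    warnings.warn("Falling back to pure Python Avro decoder", stacklevel=2)

new_decoder_fallback()  # the UserWarning is now reported against this call site
```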

pyiceberg/expressions/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -131,7 +131,7 @@ class Term(Generic[L], ABC):
     """A simple expression that evaluates to a value."""
 
 
-class Bound(ABC):
+class Bound:
     """Represents a bound value expression."""
 
 
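Dropping the `ABC` base here (and from `ScanTask` in pyiceberg/table/__init__.py below) tracks ruff's B024 rule, also un-ignored in this commit: a class that inherits from `ABC` but declares no abstract methods can be instantiated anyway, so the base conveys nothing. A short illustration with hypothetical classes:

```python
from abc import ABC, abstractmethod

class Marker(ABC):  # B024: ABC base, but no abstract members -- Marker() still works
    """Nothing here is abstract, so ABC adds no enforcement."""

class Node(ABC):    # fine: has an @abstractmethod, so Node() raises TypeError
    @abstractmethod
    def evaluate(self) -> object: ...
```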

pyiceberg/io/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -340,7 +340,7 @@ def _infer_file_io_from_scheme(path: str, properties: Properties) -> FileIO | None:
     if file_io := _import_file_io(file_io_path, properties):
         return file_io
     else:
-        warnings.warn(f"No preferred file implementation for scheme: {parsed_url.scheme}")
+        warnings.warn(f"No preferred file implementation for scheme: {parsed_url.scheme}", stacklevel=2)
         return None
 
 

pyiceberg/io/pyarrow.py

Lines changed: 2 additions & 2 deletions
@@ -232,7 +232,7 @@ def _import_retry_strategy(impl: str) -> S3RetryStrategy | None:
         class_ = getattr(module, class_name)
         return class_()
     except (ModuleNotFoundError, AttributeError):
-        warnings.warn(f"Could not initialize S3 retry strategy: {impl}")
+        warnings.warn(f"Could not initialize S3 retry strategy: {impl}", stacklevel=2)
         return None
 
 
@@ -2768,7 +2768,7 @@ def _get_parquet_writer_kwargs(table_properties: Properties) -> Dict[str, Any]:
         f"{TableProperties.PARQUET_BLOOM_FILTER_COLUMN_ENABLED_PREFIX}.*",
     ]:
         if unsupported_keys := fnmatch.filter(table_properties, key_pattern):
-            warnings.warn(f"Parquet writer option(s) {unsupported_keys} not implemented")
+            warnings.warn(f"Parquet writer option(s) {unsupported_keys} not implemented", stacklevel=2)
 
     compression_codec = table_properties.get(TableProperties.PARQUET_COMPRESSION, TableProperties.PARQUET_COMPRESSION_DEFAULT)
     compression_level = property_as_int(

pyiceberg/table/__init__.py

Lines changed: 5 additions & 5 deletions
@@ -663,7 +663,7 @@ def delete(
             self.table_metadata.properties.get(TableProperties.DELETE_MODE, TableProperties.DELETE_MODE_DEFAULT)
             == TableProperties.DELETE_MODE_MERGE_ON_READ
         ):
-            warnings.warn("Merge on read is not yet supported, falling back to copy-on-write")
+            warnings.warn("Merge on read is not yet supported, falling back to copy-on-write", stacklevel=2)
 
         if isinstance(delete_filter, str):
             delete_filter = _parse_row_filter(delete_filter)
@@ -731,7 +731,7 @@ def delete(
                 overwrite_snapshot.append_data_file(replaced_data_file)
 
         if not delete_snapshot.files_affected and not delete_snapshot.rewrites_needed:
-            warnings.warn("Delete operation did not match any records")
+            warnings.warn("Delete operation did not match any records", stacklevel=2)
 
     def upsert(
         self,
@@ -1502,7 +1502,7 @@ def _do_commit(self, updates: Tuple[TableUpdate, ...], requirements: Tuple[TableRequirement, ...]) -> None:
         try:
             self.catalog._delete_old_metadata(self.io, self.metadata, response.metadata)
         except Exception as e:
-            warnings.warn(f"Failed to delete old metadata after commit: {e}")
+            warnings.warn(f"Failed to delete old metadata after commit: {e}", stacklevel=2)
 
         self.metadata = response.metadata
         self.metadata_location = response.metadata_location
@@ -1728,7 +1728,7 @@ def projection(self) -> Schema:
                     schema for schema in self.table_metadata.schemas if schema.schema_id == snapshot.schema_id
                 )
             except StopIteration:
-                warnings.warn(f"Metadata does not contain schema with id: {snapshot.schema_id}")
+                warnings.warn(f"Metadata does not contain schema with id: {snapshot.schema_id}", stacklevel=2)
         else:
             raise ValueError(f"Snapshot not found: {self.snapshot_id}")
 
@@ -1783,7 +1783,7 @@ def with_case_sensitive(self: S, case_sensitive: bool = True) -> S:
     def count(self) -> int: ...
 
 
-class ScanTask(ABC):
+class ScanTask:
     pass
 
 

pyiceberg/table/snapshots.py

Lines changed: 1 addition & 1 deletion
@@ -187,7 +187,7 @@ class Summary(IcebergBaseModel, Mapping[str, str]):
 
     def __init__(self, operation: Operation | None = None, **data: Any) -> None:
         if operation is None:
-            warnings.warn("Encountered invalid snapshot summary: operation is missing, defaulting to overwrite")
+            warnings.warn("Encountered invalid snapshot summary: operation is missing, defaulting to overwrite", stacklevel=2)
             operation = Operation.OVERWRITE
         super().__init__(operation=operation, **data)
         self._additional_properties = data

pyproject.toml

Lines changed: 7 additions & 7 deletions
@@ -45,6 +45,13 @@ dependencies = [
     "zstandard>=0.13.0,<1.0.0"
 ]
 
+[project.urls]
+Homepage = "https://py.iceberg.apache.org/"
+Repository = "https://github.com/apache/iceberg-python"
+
+[project.scripts]
+pyiceberg = "pyiceberg.cli.console:run"
+
 [project.optional-dependencies]
 pyarrow = [
     "pyarrow>=17.0.0",
@@ -93,13 +100,6 @@ pyiceberg-core = ["pyiceberg-core>=0.5.1,<0.8.0"]
 datafusion = ["datafusion>=45,<49"]
 gcp-auth = ["google-auth>=2.4.0"]
 
-[project.urls]
-Homepage = "https://py.iceberg.apache.org/"
-Repository = "https://github.com/apache/iceberg-python"
-
-[project.scripts]
-pyiceberg = "pyiceberg.cli.console:run"
-
 [dependency-groups]
 dev = [
     "pytest==7.4.4",

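These pyproject.toml hunks only relocate `[project.urls]` and `[project.scripts]` ahead of `[project.optional-dependencies]`; no values change. For context, the `pyiceberg = "pyiceberg.cli.console:run"` entry point means the installed `pyiceberg` command resolves to the `run` callable, roughly equivalent to this generated wrapper:

```python
# Rough approximation of the console script that installers generate
# for the entry point "pyiceberg.cli.console:run".
import sys

from pyiceberg.cli.console import run

if __name__ == "__main__":
    sys.exit(run())
```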
ruff.toml

Lines changed: 0 additions & 2 deletions
@@ -59,8 +59,6 @@ select = [
 ]
 ignore = [
     "E501",
-    "B024",
-    "B028",
     "UP035",
     "UP006"
 ]
