Skip to content

Commit 518b498

Browse files
committed
Fix(lint): Apply black formatting to existing files
- Run 'black' on datasource_internal.py and python_streaming_source_runner.py to fix formatting issues flagged by CI.
1 parent 22fe2ac commit 518b498

File tree

1 file changed

+9
-7
lines changed

1 file changed

+9
-7
lines changed

python/pyspark/sql/streaming/python_streaming_source_runner.py

Lines changed: 9 additions & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -94,16 +94,16 @@ def partitions_func(
9494
if it is None:
9595
write_int(PREFETCHED_RECORDS_NOT_FOUND, outfile)
9696
else:
97-
send_batch_func( # noqa: E501
97+
send_batch_func(
9898
it, outfile, schema, max_arrow_batch_size, data_source
99-
)
99+
) # noqa: E501
100100
else:
101101
write_int(PREFETCHED_RECORDS_NOT_FOUND, outfile)
102102

103103

104-
def commit_func( # noqa: E501
104+
def commit_func(
105105
reader: DataSourceStreamReader, infile: IO, outfile: IO
106-
) -> None:
106+
) -> None: # noqa: E501
107107
end_offset = json.loads(utf8_deserializer.loads(infile))
108108
reader.commit(end_offset)
109109
write_int(0, outfile)
@@ -180,7 +180,9 @@ def send_batch_func(
180180
data_source: DataSource,
181181
) -> None:
182182
batches = list(
183-
records_to_arrow_batches(rows, max_arrow_batch_size, schema, data_source) # noqa: E501
183+
records_to_arrow_batches(
184+
rows, max_arrow_batch_size, schema, data_source
185+
) # noqa: E501
184186
)
185187
if len(batches) != 0:
186188
write_int(NON_EMPTY_PYARROW_RECORD_BATCHES, outfile)
@@ -196,9 +198,9 @@ def main(infile: IO, outfile: IO) -> None:
196198
check_python_version(infile)
197199
setup_spark_files(infile)
198200

199-
memory_limit_mb = int( # noqa: E501
201+
memory_limit_mb = int(
200202
os.environ.get("PYSPARK_PLANNER_MEMORY_MB", "-1")
201-
)
203+
) # noqa: E501
202204
setup_memory_limits(memory_limit_mb)
203205

204206
_accumulatorRegistry.clear()

0 commit comments

Comments (0)