37 changes: 35 additions & 2 deletions src/crawlee/_types.py
@@ -2,8 +2,9 @@

import dataclasses
from collections.abc import Callable, Iterator, Mapping
from copy import deepcopy
from dataclasses import dataclass
from typing import TYPE_CHECKING, Annotated, Any, Literal, Protocol, TypedDict, TypeVar, cast, overload
from typing import TYPE_CHECKING, Annotated, Any, ClassVar, Literal, Protocol, TypedDict, TypeVar, cast, overload

from pydantic import ConfigDict, Field, PlainValidator, RootModel

@@ -260,12 +261,27 @@ async def get_value(self, key: str, default_value: T | None = None) -> T | None:
class RequestHandlerRunResult:
"""Record of calls to storage-related context helpers."""

def __init__(self, *, key_value_store_getter: GetKeyValueStoreFunction) -> None:
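# Only the fields listed here are compared and written back to the originals by sync_request / sync_session.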
_REQUEST_SYNC_FIELDS: ClassVar[frozenset[str]] = frozenset({'headers', 'user_data'})
_SESSION_SYNC_FIELDS: ClassVar[frozenset[str]] = frozenset(
{'_user_data', '_usage_count', '_error_score', '_cookies'}
)

def __init__(
self,
*,
key_value_store_getter: GetKeyValueStoreFunction,
request: Request,
session: Session | None = None,
) -> None:
self._key_value_store_getter = key_value_store_getter
self.add_requests_calls = list[AddRequestsKwargs]()
self.push_data_calls = list[PushDataFunctionCall]()
self.key_value_store_changes = dict[tuple[str | None, str | None, str | None], KeyValueStoreChangeRecords]()

# Isolated copies for handler execution
self.request = deepcopy(request)
self.session = deepcopy(session) if session else None

async def add_requests(
self,
requests: Sequence[str | Request],
@@ -315,6 +331,23 @@ async def get_key_value_store(

return self.key_value_store_changes[id, name, alias]

def sync_request(self, sync_request: Request) -> None:
"""Sync request state from copies back to originals."""
for field in self._REQUEST_SYNC_FIELDS:
value = getattr(self.request, field)
original_value = getattr(sync_request, field)
if value != original_value:
object.__setattr__(sync_request, field, value)

def sync_session(self, sync_session: Session | None = None) -> None:
"""Sync session state from copies back to originals."""
if self.session and sync_session:
for field in self._SESSION_SYNC_FIELDS:
value = getattr(self.session, field)
original_value = getattr(sync_session, field)
if value != original_value:
object.__setattr__(sync_session, field, value)
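These two methods close the loop opened by the deep copies taken in `__init__`: the handler mutates only the copies owned by the result, and the crawler later writes any changed fields back onto the originals via `object.__setattr__`, which also works on frozen dataclasses and validated models. A minimal sketch of that round trip, assuming `Request.from_url` and a dict-like `user_data` as used elsewhere in crawlee; the key-value-store getter and URL are illustrative stand-ins, not part of the PR:

```python
import asyncio

from crawlee import Request
from crawlee._types import RequestHandlerRunResult


async def kvs_getter(*, id=None, name=None, alias=None):
    raise NotImplementedError  # stand-in for GetKeyValueStoreFunction; never awaited in this sketch


async def main() -> None:
    original = Request.from_url('https://example.com')  # illustrative URL

    result = RequestHandlerRunResult(key_value_store_getter=kvs_getter, request=original)

    # A handler would see and mutate only the deep copy held by the result.
    result.request.user_data['request_state'] = ['initial', 'handler']
    assert 'request_state' not in original.user_data

    # Commit time: only the fields in _REQUEST_SYNC_FIELDS (headers, user_data) flow back.
    result.sync_request(original)
    assert original.user_data['request_state'] == ['initial', 'handler']


asyncio.run(main())
```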


@docs_group('Functions')
class AddRequestsFunction(Protocol):
@@ -290,10 +290,12 @@ async def get_input_state(
use_state_function = context.use_state

# A new result is created and injected into the newly created context. This is done to ensure isolation of the sub-crawlers.
result = RequestHandlerRunResult(key_value_store_getter=self.get_key_value_store)
result = RequestHandlerRunResult(
key_value_store_getter=self.get_key_value_store, request=context.request, session=context.session
)
context_linked_to_result = BasicCrawlingContext(
request=deepcopy(context.request),
session=deepcopy(context.session),
request=result.request,
session=result.session,
proxy_info=deepcopy(context.proxy_info),
send_request=context.send_request,
add_requests=result.add_requests,
@@ -314,7 +316,7 @@ async def get_input_state(
),
logger=self._logger,
)
return SubCrawlerRun(result=result, run_context=context_linked_to_result)
return SubCrawlerRun(result=result)
except Exception as e:
return SubCrawlerRun(exception=e)
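With the context now built directly on `result.request` and `result.session`, the result object alone is enough to carry a sub-crawler run: on success the state is synced back later by `BasicCrawler._commit_request_handler_result`, and a run that is discarded (for example, a static attempt rejected by `result_checker`) needs no rollback because it never touched the originals. A standalone illustration of the discard case, under the same assumptions as the sketch above:

```python
import asyncio

from crawlee import Request
from crawlee._types import RequestHandlerRunResult


async def kvs_getter(*, id=None, name=None, alias=None):
    raise NotImplementedError  # stand-in; never awaited in this sketch


async def main() -> None:
    original = Request.from_url('https://example.com')  # illustrative URL

    # Each sub-crawler attempt gets its own result, and with it its own copies.
    attempt = RequestHandlerRunResult(key_value_store_getter=kvs_getter, request=original)
    attempt.request.user_data['rendering'] = 'static'

    # Dropping the attempt without calling sync_request() leaves the original untouched,
    # so no explicit rollback (and no run_context on SubCrawlerRun) is needed.
    assert 'rendering' not in original.user_data


asyncio.run(main())
```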

@@ -370,8 +372,7 @@ async def _run_request_handler(self, context: BasicCrawlingContext) -> None:
self.track_http_only_request_handler_runs()

static_run = await self._crawl_one(rendering_type='static', context=context)
if static_run.result and static_run.run_context and self.result_checker(static_run.result):
self._update_context_from_copy(context, static_run.run_context)
if static_run.result and self.result_checker(static_run.result):
self._context_result_map[context] = static_run.result
return
if static_run.exception:
@@ -402,7 +403,7 @@ async def _run_request_handler(self, context: BasicCrawlingContext) -> None:
if pw_run.exception is not None:
raise pw_run.exception

if pw_run.result and pw_run.run_context:
if pw_run.result:
if should_detect_rendering_type:
detection_result: RenderingType
static_run = await self._crawl_one('static', context=context, state=old_state_copy)
@@ -414,7 +415,6 @@ async def _run_request_handler(self, context: BasicCrawlingContext) -> None:
context.log.debug(f'Detected rendering type {detection_result} for {context.request.url}')
self.rendering_type_predictor.store_result(context.request, detection_result)

self._update_context_from_copy(context, pw_run.run_context)
self._context_result_map[context] = pw_run.result

def pre_navigation_hook(
@@ -451,32 +451,8 @@ def track_browser_request_handler_runs(self) -> None:
def track_rendering_type_mispredictions(self) -> None:
self.statistics.state.rendering_type_mispredictions += 1

def _update_context_from_copy(self, context: BasicCrawlingContext, context_copy: BasicCrawlingContext) -> None:
"""Update mutable fields of `context` from `context_copy`.

Uses object.__setattr__ to bypass frozen dataclass restrictions,
allowing state synchronization after isolated crawler execution.
"""
updating_attributes = {
'request': ('headers', 'user_data'),
'session': ('_user_data', '_usage_count', '_error_score', '_cookies'),
}

for attr, sub_attrs in updating_attributes.items():
original_sub_obj = getattr(context, attr)
copy_sub_obj = getattr(context_copy, attr)

# Check that both sub objects are not None
if original_sub_obj is None or copy_sub_obj is None:
continue

for sub_attr in sub_attrs:
new_value = getattr(copy_sub_obj, sub_attr)
object.__setattr__(original_sub_obj, sub_attr, new_value)


@dataclass(frozen=True)
class SubCrawlerRun:
result: RequestHandlerRunResult | None = None
exception: Exception | None = None
run_context: BasicCrawlingContext | None = None
22 changes: 16 additions & 6 deletions src/crawlee/crawlers/_basic/_basic_crawler.py
@@ -1312,7 +1312,12 @@ async def _add_requests(

return await request_manager.add_requests(context_aware_requests)

async def _commit_request_handler_result(self, context: BasicCrawlingContext) -> None:
async def _commit_request_handler_result(
self,
context: BasicCrawlingContext,
original_request: Request,
original_session: Session | None = None,
) -> None:
"""Commit request handler result for the input `context`. Result is taken from `_context_result_map`."""
result = self._context_result_map[context]

@@ -1324,6 +1329,9 @@ async def _commit_request_handler_result(self, context: BasicCrawlingContext) ->

await self._commit_key_value_store_changes(result, get_kvs=self.get_key_value_store)

result.sync_session(sync_session=original_session)
result.sync_request(sync_request=original_request)
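The commit now ends by folding the handler's changes back into the objects the crawler actually tracks: storage calls are replayed first, then the session and request copies are synced onto the originals captured in `__run_task_function`. A sketch of the session half, assuming `crawlee.sessions.Session` can be constructed with defaults and exposes `user_data` as the test further below relies on; the getter and URL are illustrative stand-ins:

```python
import asyncio

from crawlee import Request
from crawlee._types import RequestHandlerRunResult
from crawlee.sessions import Session


async def kvs_getter(*, id=None, name=None, alias=None):
    raise NotImplementedError  # stand-in; never awaited in this sketch


async def main() -> None:
    original_request = Request.from_url('https://example.com')  # illustrative URL
    original_session = Session()

    result = RequestHandlerRunResult(
        key_value_store_getter=kvs_getter,
        request=original_request,
        session=original_session,
    )

    # Handler-side mutation happens on the copies only.
    assert result.session is not None
    result.session.user_data['session_state'] = True

    # Commit phase, mirroring _commit_request_handler_result:
    result.sync_session(sync_session=original_session)
    result.sync_request(sync_request=original_request)

    assert original_session.user_data.get('session_state') is True


asyncio.run(main())
```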

@staticmethod
async def _commit_key_value_store_changes(
result: RequestHandlerRunResult, get_kvs: GetKeyValueStoreFromRequestHandlerFunction
@@ -1389,11 +1397,13 @@ async def __run_task_function(self) -> None:
else:
session = await self._get_session()
proxy_info = await self._get_proxy_info(request, session)
result = RequestHandlerRunResult(key_value_store_getter=self.get_key_value_store)
result = RequestHandlerRunResult(
key_value_store_getter=self.get_key_value_store, request=request, session=session
)

context = BasicCrawlingContext(
request=request,
session=session,
request=result.request,
session=result.session,
proxy_info=proxy_info,
send_request=self._prepare_send_request_function(session, proxy_info),
add_requests=result.add_requests,
@@ -1416,9 +1426,9 @@ async def __run_task_function(self) -> None:
except asyncio.TimeoutError as e:
raise RequestHandlerError(e, context) from e

await self._commit_request_handler_result(context)
await self._commit_request_handler_result(context, original_request=request, original_session=session)
await wait_for(
lambda: request_manager.mark_request_as_handled(context.request),
lambda: request_manager.mark_request_as_handled(request),
timeout=self._internal_timeout,
timeout_message='Marking request as handled timed out after '
f'{self._internal_timeout.total_seconds()} seconds',
@@ -802,6 +802,8 @@ async def request_handler(context: AdaptivePlaywrightCrawlingContext) -> None:

assert session is not None
assert check_request is not None
assert session.user_data.get('session_state') is True
# Check that the request user data was updated in the handler, and only once.
assert check_request.user_data.get('request_state') == ['initial', 'handler']