Commit 6b6236d

fix test
1 parent 5e97e31 commit 6b6236d

1 file changed: +16 -6 lines changed

tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawler.py

Lines changed: 16 additions & 6 deletions
@@ -32,16 +32,18 @@
     AdaptiveContextError,
 )
 from crawlee.statistics import Statistics
+from crawlee.storages import KeyValueStore

 if TYPE_CHECKING:
-    from collections.abc import Iterator
+    from collections.abc import AsyncGenerator, Iterator

     import respx

     from crawlee.browsers._browser_plugin import BrowserPlugin
     from crawlee.browsers._types import CrawleePage
     from crawlee.proxy_configuration import ProxyInfo

+
 _H1_TEXT = 'Static'
 _H2_TEXT = 'Only in browser'
 _H3_CHANGED_TEXT = 'Changed by JS'
@@ -74,6 +76,13 @@ def test_urls(respx_mock: respx.MockRouter) -> list[str]:
     return urls


+@pytest.fixture
+async def key_value_store() -> AsyncGenerator[KeyValueStore, None]:
+    kvs = await KeyValueStore.open()
+    yield kvs
+    await kvs.drop()
+
+
 class _StaticRedirectBrowserPool(BrowserPool):
     """BrowserPool for redirecting browser requests to static content."""

@@ -384,7 +393,9 @@ async def request_handler(context: AdaptivePlaywrightCrawlingContext) -> None:
     mocked_store_result.assert_called_once_with(requests[0], expected_result_rendering_type)


-async def test_adaptive_crawling_result_use_state_isolation(test_urls: list[str]) -> None:
+async def test_adaptive_crawling_result_use_state_isolation(
+    key_value_store: KeyValueStore, test_urls: list[str]
+) -> None:
     """Tests that global state accessed through `use_state` is changed only by one sub crawler.

     Enforced rendering type detection to run both sub crawlers."""
@@ -393,8 +404,7 @@ async def test_adaptive_crawling_result_use_state_isolation(test_urls: list[str]
         rendering_type_predictor=static_only_predictor_enforce_detection,
         playwright_crawler_specific_kwargs={'browser_pool': _StaticRedirectBrowserPool.with_default_plugin()},
     )
-    store = await crawler.get_key_value_store()
-    await store.set_value(BasicCrawler._CRAWLEE_STATE_KEY, {'counter': 0})
+    await key_value_store.set_value(BasicCrawler._CRAWLEE_STATE_KEY, {'counter': 0})
     request_handler_calls = 0

     @crawler.router.default_handler
@@ -406,12 +416,12 @@ async def request_handler(context: AdaptivePlaywrightCrawlingContext) -> None:

     await crawler.run(test_urls[:1])

-    await store.persist_autosaved_values()
+    await key_value_store.persist_autosaved_values()

     # Request handler was called twice
     assert request_handler_calls == 2
     # Increment of global state happened only once
-    assert (await store.get_value(BasicCrawler._CRAWLEE_STATE_KEY))['counter'] == 1
+    assert (await key_value_store.get_value(BasicCrawler._CRAWLEE_STATE_KEY))['counter'] == 1


 async def test_adaptive_crawling_statistics(test_urls: list[str]) -> None:
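Note: the new key_value_store fixture follows the usual async-generator setup/teardown pattern. A minimal, self-contained sketch of how such a fixture is consumed is shown below, assuming pytest-asyncio (or an equivalent async test runner); the test function and key names here are hypothetical and only illustrate that each test receives a fresh store that is dropped afterwards.

from collections.abc import AsyncGenerator

import pytest

from crawlee.storages import KeyValueStore


@pytest.fixture
async def key_value_store() -> AsyncGenerator[KeyValueStore, None]:
    # Open the default key-value store before the test runs...
    kvs = await KeyValueStore.open()
    yield kvs
    # ...and drop it afterwards so state does not leak into other tests.
    await kvs.drop()


async def test_counter_starts_fresh(key_value_store: KeyValueStore) -> None:
    # Hypothetical test body: each test sees an empty, isolated store.
    await key_value_store.set_value('counter', 0)
    assert await key_value_store.get_value('counter') == 0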
