 from dataclasses import dataclass
 from datetime import timedelta
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, cast
+from typing import TYPE_CHECKING, Any, Literal, cast
 from unittest.mock import AsyncMock, Mock, call

 import httpx
@@ -889,11 +889,20 @@ async def handler(context: BasicCrawlingContext) -> None:


 @pytest.mark.skipif(os.name == 'nt' and 'CI' in os.environ, reason='Skipped in Windows CI')
-async def test_logs_final_statistics(monkeypatch: pytest.MonkeyPatch, caplog: pytest.LogCaptureFixture) -> None:
+@pytest.mark.parametrize(
+    ('statistics_log_format'),
+    [
+        pytest.param('table', id='With table for logs'),
+        pytest.param('inline', id='With inline logs'),
+    ],
+)
+async def test_logs_final_statistics(
+    monkeypatch: pytest.MonkeyPatch, caplog: pytest.LogCaptureFixture, statistics_log_format: Literal['table', 'inline']
+) -> None:
     # Set the log level to INFO to capture the final statistics log.
     caplog.set_level(logging.INFO)

-    crawler = BasicCrawler(configure_logging=False)
+    crawler = BasicCrawler(configure_logging=False, statistics_log_format=statistics_log_format)

     @crawler.router.default_handler
     async def handler(context: BasicCrawlingContext) -> None:
@@ -923,21 +932,36 @@ async def handler(context: BasicCrawlingContext) -> None:
     )

     assert final_statistics is not None
-    assert final_statistics.msg.splitlines() == [
-        'Final request statistics:',
-        '┌───────────────────────────────┬───────────┐',
-        '│ requests_finished             │ 4         │',
-        '│ requests_failed               │ 33        │',
-        '│ retry_histogram               │ [1, 4, 8] │',
-        '│ request_avg_failed_duration   │ 99.0      │',
-        '│ request_avg_finished_duration │ 0.483     │',
-        '│ requests_finished_per_minute  │ 0.33      │',
-        '│ requests_failed_per_minute    │ 0.1       │',
-        '│ request_total_duration        │ 720.0     │',
-        '│ requests_total                │ 37        │',
-        '│ crawler_runtime               │ 300.0     │',
-        '└───────────────────────────────┴───────────┘',
-    ]
+    if statistics_log_format == 'table':
+        assert final_statistics.msg.splitlines() == [
+            'Final request statistics:',
+            '┌───────────────────────────────┬───────────┐',
+            '│ requests_finished             │ 4         │',
+            '│ requests_failed               │ 33        │',
+            '│ retry_histogram               │ [1, 4, 8] │',
+            '│ request_avg_failed_duration   │ 99.0      │',
+            '│ request_avg_finished_duration │ 0.483     │',
+            '│ requests_finished_per_minute  │ 0.33      │',
+            '│ requests_failed_per_minute    │ 0.1       │',
+            '│ request_total_duration        │ 720.0     │',
+            '│ requests_total                │ 37        │',
+            '│ crawler_runtime               │ 300.0     │',
+            '└───────────────────────────────┴───────────┘',
+        ]
+    else:
+        assert final_statistics.msg == 'Final request statistics:'
+
+        # ignore[attr-defined] since `extra` parameters are not defined for `LogRecord`
+        assert final_statistics.requests_finished == 4  # type: ignore[attr-defined]
+        assert final_statistics.requests_failed == 33  # type: ignore[attr-defined]
+        assert final_statistics.retry_histogram == [1, 4, 8]  # type: ignore[attr-defined]
+        assert final_statistics.request_avg_failed_duration == 99.0  # type: ignore[attr-defined]
+        assert final_statistics.request_avg_finished_duration == 0.483  # type: ignore[attr-defined]
+        assert final_statistics.requests_finished_per_minute == 0.33  # type: ignore[attr-defined]
+        assert final_statistics.requests_failed_per_minute == 0.1  # type: ignore[attr-defined]
+        assert final_statistics.request_total_duration == 720.0  # type: ignore[attr-defined]
+        assert final_statistics.requests_total == 37  # type: ignore[attr-defined]
+        assert final_statistics.crawler_runtime == 300.0  # type: ignore[attr-defined]


 async def test_crawler_manual_stop(httpbin: URL) -> None:
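The `inline` branch above relies on standard-library behavior: keys passed through `logging`'s `extra` mapping are copied onto the emitted `LogRecord` as plain attributes, which is why the test reads them as `final_statistics.requests_finished` and silences the type checker with `# type: ignore[attr-defined]`. A self-contained sketch of that mechanism (logger name and values are illustrative only):

```python
import logging

import pytest


def test_extra_becomes_record_attributes(caplog: pytest.LogCaptureFixture) -> None:
    caplog.set_level(logging.INFO)
    logger = logging.getLogger('statistics-demo')  # hypothetical logger name

    # The stdlib copies every key of `extra` onto the LogRecord as an attribute.
    logger.info('Final request statistics:', extra={'requests_finished': 4, 'retry_histogram': [1, 4, 8]})

    record = caplog.records[-1]
    assert record.msg == 'Final request statistics:'
    # Static type checkers do not know about these dynamic attributes, hence the ignores in the real test.
    assert record.requests_finished == 4  # type: ignore[attr-defined]
    assert record.retry_histogram == [1, 4, 8]  # type: ignore[attr-defined]
```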