Skip to content

Commit bce2491

Browse files
authored
test: Use rerun instead of skip for flaky tests (#1670)
### Description - Perform 3 launch attempts for tests marked as flaky, instead of skipping them.
1 parent 79b6978 commit bce2491

6 files changed

Lines changed: 32 additions & 15 deletions

File tree

pyproject.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -107,6 +107,7 @@ dev = [
107107
"pydoc-markdown<5.0.0",
108108
"pytest-asyncio<2.0.0",
109109
"pytest-cov<8.0.0",
110+
"pytest-rerunfailures<17.0.0",
110111
"pytest-timeout<3.0.0",
111112
"pytest-xdist<4.0.0",
112113
"pytest<9.0.0",

tests/unit/_autoscaling/test_autoscaled_pool.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@
33
from __future__ import annotations
44

55
import asyncio
6-
import sys
76
from contextlib import suppress
87
from datetime import datetime, timedelta, timezone
98
from itertools import chain, repeat
@@ -145,8 +144,8 @@ async def run() -> None:
145144
await pool.run()
146145

147146

148-
@pytest.mark.skipif(
149-
sys.platform != 'linux',
147+
@pytest.mark.flaky(
148+
reruns=3,
150149
reason='Test is flaky on Windows and MacOS, see https://github.com/apify/crawlee-python/issues/1655.',
151150
)
152151
async def test_autoscales(

tests/unit/browsers/test_browser_pool.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
11
from __future__ import annotations
22

3-
import sys
43
from typing import TYPE_CHECKING
54

65
import pytest
@@ -63,8 +62,8 @@ async def test_multiple_plugins_new_page_creation(server_url: URL) -> None:
6362
assert browser_pool.total_pages_count == 3
6463

6564

66-
@pytest.mark.skipif(
67-
sys.platform != 'linux',
65+
@pytest.mark.flaky(
66+
reruns=3,
6867
reason='Test is flaky on Windows and MacOS, see https://github.com/apify/crawlee-python/issues/1660.',
6968
)
7069
async def test_new_page_with_each_plugin(server_url: URL) -> None:

tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawler.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,6 @@
22

33
import asyncio
44
import logging
5-
import sys
65
from dataclasses import dataclass
76
from datetime import timedelta
87
from itertools import cycle
@@ -607,8 +606,8 @@ async def request_handler(context: AdaptivePlaywrightCrawlingContext) -> None:
607606
mocked_h2_handler.assert_has_calls([call(expected_h2_tag)])
608607

609608

610-
@pytest.mark.skipif(
611-
sys.platform != 'linux',
609+
@pytest.mark.flaky(
610+
reruns=3,
612611
reason='Test is flaky on Windows and MacOS, see https://github.com/apify/crawlee-python/issues/1650.',
613612
)
614613
async def test_adaptive_context_query_selector_parsel(test_urls: list[str]) -> None:

tests/unit/crawlers/_basic/test_basic_crawler.py

Lines changed: 10 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1302,9 +1302,8 @@ async def handler(context: BasicCrawlingContext) -> None:
13021302

13031303

13041304
@pytest.mark.run_alone
1305-
@pytest.mark.skipif(
1306-
sys.platform != 'linux',
1307-
reason='Test is flaky on Windows and MacOS, see https://github.com/apify/crawlee-python/issues/1652.',
1305+
@pytest.mark.flaky(
1306+
reruns=3, reason='Test is flaky on Windows and MacOS, see https://github.com/apify/crawlee-python/issues/1652.'
13081307
)
13091308
@pytest.mark.skipif(sys.version_info[:3] < (3, 11), reason='asyncio.timeout was introduced in Python 3.11.')
13101309
@pytest.mark.parametrize(
@@ -1328,7 +1327,11 @@ async def test_timeout_in_handler(sleep_type: str) -> None:
13281327
double_handler_timeout_s = handler_timeout.total_seconds() * 2
13291328
handler_sleep = iter([double_handler_timeout_s, double_handler_timeout_s, 0])
13301329

1331-
crawler = BasicCrawler(request_handler_timeout=handler_timeout, max_request_retries=max_request_retries)
1330+
crawler = BasicCrawler(
1331+
request_handler_timeout=handler_timeout,
1332+
max_request_retries=max_request_retries,
1333+
storage_client=MemoryStorageClient(),
1334+
)
13321335

13331336
mocked_handler_before_sleep = Mock()
13341337
mocked_handler_after_sleep = Mock()
@@ -1355,8 +1358,8 @@ async def handler(context: BasicCrawlingContext) -> None:
13551358
assert mocked_handler_after_sleep.call_count == 1
13561359

13571360

1358-
@pytest.mark.skipif(
1359-
sys.platform != 'linux',
1361+
@pytest.mark.flaky(
1362+
reruns=3,
13601363
reason='Test is flaky on Windows and MacOS, see https://github.com/apify/crawlee-python/issues/1649.',
13611364
)
13621365
@pytest.mark.parametrize(
@@ -1381,6 +1384,7 @@ async def test_keep_alive(
13811384
max_requests_per_crawl=max_requests_per_crawl,
13821385
# If more request can run in parallel, then max_requests_per_crawl is not deterministic.
13831386
concurrency_settings=ConcurrencySettings(desired_concurrency=1, max_concurrency=1),
1387+
storage_client=MemoryStorageClient(),
13841388
)
13851389
mocked_handler = Mock()
13861390

uv.lock

Lines changed: 15 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments
 (0)