Karim Shoair committed on
Commit
a2d3f34
·
1 Parent(s): 7e18800

test: adding new tests and updating existing ones

Browse files
tests/fetchers/sync/test_requests_session.py ADDED
@@ -0,0 +1,56 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import pytest

from scrapling.engines.static import FetcherSession, FetcherClient, AsyncFetcherClient


class TestFetcherSession:
    """Unit tests for FetcherSession and its one-shot client variants."""

    def test_fetcher_session_creation(self):
        """Constructor arguments should land on the matching session defaults."""
        session = FetcherSession(timeout=30, retries=3, stealthy_headers=True)

        assert session.default_timeout == 30
        assert session.default_retries == 3
        assert session.stealth is True

    def test_fetcher_session_context_manager(self):
        """Entering the session yields the session itself and opens a curl session."""
        session = FetcherSession()

        with session as entered:
            assert entered == session
            assert session._curl_session is not None

        # __exit__ is expected to tear the inner session down again.

    def test_fetcher_session_double_enter(self):
        """Re-entering an already-entered session must raise RuntimeError."""
        session = FetcherSession()

        with session:
            with pytest.raises(RuntimeError):
                session.__enter__()

    def test_fetcher_client_creation(self):
        """FetcherClient is one-shot: no context-manager protocol, marker session."""
        client = FetcherClient()

        # Context-manager hooks are deliberately disabled on the client.
        assert client.__enter__ is None
        assert client.__exit__ is None
        assert client._curl_session is True  # Special marker

    def test_async_fetcher_client_creation(self):
        """AsyncFetcherClient mirrors FetcherClient for the async side."""
        client = AsyncFetcherClient()

        # Async context-manager hooks are deliberately disabled as well.
        assert client.__aenter__ is None
        assert client.__aexit__ is None
        assert client._async_curl_session is True  # Special marker
tests/fetchers/test_base.py ADDED
@@ -0,0 +1,84 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import pytest

from scrapling.engines.toolbelt.custom import BaseFetcher


class TestBaseFetcher:
    """Test BaseFetcher class-level configuration functionality.

    BaseFetcher configuration is shared class state, so every test that
    mutates it restores the defaults in a ``finally`` block. The previous
    version reset the state with plain statements after the assertions,
    which were skipped whenever an assertion failed — leaking modified
    configuration into unrelated tests.
    """

    def test_default_configuration(self):
        """Test default configuration values."""
        config = BaseFetcher.display_config()

        assert config['huge_tree'] is True
        assert config['adaptive'] is False
        assert config['keep_comments'] is False
        assert config['keep_cdata'] is False

    def test_configure_single_parameter(self):
        """Test configuring a single parameter."""
        try:
            BaseFetcher.configure(adaptive=True)
            config = BaseFetcher.display_config()
            assert config['adaptive'] is True
        finally:
            # Restore the default even if the assertion above failed.
            BaseFetcher.configure(adaptive=False)

    def test_configure_multiple_parameters(self):
        """Test configuring multiple parameters at once."""
        try:
            BaseFetcher.configure(
                huge_tree=False,
                keep_comments=True,
                adaptive=True
            )
            config = BaseFetcher.display_config()
            assert config['huge_tree'] is False
            assert config['keep_comments'] is True
            assert config['adaptive'] is True
        finally:
            # Restore every touched key, regardless of test outcome.
            BaseFetcher.configure(
                huge_tree=True,
                keep_comments=False,
                adaptive=False
            )

    def test_configure_invalid_parameter(self):
        """Test that an unknown configuration keyword is rejected."""
        with pytest.raises(ValueError):
            BaseFetcher.configure(invalid_param=True)

    def test_configure_no_parameters(self):
        """Test that configure() without arguments raises."""
        with pytest.raises(AttributeError):
            BaseFetcher.configure()

    def test_configure_non_parser_keyword(self):
        """Test that class attributes outside parser_keywords can't be configured."""
        # Attach an attribute that's not in parser_keywords, then try to configure it.
        BaseFetcher.some_other_attr = "test"
        try:
            with pytest.raises(AttributeError):
                BaseFetcher.configure(some_other_attr="new_value")
        finally:
            # Remove the throwaway attribute so it can't leak into other tests.
            delattr(BaseFetcher, "some_other_attr")

    def test_generate_parser_arguments(self):
        """Test that parser-argument generation reflects the configuration."""
        try:
            BaseFetcher.configure(
                huge_tree=False,
                adaptive=True,
                adaptive_domain="example.com"
            )
            args = BaseFetcher._generate_parser_arguments()

            assert args['huge_tree'] is False
            assert args['adaptive'] is True
            assert args['adaptive_domain'] == "example.com"
        finally:
            # Full reset, including the domain that configure() doesn't clear.
            BaseFetcher.configure(
                huge_tree=True,
                adaptive=False
            )
            BaseFetcher.adaptive_domain = None
tests/fetchers/test_constants.py ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from scrapling.engines.constants import (
    DEFAULT_DISABLED_RESOURCES,
    DEFAULT_STEALTH_FLAGS,
    HARMFUL_DEFAULT_ARGS
)


class TestConstants:
    """Sanity checks over the engine constant collections."""

    def test_default_disabled_resources(self):
        """Heavy resource types must be disabled out of the box."""
        for resource_type in ("image", "font", "stylesheet", "media"):
            assert resource_type in DEFAULT_DISABLED_RESOURCES

    def test_harmful_default_args(self):
        """Automation-revealing browser arguments must be flagged as harmful."""
        for flag in ("--enable-automation", "--disable-popup-blocking"):
            assert flag in HARMFUL_DEFAULT_ARGS

    def test_default_stealth_flags(self):
        """Core stealth flags must be part of the default set."""
        expected_flags = (
            "--no-pings",
            "--incognito",
            "--disable-blink-features=AutomationControlled",
        )
        for flag in expected_flags:
            assert flag in DEFAULT_STEALTH_FLAGS
tests/fetchers/test_pages.py ADDED
@@ -0,0 +1,154 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import pytest
from unittest.mock import Mock
from scrapling.engines._browsers._page import PageInfo, PagePool


class TestPageInfo:
    """Unit tests for the PageInfo state container."""

    def test_page_info_creation(self):
        """The constructor stores page, state, and URL verbatim."""
        fake_page = Mock()
        info = PageInfo(fake_page, "ready", "https://example.com")

        assert info.page == fake_page
        assert info.state == "ready"
        assert info.url == "https://example.com"

    def test_page_info_marking(self):
        """State transitions via mark_busy / mark_ready / mark_error."""
        info = PageInfo(Mock(), "ready", None)

        info.mark_busy("https://example.com")
        assert info.state == "busy"
        assert info.url == "https://example.com"

        info.mark_ready()
        assert info.state == "ready"
        assert info.url == ""

        info.mark_error()
        assert info.state == "error"

    def test_page_info_equality(self):
        """Equality compares the wrapped page only, not the state."""
        shared_page, other_page = Mock(), Mock()

        info_a = PageInfo(shared_page, "ready", None)
        info_b = PageInfo(shared_page, "busy", None)   # Same page, different state
        info_c = PageInfo(other_page, "ready", None)   # Different page

        assert info_a == info_b   # Same underlying page
        assert info_a != info_c   # Different underlying page
        assert info_a != "not a page info"  # Foreign type never compares equal

    def test_page_info_repr(self):
        """repr() includes both the state and the current URL."""
        info = PageInfo(Mock(), "ready", "https://example.com")

        text = repr(info)
        assert "ready" in text
        assert "https://example.com" in text
56
+
57
class TestPagePool:
    """Unit tests for PagePool page bookkeeping."""

    def test_page_pool_creation(self):
        """A fresh pool starts empty with the configured capacity."""
        pool = PagePool(max_pages=5)

        assert pool.max_pages == 5
        assert pool.pages_count == 0
        assert pool.ready_count == 0
        assert pool.busy_count == 0

    def test_add_page(self):
        """add_page wraps the page in a ready PageInfo and tracks it."""
        pool = PagePool(max_pages=2)
        fake_page = Mock()

        info = pool.add_page(fake_page)

        assert isinstance(info, PageInfo)
        assert info.page == fake_page
        assert info.state == "ready"
        assert pool.pages_count == 1

    def test_add_page_limit_exceeded(self):
        """Adding a page beyond max_pages raises RuntimeError."""
        pool = PagePool(max_pages=1)
        pool.add_page(Mock())

        with pytest.raises(RuntimeError):
            pool.add_page(Mock())

    def test_get_ready_page(self):
        """Busy pages are skipped when a ready page is requested."""
        pool = PagePool(max_pages=3)
        first = pool.add_page(Mock())
        second = pool.add_page(Mock())

        first.mark_busy("https://example.com")

        assert pool.get_ready_page() == second

    def test_get_ready_page_none_available(self):
        """get_ready_page returns None when every page is busy."""
        pool = PagePool(max_pages=2)
        first = pool.add_page(Mock())
        second = pool.add_page(Mock())

        first.mark_busy("https://example1.com")
        second.mark_busy("https://example2.com")

        assert pool.get_ready_page() is None

    def test_page_counts(self):
        """The count properties reflect the individual page states."""
        pool = PagePool(max_pages=3)
        busy_page = pool.add_page(Mock())
        pool.add_page(Mock())  # stays in the ready state
        broken_page = pool.add_page(Mock())

        busy_page.mark_busy("https://example.com")
        broken_page.mark_error()

        assert pool.pages_count == 3
        assert pool.ready_count == 1
        assert pool.busy_count == 1

    def test_cleanup_error_pages(self):
        """cleanup_error_pages drops every page in the error state."""
        pool = PagePool(max_pages=3)
        bad_one = pool.add_page(Mock())
        pool.add_page(Mock())  # the healthy survivor
        bad_two = pool.add_page(Mock())

        bad_one.mark_error()
        bad_two.mark_error()
        assert pool.pages_count == 3

        pool.cleanup_error_pages()

        assert pool.pages_count == 1  # only the healthy page remains
tests/fetchers/test_utils.py CHANGED
@@ -1,6 +1,17 @@
1
  import pytest
 
2
 
3
- from scrapling.engines.toolbelt.custom import ResponseEncoding, StatusText
 
 
 
 
 
 
 
 
 
 
4
 
5
 
6
  @pytest.fixture
@@ -122,7 +133,7 @@ def status_map():
122
 
123
 
124
  def test_parsing_content_type(content_type_map):
125
- """Test if parsing different types of content-type returns the expected result"""
126
  for header_value, expected_encoding in content_type_map.items():
127
  assert ResponseEncoding.get_value(header_value) == expected_encoding
128
 
@@ -136,3 +147,208 @@ def test_parsing_response_status(status_map):
136
  def test_unknown_status_code():
137
  """Test handling of an unknown status code"""
138
  assert StatusText.get(1000) == "Unknown Status Code"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import pytest
2
+ from pathlib import Path
3
 
4
+ from scrapling.engines.toolbelt.custom import ResponseEncoding, StatusText, Response
5
+ from scrapling.engines.toolbelt.navigation import (
6
+ construct_proxy_dict,
7
+ construct_cdp_url,
8
+ js_bypass_path
9
+ )
10
+ from scrapling.engines.toolbelt.fingerprints import (
11
+ generate_convincing_referer,
12
+ get_os_name,
13
+ generate_headers
14
+ )
15
 
16
 
17
  @pytest.fixture
 
133
 
134
 
135
  def test_parsing_content_type(content_type_map):
136
+ """Test if parsing different types of 'content-type' returns the expected result"""
137
  for header_value, expected_encoding in content_type_map.items():
138
  assert ResponseEncoding.get_value(header_value) == expected_encoding
139
 
 
147
  def test_unknown_status_code():
148
  """Test handling of an unknown status code"""
149
  assert StatusText.get(1000) == "Unknown Status Code"
150
+
151
+
152
class TestConstructProxyDict:
    """Unit tests for construct_proxy_dict."""

    def test_proxy_string_basic(self):
        """A bare proxy URL maps to a server entry with empty credentials."""
        result = construct_proxy_dict("http://proxy.example.com:8080")

        assert result == {
            "server": "http://proxy.example.com:8080",
            "username": "",
            "password": "",
        }

    def test_proxy_string_with_auth(self):
        """Credentials embedded in the URL are split out of the server part."""
        result = construct_proxy_dict("http://user:pass@proxy.example.com:8080")

        assert result == {
            "server": "http://proxy.example.com:8080",
            "username": "user",
            "password": "pass",
        }

    def test_proxy_dict_input(self):
        """A fully populated dict passes through unchanged."""
        full_dict = {
            "server": "http://proxy.example.com:8080",
            "username": "user",
            "password": "pass",
        }

        assert construct_proxy_dict(full_dict) == full_dict

    def test_proxy_dict_minimal(self):
        """Missing credential keys get filled with empty strings."""
        result = construct_proxy_dict({"server": "http://proxy.example.com:8080"})

        assert result == {
            "server": "http://proxy.example.com:8080",
            "username": "",
            "password": "",
        }

    def test_proxy_as_tuple(self):
        """as_tuple=True returns the mapping as key/value pairs."""
        result = construct_proxy_dict("http://proxy.example.com:8080", as_tuple=True)

        assert isinstance(result, tuple)
        assert dict(result)["server"] == "http://proxy.example.com:8080"

    def test_invalid_proxy_string(self):
        """A string without proper proxy structure is rejected."""
        with pytest.raises(ValueError):
            construct_proxy_dict("invalid-proxy-format")

    def test_invalid_proxy_dict(self):
        """A dict lacking the expected keys is rejected."""
        with pytest.raises(TypeError):
            construct_proxy_dict({"invalid": "structure"})
217
+
218
+
219
class TestConstructCdpUrl:
    """Unit tests for construct_cdp_url."""

    def test_basic_cdp_url(self):
        """A valid websocket URL is returned untouched."""
        url = "ws://localhost:9222/devtools/browser"
        assert construct_cdp_url(url) == url

    def test_cdp_url_with_params(self):
        """Query parameters are encoded into the resulting URL."""
        result = construct_cdp_url(
            "ws://localhost:9222/devtools/browser",
            {"timeout": "30000", "headless": "true"},
        )

        assert "timeout=30000" in result
        assert "headless=true" in result

    def test_cdp_url_without_leading_slash(self):
        """A path not starting with '/' is rejected."""
        with pytest.raises(ValueError):
            construct_cdp_url("ws://localhost:9222devtools/browser")

    def test_invalid_cdp_scheme(self):
        """Non-websocket schemes are rejected."""
        with pytest.raises(ValueError):
            construct_cdp_url("http://localhost:9222/devtools/browser")

    def test_invalid_cdp_netloc(self):
        """A URL without a host portion is rejected."""
        with pytest.raises(ValueError):
            construct_cdp_url("ws:///devtools/browser")

    def test_malformed_cdp_url(self):
        """Completely malformed input is rejected."""
        with pytest.raises(ValueError):
            construct_cdp_url("not-a-url")
254
+
255
+
256
class TestJsBypassPath:
    """Unit tests for the js_bypass_path helper."""

    def test_js_bypass_path(self):
        """The helper resolves a bundled JS file to an existing path string."""
        path_str = js_bypass_path("webdriver_fully.js")

        assert isinstance(path_str, str)
        assert path_str.endswith("webdriver_fully.js")
        assert Path(path_str).exists()

    def test_js_bypass_path_caching(self):
        """Repeated lookups of the same file yield an identical path."""
        assert js_bypass_path("webdriver_fully.js") == js_bypass_path("webdriver_fully.js")
273
+
274
+
275
class TestFingerprintFunctions:
    """Unit tests for fingerprint and header generation helpers."""

    def test_generate_convincing_referer(self):
        """The referer looks like a Google search mentioning the site."""
        referer = generate_convincing_referer("https://sub.example.com/page.html")

        assert referer.startswith("https://www.google.com/search?q=")
        assert "example" in referer

    def test_generate_convincing_referer_caching(self):
        """The same URL always produces the same referer."""
        target = "https://example.com"

        assert generate_convincing_referer(target) == generate_convincing_referer(target)

    def test_get_os_name(self):
        """OS detection yields a known platform name or None."""
        detected = get_os_name()

        assert detected is None or detected in ("linux", "macos", "windows", "ios")

    def test_generate_headers_basic(self):
        """Generated headers always carry a non-empty User-Agent."""
        headers = generate_headers()

        assert isinstance(headers, dict)
        assert "User-Agent" in headers
        assert len(headers["User-Agent"]) > 0

    def test_generate_headers_browser_mode(self):
        """Browser-mode generation still yields a User-Agent header."""
        headers = generate_headers(browser_mode=True)

        assert isinstance(headers, dict)
        assert "User-Agent" in headers
316
+
317
+
318
class TestResponse:
    """Unit tests for the Response wrapper class."""

    def test_response_creation(self):
        """All constructor arguments are exposed as attributes."""
        response = Response(
            url="https://example.com",
            content="<html><body>Test</body></html>",
            status=200,
            reason="OK",
            cookies={"session": "abc123"},
            headers={"Content-Type": "text/html"},
            request_headers={"User-Agent": "Test"},
            encoding="utf-8",
        )

        assert response.url == "https://example.com"
        assert response.status == 200
        assert response.reason == "OK"
        assert response.cookies == {"session": "abc123"}

    def test_response_with_bytes_content(self):
        """A bytes body is accepted without raising."""
        raw_body = "<html><body>Test</body></html>".encode('utf-8')

        response = Response(
            url="https://example.com",
            content=raw_body,
            status=200,
            reason="OK",
            cookies={},
            headers={},
            request_headers={},
        )

        # The object should still construct normally from bytes content.
        assert response.status == 200
tests/fetchers/test_validator.py ADDED
@@ -0,0 +1,79 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import pytest
from scrapling.engines._browsers._validators import (
    validate,
    PlaywrightConfig,
    CamoufoxConfig
)


class TestValidators:
    """Unit tests for browser configuration validators."""

    def test_playwright_config_valid(self):
        """A well-formed parameter set validates and normalizes the proxy."""
        config = validate(
            {
                "max_pages": 2,
                "headless": True,
                "timeout": 30000,
                "proxy": "http://proxy.example.com:8080",
            },
            PlaywrightConfig,
        )

        assert config.max_pages == 2
        assert config.headless is True
        assert config.timeout == 30000
        assert isinstance(config.proxy, tuple)  # Proxy string gets converted to a tuple

    def test_playwright_config_invalid_max_pages(self):
        """max_pages outside the accepted range is rejected at both ends."""
        for out_of_range in (0, 51):
            with pytest.raises(TypeError):
                validate({"max_pages": out_of_range}, PlaywrightConfig)

    def test_playwright_config_invalid_timeout(self):
        """A negative timeout is rejected."""
        with pytest.raises(TypeError):
            validate({"timeout": -1}, PlaywrightConfig)

    def test_playwright_config_invalid_cdp_url(self):
        """A malformed CDP URL is rejected."""
        with pytest.raises(TypeError):
            validate({"cdp_url": "invalid-url"}, PlaywrightConfig)

    def test_camoufox_config_valid(self):
        """A well-formed Camoufox parameter set validates cleanly."""
        config = validate(
            {
                "max_pages": 1,
                "headless": True,
                "solve_cloudflare": False,
                "timeout": 30000,
            },
            CamoufoxConfig,
        )

        assert config.max_pages == 1
        assert config.headless is True
        assert config.solve_cloudflare is False
        assert config.timeout == 30000

    def test_camoufox_config_cloudflare_timeout(self):
        """Cloudflare solving bumps a too-small timeout up to 60 seconds."""
        config = validate(
            {"solve_cloudflare": True, "timeout": 10000},  # below the required 60,000
            CamoufoxConfig,
        )

        assert config.timeout == 60000  # Should be raised to the minimum