# Client implementation goes here
- from typing import Any, Dict, Optional, Callable
+ import uuid as _uuid
+ from typing import Any, Callable, Dict, Optional
+ from urllib.parse import urlparse

import requests
import urllib3
from pydantic import BaseModel
from requests.exceptions import RequestException
- from urllib.parse import urlparse
- import uuid as _uuid

from scrapegraph_py.config import API_BASE_URL, DEFAULT_HEADERS
from scrapegraph_py.exceptions import APIError
)
from scrapegraph_py.models.crawl import CrawlRequest, GetCrawlRequest
from scrapegraph_py.models.feedback import FeedbackRequest
- from scrapegraph_py.models.scrape import GetScrapeRequest, ScrapeRequest
from scrapegraph_py.models.markdownify import GetMarkdownifyRequest, MarkdownifyRequest
+ from scrapegraph_py.models.scrape import GetScrapeRequest, ScrapeRequest
from scrapegraph_py.models.searchscraper import (
    GetSearchScraperRequest,
    SearchScraperRequest,
@@ -255,13 +255,13 @@ def new_id(prefix: str) -> str:
        # Generic fallback
        return {"status": "mock", "url": url, "method": method, "kwargs": kwargs}

-     def markdownify(self, website_url: str, headers: Optional[dict[str, str]] = None):
+     def markdownify(self, website_url: str, headers: Optional[dict[str, str]] = None, mock: bool = False):
        """Send a markdownify request"""
        logger.info(f"🔍 Starting markdownify request for {website_url}")
        if headers:
            logger.debug("🔧 Using custom headers")

-         request = MarkdownifyRequest(website_url=website_url, headers=headers)
+         request = MarkdownifyRequest(website_url=website_url, headers=headers, mock=mock)
        logger.debug("✅ Request validation passed")

        result = self._make_request(
@@ -287,6 +287,7 @@ def scrape(
        website_url: str,
        render_heavy_js: bool = False,
        headers: Optional[dict[str, str]] = None,
+         mock: bool = False,
    ):
        """Send a scrape request to get HTML content from a website

@@ -304,6 +305,7 @@ def scrape(
            website_url=website_url,
            render_heavy_js=render_heavy_js,
            headers=headers,
+             mock=mock
        )
        logger.debug("✅ Request validation passed")
@@ -335,6 +337,8 @@ def smartscraper(
        output_schema: Optional[BaseModel] = None,
        number_of_scrolls: Optional[int] = None,
        total_pages: Optional[int] = None,
+         mock: bool = False,
+         plain_text: bool = False
    ):
        """Send a smartscraper request with optional pagination support and cookies"""
        logger.info("🔍 Starting smartscraper request")
@@ -361,6 +365,8 @@ def smartscraper(
            output_schema=output_schema,
            number_of_scrolls=number_of_scrolls,
            total_pages=total_pages,
+             mock=mock,
+             plain_text=plain_text,
        )
        logger.debug("✅ Request validation passed")
@@ -420,6 +426,7 @@ def searchscraper(
        num_results: Optional[int] = 3,
        headers: Optional[dict[str, str]] = None,
        output_schema: Optional[BaseModel] = None,
+         mock: bool = False
    ):
        """Send a searchscraper request

@@ -443,6 +450,7 @@ def searchscraper(
            num_results=num_results,
            headers=headers,
            output_schema=output_schema,
+             mock=mock
        )
        logger.debug("✅ Request validation passed")
@@ -547,6 +555,7 @@ def agenticscraper(
        user_prompt: Optional[str] = None,
        output_schema: Optional[Dict[str, Any]] = None,
        ai_extraction: bool = False,
+         mock: bool = False,
    ):
        """Send an agentic scraper request to perform automated actions on a webpage

@@ -573,6 +582,7 @@ def agenticscraper(
            user_prompt=user_prompt,
            output_schema=output_schema,
            ai_extraction=ai_extraction,
+             mock=mock
        )
        logger.debug("✅ Request validation passed")

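A minimal usage sketch of the new flags. It assumes the package exposes a Client class constructed with an API key and that smartscraper keeps its existing user_prompt/website_url parameters; neither is shown in this diff, and only the mock and plain_text flags are confirmed by the changes above.

# Sketch only: Client constructor and the smartscraper prompt/url arguments
# are assumptions; the mock/plain_text flags come from this diff.
from scrapegraph_py.client import Client

client = Client(api_key="sgai-xxxxxxxx")  # hypothetical key

# mock=True is threaded into MarkdownifyRequest, so the client can presumably
# return the generic {"status": "mock", ...} payload instead of calling the API.
md = client.markdownify(website_url="https://example.com", mock=True)

# smartscraper additionally gains plain_text alongside mock.
result = client.smartscraper(
    user_prompt="Extract the page title",   # assumed existing parameter
    website_url="https://example.com",      # assumed existing parameter
    mock=True,
    plain_text=True,
)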