@@ -59,26 +59,20 @@ def scrape_problem(self, problem_slug):
 
         try:
             response = requests.post(self.graphql_url, headers=self.headers, json=query)
-
             if response.status_code != 200:
                 print(f"Failed to fetch problem: {problem_slug}. Status code: {response.status_code}")
                 return None
-
             data = response.json()
-            question = data.get('data', {}).get('question', {})
-
-            if not question:
-                print(f"No data found for problem: {problem_slug}")
+            print(f"[DEBUG] Raw API response for {problem_slug}: {json.dumps(data, indent=2)}")
+            question = data.get('data', {}).get('question', None)
+            if question is None:
+                print(f"No question data found for problem: {problem_slug}. Response structure may have changed or the slug is invalid.")
                 return None
-
             # Process the problem data
             problem_data = self._process_problem_data(question)
-
             # Save the problem data
             self._save_problem_data(problem_slug, problem_data)
-
             return problem_data
-
         except Exception as e:
             print(f"Error scraping problem {problem_slug}: {str(e)}")
             return None
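
Note on the first hunk: it swaps the truthiness check (if not question:) for an explicit is None test and dumps the raw response while debugging. The distinction matters because not question would also reject a present-but-empty question object, whereas defaulting to None cleanly separates "key missing or null" from "present but empty". A minimal sketch of the response shapes the new guard distinguishes (the payloads are illustrative assumptions, not captured from the LeetCode API):

    ok = {"data": {"question": {"title": "Two Sum"}}}   # normal response
    bad_slug = {"data": {"question": None}}             # unknown slug: question is null
    error = {"errors": [{"message": "Query error"}]}    # GraphQL error: no "data" key

    for resp in (ok, bad_slug, error):
        question = resp.get('data', {}).get('question', None)
        print(question is None)  # False, True, True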
@@ -208,7 +202,10 @@ def _process_problem_data(self, question):
         problem_data['code_snippets'] = code_snippets
 
         # Extract solution content if available
-        solution_content = question.get('solution', {}).get('content')
+        solution = question.get('solution')
+        solution_content = None
+        if solution and isinstance(solution, dict):
+            solution_content = solution.get('content')
         if solution_content:
             solution_soup = BeautifulSoup(solution_content, 'html.parser')
             problem_data['solution'] = solution_soup.get_text(strip=True)
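
Note on this hunk: it fixes a latent crash. question.get('solution', {}) only falls back to {} when the 'solution' key is absent; the API can return the key with an explicit null value (e.g., for a problem without an official editorial; this trigger is an assumption), in which case the old chained .get('content') was called on None and raised AttributeError. A quick demonstration of the failure mode and the new guard:

    question = {"solution": None}  # key present, value null

    # Old chain: .get('solution', {}) returns None here, so the second .get raises
    try:
        question.get('solution', {}).get('content')
    except AttributeError as e:
        print(e)  # 'NoneType' object has no attribute 'get'

    # New guard: degrades to None instead of raising
    solution = question.get('solution')
    solution_content = solution.get('content') if solution and isinstance(solution, dict) else None
    print(solution_content)  # None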
@@ -260,7 +257,7 @@ def scrape_problem_list(self, limit=10):
 
 if __name__ == "__main__":
     scraper = LeetCodeScraper()
-    problem_data = scraper.scrape_problem("longest-strictly-increasing-or-strictly-decreasing-subarray")
+    problem_data = scraper.scrape_problem("list-the-products-ordered-in-a-period")
     print(json.dumps(problem_data, indent=2))
     # Option 2: Scrape multiple problems from the list
     # problem_list = scraper.scrape_problem_list(limit=5)
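
Note on the last hunk: it only changes the smoke-test slug, presumably to exercise the new guards against a database-style problem whose payload differs from an algorithm problem's. Since scrape_problem returns None on any failure, a defensive variant of the commented-out Option 2 could iterate over slugs like this (slug list illustrative; the sketch relies only on the return contract visible in the first hunk):

    for slug in ("two-sum", "list-the-products-ordered-in-a-period"):
        data = scraper.scrape_problem(slug)
        if data is None:
            print(f"Skipping {slug}: scrape failed")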