22# IMPORTS
33
44import re
5- import html
65import json
76from enum import Enum
87from bs4 import BeautifulSoup
@@ -43,7 +42,7 @@ def get_search_request_headers():
4342 return headers
4443
4544 @staticmethod
46- def get_search_request_data (game_name : str , search_modifiers : SearchModifiers , page : int ):
45+ def get_search_request_data (game_name : str , search_modifiers : SearchModifiers , page : int , api_key : str ):
4746 """
4847 Generate the data payload for the search request
4948 @param game_name: The name of the game to search
@@ -74,6 +73,7 @@ def get_search_request_data(game_name: str, search_modifiers: SearchModifiers, p
7473 'modifier' : search_modifiers .value ,
7574 },
7675 'users' : {
76+ 'id' : api_key ,
7777 'sortCategory' : "postcount"
7878 },
7979 'filter' : "" ,
@@ -94,12 +94,12 @@ def send_web_request(game_name: str, search_modifiers: SearchModifiers = SearchM
9494 @return: The HTML code of the research if the request returned 200(OK), None otherwise
9595 """
9696 headers = HTMLRequests .get_search_request_headers ()
97- payload = HTMLRequests .get_search_request_data (game_name , search_modifiers , page )
9897 api_key_result = HTMLRequests .send_website_request_getcode (False )
9998 if api_key_result is None :
10099 api_key_result = HTMLRequests .send_website_request_getcode (True )
100+ payload = HTMLRequests .get_search_request_data (game_name , search_modifiers , page , api_key_result )
101101 # Make the post request and return the result if is valid
102- search_url_with_key = HTMLRequests .SEARCH_URL + "/" + api_key_result
102+ search_url_with_key = HTMLRequests .SEARCH_URL
103103 resp = requests .post (search_url_with_key , headers = headers , data = payload , timeout = 60 )
104104 if resp .status_code == 200 :
105105 return resp .text
@@ -116,10 +116,10 @@ async def send_async_web_request(game_name: str, search_modifiers: SearchModifie
116116 @return: The HTML code of the research if the request returned 200(OK), None otherwise
117117 """
118118 headers = HTMLRequests .get_search_request_headers ()
119- payload = HTMLRequests .get_search_request_data (game_name , search_modifiers , page )
120119 api_key_result = await HTMLRequests .async_send_website_request_getcode (False )
121120 if api_key_result is None :
122121 api_key_result = await HTMLRequests .async_send_website_request_getcode (True )
122+ payload = HTMLRequests .get_search_request_data (game_name , search_modifiers , page , api_key_result )
123123 # Make the post request and return the result if is valid
124124 search_url_with_key = HTMLRequests .SEARCH_URL + "/" + api_key_result
125125 async with aiohttp .ClientSession () as session :
@@ -207,7 +207,7 @@ async def async_get_game_title(game_id: int):
207207 text = await resp .text ()
208208 return HTMLRequests .__cut_game_title (text )
209209 return None
210-
210+
211211 @staticmethod
212212 def send_website_request_getcode (parse_all_scripts : bool ):
213213 """
@@ -218,24 +218,24 @@ def send_website_request_getcode(parse_all_scripts: bool):
218218 headers = HTMLRequests .get_title_request_headers ()
219219 resp = requests .get (HTMLRequests .BASE_URL , headers = headers , timeout = 60 )
220220 if resp .status_code == 200 and resp .text is not None :
221- # Parse the HTML content using BeautifulSoup
222- soup = BeautifulSoup (resp .text , 'html.parser' )
223- # Find all <script> tags with a src attribute containing the substring
224- scripts = soup .find_all ('script' , src = True )
225- if parse_all_scripts :
226- matching_scripts = [script ['src' ] for script in scripts ]
227- else :
228- matching_scripts = [script ['src' ] for script in scripts if '_app-' in script ['src' ]]
229- for script_url in matching_scripts :
230- script_url = HTMLRequests .BASE_URL + script_url
231- script_resp = requests .get (script_url , headers = headers , timeout = 60 )
232- if script_resp .status_code == 200 and script_resp .text is not None :
233- pattern = r'"/api/search/".concat\( "([a-zA-Z0-9 ]+)"\) '
234- matches = re .findall (pattern , script_resp .text )
235- for match in matches :
236- return match
221+ # Parse the HTML content using BeautifulSoup
222+ soup = BeautifulSoup (resp .text , 'html.parser' )
223+ # Find all <script> tags with a src attribute containing the substring
224+ scripts = soup .find_all ('script' , src = True )
225+ if parse_all_scripts :
226+ matching_scripts = [script ['src' ] for script in scripts ]
227+ else :
228+ matching_scripts = [script ['src' ] for script in scripts if '_app-' in script ['src' ]]
229+ for script_url in matching_scripts :
230+ script_url = HTMLRequests .BASE_URL + script_url
231+ script_resp = requests .get (script_url , headers = headers , timeout = 60 )
232+ if script_resp .status_code == 200 and script_resp .text is not None :
233+ pattern = r'users\s*:\s*{\s*id\s*:\s* "([^" ]+)"'
234+ matches = re .findall (pattern , script_resp .text )
235+ for match in matches :
236+ return match
237237 return None
238-
238+
239239 @staticmethod
240240 async def async_send_website_request_getcode (parse_all_scripts : bool ):
241241 """
@@ -262,11 +262,11 @@ async def async_send_website_request_getcode(parse_all_scripts: bool):
262262 async with session .get (script_url , headers = headers ) as script_resp :
263263 if script_resp is not None and str (resp .status ) == "200" :
264264 script_resp_text = await script_resp .text ()
265- pattern = r'"/api/search/".concat\( "([a-zA-Z0-9 ]+)"\) '
265+ pattern = r'users\s*:\s*{\s*id\s*:\s* "([^" ]+)"'
266266 matches = re .findall (pattern , script_resp_text )
267267 for match in matches :
268268 return match
269269 else :
270- return None
270+ return None
271271 else :
272- return None
272+ return None