whosyouragent.whosyouragent
Helper module that scrapes current browser version numbers and builds randomized user agent strings from them. Module source:

import json
import random
from concurrent.futures import ThreadPoolExecutor
from pathlib import Path

import requests
from bs4 import BeautifulSoup


class VersionUpdater:
    def __init__(self):
        self.versions_path = Path(__file__).parent / "browserVersions.json"

    def update_firefox(self):
        try:
            url = "https://en.wikipedia.org/wiki/Firefox"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            version = (
                soup.find("table", class_="infobox-subbox")
                .find("td", class_="infobox-data")
                .text
            )
            # Trim everything from the first "[" on, e.g. a trailing citation marker.
            version = version[: version.find("[")]
            self.firefox = version
        except Exception as e:
            print(e)
            raise Exception("Error updating firefox")

    def update_chrome(self):
        try:
            url = "https://en.wikipedia.org/wiki/Google_Chrome"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            info_boxes = soup.find_all("td", class_="infobox-data")
            version = info_boxes[7].text[: info_boxes[7].text.find("/")]
            self.chrome = version
        except Exception as e:
            print(e)
            raise Exception("Error updating chrome")

    def update_safari(self):
        try:
            url = "https://en.wikipedia.org/wiki/Safari_(web_browser)"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            info_boxes = soup.find_all("td", class_="infobox-data")
            version = info_boxes[2].text[: info_boxes[2].text.find("[")]
            self.safari = version
        except Exception as e:
            print(e)
            raise Exception("Error updating safari")

    def update_edge(self):
        try:
            url = "https://www.techspot.com/downloads/7158-microsoft-edge.html"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            version = soup.find("span", class_="subver").text
            self.edge = version
        except Exception as e:
            print(e)
            raise Exception("Error updating edge")

    def update_vivaldi(self):
        try:
            url = "https://en.wikipedia.org/wiki/Vivaldi_(web_browser)"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            info_boxes = soup.find_all("td", class_="infobox-data")
            version = info_boxes[5].text[: info_boxes[5].text.find(" ")]
            self.vivaldi = version
        except Exception as e:
            print(e)
            raise Exception("Error updating vivaldi")

    def update_opera(self) -> str:
        try:
            url = "https://en.wikipedia.org/wiki/Opera_(web_browser)"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            info_boxes = soup.find_all("td", class_="infobox-data")
            version = info_boxes[2].div.text[: info_boxes[2].div.text.find("[")]
            self.opera = version
        except Exception as e:
            print(e)
            raise Exception("Error updating Opera")

    def update_all(self):
        updaters = [
            self.update_firefox,
            self.update_chrome,
            self.update_safari,
            self.update_edge,
            self.update_vivaldi,
            self.update_opera,
        ]
        # Scrape every browser concurrently.
        with ThreadPoolExecutor(6) as executor:
            for updater in updaters:
                executor.submit(updater)
        versions = {
            "Firefox": self.firefox,
            "Chrome": self.chrome,
            "Edg": self.edge,
            "Vivaldi": self.vivaldi,
            "OPR": self.opera,
            "Safari": self.safari,
        }
        # Sanity check: a valid version string contains only digits and dots.
        for version in versions:
            if not ((versions[version]).replace(".", "")).isnumeric():
                raise ValueError(
                    f"Scraped result for {version} is incorrect: {versions[version]}"
                )
        self.versions_path.write_text(json.dumps(versions))


# Platform fragments used to assemble user agent strings.
platforms = [
    "(Windows NT 10.0; Win64; x64)",
    "(x11; Ubuntu; Linux x86_64)",
    "(Windows NT 11.0; Win64; x64)",
    "(Macintosh; Intel Mac OS X 13_0_0)",
]


def get_agent() -> str:
    """Build and return a randomized user agent string."""
    # Load the cached browser versions written by VersionUpdater.update_all().
    browsers = json.loads((Path(__file__).parent / "browserVersions.json").read_text())
    browser = random.choice(list(browsers.keys()))
    if browser == "Safari":
        # Safari agents are always paired with the macOS platform string.
        platform = platforms[-1]
        useragent = f'Mozilla/5.0 {platform} AppleWebKit/605.1.15 (KHTML, like Gecko) Version/{browsers["Safari"]} Safari/605.1.15'
    else:
        platform = random.choice(platforms)
        if browser == "Firefox":
            # Firefox embeds its version in the platform segment as rv:<version>.
            platform = platform[: platform.rfind(")")] + f"; rv:{browsers[browser]})"
            useragent = (
                f"Mozilla/5.0 {platform} Gecko/20100101 Firefox/{browsers[browser]}"
            )
        else:
            # Chromium-based browsers share the Chrome base string, plus an
            # extra token for Edge, Opera, and Vivaldi.
            useragent = f'Mozilla/5.0 {platform} AppleWebKit/537.36 (KHTML, like Gecko) Chrome/{browsers["Chrome"]} Safari/537.36'
            if browser == "Edg":
                useragent += f' Edg/{browsers["Edg"]}'
            elif browser == "OPR":
                useragent += f' OPR/{browsers["OPR"]}'
            elif browser == "Vivaldi":
                useragent += f' Vivaldi/{browsers["Vivaldi"]}'
    return useragent
class VersionUpdater:

Scrapes the current release version of each supported browser (Firefox, Chrome, Safari, Edge, Vivaldi, Opera) and saves the results to browserVersions.json.
def update_firefox(self):

Scrape the current Firefox version from the Wikipedia Firefox article and store it on self.firefox.
def update_chrome(self):

Scrape the current Chrome version from the Wikipedia Google Chrome article and store it on self.chrome.
def update_safari(self):

Scrape the current Safari version from the Wikipedia Safari article and store it on self.safari.
def update_edge(self):

Scrape the current Edge version from TechSpot's Microsoft Edge download page and store it on self.edge.
def update_vivaldi(self):

Scrape the current Vivaldi version from the Wikipedia Vivaldi article and store it on self.vivaldi.
def update_opera(self) -> str:

Scrape the current Opera version from the Wikipedia Opera article and store it on self.opera.
def update_all(self):

Run all of the update_* scrapers concurrently, validate that each result looks like a version number, and write the collected versions to browserVersions.json.
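A minimal usage sketch for refreshing the cache, assuming the module path shown above and network access to the scraped pages; the file contents in the comment are illustrative, not current versions:

from whosyouragent.whosyouragent import VersionUpdater

updater = VersionUpdater()
# Scrapes all six browsers in parallel and rewrites browserVersions.json.
updater.update_all()
print(updater.versions_path.read_text())
# e.g. {"Firefox": "115.0", "Chrome": "115.0.5790.110", "Edg": "115.0.1901.183",
#       "Vivaldi": "6.1.3035.111", "OPR": "100.0.4815.54", "Safari": "16.5"}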
def get_agent() -> str:
Build and return a randomized user agent string using the cached browser versions.
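A minimal usage sketch, assuming browserVersions.json is already populated; the request URL is a placeholder and the printed agent is one illustrative possibility:

import requests

from whosyouragent.whosyouragent import get_agent

# Each call picks a random browser and platform from the cached versions.
headers = {"User-Agent": get_agent()}
response = requests.get("https://example.com", headers=headers)
print(headers["User-Agent"])
# e.g. Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:115.0) Gecko/20100101 Firefox/115.0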