whosyouragent.whosyouragent

import json
import random
from concurrent.futures import ThreadPoolExecutor
from pathlib import Path

import requests
from bs4 import BeautifulSoup


class VersionUpdater:
    def __init__(self):
        self.versions_path = Path(__file__).parent / "browserVersions.json"
        if not self.versions_path.exists():
            # Seed the cache file with an empty JSON object.
            self.versions_path.write_text(json.dumps({}))
        # Default to empty strings so update_all() can tell which scrapers never ran.
        self.firefox = self.chrome = self.safari = ""
        self.edge = self.vivaldi = self.opera = ""

    def update_firefox(self):
        try:
            url = "https://en.wikipedia.org/wiki/Firefox"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            version = (
                soup.find("table", class_="infobox-subbox")
                .find("td", class_="infobox-data")
                .text
            )
            # Trim the trailing citation marker, e.g. "112.0[1]" -> "112.0".
            version = version[: version.find("[")]
            self.firefox = version
        except Exception as e:
            print(e)
            raise Exception("Error updating firefox")

    def update_chrome(self):
        try:
            url = "https://en.wikipedia.org/wiki/Google_Chrome"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            info_boxes = soup.find_all("td", class_="infobox-data")
            version = info_boxes[8].text[: info_boxes[8].text.find("/")]
            self.chrome = version
        except Exception as e:
            print(e)
            raise Exception("Error updating chrome")

    def update_safari(self):
        try:
            url = "https://en.wikipedia.org/wiki/Safari_(web_browser)"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            info_boxes = soup.find_all("td", class_="infobox-data")
            version = info_boxes[2].text[: info_boxes[2].text.find("[")]
            self.safari = version
        except Exception as e:
            print(e)
            raise Exception("Error updating safari")

    def update_edge(self):
        try:
            url = "https://www.techspot.com/downloads/7158-microsoft-edge.html"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            version = soup.find("span", class_="subver").text
            self.edge = version
        except Exception as e:
            print(e)
            raise Exception("Error updating edge")

    def update_vivaldi(self):
        try:
            url = "https://en.wikipedia.org/wiki/Vivaldi_(web_browser)"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            info_boxes = soup.find_all("td", class_="infobox-data")
            version = info_boxes[5].text[: info_boxes[5].text.find(" ")]
            self.vivaldi = version
        except Exception as e:
            print(e)
            raise Exception("Error updating vivaldi")

    def update_opera(self):
        try:
            url = "https://en.wikipedia.org/wiki/Opera_(web_browser)"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            info_boxes = soup.find_all("td", class_="infobox-data")
            version = info_boxes[2].div.text[: info_boxes[2].div.text.find("[")]
            self.opera = version
        except Exception as e:
            print(e)
            raise Exception("Error updating Opera")

    def update_all(self):
        updaters = [
            self.update_firefox,
            self.update_chrome,
            self.update_safari,
            self.update_edge,
            self.update_vivaldi,
            self.update_opera,
        ]
        # Run all scrapers concurrently; a scraper that fails leaves its
        # attribute empty and is filtered out below.
        with ThreadPoolExecutor(6) as executor:
            for updater in updaters:
                executor.submit(updater)
        versions = {
            "Firefox": self.firefox,
            "Chrome": self.chrome,
            "Edg": self.edge,
            "Vivaldi": self.vivaldi,
            "OPR": self.opera,
            "Safari": self.safari,
        }
        # Drop any keys that failed to update so the previous version number is kept.
        versions = {
            browser: version
            for browser, version in versions.items()
            if version.replace(".", "").isnumeric()
        }
        previous_versions = json.loads(self.versions_path.read_text())
        versions = previous_versions | versions
        self.versions_path.write_text(json.dumps(versions))


platforms = [
    "(Windows NT 10.0; Win64; x64)",
    "(X11; Ubuntu; Linux x86_64)",
    "(Windows NT 11.0; Win64; x64)",
    "(Macintosh; Intel Mac OS X 13_0_0)",
]


def get_agent() -> str:
    """Build and return a user agent string."""
    browsers = json.loads((Path(__file__).parent / "browserVersions.json").read_text())
    browser = random.choice(list(browsers.keys()))
    if browser == "Safari":
        # Safari agents are only paired with the macOS platform token.
        platform = platforms[-1]
        useragent = f'Mozilla/5.0 {platform} AppleWebKit/605.1.15 (KHTML, like Gecko) Version/{browsers["Safari"]} Safari/605.1.15'
    else:
        platform = random.choice(platforms)
        if browser == "Firefox":
            # Firefox puts an rv: token inside the platform parentheses.
            platform = platform[: platform.rfind(")")] + f"; rv:{browsers[browser]})"
            useragent = (
                f"Mozilla/5.0 {platform} Gecko/20100101 Firefox/{browsers[browser]}"
            )
        else:
            # Chromium-based browsers share the Chrome token and append their own suffix.
            useragent = f'Mozilla/5.0 {platform} AppleWebKit/537.36 (KHTML, like Gecko) Chrome/{browsers["Chrome"]} Safari/537.36'
            if browser == "Edg":
                useragent += f' Edg/{browsers["Edg"]}'
            elif browser == "OPR":
                useragent += f' OPR/{browsers["OPR"]}'
            elif browser == "Vivaldi":
                useragent += f' Vivaldi/{browsers["Vivaldi"]}'
    return useragent
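
A minimal usage sketch follows. It assumes the module is importable as whosyouragent.whosyouragent (taken from this page's title); the example cache contents and the echo URL are illustrative, not part of the module.

# Minimal usage sketch; module path and example values are assumptions.
import requests

from whosyouragent.whosyouragent import VersionUpdater, get_agent

# Scrape current browser versions and cache them in browserVersions.json,
# e.g. {"Firefox": "112.0", "Chrome": "112", "Safari": "16.4", ...} (illustrative).
VersionUpdater().update_all()

# Build a randomized user agent string and send it as a request header.
headers = {"User-Agent": get_agent()}
response = requests.get("https://httpbin.org/user-agent", headers=headers)
print(response.json())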