
Commit

Add rate limit handler
thefakequake committed Feb 8, 2021
1 parent 1a0e3fc commit 1a30af1
Showing 1 changed file with 4 additions and 9 deletions.
pypartpicker/scraper.py (13 changes: 4 additions & 9 deletions)
@@ -57,6 +57,8 @@ def __init__(self, **kwargs):
        self.rating = kwargs.get("rating")
        self.content = kwargs.get("content")

class Verification(Exception):
    pass

class Scraper:

@@ -67,20 +69,17 @@ def __init__(self, **kwargs):
        self.headers = headers_dict





    def make_soup(self, url) -> BeautifulSoup:
        # sends a request to the URL
        page = requests.get(url, headers=self.headers)
        # gets the HTML code for the website and parses it using Python's built-in HTML parser
        soup = BeautifulSoup(page.content, 'html.parser')
        if "Verification" in soup.find(class_="pageTitle").get_text():
            raise Verification("You are being rate limited by PCPartPicker!")
        # returns the HTML
        return soup




    def fetch_list(self, list_url) -> PCPPList:

        # checks if it's a PCPartPicker list and raises an exception if it's not or if the list is empty
@@ -135,8 +134,6 @@ def fetch_list(self, list_url) -> PCPPList:
        return PCPPList(parts=parts, wattage=wattage, total=total_cost, url=list_url, compatibility=compatibilitynotes)




    def part_search(self, search_term, **kwargs) -> Part:

        search_term = search_term.replace(' ', '+')
@@ -227,8 +224,6 @@ def part_search(self, search_term, **kwargs) -> Part:
        return parts[:kwargs.get("limit", 20)]




    def fetch_product(self, part_url) -> Product:

        # checks if the URL is invalid
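
The practical effect of this commit is that make_soup now raises the new Verification exception whenever PCPartPicker serves its "Verification" page instead of real results, rather than silently returning the rate limit page's HTML. A minimal sketch of how calling code might handle this, assuming Scraper and Verification are both importable from the pypartpicker package root and that a simple sleep-and-retry is acceptable (the retry count and delay are illustrative, not part of this commit):

import time

from pypartpicker import Scraper, Verification  # assumed import path for both names

scraper = Scraper()

# hypothetical back-off loop: wait and retry when PCPartPicker rate limits the request
for attempt in range(3):
    try:
        parts = scraper.part_search("ryzen 5 3600")
        break
    except Verification:
        # the scraper detected the rate limit page; pause before trying again
        time.sleep(30 * (attempt + 1))
else:
    raise RuntimeError("still rate limited by PCPartPicker after 3 attempts")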
