diff --git a/.github/prtester.py b/.github/prtester.py
index 3d7dae99..5a0814e1 100644
--- a/.github/prtester.py
+++ b/.github/prtester.py
@@ -18,14 +18,14 @@ class Instance:
     name = ''
     url = ''
 
-def main(instances: Iterable[Instance], with_upload: bool, with_reduced_upload: bool, title: str, output_file: str):
+def main(instances: Iterable[Instance], with_upload: bool, with_reduced_upload: bool, termpad_instance: str, title: str, output_file: str):
     start_date = datetime.now()
     table_rows = []
     for instance in instances:
-        page = requests.get(instance.url) # Use python requests to grab the rss-bridge main page
+        page = requests.get(url=instance.url, timeout=10) # Use python requests to grab the rss-bridge main page
         soup = BeautifulSoup(page.content, "html.parser") # use bs4 to turn the page into soup
         bridge_cards = soup.select('.bridge-card') # get a soup-formatted list of all bridges on the rss-bridge page
-        table_rows += testBridges(instance, bridge_cards, with_upload, with_reduced_upload) # run the main scraping code with the list of bridges
+        table_rows += testBridges(instance, bridge_cards, with_upload, with_reduced_upload, termpad_instance) # run the main scraping code with the list of bridges
     with open(file=output_file, mode='w+', encoding='utf-8') as file:
         table_rows_value = '\n'.join(sorted(table_rows))
         file.write(f'''
@@ -37,7 +37,7 @@ def main(instances: Iterable[Instance], with_upload: bool, with_reduced_upload:
 *last change: {start_date.strftime("%A %Y-%m-%d %H:%M:%S")}*
     '''.strip())
 
-def testBridges(instance: Instance, bridge_cards: Iterable, with_upload: bool, with_reduced_upload: bool) -> Iterable:
+def testBridges(instance: Instance, bridge_cards: Iterable, with_upload: bool, with_reduced_upload: bool, termpad_instance: str) -> Iterable:
     instance_suffix = ''
     if instance.name:
         instance_suffix = f' ({instance.name})'
@@ -116,7 +116,7 @@ def testBridges(instance: Instance, bridge_cards: Iterable, with_upload: bool, w
                 'format': 'Html',
             })
             request_url = f'{instance.url}/?{urllib.parse.urlencode(context_parameters)}'
-            response = requests.get(request_url)
+            response = requests.get(url=request_url, timeout=60)
             page_text = response.text.replace('
','