-
Notifications
You must be signed in to change notification settings - Fork 1
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
1 parent
f14d3ad
commit ee78447
Showing
4 changed files
with
89 additions
and
64 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,4 +1,5 @@ | ||
myenv/ | ||
instruct.txt | ||
appfiles/data | ||
appfiles/app_data.json | ||
appfiles/app_data.json | ||
data/ |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,68 +1,51 @@ | ||
import requests | ||
from bs4 import BeautifulSoup | ||
import json | ||
import time | ||
import os | ||
|
||
def fetch_data(url):
    """Fetch and parse one application's metadata from an HTML page.

    Parameters:
        url: Address of a page whose markup carries the app fields in
             elements with ids appName/iconUrl/version/repoUrl/mainFile.

    Returns:
        A dict with keys 'app_name', 'icon_url', 'version', 'repo_url'
        and 'main_file' (stripped text), or None when the page is
        unreachable, returns a non-200 status, or lacks any field.
    """
    # timeout prevents the poller from hanging forever on a dead host.
    response = requests.get(url, timeout=10)
    if response.status_code != 200:
        print(f"Failed to fetch data from {url}. Status code: {response.status_code}")
        return None

    soup = BeautifulSoup(response.content, 'html.parser')
    fields = {
        'app_name': soup.find('h1', {'id': 'appName'}),
        'icon_url': soup.find('h2', {'id': 'iconUrl'}),
        'version': soup.find('h2', {'id': 'version'}),
        'repo_url': soup.find('h2', {'id': 'repoUrl'}),
        'main_file': soup.find('h2', {'id': 'mainFile'}),
    }
    # Guard against missing elements: calling .text on a failed find()
    # would raise AttributeError. Treat a malformed page like a missing one.
    if not all(fields.values()):
        print(f"Page at {url} is missing expected elements; skipping.")
        return None

    return {key: tag.text.strip() for key, tag in fields.items()}
|
||
def check_for_updates(max_apps=99):
    """Download metadata for sequentially numbered application pages.

    Pages are probed as 00001.html, 00002.html, ... up to *max_apps*;
    each successful fetch is written to data/<num>.json. Probing stops
    at the first page that fails, on the assumption that app numbers
    are contiguous.

    Parameters:
        max_apps: Highest page number to probe (default 99, matching
                  the original hard-coded limit).
    """
    url_base = 'https://korrykatti.github.io/thapps/apps/'
    data_folder = 'data'

    # exist_ok avoids the check-then-create race of exists() + makedirs().
    os.makedirs(data_folder, exist_ok=True)

    for i in range(1, max_apps + 1):
        url = f"{url_base}{i:05d}.html"  # pad number with leading zeros
        data = fetch_data(url)
        if data is None:
            # First miss means no more apps (contiguous numbering assumed).
            break
        with open(f"{data_folder}/{i:05d}.json", 'w') as f:
            json.dump(data, f, indent=4)
|
||
def main():
    """Poll for application updates indefinitely, once per minute."""
    poll_interval_seconds = 60
    while True:
        check_for_updates()
        time.sleep(poll_interval_seconds)
|
||
# Run the updater only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters