slimshadow committed
Commit 2c86df9 · verified · 1 Parent(s): f4a8b2c

Update app.py

Files changed (1)
  1. app.py +71 -32
app.py CHANGED
@@ -1,32 +1,71 @@
-import streamlit as st
-
-def read_file(file_name):
-    with open(file_name, 'r') as file:
-        return file.readlines()
-
-def search_game(game_title, game_titles, game_urls):
-    matches = []
-    for title, url in zip(game_titles, game_urls):
-        if game_title.lower() in title.lower():
-            matches.append((title.strip(), url.strip()))
-    return matches
-
-def main():
-    st.title("Game Search")
-
-    game_titles = read_file('game_title.txt')
-    game_urls = read_file('game_url.txt')
-
-    search_query = st.text_input("Enter a game title to search:")
-
-    if st.button("Search"):
-        matches = search_game(search_query, game_titles, game_urls)
-        if matches:
-            st.write("Matches found:")
-            for title, url in matches:
-                st.write(f"Title: {title}, URL: {url}")
-        else:
-            st.write("No matches found.")
-
-if __name__ == "__main__":
-    main()
+import streamlit as st
+import requests
+from bs4 import BeautifulSoup
+
+def search_game_results(game_name):
+    search_url = f"https://gamingbeasts.com/?s={game_name}"
+    response = requests.get(search_url)
+
+    if response.status_code == 200:
+        soup = BeautifulSoup(response.text, 'html.parser')
+        game_entries = soup.find_all('div', class_='inside-article')
+
+        if not game_entries:
+            return None
+
+        results = []
+        for entry in game_entries:
+            title_elem = entry.find('h2', class_='entry-title').find('a')
+            title = title_elem.text if title_elem else "Title not found"
+            link = title_elem['href'] if title_elem else "Link not found"
+
+            categories_elem = entry.find('span', class_='cat-links')
+            categories = [category.text for category in categories_elem.find_all('a')] if categories_elem else []
+
+            results.append({
+                'title': title,
+                'link': link,
+                'categories': categories
+            })
+
+        return results
+    else:
+        return None
+
+def scrape_download_url(url):
+    response = requests.get(url)
+    if response.status_code == 200:
+        soup = BeautifulSoup(response.text, 'html.parser')
+        url_input_elem = soup.find('input', {'name': 'url'})
+
+        if url_input_elem:
+            download_url = url_input_elem['value']
+            return download_url
+    return None
+
+def get_download_urls(game_name):
+    results = search_game_results(game_name)
+
+    if isinstance(results, list):
+        download_urls = []
+        for result in results:
+            download_url = scrape_download_url(result['link'])
+            if download_url:
+                download_urls.append(download_url)
+
+        return download_urls
+    else:
+        return None
+
+st.title("Game Download URL Finder")
+
+game_name = st.text_input("Enter the name of the game:")
+if game_name:
+    download_urls = get_download_urls(game_name)
+
+    if download_urls:
+        st.subheader(f"Download URLs for '{game_name}':")
+        for i, download_url in enumerate(download_urls, start=1):
+            st.write(f"{i}. {download_url}")
+    else:
+        st.write("No download URLs found for this game.")
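Since the rewrite fetches results from gamingbeasts.com at request time, the app now depends on requests and beautifulsoup4 in addition to streamlit. Below is a minimal usage sketch, not part of the commit, showing how the new helpers compose outside the Streamlit UI; it assumes app.py is importable from the working directory, that the site's markup still matches the selectors above, and "Terraria" is just a placeholder query.

# Minimal sketch (not part of the commit): exercising the new helpers from a
# plain Python session. Importing app.py also executes its module-level
# Streamlit calls, which outside `streamlit run` should only log warnings.
from app import search_game_results, get_download_urls

results = search_game_results("Terraria")   # placeholder query; list of dicts or None
for entry in results or []:
    print(entry['title'], '->', entry['link'])

urls = get_download_urls("Terraria")        # follows each result page to its download form
for i, url in enumerate(urls or [], start=1):
    print(f"{i}. {url}")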