Artificial-superintelligence committed
Commit 5add52e · verified · 1 Parent(s): a4ea8a9

Update app.py

Files changed (1)
  1. app.py +70 -28
app.py CHANGED
@@ -1,29 +1,71 @@
  import streamlit as st
- from selenium import webdriver
- from selenium.webdriver.chrome.service import Service
- from selenium.webdriver.common.by import By
- from webdriver_manager.chrome import ChromeDriverManager
- from selenium.webdriver.chrome.options import Options
-
- def setup_chrome():
-     chrome_options = Options()
-     chrome_options.add_argument("--headless")  # Run in headless mode
-     chrome_options.add_argument("--no-sandbox")  # Required for cloud environments
-     chrome_options.add_argument("--disable-dev-shm-usage")  # Avoid shared memory issues
-     chrome_options.add_argument("--disable-gpu")  # Disable GPU acceleration
-
-     # Initialize WebDriver
-     driver = webdriver.Chrome(service=Service(ChromeDriverManager().install()), options=chrome_options)
-     return driver
-
- def main():
-     st.title("Selenium in Streamlit Cloud")
-
-     if st.button("Run Chrome"):
-         driver = setup_chrome()
-         driver.get("https://www.google.com")
-         st.write("Page title:", driver.title)
-         driver.quit()
-
- if __name__ == "__main__":
-     main()
  import streamlit as st
+ from googlesearch import search
+ import requests
+ from bs4 import BeautifulSoup
+
+ # Function to fetch search results
+ def fetch_search_results(query, num_results=5):
+     results = []
+     try:
+         for url in search(query, num=num_results, stop=num_results, pause=2):
+             results.append(url)
+     except Exception as e:
+         st.error(f"Error fetching search results: {e}")
+     return results
+
+ # Function to fetch website preview
+ def fetch_website_preview(url):
+     try:
+         response = requests.get(url, timeout=5)
+         soup = BeautifulSoup(response.content, "html.parser")
+         title = soup.title.string if soup.title else "No title"
+         return title, url
+     except Exception as e:
+         return "Error loading website", None
+
+ # Mask IP address using proxy
+ def secure_request(url, proxy=None):
+     proxies = {"http": proxy, "https": proxy} if proxy else None
+     try:
+         response = requests.get(url, proxies=proxies, timeout=5)
+         return response.status_code, response.content
+     except Exception as e:
+         return None, str(e)
+
+ # Streamlit app starts here
+ st.title("Secure Chrome Search with IP Masking")
+ st.write("Search the web securely, view results, and mask your IP address.")
+
+ # User input
+ query = st.text_input("Enter search query:", "")
+
+ # Search results
+ if query:
+     st.write("Fetching search results...")
+     results = fetch_search_results(query)
+     if results:
+         st.write("Top Results:")
+         for idx, url in enumerate(results):
+             st.markdown(f"{idx+1}. [Visit Site]({url})")
+
+         # Select website to preview
+         selected_url = st.selectbox("Select a website to preview:", results)
+         if selected_url:
+             st.write("Website Preview:")
+             title, preview_url = fetch_website_preview(selected_url)
+             st.write(f"**Title:** {title}")
+             st.markdown(f"[Visit Site in Browser]({preview_url})")
+     else:
+         st.write("No results found.")
+
+ # Mask IP section
+ proxy = st.text_input("Enter proxy (e.g., http://<proxy-ip>:<port>):", "")
+ if proxy:
+     test_url = st.text_input("Enter URL to test secure connection:", "http://httpbin.org/ip")
+     if st.button("Test Secure Connection"):
+         status, response = secure_request(test_url, proxy=proxy)
+         if status:
+             st.success(f"Response Status: {status}")
+             st.write(response.decode("utf-8"))
+         else:
+             st.error(f"Error: {response}")