"""Streamlit app: browse football goal-clip YouTube channels, show live scores,
and ask GPT questions about them."""

import base64
import os
from datetime import datetime, timedelta  # noqa: F401 -- kept for compatibility

import requests
import streamlit as st
from bs4 import BeautifulSoup
from googleapiclient.discovery import build
from openai import OpenAI

# SECURITY NOTE(review): the original source hard-coded a YouTube Data API key and
# an OpenAI secret key ("sk-proj-...") directly in this file. Keys committed to
# source are compromised and must be revoked. Credentials are now read from the
# environment instead of being embedded here.
YOUTUBE_API_KEY = os.getenv("YOUTUBE_API_KEY", "")
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "")


def fetch_all_youtube_playlist_videos(api_key, playlist_id):
    """Fetch every item of a YouTube playlist, following pagination.

    Args:
        api_key: YouTube Data API v3 key.
        playlist_id: ID of the playlist to read (e.g. a channel's "UU..." uploads
            playlist).

    Returns:
        list of raw ``playlistItems`` resource dicts, all pages merged.
    """
    youtube = build("youtube", "v3", developerKey=api_key)
    video_items = []
    page_token = None  # None on the first request; the client drops None params
    while True:
        request = youtube.playlistItems().list(
            part="snippet",
            maxResults=50,  # API maximum per page
            playlistId=playlist_id,
            pageToken=page_token,
        )
        response = request.execute()
        video_items.extend(response.get("items", []))
        page_token = response.get("nextPageToken")
        if not page_token:  # last page reached
            return video_items


def extract_video_info(video_items):
    """Project raw playlist items to {'title', 'video_id', 'published_at'} dicts."""
    return [
        {
            "title": item["snippet"]["title"],
            "video_id": item["snippet"]["resourceId"]["videoId"],
            "published_at": item["snippet"]["publishedAt"],
        }
        for item in video_items
    ]


def set_bg_hack(main_bg_url, logo_url):
    """Set the page background image and display a logo via injected HTML/CSS.

    Both images are downloaded and inlined as base64 data URIs so Streamlit can
    render them without serving static files.

    NOTE(review): in the original source the ``st.markdown`` body was empty (its
    HTML/CSS appears to have been lost), leaving the encoded images unused. The
    markup below is a conventional reconstruction of the Streamlit "background
    hack" -- verify the styling against the intended design.
    """
    main_bg_ext = main_bg_url.split(".")[-1]
    logo_ext = logo_url.split(".")[-1]
    main_bg_encoded = base64.b64encode(requests.get(main_bg_url).content).decode()
    logo_encoded = base64.b64encode(requests.get(logo_url).content).decode()
    st.markdown(
        f"""
        <style>
        .stApp {{
            background: url("data:image/{main_bg_ext};base64,{main_bg_encoded}");
            background-size: cover;
        }}
        </style>
        <img src="data:image/{logo_ext};base64,{logo_encoded}" alt="logo" width="120">
        """,
        unsafe_allow_html=True,
    )


def get_restricted_video_link(video_id):
    """Return an autoplaying YouTube embed URL for *video_id*."""
    return f"https://www.youtube.com/embed/{video_id}?autoplay=1"


def handle_video_upload():
    """Render the add/upload-video form and acknowledge a submission."""
    st.subheader("Add and Upload Videos")
    st.write("Use this interface to add and upload videos.")
    video_title = st.text_input("Video Title")
    video_url = st.text_input("YouTube Video URL")
    if st.button("Upload Video"):
        if video_title and video_url:
            st.write(f"Video '{video_title}' added successfully! Video URL: {video_url}")
            # Here you could add logic to store the video information,
            # e.g., in a database or file.
        else:
            # Previously the click was silently ignored when a field was empty.
            st.warning("Please provide both a video title and a video URL.")


def scrape_live_scores():
    """Scrape today's live football scores from goal.com.

    Returns:
        One "home score away" line per match joined by newlines, a placeholder
        message when no matches are live, or an error message on failure.
    """
    url = "https://www.goal.com/en-tza/live-scores"
    try:
        page = requests.get(url)
        soup = BeautifulSoup(page.content, "html.parser")
        matches = soup.find_all("div", class_="match-row__data")
        live_scores = []
        for match in matches:
            home_team = match.find(
                "div", class_="match-row__team-name match-row__team-name--home"
            ).text.strip()
            away_team = match.find(
                "div", class_="match-row__team-name match-row__team-name--away"
            ).text.strip()
            score = match.find("div", class_="match-row__score").text.strip()
            live_scores.append(f"{home_team} {score} {away_team}")
        return "\n".join(live_scores) if live_scores else "No current live scores found."
    except (requests.RequestException, AttributeError) as e:
        # AttributeError: a find() returned None because the site markup changed.
        return f"Failed to retrieve data: {str(e)}"


def get_gpt_response(question, live_scores):
    """Ask GPT-3.5 a question, giving today's live scores as context.

    Args:
        question: the user's free-text question.
        live_scores: scores text (e.g. from scrape_live_scores()) supplied to
            the model as a prior user message.

    Returns:
        The model's reply, a placeholder when no content was generated, or an
        error message on failure.
    """
    # Key comes from the environment; the original hard-coded secret was removed.
    client = OpenAI(api_key=OPENAI_API_KEY)
    try:
        conversation_history = [
            {"role": "system", "content": "You are using an AI to get insights into today's live football scores."},
            {"role": "user", "content": live_scores},
            {"role": "user", "content": question},
        ]
        response = client.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=conversation_history,
            temperature=0.5,
            max_tokens=512,
        )
        if response.choices and response.choices[0].message.content:
            return response.choices[0].message.content
        return "No response generated."
    except Exception as e:
        # Boundary handler: surface any API failure as a user-visible message.
        return f"An error occurred: {str(e)}"


def main():
    """Streamlit entry point: page chrome, channel browser, live scores, upload."""
    # "resolve" (not "blob") Hugging Face URLs serve the raw file bytes; the
    # original "blob" URLs return an HTML page, not an image.
    set_bg_hack(
        "https://huggingface.co/spaces/Nkuku/FVGC/resolve/main/cover.png",
        "https://huggingface.co/spaces/Nkuku/FVGC/resolve/main/logo.jpg",
    )
    st.title("Welcome To Football Videos goals clips Channels")

    # Search box to filter playlist videos by title.
    search_input = st.text_input("Search for a match (e.g., Man City vs Arsenal):")

    # NOTE(review): the original horizontal-menu markup was empty in the source
    # (its HTML appears lost); the placeholders are preserved as-is.
    st.markdown("", unsafe_allow_html=True)
    st.markdown(" ", unsafe_allow_html=True)

    # Channel name -> uploads-playlist ("UU...") ID.
    channel_playlist_ids = {
        "Premier League": "UUG5qGWdu8nIRZqJ_GgDwQ-w",
        "Serie A": "UUBJeMCIeLQos7wacox4hmLQ",
        "Bundesliga": "UU6UL29enLNe4mqwTfAyeNuw",
        "LaLiga": "UUTv-XvfzLX3i4IGWAm4sbmA",
        "UEFA": "UUyGa1YEx9ST66rYrJTGIKOw",
        "CAF TV": "UUr5K057x3mHroPHsNk9OiwA",
    }

    selected_channel = st.sidebar.selectbox(
        "Select a Channel", list(channel_playlist_ids.keys())
    )
    st.header(f"Latest Videos from {selected_channel}")

    video_items = fetch_all_youtube_playlist_videos(
        YOUTUBE_API_KEY, channel_playlist_ids[selected_channel]
    )

    # Case-insensitive title filter; no search text means show everything.
    if search_input:
        filtered_video_items = [
            item
            for item in video_items
            if search_input.lower() in item["snippet"]["title"].lower()
        ]
    else:
        filtered_video_items = video_items

    # Newest first. publishedAt is ISO-8601, so lexicographic order is chronological.
    video_info = extract_video_info(filtered_video_items)
    sorted_video_info = sorted(video_info, key=lambda x: x["published_at"], reverse=True)

    # Show up to the 50 latest videos, alternating between two columns.
    # (The original comment said "10 latest" but the code sliced [:50].)
    col1, col2 = st.columns(2)
    for i, info in enumerate(sorted_video_info[:50]):
        restricted_video_link = get_restricted_video_link(info["video_id"])
        target_column = col1 if i % 2 == 0 else col2
        target_column.video(restricted_video_link)

    # On-demand live scores.
    if st.button("Show Live Scores"):
        st.write(scrape_live_scores())

    # Add/upload-video form.
    handle_video_upload()

    # NOTE(review): the original injected a script/markup snippet here for a
    # "Home" click handler, but its body was empty in the source; the empty
    # placeholder is preserved.
    st.markdown(
        """ """,
        unsafe_allow_html=True,
    )


if __name__ == "__main__":
    main()