import os

import gradio as gr
from openai import OpenAI
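
# Gradio demo with three stacked panels:
#   1. LinkedIn profile search via Perplexity's online Sonar model
#   2. Chatbot that drafts a short introductory email (GPT-4)
#   3. Chatbot that drafts a company-introduction email (GPT-3.5)
# The profile found in panel 1 is passed to both chatbots as drafting context.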

# Configure your API keys.
# Here they are read from environment variables; set OPENAI_API_KEY and
# PERPLEXITY_API_KEY in your shell before launching the app.
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
PERPLEXITY_API_KEY = os.environ.get("PERPLEXITY_API_KEY")

# Setup API clients
client = OpenAI(api_key=OPENAI_API_KEY)
perplexity_client = OpenAI(api_key=PERPLEXITY_API_KEY, base_url="https://api.perplexity.ai")
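# The Perplexity API speaks the OpenAI chat-completions protocol, which is why
# the same OpenAI client class can be pointed at it via base_url.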


def search_linkedin_person(name, company):
    """Search for a person on LinkedIn via Perplexity API."""
    query = f"fine this person {name} at {company}, try LinkedIn"
    try:
        messages = [
            {"role": "system", "content": "You are an AI assistant. Provide summary of the person."},
            {"role": "user", "content": query}
        ]
        response = perplexity_client.chat.completions.create(
            model="llama-3.1-sonar-large-128k-online",
            messages=messages,
        )
        return response.choices[0].message.content
    except Exception as e:
        return f"Error searching: {str(e)}"


def create_multi_block_app():
    with gr.Blocks() as demo:
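        # gr.Blocks composes several independent components on one page;
        # each gr.Column(variant="panel") below renders as its own card.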
        # Block 1: LinkedIn Search
        with gr.Column(variant="panel"):
            gr.Markdown("## LinkedIn Profile Search")
            with gr.Row():
                name_input = gr.Textbox(label="Person's Name", placeholder="Enter the name")
                company_input = gr.Textbox(label="Company", placeholder="Enter the company")

            search_btn = gr.Button("Search Profile")
            profile_output = gr.Textbox(label="LinkedIn Profile Info", interactive=False)
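            # profile_output is also wired into both email chatbots below,
            # so the search result becomes context for drafting the emails.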

            search_btn.click(
                fn=search_linkedin_person,
                inputs=[name_input, company_input],
                outputs=profile_output
            )

        # Block 2: Introductory Email Chatbot
        with gr.Column(variant="panel"):
            gr.Markdown("## 1 Email Chatbot")

            # Create Chatbot and Input Elements
            intro_chatbot = gr.Chatbot(label="Intro Email Generation")
            intro_msg_input = gr.Textbox(label="Your Message", placeholder="Type your message here...")
            intro_submit_btn = gr.Button("Send")

            # Define the Chatbot Conversation Function
            def intro_email_conversation(message, history, profile_info):
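                # `history` arrives as the Chatbot's list of [user_message, assistant_reply]
                # pairs (Gradio's tuple-style history); it is converted below into the
                # role/content message list that the OpenAI API expects.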
                try:
                    # Format the conversation history for the OpenAI API
                    formatted_history = []
                    for user_msg, assistant_msg in history:
                        formatted_history.append({"role": "user", "content": user_msg})
                        formatted_history.append({"role": "assistant", "content": assistant_msg})

                    # Add the current user message to the conversation
                    formatted_history.append({"role": "user", "content": message})

                    # Add profile info to guide the assistant
                    system_message = {
                        "role": "system",
                        "content": (
                            f"You are an AI assistant helping to draft a professional email"
                            f"to the following individual: {profile_info}. Make it short and engaging."
                        )
                    }

                    # Make a request to the OpenAI API
                    response = client.chat.completions.create(
                        model="gpt-4",
                        messages=[system_message] + formatted_history
                    )

                    # Extract the AI's response
                    ai_response = response.choices[0].message.content

                    # Append the new message-response pair to the history
                    history.append([message, ai_response])

                    return history, ""  # Clear the input box
                except Exception as e:
                    # Handle exceptions gracefully and append the error message
                    history.append([message, f"Error: {str(e)}"])
                    return history, ""

            # Set up the button click behavior
            intro_submit_btn.click(
                fn=intro_email_conversation,
                inputs=[intro_msg_input, intro_chatbot, profile_output],
                outputs=[intro_chatbot, intro_msg_input]
            )


        # Block 3: Company Introduction Email Chatbot
        with gr.Column(variant="panel"):
            gr.Markdown("## Email Chatbot 2: Company Introduction Email")

            # Create Chatbot and Input Elements
            company_chatbot = gr.Chatbot(label="Company Intro Email Generation")
            company_msg_input = gr.Textbox(label="Your Message", placeholder="Type your message here...")
            company_submit_btn = gr.Button("Send")

            # Define the Chatbot Conversation Function
            def company_email_conversation(message, history, profile_info):
                try:
                    # Format the conversation history for the OpenAI API
                    formatted_history = []
                    for user_msg, assistant_msg in history:
                        formatted_history.append({"role": "user", "content": user_msg})
                        formatted_history.append({"role": "assistant", "content": assistant_msg})

                    # Add the current user message to the conversation
                    formatted_history.append({"role": "user", "content": message})

                    # Add profile info to guide the assistant
                    system_message = {
                        "role": "system",
                        "content": (
                            f"You are an AI assistant helping to draft a professional email"
                            f"to the following individual: {profile_info}. introduce my semicondoctor company."
                        )
                    }

                    # Make a request to the OpenAI API
                    response = client.chat.completions.create(
                        model="gpt-3.5-turbo",
                        messages=[system_message] + formatted_history
                    )

                    # Extract the AI's response
                    ai_response = response.choices[0].message.content

                    # Append the new message-response pair to the history
                    history.append([message, ai_response])

                    return history, ""  # Clear the input box
                except Exception as e:
                    # Handle exceptions gracefully and append the error message
                    history.append([message, f"Error: {str(e)}"])
                    return history, ""

            # Set up the button click behavior
            company_submit_btn.click(
                fn=company_email_conversation,
                inputs=[company_msg_input, company_chatbot, profile_output],
                outputs=[company_chatbot, company_msg_input]
            )

    return demo

if __name__ == "__main__":
    app = create_multi_block_app()
    # app.launch()
    app.launch(share=True)  # Share your demo with just 1 extra parameter 🚀
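    # share=True exposes the app through a temporary public *.gradio.live URL;
    # drop the argument to serve it locally only (http://127.0.0.1:7860 by default).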