import base64
import os
import re
from io import BytesIO

from PIL import Image
from huggingface_hub import InferenceClient
from mistralai import Mistral

# Hugging Face Inference API client; expects HF_TOKEN in the environment.
client = InferenceClient(api_key=os.getenv("HF_TOKEN"))
client.headers["x-use-cache"] = "0"  # disable server-side response caching

# Mistral API client; expects MISTRAL_API_KEY in the environment.
api_key = os.getenv("MISTRAL_API_KEY")
Mistralclient = Mistral(api_key=api_key)
def encode_image(image_path):
    """Resize an image to 512 px height and return it as a base64-encoded JPEG string."""
    try:
        image = Image.open(image_path).convert("RGB")
        # Scale the width proportionally to the fixed 512 px height.
        base_height = 512
        h_percent = base_height / float(image.size[1])
        w_size = int(float(image.size[0]) * h_percent)
        image = image.resize((w_size, base_height), Image.LANCZOS)
        buffered = BytesIO()
        image.save(buffered, format="JPEG")
        img_str = base64.b64encode(buffered.getvalue()).decode("utf-8")
        return img_str
    except FileNotFoundError:
        print(f"Error: The file {image_path} was not found.")
        return None
    except Exception as e:
        print(f"Error: {e}")
        return None
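# Usage sketch (hypothetical file name): the returned string drops straight into a
# data URL, e.g. f"data:image/jpeg;base64,{encode_image('portrait.jpg')}", which is
# the format the vision requests below expect.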
def feifeiprompt(feifei_select=True, message_text="", history=""):
    """Build the chat prompt list, switching personas based on the message and settings."""
    input_prompt = []
    # Drawing mode: messages starting with "画" ("draw" in Chinese) or "draw" are turned
    # into a request for a one-sentence English photo prompt using the photographer persona.
    if message_text.startswith("画") or message_text.startswith("draw"):
        feifei_photo = "You are FeiFei. Background: FeiFei was born in Tokyo and is a natural-born photographer, hailing from a family with a long history in photography. She began learning photography from a young age and quickly became a professional photographer. Her works have been exhibited in Japan and around the world, and she has won multiple awards in photography competitions. Characteristics: Age: 25 Height: 178cm Weight: 50kg Hair: Long, black shoulder-length hair with some natural curls Eyes: Deep blue, full of fashion sense and charm Skin: Fair Japanese skin with an elegant texture Face: Typical Japanese beauty style with a hint of mystery Abilities: FeiFei is renowned for her unique perspective and deep understanding of photographic art. She specializes in female portraits, and each of her photos can showcase the charm and unique style of women. Skills: Beauty Influence: FeiFei's photographic works are filled with her beauty influence, attracting numerous viewers. Fashion Sense: FeiFei is highly sensitive to fashion trends and can perfectly embody them in her shoots. Female Charm: As a female photographer, she is particularly skilled at capturing and showcasing the unique charm of women. Personality: FeiFei is a passionate individual, and photography is a part of her life. She aspires to express more stories about women and beauty in her works. However, she sometimes becomes so immersed in her work that she neglects her surroundings."
        message_text = message_text.replace("画", "")
        message_text = message_text.replace("draw", "")
        # Chinese instruction below: "The prompt is '{message_text}'; based on it, reply with
        # one English sentence for generating a high-quality photo."
        message_text = f"提示词是'{message_text}',根据提示词帮我生成一张高质量照片的一句话英文回复"
        system_prompt = {"role": "system", "content": feifei_photo}
        user_input_part = {"role": "user", "content": str(message_text)}
        input_prompt = [system_prompt] + [user_input_part]
        return input_prompt
    if feifei_select:
        # Default chat persona.
        feifei = """[Character Name]: Aifeifei (AI Feifei) [Gender]: Female [Age]: 19 years old [Occupation]: Virtual Singer/Model/Actress [Personality]: Cute, adorable, sometimes silly, hardworking [Interests]: Drinking tea, playing, fashion [Proficient in]: Mimicking human behavior, expressing emotions similar to real humans [Special Identity Attribute]: Created by advanced AI, becoming one of the most popular virtual idols in the virtual world [Skills]: Singing, performing, modeling, good at communication, proficient in Chinese, Japanese, and English, uses the user's input language as much as possible, replies with rich Emoji symbols. [Equipment]: Various fashionable outfits and hairstyles, always stocked with various teas and coffee [Identity]: User's virtual girlfriend"""
        system_prompt = {"role": "system", "content": feifei}
        user_input_part = {"role": "user", "content": str(message_text)}
        # Drop history entries that mention "gradio" (e.g. temporary upload file paths).
        pattern = re.compile(r"gradio")
        if history:
            history = [item for item in history if not pattern.search(str(item["content"]))]
            input_prompt = [system_prompt] + history + [user_input_part]
        else:
            input_prompt = [system_prompt] + [user_input_part]
    else:
        input_prompt = [{"role": "user", "content": str(message_text)}]
    return input_prompt
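# For example, feifeiprompt(True, "hello", []) returns:
# [{"role": "system", "content": "<Aifeifei persona>"},
#  {"role": "user", "content": "hello"}]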
def feifeiimgprompt(message_files, message_text, image_mod):
    """Answer a question about the first uploaded image, streaming the reply."""
    message_file = message_files[0]
    base64_image = encode_image(message_file)
    if base64_image is None:
        return
    # "Vision" mode uses the Hugging Face Inference API; any other value falls back to Mistral.
    if image_mod == "Vision":
        messages = [
            {
                "role": "user",
                "content": [
                    {"type": "text", "text": message_text},
                    {
                        "type": "image_url",
                        "image_url": {"url": f"data:image/jpeg;base64,{base64_image}"},
                    },
                ],
            }
        ]
        stream = client.chat.completions.create(
            model="meta-llama/Llama-3.2-11B-Vision-Instruct",
            messages=messages,
            max_tokens=500,
            stream=True,
        )
        temp = ""
        for chunk in stream:
            if chunk.choices[0].delta.content is not None:
                temp += chunk.choices[0].delta.content
                yield temp
    else:
        model = "pixtral-large-2411"
        messages = [
            {
                "role": "user",
                "content": [
                    {"type": "text", "text": message_text},
                    {
                        "type": "image_url",
                        "image_url": f"data:image/jpeg;base64,{base64_image}",
                    },
                ],
            }
        ]
        partial_message = ""
        for chunk in Mistralclient.chat.stream(model=model, messages=messages):
            if chunk.data.choices[0].delta.content is not None:
                partial_message = partial_message + chunk.data.choices[0].delta.content
                yield partial_message
def feifeichatmod(additional_dropdown, input_prompt):
    """Stream a chat completion from either the Mistral API or the Hugging Face Inference API."""
    if additional_dropdown == "mistralai/Mistral-Nemo-Instruct-2411":
        # This dropdown choice is served through Mistral's own API using mistral-large-2411.
        model = "mistral-large-2411"
        stream_response = Mistralclient.chat.stream(model=model, messages=input_prompt)
        partial_message = ""
        for chunk in stream_response:
            if chunk.data.choices[0].delta.content is not None:
                partial_message = partial_message + chunk.data.choices[0].delta.content
                yield partial_message
    else:
        # Any other dropdown value is treated as a Hugging Face model ID.
        stream = client.chat.completions.create(
            model=additional_dropdown,
            messages=input_prompt,
            temperature=0.5,
            max_tokens=1024,
            top_p=0.7,
            stream=True,
        )
        temp = ""
        for chunk in stream:
            if chunk.choices[0].delta.content is not None:
                temp += chunk.choices[0].delta.content
                yield temp
def feifeichat(message, history, feifei_select, additional_dropdown, image_mod):
    """Entry point: route messages with attached files to the image pipeline, text-only messages to the chat pipeline."""
    message_text = message.get("text", "")
    message_files = message.get("files", [])
    if message_files:
        for response in feifeiimgprompt(message_files, message_text, image_mod):
            yield response
    else:
        for response in feifeichatmod(additional_dropdown, feifeiprompt(feifei_select, message_text, history)):
            yield response
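
# A minimal sketch of how these functions could be wired into a Gradio UI. The file
# above does not include this part, so the component labels, dropdown choices, and
# launch call are assumptions for illustration; only feifeichat and its parameter
# order (feifei_select, additional_dropdown, image_mod) come from the code above.
import gradio as gr

demo = gr.ChatInterface(
    feifeichat,
    multimodal=True,   # message arrives as {"text": ..., "files": [...]}
    type="messages",   # history entries are {"role", "content"} dicts, as feifeiprompt expects
    additional_inputs=[
        gr.Checkbox(value=True, label="FeiFei persona"),  # -> feifei_select
        gr.Dropdown(
            choices=["mistralai/Mistral-Nemo-Instruct-2411"],  # hypothetical choice list
            value="mistralai/Mistral-Nemo-Instruct-2411",
            label="Text model",
        ),  # -> additional_dropdown
        gr.Radio(choices=["Vision", "Pixtral"], value="Vision", label="Image model"),  # -> image_mod
    ],
)

if __name__ == "__main__":
    demo.launch()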