from langchain_huggingface import HuggingFacePipeline, ChatHuggingFace
from langchain_core.messages import BaseMessage, HumanMessage, AIMessage
from agents.supervisor import Supervisor
from agents.agent_support import create_agent
from agents.agent_node import agent_node
from agents.help_agent import HelpAgent
from agents.project_agent import ProjectAgent
from agent_system import AgentSystem
from tools.multiply_tool import multiply
import functools
import os
from uuid import uuid4

# LangSmith tracing configuration; the short unique id keeps each run's project name distinct.
unique_id = uuid4().hex[0:8]
os.environ["LANGCHAIN_TRACING_V2"] = "true"
os.environ["LANGCHAIN_PROJECT"] = f"InfiniFleetTrace-{unique_id}"
os.environ["LANGCHAIN_ENDPOINT"] = "https://api.smith.langchain.com"
os.environ["LANGCHAIN_API_KEY"] = "<your-langsmith-api-key>"  # Update to your API key

# Load the instruction-tuned Mistral model as a local text-generation pipeline.
llm = HuggingFacePipeline.from_model_id(
    model_id="mistralai/Mistral-7B-Instruct-v0.3",
    device_map="auto",
    task="text-generation",
    pipeline_kwargs={
        "max_new_tokens": 100,
        "top_k": 50,
    },
)

# Wrap the raw pipeline in a chat interface so it can consume message objects.
print("Creating chat interface")
chat_llm = ChatHuggingFace(llm=llm)
print("Done")
print("---------")

# The supervisor routes requests between the named worker agents; each worker is
# wrapped as a graph node via functools.partial.
supervisor = Supervisor(chat_llm, ["ProductHelp", "ProjectHelp", "Multiplier"])

help_agent = HelpAgent(chat_llm, "You provide help for the InfiniFleet product in general")
help_node = functools.partial(agent_node, agent=help_agent, name="ProductHelp")

project_agent = ProjectAgent(chat_llm, "Always use robot_information tool to get all required information.")
project_node = functools.partial(agent_node, agent=project_agent, name="ProjectHelp")

# Exercise the project agent directly with a single user message.
print("--project agent-------")
input_data = "Use the tool to give me information about how many robots there are in the project called 'largeProject'."
result = project_agent.invoke({"messages": [HumanMessage(input_data)]})
print(result)
print("---------")
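
# Hedged sketch, not part of the original script: exercise the ProductHelp agent the
# same way the project agent is invoked above. Assumption: HelpAgent exposes the same
# .invoke({"messages": [...]}) interface as ProjectAgent; the prompt text is illustrative.
print("--help agent (example)-------")
help_input = "Give me a short overview of what the InfiniFleet product does."
help_result = help_agent.invoke({"messages": [HumanMessage(help_input)]})
print(help_result)
print("---------")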