"""Smolagents CodeAgent application with Langfuse tracing and a Gradio UI.

This module intentionally performs its setup at import time (the common
pattern for Hugging Face Spaces apps, where the hosting runtime imports the
module and expects ``agent`` to exist). Import-time side effects:

* reads Langfuse credentials from the environment — raises ``KeyError``
  (fail fast) if a required key is missing,
* creates a Langfuse client and verifies authentication,
* globally instruments smolagents via OpenInference,
* loads prompt templates from ``prompts.yaml`` next to this file,
* builds the model, tools, and the ``agent`` instance.
"""

import os

import yaml
from langfuse import get_client
from openinference.instrumentation.smolagents import SmolagentsInstrumentor
from smolagents import CodeAgent, InferenceClientModel

from Gradio_UI import GradioUI
from tools.final_answer import FinalAnswerTool as FinalAnswer
from tools.suggest_menu import SimpleTool as SuggestMenu
from tools.web_search import DuckDuckGoSearchTool as WebSearch

# Required credentials: os.environ[...] raises KeyError immediately if unset,
# which is the desired fail-fast behavior for a deployed app.
LANGFUSE_PUBLIC_KEY = os.environ["LANGFUSE_PUBLIC_KEY"]
LANGFUSE_SECRET_KEY = os.environ["LANGFUSE_SECRET_KEY"]
LANGFUSE_HOST = os.environ.get("LANGFUSE_HOST", "https://cloud.langfuse.com")

# The Langfuse client picks its configuration up from the environment
# variables validated above.
langfuse = get_client()
if langfuse.auth_check():
    print("Langfuse client is authenticated and ready!")
else:
    print("Authentication failed. Please check your credentials and host.")

# Route smolagents telemetry through OpenInference into Langfuse.
# NOTE(review): this instruments globally even when auth_check() failed —
# presumably acceptable best-effort tracing; confirm if hard-fail is wanted.
SmolagentsInstrumentor().instrument()

# Get current directory path (used to locate prompts.yaml regardless of CWD).
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))

model = InferenceClientModel(
    model_id='Qwen/Qwen3-Next-80B-A3B-Thinking',
)

suggest_menu = SuggestMenu()
web_search = WebSearch()
final_answer = FinalAnswer()

# Prompt templates for the agent live next to this file.
with open(os.path.join(CURRENT_DIR, "prompts.yaml"), 'r') as stream:
    prompt_templates = yaml.safe_load(stream)

agent = CodeAgent(
    model=model,
    tools=[suggest_menu, web_search, final_answer],
    managed_agents=[],
    max_steps=20,
    verbosity_level=1,
    planning_interval=None,
    name=None,
    description=None,
    executor_type='local',
    executor_kwargs={},
    max_print_outputs_length=None,
    prompt_templates=prompt_templates,
)

if __name__ == "__main__":
    GradioUI(agent).launch()