from openai import OpenAI
import pandas as pd
import requests


def LLM_request(model_series, model_name, content):
    """Send a single-turn chat request to the provider backing `model_series`
    and return the assistant's reply as a string."""
    # Read API keys from the txt file (pandas treats the first line as a header).
    df = pd.read_csv('./scripts/Task2/utils/LLM_APIs.txt')
    # Each remaining line holds "name<TAB>API"; split on the tab into two columns.
    df = pd.DataFrame([x.split('\t') for x in df.values.flatten()], columns=['name', 'API'])
    # Look up the API key matching the requested model series.
    api_key = df[df['name'] == model_series]['API'].values[0]

    messages = [{'role': 'user', 'content': content}]

    if model_series == 'gpt':
        # GPT models via the OpenAI-compatible bianxie gateway.
        client = OpenAI(api_key=api_key, base_url='https://api.bianxie.ai/v1')
        completion = client.chat.completions.create(model=model_name, messages=messages)
        return completion.choices[0].message.content
    elif model_series == 'ds_V3_qwen_llama':
        # DeepSeek-V3 / Qwen / Llama models served through the Nebius AI Studio endpoint.
        client = OpenAI(api_key=api_key, base_url='https://api.studio.nebius.ai/v1')
        response = client.chat.completions.create(model=model_name, messages=messages, stream=False)
        return response.choices[0].message.content
    elif model_series in ('gemini', 'claude'):
        # Gemini and Claude share the same OpenAI-compatible HTTP endpoint,
        # so a single plain-requests branch covers both.
        url = 'https://api.bianxie.ai/v1/chat/completions'
        headers = {
            'Content-Type': 'application/json',
            'Authorization': f'Bearer {api_key}',
        }
        data = {'model': model_name, 'messages': messages}
        response = requests.post(url, headers=headers, json=data)
        return response.json()['choices'][0]['message']['content']
    else:
        return 'Unsupported model series. Please check the model series name.'
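

# --- Usage sketch (illustrative only) ---
# A minimal example of calling LLM_request, assuming the key file at
# ./scripts/Task2/utils/LLM_APIs.txt contains a row for the 'gpt' series.
# The model name 'gpt-4o-mini' is a placeholder assumption; replace it with
# whichever model the gateway actually exposes.
if __name__ == '__main__':
    reply = LLM_request('gpt', 'gpt-4o-mini', 'Reply with the single word "ready".')
    print(reply)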