Update app.py
app.py CHANGED

@@ -21,6 +21,9 @@ import pandas as pd
 from docx import Document as DocxDocument
 import google.generativeai as genai
 
+
+from huggingface_hub import InferenceClient
+
 # Set up basic configuration for logging
 logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
 
@@ -34,6 +37,15 @@ API_BASE_URL = "https://api.cloudflare.com/client/v4/accounts/a17f03e0f049ccae0c
 print(f"ACCOUNT_ID: {ACCOUNT_ID}")
 print(f"CLOUDFLARE_AUTH_TOKEN: {API_TOKEN[:5]}..." if API_TOKEN else "Not set")
 
+
+# Initialize the Hugging Face client
+client = InferenceClient(model="meta-llama/Llama-3.1", token=huggingface_token)
+
+# Assuming the API supports such a call (pseudocode example):
+metadata = client.get_model_metadata()
+print(metadata["max_input_tokens"])  # This would be a hypothetical field
+
+
 MODELS = [
     "mistralai/Mistral-7B-Instruct-v0.3",
     "mistralai/Mixtral-8x7B-Instruct-v0.1",