Update utils.py #1
by dwb2023 · opened
utils.py CHANGED
@@ -3,6 +3,7 @@ import os
 import torch
 from transformers import BitsAndBytesConfig, AutoConfig, AutoModelForCausalLM, LlavaNextForConditionalGeneration, LlavaForConditionalGeneration, PaliGemmaForConditionalGeneration, Idefics2ForConditionalGeneration
 from functools import lru_cache
+import spaces
 
 os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "1"
 
@@ -23,7 +24,8 @@ ARCHITECTURE_MAP = {
     "AutoModelForCausalLM": AutoModelForCausalLM
 }
 
-# Function to get the model summary with caching
+# Function to get the model summary with caching and GPU support
+@spaces.GPU
 @lru_cache(maxsize=10)
 def get_model_summary(model_name):
     """
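
In short, the PR imports the spaces package and stacks @spaces.GPU on top of the existing @lru_cache decorator, so get_model_summary runs with a GPU attached on ZeroGPU Spaces. A minimal sketch of the resulting function follows; only the import and the decorator stacking come from this diff, while the function body is an assumption for illustration (the real utils.py resolves the model class through ARCHITECTURE_MAP rather than hard-coding AutoModelForCausalLM).

import os
from functools import lru_cache

import spaces
from transformers import AutoConfig, AutoModelForCausalLM

os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "1"

@spaces.GPU             # from this PR: request a GPU for each call (ZeroGPU Spaces)
@lru_cache(maxsize=10)  # existing caching; note it sits inside the GPU wrapper
def get_model_summary(model_name):
    # Assumed body for illustration only: instantiate the model from its config
    # and return the printed module tree as the "summary". The actual utils.py
    # picks the model class via ARCHITECTURE_MAP instead of this stand-in.
    config = AutoConfig.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_config(config)
    return str(model)

One consequence of this decorator order is that spaces.GPU wraps the cached function, so even a cache hit first passes through the GPU-allocation wrapper. Placing @lru_cache outermost instead would let repeat calls for the same model name skip the GPU request entirely.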