Yahya Darman committed
Commit 82cfa24 · 0 Parent(s)
Initial clean commit on master branch
- .gitattributes +35 -0
- .gitignore +24 -0
- LICENSE +21 -0
- README.md +13 -0
- app.py +411 -0
- backend/config.py +59 -0
- backend/data_collector.py +278 -0
- backend/llm_service.py +143 -0
- backend/mcp_server.py +103 -0
- backend/moat_analyzer.py +200 -0
- backend/stock_analyzer.py +387 -0
- modal_llm_service.py +107 -0
- modal_mcp_server.py +35 -0
- requirements.txt +13 -0
.gitattributes
ADDED
@@ -0,0 +1,35 @@
*.7z filter=lfs diff=lfs merge=lfs -text
*.arrow filter=lfs diff=lfs merge=lfs -text
*.bin filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.ckpt filter=lfs diff=lfs merge=lfs -text
*.ftz filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.h5 filter=lfs diff=lfs merge=lfs -text
*.joblib filter=lfs diff=lfs merge=lfs -text
*.lfs.* filter=lfs diff=lfs merge=lfs -text
*.mlmodel filter=lfs diff=lfs merge=lfs -text
*.model filter=lfs diff=lfs merge=lfs -text
*.msgpack filter=lfs diff=lfs merge=lfs -text
*.npy filter=lfs diff=lfs merge=lfs -text
*.npz filter=lfs diff=lfs merge=lfs -text
*.onnx filter=lfs diff=lfs merge=lfs -text
*.ot filter=lfs diff=lfs merge=lfs -text
*.parquet filter=lfs diff=lfs merge=lfs -text
*.pb filter=lfs diff=lfs merge=lfs -text
*.pickle filter=lfs diff=lfs merge=lfs -text
*.pkl filter=lfs diff=lfs merge=lfs -text
*.pt filter=lfs diff=lfs merge=lfs -text
*.pth filter=lfs diff=lfs merge=lfs -text
*.rar filter=lfs diff=lfs merge=lfs -text
*.safetensors filter=lfs diff=lfs merge=lfs -text
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.tar.* filter=lfs diff=lfs merge=lfs -text
*.tar filter=lfs diff=lfs merge=lfs -text
*.tflite filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.wasm filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
.gitignore
ADDED
@@ -0,0 +1,24 @@
# Python
__pycache__/
*.pyc
*.pyo
*.pyd
*.log

# Virtual Environment
venv/
.venv/


# IDE-specific files
.idea/
.vscode/

# Environment variables
.env

# Reports and generated files
reports/

# macOS system files
.DS_Store
LICENSE
ADDED
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2025 Yahya Darman

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
README.md
ADDED
@@ -0,0 +1,13 @@
---
title: BuffetBot - Agentic Stock Advisor
emoji: 📈
colorFrom: purple
colorTo: green
sdk: gradio
app_file: app.py
short_description: MCP-Modal AI agent for Buffett-inspired stock analysis
pinned: false
tags:
- agent-demo-track
- mcp-server-track
---
app.py
ADDED
@@ -0,0 +1,411 @@
import gradio as gr
from datetime import datetime
import os
from dotenv import load_dotenv
import requests  # Import requests for HTTP calls to Modal backend
import plotly.io as pio  # Import plotly.io to deserialize JSON strings to Plotly figures
# from gradio.themes.utils import fonts

# from backend.stock_analyzer import StockAnalyzer
# from backend.config import AppConfig

# Load environment variables
load_dotenv()


# class BuffetBotTheme(gr.themes.Soft):
#     def __init__(self, **kwargs):
#         super().__init__(
#             font=(
#                 fonts.GoogleFont("Quicksand"),
#                 "ui-sans-serif",
#                 "sans-serif",
#             ),
#             **kwargs
#         )


class BuffetBotApp:
    def __init__(self):
        # self.config = AppConfig()
        # self.stock_analyzer = StockAnalyzer()
        self.mcp_server_url = os.getenv("MODAL_MCP_SERVER_URL")  # Get Modal MCP server URL from environment variable
        if not self.mcp_server_url:
            raise ValueError("MODAL_MCP_SERVER_URL environment variable not set.")

    def create_ui(self):
        """Create the Gradio interface."""
        custom_css = """
        /* General Body and Font */
        body {
            font-family: 'Roboto', 'Helvetica', 'Arial', sans-serif;
            color: #333;
            background-color: #f5f5f5;
        }

        /* Main Container Styling */
        .gradio-container {
            max-width: 1000px; /* Slightly wider container */
            margin: auto;
            padding: 30px;
            box-shadow: 0 4px 15px rgba(0, 0, 0, 0.1);
            border-radius: 12px;
            background-color: #ffffff;
            margin-top: 30px;
            margin-bottom: 30px;
        }

        /* Headings */
        h1 {
            font-size: 2.8em;
            color: #1a237e; /* Darker blue for prominence */
            text-align: center;
            margin-bottom: 25px;
            font-weight: 700; /* Bold */
            letter-spacing: -0.5px;
        }

        h2 {
            font-size: 2.0em;
            color: #3f51b5; /* Medium blue for subheadings */
            border-bottom: 2px solid #e8eaf6; /* Light separator */
            padding-bottom: 10px;
            margin-top: 40px;
            margin-bottom: 20px;
            font-weight: 600;
        }

        h3 {
            font-size: 1.5em;
            color: #424242;
            margin-top: 25px;
            margin-bottom: 15px;
        }

        /* Textboxes and Inputs */
        .gr-textbox textarea, .gr-textbox input {
            border: 1px solid #bdbdbd;
            border-radius: 8px;
            padding: 10px 15px;
            font-size: 1.1em;
            box-shadow: inset 0 1px 3px rgba(0, 0, 0, 0.05);
        }
        .gr-textbox label {
            font-weight: 600;
            color: #555;
            margin-bottom: 8px;
        }

        /* Buttons */
        .gr-button {
            border-radius: 8px;
            padding: 12px 25px;
            font-size: 1.1em;
            font-weight: 600;
            transition: all 0.3s ease;
        }
        .gr-button.primary {
            background-color: #4CAF50; /* Green */
            color: white;
            border: none;
        }
        .gr-button.primary:hover {
            background-color: #43a047;
            box-shadow: 0 2px 8px rgba(0,0,0,0.2);
        }
        .gr-button.secondary {
            background-color: #e3f2fd; /* Light blue background for secondary button */
            color: #424242;
            border: 1px solid #90caf9; /* Light blue border */
        }
        .gr-button.secondary:hover {
            background-color: #bbdefb;
            box-shadow: 0 2px 8px rgba(0,0,0,0.1);
        }
        .gr-button.download {
            background-color: #2196f3; /* Blue for download */
            color: white;
            border: none;
        }
        .gr-button.download:hover {
            background-color: #1976d2;
            box-shadow: 0 2px 8px rgba(0,0,0,0.2);
        }

        /* Markdown Output */
        .gr-markdown {
            background-color: #f9f9f9;
            border: 1px solid #e0e0e0;
            border-radius: 8px;
            padding: 20px;
            line-height: 1.6;
            color: #424242;
            white-space: normal !important; /* Ensure text wraps */
            word-wrap: break-word !important; /* Ensure long words break */
        }
        .gr-markdown p {
            margin-bottom: 10px;
        }
        .gr-markdown ul {
            list-style-type: disc;
            margin-left: 20px;
            padding-left: 0;
        }
        .gr-markdown li {
            margin-bottom: 5px;
        }

        /* Plots */
        .gr-plot {
            border: 1px solid #e0e0e0;
            border-radius: 8px;
            padding: 10px;
            background-color: #ffffff;
            margin-top: 20px;
        }

        /* Specific element adjustments */
        #connection-status-textbox {
            font-weight: 500;
            color: #3f51b5;
        }
        #loading-status-textbox {
            font-style: italic;
            color: #757575;
        }
        #input-button-column {
            background-color: #ffffff !important;
        }
        .investor-note {
            color: #2e7d32; /* Green color for investor notes */
            font-style: italic;
            font-weight: 500;
            margin-top: 8px;
            margin-bottom: 8px;
            padding-left: 8px;
            border-left: 2px solid #66bb6a; /* Small green bar on the left */
        }
        """

        with gr.Blocks(theme=gr.themes.Soft(), css=custom_css) as app:
            gr.Markdown("# 📈 BuffetBot - AI Stock Advisor")

            # Connection Status
            with gr.Column():
                connection_btn = gr.Button("🔗 Test MCP Connection", variant="secondary")
                connection_status = gr.Textbox(label="MCP Connection Status", interactive=False, elem_id="connection-status-textbox")
                # New vLLM Connection Test
                vllm_connection_btn = gr.Button("🧠 Test vLLM Connection", variant="secondary")
                vllm_connection_status = gr.Textbox(label="vLLM Connection Status", interactive=False, elem_id="vllm-connection-status-textbox")

            with gr.Row():
                with gr.Column(scale=2, elem_id="input-button-column"):
                    ticker_input = gr.Textbox(
                        label="Enter Stock Ticker",
                        placeholder="e.g., AAPL",
                        max_lines=1
                    )
                    generate_btn = gr.Button("Generate Report", variant="primary")
                    loading_status = gr.Textbox(label="Status", interactive=False, visible=False, elem_id="loading-status-textbox")

            # Results display components
            output = gr.Markdown(label="Analysis", visible=False)
            revenue_plot = gr.Plot(label="Revenue Growth", visible=False)
            fcf_plot = gr.Plot(label="Free Cash Flow per Share", visible=False)
            shares_plot = gr.Plot(label="Shares Outstanding", visible=False)

            # Static Chart Insights Section
            chart_insights_text = """
## Chart Insights
### Revenue Growth
<p class="investor-note">*What to look for: Ideally, we are looking for companies that consistently grow their revenue year after year. Consistent growth indicates market acceptance and business expansion.*</p>

### Free Cash Flow per Share
<p class="investor-note">*What to look for: Look for companies with consistently high and growing free cash flow. High FCF indicates a company has strong financial health and flexibility for reinvestment, debt reduction, or shareholder returns.*</p>

### Shares Outstanding
<p class="investor-note">*What to look for: Ideally, look for a declining trend in shares outstanding. This suggests the company is buying back its own shares, which can increase shareholder value by reducing the number of shares in circulation.*</p>
"""
            chart_insights = gr.Markdown(value=chart_insights_text, visible=False, label="Chart Insights")

            download_button = gr.DownloadButton(
                label="Download Analysis as TXT",
                visible=False,
                variant="secondary"
            )

            # Hidden state to store analysis text for download
            analysis_text_state = gr.State()

            # Event handlers
            generate_btn.click(
                fn=self.generate_report,
                inputs=[ticker_input],
                outputs=[
                    output,
                    revenue_plot,
                    fcf_plot,
                    shares_plot,
                    analysis_text_state,
                    download_button,
                    loading_status,
                    chart_insights
                ]
            )

            ticker_input.submit(
                fn=self.generate_report,
                inputs=[ticker_input],
                outputs=[
                    output,
                    revenue_plot,
                    fcf_plot,
                    shares_plot,
                    analysis_text_state,
                    download_button,
                    loading_status,
                    chart_insights
                ]
            )

            download_button.click(
                fn=self._save_and_return_analysis_file,
                inputs=[analysis_text_state, ticker_input],
                outputs=[download_button],
                show_progress="hidden"
            )

            # Event handler for connection button
            connection_btn.click(
                fn=self._test_mcp_connection,
                inputs=[],
                outputs=[connection_status]
            )

            vllm_connection_btn.click(
                fn=self._test_vllm_connection,
                inputs=[],
                outputs=[vllm_connection_status]
            )

        return app

    def _save_and_return_analysis_file(self, analysis_text: str, ticker: str):
        """Saves the analysis text to a file and returns the path for download."""
        if not analysis_text:
            return None  # No file to download if no analysis text

        # Ensure reports directory exists
        reports_dir = "reports"
        os.makedirs(reports_dir, exist_ok=True)

        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        file_path = os.path.join(reports_dir, f"{ticker}_analysis_{timestamp}.txt")

        with open(file_path, "w") as f:
            f.write(analysis_text)

        return file_path

    def generate_report(self, ticker):
        """Generate stock analysis report."""
        if not ticker or not ticker.strip():
            # Yield the error state (this method is a generator), then stop without running the analysis
            yield (
                gr.update(value="Error: Please enter a valid stock ticker", visible=True),
                gr.update(value=None, visible=False),
                gr.update(value=None, visible=False),
                gr.update(value=None, visible=False),
                None,
                gr.update(visible=False),
                gr.update(value="❌ Error: Please enter a valid stock ticker", visible=True),
                gr.update(visible=False)
            )
            return

        try:
            # Show loading state
            yield (
                gr.update(value="⏳ Generating report... Please wait.", visible=True),
                gr.update(value=None, visible=False),
                gr.update(value=None, visible=False),
                gr.update(value=None, visible=False),
                None,
                gr.update(visible=False),
                gr.update(value="⏳ Analyzing stock data and generating insights...", visible=True),
                gr.update(visible=False)
            )

            # Call the Modal MCP server for analysis
            headers = {'Content-Type': 'application/json'}
            payload = {"ticker": ticker}
            response = requests.post(f"{self.mcp_server_url}/analyze", headers=headers, json=payload)
            response.raise_for_status()
            analysis_data = response.json()

            # Extract data from the response
            analysis_results = {
                "analysis": analysis_data["analysis"],
                "revenue_chart": pio.from_json(analysis_data["revenue_chart"]) if analysis_data["revenue_chart"] else None,
                "fcf_chart": pio.from_json(analysis_data["fcf_chart"]) if analysis_data["fcf_chart"] else None,
                "shares_chart": pio.from_json(analysis_data["shares_chart"]) if analysis_data["shares_chart"] else None
            }

            # Format the analysis text with a timestamp
            timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            formatted_analysis = f"## Analysis Report for {ticker.upper()}\n*Generated on {timestamp}*\n\n{analysis_results['analysis']}"

            yield (
                gr.update(value=formatted_analysis, visible=True),
                gr.update(value=analysis_results["revenue_chart"], visible=True),
                gr.update(value=analysis_results["fcf_chart"], visible=True),
                gr.update(value=analysis_results["shares_chart"], visible=True),
                formatted_analysis,
                gr.update(label=f"Download {ticker.upper()} Report", visible=True),
                gr.update(value="✅ Analysis complete!", visible=True),
                gr.update(visible=True)
            )
        except requests.exceptions.RequestException as e:
            error_msg = f"Error analyzing stock: {str(e)}"
            if "429" in str(e):
                error_msg = "Rate limit exceeded. Please try again in a few minutes."
            elif "500" in str(e):
                error_msg = "Server error. Please try again later."
            yield (
                gr.update(value=error_msg, visible=True),
                gr.update(value=None, visible=False),
                gr.update(value=None, visible=False),
                gr.update(value=None, visible=False),
                None,
                gr.update(visible=False),
                gr.update(value=f"❌ {error_msg}", visible=True),
                gr.update(visible=False)
            )

    def _test_mcp_connection(self):
        """Test connection to Modal MCP server health endpoint."""
        try:
            response = requests.get(f"{self.mcp_server_url}/health", timeout=10)
            if response.status_code == 200:
                return f"✅ Connected to Modal MCP Server: {response.json().get('status', 'OK')}"
            else:
                return f"❌ Connection failed (HTTP {response.status_code}): {response.text}"
        except requests.exceptions.RequestException as e:
            return f"❌ Connection error: {str(e)}"

    def _test_vllm_connection(self):
        """Tests the connection to the Modal vLLM service via the MCP server."""
        try:
            response = requests.get(f"{self.mcp_server_url}/test_llm_connection")
            response.raise_for_status()  # Raise an HTTPError for bad responses (4xx or 5xx)
            status_data = response.json()
            return f"Status: {status_data.get('status', 'Unknown')}"
        except requests.exceptions.RequestException as e:
            return f"Error calling vLLM test endpoint: {e}"

def main():
    """Main entry point for the application."""
    app_instance = BuffetBotApp()
    app = app_instance.create_ui()
    app.launch(server_name="0.0.0.0", server_port=7860, mcp_server=True)

if __name__ == "__main__":
    main()
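Note: the request/response contract that generate_report relies on can be exercised without the Gradio UI. The snippet below is a minimal sketch, assuming MODAL_MCP_SERVER_URL points at a deployed MCP server whose /analyze endpoint returns the JSON fields used above; "AAPL" and the timeout are only example values.

    # Sketch: call the MCP server the same way BuffetBotApp.generate_report does.
    import os
    import requests
    import plotly.io as pio

    mcp_url = os.environ["MODAL_MCP_SERVER_URL"]            # same variable the app reads
    resp = requests.post(f"{mcp_url}/analyze", json={"ticker": "AAPL"}, timeout=300)
    resp.raise_for_status()
    data = resp.json()

    print(data["analysis"][:200])                            # markdown analysis text
    if data["revenue_chart"]:
        fig = pio.from_json(data["revenue_chart"])           # Plotly figure serialized as JSON
        fig.show()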
backend/config.py
ADDED
@@ -0,0 +1,59 @@
# Configuration settings for BuffetBot.

import os
from dotenv import load_dotenv
from datetime import datetime

class AppConfig:
    """Configuration settings for the BuffetBot application."""

    def __init__(self):
        load_dotenv()

        # API Keys
        self.openai_api_key = os.getenv("OPENAI_API_KEY")
        self.serper_api_key = os.getenv("SERPER_API_KEY")

        # App Settings
        self.app_name = "BuffetBot"
        self.app_version = "1.0.0"

        # Analysis Settings
        self.default_timeframe = "5y"
        self.default_interval = "1mo"

        # PDF Settings
        self.pdf_output_dir = "reports"
        self.pdf_template_dir = "templates"

        # Cache Settings
        self.cache_dir = ".cache"
        self.cache_expiry = 3600  # 1 hour

        # Create necessary directories
        self._create_directories()

    def _create_directories(self):
        """Create necessary directories if they don't exist."""
        directories = [
            self.pdf_output_dir,
            self.pdf_template_dir,
            self.cache_dir
        ]

        for directory in directories:
            if not os.path.exists(directory):
                os.makedirs(directory)

    def get_pdf_path(self, ticker):
        """Get the path for a PDF report."""
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        filename = f"{ticker}_{timestamp}.pdf"
        return os.path.join(self.pdf_output_dir, filename)

    def get_cache_path(self, ticker, data_type):
        """Get the path for cached data."""
        filename = f"{ticker}_{data_type}.json"
        return os.path.join(self.cache_dir, filename)

# Adding a comment to trigger a rebuild on Hugging Face Spaces.
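As a quick illustration of how the cache and report paths are built (a sketch; the ticker and data type are arbitrary examples):

    from backend.config import AppConfig

    config = AppConfig()                               # creates reports/, templates/ and .cache/ if missing
    print(config.get_cache_path("AAPL", "revenue"))    # .cache/AAPL_revenue.json
    print(config.get_pdf_path("AAPL"))                 # reports/AAPL_<timestamp>.pdf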
backend/data_collector.py
ADDED
@@ -0,0 +1,278 @@
import yfinance as yf
import pandas as pd
import requests
from bs4 import BeautifulSoup
import json
import os
from datetime import datetime, timedelta
import time
import logging
from typing import List, Dict, Any

from .config import AppConfig

class YFinanceRateLimitError(Exception):
    """Custom exception for yfinance rate limit errors."""
    pass

class StockDataCollector:
    """Collects and caches stock data from various sources."""

    def __init__(self):
        self.config = AppConfig()
        self._setup_logging()
        self.serper_api_key = os.getenv("SERPER_API_KEY")
        self.serper_base_url = "https://google.serper.dev/search"

    def _setup_logging(self):
        """Setup logging configuration."""
        logging.basicConfig(
            level=logging.INFO,
            format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
        )
        self.logger = logging.getLogger(__name__)

    def _perform_serper_search(self, query: str, num_results: int = 5) -> List[Dict[str, Any]]:
        """Performs a web search using the Serper API and returns a list of search results."""
        if not self.serper_api_key:
            self.logger.warning("Serper API key not found. Skipping web search.")
            return []

        headers = {
            'X-API-KEY': self.serper_api_key,
            'Content-Type': 'application/json'
        }
        payload = {
            "q": query,
            "num": num_results
        }

        try:
            self.logger.info(f"Performing Serper search for query: {query}")
            response = requests.post(self.serper_base_url, headers=headers, data=json.dumps(payload))
            response.raise_for_status()  # Raise an HTTPError for bad responses (4xx or 5xx)
            search_results = response.json()

            # Extract relevant snippets and URLs
            snippets = []
            if "organic" in search_results:
                for result in search_results["organic"]:
                    snippets.append({
                        "title": result.get("title"),
                        "snippet": result.get("snippet"),
                        "link": result.get("link")
                    })
            return snippets

        except requests.exceptions.RequestException as e:
            self.logger.error(f"Error performing Serper search for '{query}': {e}")
            return []
        except json.JSONDecodeError as e:
            self.logger.error(f"JSON decoding error from Serper API for '{query}': {e}. Response: {response.text[:500]}...")
            return []
        except Exception as e:
            self.logger.error(f"An unexpected error occurred during Serper search for '{query}': {e}")
            return []

    def get_revenue_history(self, ticker):
        """Get historical revenue data."""
        cache_path = self.config.get_cache_path(ticker, "revenue")

        # Try to get from cache first
        if os.path.exists(cache_path):
            cache_age = time.time() - os.path.getmtime(cache_path)
            if cache_age < self.config.cache_expiry:
                try:
                    with open(cache_path, 'r') as f:
                        data_dict = json.load(f)
                    df = pd.DataFrame.from_dict(data_dict, orient='index')
                    df.index = pd.to_datetime(df.index)
                    return df
                except Exception as e:
                    self.logger.warning(f"Error reading revenue cache: {e}")

        try:
            self.logger.debug(f"Attempting to fetch revenue data from yfinance for {ticker}.")
            # Try yfinance first
            stock = yf.Ticker(ticker)
            financials = stock.financials

            if not financials.empty:
                self.logger.debug(f"Successfully fetched revenue data from yfinance for {ticker}.")
                revenue_data = financials.loc['Total Revenue'].to_frame('Revenue')
                revenue_data.index = pd.to_datetime(revenue_data.index)

                # Cache the data
                self._cache_data(revenue_data, cache_path)
                return revenue_data
            else:
                self.logger.info(f"No revenue data from yfinance for {ticker}.")
                return None

        except requests.exceptions.RequestException as req_e:
            if isinstance(req_e, requests.exceptions.HTTPError) and req_e.response.status_code == 429:
                self.logger.error(f"Yfinance rate limit hit for {ticker}: {req_e}")
                raise YFinanceRateLimitError(f"Yfinance rate limit hit for {ticker}. Please try again in an hour.")
            else:
                self.logger.error(f"Network or API error fetching revenue data from yfinance for {ticker}: {req_e}")
                return None
        except json.JSONDecodeError as json_e:
            self.logger.error(f"JSON decoding error for revenue data from yfinance for {ticker}: {json_e}")
            return None
        except Exception as e:
            self.logger.error(f"Unexpected error fetching revenue data from yfinance: {e}")
            return None

    def get_fcf_history(self, ticker):
        """Get historical free cash flow data."""
        cache_path = self.config.get_cache_path(ticker, "fcf")

        # Try to get from cache first
        if os.path.exists(cache_path):
            cache_age = time.time() - os.path.getmtime(cache_path)
            if cache_age < self.config.cache_expiry:
                try:
                    with open(cache_path, 'r') as f:
                        data_dict = json.load(f)
                    df = pd.DataFrame.from_dict(data_dict, orient='index')
                    df.index = pd.to_datetime(df.index)
                    return df
                except Exception as e:
                    self.logger.warning(f"Error reading FCF cache: {e}")

        try:
            self.logger.debug(f"Attempting to fetch FCF data from yfinance for {ticker}.")
            stock = yf.Ticker(ticker)
            cash_flow = stock.cashflow

            if not cash_flow.empty:
                self.logger.debug(f"Successfully fetched FCF data from yfinance for {ticker}.")
                fcf_data = cash_flow.loc['Free Cash Flow'].to_frame('FCF')
                fcf_data.index = pd.to_datetime(fcf_data.index)

                # Cache the data
                self._cache_data(fcf_data, cache_path)
                return fcf_data
            else:
                self.logger.info(f"No FCF data from yfinance for {ticker}.")
                return None

        except requests.exceptions.RequestException as req_e:
            if isinstance(req_e, requests.exceptions.HTTPError) and req_e.response.status_code == 429:
                self.logger.error(f"Yfinance rate limit hit for {ticker}: {req_e}")
                raise YFinanceRateLimitError(f"Yfinance rate limit hit for {ticker}. Please try again in an hour.")
            else:
                self.logger.error(f"Network or API error fetching FCF data from yfinance for {ticker}: {req_e}")
                return None
        except json.JSONDecodeError as json_e:
            self.logger.error(f"JSON decoding error for FCF data from yfinance for {ticker}: {json_e}")
            return None
        except Exception as e:
            self.logger.error(f"Unexpected error fetching FCF data from yfinance: {e}")
            return None

    def get_shares_history(self, ticker):
        """Get historical shares outstanding data."""
        cache_path = self.config.get_cache_path(ticker, "shares")

        # Try to get from cache first
        if os.path.exists(cache_path):
            cache_age = time.time() - os.path.getmtime(cache_path)
            if cache_age < self.config.cache_expiry:
                try:
                    with open(cache_path, 'r') as f:
                        data_dict = json.load(f)
                    df = pd.DataFrame.from_dict(data_dict, orient='index')
                    df.index = pd.to_datetime(df.index)
                    return df
                except Exception as e:
                    self.logger.warning(f"Error reading shares cache: {e}")

        try:
            self.logger.debug(f"Attempting to fetch shares data from yfinance for {ticker}.")
            stock = yf.Ticker(ticker)
            balance_sheet = stock.balance_sheet

            if not balance_sheet.empty:
                self.logger.debug(f"Successfully fetched shares data from yfinance for {ticker}.")
                shares_data = balance_sheet.loc['Common Stock'].to_frame('Shares')
                shares_data.index = pd.to_datetime(shares_data.index)

                # Cache the data
                self._cache_data(shares_data, cache_path)
                return shares_data
            else:
                self.logger.info(f"No shares data from yfinance for {ticker}.")
                return None

        except requests.exceptions.RequestException as req_e:
            if isinstance(req_e, requests.exceptions.HTTPError) and req_e.response.status_code == 429:
                self.logger.error(f"Yfinance rate limit hit for {ticker}: {req_e}")
                raise YFinanceRateLimitError(f"Yfinance rate limit hit for {ticker}. Please try again in an hour.")
            else:
                self.logger.error(f"Network or API error fetching shares data from yfinance for {ticker}: {req_e}")
                return None
        except json.JSONDecodeError as json_e:
            self.logger.error(f"JSON decoding error for shares data from yfinance for {ticker}: {json_e}")
            return None
        except Exception as e:
            self.logger.error(f"Unexpected error fetching shares data from yfinance: {e}")
            return None

    def _scrape_revenue_data(self, ticker):
        """Scrape revenue data from alternative sources."""
        self.logger.info(f"Attempting to scrape revenue data from Macrotrends for {ticker}.")
        try:
            # Example: Scrape from Macrotrends
            url = f"https://www.macrotrends.net/stocks/charts/{ticker}/revenue"
            response = requests.get(url)
            response.raise_for_status()  # Raise an HTTPError for bad responses
            soup = BeautifulSoup(response.text, 'html.parser')

            # Extract revenue data from the page
            # Note: This is a placeholder. You'll need to implement the actual scraping logic
            # based on the website's structure
            self.logger.warning(f"Macrotrends scraping for {ticker} is a placeholder and not fully implemented.")
            return None

        except requests.exceptions.RequestException as req_e:
            self.logger.error(f"Network or HTTP error scraping revenue data from Macrotrends for {ticker}: {req_e}")
            return None
        except Exception as e:
            self.logger.error(f"Error scraping revenue data from Macrotrends for {ticker}: {e}")
            return None

    def _cache_data(self, data, cache_path):
        """Cache data to file."""
        try:
            os.makedirs(os.path.dirname(cache_path), exist_ok=True)
            # Convert Timestamp index to string before saving to JSON
            data_to_save = data.copy()
            data_to_save.index = data_to_save.index.astype(str)
            with open(cache_path, 'w') as f:
                json.dump(data_to_save.to_dict(orient='index'), f)
        except Exception as e:
            self.logger.warning(f"Error caching data: {e}")

    def clear_cache(self, ticker=None):
        """Clear cached data for a ticker or all tickers."""
        try:
            if ticker:
                patterns = [
                    self.config.get_cache_path(ticker, "revenue"),
                    self.config.get_cache_path(ticker, "fcf"),
                    self.config.get_cache_path(ticker, "shares")
                ]
            else:
                patterns = [self.config.cache_dir]

            for pattern in patterns:
                if os.path.exists(pattern):
                    if os.path.isfile(pattern):
                        os.remove(pattern)
                    else:
                        for file in os.listdir(pattern):
                            os.remove(os.path.join(pattern, file))

        except Exception as e:
            self.logger.error(f"Error clearing cache: {e}")
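A minimal usage sketch of the collector (assumes yfinance can reach Yahoo Finance; SERPER_API_KEY is only needed by the web-search helper, and "AAPL" is just an example ticker):

    from backend.data_collector import StockDataCollector, YFinanceRateLimitError

    collector = StockDataCollector()
    try:
        revenue = collector.get_revenue_history("AAPL")   # DataFrame indexed by fiscal period, or None
        if revenue is not None:
            print(revenue.head())
    except YFinanceRateLimitError as e:
        print(f"Rate limited: {e}")

    collector.clear_cache("AAPL")  # drop cached revenue/fcf/shares JSON for this ticker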
backend/llm_service.py
ADDED
@@ -0,0 +1,143 @@
import modal
import os
import json
import urllib.request
import traceback

# Define a Modal App
# You can give it a more specific name if you like, e.g., "buffetbot-llm-service"
app = modal.App("buffetbot-llm")

@app.function(
    secrets=[modal.Secret.from_name("buffetbot-vllm-config")],
    gpu="H100:1",  # Add GPU specification
    timeout=1500  # Increased timeout to 25 minutes
)
def generate_llm_response(prompt: str, system_message: str) -> str:
    """Generates a response using our deployed vLLM service."""
    print(f"Received prompt for LLM: {prompt[:200]}...")  # Log the prompt for debugging

    # Get the URL of our deployed vLLM service
    vllm_url = os.environ.get("VLLM_SERVICE_URL")
    if not vllm_url:
        raise ValueError("VLLM_SERVICE_URL environment variable not set")

    # Prepare the request with system message for better context
    messages = [  # NOTE: the system_message argument is not used here; a fixed BuffetBot persona is hard-coded below
        {
            "role": "system",
            "content": "You are BuffetBot, a concise stock analyst. Provide clear recommendations based on key metrics and economic moat. Use 'BuffetBot recommends'."
        },
        {"role": "user", "content": prompt}
    ]

    headers = {
        "Authorization": f"Bearer {os.environ['API_KEY']}",
        "Content-Type": "application/json",
    }
    payload = json.dumps({
        "messages": messages,
        "model": "neuralmagic/Meta-Llama-3.1-8B-Instruct-quantized.w4a16",
        "temperature": 0.3,  # Lower temperature for more focused responses
        "max_tokens": 500,  # Reduce max tokens for faster responses
        "top_p": 0.9,  # Add top_p for better quality/speed balance
        "frequency_penalty": 0.0,  # Add frequency penalty to reduce repetition
        "presence_penalty": 0.0  # Add presence penalty to encourage diversity
    })

    # Make the request to our vLLM service
    req = urllib.request.Request(
        f"{vllm_url}/v1/chat/completions",
        data=payload.encode("utf-8"),
        headers=headers,
        method="POST",
    )

    try:
        with urllib.request.urlopen(req, timeout=1200) as response:  # Increased timeout to 20 minutes
            result = json.loads(response.read().decode())
            return result["choices"][0]["message"]["content"]
    except Exception as e:
        traceback.print_exc()
        print(f"Error calling vLLM service: {str(e)}")
        # Fallback to a simple response if the LLM service fails
        return "I apologize, but I'm having trouble accessing the LLM service at the moment. Please try again later."

@app.local_entrypoint()
def main():
    # Test with a prompt similar to what stock_analyzer.py sends
    test_prompt = """
As an AI-powered stock analyst, provide a comprehensive investment insight for AAPL. Focus on explaining what the key numbers mean, how the financial health and growth metrics contribute to the overall picture, and the significance of its economic moat.

Company Name: Apple Inc.
Sector: Technology
Industry: Consumer Electronics
Full-time Employees: 161000

## Current Market Status
- Current Price: $170.00
- 52 Week High: $180.00
- 52 Week Low: $120.00
- Market Cap: $2,600,000,000,000.00

## Key Valuation Metrics
- P/E Ratio: 28.0
- Forward P/E: 26.0
- PEG Ratio: 2.1
- Dividend Yield: 0.5%

## Growth and Profitability
- Revenue Growth (YoY): 0.05
- Profit Margins: 0.25
- Operating Margins: 0.30

## Financial Health Indicators
- Current Ratio: 1.2
- Debt to Equity: 1.5
- Return on Equity: 1.5
- Debt to Asset Ratio: 0.45

#### Recent Financials (Latest Available)
- Latest Revenue: $90,000,000,000.00
- Latest Free Cash Flow: $25,000,000,000.00
- Latest Shares Outstanding: 15,500,000,000.00

#### Key Balance Sheet Items
- Total Assets: $350,000,000,000.00
- Total Liabilities: $250,000,000,000.00
- Total Debt: $100,000,000,000.00
- Total Cash: $50,000,000,000.00
- Total Equity: $100,000,000,000.00
- Debt to Equity: 1.0

#### Key Income Statement Items
- Gross Profits: $40,000,000,000.00
- Operating Income: $30,000,000,000.00
- Net Income: $20,000,000,000.00
- Earnings Growth (YoY): 0.08

## Economic Moat Analysis
Moat Rating: Wide Moat

Based on the detailed financial data and moat analysis provided above, give a comprehensive, easy-to-understand summary of the investment potential of AAPL. Explain the significance of key numbers and their implications. Conclude with a clear overall sentiment regarding investment potential.
"""
    print(f"Testing LLM with prompt: {test_prompt[:100]}...")
    response = generate_llm_response.remote(test_prompt, "You are a professional stock analyst providing clear, concise, and accurate investment analysis. Focus on explaining the numbers and their implications. Avoid any garbled or unclear text.")
    print(f"LLM Response: {response}")

    # Test with a simple prompt that includes the stock data
    simple_prompt = """
Based on the following data for Apple Inc. (AAPL), summarize the key investment considerations:

Company: Apple Inc. (AAPL)
Current Price: $170.00
P/E Ratio: 28.0
Revenue Growth: 5%
Profit Margin: 25%
Moat Rating: Wide Moat

Please provide a concise summary of the investment considerations.
"""
    print(f"Testing LLM with simple prompt: {simple_prompt[:100]}...")
    response_simple = generate_llm_response.remote(simple_prompt, "You are a professional stock analyst providing clear, concise, and accurate investment analysis. Focus on explaining the numbers and their implications. Avoid any garbled or unclear text.")
    print(f"LLM Response (simple): {response_simple}")
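Once this app is deployed with Modal, other processes can look the function up by name and call it remotely, which is the pattern mcp_server.py uses below. A short sketch of that lookup (the prompt text is only illustrative):

    import modal

    # Look up the deployed function by app name and function name, then call it remotely.
    generate = modal.Function.from_name("buffetbot-llm", "generate_llm_response")
    answer = generate.remote(
        prompt="Summarize the investment considerations for AAPL.",
        system_message="You are a professional stock analyst.",
    )
    print(answer)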
backend/mcp_server.py
ADDED
@@ -0,0 +1,103 @@
from typing import Dict, Any, Optional
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
import uvicorn
import traceback
import logging
import modal  # Import modal

# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

from .stock_analyzer import StockAnalyzer
from .data_collector import StockDataCollector

class StockAnalysisRequest(BaseModel):
    """Request model for stock analysis."""
    ticker: str

class StockAnalysisResponse(BaseModel):
    """Response model for stock analysis."""
    analysis: str
    revenue_chart: Optional[str] = None
    fcf_chart: Optional[str] = None
    shares_chart: Optional[str] = None

class MCPStockServer:
    """MCP server implementation for stock analysis tools."""

    def __init__(self):
        self.app = FastAPI(title="BuffetBot MCP Server")
        self.stock_analyzer = StockAnalyzer()
        self.data_collector = StockDataCollector()

        # Register routes
        self.app.post("/analyze", response_model=StockAnalysisResponse)(self.analyze_stock)
        self.app.get("/health")(self.health_check)
        self.app.get("/test_llm_connection")(self.test_llm_connection)

    async def analyze_stock(self, request: StockAnalysisRequest) -> StockAnalysisResponse:
        """
        Analyze a stock and return the results.

        Args:
            request: StockAnalysisRequest containing the ticker

        Returns:
            StockAnalysisResponse containing the analysis results
        """
        logger.info(f"Received analysis request for ticker: {request.ticker}")
        try:
            # Get analysis
            analysis_result = self.stock_analyzer.analyze(request.ticker)

            logger.info(f"Analysis result for {request.ticker}: {analysis_result}")

            return StockAnalysisResponse(
                analysis=analysis_result["analysis"],
                revenue_chart=analysis_result["revenue_chart"],
                fcf_chart=analysis_result["fcf_chart"],
                shares_chart=analysis_result["shares_chart"]
            )

        except Exception as e:
            traceback.print_exc()
            raise HTTPException(status_code=500, detail=f"Internal Server Error during analysis: {str(e)}")

    async def health_check(self) -> Dict[str, str]:
        """Health check endpoint."""
        return {"status": "healthy"}

    async def test_llm_connection(self) -> Dict[str, str]:
        """Test connection to the Modal LLM service."""
        logger.info("Testing LLM connection...")
        try:
            generate_llm_response_modal = modal.Function.from_name("buffetbot-llm", "generate_llm_response")
            # Attempt to call the LLM service with a minimal prompt
            test_prompt = "Hello"
            test_system_message = "You are a helpful assistant."
            response = await generate_llm_response_modal.remote.aio(
                system_message=test_system_message,
                prompt=test_prompt
            )
            if response:
                logger.info(f"LLM test response (first 50 chars): {response[:50]}...")
                return {"status": "LLM connected successfully!", "response_snippet": response[:100]}
            else:
                return {"status": "LLM connection failed: Empty response.", "response_snippet": ""}
        except Exception as e:
            logger.error(f"Error testing LLM connection: {e}", exc_info=True)
            raise HTTPException(status_code=500, detail=f"Error testing LLM connection: {str(e)}")

    def run(self, host: str = "0.0.0.0", port: int = 8000):
        """Run the MCP server."""
        uvicorn.run(self.app, host=host, port=port)

# def main():
#     """Main entry point for the MCP server."""
#     server = MCPStockServer()
#     server.run()

# if __name__ == "__main__":
#     main()
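A minimal sketch of starting this server directly and probing it, assuming it is launched from the repository root so the relative imports in backend/ resolve as a package (the port simply mirrors the default of run()):

    # Run from the repository root so backend/ is importable as a package.
    from backend.mcp_server import MCPStockServer

    server = MCPStockServer()
    server.run(host="0.0.0.0", port=8000)   # serves /analyze, /health and /test_llm_connection

    # From another process:
    #   import requests
    #   print(requests.get("http://localhost:8000/health").json())   # {"status": "healthy"}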
backend/moat_analyzer.py
ADDED
|
@@ -0,0 +1,200 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
+"""
+Moat Analysis Module for BuffetBot
+
+This module implements Warren Buffett's concept of economic moats to analyze
+a company's competitive advantages and long-term sustainability.
+"""
+
+import yfinance as yf
+import pandas as pd
+from typing import Dict, Any, List, Tuple
+
+class MoatAnalyzer:
+    """Analyzes a company's economic moat using various financial metrics."""
+
+    def __init__(self):
+        self.moat_indicators = {
+            'brand_power': ['enterpriseValue'],
+            'network_effects': ['totalRevenue', 'totalAssets', 'totalCash'],
+            'cost_advantages': ['grossProfits', 'operatingMargins'],
+            'switching_costs': ['currentRatio', 'totalCurrentAssets', 'totalCurrentLiabilities'],
+            'intangible_assets': ['goodWill', 'intangibleAssets', 'totalAssets', 'totalCash']
+        }
+
+    # Helper function to format numbers or return 'N/A'
+    def _format_num(self, value):
+        return f'{value:,.2f}' if isinstance(value, (int, float)) else str(value)
+
+    def analyze_moat(self, ticker: str) -> Dict[str, Any]:
+        """
+        Analyze a company's economic moat.
+
+        Args:
+            ticker: Stock ticker symbol
+
+        Returns:
+            A dictionary containing the markdown summary and the moat rating (Wide, Narrow, Medium).
+        """
+        try:
+            stock = yf.Ticker(ticker)
+            info = stock.info
+
+            # Get financial metrics
+            metrics = self._get_financial_metrics(info)
+
+            # Analyze each moat component
+            moat_analysis = self._analyze_moat_components(metrics)
+
+            # Generate summary and get moat rating
+            summary, moat_rating = self._generate_summary(ticker, moat_analysis)
+
+            return {"summary": summary, "rating": moat_rating}
+
+        except Exception as e:
+            return {"summary": f"Error analyzing moat: {str(e)}", "rating": "N/A"}
+
+    def _get_financial_metrics(self, info: Dict[str, Any]) -> Dict[str, float]:
+        """Extract relevant financial metrics from stock info, ensuring all expected keys are present."""
+        metrics = {}
+        all_indicators = set()
+        for indicators_list in self.moat_indicators.values():
+            all_indicators.update(indicators_list)
+
+        for indicator in all_indicators:
+            metrics[indicator] = info.get(indicator, 0)  # Ensure key is present, default to 0
+
+        return metrics
+
+    def _analyze_moat_components(self, metrics: Dict[str, float]) -> Dict[str, Any]:
+        """Analyze each component of the economic moat."""
+        analysis = {
+            'brand_power': self._analyze_brand_power(metrics),
+            'network_effects': self._analyze_network_effects(metrics),
+            'cost_advantages': self._analyze_cost_advantages(metrics),
+            'switching_costs': self._analyze_switching_costs(metrics),
+            'intangible_assets': self._analyze_intangible_assets(metrics)
+        }
+
+        return analysis
+
+    def _analyze_brand_power(self, metrics: Dict[str, float]) -> Dict[str, Any]:
+        """Analyze brand power and market position."""
+        enterprise_value = metrics.get('enterpriseValue', 0)
+
+        if enterprise_value > 100_000_000_000:  # Example threshold for large company
+            score = 7
+            description = "Large enterprise value, indicating significant market presence."
+        elif enterprise_value > 10_000_000_000:
+            score = 4
+            description = "Moderate enterprise value, suggesting established presence."
+        else:
+            score = 1
+            description = "Smaller enterprise value, brand power may be limited or hard to quantify."
+
+        return {
+            'score': score,
+            'description': description
+        }
+
+    def _analyze_network_effects(self, metrics: Dict[str, float]) -> Dict[str, Any]:
+        """Analyze network effects and scale advantages."""
+        assets = metrics.get('totalAssets', metrics.get('totalCash', 0))  # Use totalCash as fallback
+        revenue = metrics.get('totalRevenue', 0)
+
+        asset_turnover = revenue / assets if assets else 0
+
+        return {
+            'score': min(asset_turnover * 5, 10),
+            'description': f"Asset turnover ratio (using assets or cash as proxy): {self._format_num(asset_turnover)}"
+        }
+
+    def _analyze_cost_advantages(self, metrics: Dict[str, float]) -> Dict[str, Any]:
+        """Analyze cost advantages and operational efficiency."""
+        gross_profits = metrics.get('grossProfits', 0)
+        operating_margins = metrics.get('operatingMargins', 0)
+
+        score = min(operating_margins * 10, 10)
+
+        return {
+            'score': score,
+            'description': f"Operating profit margin: {self._format_num(operating_margins)}%"
+        }
+
+    def _analyze_switching_costs(self, metrics: Dict[str, float]) -> Dict[str, Any]:
+        """Analyze customer switching costs."""
+        current_assets = metrics.get('totalCurrentAssets', 0)
+        current_liabilities = metrics.get('totalCurrentLiabilities', 0)
+        current_ratio = metrics.get('currentRatio', 0)
+
+        # Prefer using current ratio if available, otherwise calculate from assets/liabilities
+        if current_ratio > 0:
+            working_capital_ratio_description = f"Current Ratio: {self._format_num(current_ratio)}"
+            score = min(current_ratio * 3, 10)  # Scale current ratio to a score
+        else:
+            working_capital = current_assets - current_liabilities
+            working_capital_ratio = working_capital / current_assets if current_assets else 0
+            working_capital_ratio_description = f"Working capital ratio: {self._format_num(working_capital_ratio)}"
+            score = min(working_capital_ratio * 10, 10)
+
+        return {
+            'score': score,
+            'description': working_capital_ratio_description
+        }
+
+    def _analyze_intangible_assets(self, metrics: Dict[str, float]) -> Dict[str, Any]:
+        """Analyze intangible assets and intellectual property."""
+        goodwill = metrics.get('goodWill', 0)
+        intangibles = metrics.get('intangibleAssets', 0)
+        total_assets = metrics.get('totalAssets', metrics.get('totalCash', 1))  # Fallback to totalCash
+
+        # If no direct intangible assets, assign a baseline score
+        if goodwill == 0 and intangibles == 0:
+            score = 1  # Minimal score if no direct intangible assets are reported
+            description = "Direct intangible assets data not available, minimal score assigned."
+        else:
+            intangible_ratio = (goodwill + intangibles) / total_assets
+            score = min(intangible_ratio * 10, 10)
+            description = f"Intangible assets ratio: {self._format_num(intangible_ratio)}%"
+
+        return {
+            'score': score,
+            'description': description
+        }
+
+    def _generate_summary(self, ticker: str, moat_analysis: Dict[str, Any]) -> Tuple[str, str]:
+        """Generate a summary of the moat analysis."""
+        total_score = sum(component['score'] for component in moat_analysis.values()) / len(self.moat_indicators)
+
+        # Determine moat rating based on total_score
+        moat_rating = "Developing"
+        if total_score >= 7.5:  # Adjusted threshold for 'wide'
+            moat_rating = "Wide"
+        elif total_score >= 4.5:  # Adjusted threshold for 'narrow'
+            moat_rating = "Narrow"
+        else:
+            moat_rating = "Medium"
+
+        summary = f"""
+### Key Moat Components:
+
+1. **Brand Power**
+- {moat_analysis['brand_power']['description']}
+
+2. **Network Effects**
+- {moat_analysis['network_effects']['description']}
+
+3. **Cost Advantages**
+- {moat_analysis['cost_advantages']['description']}
+
+4. **Switching Costs**
+- {moat_analysis['switching_costs']['description']}
+
+5. **Intangible Assets**
+- {moat_analysis['intangible_assets']['description']}
+
+### What this means:
+This analysis helps understand the company's competitive advantages. A wider moat generally suggests a more durable business and potential for long-term success. Consider these points in your overall investment research.
+
+*Note: This analysis is based on available financial data and should be used as one of many factors in investment decisions.*
+"""
+        return summary, moat_rating
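A minimal usage sketch of the class above (not part of the commit): it assumes the script is run from the repository root so the `backend` package is importable, and that yfinance can reach Yahoo Finance; "AAPL" is an arbitrary example ticker.

```python
# Illustrative only; ticker and run location are assumptions, not prescribed by the module.
from backend.moat_analyzer import MoatAnalyzer

analyzer = MoatAnalyzer()
result = analyzer.analyze_moat("AAPL")

print(result["rating"])   # e.g. "Wide", "Narrow", or "Medium"
print(result["summary"])  # markdown breakdown of the five moat components
```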
backend/stock_analyzer.py
ADDED
@@ -0,0 +1,387 @@
+import yfinance as yf
+import pandas as pd
+import plotly.graph_objects as go
+import plotly.io as pio
+from datetime import datetime, timedelta
+import json
+import os
+import modal
+import logging
+import requests
+from typing import Dict, Any
+
+# DEBUG: Print modal module path and version
+print(f"DEBUG: modal module loaded from: {modal.__file__}")
+print(f"DEBUG: modal version: {modal.__version__}")
+
+from .data_collector import StockDataCollector, YFinanceRateLimitError
+from .moat_analyzer import MoatAnalyzer
+from .config import AppConfig
+
+class StockAnalyzer:
+    """Handles stock analysis and visualization."""
+
+    def __init__(self):
+        self.config = AppConfig()
+        self.data_collector = StockDataCollector()
+        self.moat_analyzer = MoatAnalyzer()
+        self._setup_logging()
+
+    # Helper function to format numbers or return 'N/A'
+    def _format_num(self, value):
+        """Helper to format numbers, handling N/A gracefully."""
+        if isinstance(value, (int, float)):
+            return f"{value:,.2f}"
+        return "N/A"
+
+    def _setup_logging(self):
+        """Setup logging configuration."""
+        logging.basicConfig(
+            level=logging.INFO,
+            format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+        )
+        self.logger = logging.getLogger(__name__)
+
+    def analyze(self, ticker):
+        """Analyze a stock and return the analysis results."""
+        try:
+            stock = yf.Ticker(ticker)
+            info = stock.info
+            if not info or not info.get('regularMarketPrice'):  # Check for essential info
+                return {"analysis": f"Could not retrieve comprehensive stock information for {ticker}. Please check the ticker symbol and try again.", "revenue_chart": None, "fcf_chart": None, "shares_chart": None}
+
+            # Initialize variables to None or default empty values
+            latest_revenue = 'N/A'
+            latest_fcf = 'N/A'
+            latest_shares = 'N/A'
+            latest_bs_data = {}
+            latest_is_data = {}
+            moat_summary_text = 'N/A'
+            moat_rating = 'N/A'
+            llm_insights = "AI insights temporarily unavailable."
+            serper_results_str = ""
+            revenue_chart_json = None
+            fcf_chart_json = None
+            shares_chart_json = None
+
+            # Collect data for charts and LLM
+            revenue_data = self.data_collector.get_revenue_history(ticker)
+            fcf_data = self.data_collector.get_fcf_history(ticker)
+            shares_data = self.data_collector.get_shares_history(ticker)
+
+            # Generate charts
+            revenue_chart = self._create_revenue_chart(revenue_data)
+            fcf_chart = self._create_fcf_chart(fcf_data)
+            shares_chart = self._create_shares_chart(shares_data)
+
+            # Convert Plotly figures to JSON strings
+            revenue_chart_json = pio.to_json(revenue_chart) if revenue_chart else None
+            fcf_chart_json = pio.to_json(fcf_chart) if fcf_chart else None
+            shares_chart_json = pio.to_json(shares_chart) if shares_chart else None
+
+            # Safely get latest values with proper error handling
+            if revenue_data is not None and not revenue_data.empty:
+                latest_revenue = revenue_data.iloc[-1]['Revenue']
+            if fcf_data is not None and not fcf_data.empty:
+                latest_fcf = fcf_data.iloc[-1]['FCF']
+            if shares_data is not None and not shares_data.empty:
+                latest_shares = shares_data.iloc[-1]['Shares']
+
+            # Attempt to get full balance sheet and income statement data
+            balance_sheet = stock.balance_sheet
+            income_statement = stock.financials
+
+            # Extract relevant recent quarterly/annual data (handle potential empty dataframes)
+            if balance_sheet is not None and not balance_sheet.empty:
+                latest_bs_data = balance_sheet.iloc[:, 0].to_dict()
+            if income_statement is not None and not income_statement.empty:
+                latest_is_data = income_statement.iloc[:, 0].to_dict()
+
+            # Get moat analysis
+            moat_analyzer = MoatAnalyzer()
+            moat_analysis_results = moat_analyzer.analyze_moat(ticker)
+            moat_summary_text = moat_analysis_results["summary"]
+            moat_rating = moat_analysis_results["rating"]
+
+            try:
+                # Look up the deployed Modal LLM app and get the remote function here
+                generate_llm_response_modal = modal.Function.from_name("buffetbot-llm", "generate_llm_response")
+
+                # Perform Serper search for moat articles
+                data_collector = StockDataCollector()
+                moat_search_results = data_collector._perform_serper_search(f"{ticker} economic moat analysis articles")
+
+                # Format Serper search results
+                serper_results_str = ""
+                if moat_search_results:
+                    serper_results_str = "\n\n#### External Moat Analysis (Web Search)\n" + "\n".join([
+                        f"- **{r['title']}** (Source: {r['link']})\n Snippet: {r['snippet']}"
+                        for r in moat_search_results
+                    ])
+
+                # Get additional comprehensive data for LLM prompt
+                revenue_data = data_collector.get_revenue_history(ticker)
+                fcf_data = data_collector.get_fcf_history(ticker)
+                shares_data = data_collector.get_shares_history(ticker)
+
+                # Generate charts
+                revenue_chart = self._create_revenue_chart(revenue_data)
+                fcf_chart = self._create_fcf_chart(fcf_data)
+                shares_chart = self._create_shares_chart(shares_data)
+
+                # Convert Plotly figures to JSON strings
+                revenue_chart_json = pio.to_json(revenue_chart) if revenue_chart else None
+                fcf_chart_json = pio.to_json(fcf_chart) if fcf_chart else None
+                shares_chart_json = pio.to_json(shares_chart) if shares_chart else None
+
+                # Safely get latest values with proper error handling
+                if revenue_data is not None and not revenue_data.empty:
+                    latest_revenue = revenue_data.iloc[-1]['Revenue']
+                if fcf_data is not None and not fcf_data.empty:
+                    latest_fcf = fcf_data.iloc[-1]['FCF']
+                if shares_data is not None and not shares_data.empty:
+                    latest_shares = shares_data.iloc[-1]['Shares']
+
+                # Attempt to get full balance sheet and income statement data
+                balance_sheet = stock.balance_sheet
+                income_statement = stock.financials
+
+                # Extract relevant recent quarterly/annual data (handle potential empty dataframes)
+                if balance_sheet is not None and not balance_sheet.empty:
+                    latest_bs_data = balance_sheet.iloc[:, 0].to_dict()
+                if income_statement is not None and not income_statement.empty:
+                    latest_is_data = income_statement.iloc[:, 0].to_dict()
+
+                # Format detailed financial data for the LLM prompt
+                financial_data_str = f"""
+#### Recent Financials (Latest Available)
+- Latest Revenue: {self._format_num(latest_revenue)}
+- Latest Free Cash Flow: {self._format_num(latest_fcf)}
+- Latest Shares Outstanding: {self._format_num(latest_shares)}
+
+#### Key Balance Sheet Items
+- Total Assets: {self._format_num(latest_bs_data.get('Total Assets', 'N/A'))}
+- Total Liabilities: {self._format_num(latest_bs_data.get('Total Liabilities', 'N/A'))}
+- Total Debt: {self._format_num(info.get('totalDebt', 'N/A'))}
+- Total Cash: {self._format_num(info.get('totalCash', 'N/A'))}
+- Total Equity: {self._format_num(latest_bs_data.get('Total Equity', 'N/A'))}
+- Debt to Equity: {self._format_num(info.get('debtToEquity', 'N/A'))}
+
+#### Key Income Statement Items
+- Gross Profits: {self._format_num(latest_is_data.get('Gross Profit', 'N/A'))}
+- Operating Income: {self._format_num(latest_is_data.get('Operating Income', 'N/A'))}
+- Net Income: {self._format_num(latest_is_data.get('Net Income', 'N/A'))}
+- Earnings Growth (YoY): {self._format_num(info.get('earningsQuarterlyGrowth', 'N/A'))}%
+"""
+
+                # Ensure the LLM knows to use BuffetBot's persona and clearly mark AI insights
+                system_message = """
+As an AI-powered stock analyst named BuffetBot, your goal is to provide concise, clear, and actionable investment insights. When giving recommendations, state "BuffetBot recommends" instead of "I recommend". Clearly mark your analysis as "AI Insights:" at the beginning of your comprehensive insight. Focus on explaining key numbers, financial health, growth metrics, economic moat significance, and overall investment potential. Do not include "What to look for" sections for charts or specific metrics, as these will be provided separately. Avoid garbled text or incomplete sentences.
+"""
+
+                llm_prompt = self._generate_llm_prompt(ticker, {
+                    'info': info,
+                    'moat_summary': moat_summary_text,
+                    'serper_results': serper_results_str,
+                    'financial_data_str': financial_data_str
+                })
+
+                llm_insights = generate_llm_response_modal.remote(
+                    system_message=system_message,
+                    prompt=llm_prompt
+                )
+            except Exception as llm_e:
+                self.logger.warning(f"Could not get LLM insights for {ticker}: {llm_e}")
+                llm_insights = "AI insights temporarily unavailable. Please ensure Modal LLM service is deployed and accessible and Modal API keys are set as Hugging Face Space secrets."
+
+            # Generate analysis text
+            analysis = self._generate_analysis(ticker, info, moat_summary_text, moat_rating, llm_insights, latest_bs_data, latest_is_data, latest_revenue, latest_fcf, latest_shares)
+
+            return {
+                "analysis": analysis,
+                "revenue_chart": revenue_chart_json,
+                "fcf_chart": fcf_chart_json,
+                "shares_chart": shares_chart_json
+            }
+
+        except YFinanceRateLimitError as e:
+            self.logger.error(f"Yfinance rate limit hit for {ticker}: {e}")
+            return {"analysis": f"Error analyzing stock: {e}. Charts may be unavailable.", "revenue_chart": None, "fcf_chart": None, "shares_chart": None}
+        except requests.exceptions.HTTPError as e:
+            if e.response.status_code == 429:
+                # This case should now be handled by YFinanceRateLimitError
+                self.logger.error(f"Unexpected 429 HTTP Error for {ticker}: {e}")
+                return {"analysis": f"Error analyzing stock: You have been rate-limited by Yahoo Finance. Please wait a few minutes and try again. Charts may be unavailable.", "revenue_chart": None, "fcf_chart": None, "shares_chart": None}
+            else:
+                self.logger.error(f"HTTP error fetching data for {ticker}: {e}")
+                return {"analysis": f"Error analyzing stock: An HTTP error occurred: {e}. Charts may be unavailable.", "revenue_chart": None, "fcf_chart": None, "shares_chart": None}
+        except Exception as e:
+            self.logger.error(f"Error analyzing stock {ticker}: {e}", exc_info=True)  # Log full traceback
+            return {"analysis": f"An unexpected error occurred while analyzing {ticker}: {str(e)}. Charts may be unavailable.", "revenue_chart": None, "fcf_chart": None, "shares_chart": None}
+
+    def _create_revenue_chart(self, revenue_data):
+        """Create revenue growth chart."""
+        if revenue_data is None or revenue_data.empty:
+            return None
+
+        fig = go.Figure()
+        fig.add_trace(go.Scatter(
+            x=revenue_data.index,
+            y=revenue_data['Revenue'],
+            mode='lines+markers',
+            name='Revenue'
+        ))
+
+        fig.update_layout(
+            title='Revenue Growth',
+            xaxis_title='Year',
+            yaxis_title='Revenue (USD)',
+            template='plotly_white'
+        )
+
+        return fig
+
+    def _create_fcf_chart(self, fcf_data):
+        """Create free cash flow chart."""
+        if fcf_data is None or fcf_data.empty:
+            return None
+
+        fig = go.Figure()
+        fig.add_trace(go.Scatter(
+            x=fcf_data.index,
+            y=fcf_data['FCF'],
+            mode='lines+markers',
+            name='Free Cash Flow'
+        ))
+
+        fig.update_layout(
+            title='Free Cash Flow',
+            xaxis_title='Year',
+            yaxis_title='Free Cash Flow (USD)',
+            template='plotly_white'
+        )
+
+        return fig
+
+    def _create_shares_chart(self, shares_data):
+        """Create shares outstanding chart."""
+        if shares_data is None or shares_data.empty:
+            return None
+
+        fig = go.Figure()
+        fig.add_trace(go.Scatter(
+            x=shares_data.index,
+            y=shares_data['Shares'],
+            mode='lines+markers',
+            name='Shares Outstanding'
+        ))
+
+        fig.update_layout(
+            title='Shares Outstanding',
+            xaxis_title='Year',
+            yaxis_title='Shares (millions)',
+            template='plotly_white'
+        )
+
+        return fig
+
+    def _generate_analysis(self, ticker, info, moat_summary, moat_rating, llm_insights, latest_bs_data, latest_is_data, latest_revenue, latest_fcf, latest_shares):
+        """Generate analysis text."""
+        data_as_of_timestamp = info.get('regularMarketTime')
+        data_as_of_date = datetime.fromtimestamp(data_as_of_timestamp).strftime('%Y-%m-%d %H:%M:%S') if data_as_of_timestamp else 'N/A'
+
+        # Initialize serper_results_str
+        serper_results_str = ""
+
+        analysis = f"""
+# {info.get('longName', ticker)} ({ticker}) Analysis
+
+*Data as of: {data_as_of_date}*
+
+## Company Overview
+- Sector: {info.get('sector', 'N/A')}
+- Industry: {info.get('industry', 'N/A')}
+- Full-time Employees: {self._format_num(info.get('fullTimeEmployees', 'N/A'))}
+
+## Current Status
+- Current Price: {self._format_num(info.get('currentPrice', 'N/A'))}
+- 52 Week High: {self._format_num(info.get('fiftyTwoWeekHigh', 'N/A'))}
+- 52 Week Low: {self._format_num(info.get('fiftyTwoWeekLow', 'N/A'))}
+- Market Cap: {self._format_num(info.get('marketCap', 'N/A'))}
+
+## Key Valuation Metrics
+- P/E Ratio: {self._format_num(info.get('trailingPE', 'N/A'))}
+- Forward P/E: {self._format_num(info.get('forwardPE', 'N/A'))}
+- PEG Ratio: {self._format_num(info.get('pegRatio', 'N/A'))}
+- Dividend Yield: {self._format_num(info.get('dividendYield', 'N/A'))}%
+
+## Growth and Profitability
+- Revenue Growth (YoY): {self._format_num(info.get('revenueGrowth', 'N/A'))}%
+- Profit Margins: {self._format_num(info.get('profitMargins', 'N/A'))}%
+- Operating Margins: {self._format_num(info.get('operatingMargins', 'N/A'))}%
+
+## Financial Health Indicators
+- Current Ratio: {self._format_num(info.get('currentRatio', 'N/A'))}
+- Debt to Equity: {self._format_num(info.get('debtToEquity', 'N/A'))}
+- Return on Equity: {self._format_num(info.get('returnOnEquity', 'N/A'))}%
+- Debt to Asset Ratio: {self._format_num(info.get('totalDebt', 0) / info.get('totalAssets', 1) if info.get('totalAssets') else 'N/A')}
+
+#### Recent Financials (Latest Available)
+- Latest Revenue: {self._format_num(latest_revenue)}
+- Latest Free Cash Flow: {self._format_num(latest_fcf)}
+- Latest Shares Outstanding: {self._format_num(latest_shares)}
+
+#### Key Balance Sheet Items
+- Total Assets: {self._format_num(latest_bs_data.get('Total Assets', 'N/A'))}
+- Total Liabilities: {self._format_num(latest_bs_data.get('Total Liabilities', 'N/A'))}
+- Total Debt: {self._format_num(info.get('totalDebt', 'N/A'))}
+- Total Cash: {self._format_num(info.get('totalCash', 'N/A'))}
+- Total Equity: {self._format_num(latest_bs_data.get('Total Equity', 'N/A'))}
+- Debt to Equity: {self._format_num(info.get('debtToEquity', 'N/A'))}
+
+#### Key Income Statement Items
+- Gross Profits: {self._format_num(latest_is_data.get('Gross Profit', 'N/A'))}
+- Operating Income: {self._format_num(latest_is_data.get('Operating Income', 'N/A'))}
+- Net Income: {self._format_num(latest_is_data.get('Net Income', 'N/A'))}
+- Earnings Growth (YoY): {self._format_num(info.get('earningsQuarterlyGrowth', 'N/A'))}%
+
+## Economic Moat Analysis
+{moat_summary}
+{serper_results_str}
+
+## AI Insights
+{llm_insights}
+"""
+
+        return analysis
+
+    def _generate_llm_prompt(self, ticker: str, data: Dict[str, Any]) -> str:
+        """Generate a focused prompt for the LLM."""
+        info = data.get('info', {})
+        moat_summary_text = data.get('moat_summary', 'N/A')
+        serper_results_str = data.get('serper_results', 'N/A')
+        financial_data_str = data.get('financial_data_str', 'N/A')
+
+        llm_prompt = f"""
+Analyze {ticker} ({info.get('longName', ticker)}) as an investment opportunity.
+
+Key Metrics:
+- Price: ${self._format_num(info.get('currentPrice', 'N/A'))}
+- P/E: {self._format_num(info.get('trailingPE', 'N/A'))}
+- Revenue Growth: {self._format_num(info.get('revenueGrowth', 'N/A'))}%
+- Profit Margin: {self._format_num(info.get('profitMargins', 'N/A'))}%
+- Debt/Equity: {self._format_num(info.get('debtToEquity', 'N/A'))}
+- ROE: {self._format_num(info.get('returnOnEquity', 'N/A'))}%
+
+Moat Analysis:
+{moat_summary_text}
+{serper_results_str}
+
+Provide a concise analysis focusing on:
+1. Key strengths and risks
+2. Economic moat significance
+3. Clear investment recommendation
+
+Keep your response under 200 words.
+"""
+        return llm_prompt
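A minimal usage sketch of the class above (not part of the commit): it assumes the sibling `backend` modules (config, data_collector, moat_analyzer) are importable and that the Modal app "buffetbot-llm" is deployed; without it, the report falls back to "AI insights temporarily unavailable." "MSFT" is an arbitrary example ticker.

```python
# Illustrative only; ticker and deployment state are assumptions.
from backend.stock_analyzer import StockAnalyzer

analyzer = StockAnalyzer()
result = analyzer.analyze("MSFT")

print(result["analysis"][:500])         # markdown report (truncated here for brevity)
print(result["revenue_chart"] is None)  # Plotly figure serialized to JSON, or None if data was missing
```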
modal_llm_service.py
ADDED
@@ -0,0 +1,107 @@
+import modal
+
+# Define the container image with vLLM
+vllm_image = (
+    modal.Image.debian_slim(python_version="3.12")
+    .pip_install(
+        "vllm==0.7.2",
+        "huggingface_hub[hf_transfer]==0.26.2",
+        "flashinfer-python==0.2.0.post2",
+        extra_index_url="https://flashinfer.ai/whl/cu124/torch2.5",
+    )
+    .env({"HF_HUB_ENABLE_HF_TRANSFER": "1"})
+)
+
+# Enable vLLM V1 engine for better performance
+vllm_image = vllm_image.env({"VLLM_USE_V1": "1"})
+
+# Model configuration
+MODELS_DIR = "/llamas"
+MODEL_NAME = "neuralmagic/Meta-Llama-3.1-8B-Instruct-quantized.w4a16"
+MODEL_REVISION = "a7c09948d9a632c2c840722f519672cd94af885d"
+
+# Set up caching volumes
+hf_cache_vol = modal.Volume.from_name("huggingface-cache", create_if_missing=True)
+vllm_cache_vol = modal.Volume.from_name("vllm-cache", create_if_missing=True)
+
+# Create Modal app
+app = modal.App("buffetbot-llm-service")
+
+# Configuration
+N_GPU = 1  # Number of GPUs to use
+API_KEY = "buffetbot-llm-key"  # We'll replace this with a Modal Secret
+MINUTES = 60
+VLLM_PORT = 8000
+
+@app.function(
+    image=vllm_image,
+    gpu=f"H100:{N_GPU}",
+    scaledown_window=15 * MINUTES,
+    timeout=10 * MINUTES,
+    volumes={
+        "/root/.cache/huggingface": hf_cache_vol,
+        "/root/.cache/vllm": vllm_cache_vol,
+    },
+    secrets=[modal.Secret.from_name("buffetbot-vllm-config")]
+)
+@modal.concurrent(max_inputs=100)
+@modal.web_server(port=VLLM_PORT, startup_timeout=5 * MINUTES)
+def serve():
+    import subprocess
+    import os
+
+    cmd = [
+        "vllm",
+        "serve",
+        "--uvicorn-log-level=info",
+        MODEL_NAME,
+        "--revision",
+        MODEL_REVISION,
+        "--host",
+        "0.0.0.0",
+        "--port",
+        str(VLLM_PORT),
+        "--api-key",
+        os.environ["API_KEY"],
+    ]
+
+    subprocess.Popen(" ".join(cmd), shell=True)
+
+@app.local_entrypoint()
+def test(test_timeout=10 * MINUTES):
+    import json
+    import time
+    import urllib.request
+
+    print(f"Running health check for server at {serve.get_web_url()}")
+    up, start, delay = False, time.time(), 10
+    while not up:
+        try:
+            with urllib.request.urlopen(serve.get_web_url() + "/health") as response:
+                if response.getcode() == 200:
+                    up = True
+        except Exception:
+            if time.time() - start > test_timeout:
+                break
+            time.sleep(delay)
+
+    assert up, f"Failed health check for server at {serve.get_web_url()}"
+    print(f"Successful health check for server at {serve.get_web_url()}")
+
+    # Test with a sample prompt
+    messages = [{"role": "user", "content": "What is the economic moat of Apple Inc.?"}]
+    print(f"Sending a sample message to {serve.get_web_url()}", *messages, sep="\n")
+
+    headers = {
+        "Authorization": f"Bearer {API_KEY}",
+        "Content-Type": "application/json",
+    }
+    payload = json.dumps({"messages": messages, "model": MODEL_NAME})
+    req = urllib.request.Request(
+        serve.get_web_url() + "/v1/chat/completions",
+        data=payload.encode("utf-8"),
+        headers=headers,
+        method="POST",
+    )
+    with urllib.request.urlopen(req) as response:
+        print(json.loads(response.read().decode()))
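Once deployed, the web server above exposes vLLM's OpenAI-compatible API. A hedged sketch of calling it from any client (not part of the commit): the base URL is a placeholder for whatever URL Modal prints after `modal deploy modal_llm_service.py`, and the bearer token must match the API_KEY value stored in the "buffetbot-vllm-config" secret.

```python
# Illustrative only; BASE_URL and the bearer token are placeholders you must substitute.
import json
import requests  # already listed in requirements.txt

BASE_URL = "https://<your-workspace>--buffetbot-llm-service-serve.modal.run"  # placeholder

resp = requests.post(
    f"{BASE_URL}/v1/chat/completions",  # standard OpenAI-compatible route served by vLLM
    headers={"Authorization": "Bearer <API_KEY>", "Content-Type": "application/json"},
    json={
        "model": "neuralmagic/Meta-Llama-3.1-8B-Instruct-quantized.w4a16",
        "messages": [{"role": "user", "content": "What is the economic moat of Apple Inc.?"}],
    },
    timeout=300,  # generous timeout to allow for container cold start
)
print(json.dumps(resp.json(), indent=2))
```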
modal_mcp_server.py
ADDED
@@ -0,0 +1,35 @@
+import modal
+import os
+from dotenv import load_dotenv
+
+# Ensure environment variables are loaded for local testing and deployment to Modal
+load_dotenv()
+
+# Define a Modal App
+# You can give it a more specific name if you like, e.g., "buffetbot-mcp-server"
+app = modal.App("buffetbot-mcp-server")
+
+# Define the Modal Image
+# This image will include all the necessary backend files and Python packages.
+# The 'backend' directory and its contents are added to the image.
+# We also install all packages from requirements.txt.
+buffetbot_image = modal.Image.debian_slim(python_version="3.10").pip_install_from_requirements("requirements.txt").add_local_dir(
+    local_path="backend",
+    remote_path="/root/backend"
+)
+
+# Import the FastAPI app from backend.mcp_server
+# This import needs to happen within the Modal image context,
+# so we import it after defining the image and copying the files.
+with buffetbot_image.imports():
+    from backend.mcp_server import MCPStockServer
+
+@app.function(image=buffetbot_image, secrets=[
+    modal.Secret.from_name("buffetbot-mcp-server-secrets"),
+    modal.Secret.from_name("buffetbot-serper-config")
+])
+@modal.asgi_app()
+def fastapi_app():
+    # Instantiate the MCPStockServer, which contains the FastAPI app
+    server = MCPStockServer()
+    return server.app
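A hedged smoke-test sketch for the ASGI app above (not part of the commit): deployment happens from the CLI with `modal deploy modal_mcp_server.py`, after which Modal prints the public URL. The URL below is a placeholder, and "/docs" is FastAPI's built-in OpenAPI UI (available unless MCPStockServer disables it), not a BuffetBot-specific route.

```python
# Illustrative only; MCP_URL is a placeholder for the URL Modal prints at deploy time.
import requests

MCP_URL = "https://<your-workspace>--buffetbot-mcp-server-fastapi-app.modal.run"  # placeholder

resp = requests.get(f"{MCP_URL}/docs", timeout=60)
print(resp.status_code)  # expect 200 once the container has cold-started
```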
requirements.txt
ADDED
@@ -0,0 +1,13 @@
+gradio
+yfinance
+pandas
+plotly
+requests
+beautifulsoup4
+python-dotenv
+jinja2
+modal
+fastapi
+uvicorn
+pydantic
+kaleido