shenchucheng committed
Commit · 6a3fde1
1 Parent(s): 0f9c423

format error msg
app.py
CHANGED
@@ -7,8 +7,8 @@ from collections import deque
 import contextlib
 from functools import partial
 import shutil
-import urllib.parse
 from datetime import datetime
+import traceback
 import uuid
 from enum import Enum
 from metagpt.logs import set_llm_stream_logfunc
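Note: the import swap tracks the rest of the diff. urllib.parse is no longer needed once the prompt properties below return plain JSON instead of URL-quoting it, and traceback is imported for the new error-formatting branch in create_message.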
@@ -40,6 +40,7 @@ class SentenceType(Enum):
     TEXT = "text"
     HIHT = "hint"
     ACTION = "action"
+    ERROR = "error"
 
 
 class MessageStatus(Enum):
@@ -63,7 +64,7 @@ class Sentences(BaseModel):
     role: Optional[str] = None
     skill: Optional[str] = None
     description: Optional[str] = None
-    timestamp: str = datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f%z")
+    timestamp: str = Field(default_factory=lambda: datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f%z"))
     status: str
     contents: list[dict]
 
@@ -84,14 +85,14 @@ class ThinkActStep(BaseModel):
     id: str
     status: str
     title: str
-    timestamp: str
+    timestamp: str = Field(default_factory=lambda: datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f%z"))
     description: str
     content: Sentence = None
 
 
 class ThinkActPrompt(BaseModel):
     message_id: int = None
-    timestamp: str = datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f%z")
+    timestamp: str = Field(default_factory=lambda: datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f%z"))
     step: ThinkActStep = None
     skill: Optional[str] = None
     role: Optional[str] = None
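Note on the timestamp changes above: a bare datetime.now().strftime(...) default is evaluated once, when the model class is defined, so every Sentences/ThinkActStep/ThinkActPrompt instance would share the import-time timestamp; Field(default_factory=...) re-evaluates it per instance. A minimal sketch of the difference (class names are illustrative; assumes pydantic v1, which the .json()/.dict() calls elsewhere in this file suggest):

    import time
    from datetime import datetime

    from pydantic import BaseModel, Field


    class FrozenTs(BaseModel):
        # default is computed once, at class-definition time
        ts: str = datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f%z")


    class FreshTs(BaseModel):
        # default_factory is called again for every new instance
        ts: str = Field(default_factory=lambda: datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f%z"))


    a = FrozenTs()
    time.sleep(0.01)
    b = FrozenTs()
    assert a.ts == b.ts  # both carry the stale import-time value

    c = FreshTs()
    time.sleep(0.01)
    d = FreshTs()
    assert c.ts != d.ts  # each message gets its own timestamp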
@@ -101,7 +102,6 @@ class ThinkActPrompt(BaseModel):
             id=str(tc_id),
             status="running",
             title=action.desc,
-            timestamp=datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f%z"),
             description=action.desc,
         )
 
@@ -109,7 +109,7 @@ class ThinkActPrompt(BaseModel):
         if is_finished:
             self.step.status = "finish"
         self.step.content = Sentence(
-            type=
+            type=SentenceType.TEXT.value,
             id=str(1),
             value=SentenceValue(answer=message.content if is_finished else message),
             is_finished=is_finished,
@@ -121,19 +121,30 @@ class ThinkActPrompt(BaseModel):
 
     @property
     def prompt(self):
-
-        return urllib.parse.quote(v)
+        return self.json(exclude_unset=True)
 
 
 class MessageJsonModel(BaseModel):
     steps: list[Sentences]
     qa_type: str
-    created_at: datetime = datetime.now
-    query_time: datetime = datetime.now
-    answer_time: datetime = datetime.now
+    created_at: datetime = Field(default_factory=datetime.now)
+    query_time: datetime = Field(default_factory=datetime.now)
+    answer_time: datetime = Field(default_factory=datetime.now)
     score: Optional[int] = None
     feedback: Optional[str] = None
 
+    def add_think_act(self, think_act_prompt: ThinkActPrompt):
+        s = Sentences(
+            action=think_act_prompt.step.title,
+            skill=think_act_prompt.skill,
+            description=think_act_prompt.step.description,
+            timestamp=think_act_prompt.timestamp,
+            status=think_act_prompt.step.status,
+            contents=[think_act_prompt.step.content.dict()],
+        )
+        self.steps.append(s)
+
+
     def add_think_act(self, think_act_prompt: ThinkActPrompt):
         s = Sentences(
             action=think_act_prompt.step.title,
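Note on the prompt property above: the payload streamed to the front-end is now the model serialized with .json(exclude_unset=True) rather than a URL-quoted string. A small illustration of what that returns (class and field names are made up; assumes pydantic v1):

    from typing import Optional

    from pydantic import BaseModel


    class Demo(BaseModel):
        message_id: Optional[int] = None
        skill: Optional[str] = None

        @property
        def prompt(self) -> str:
            # exclude_unset drops fields the caller never set explicitly,
            # keeping each server-sent chunk small
            return self.json(exclude_unset=True)


    print(Demo(skill="text_to_image").prompt)  # {"skill": "text_to_image"}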
@@ -147,8 +158,7 @@ class MessageJsonModel(BaseModel):
 
     @property
     def prompt(self):
-
-        return urllib.parse.quote(v)
+        return self.json(exclude_unset=True)
 
 
 async def create_message(req_model: NewMsg, request: Request):
@@ -201,15 +211,36 @@ async def create_message(req_model: NewMsg, request: Request):
                     break
 
                 await asyncio.sleep(0.5)
+            else:
+                task.cancel()
+                return
 
             act_result = await task
             think_act_prompt.update_act(act_result)
             yield think_act_prompt.prompt + "\n\n"
             answer.add_think_act(think_act_prompt)
         yield answer.prompt + "\n\n"  # Notify the front-end that the message is complete.
+    except Exception as ex:
+        description = str(ex)
+        answer = f"```python\n\n{traceback.format_exc()}\n\n```"
+        step = ThinkActStep(
+            id=tc_id,
+            status="failed",
+            title=description,
+            description=description,
+            content=Sentence(
+                type=SentenceType.ERROR.value,
+                id=1,
+                value=SentenceValue(answer=answer),
+                is_finished=True
+            ),
+        )
+        think_act_prompt = ThinkActPrompt(step=step)
+        yield think_act_prompt.prompt + "\n\n"
     finally:
         shutil.rmtree(CONFIG.WORKSPACE_PATH)
 
+
 default_llm_stream_log = partial(print, end="")
 
 
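Note on the last hunk, which is what the commit message refers to: the new for/else branch cancels the still-pending act task and returns if the polling loop finishes without hitting break, and the new except branch turns any exception into a "failed" ThinkActStep whose answer is the full traceback wrapped in a fenced python block (using the new SentenceType.ERROR member), yielded to the front-end before the workspace is removed in finally. A stripped-down sketch of the error formatting (the helper name is illustrative, not from the app):

    import traceback


    def format_error_step(ex: Exception, tc_id: int) -> dict:
        # same fields the diff sets on ThinkActStep for a failure
        return {
            "id": str(tc_id),
            "status": "failed",
            "title": str(ex),
            "description": str(ex),
            "answer": f"```python\n\n{traceback.format_exc()}\n\n```",
        }


    try:
        raise ValueError("demo failure")
    except Exception as ex:
        print(format_error_step(ex, tc_id=1)["answer"])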