Compare commits

...

2 Commits

Author | SHA1 | Message | Date
guo zebin
6eaec96532 Merge branch 'main' of https://github.com/xszyou/Fay 2026-03-11 22:18:50 +08:00
guo zebin
2ee729b120 1. Build the Fay release installer package;
2. Improve newline handling before TTS;
3. Fix the prompt template import logic.
2026-03-11 22:17:53 +08:00
18 changed files with 559 additions and 201 deletions

View File

@@ -103,21 +103,13 @@ https://www.compshare.cn/images/compshareImage-1cft3sk9gvta?ytag=GPU_fay
### ***Use a digital human (optional)***
https://qqk9ntwbcit.feishu.cn/wiki/GHevwqxwfiX4hCk8yJCcoJ54nqg
ue: https://github.com/xszyou/fay-ue5
unity: https://qqk9ntwbcit.feishu.cn/wiki/Se9xw04hUiss00kb2Lmci1BVnM9
metahuman-stream (2d): https://qqk9ntwbcit.feishu.cn/wiki/Ik1kwO9X5iilnGkFwRhcnmtvn3e
duix (android): https://qqk9ntwbcit.feishu.cn/wiki/Ik1kwO9X5iilnGkFwRhcnmtvn3e
aibote (Windows CPU digital clone): https://qqk9ntwbcit.feishu.cn/wiki/ULaywzVRti0HXWkhCzacoSPAnIg
### ***Integrate into your own product (optional)***
API: https://qqk9ntwbcit.feishu.cn/wiki/Mcw3wbA3RiNZzwkexz6cnKCsnhh
@@ -125,7 +117,7 @@ aibote (Windows CPU digital clone): https://qqk9ntwbcit.feishu.cn/wiki/ULaywzVRti0HXW
**Community group & tutorials**: follow the WeChat official account **fay数字人** (**please star this repository first**)
![](readme/gzh.jpg)
![](readme/gzh.png)
**WeChat group**:

View File

@@ -374,13 +374,51 @@ class FeiFei:
return filtered_text
def __normalize_tts_text(self, text):
if text is None:
return text
text = text.replace("\u3000", " ")
raw_lines = re.split(r"\r\n|\r|\n+", text)
lines = []
for line in raw_lines:
normalized_line = re.sub(r"\s+", " ", line).strip()
normalized_line = re.sub(r"\s+([,。!?;:、,.!?;:])", r"\1", normalized_line)
if normalized_line:
lines.append(normalized_line)
if not lines:
return ""
merged_text = lines[0]
for next_line in lines[1:]:
merged_text += self.__get_tts_line_separator(merged_text, next_line)
merged_text += next_line
return re.sub(r"\s+", " ", merged_text).strip()
def __get_tts_line_separator(self, previous_text, next_text):
sentence_endings = ("。", "!", "?", "!", "?", "…", ";", ";")
pause_endings = ("、", ",", ",", ":", ":")
previous_text = previous_text.rstrip()
if not previous_text:
return ""
if previous_text.endswith(sentence_endings) or previous_text.endswith(pause_endings):
return ""
if self.__contains_cjk(previous_text) or self.__contains_cjk(next_text):
return ""
return ". "
def __contains_cjk(self, text):
return re.search(r"[\u3400-\u4dbf\u4e00-\u9fff\uf900-\ufaff]", text or "") is not None
def __process_stream_output(self, text, username, session_type="type2_stream", is_qa=False):
"""
@@ -614,22 +652,40 @@ class FeiFei:
# Dynamically invoke different NLP modules based on configuration
if cfg.config["memory"].get("use_bionic_memory", False):
from llm import nlp_bionicmemory_stream
text = nlp_bionicmemory_stream.question(interact.data["msg"], username, interact.data.get("observation", None))
else:
from llm import nlp_cognitive_stream
text = nlp_cognitive_stream.question(interact.data["msg"], username, interact.data.get("observation", None))
if cfg.config["memory"].get("use_bionic_memory", False):
try:
from llm import nlp_bionicmemory_stream
text = nlp_bionicmemory_stream.question(interact.data["msg"], username, interact.data.get("observation", None))
except Exception as exc:
util.log(1, f"Bionic memory pipeline unavailable, fallback to cognitive mode: {exc}")
cfg.config.setdefault("memory", {})["use_bionic_memory"] = False
from llm import nlp_cognitive_stream
text = nlp_cognitive_stream.question(interact.data["msg"], username, interact.data.get("observation", None))
else:
from llm import nlp_cognitive_stream
text = nlp_cognitive_stream.question(interact.data["msg"], username, interact.data.get("observation", None))
@@ -1569,7 +1625,8 @@ class FeiFei:
# Filter out emoji first, then synthesize the speech
filtered_text = self.__remove_emojis(tts_text.replace("*", ""))
filtered_text = self.__normalize_tts_text(filtered_text)
if filtered_text is not None and filtered_text.strip() != "":
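As an editorial aside, here is a minimal standalone sketch of the normalization rules the new helpers implement, with module-level functions standing in for the `FeiFei` methods (behavior inferred from the hunk above, so treat it as illustrative):

import re

_CJK_RE = re.compile(r"[\u3400-\u4dbf\u4e00-\u9fff\uf900-\ufaff]")

def contains_cjk(text):
    return _CJK_RE.search(text or "") is not None

def line_separator(previous_text, next_text):
    previous_text = previous_text.rstrip()
    if not previous_text or previous_text[-1] in "。!?!?…;;、,,::":
        return ""  # empty, or already ends in a pause character
    # CJK text gets a fullwidth stop; Latin text gets ". " so TTS pauses between lines
    return "。" if contains_cjk(previous_text) or contains_cjk(next_text) else ". "

def normalize_tts_text(text):
    if text is None:
        return text
    raw = re.split(r"\r\n|\r|\n+", text.replace("\u3000", " "))
    lines = [re.sub(r"\s+", " ", ln).strip() for ln in raw]
    merged = ""
    for ln in filter(None, lines):
        if merged:
            merged += line_separator(merged, ln)
        merged += ln
    return merged

print(normalize_tts_text("Hello world\nhow are you"))  # Hello world. how are you
print(normalize_tts_text("你好\n世界"))                  # 你好。世界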

View File

@@ -24,10 +24,115 @@ except Exception:
StdioServerParameters = None
HAS_STDIO = False
logger = logging.getLogger(__name__)
def _runtime_root_dir() -> str:
if getattr(sys, "frozen", False):
return os.path.abspath(os.path.dirname(sys.executable))
return os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
def _normalize_rel_path(path_value: Optional[str]) -> str:
return str(path_value or "").replace("\\", "/").strip().lower()
def _path_matches(path_value: Optional[str], expected_suffix: str) -> bool:
normalized = _normalize_rel_path(path_value)
suffix = _normalize_rel_path(expected_suffix)
if not normalized or not suffix:
return False
return normalized == suffix or normalized.endswith("/" + suffix)
def _is_python_command(command: Any) -> bool:
command_text = str(command or "").strip().lower()
if not command_text:
return False
if command_text in {"python", "python.exe", "pythonw", "pythonw.exe"}:
return True
return os.path.basename(command_text) in {"python", "python.exe", "pythonw", "pythonw.exe"}
def _resolve_existing_path(path_value: Optional[str], *base_dirs: Optional[str]) -> Optional[str]:
if not isinstance(path_value, str) or not path_value.strip():
return None
candidate = path_value.strip()
if os.path.isabs(candidate):
return candidate if os.path.exists(candidate) else None
for base_dir in base_dirs:
if not base_dir:
continue
abs_path = os.path.abspath(os.path.join(base_dir, candidate))
if os.path.exists(abs_path):
return abs_path
return None
_PACKAGED_STDIO_SERVERS = [
{
"script": "test/mcp_stdio_example.py",
"cwd": "",
"exe_relpath": os.path.join("mcp_bin", "mcp_stdio_example", "mcp_stdio_example.exe"),
},
{
"script": "mcp_servers/schedule_manager/server.py",
"cwd": "mcp_servers/schedule_manager",
"exe_relpath": os.path.join("mcp_bin", "schedule_manager_mcp", "schedule_manager_mcp.exe"),
},
{
"script": "mcp_servers/logseq/server.py",
"cwd": "mcp_servers/logseq",
"exe_relpath": os.path.join("mcp_bin", "logseq_mcp", "logseq_mcp.exe"),
},
{
"script": "mcp_servers/yueshen_rag/server.py",
"cwd": "mcp_servers/yueshen_rag",
"exe_relpath": os.path.join("mcp_bin", "yueshen_rag_mcp", "yueshen_rag_mcp.exe"),
},
{
"script": "mcp_servers/window_capture/server.py",
"cwd": "mcp_servers/window_capture",
"exe_relpath": os.path.join("mcp_bin", "window_capture_mcp", "window_capture_mcp.exe"),
},
{
"script": "mcp_servers/mcp-todo-server/server.py",
"cwd": "mcp_servers/mcp-todo-server",
"exe_relpath": os.path.join("mcp_bin", "todo_server_mcp", "todo_server_mcp.exe"),
},
{
"script": "mcp_servers/elderly_mcp/server.py",
"cwd": "mcp_servers/elderly_mcp",
"exe_relpath": os.path.join("mcp_bin", "elderly_mcp_server", "elderly_mcp_server.exe"),
},
]
def _resolve_packaged_stdio_binary(
runtime_root: str, command: Any, args: List[Any], cwd: Optional[str]
) -> Optional[Tuple[str, List[Any], str]]:
if not getattr(sys, "frozen", False):
return None
arg_paths = [str(arg) for arg in args if isinstance(arg, str) and arg and not str(arg).startswith("-")]
command_text = str(command or "")
for target in _PACKAGED_STDIO_SERVERS:
matched = any(_path_matches(arg, target["script"]) for arg in arg_paths)
if not matched and target["cwd"] and _path_matches(cwd, target["cwd"]):
if not arg_paths or any(os.path.basename(arg).lower() == "server.py" for arg in arg_paths):
matched = True
if not matched and _path_matches(command_text, target["exe_relpath"]):
matched = True
if not matched:
continue
exe_path = os.path.join(runtime_root, target["exe_relpath"])
if os.path.exists(exe_path):
return exe_path, [], os.path.dirname(exe_path)
return None
def _is_awaitable(obj: Any) -> bool:
try:
return inspect.isawaitable(obj)
except Exception:
@@ -79,11 +184,12 @@ class McpClient:
t.start()
self._loop_thread = t
self._stdio_errlog_file = None
self._manager_task: Optional[asyncio.Task] = None
self._disconnect_event: Optional[asyncio.Event] = None
self._connect_ready_future: Optional[asyncio.Future] = None
self._last_error: Optional[str] = None
self._resolved_stdio_config: Optional[Dict[str, Any]] = None
# tool availability cache
self.tools_refresh_interval = max(int(tools_refresh_interval), 5)
@@ -260,13 +366,43 @@ class McpClient:
logger.debug(f"Failed to refresh MCP tool cache: {exc}")
return False
def _clear_tool_cache(self) -> None:
with self._tools_lock:
self._tool_cache = []
self._tool_cache_timestamp = 0.0
self.tools = None
if self.server_id is not None:
tool_registry.mark_all_unavailable(self.server_id)
def _resolve_stdio_launch_config(self) -> Dict[str, Any]:
cfg = self.stdio_config or {}
runtime_root = _runtime_root_dir()
command = cfg.get("command") or sys.executable
args = list(cfg.get("args") or [])
env = cfg.get("env") or None
cwd = cfg.get("cwd") or None
if cwd and not os.path.isabs(cwd):
cwd = os.path.abspath(os.path.join(runtime_root, cwd))
packaged_launch = _resolve_packaged_stdio_binary(runtime_root, command, args, cwd)
if packaged_launch is not None:
command, args, cwd = packaged_launch
else:
if _is_python_command(command):
command = sys.executable
else:
resolved_command = _resolve_existing_path(command, cwd, runtime_root)
if resolved_command:
command = resolved_command
resolved_cfg = {
"command": command,
"args": args,
"env": env,
"cwd": cwd,
}
self._resolved_stdio_config = resolved_cfg
return resolved_cfg
async def _connect_async(self) -> Tuple[bool, Any]:
if self.connected and self.session:
@@ -311,26 +447,21 @@ class McpClient:
self.exit_stack = stack
try:
async with stack:
if self.transport == "stdio":
if not HAS_STDIO:
message = "Missing stdio-capable MCP client, run: pip install -U mcp"
self._last_error = message
if not ready_future.done():
ready_future.set_result((False, message))
return
cfg = self.stdio_config or {}
command = cfg.get("command") or sys.executable
if str(command).lower() == "python":
command = sys.executable
args = list(cfg.get("args") or [])
env = cfg.get("env") or None
cwd = cfg.get("cwd") or None
if cwd and not os.path.isabs(cwd):
repo_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
cwd = os.path.abspath(os.path.join(repo_root, cwd))
try:
log_dir = os.path.join(os.getcwd(), 'logs')
if self.transport == "stdio":
if not HAS_STDIO:
message = "Missing stdio-capable MCP client, run: pip install -U mcp"
self._last_error = message
if not ready_future.done():
ready_future.set_result((False, message))
return
cfg = self._resolve_stdio_launch_config()
command = cfg.get("command") or sys.executable
args = list(cfg.get("args") or [])
env = cfg.get("env") or None
cwd = cfg.get("cwd") or None
try:
log_dir = os.path.join(os.getcwd(), 'logs')
os.makedirs(log_dir, exist_ok=True)
base = os.path.basename(str(command))
log_path = os.path.join(log_dir, f"mcp_stdio_{base}.log")
@@ -383,15 +514,16 @@ class McpClient:
stdio_errlog.close()
except Exception:
pass
if self._stdio_errlog_file and self._stdio_errlog_file is not stdio_errlog:
try:
self._stdio_errlog_file.close()
except Exception:
pass
self._stdio_errlog_file = None
self._resolved_stdio_config = None
self._stop_refresh_worker()
self.connected = False
self.session = None
self._clear_tool_cache()
if not ready_future.done():
ready_future.set_result((False, self._last_error or "MCP server connection failed"))
@@ -446,12 +578,12 @@ class McpClient:
def _kill_stdio_process(self) -> None:
"""强制终止 stdio 子进程,确保子进程被完全清理"""
if self.transport != "stdio":
return
cfg = self._resolved_stdio_config or self.stdio_config or {}
command = cfg.get("command") or ""
args = cfg.get("args") or []
# Build keywords used to match the process
if args:
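For context, a hypothetical MCP server entry and the launch it would resolve to, per the `_resolve_packaged_stdio_servers` table and `_resolve_packaged_stdio_binary` above (values illustrative, not from the repo's config):

# Hypothetical stdio entry as it might appear in MCP configuration:
stdio_config = {
    "command": "python",                       # recognized by _is_python_command
    "args": ["mcp_servers/logseq/server.py"],  # matched against _PACKAGED_STDIO_SERVERS
    "cwd": "mcp_servers/logseq",
}

# In a frozen build where mcp_bin/logseq_mcp/logseq_mcp.exe exists, the resolved
# launch becomes roughly:
#   command = <runtime_root>/mcp_bin/logseq_mcp/logseq_mcp.exe
#   args    = []
#   cwd     = <runtime_root>/mcp_bin/logseq_mcp
# In a source checkout, "python" is swapped for sys.executable and a relative
# cwd is made absolute against the repo root.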

Binary file not shown.

Before

Width:  |  Height:  |  Size: 8.2 KiB

After

Width:  |  Height:  |  Size: 8.7 KiB

View File

@@ -1,15 +1,30 @@
import os
import sys
import time
from PyQt5.QtWidgets import *
from PyQt5.QtWidgets import QDialog, QHBoxLayout, QVBoxLayout
from PyQt5.QtWidgets import QGroupBox
from PyQt5.QtWebEngineWidgets import *
from PyQt5.QtCore import *
from PyQt5 import QtWidgets
from PyQt5.QtCore import QUrl, pyqtSignal
from PyQt5.QtGui import QIcon
from PyQt5.QtWebEngineWidgets import QWebEngineProfile, QWebEngineView
from PyQt5.QtWidgets import (
QAction,
QApplication,
QDialog,
QGroupBox,
QHBoxLayout,
QMainWindow,
QMenu,
QSystemTrayIcon,
QVBoxLayout,
)
from scheduler.thread_manager import MyThread
from scheduler.thread_manager import stopAll
def _resolve_runtime_dir():
if getattr(sys, "frozen", False):
return os.path.abspath(os.path.dirname(sys.executable))
return os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
class MainWindow(QMainWindow):
@@ -17,37 +32,117 @@ class MainWindow(QMainWindow):
def __init__(self):
super(MainWindow, self).__init__()
# self.setWindowFlags(Qt.WindowType.WindowShadeButtonHint)
self.setWindowTitle('FeiFei Alpha')
# self.setFixedSize(16 * 80, 9 * 80)
self._allow_close = False
self._shutdown_in_progress = False
self._tray_hint_shown = False
self._tray_icon = None
self.setWindowTitle("FeiFei Alpha")
self.setGeometry(0, 0, 16 * 70, 9 * 70)
self.showMaximized()
# self.center()
self.browser = QWebEngineView()
# Clear the cache
profile = QWebEngineProfile.defaultProfile()
profile.clearHttpCache()
self.browser.load(QUrl('http://127.0.0.1:5000'))
self.setCentralWidget(self.browser)
MyThread(target=self.runnable).start()
def runnable(self):
while True:
if not self.isVisible():
try:
# Shut the services down gracefully
import fay_booter
if fay_booter.is_running():
print("Window closed, stopping Fay services...")
fay_booter.stop()
time.sleep(0.5)  # give the services a moment to finish cleanup
print("Services stopped")
except BaseException as e:
print(f"Error during graceful shutdown: {e}")
finally:
# If graceful shutdown fails, force-terminate the process
os.system("taskkill /F /PID {}".format(os.getpid()))
time.sleep(0.05)
self.browser = QWebEngineView()
profile = QWebEngineProfile.defaultProfile()
profile.clearHttpCache()
self.browser.load(QUrl("http://127.0.0.1:5000"))
self.setCentralWidget(self.browser)
self._init_tray_icon()
def _resolve_app_icon(self):
runtime_dir = _resolve_runtime_dir()
for icon_name in ("favicon.ico", "icon.png"):
icon_path = os.path.join(runtime_dir, icon_name)
if os.path.exists(icon_path):
return QIcon(icon_path)
return self.windowIcon()
def _init_tray_icon(self):
if not QSystemTrayIcon.isSystemTrayAvailable():
return
tray_icon = QSystemTrayIcon(self)
tray_icon.setIcon(self._resolve_app_icon())
tray_icon.setToolTip("Fay")
tray_menu = QMenu(self)
show_action = QAction("Open Fay", self)
show_action.triggered.connect(self.show_from_tray)
tray_menu.addAction(show_action)
exit_action = QAction("Exit", self)
exit_action.triggered.connect(self.exit_from_tray)
tray_menu.addAction(exit_action)
tray_icon.setContextMenu(tray_menu)
tray_icon.activated.connect(self._on_tray_icon_activated)
tray_icon.show()
self._tray_icon = tray_icon
def _show_tray_message_once(self):
if self._tray_icon is None or self._tray_hint_shown:
return
self._tray_hint_shown = True
self._tray_icon.showMessage(
"Fay",
"Window minimized to tray. Double-click the tray icon to restore it.",
QSystemTrayIcon.Information,
3000,
)
def _shutdown_services(self):
if self._shutdown_in_progress:
return
self._shutdown_in_progress = True
try:
import fay_booter
if fay_booter.is_running():
print("Stopping Fay services...")
fay_booter.stop()
time.sleep(0.5)
except BaseException as exc:
print(f"Failed to stop Fay services: {exc}")
try:
stopAll()
except BaseException as exc:
print(f"Failed to stop background threads: {exc}")
def show_from_tray(self):
if self.isMinimized():
self.showNormal()
else:
self.show()
self.raise_()
self.activateWindow()
def exit_from_tray(self):
self._allow_close = True
if self._tray_icon is not None:
self._tray_icon.hide()
self._shutdown_services()
QApplication.instance().quit()
os._exit(0)
def _on_tray_icon_activated(self, reason):
if reason in (QSystemTrayIcon.Trigger, QSystemTrayIcon.DoubleClick):
self.show_from_tray()
def closeEvent(self, event):
if self._allow_close:
event.accept()
return
if self._tray_icon is None:
event.ignore()
self.exit_from_tray()
return
event.ignore()
self.hide()
self._show_tray_message_once()
def center(self):
screen = QtWidgets.QDesktopWidget().screenGeometry()
@@ -56,10 +151,6 @@ class MainWindow(QMainWindow):
def keyPressEvent(self, event):
pass
# if event.key() == Qt.Key_F12:
# self.s = TDevWindow()
# self.s.show()
# self.browser.page().setDevToolsPage(self.s.mpJSWebView.page())
def OnReceiveMessageFromJS(self, strParameter):
if not strParameter:
@@ -73,18 +164,18 @@ class TDevWindow(QDialog):
def init_ui(self):
self.mpJSWebView = QWebEngineView(self)
self.url = 'https://www.baidu.com/'
self.url = "https://www.baidu.com/"
self.mpJSWebView.page().load(QUrl(self.url))
self.mpJSWebView.show()
self.pJSTotalVLayout = QVBoxLayout()
self.pJSTotalVLayout.setSpacing(0)
self.pJSTotalVLayout.addWidget(self.mpJSWebView)
self.pWebGroup = QGroupBox('Web View', self)
self.pWebGroup = QGroupBox("Web View", self)
self.pWebGroup.setLayout(self.pJSTotalVLayout)
self.mainLayout = QHBoxLayout()
self.mainLayout.setSpacing(5)
self.mainLayout.addWidget(self.pWebGroup)
self.setLayout(self.mainLayout)
self.setMinimumSize(800, 800)

View File

@@ -7,12 +7,19 @@ import requests
import datetime
import schedule
import textwrap
from dataclasses import dataclass
from typing import Any, Callable, Dict, List, Literal, Optional, TypedDict, Tuple
from collections.abc import Mapping, Sequence
from langchain_openai import ChatOpenAI
from langchain_core.messages import HumanMessage, SystemMessage
try:
from langgraph.graph import END, START, StateGraph
_LANGGRAPH_AVAILABLE = True
except Exception:
END = None
START = None
StateGraph = None
_LANGGRAPH_AVAILABLE = False
# New: imports for the local knowledge base
import re
@@ -1056,8 +1063,10 @@ def _route_decision(state: AgentState) -> str:
return "call_tool" if state.get("status") == "needs_tool" else "end"
def _build_workflow_app() -> StateGraph:
if not _LANGGRAPH_AVAILABLE:
return None
graph = StateGraph(AgentState)
graph.add_node("plan_next", _plan_next_action)
graph.add_node("call_tool", _execute_tool)
graph.add_edge(START, "plan_next")
@@ -1073,7 +1082,7 @@ def _build_workflow_app() -> StateGraph:
return graph.compile()
_WORKFLOW_APP = _build_workflow_app() if _LANGGRAPH_AVAILABLE else None
def get_user_memory_dir(username=None):
"""根据配置决定是否按用户名隔离记忆目录"""
@@ -2017,10 +2026,13 @@ def question(content, username, observation=None):
except Exception as exc:
util.log(1, f"获取工具列表失败: {exc}")
mcp_tools = []
for tool_def in mcp_tools:
spec = _build_workflow_tool_spec(tool_def)
if spec:
tool_registry[spec.name] = spec
if tool_registry and not _LANGGRAPH_AVAILABLE:
util.log(1, "langgraph is unavailable, workflow tools are disabled and the app will use direct LLM mode.")
tool_registry = {}
try:
from utils.stream_state_manager import get_state_manager as _get_state_manager
@@ -2128,8 +2140,10 @@ def question(content, username, observation=None):
full_response_text += prestart_stream_text
is_first_sentence = False
def run_workflow(tool_registry: Dict[str, WorkflowToolSpec]) -> bool:
nonlocal accumulated_text, full_response_text, is_first_sentence, messages_buffer
if _WORKFLOW_APP is None:
return False
# Create a planner streaming callback to emit finish+message responses in real time
planner_stream_buffer = {"text": "", "first_chunk": True}
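Condensed, the optional-dependency pattern this file now follows looks like the sketch below; it is a simplification of the hunks above (the toy state and node are illustrative), not additional repo code:

from typing import TypedDict

# Guarded import: the graph is only built when langgraph is importable.
try:
    from langgraph.graph import END, START, StateGraph
    _LANGGRAPH_AVAILABLE = True
except Exception:
    END = START = StateGraph = None
    _LANGGRAPH_AVAILABLE = False

class ToyState(TypedDict):
    status: str

def build_workflow_app():
    if not _LANGGRAPH_AVAILABLE:
        return None  # callers treat None as "skip workflow tools, use direct LLM mode"
    graph = StateGraph(ToyState)
    graph.add_node("plan_next", lambda state: {"status": "done"})
    graph.add_edge(START, "plan_next")
    graph.add_edge("plan_next", END)
    return graph.compile()

app = build_workflow_app()
print(app.invoke({"status": "new"}) if app else "langgraph missing: direct LLM mode")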

main.py
View File

@@ -1,39 +1,45 @@
# Entry file: main
import os
import sys
def _resolve_runtime_dir():
if hasattr(sys, "_MEIPASS"):
return os.path.abspath(sys._MEIPASS)
return os.path.abspath(os.path.dirname(__file__))
_RUNTIME_DIR = _resolve_runtime_dir()
os.environ['PATH'] += os.pathsep + os.path.join(_RUNTIME_DIR, "test", "ovr_lipsync", "ffmpeg", "bin")
def _extract_config_center_id(argv):
for i, arg in enumerate(argv):
if arg in ("-config_center", "--config_center", "-center_config", "--center_config"):
if i + 1 < len(argv):
return argv[i + 1]
break
return None
def _preload_config_center(argv):
config_center_id = _extract_config_center_id(argv)
if config_center_id:
os.environ["FAY_CONFIG_CENTER_ID"] = config_center_id
_preload_config_center(sys.argv[1:])
import time
import psutil
import re
import argparse
import signal
import atexit
import threading
from utils import config_util, util
from asr import ali_nls
from core import wsa_server
from gui import flask_server
from core import content_db
import fay_booter
from scheduler.thread_manager import MyThread
from core.interact import Interact
# import sys, io, traceback
# class StdoutInterceptor(io.TextIOBase):
@@ -149,12 +155,17 @@ def __check_and_clear_chroma_db():
"""检查并清除ChromaDB数据库如果存在清除标记"""
try:
if config_util.config["memory"].get("use_bionic_memory", False):
from bionicmemory.core.chroma_service import ChromaService
try:
from bionicmemory.core.chroma_service import ChromaService
except Exception as exc:
util.log(1, f"Bionic memory is unavailable, fallback to cognitive mode: {exc}")
config_util.config.setdefault("memory", {})["use_bionic_memory"] = False
return
if ChromaService.check_and_clear_database_on_startup():
util.log(1, "检测到记忆清除标记已清除ChromaDB数据库")
except Exception as e:
util.log(1, f"清理ChromaDB时出错: {e}")
except Exception as e:
util.log(1, f"清理ChromaDB时出错: {e}")
def kill_process_by_port(port):
for conn in psutil.net_connections(kind='inet'):
@@ -247,10 +258,14 @@ if __name__ == '__main__':
# Start the HTTP server
flask_server.start()
# Start the MCP service
util.log(1, 'Starting MCP service...')
try:
from faymcp import mcp_service
except Exception as exc:
util.log(1, f"MCP service disabled: {exc}")
else:
MyThread(target=mcp_service.start).start()
# Listen on the console
util.log(1, 'Registering commands...')
@@ -266,21 +281,28 @@ if __name__ == '__main__':
parser = argparse.ArgumentParser(description="start auto-launch")
parser.add_argument('command', nargs='?', default='', help="start")
parser.add_argument(
'-config_center', '--config_center', '-center_config', '--center_config',
dest='config_center', default=None, help="config center project ID"
)
parsed_args = parser.parse_args()
# Packaged app should behave like "python main.py start" on double-click.
if not parsed_args.command and getattr(sys, 'frozen', False):
parsed_args.command = 'start'
if parsed_args.config_center:
os.environ["FAY_CONFIG_CENTER_ID"] = parsed_args.config_center
config_util.CONFIG_SERVER['PROJECT_ID'] = parsed_args.config_center
if parsed_args.command.lower() == 'start':
MyThread(target=fay_booter.start).start()
# In common mode, launch the desktop window
if config_util.start_mode == 'common':
app = QApplication(sys.argv)
app.setQuitOnLastWindowClosed(False)
app.setWindowIcon(QtGui.QIcon('icon.png'))
win = MainWindow()
time.sleep(1)
win.show()
app.exit(app.exec_())

View File

@@ -0,0 +1 @@
{}

View File

@@ -0,0 +1 @@
[]

memory/User/meta.json Normal file
View File

@@ -0,0 +1,3 @@
{
"id": "4f7b5c53-9d62-4977-88bb-dd7671217c8a"
}

Binary file not shown.

memory/fay.db Normal file

Binary file not shown.

memory/user_profiles.db Normal file

Binary file not shown.

readme/gzh.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.3 MiB

View File

@@ -1,7 +1,9 @@
from pathlib import Path
import sys
import os
OPENAI_API_KEY = "sk-hAuN7OLqKJTdyDjNFdEfF4B0E53642E4B2BbCa248594Cd29"
OPENAI_API_BASE = "https://api.zyai.online/v1" # can be changed to your custom base URL
OPENAI_API_BASE = "https://api.zyai.online/v1"
KEY_OWNER = "xszyou"
@@ -11,8 +13,16 @@ MAX_CHUNK_SIZE = 4
LLM_VERS = "gpt-4o-mini"
BASE_DIR = f"{Path(__file__).resolve().parent.parent}"
## To do: Are the following needed in the new structure? Ideally Populations_Dir is for the user to define.
POPULATIONS_DIR = f"{BASE_DIR}/agent_bank/populations"
LLM_PROMPT_DIR = f"{BASE_DIR}/simulation_engine/prompt_template"
def _resolve_base_dir():
if getattr(sys, "frozen", False):
if hasattr(sys, "_MEIPASS"):
return Path(sys._MEIPASS).resolve()
return Path(sys.executable).resolve().parent
return Path(__file__).resolve().parent.parent
BASE_DIR = str(_resolve_base_dir())
POPULATIONS_DIR = os.path.join(BASE_DIR, "agent_bank", "populations")
LLM_PROMPT_DIR = os.path.join(BASE_DIR, "simulation_engine", "prompt_template")

View File

@@ -0,0 +1,33 @@
<commentblockmarker>###</commentblockmarker>
<Background information about the Character>
!<INPUT 0>!
<End of background information about the Character>
=====
<Questions>
!<INPUT 1>!
<End of questions>
Task: Based on the character background above, answer each question as the character would. Keep the answers consistent
with the character description. For each question, provide a short reasoning sentence and then the final response.
Important:
- For categorical questions, the Response must be one of the listed options.
- For int or float questions, the Response must stay within the provided range.
- For open questions, keep the Response concise and stay within the requested character limit.
- Return valid JSON only.
Output format -- output your response in json, where you provide the following:
{
"1": {
"Q": "<repeat the question you are answering>",
"Reasoning": "<brief reasoning>",
"Response": "<final response>"
},
"2": {
"Q": "<repeat the question you are answering>",
"Reasoning": "<brief reasoning>",
"Response": "<final response>"
}
}
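A hedged sketch of how a template in this format is typically consumed; the `fill_template` helper and the canned model reply below are hypothetical, not from the repo:

import json

TEMPLATE = """<commentblockmarker>###</commentblockmarker>
Background: !<INPUT 0>!
Questions: !<INPUT 1>!"""

def fill_template(template, inputs):
    # Substitute !<INPUT 0>!, !<INPUT 1>!, ... and drop the loader's marker line.
    for i, value in enumerate(inputs):
        template = template.replace(f"!<INPUT {i}>!", str(value))
    return template.split("<commentblockmarker>###</commentblockmarker>")[-1].strip()

prompt = fill_template(TEMPLATE, ["A cheerful botanist.", "1. Favorite plant?"])
# A reply that follows the template's JSON contract parses like so:
reply = '{"1": {"Q": "Favorite plant?", "Reasoning": "Botanists love ferns.", "Response": "Ferns"}}'
print(json.loads(reply)["1"]["Response"])  # Ferns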

View File

@@ -2,28 +2,30 @@ from pathlib import Path
import sys
import os
# Add the project root directory to sys.path
BASE_DIR = f"{Path(__file__).resolve().parent.parent}"
def _resolve_base_dir():
if getattr(sys, "frozen", False):
if hasattr(sys, "_MEIPASS"):
return Path(sys._MEIPASS).resolve()
return Path(sys.executable).resolve().parent
return Path(__file__).resolve().parent.parent
BASE_DIR = str(_resolve_base_dir())
sys.path.append(BASE_DIR)
# Import the config utilities
from utils import config_util as cfg
# Make sure the configuration is loaded
cfg.load_config()
# Debug mode switch
DEBUG = False
# Read settings from system.conf
OPENAI_API_KEY = cfg.key_gpt_api_key
OPENAI_API_BASE = cfg.gpt_base_url
MAX_CHUNK_SIZE = 4
# Use the model engine configured in system.conf
LLM_VERS = cfg.gpt_model_engine
## To do: Are the following needed in the new structure? Ideally Populations_Dir is for the user to define.
POPULATIONS_DIR = f"{BASE_DIR}/agent_bank/populations"
LLM_PROMPT_DIR = f"{BASE_DIR}/simulation_engine/prompt_template"
POPULATIONS_DIR = os.path.join(BASE_DIR, "agent_bank", "populations")
LLM_PROMPT_DIR = os.path.join(BASE_DIR, "simulation_engine", "prompt_template")