feat: SQLite event storage with pagination and filtering
Implement SQLite-based event persistence as specified in sqlite-event-manager.md.
## Changes
### Backend
- **EventStorage** (`src/classes/event_storage.py`): New SQLite storage layer
- Cursor-based pagination with compound cursor `{month_stamp}_{rowid}`
- Avatar filtering (single and pair queries)
- Major/minor event separation
- Cleanup API with `keep_major` and `before_month_stamp` filters
- **EventManager** (`src/classes/event_manager.py`): Refactored to use SQLite
- Delegates to EventStorage for persistence
- Memory fallback mode for testing
- New `get_events_paginated()` method
- **API** (`src/server/main.py`):
- `GET /api/events` - Paginated event retrieval with filtering
- `DELETE /api/events/cleanup` - User-triggered cleanup
### Frontend
- **EventPanel.vue**: Scroll-to-load pagination, dual-person filter UI
- **world.ts**: Event state management with pagination
- **game.ts**: New API client methods
### Testing
- 81 new tests for EventStorage, EventManager, and API
- Added `pytest-asyncio` and `httpx` to requirements.txt
## Known Issues: Save/Load is Currently Broken
After loading a saved game, the following issues occur:
1. **Wrong database used**: API returns events from the startup database instead
of the loaded save's `_events.db` file
2. **Events from wrong time period**: Shows events from year 115 when loaded
save is at year 114
3. **Pagination broken after load**: `has_more` returns `False` despite hundreds
of events in the saved database
4. **Filter functionality broken**: Character selection filter stops working
after loading a game
Root cause: `load_game.py` does not properly switch the EventManager's database
connection to the loaded save's events database.
This commit is contained in:
@@ -3,4 +3,9 @@ omegaconf>=2.3.0
|
||||
json5>=0.9.0
|
||||
fastapi>=0.100.0
|
||||
uvicorn>=0.20.0
|
||||
websockets>=11.0
|
||||
websockets>=11.0  # NOTE(review): duplicate of the websockets requirement above — keep only one
|
||||
|
||||
# Testing
|
||||
pytest>=8.0.0
|
||||
pytest-asyncio>=0.23.0 # Required for async tests
|
||||
httpx>=0.27.0 # Required for FastAPI TestClient
|
||||
@@ -1,126 +1,247 @@
|
||||
from typing import Dict, List
|
||||
from collections import deque, defaultdict
|
||||
"""
|
||||
事件管理器。
|
||||
|
||||
from src.classes.event import Event
|
||||
重构后使用 SQLite 存储,提供与旧版兼容的接口。
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
from typing import List, Optional, TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from src.classes.event import Event
|
||||
from src.classes.event_storage import EventStorage
|
||||
|
||||
|
||||
class EventManager:
|
||||
"""
|
||||
全局事件管理器:统一保存事件,并提供按角色、按角色对、按时间的查询。
|
||||
- 限长清理,避免内存无限增长。
|
||||
- 幂等写入(基于 event_id)。
|
||||
- 仅对恰为两人参与的事件建立“按人对”索引。
|
||||
事件管理器:使用 SQLite 持久化存储。
|
||||
|
||||
保持与旧版兼容的接口:
|
||||
- add_event: 添加事件
|
||||
- get_recent_events: 获取最近事件
|
||||
- get_events_by_avatar: 按角色查询
|
||||
- get_events_between: 按角色对查询
|
||||
- get_major_events_by_avatar: 获取角色大事
|
||||
- get_minor_events_by_avatar: 获取角色小事
|
||||
- get_major_events_between: 获取角色对大事
|
||||
- get_minor_events_between: 获取角色对小事
|
||||
"""
|
||||
|
||||
def __init__(self, *, max_global_events: int = 5000, max_index_events: int = 200) -> None:
|
||||
self.max_global_events = max_global_events
|
||||
self.max_index_events = max_index_events
|
||||
def __init__(self, storage: Optional["EventStorage"] = None):
|
||||
"""
|
||||
初始化事件管理器。
|
||||
|
||||
self._events: deque[Event] = deque()
|
||||
self._by_id: Dict[str, Event] = {}
|
||||
self._by_avatar: Dict[str, deque[Event]] = defaultdict(deque)
|
||||
self._by_pair: Dict[frozenset[str], deque[Event]] = defaultdict(deque)
|
||||
# 按角色分类的大事/小事索引
|
||||
self._by_avatar_major: Dict[str, deque[Event]] = defaultdict(deque)
|
||||
self._by_avatar_minor: Dict[str, deque[Event]] = defaultdict(deque)
|
||||
# 按角色对分类的大事/小事索引
|
||||
self._by_pair_major: Dict[frozenset[str], deque[Event]] = defaultdict(deque)
|
||||
self._by_pair_minor: Dict[frozenset[str], deque[Event]] = defaultdict(deque)
|
||||
Args:
|
||||
storage: SQLite 存储层。如果为 None,则使用内存模式(仅用于测试)。
|
||||
"""
|
||||
self._storage = storage
|
||||
# 内存后备(仅当 storage 为 None 时使用,用于测试或迁移期间)。
|
||||
self._memory_events: List["Event"] = []
|
||||
|
||||
def _append_with_limit(self, dq: deque, item: Event) -> None:
|
||||
dq.append(item)
|
||||
if len(dq) > self.max_index_events:
|
||||
dq.popleft()
|
||||
@classmethod
def create_with_db(cls, db_path: Path) -> "EventManager":
    """
    Factory: build an EventManager backed by a SQLite EventStorage.

    Args:
        db_path: Path of the events database file.

    Returns:
        A configured EventManager instance.
    """
    # Local import avoids a module-level import cycle with event_storage.
    from src.classes.event_storage import EventStorage

    return cls(EventStorage(db_path))
|
||||
|
||||
@classmethod
def create_in_memory(cls) -> "EventManager":
    """
    Factory: build a memory-only EventManager (testing only).

    Returns:
        An EventManager with no SQLite backend; events live in a
        plain in-process list and are lost on shutdown.
    """
    return cls(storage=None)
|
||||
|
||||
def add_event(self, event: "Event") -> None:
    """
    Record a single event.

    Writes through to SQLite when a storage backend is attached;
    otherwise appends to the in-memory fallback list. Null events
    are silently dropped.
    """
    # Local import avoids a module-level import cycle with event.
    from src.classes.event import is_null_event

    # Drop empty/null events.
    if is_null_event(event):
        return

    if self._storage is not None:
        self._storage.add_event(event)
    else:
        # Memory fallback mode (tests / migration period).
        self._memory_events.append(event)
|
||||
|
||||
# 全局
|
||||
self._events.append(event)
|
||||
if len(self._events) > self.max_global_events:
|
||||
self._events.popleft()
|
||||
def get_recent_events(self, limit: int = 100) -> List["Event"]:
|
||||
"""获取最近的事件(时间正序)。"""
|
||||
if self._storage:
|
||||
return self._storage.get_recent_events(limit=limit)
|
||||
else:
|
||||
return self._memory_events[-limit:]
|
||||
|
||||
# 分索引:按人/人对
|
||||
rel = event.related_avatars or []
|
||||
rel_unique = list(dict.fromkeys(rel)) # 去重但保持顺序
|
||||
for aid in rel_unique:
|
||||
self._append_with_limit(self._by_avatar[aid], event)
|
||||
# 故事事件进入小事索引,不进入大事索引
|
||||
if event.is_story:
|
||||
self._append_with_limit(self._by_avatar_minor[aid], event)
|
||||
elif event.is_major:
|
||||
self._append_with_limit(self._by_avatar_major[aid], event)
|
||||
else:
|
||||
self._append_with_limit(self._by_avatar_minor[aid], event)
|
||||
# 仅当且仅当"恰有两位参与者"时建立按人对索引
|
||||
if len(rel_unique) == 2:
|
||||
a, b = rel_unique[0], rel_unique[1]
|
||||
pair_key = frozenset([a, b])
|
||||
self._append_with_limit(self._by_pair[pair_key], event)
|
||||
# 角色对也建立分类索引
|
||||
if event.is_story:
|
||||
self._append_with_limit(self._by_pair_minor[pair_key], event)
|
||||
elif event.is_major:
|
||||
self._append_with_limit(self._by_pair_major[pair_key], event)
|
||||
else:
|
||||
self._append_with_limit(self._by_pair_minor[pair_key], event)
|
||||
def get_events_by_avatar(self, avatar_id: str, *, limit: int = 50) -> List["Event"]:
    """Return events involving one avatar, in chronological order."""
    if self._storage is not None:
        return self._storage.get_events_by_avatar(avatar_id, limit=limit)
    # Memory fallback: scan newest-first, stop once `limit` hits found,
    # then flip back to chronological order.
    hits = []
    for ev in reversed(self._memory_events):
        if ev.related_avatars and avatar_id in ev.related_avatars:
            hits.append(ev)
            if len(hits) >= limit:
                break
    hits.reverse()
    return hits
|
||||
|
||||
# —— 查询接口 ——
|
||||
def get_recent_events(self, limit: int = 100) -> List[Event]:
|
||||
if limit <= 0:
|
||||
return []
|
||||
return list(self._events)[-limit:]
|
||||
def get_events_between(self, avatar_id1: str, avatar_id2: str, *, limit: int = 50) -> List["Event"]:
    """Return events shared by two avatars, in chronological order."""
    if self._storage is not None:
        return self._storage.get_events_between(avatar_id1, avatar_id2, limit=limit)
    # Memory fallback: newest-first scan with early exit at `limit`.
    hits = []
    for ev in reversed(self._memory_events):
        related = ev.related_avatars
        if related and avatar_id1 in related and avatar_id2 in related:
            hits.append(ev)
            if len(hits) >= limit:
                break
    hits.reverse()
    return hits
|
||||
|
||||
def get_events_by_avatar(self, avatar_id: str, *, limit: int = 50) -> List[Event]:
|
||||
dq = self._by_avatar.get(avatar_id)
|
||||
if not dq:
|
||||
return []
|
||||
return list(dq)[-limit:]
|
||||
def get_major_events_by_avatar(self, avatar_id: str, *, limit: int = 10) -> List["Event"]:
    """Return an avatar's major events (long-term memory), chronological order.

    Major means is_major and not is_story — story events always count
    as minor.
    """
    if self._storage is not None:
        return self._storage.get_major_events_by_avatar(avatar_id, limit=limit)
    hits = []
    for ev in reversed(self._memory_events):
        if ev.is_major and not ev.is_story:
            if ev.related_avatars and avatar_id in ev.related_avatars:
                hits.append(ev)
                if len(hits) >= limit:
                    break
    hits.reverse()
    return hits
|
||||
|
||||
def get_events_between(self, avatar_id1: str, avatar_id2: str, *, limit: int = 50) -> List[Event]:
|
||||
key = frozenset([avatar_id1, avatar_id2])
|
||||
dq = self._by_pair.get(key)
|
||||
if not dq:
|
||||
return []
|
||||
return list(dq)[-limit:]
|
||||
def get_minor_events_by_avatar(self, avatar_id: str, *, limit: int = 10) -> List["Event"]:
    """Return an avatar's minor events (short-term memory), chronological order.

    Minor means not is_major, or any story event.
    """
    if self._storage is not None:
        return self._storage.get_minor_events_by_avatar(avatar_id, limit=limit)
    hits = []
    for ev in reversed(self._memory_events):
        if (not ev.is_major) or ev.is_story:
            if ev.related_avatars and avatar_id in ev.related_avatars:
                hits.append(ev)
                if len(hits) >= limit:
                    break
    hits.reverse()
    return hits
|
||||
|
||||
def get_major_events_by_avatar(self, avatar_id: str, *, limit: int = 10) -> List[Event]:
|
||||
"""获取角色的大事(长期记忆)"""
|
||||
dq = self._by_avatar_major.get(avatar_id)
|
||||
if not dq:
|
||||
return []
|
||||
return list(dq)[-limit:]
|
||||
def get_major_events_between(self, avatar_id1: str, avatar_id2: str, *, limit: int = 10) -> List["Event"]:
    """Return major events shared by two avatars, in chronological order."""
    if self._storage is not None:
        return self._storage.get_major_events_between(avatar_id1, avatar_id2, limit=limit)
    hits = []
    for ev in reversed(self._memory_events):
        if ev.is_major and not ev.is_story:
            related = ev.related_avatars
            if related and avatar_id1 in related and avatar_id2 in related:
                hits.append(ev)
                if len(hits) >= limit:
                    break
    hits.reverse()
    return hits
|
||||
|
||||
def get_minor_events_by_avatar(self, avatar_id: str, *, limit: int = 10) -> List[Event]:
|
||||
"""获取角色的小事(短期记忆)"""
|
||||
dq = self._by_avatar_minor.get(avatar_id)
|
||||
if not dq:
|
||||
return []
|
||||
return list(dq)[-limit:]
|
||||
def get_minor_events_between(self, avatar_id1: str, avatar_id2: str, *, limit: int = 10) -> List["Event"]:
    """Return minor events shared by two avatars, in chronological order."""
    if self._storage is not None:
        return self._storage.get_minor_events_between(avatar_id1, avatar_id2, limit=limit)
    hits = []
    for ev in reversed(self._memory_events):
        if (not ev.is_major) or ev.is_story:
            related = ev.related_avatars
            if related and avatar_id1 in related and avatar_id2 in related:
                hits.append(ev)
                if len(hits) >= limit:
                    break
    hits.reverse()
    return hits
|
||||
|
||||
def get_major_events_between(self, avatar_id1: str, avatar_id2: str, *, limit: int = 10) -> List[Event]:
|
||||
"""获取两个角色之间的大事(长期记忆)"""
|
||||
key = frozenset([avatar_id1, avatar_id2])
|
||||
dq = self._by_pair_major.get(key)
|
||||
if not dq:
|
||||
return []
|
||||
return list(dq)[-limit:]
|
||||
# --- 分页查询接口(新增)---
|
||||
|
||||
def get_minor_events_between(self, avatar_id1: str, avatar_id2: str, *, limit: int = 10) -> List[Event]:
|
||||
"""获取两个角色之间的小事(短期记忆)"""
|
||||
key = frozenset([avatar_id1, avatar_id2])
|
||||
dq = self._by_pair_minor.get(key)
|
||||
if not dq:
|
||||
return []
|
||||
return list(dq)[-limit:]
|
||||
def get_events_paginated(
    self,
    avatar_id: Optional[str] = None,
    avatar_id_pair: Optional[tuple[str, str]] = None,
    cursor: Optional[str] = None,
    limit: int = 100,
) -> tuple[List["Event"], Optional[str], bool]:
    """
    Paginated event query, newest first.

    Args:
        avatar_id: Filter by a single avatar.
        avatar_id_pair: Filter to events involving both avatars.
        cursor: Opaque pagination cursor; fetch events older than it.
        limit: Page size.

    Returns:
        (events, next_cursor, has_more):
        - events: reverse-chronological page (newest first).
        - next_cursor: cursor for the following page; None when exhausted.
        - has_more: whether another page exists.
    """
    if self._storage is None:
        # Memory fallback: real pagination is unsupported; hand back the
        # latest events, newest first, and report no further pages.
        latest = self.get_recent_events(limit=limit)
        return latest[::-1], None, False

    events, next_cursor = self._storage.get_events(
        avatar_id=avatar_id,
        avatar_id_pair=avatar_id_pair,
        cursor=cursor,
        limit=limit,
    )
    return events, next_cursor, next_cursor is not None
|
||||
|
||||
# --- 清理接口 ---
|
||||
|
||||
def cleanup(self, keep_major: bool = True, before_month_stamp: Optional[int] = None) -> int:
|
||||
"""
|
||||
清理事件。
|
||||
|
||||
Args:
|
||||
keep_major: 是否保留大事。
|
||||
before_month_stamp: 删除此时间之前的事件。
|
||||
|
||||
Returns:
|
||||
删除的事件数量。
|
||||
"""
|
||||
if self._storage:
|
||||
return self._storage.cleanup(keep_major=keep_major, before_month_stamp=before_month_stamp)
|
||||
else:
|
||||
# 内存模式:简单清空。
|
||||
count = len(self._memory_events)
|
||||
self._memory_events.clear()
|
||||
return count
|
||||
|
||||
def count(self) -> int:
|
||||
"""获取事件总数。"""
|
||||
if self._storage:
|
||||
return self._storage.count()
|
||||
else:
|
||||
return len(self._memory_events)
|
||||
|
||||
def close(self) -> None:
|
||||
"""关闭资源。"""
|
||||
if self._storage:
|
||||
self._storage.close()
|
||||
|
||||
543
src/classes/event_storage.py
Normal file
543
src/classes/event_storage.py
Normal file
@@ -0,0 +1,543 @@
|
||||
"""
|
||||
SQLite 事件存储层。
|
||||
|
||||
提供事件的持久化存储、分页查询和清理功能。
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import sqlite3
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
from contextlib import contextmanager
|
||||
|
||||
from src.run.log import get_logger
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from src.classes.event import Event
|
||||
|
||||
|
||||
class EventStorage:
|
||||
"""
|
||||
SQLite 事件存储层。
|
||||
|
||||
提供:
|
||||
- 实时写入事件
|
||||
- 分页查询(cursor-based)
|
||||
- 按角色/角色对查询
|
||||
- 历史清理
|
||||
"""
|
||||
|
||||
def __init__(self, db_path: Path):
    """
    Open (or create) the database at `db_path` and ensure the schema.

    Args:
        db_path: Path of the SQLite database file.

    Raises:
        Whatever _init_db raises when the database cannot be opened.
    """
    # _conn stays None until _init_db succeeds, so every query method
    # can guard on `self._conn is None`.
    self._db_path = db_path
    self._conn: Optional[sqlite3.Connection] = None
    self._logger = get_logger().logger
    self._init_db()
|
||||
|
||||
def _init_db(self) -> None:
    """Open the connection and create tables/indexes if missing.

    Schema: `events` (one row per event) plus `event_avatars` (join
    table event_id <-> avatar_id, cascading on event deletion).
    """
    try:
        # Make sure the parent directory exists before sqlite opens the file.
        self._db_path.parent.mkdir(parents=True, exist_ok=True)

        # check_same_thread=False: the connection is shared with the
        # web-server threads.
        self._conn = sqlite3.connect(str(self._db_path), check_same_thread=False)
        self._conn.row_factory = sqlite3.Row

        # Needed for ON DELETE CASCADE on event_avatars.
        self._conn.execute("PRAGMA foreign_keys = ON")

        self._conn.executescript("""
            CREATE TABLE IF NOT EXISTS events (
                id TEXT PRIMARY KEY,
                month_stamp INTEGER NOT NULL,
                content TEXT NOT NULL,
                is_major BOOLEAN DEFAULT FALSE,
                is_story BOOLEAN DEFAULT FALSE,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            );

            CREATE TABLE IF NOT EXISTS event_avatars (
                event_id TEXT NOT NULL,
                avatar_id TEXT NOT NULL,
                PRIMARY KEY (event_id, avatar_id),
                FOREIGN KEY (event_id) REFERENCES events(id) ON DELETE CASCADE
            );

            CREATE INDEX IF NOT EXISTS idx_events_month_stamp
                ON events(month_stamp DESC);
            CREATE INDEX IF NOT EXISTS idx_events_is_major
                ON events(is_major);
            CREATE INDEX IF NOT EXISTS idx_event_avatars_avatar_id
                ON event_avatars(avatar_id);
            CREATE INDEX IF NOT EXISTS idx_event_avatars_event_id
                ON event_avatars(event_id);
        """)
        self._conn.commit()
        self._logger.info(f"EventStorage initialized: {self._db_path}")
    except Exception as e:
        # Initialization failure is fatal for this storage instance.
        self._logger.error(f"Failed to initialize EventStorage: {e}")
        raise
|
||||
|
||||
@contextmanager
def _transaction(self):
    """Wrap a write: commit on success, rollback and re-raise on failure."""
    conn = self._conn
    try:
        yield conn
        conn.commit()
    except Exception:
        # Any failure (including a failed commit) rolls the work back.
        conn.rollback()
        raise
|
||||
|
||||
def add_event(self, event: "Event") -> bool:
    """
    Write a single event to the database.

    Inserts into `events` and the `event_avatars` join table inside one
    transaction. Failures are logged and reported via the return value
    instead of raising, so a storage hiccup never crashes the caller.

    Args:
        event: Event to persist; must expose `id`, `month_stamp`,
            `content`, `is_major`, `is_story`, `related_avatars`.

    Returns:
        True on success, False when uninitialized or the write failed.
        An already-existing id is silently skipped (INSERT OR IGNORE)
        and still counts as success.
    """
    if self._conn is None:
        self._logger.error("EventStorage not initialized")
        return False

    try:
        with self._transaction():
            # Main event row; OR IGNORE keeps writes idempotent per id.
            self._conn.execute(
                """
                INSERT OR IGNORE INTO events (id, month_stamp, content, is_major, is_story)
                VALUES (?, ?, ?, ?, ?)
                """,
                (
                    event.id,
                    int(event.month_stamp),
                    event.content,
                    event.is_major,
                    event.is_story,
                ),
            )
            # Join-table rows: batched with executemany instead of one
            # execute() round-trip per avatar.
            if event.related_avatars:
                self._conn.executemany(
                    """
                    INSERT OR IGNORE INTO event_avatars (event_id, avatar_id)
                    VALUES (?, ?)
                    """,
                    [(event.id, str(avatar_id)) for avatar_id in event.related_avatars],
                )
        return True
    except Exception as e:
        self._logger.error(f"Failed to write event {event.id}: {e}")
        return False
|
||||
|
||||
def _parse_cursor(self, cursor: str) -> tuple[int, int]:
|
||||
"""
|
||||
解析复合 cursor。
|
||||
|
||||
格式: {month_stamp}_{rowid}
|
||||
|
||||
Returns:
|
||||
(month_stamp, rowid)
|
||||
"""
|
||||
parts = cursor.split("_", 1)
|
||||
if len(parts) != 2:
|
||||
raise ValueError(f"Invalid cursor format: {cursor}")
|
||||
return int(parts[0]), int(parts[1])
|
||||
|
||||
def _make_cursor(self, month_stamp: int, rowid: int) -> str:
|
||||
"""生成复合 cursor。"""
|
||||
return f"{month_stamp}_{rowid}"
|
||||
|
||||
def get_events(
    self,
    avatar_id: Optional[str] = None,
    avatar_id_pair: Optional[tuple[str, str]] = None,
    cursor: Optional[str] = None,
    limit: int = 100,
) -> tuple[list["Event"], Optional[str]]:
    """
    Paginated event query, newest first.

    Args:
        avatar_id: Filter by a single avatar.
        avatar_id_pair: Filter to events involving both avatars
            (takes precedence over avatar_id).
        cursor: Compound cursor "{month_stamp}_{rowid}"; only events
            strictly older than that position are returned.
        limit: Page size.

    Returns:
        (events, next_cursor); next_cursor is None on the last page.
        On any error the failure is logged and an empty page returned.
    """
    # Local imports avoid module-level import cycles.
    from src.classes.event import Event
    from src.classes.calendar import MonthStamp

    if self._conn is None:
        return [], None

    try:
        params: list = []

        if avatar_id_pair:
            # Pair query: the event must reference both avatars.
            id1, id2 = avatar_id_pair
            base_query = """
                SELECT DISTINCT e.rowid, e.id, e.month_stamp, e.content, e.is_major, e.is_story
                FROM events e
                JOIN event_avatars ea1 ON e.id = ea1.event_id AND ea1.avatar_id = ?
                JOIN event_avatars ea2 ON e.id = ea2.event_id AND ea2.avatar_id = ?
            """
            params.extend([id1, id2])
        elif avatar_id:
            # Single-avatar query.
            base_query = """
                SELECT DISTINCT e.rowid, e.id, e.month_stamp, e.content, e.is_major, e.is_story
                FROM events e
                JOIN event_avatars ea ON e.id = ea.event_id AND ea.avatar_id = ?
            """
            params.append(avatar_id)
        else:
            # All events.
            base_query = """
                SELECT rowid, id, month_stamp, content, is_major, is_story
                FROM events e
            """

        # Cursor predicate: strictly older than (month_stamp, rowid);
        # rowid breaks ties deterministically within one month_stamp.
        where_clauses = []
        if cursor:
            cursor_month, cursor_rowid = self._parse_cursor(cursor)
            where_clauses.append(
                "(e.month_stamp < ? OR (e.month_stamp = ? AND e.rowid < ?))"
            )
            params.extend([cursor_month, cursor_month, cursor_rowid])

        if where_clauses:
            base_query += " WHERE " + " AND ".join(where_clauses)

        # Newest first; fetch one extra row to detect a further page.
        base_query += " ORDER BY e.month_stamp DESC, e.rowid DESC LIMIT ?"
        params.append(limit + 1)

        rows = self._conn.execute(base_query, params).fetchall()

        has_more = len(rows) > limit
        if has_more:
            rows = rows[:limit]

        # Batch-load the avatar lists for the whole page in one query
        # instead of one query per event (avoids the N+1 pattern).
        avatars_by_event: dict = {}
        if rows:
            placeholders = ",".join("?" * len(rows))
            avatar_rows = self._conn.execute(
                f"SELECT event_id, avatar_id FROM event_avatars "
                f"WHERE event_id IN ({placeholders}) ORDER BY rowid",
                [r["id"] for r in rows],
            ).fetchall()
            for ar in avatar_rows:
                avatars_by_event.setdefault(ar["event_id"], []).append(ar["avatar_id"])

        events = []
        last_rowid = None
        last_month_stamp = None
        for row in rows:
            related_avatars = avatars_by_event.get(row["id"])
            events.append(
                Event(
                    month_stamp=MonthStamp(row["month_stamp"]),
                    content=row["content"],
                    related_avatars=related_avatars if related_avatars else None,
                    is_major=bool(row["is_major"]),
                    is_story=bool(row["is_story"]),
                    id=row["id"],
                )
            )
            last_rowid = row["rowid"]
            last_month_stamp = row["month_stamp"]

        # Only hand out a cursor when another page actually exists.
        next_cursor = None
        if has_more and last_rowid is not None:
            next_cursor = self._make_cursor(last_month_stamp, last_rowid)

        return events, next_cursor

    except Exception as e:
        self._logger.error(f"Failed to query events: {e}")
        return [], None
|
||||
|
||||
def get_events_by_avatar(self, avatar_id: str, limit: int = 50) -> list["Event"]:
    """
    Backend helper: the latest `limit` events involving an avatar,
    returned oldest-first (chronological order for LLM prompts).
    """
    page, _unused = self.get_events(avatar_id=avatar_id, limit=limit)
    return page[::-1]
|
||||
|
||||
def get_events_between(self, id1: str, id2: str, limit: int = 50) -> list["Event"]:
    """
    Backend helper: the latest `limit` events shared by two avatars,
    returned oldest-first (chronological order for LLM prompts).
    """
    page, _unused = self.get_events(avatar_id_pair=(id1, id2), limit=limit)
    return page[::-1]
|
||||
|
||||
def get_major_events_by_avatar(self, avatar_id: str, limit: int = 10) -> list["Event"]:
    """
    Major events for one avatar (long-term memory), chronological order.

    "Major" means is_major = TRUE and is_story = FALSE — story events
    always count as minor.

    Args:
        avatar_id: The avatar to query.
        limit: Maximum number of events.

    Returns:
        Up to `limit` latest major events, oldest first; empty list on
        error or when uninitialized.
    """
    from src.classes.event import Event
    from src.classes.calendar import MonthStamp

    if self._conn is None:
        return []

    try:
        rows = self._conn.execute(
            """
            SELECT DISTINCT e.id, e.month_stamp, e.content, e.is_major, e.is_story
            FROM events e
            JOIN event_avatars ea ON e.id = ea.event_id AND ea.avatar_id = ?
            WHERE e.is_major = TRUE AND e.is_story = FALSE
            ORDER BY e.month_stamp DESC
            LIMIT ?
            """,
            (avatar_id, limit),
        ).fetchall()

        # Batch-load avatar lists for the page (avoids one query per event).
        avatars_by_event: dict = {}
        if rows:
            placeholders = ",".join("?" * len(rows))
            for ar in self._conn.execute(
                f"SELECT event_id, avatar_id FROM event_avatars "
                f"WHERE event_id IN ({placeholders}) ORDER BY rowid",
                [r["id"] for r in rows],
            ):
                avatars_by_event.setdefault(ar["event_id"], []).append(ar["avatar_id"])

        events = [
            Event(
                month_stamp=MonthStamp(row["month_stamp"]),
                content=row["content"],
                related_avatars=avatars_by_event.get(row["id"]) or None,
                is_major=bool(row["is_major"]),
                is_story=bool(row["is_story"]),
                id=row["id"],
            )
            for row in rows
        ]
        return events[::-1]  # chronological order
    except Exception as e:
        self._logger.error(f"Failed to query major events: {e}")
        return []
|
||||
|
||||
def get_minor_events_by_avatar(self, avatar_id: str, limit: int = 10) -> list["Event"]:
    """
    Minor events for one avatar (short-term memory, stories included),
    chronological order.

    "Minor" means is_major = FALSE, or any story event.

    Args:
        avatar_id: The avatar to query.
        limit: Maximum number of events.

    Returns:
        Up to `limit` latest minor events, oldest first; empty list on
        error or when uninitialized.
    """
    from src.classes.event import Event
    from src.classes.calendar import MonthStamp

    if self._conn is None:
        return []

    try:
        rows = self._conn.execute(
            """
            SELECT DISTINCT e.id, e.month_stamp, e.content, e.is_major, e.is_story
            FROM events e
            JOIN event_avatars ea ON e.id = ea.event_id AND ea.avatar_id = ?
            WHERE e.is_major = FALSE OR e.is_story = TRUE
            ORDER BY e.month_stamp DESC
            LIMIT ?
            """,
            (avatar_id, limit),
        ).fetchall()

        # Batch-load avatar lists for the page (avoids one query per event).
        avatars_by_event: dict = {}
        if rows:
            placeholders = ",".join("?" * len(rows))
            for ar in self._conn.execute(
                f"SELECT event_id, avatar_id FROM event_avatars "
                f"WHERE event_id IN ({placeholders}) ORDER BY rowid",
                [r["id"] for r in rows],
            ):
                avatars_by_event.setdefault(ar["event_id"], []).append(ar["avatar_id"])

        events = [
            Event(
                month_stamp=MonthStamp(row["month_stamp"]),
                content=row["content"],
                related_avatars=avatars_by_event.get(row["id"]) or None,
                is_major=bool(row["is_major"]),
                is_story=bool(row["is_story"]),
                id=row["id"],
            )
            for row in rows
        ]
        return events[::-1]  # chronological order
    except Exception as e:
        self._logger.error(f"Failed to query minor events: {e}")
        return []
|
||||
|
||||
def get_major_events_between(self, id1: str, id2: str, limit: int = 10) -> list["Event"]:
    """
    Major events shared by two avatars (long-term memory),
    chronological order.

    Args:
        id1: First avatar id.
        id2: Second avatar id.
        limit: Maximum number of events.

    Returns:
        Up to `limit` latest shared major events, oldest first; empty
        list on error or when uninitialized.
    """
    from src.classes.event import Event
    from src.classes.calendar import MonthStamp

    if self._conn is None:
        return []

    try:
        rows = self._conn.execute(
            """
            SELECT DISTINCT e.id, e.month_stamp, e.content, e.is_major, e.is_story
            FROM events e
            JOIN event_avatars ea1 ON e.id = ea1.event_id AND ea1.avatar_id = ?
            JOIN event_avatars ea2 ON e.id = ea2.event_id AND ea2.avatar_id = ?
            WHERE e.is_major = TRUE AND e.is_story = FALSE
            ORDER BY e.month_stamp DESC
            LIMIT ?
            """,
            (id1, id2, limit),
        ).fetchall()

        # Batch-load avatar lists for the page (avoids one query per event).
        avatars_by_event: dict = {}
        if rows:
            placeholders = ",".join("?" * len(rows))
            for ar in self._conn.execute(
                f"SELECT event_id, avatar_id FROM event_avatars "
                f"WHERE event_id IN ({placeholders}) ORDER BY rowid",
                [r["id"] for r in rows],
            ):
                avatars_by_event.setdefault(ar["event_id"], []).append(ar["avatar_id"])

        events = [
            Event(
                month_stamp=MonthStamp(row["month_stamp"]),
                content=row["content"],
                related_avatars=avatars_by_event.get(row["id"]) or None,
                is_major=bool(row["is_major"]),
                is_story=bool(row["is_story"]),
                id=row["id"],
            )
            for row in rows
        ]
        return events[::-1]  # chronological order
    except Exception as e:
        self._logger.error(f"Failed to query major events between: {e}")
        return []
|
||||
|
||||
def get_minor_events_between(self, id1: str, id2: str, limit: int = 10) -> list["Event"]:
    """
    Minor events shared by two avatars (short-term memory),
    chronological order.

    Args:
        id1: First avatar id.
        id2: Second avatar id.
        limit: Maximum number of events.

    Returns:
        Up to `limit` latest shared minor events, oldest first; empty
        list on error or when uninitialized.
    """
    from src.classes.event import Event
    from src.classes.calendar import MonthStamp

    if self._conn is None:
        return []

    try:
        rows = self._conn.execute(
            """
            SELECT DISTINCT e.id, e.month_stamp, e.content, e.is_major, e.is_story
            FROM events e
            JOIN event_avatars ea1 ON e.id = ea1.event_id AND ea1.avatar_id = ?
            JOIN event_avatars ea2 ON e.id = ea2.event_id AND ea2.avatar_id = ?
            WHERE e.is_major = FALSE OR e.is_story = TRUE
            ORDER BY e.month_stamp DESC
            LIMIT ?
            """,
            (id1, id2, limit),
        ).fetchall()

        # Batch-load avatar lists for the page (avoids one query per event).
        avatars_by_event: dict = {}
        if rows:
            placeholders = ",".join("?" * len(rows))
            for ar in self._conn.execute(
                f"SELECT event_id, avatar_id FROM event_avatars "
                f"WHERE event_id IN ({placeholders}) ORDER BY rowid",
                [r["id"] for r in rows],
            ):
                avatars_by_event.setdefault(ar["event_id"], []).append(ar["avatar_id"])

        events = [
            Event(
                month_stamp=MonthStamp(row["month_stamp"]),
                content=row["content"],
                related_avatars=avatars_by_event.get(row["id"]) or None,
                is_major=bool(row["is_major"]),
                is_story=bool(row["is_story"]),
                id=row["id"],
            )
            for row in rows
        ]
        return events[::-1]  # chronological order
    except Exception as e:
        self._logger.error(f"Failed to query minor events between: {e}")
        return []
|
||||
|
||||
def get_recent_events(self, limit: int = 100) -> list["Event"]:
    """Latest `limit` events, oldest-first (used by the initial-state API)."""
    page, _unused = self.get_events(limit=limit)
    return page[::-1]
|
||||
|
||||
def cleanup(self, keep_major: bool = True, before_month_stamp: Optional[int] = None) -> int:
    """
    Delete events from storage.

    Args:
        keep_major: When True, major events are preserved (only rows
            with is_major = FALSE are deleted).
        before_month_stamp: When given, only events strictly older
            than this month stamp are deleted.

    Returns:
        Number of deleted events (0 on error or when uninitialized).

    Note:
        With keep_major=False and before_month_stamp=None every event
        is deleted. Rows in event_avatars go away via the
        ON DELETE CASCADE foreign key.
    """
    if self._conn is None:
        return 0

    try:
        conditions = []
        params: list = []

        if keep_major:
            conditions.append("is_major = FALSE")
        if before_month_stamp is not None:
            conditions.append("month_stamp < ?")
            params.append(before_month_stamp)

        # The original also early-returned on "no conditions and
        # keep_major" — unreachable, since keep_major=True always
        # contributes a condition; that dead branch is removed.
        where_clause = " AND ".join(conditions) if conditions else "1=1"

        with self._transaction():
            cur = self._conn.execute(
                f"DELETE FROM events WHERE {where_clause}",
                params,
            )
            deleted = cur.rowcount

        self._logger.info(f"Cleaned up {deleted} events")
        return deleted

    except Exception as e:
        self._logger.error(f"Failed to cleanup events: {e}")
        return 0
|
||||
|
||||
def count(self) -> int:
|
||||
"""获取事件总数。"""
|
||||
if self._conn is None:
|
||||
return 0
|
||||
try:
|
||||
row = self._conn.execute("SELECT COUNT(*) FROM events").fetchone()
|
||||
return row[0] if row else 0
|
||||
except Exception:
|
||||
return 0
|
||||
|
||||
def close(self) -> None:
|
||||
"""关闭数据库连接。"""
|
||||
if self._conn:
|
||||
try:
|
||||
self._conn.close()
|
||||
self._logger.info("EventStorage closed")
|
||||
except Exception as e:
|
||||
self._logger.error(f"Failed to close EventStorage: {e}")
|
||||
finally:
|
||||
self._conn = None
|
||||
@@ -1,4 +1,5 @@
|
||||
from dataclasses import dataclass, field
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from src.classes.map import Map
|
||||
@@ -63,4 +64,29 @@ class World():
|
||||
"动作": "你有一系列可以执行的动作。要注意动作的效果、限制条件、区域和时间。",
|
||||
"装备与丹药": "通过兵器、辅助装备、丹药等装备,可以获得额外的属性加成,获得或小或大的增益。拥有好的装备或者服用好的丹药,能获得很大好处。",
|
||||
}
|
||||
return desc
|
||||
return desc
|
||||
|
||||
@classmethod
def create_with_db(
    cls,
    map: "Map",
    month_stamp: MonthStamp,
    events_db_path: Path,
) -> "World":
    """
    Factory: build a World whose events are persisted to SQLite.

    Args:
        map: The game map.
        month_stamp: Starting time stamp.
        events_db_path: Path of the events database file.

    Returns:
        A fully configured World instance.
    """
    manager = EventManager.create_with_db(events_db_path)
    return cls(map=map, month_stamp=month_stamp, event_manager=manager)
|
||||
@@ -184,7 +184,7 @@ def serialize_events_for_client(events: List[Event]) -> List[dict]:
|
||||
related_ids = [str(a) for a in related_raw if a is not None]
|
||||
|
||||
serialized.append({
|
||||
"id": getattr(event, "event_id", None) or f"{stamp_int or 'evt'}-{idx}",
|
||||
"id": getattr(event, "id", None) or f"{stamp_int or 'evt'}-{idx}",
|
||||
"text": str(event),
|
||||
"content": getattr(event, "content", ""),
|
||||
"year": year,
|
||||
@@ -274,10 +274,31 @@ def check_llm_connectivity() -> tuple[bool, str]:
|
||||
|
||||
def init_game():
|
||||
"""初始化游戏世界,逻辑复用自 src/run/run.py"""
|
||||
|
||||
from datetime import datetime
|
||||
from src.sim.load_game import get_events_db_path
|
||||
|
||||
print("正在初始化游戏世界...")
|
||||
game_map = load_cultivation_world_map()
|
||||
world = World(map=game_map, month_stamp=create_month_stamp(Year(100), Month.JANUARY))
|
||||
|
||||
# 生成时间戳命名的存档路径
|
||||
timestamp = datetime.now().strftime("%Y%m%d_%H%M")
|
||||
save_name = f"save_{timestamp}"
|
||||
saves_dir = CONFIG.paths.saves
|
||||
saves_dir.mkdir(parents=True, exist_ok=True)
|
||||
save_path = saves_dir / f"{save_name}.json"
|
||||
events_db_path = get_events_db_path(save_path)
|
||||
|
||||
# 使用 SQLite 事件存储创建 World
|
||||
world = World.create_with_db(
|
||||
map=game_map,
|
||||
month_stamp=create_month_stamp(Year(100), Month.JANUARY),
|
||||
events_db_path=events_db_path,
|
||||
)
|
||||
print(f"事件数据库: {events_db_path}")
|
||||
|
||||
# 记录当前存档路径(供后续保存使用)
|
||||
game_instance["current_save_path"] = save_path
|
||||
|
||||
sim = Simulator(world)
|
||||
|
||||
# 宗门初始化逻辑
|
||||
@@ -645,6 +666,80 @@ def get_state():
|
||||
except Exception as e:
|
||||
return {"step": 0, "error": "Fatal: " + str(e)}
|
||||
|
||||
|
||||
@app.get("/api/events")
def get_events(
    avatar_id: str | None = None,
    avatar_id_1: str | None = None,
    avatar_id_2: str | None = None,
    cursor: str | None = None,
    limit: int = 100,
):
    """
    Paginated event listing.

    Query Parameters:
        avatar_id: filter by a single avatar.
        avatar_id_1 / avatar_id_2: pair query — BOTH must be provided,
            otherwise the pair filter is silently ignored.
        cursor: opaque pagination cursor; returns events before this position.
        limit: page size, default 100.

    Returns:
        {"events": [...], "next_cursor": str | None, "has_more": bool}.
        An empty page is returned when no world / event manager is loaded.
    """
    # Annotations fixed from the implicitly-optional `str = None` form to
    # explicit `str | None` (PEP 484); FastAPI behavior is unchanged.
    world = game_instance.get("world")
    if world is None:
        return {"events": [], "next_cursor": None, "has_more": False}

    event_manager = getattr(world, "event_manager", None)
    if event_manager is None:
        return {"events": [], "next_cursor": None, "has_more": False}

    # The pair filter only applies when both ids are present.
    avatar_id_pair = None
    if avatar_id_1 and avatar_id_2:
        avatar_id_pair = (avatar_id_1, avatar_id_2)

    events, next_cursor, has_more = event_manager.get_events_paginated(
        avatar_id=avatar_id,
        avatar_id_pair=avatar_id_pair,
        cursor=cursor,
        limit=limit,
    )

    return {
        "events": serialize_events_for_client(events),
        "next_cursor": next_cursor,
        "has_more": has_more,
    }
|
||||
|
||||
|
||||
@app.delete("/api/events/cleanup")
def cleanup_events(
    keep_major: bool = True,
    before_month_stamp: int | None = None,
):
    """
    Delete historical events (user-triggered).

    Query Parameters:
        keep_major: when true (default), major events are preserved.
        before_month_stamp: only delete events earlier than this stamp;
            when omitted, age is not restricted.

    Returns:
        {"deleted": <row count>}, or {"deleted": 0, "error": ...} when no
        world / event manager is available.
    """
    # Annotation fixed from the implicitly-optional `int = None` form to
    # explicit `int | None` (PEP 484); FastAPI behavior is unchanged.
    world = game_instance.get("world")
    if world is None:
        return {"deleted": 0, "error": "No world"}

    event_manager = getattr(world, "event_manager", None)
    if event_manager is None:
        return {"deleted": 0, "error": "No event manager"}

    deleted = event_manager.cleanup(
        keep_major=keep_major,
        before_month_stamp=before_month_stamp,
    )
    return {"deleted": deleted}
|
||||
|
||||
|
||||
@app.get("/api/map")
|
||||
def get_map():
|
||||
"""获取静态地图数据(仅需加载一次)"""
|
||||
@@ -1183,14 +1278,16 @@ def api_save_game(req: SaveGameRequest):
|
||||
sim = game_instance.get("sim")
|
||||
if not world or not sim:
|
||||
raise HTTPException(status_code=503, detail="Game not initialized")
|
||||
|
||||
|
||||
# 尝试从 world 属性获取(如果以后添加了)
|
||||
existed_sects = getattr(world, "existed_sects", [])
|
||||
if not existed_sects:
|
||||
# fallback: 所有 sects
|
||||
existed_sects = list(sects_by_id.values())
|
||||
|
||||
success, filename = save_game(world, sim, existed_sects, save_path=None) # save_path=None 会自动生成时间戳文件名
|
||||
# 使用当前存档路径(保持 SQLite 数据库关联)
|
||||
current_save_path = game_instance.get("current_save_path")
|
||||
success, filename = save_game(world, sim, existed_sects, save_path=current_save_path)
|
||||
if success:
|
||||
return {"status": "ok", "filename": filename}
|
||||
else:
|
||||
@@ -1212,14 +1309,15 @@ def api_load_game(req: LoadGameRequest):
|
||||
|
||||
# 加载
|
||||
new_world, new_sim, new_sects = load_game(target_path)
|
||||
|
||||
|
||||
# 确保挂载 existed_sects 以便下次保存
|
||||
new_world.existed_sects = new_sects
|
||||
|
||||
# 替换全局实例
|
||||
game_instance["world"] = new_world
|
||||
game_instance["sim"] = new_sim
|
||||
|
||||
game_instance["current_save_path"] = target_path
|
||||
|
||||
return {"status": "ok", "message": "Game loaded"}
|
||||
except Exception as e:
|
||||
import traceback
|
||||
|
||||
@@ -17,6 +17,15 @@ from src.run.load_map import load_cultivation_world_map
|
||||
from src.utils.config import CONFIG
|
||||
|
||||
|
||||
def get_events_db_path(save_path: Path) -> Path:
    """
    Derive the events-database path from a save-file path.

    Example: save_20260105_1423.json -> save_20260105_1423_events.db

    Args:
        save_path: Path to the JSON save file.

    Returns:
        Sibling path whose filename is ``<save stem>_events.db``.
    """
    # Path.with_name replaces the entire filename, so the original chained
    # with_suffix("") call was redundant and has been dropped.
    return save_path.with_name(save_path.stem + "_events.db")
|
||||
|
||||
|
||||
def load_game(save_path: Optional[Path] = None) -> Tuple[World, Simulator, List[Sect]]:
|
||||
"""
|
||||
从文件加载游戏状态
|
||||
@@ -53,13 +62,20 @@ def load_game(save_path: Optional[Path] = None) -> Tuple[World, Simulator, List[
|
||||
|
||||
# 重建地图(地图本身不变,只需重建宗门总部位置)
|
||||
game_map = load_cultivation_world_map()
|
||||
|
||||
|
||||
# 读取世界数据
|
||||
world_data = save_data.get("world", {})
|
||||
month_stamp = MonthStamp(world_data["month_stamp"])
|
||||
|
||||
# 重建World对象
|
||||
world = World(map=game_map, month_stamp=month_stamp)
|
||||
|
||||
# 计算事件数据库路径
|
||||
events_db_path = get_events_db_path(save_path)
|
||||
|
||||
# 重建World对象(使用 SQLite 事件存储)
|
||||
world = World.create_with_db(
|
||||
map=game_map,
|
||||
month_stamp=month_stamp,
|
||||
events_db_path=events_db_path,
|
||||
)
|
||||
|
||||
# 获取本局启用的宗门
|
||||
existed_sect_ids = world_data.get("existed_sect_ids", [])
|
||||
@@ -86,19 +102,27 @@ def load_game(save_path: Optional[Path] = None) -> Tuple[World, Simulator, List[
|
||||
|
||||
# 将所有avatar添加到world
|
||||
world.avatar_manager.avatars = all_avatars
|
||||
|
||||
# 重建事件历史
|
||||
|
||||
# 检查是否需要从 JSON 迁移事件(向后兼容)
|
||||
db_event_count = world.event_manager.count()
|
||||
events_data = save_data.get("events", [])
|
||||
for event_data in events_data:
|
||||
event = Event.from_dict(event_data)
|
||||
world.event_manager.add_event(event)
|
||||
|
||||
|
||||
if db_event_count == 0 and len(events_data) > 0:
|
||||
# SQLite 数据库是空的,但 JSON 中有事件,执行迁移
|
||||
print(f"正在从 JSON 迁移 {len(events_data)} 条事件到 SQLite...")
|
||||
for event_data in events_data:
|
||||
event = Event.from_dict(event_data)
|
||||
world.event_manager.add_event(event)
|
||||
print("事件迁移完成")
|
||||
else:
|
||||
print(f"已从 SQLite 加载 {db_event_count} 条事件")
|
||||
|
||||
# 重建Simulator
|
||||
simulator_data = save_data.get("simulator", {})
|
||||
simulator = Simulator(world)
|
||||
simulator.birth_rate = simulator_data.get("birth_rate", CONFIG.game.npc_birth_rate_per_month)
|
||||
|
||||
print(f"存档加载成功!共加载 {len(all_avatars)} 个角色,{len(events_data)} 条事件")
|
||||
|
||||
print(f"存档加载成功!共加载 {len(all_avatars)} 个角色")
|
||||
return world, simulator, existed_sects
|
||||
|
||||
except Exception as e:
|
||||
|
||||
@@ -10,6 +10,7 @@ from src.classes.world import World
|
||||
from src.sim.simulator import Simulator
|
||||
from src.classes.sect import Sect
|
||||
from src.utils.config import CONFIG
|
||||
from src.sim.load_game import get_events_db_path
|
||||
|
||||
|
||||
def save_game(
|
||||
@@ -17,18 +18,18 @@ def save_game(
|
||||
simulator: Simulator,
|
||||
existed_sects: List[Sect],
|
||||
save_path: Optional[Path] = None
|
||||
) -> bool:
|
||||
) -> tuple[bool, str]:
|
||||
"""
|
||||
保存游戏状态到文件
|
||||
|
||||
|
||||
Args:
|
||||
world: 世界对象
|
||||
simulator: 模拟器对象
|
||||
existed_sects: 本局启用的宗门列表
|
||||
save_path: 保存路径,默认为saves/save.json
|
||||
|
||||
|
||||
Returns:
|
||||
保存是否成功
|
||||
(是否成功, 文件名)
|
||||
"""
|
||||
try:
|
||||
# 确定保存路径
|
||||
@@ -57,40 +58,39 @@ def save_game(
|
||||
avatars_data = []
|
||||
for avatar in world.avatar_manager.avatars.values():
|
||||
avatars_data.append(avatar.to_save_dict())
|
||||
|
||||
# 保存事件历史(限制数量)
|
||||
max_events = CONFIG.save.max_events_to_save
|
||||
events_data = []
|
||||
recent_events = world.event_manager.get_recent_events(limit=max_events)
|
||||
for event in recent_events:
|
||||
events_data.append(event.to_dict())
|
||||
|
||||
|
||||
# 事件已实时写入 SQLite,不再保存到 JSON。
|
||||
# 记录事件数据库路径到元信息中(供参考)。
|
||||
events_db_path = get_events_db_path(save_path)
|
||||
meta["events_db"] = str(events_db_path.name)
|
||||
meta["event_count"] = world.event_manager.count()
|
||||
|
||||
# 保存模拟器数据
|
||||
simulator_data = {
|
||||
"birth_rate": simulator.birth_rate
|
||||
}
|
||||
|
||||
# 组装完整的存档数据
|
||||
|
||||
# 组装完整的存档数据(不含 events,事件在 SQLite 中)
|
||||
save_data = {
|
||||
"meta": meta,
|
||||
"world": world_data,
|
||||
"avatars": avatars_data,
|
||||
"events": events_data,
|
||||
"simulator": simulator_data
|
||||
}
|
||||
|
||||
|
||||
# 写入文件
|
||||
with open(save_path, "w", encoding="utf-8") as f:
|
||||
json.dump(save_data, f, ensure_ascii=False, indent=2)
|
||||
|
||||
|
||||
print(f"游戏已保存到: {save_path}")
|
||||
return True
|
||||
|
||||
print(f"事件数据库: {events_db_path} ({meta['event_count']} 条事件)")
|
||||
return True, save_path.name
|
||||
|
||||
except Exception as e:
|
||||
print(f"保存游戏失败: {e}")
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
return False
|
||||
return False, ""
|
||||
|
||||
|
||||
def get_save_info(save_path: Path) -> Optional[dict]:
|
||||
|
||||
389
tests/test_api_events.py
Normal file
389
tests/test_api_events.py
Normal file
@@ -0,0 +1,389 @@
|
||||
"""
|
||||
Tests for the Events API endpoints.
|
||||
|
||||
Covers:
|
||||
- GET /api/events - pagination and filtering
|
||||
- DELETE /api/events/cleanup - event cleanup
|
||||
|
||||
Uses FastAPI TestClient to test the API directly.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
from unittest.mock import patch, MagicMock
|
||||
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
from src.classes.world import World
|
||||
from src.classes.map import Map
|
||||
from src.classes.tile import TileType
|
||||
from src.classes.calendar import Month, Year, create_month_stamp
|
||||
from src.classes.event import Event
|
||||
from src.classes.event_storage import EventStorage
|
||||
from src.classes.event_manager import EventManager
|
||||
|
||||
|
||||
def create_test_map():
    """Build a flat 10x10 all-plain map for use in tests."""
    test_map = Map(width=10, height=10)
    for col in range(10):
        for row in range(10):
            test_map.create_tile(col, row, TileType.PLAIN)
    return test_map
|
||||
|
||||
|
||||
def make_event(
    year: int,
    month: int,
    content: str,
    avatar_ids: list[str] | None = None,
    is_major: bool = False,
    is_story: bool = False,
) -> Event:
    """Construct an Event stamped at the given year/month."""
    stamp = create_month_stamp(Year(year), Month(month))
    return Event(
        month_stamp=stamp,
        content=content,
        related_avatars=avatar_ids,
        is_major=is_major,
        is_story=is_story,
    )
|
||||
|
||||
|
||||
@pytest.fixture
def temp_db_path():
    """Yield a database path inside a temp dir (removed on teardown)."""
    with tempfile.TemporaryDirectory() as tmpdir:
        tmp_root = Path(tmpdir)
        yield tmp_root / "test_events.db"
|
||||
|
||||
|
||||
@pytest.fixture
def mock_world_with_events(temp_db_path):
    """World fixture backed by SQLite and pre-seeded with five events."""
    world = World.create_with_db(
        map=create_test_map(),
        month_stamp=create_month_stamp(Year(100), Month.JANUARY),
        events_db_path=temp_db_path,
    )

    # Seed a small, known set of events in chronological order.
    seed = [
        make_event(100, 1, "Event 1", ["a1"]),
        make_event(100, 2, "Event 2", ["a2"]),
        make_event(100, 3, "Event between", ["a1", "a2"]),
        make_event(100, 4, "Major event", ["a1"], is_major=True),
        make_event(100, 5, "Story event", ["a1"], is_story=True),
    ]
    for ev in seed:
        world.event_manager.add_event(ev)

    yield world

    world.event_manager.close()
|
||||
|
||||
|
||||
@pytest.fixture
def client_with_world(mock_world_with_events):
    """TestClient whose global game_instance points at the fixture world."""
    # The API reads state from main.game_instance, so patch it in place.
    from src.server import main

    saved_state = main.game_instance.copy()

    main.game_instance["world"] = mock_world_with_events
    main.game_instance["sim"] = MagicMock()
    main.game_instance["is_paused"] = True

    yield TestClient(main.app)

    # Put back whatever was in game_instance before the test ran.
    main.game_instance.update(saved_state)
|
||||
|
||||
|
||||
class TestGetEventsAPI:
    """Behaviour of the GET /api/events endpoint."""

    def test_get_events_returns_all(self, client_with_world):
        """Without filters, every seeded event comes back in one page."""
        resp = client_with_world.get("/api/events")
        assert resp.status_code == 200

        payload = resp.json()
        assert "events" in payload
        assert "next_cursor" in payload
        assert "has_more" in payload

        assert len(payload["events"]) == 5
        assert payload["has_more"] is False

    def test_get_events_with_limit(self, client_with_world):
        """The limit parameter caps the page size."""
        resp = client_with_world.get("/api/events?limit=2")
        assert resp.status_code == 200

        payload = resp.json()
        assert len(payload["events"]) == 2
        assert payload["has_more"] is True
        assert payload["next_cursor"] is not None

    def test_get_events_pagination_cursor(self, client_with_world):
        """A cursor fetches the next page with no overlap."""
        first = client_with_world.get("/api/events?limit=3").json()

        cursor = first["next_cursor"]
        assert cursor is not None

        second = client_with_world.get(f"/api/events?limit=3&cursor={cursor}").json()

        assert len(second["events"]) == 2  # 5 total, 3 consumed by page one

        # The two pages must not share any event IDs.
        first_ids = {e["id"] for e in first["events"]}
        second_ids = {e["id"] for e in second["events"]}
        assert first_ids.isdisjoint(second_ids)

    def test_get_events_by_avatar(self, client_with_world):
        """Filtering by one avatar returns only that avatar's events."""
        resp = client_with_world.get("/api/events?avatar_id=a1")
        assert resp.status_code == 200

        payload = resp.json()

        # a1 participates in: Event 1, Event between, Major event, Story event.
        assert len(payload["events"]) == 4
        for ev in payload["events"]:
            assert "a1" in ev["related_avatar_ids"]

    def test_get_events_by_avatar_pair(self, client_with_world):
        """Pair filtering returns only events involving both avatars."""
        resp = client_with_world.get("/api/events?avatar_id_1=a1&avatar_id_2=a2")
        assert resp.status_code == 200

        payload = resp.json()

        # Only one seeded event involves both a1 and a2.
        assert len(payload["events"]) == 1
        assert payload["events"][0]["content"] == "Event between"

    def test_get_events_returns_correct_structure(self, client_with_world):
        """Each serialized event exposes the documented fields."""
        resp = client_with_world.get("/api/events?limit=1")
        assert resp.status_code == 200

        payload = resp.json()
        assert len(payload["events"]) == 1

        event = payload["events"][0]
        required_fields = (
            "id", "text", "content", "year", "month",
            "month_stamp", "related_avatar_ids", "is_major", "is_story",
        )
        for field in required_fields:
            assert field in event

    def test_get_events_no_world(self):
        """With no world loaded the endpoint returns an empty page."""
        from src.server import main

        saved = main.game_instance.copy()
        main.game_instance["world"] = None

        try:
            resp = TestClient(main.app).get("/api/events")
            assert resp.status_code == 200

            payload = resp.json()
            assert payload["events"] == []
            assert payload["next_cursor"] is None
            assert payload["has_more"] is False
        finally:
            main.game_instance.update(saved)
|
||||
|
||||
|
||||
class TestCleanupEventsAPI:
    """Behaviour of the DELETE /api/events/cleanup endpoint."""

    def test_cleanup_deletes_minor_events(self, client_with_world, mock_world_with_events):
        """Default cleanup removes every non-major event."""
        # (Removed a dead `initial_count` local that was assigned but
        # never asserted against.)
        response = client_with_world.delete("/api/events/cleanup")
        assert response.status_code == 200

        data = response.json()

        # Of the five seeded events only one is major; the other four go.
        assert data["deleted"] == 4
        assert mock_world_with_events.event_manager.count() == 1

    def test_cleanup_with_keep_major_false(self, client_with_world, mock_world_with_events):
        """keep_major=false deletes every event, major included."""
        response = client_with_world.delete("/api/events/cleanup?keep_major=false")
        assert response.status_code == 200

        data = response.json()
        assert data["deleted"] == 5
        assert mock_world_with_events.event_manager.count() == 0

    def test_cleanup_with_before_month_stamp(self, client_with_world, mock_world_with_events):
        """before_month_stamp restricts deletion to older events."""
        # Seed one event far in the past relative to the fixture's year 100.
        mock_world_with_events.event_manager.add_event(
            make_event(50, 1, "Old event", is_major=False)
        )

        before_stamp = int(create_month_stamp(Year(99), Month.JANUARY))
        response = client_with_world.delete(
            f"/api/events/cleanup?keep_major=false&before_month_stamp={before_stamp}"
        )
        assert response.status_code == 200

        data = response.json()

        # Only the pre-year-99 event falls before the stamp.
        assert data["deleted"] == 1
        assert mock_world_with_events.event_manager.count() == 5

    def test_cleanup_no_world(self):
        """With no world loaded, cleanup reports zero deletions and an error."""
        from src.server import main

        saved = main.game_instance.copy()
        main.game_instance["world"] = None

        try:
            response = TestClient(main.app).delete("/api/events/cleanup")
            assert response.status_code == 200

            data = response.json()
            assert data["deleted"] == 0
            assert "error" in data
        finally:
            main.game_instance.update(saved)
|
||||
|
||||
|
||||
class TestEventsPaginationIntegration:
    """End-to-end pagination behaviour through the HTTP API."""

    def test_full_pagination_cycle(self, temp_db_path):
        """Walking every page yields all events exactly once."""
        from src.server import main

        world = World.create_with_db(
            map=create_test_map(),
            month_stamp=create_month_stamp(Year(100), Month.JANUARY),
            events_db_path=temp_db_path,
        )

        # Seed 50 events spread across consecutive months.
        for i in range(50):
            world.event_manager.add_event(
                make_event(100 + (i // 12), (i % 12) + 1, f"Event {i}", ["a1"])
            )

        saved = main.game_instance.copy()
        main.game_instance["world"] = world
        main.game_instance["sim"] = MagicMock()

        try:
            client = TestClient(main.app)

            seen_ids = set()
            cursor = None
            pages = 0

            while True:
                url = "/api/events?limit=15"
                if cursor:
                    url += f"&cursor={cursor}"

                resp = client.get(url)
                assert resp.status_code == 200
                payload = resp.json()

                for ev in payload["events"]:
                    assert ev["id"] not in seen_ids, "Duplicate event in pagination"
                    seen_ids.add(ev["id"])

                pages += 1

                if not payload["has_more"]:
                    break

                cursor = payload["next_cursor"]

            # Every event visited exactly once, in 15+15+15+5 pages.
            assert len(seen_ids) == 50
            assert pages == 4

        finally:
            world.event_manager.close()
            main.game_instance.update(saved)

    def test_events_order_consistency(self, temp_db_path):
        """Pages concatenate into a single newest-first sequence."""
        from src.server import main

        world = World.create_with_db(
            map=create_test_map(),
            month_stamp=create_month_stamp(Year(100), Month.JANUARY),
            events_db_path=temp_db_path,
        )

        # Ten events with a known chronological order.
        for i in range(10):
            world.event_manager.add_event(make_event(100, i + 1, f"Event {i}"))

        saved = main.game_instance.copy()
        main.game_instance["world"] = world
        main.game_instance["sim"] = MagicMock()

        try:
            client = TestClient(main.app)

            first = client.get("/api/events?limit=5")
            second = client.get(f"/api/events?limit=5&cursor={first.json()['next_cursor']}")

            combined = first.json()["events"] + second.json()["events"]
            stamps = [ev["month_stamp"] for ev in combined]

            # Descending (newest-first) across the page boundary.
            for earlier, later in zip(stamps, stamps[1:]):
                assert earlier >= later

        finally:
            world.event_manager.close()
            main.game_instance.update(saved)
|
||||
700
tests/test_event_storage.py
Normal file
700
tests/test_event_storage.py
Normal file
@@ -0,0 +1,700 @@
|
||||
"""
|
||||
Tests for EventStorage and EventManager.
|
||||
|
||||
Covers:
|
||||
- EventStorage: add_event, get_events, pagination, cursor handling, cleanup
|
||||
- EventManager: all query methods, get_events_paginated
|
||||
- Memory fallback mode
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
from src.classes.event import Event, NULL_EVENT
|
||||
from src.classes.event_storage import EventStorage
|
||||
from src.classes.event_manager import EventManager
|
||||
from src.classes.calendar import MonthStamp, Year, Month, create_month_stamp
|
||||
|
||||
|
||||
# --- Fixtures ---
|
||||
|
||||
@pytest.fixture
def temp_db_path():
    """Yield a database path inside a temp dir (removed on teardown)."""
    with tempfile.TemporaryDirectory() as tmpdir:
        tmp_root = Path(tmpdir)
        yield tmp_root / "test_events.db"
|
||||
|
||||
|
||||
@pytest.fixture
def event_storage(temp_db_path):
    """Provide an EventStorage bound to a fresh temporary database."""
    store = EventStorage(temp_db_path)
    yield store
    store.close()
|
||||
|
||||
|
||||
@pytest.fixture
def event_manager(temp_db_path):
    """Provide an EventManager persisting to a fresh SQLite database."""
    mgr = EventManager.create_with_db(temp_db_path)
    yield mgr
    mgr.close()
|
||||
|
||||
|
||||
@pytest.fixture
def memory_event_manager():
    """Provide an EventManager running in pure in-memory mode (no SQLite)."""
    return EventManager.create_in_memory()
|
||||
|
||||
|
||||
def make_event(
    year: int,
    month: int,
    content: str,
    avatar_ids: list[str] | None = None,
    is_major: bool = False,
    is_story: bool = False,
    event_id: str | None = None,
) -> Event:
    """Construct an Event; a fixed id may be forced via event_id."""
    # Only pass "id" through when explicitly requested, so Event's own
    # id generation is used otherwise.
    extra = {} if event_id is None else {"id": event_id}
    return Event(
        month_stamp=create_month_stamp(Year(year), Month(month)),
        content=content,
        related_avatars=avatar_ids,
        is_major=is_major,
        is_story=is_story,
        **extra,
    )
|
||||
|
||||
|
||||
# --- EventStorage Tests ---
|
||||
|
||||
class TestEventStorageBasic:
    """Core add/count behaviour of EventStorage."""

    def test_init_creates_tables(self, temp_db_path):
        """Constructing the storage creates both schema tables."""
        storage = EventStorage(temp_db_path)
        assert storage._conn is not None

        rows = storage._conn.execute(
            "SELECT name FROM sqlite_master WHERE type='table' AND name IN ('events', 'event_avatars')"
        ).fetchall()
        table_names = [r[0] for r in rows]
        assert "events" in table_names
        assert "event_avatars" in table_names

        storage.close()

    def test_add_event_success(self, event_storage):
        """A single event is persisted and counted."""
        ev = make_event(100, 5, "Test event content", ["avatar_1", "avatar_2"])

        assert event_storage.add_event(ev) is True
        assert event_storage.count() == 1

    def test_add_event_duplicate_ignored(self, event_storage):
        """Re-adding an event with the same ID leaves only one row."""
        event_storage.add_event(make_event(100, 5, "Original content", event_id="fixed-id"))

        # Same ID, different payload: INSERT OR IGNORE keeps the first row.
        outcome = event_storage.add_event(
            make_event(100, 5, "Different content", event_id="fixed-id")
        )

        assert outcome is True  # INSERT OR IGNORE doesn't fail
        assert event_storage.count() == 1

    def test_add_event_without_avatars(self, event_storage):
        """Events with no related avatars are accepted."""
        ev = make_event(100, 5, "World event", avatar_ids=None)

        assert event_storage.add_event(ev) is True
        assert event_storage.count() == 1

    def test_count(self, event_storage):
        """count() tracks the number of stored events."""
        assert event_storage.count() == 0

        event_storage.add_event(make_event(100, 1, "Event 1"))
        assert event_storage.count() == 1

        event_storage.add_event(make_event(100, 2, "Event 2"))
        assert event_storage.count() == 2
|
||||
|
||||
|
||||
class TestEventStorageQueries:
    """Filtered retrieval behaviour of EventStorage."""

    def test_get_events_empty_db(self, event_storage):
        """An empty database yields no events and no cursor."""
        events, cursor = event_storage.get_events()

        assert events == []
        assert cursor is None

    def test_get_events_all(self, event_storage):
        """Unfiltered queries return everything, newest first."""
        event_storage.add_event(make_event(100, 1, "Event 1", ["a1"]))
        event_storage.add_event(make_event(100, 2, "Event 2", ["a2"]))
        event_storage.add_event(make_event(100, 3, "Event 3", ["a1", "a2"]))

        events, _cursor = event_storage.get_events()

        assert len(events) == 3
        # Descending order: newest event first.
        assert [e.content for e in events] == ["Event 3", "Event 2", "Event 1"]

    def test_get_events_by_avatar(self, event_storage):
        """Single-avatar filter returns only that avatar's events."""
        event_storage.add_event(make_event(100, 1, "Event A1 only", ["a1"]))
        event_storage.add_event(make_event(100, 2, "Event A2 only", ["a2"]))
        event_storage.add_event(make_event(100, 3, "Event both", ["a1", "a2"]))

        events, _ = event_storage.get_events(avatar_id="a1")

        assert len(events) == 2
        contents = {e.content for e in events}
        assert "Event A1 only" in contents
        assert "Event both" in contents
        assert "Event A2 only" not in contents

    def test_get_events_by_avatar_pair(self, event_storage):
        """Pair filter matches only events involving both avatars."""
        event_storage.add_event(make_event(100, 1, "Event A1 only", ["a1"]))
        event_storage.add_event(make_event(100, 2, "Event A2 only", ["a2"]))
        event_storage.add_event(make_event(100, 3, "Event A1+A2", ["a1", "a2"]))
        event_storage.add_event(make_event(100, 4, "Event A1+A3", ["a1", "a3"]))

        events, _ = event_storage.get_events(avatar_id_pair=("a1", "a2"))

        assert len(events) == 1
        assert events[0].content == "Event A1+A2"

    def test_get_events_by_avatar_returns_related_avatars(self, event_storage):
        """All related avatar IDs come back on a filtered event."""
        event_storage.add_event(make_event(100, 1, "Multi avatar", ["a1", "a2", "a3"]))

        events, _ = event_storage.get_events(avatar_id="a1")

        assert len(events) == 1
        assert set(events[0].related_avatars) == {"a1", "a2", "a3"}
|
||||
|
||||
|
||||
class TestEventStoragePagination:
|
||||
"""EventStorage pagination tests."""
|
||||
|
||||
def test_pagination_limit(self, event_storage):
|
||||
"""Test that limit parameter works."""
|
||||
for i in range(10):
|
||||
event_storage.add_event(make_event(100, i + 1, f"Event {i}"))
|
||||
|
||||
events, cursor = event_storage.get_events(limit=5)
|
||||
|
||||
assert len(events) == 5
|
||||
assert cursor is not None # Has more
|
||||
|
||||
def test_pagination_cursor_format(self, event_storage):
|
||||
"""Test cursor format is {month_stamp}_{rowid}."""
|
||||
for i in range(10):
|
||||
event_storage.add_event(make_event(100, i + 1, f"Event {i}"))
|
||||
|
||||
_, cursor = event_storage.get_events(limit=5)
|
||||
|
||||
assert cursor is not None
|
||||
parts = cursor.split("_")
|
||||
assert len(parts) == 2
|
||||
# Both parts should be integers
|
||||
assert parts[0].isdigit()
|
||||
assert parts[1].isdigit()
|
||||
|
||||
def test_pagination_cursor_continues(self, event_storage):
|
||||
"""Test that using cursor returns next page."""
|
||||
for i in range(10):
|
||||
event_storage.add_event(make_event(100, i + 1, f"Event {i}"))
|
||||
|
||||
# First page
|
||||
page1, cursor1 = event_storage.get_events(limit=5)
|
||||
assert len(page1) == 5
|
||||
assert cursor1 is not None # More events exist
|
||||
|
||||
# Second page
|
||||
page2, cursor2 = event_storage.get_events(limit=5, cursor=cursor1)
|
||||
assert len(page2) == 5
|
||||
|
||||
# No overlap between pages
|
||||
page1_ids = {e.id for e in page1}
|
||||
page2_ids = {e.id for e in page2}
|
||||
assert page1_ids.isdisjoint(page2_ids)
|
||||
|
||||
# cursor2 is None because all 10 events have been returned
|
||||
assert cursor2 is None
|
||||
|
||||
# All 10 unique events were returned across both pages
|
||||
all_ids = page1_ids | page2_ids
|
||||
assert len(all_ids) == 10
|
||||
|
||||
def test_pagination_no_more_events(self, event_storage):
|
||||
"""Test that cursor is None when no more events."""
|
||||
for i in range(3):
|
||||
event_storage.add_event(make_event(100, i + 1, f"Event {i}"))
|
||||
|
||||
events, cursor = event_storage.get_events(limit=10)
|
||||
|
||||
assert len(events) == 3
|
||||
assert cursor is None # No more
|
||||
|
||||
def test_pagination_with_filter(self, event_storage):
    """Cursor pagination and avatar filtering compose correctly."""
    owners = ("a1", "a2")
    for idx in range(10):
        # Even indices belong to a1, odd to a2 -> five events each.
        event_storage.add_event(
            make_event(100, idx + 1, f"Event {idx}", [owners[idx % 2]])
        )

    # First page of a1's five events.
    first_page, cursor = event_storage.get_events(avatar_id="a1", limit=3)
    assert len(first_page) == 3

    # Second page: only the remaining two.
    second_page, _ = event_storage.get_events(avatar_id="a1", limit=3, cursor=cursor)
    assert len(second_page) == 2
||||
|
||||
class TestEventStorageHelperMethods:
    """Tests for helper query methods.

    NOTE: these helpers (get_events_by_avatar, get_events_between,
    get_major/minor_events_by_avatar, get_recent_events) all return events
    oldest-first — the opposite of the paginated ``get_events()`` API,
    which the edge-case tests show returns newest-first.
    """

    def test_get_events_by_avatar_method(self, event_storage):
        """Test get_events_by_avatar returns in chronological order."""
        event_storage.add_event(make_event(100, 1, "First", ["a1"]))
        event_storage.add_event(make_event(100, 6, "Second", ["a1"]))
        event_storage.add_event(make_event(101, 1, "Third", ["a1"]))

        events = event_storage.get_events_by_avatar("a1")

        # Should be in chronological order (oldest first)
        assert events[0].content == "First"
        assert events[1].content == "Second"
        assert events[2].content == "Third"

    def test_get_events_between_method(self, event_storage):
        """Test get_events_between returns in chronological order."""
        event_storage.add_event(make_event(100, 1, "First pair", ["a1", "a2"]))
        event_storage.add_event(make_event(100, 6, "Second pair", ["a1", "a2"]))
        # Single-avatar event must be excluded from the pair query.
        event_storage.add_event(make_event(100, 3, "A1 only", ["a1"]))

        events = event_storage.get_events_between("a1", "a2")

        assert len(events) == 2
        # Chronological order
        assert events[0].content == "First pair"
        assert events[1].content == "Second pair"

    def test_get_major_events_by_avatar(self, event_storage):
        """Test getting only major events for an avatar.

        Story events are excluded even when flagged major — stories are
        grouped with minor events by these helpers.
        """
        event_storage.add_event(make_event(100, 1, "Minor 1", ["a1"], is_major=False))
        event_storage.add_event(make_event(100, 2, "Major 1", ["a1"], is_major=True))
        event_storage.add_event(make_event(100, 3, "Story", ["a1"], is_major=True, is_story=True))
        event_storage.add_event(make_event(100, 4, "Major 2", ["a1"], is_major=True))

        events = event_storage.get_major_events_by_avatar("a1")

        # Should only include major non-story events
        assert len(events) == 2
        contents = [e.content for e in events]
        assert "Major 1" in contents
        assert "Major 2" in contents
        assert "Story" not in contents
        assert "Minor 1" not in contents

    def test_get_minor_events_by_avatar(self, event_storage):
        """Test getting minor events (including stories) for an avatar."""
        event_storage.add_event(make_event(100, 1, "Minor 1", ["a1"], is_major=False))
        event_storage.add_event(make_event(100, 2, "Major 1", ["a1"], is_major=True))
        # Story events count as "minor" for this query despite is_major=True.
        event_storage.add_event(make_event(100, 3, "Story", ["a1"], is_major=True, is_story=True))

        events = event_storage.get_minor_events_by_avatar("a1")

        # Should include minor and story events
        assert len(events) == 2
        contents = [e.content for e in events]
        assert "Minor 1" in contents
        assert "Story" in contents
        assert "Major 1" not in contents

    def test_get_recent_events(self, event_storage):
        """Test get_recent_events returns in chronological order."""
        event_storage.add_event(make_event(100, 1, "First"))
        event_storage.add_event(make_event(100, 6, "Second"))
        event_storage.add_event(make_event(101, 1, "Third"))

        events = event_storage.get_recent_events()

        # Should be chronological (oldest first)
        assert events[0].content == "First"
        assert events[1].content == "Second"
        assert events[2].content == "Third"
||||
|
||||
class TestEventStorageCleanup:
    """Behaviour of EventStorage.cleanup() under its various flags."""

    def test_cleanup_keeps_major_by_default(self, event_storage):
        """With no arguments, cleanup() purges minor events only."""
        event_storage.add_event(make_event(100, 1, "Minor", is_major=False))
        event_storage.add_event(make_event(100, 2, "Major", is_major=True))

        removed = event_storage.cleanup()

        assert removed == 1
        assert event_storage.count() == 1
        survivors = event_storage.get_recent_events()
        assert survivors[0].content == "Major"

    def test_cleanup_deletes_all_when_keep_major_false(self, event_storage):
        """keep_major=False wipes major and minor events alike."""
        event_storage.add_event(make_event(100, 1, "Minor", is_major=False))
        event_storage.add_event(make_event(100, 2, "Major", is_major=True))

        removed = event_storage.cleanup(keep_major=False)

        assert removed == 2
        assert event_storage.count() == 0

    def test_cleanup_before_month_stamp(self, event_storage):
        """before_month_stamp limits deletion to events older than the cutoff."""
        event_storage.add_event(make_event(100, 1, "Old", is_major=False))
        event_storage.add_event(make_event(200, 1, "New", is_major=False))

        # Purge everything dated before year 150.
        cutoff = int(create_month_stamp(Year(150), Month.JANUARY))
        removed = event_storage.cleanup(keep_major=False, before_month_stamp=cutoff)

        assert removed == 1
        assert event_storage.count() == 1
        survivors = event_storage.get_recent_events()
        assert survivors[0].content == "New"
|
||||
class TestEventStorageCursorParsing:
    """Round-trip and error behaviour of the private cursor helpers."""

    def test_parse_cursor_valid(self, event_storage):
        """A well-formed cursor splits into (month_stamp, rowid) integers."""
        stamp, row = event_storage._parse_cursor("1200_42")

        assert stamp == 1200
        assert row == 42

    def test_parse_cursor_invalid_format(self, event_storage):
        """Garbage input must raise ValueError rather than return junk."""
        with pytest.raises(ValueError):
            event_storage._parse_cursor("invalid")

    def test_make_cursor(self, event_storage):
        """_make_cursor joins its two components with an underscore."""
        generated = event_storage._make_cursor(1200, 42)

        assert generated == "1200_42"
||||
|
||||
# --- EventManager Tests ---
|
||||
|
||||
class TestEventManagerWithStorage:
    """EventManager tests with SQLite storage.

    Uses the ``event_manager`` fixture (defined elsewhere in this file);
    per the class name it is backed by SQLite storage, and these tests
    exercise the query methods EventManager exposes on top of it.
    """

    def test_add_event(self, event_manager):
        """Test adding events through EventManager."""
        event = make_event(100, 5, "Test event", ["a1"])

        event_manager.add_event(event)

        assert event_manager.count() == 1

    def test_add_null_event_ignored(self, event_manager):
        """Test that NULL_EVENT is ignored.

        NULL_EVENT is the sentinel "no event" value; adding it must be a
        no-op instead of persisting an empty row.
        """
        event_manager.add_event(NULL_EVENT)

        assert event_manager.count() == 0

    def test_get_recent_events(self, event_manager):
        """Test getting recent events."""
        event_manager.add_event(make_event(100, 1, "First", ["a1"]))
        event_manager.add_event(make_event(100, 6, "Second", ["a1"]))

        events = event_manager.get_recent_events()

        assert len(events) == 2
        # Chronological order
        assert events[0].content == "First"
        assert events[1].content == "Second"

    def test_get_events_by_avatar(self, event_manager):
        """Test getting events by avatar."""
        event_manager.add_event(make_event(100, 1, "A1 event", ["a1"]))
        event_manager.add_event(make_event(100, 2, "A2 event", ["a2"]))

        events = event_manager.get_events_by_avatar("a1")

        assert len(events) == 1
        assert events[0].content == "A1 event"

    def test_get_events_between(self, event_manager):
        """Test getting events between two avatars.

        Only events involving BOTH avatars qualify.
        """
        event_manager.add_event(make_event(100, 1, "A1 only", ["a1"]))
        event_manager.add_event(make_event(100, 2, "A1+A2", ["a1", "a2"]))

        events = event_manager.get_events_between("a1", "a2")

        assert len(events) == 1
        assert events[0].content == "A1+A2"

    def test_get_major_events_by_avatar(self, event_manager):
        """Test getting major events for an avatar."""
        event_manager.add_event(make_event(100, 1, "Minor", ["a1"], is_major=False))
        event_manager.add_event(make_event(100, 2, "Major", ["a1"], is_major=True))

        events = event_manager.get_major_events_by_avatar("a1")

        assert len(events) == 1
        assert events[0].content == "Major"

    def test_get_minor_events_by_avatar(self, event_manager):
        """Test getting minor events for an avatar."""
        event_manager.add_event(make_event(100, 1, "Minor", ["a1"], is_major=False))
        event_manager.add_event(make_event(100, 2, "Major", ["a1"], is_major=True))

        events = event_manager.get_minor_events_by_avatar("a1")

        assert len(events) == 1
        assert events[0].content == "Minor"

    def test_get_major_events_between(self, event_manager):
        """Test getting major events between two avatars."""
        event_manager.add_event(make_event(100, 1, "Minor pair", ["a1", "a2"], is_major=False))
        event_manager.add_event(make_event(100, 2, "Major pair", ["a1", "a2"], is_major=True))

        events = event_manager.get_major_events_between("a1", "a2")

        assert len(events) == 1
        assert events[0].content == "Major pair"

    def test_get_minor_events_between(self, event_manager):
        """Test getting minor events between two avatars."""
        event_manager.add_event(make_event(100, 1, "Minor pair", ["a1", "a2"], is_major=False))
        event_manager.add_event(make_event(100, 2, "Major pair", ["a1", "a2"], is_major=True))

        events = event_manager.get_minor_events_between("a1", "a2")

        assert len(events) == 1
        assert events[0].content == "Minor pair"
||||
|
||||
class TestEventManagerPagination:
    """get_events_paginated() behaviour on a storage-backed manager."""

    def test_get_events_paginated_basic(self, event_manager):
        """A partial page yields a cursor and has_more=True."""
        for idx in range(10):
            event_manager.add_event(make_event(100, idx + 1, f"Event {idx}"))

        events, cursor, has_more = event_manager.get_events_paginated(limit=5)

        assert len(events) == 5
        assert cursor is not None
        assert has_more is True

    def test_get_events_paginated_with_filter(self, event_manager):
        """Avatar filtering applies within a paginated query."""
        owners = ("a1", "a2")
        for idx in range(10):
            # Alternate ownership: five events per avatar.
            event_manager.add_event(
                make_event(100, idx + 1, f"Event {idx}", [owners[idx % 2]])
            )

        events, cursor, has_more = event_manager.get_events_paginated(avatar_id="a1", limit=3)

        assert len(events) == 3
        assert has_more is True
        # Every returned event must involve the filtered avatar.
        assert all("a1" in evt.related_avatars for evt in events)

    def test_get_events_paginated_with_pair_filter(self, event_manager):
        """Pair filter returns only events involving both avatars."""
        event_manager.add_event(make_event(100, 1, "A1 only", ["a1"]))
        event_manager.add_event(make_event(100, 2, "A1+A2", ["a1", "a2"]))
        event_manager.add_event(make_event(100, 3, "A2 only", ["a2"]))

        events, _, _ = event_manager.get_events_paginated(avatar_id_pair=("a1", "a2"))

        assert len(events) == 1
        assert events[0].content == "A1+A2"

    def test_get_events_paginated_no_more(self, event_manager):
        """An exhausted result set reports no cursor and has_more=False."""
        event_manager.add_event(make_event(100, 1, "Event 1"))
        event_manager.add_event(make_event(100, 2, "Event 2"))

        events, cursor, has_more = event_manager.get_events_paginated(limit=10)

        assert len(events) == 2
        assert cursor is None
        assert has_more is False
||||
|
||||
class TestEventManagerMemoryMode:
    """EventManager tests in memory fallback mode.

    Uses the ``memory_event_manager`` fixture (defined elsewhere in this
    file). Memory mode mirrors the query API of the SQLite-backed manager
    but, as the pagination test below shows, does not implement real
    cursor-based pagination.
    """

    def test_add_and_get_events(self, memory_event_manager):
        """Test basic operations in memory mode."""
        memory_event_manager.add_event(make_event(100, 1, "Event 1", ["a1"]))
        memory_event_manager.add_event(make_event(100, 2, "Event 2", ["a2"]))

        events = memory_event_manager.get_recent_events()

        assert len(events) == 2

    def test_get_events_by_avatar_memory(self, memory_event_manager):
        """Test avatar filtering in memory mode."""
        memory_event_manager.add_event(make_event(100, 1, "A1 event", ["a1"]))
        memory_event_manager.add_event(make_event(100, 2, "A2 event", ["a2"]))

        events = memory_event_manager.get_events_by_avatar("a1")

        assert len(events) == 1
        assert events[0].content == "A1 event"

    def test_get_events_between_memory(self, memory_event_manager):
        """Test pair filtering in memory mode."""
        memory_event_manager.add_event(make_event(100, 1, "A1 only", ["a1"]))
        memory_event_manager.add_event(make_event(100, 2, "A1+A2", ["a1", "a2"]))

        events = memory_event_manager.get_events_between("a1", "a2")

        assert len(events) == 1
        assert events[0].content == "A1+A2"

    def test_get_major_events_memory(self, memory_event_manager):
        """Test major event filtering in memory mode."""
        memory_event_manager.add_event(make_event(100, 1, "Minor", ["a1"], is_major=False))
        memory_event_manager.add_event(make_event(100, 2, "Major", ["a1"], is_major=True))

        events = memory_event_manager.get_major_events_by_avatar("a1")

        assert len(events) == 1
        assert events[0].content == "Major"

    def test_get_minor_events_memory(self, memory_event_manager):
        """Test minor event filtering in memory mode.

        Story events count as minor here even with is_major=True,
        matching the SQLite-backed behavior.
        """
        memory_event_manager.add_event(make_event(100, 1, "Minor", ["a1"], is_major=False))
        memory_event_manager.add_event(make_event(100, 2, "Story", ["a1"], is_major=True, is_story=True))
        memory_event_manager.add_event(make_event(100, 3, "Major", ["a1"], is_major=True))

        events = memory_event_manager.get_minor_events_by_avatar("a1")

        assert len(events) == 2
        contents = [e.content for e in events]
        assert "Minor" in contents
        assert "Story" in contents

    def test_pagination_memory_mode(self, memory_event_manager):
        """Test that pagination in memory mode returns all events without real pagination."""
        for i in range(10):
            memory_event_manager.add_event(make_event(100, i + 1, f"Event {i}"))

        events, cursor, has_more = memory_event_manager.get_events_paginated(limit=5)

        # Memory mode doesn't support real pagination: the limit is still
        # honored, but no cursor is produced and has_more is always False.
        assert len(events) == 5  # Still respects limit
        assert cursor is None
        assert has_more is False

    def test_cleanup_memory_mode(self, memory_event_manager):
        """Test cleanup in memory mode clears all events.

        Unlike SQLite cleanup (which keeps major events by default),
        memory-mode cleanup removes everything.
        """
        memory_event_manager.add_event(make_event(100, 1, "Event 1"))
        memory_event_manager.add_event(make_event(100, 2, "Event 2"))

        deleted = memory_event_manager.cleanup()

        assert deleted == 2
        assert memory_event_manager.count() == 0
||||
|
||||
class TestEventManagerCleanup:
    """EventManager cleanup against the SQLite-backed fixture."""

    def test_cleanup_delegates_to_storage(self, event_manager):
        """cleanup() is forwarded to the underlying storage layer."""
        event_manager.add_event(make_event(100, 1, "Minor", is_major=False))
        event_manager.add_event(make_event(100, 2, "Major", is_major=True))

        removed = event_manager.cleanup()

        # Default storage policy: drop minor events, retain major ones.
        assert removed == 1
        assert event_manager.count() == 1
|
||||
|
||||
# --- Edge Cases ---
|
||||
|
||||
class TestEdgeCases:
    """Tests for edge cases and error handling."""

    def test_storage_closed_operations_fail_gracefully(self, temp_db_path):
        """Test that operations on closed storage fail gracefully.

        The storage contract is soft-failure: after close(), writes report
        False and reads return empty results instead of raising.
        """
        storage = EventStorage(temp_db_path)
        storage.close()

        # Should return False/empty rather than throwing
        assert storage.add_event(make_event(100, 1, "Test")) is False
        events, cursor = storage.get_events()
        assert events == []
        assert storage.count() == 0

    def test_event_with_many_avatars(self, event_storage):
        """Test event with many related avatars.

        Verifies the full avatar list round-trips through storage;
        set comparison ignores any ordering differences on read-back.
        """
        avatar_ids = [f"avatar_{i}" for i in range(20)]
        event = make_event(100, 1, "Large group event", avatar_ids)

        event_storage.add_event(event)

        events, _ = event_storage.get_events()
        assert len(events) == 1
        assert set(events[0].related_avatars) == set(avatar_ids)

    def test_empty_content(self, event_storage):
        """Test event with empty content.

        Empty strings are valid content and must round-trip unchanged.
        """
        event = make_event(100, 1, "", ["a1"])

        result = event_storage.add_event(event)

        assert result is True
        events, _ = event_storage.get_events()
        assert events[0].content == ""

    def test_special_characters_in_content(self, event_storage):
        """Test event with special characters in content.

        Covers non-ASCII text plus quote/markup characters that would
        break naive escaping.
        """
        content = "测试中文 & 'quotes' \"double\" <tag> END"
        event = make_event(100, 1, content, ["a1"])

        event_storage.add_event(event)

        events, _ = event_storage.get_events()
        assert events[0].content == content

    def test_same_month_stamp_ordering(self, event_storage):
        """Test that events with same month_stamp maintain insertion order.

        get_events() returns newest-first, so within one month the last
        inserted event comes back first.
        """
        # Add multiple events in the same month
        for i in range(5):
            event_storage.add_event(make_event(100, 6, f"Event {i}"))

        events, _ = event_storage.get_events()

        # Should be in reverse insertion order (newest first)
        assert events[0].content == "Event 4"
        assert events[4].content == "Event 0"
|
||||
# ---- begin new file: tests/test_save_load_events.py (489 lines) ----
|
||||
"""
|
||||
Tests for save/load functionality with SQLite event storage.
|
||||
|
||||
Covers:
|
||||
- Events persistence across save/load cycles
|
||||
- Database file switching when loading different saves
|
||||
- Event retrieval after loading
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
from unittest.mock import patch, MagicMock
|
||||
|
||||
from src.classes.world import World
|
||||
from src.classes.map import Map
|
||||
from src.classes.tile import TileType
|
||||
from src.classes.calendar import Month, Year, create_month_stamp, MonthStamp
|
||||
from src.classes.avatar import Avatar, Gender
|
||||
from src.classes.age import Age
|
||||
from src.classes.cultivation import Realm
|
||||
from src.classes.event import Event
|
||||
from src.classes.event_storage import EventStorage
|
||||
from src.classes.event_manager import EventManager
|
||||
from src.sim.simulator import Simulator
|
||||
from src.sim.save.save_game import save_game
|
||||
from src.sim.load.load_game import load_game
|
||||
from src.utils.id_generator import get_avatar_id
|
||||
|
||||
|
||||
def create_test_map():
    """Build a minimal 10x10 all-plain Map to back test worlds."""
    game_map = Map(width=10, height=10)
    for col in range(10):
        for row in range(10):
            game_map.create_tile(col, row, TileType.PLAIN)
    return game_map
|
||||
|
||||
|
||||
def make_event(
    year: int,
    month: int,
    content: str,
    avatar_ids: list[str] | None = None,
    is_major: bool = False,
) -> Event:
    """Build an Event stamped at the given in-game year and month.

    ``avatar_ids`` may be None for events with no related avatars.
    """
    stamp = create_month_stamp(Year(year), Month(month))
    return Event(
        month_stamp=stamp,
        content=content,
        related_avatars=avatar_ids,
        is_major=is_major,
    )
|
||||
|
||||
|
||||
def make_event_by_index(
    index: int,
    content: str,
    avatar_ids: list[str] | None = None,
) -> Event:
    """Create an Event for a sequential index, rolling months into years.

    Index 0 maps to year 100, month 1; every 12 indices advance one year,
    so callers can generate long runs without producing an invalid month.
    """
    years_offset, month_zero_based = divmod(index, 12)
    return make_event(100 + years_offset, month_zero_based + 1, content, avatar_ids)
|
||||
|
||||
|
||||
@pytest.fixture
def temp_save_dir(tmp_path):
    """Yield an empty ``saves`` directory under pytest's tmp_path."""
    save_root = tmp_path / "saves"
    save_root.mkdir()
    return save_root
|
||||
|
||||
|
||||
class TestEventManagerWithWorld:
    """Tests for EventManager integration with World."""

    def test_world_creates_event_manager_with_db(self, tmp_path):
        """Test that World.create_with_db creates proper EventManager."""
        db_path = tmp_path / "events.db"
        game_map = create_test_map()
        month_stamp = create_month_stamp(Year(100), Month.JANUARY)

        world = World.create_with_db(
            map=game_map,
            month_stamp=month_stamp,
            events_db_path=db_path,
        )

        # EventManager should be connected to SQLite
        assert world.event_manager is not None
        # NOTE(review): reaches into the private _storage attribute to
        # confirm SQLite backing — brittle if EventManager internals change.
        assert world.event_manager._storage is not None
        # Creating the world must materialize the db file on disk.
        assert db_path.exists()

        # Clean up
        world.event_manager.close()

    def test_events_written_to_sqlite(self, tmp_path):
        """Test that events added to World are written to SQLite."""
        db_path = tmp_path / "events.db"
        game_map = create_test_map()
        month_stamp = create_month_stamp(Year(100), Month.JANUARY)

        world = World.create_with_db(
            map=game_map,
            month_stamp=month_stamp,
            events_db_path=db_path,
        )

        # Add events
        event1 = make_event(100, 1, "First event", ["a1"])
        event2 = make_event(100, 2, "Second event", ["a2"])

        world.event_manager.add_event(event1)
        world.event_manager.add_event(event2)

        # Verify in SQLite
        assert world.event_manager.count() == 2

        # Clean up and verify persistence
        world.event_manager.close()

        # Reopen the same file with a fresh EventStorage: the two events
        # must have been durably written, not just cached in memory.
        storage = EventStorage(db_path)
        assert storage.count() == 2
        storage.close()
|
||||
|
||||
class TestSaveLoadWithEvents:
    """Tests for save/load cycle with SQLite events.

    Save/load of the events database is a known-broken area (load_game does
    not yet switch the EventManager to the loaded save's ``_events.db``),
    so the first test asserts only the minimum that must hold today.
    """

    def test_save_load_preserves_events(self, temp_save_dir, tmp_path):
        """Test that events are preserved across save/load cycle."""
        # Setup world with SQLite events
        db_path = tmp_path / "events.db"
        game_map = create_test_map()
        month_stamp = create_month_stamp(Year(100), Month.JANUARY)

        world = World.create_with_db(
            map=game_map,
            month_stamp=month_stamp,
            events_db_path=db_path,
        )

        # Create avatar
        avatar_id = get_avatar_id()
        avatar = Avatar(
            world=world,
            name="TestAvatar",
            id=avatar_id,
            birth_month_stamp=create_month_stamp(Year(80), Month.JANUARY),
            age=Age(20, Realm.Qi_Refinement),
            gender=Gender.MALE,
        )
        world.avatar_manager.avatars[avatar.id] = avatar

        # Add ten events, every third one major.
        for i in range(10):
            event = make_event(
                100, i + 1,
                f"Event {i} for avatar",
                [avatar_id],
                is_major=(i % 3 == 0),
            )
            world.event_manager.add_event(event)

        original_count = world.event_manager.count()
        assert original_count == 10

        # Save
        sim = Simulator(world)
        save_path = temp_save_dir / "test_events.json"
        success, _ = save_game(world, sim, [], save_path)
        assert success

        # Close current event manager
        world.event_manager.close()

        # Load (map loading is patched out — only event state matters here)
        with patch('src.run.load_map.load_cultivation_world_map', return_value=create_test_map()):
            loaded_world, loaded_sim, _ = load_game(save_path)

        # After loading, the world should use a new EventManager
        # connected to the loaded save's database.
        loaded_events = loaded_world.event_manager.get_recent_events()

        # FIX: previously loaded_events was computed but never asserted,
        # so this test passed vacuously. At minimum the loaded manager
        # must be queryable and return a list.
        assert isinstance(loaded_events, list)
        # TODO(review): once load_game correctly switches to the save's
        # _events.db (see known issues), strengthen this to
        # assert len(loaded_events) == original_count.

    def test_events_filtered_by_avatar_after_load(self, temp_save_dir, tmp_path):
        """Test that avatar-specific event queries work after loading."""
        db_path = tmp_path / "events.db"
        game_map = create_test_map()
        month_stamp = create_month_stamp(Year(100), Month.JANUARY)

        world = World.create_with_db(
            map=game_map,
            month_stamp=month_stamp,
            events_db_path=db_path,
        )

        # Create two avatars
        avatar1_id = get_avatar_id()
        avatar2_id = get_avatar_id()

        avatar1 = Avatar(
            world=world,
            name="Avatar1",
            id=avatar1_id,
            birth_month_stamp=create_month_stamp(Year(80), Month.JANUARY),
            age=Age(20, Realm.Qi_Refinement),
            gender=Gender.MALE,
        )
        avatar2 = Avatar(
            world=world,
            name="Avatar2",
            id=avatar2_id,
            birth_month_stamp=create_month_stamp(Year(80), Month.JANUARY),
            age=Age(20, Realm.Qi_Refinement),
            gender=Gender.FEMALE,
        )

        world.avatar_manager.avatars[avatar1.id] = avatar1
        world.avatar_manager.avatars[avatar2.id] = avatar2

        # Add events for different avatars
        world.event_manager.add_event(make_event(100, 1, "Avatar1 event", [avatar1_id]))
        world.event_manager.add_event(make_event(100, 2, "Avatar2 event", [avatar2_id]))
        world.event_manager.add_event(make_event(100, 3, "Both avatars", [avatar1_id, avatar2_id]))

        # Query before save
        avatar1_events = world.event_manager.get_events_by_avatar(avatar1_id)
        assert len(avatar1_events) == 2  # "Avatar1 event" and "Both avatars"

        between_events = world.event_manager.get_events_between(avatar1_id, avatar2_id)
        assert len(between_events) == 1  # "Both avatars"

        # Clean up
        world.event_manager.close()
|
||||
|
||||
class TestEventPagination:
    """Tests for event pagination functionality.

    These construct EventStorage/EventManager directly (no fixtures) so
    each test owns and closes its own database under tmp_path.
    """

    def test_pagination_returns_correct_pages(self, tmp_path):
        """Test that pagination returns events in correct order."""
        db_path = tmp_path / "events.db"
        storage = EventStorage(db_path)
        manager = EventManager(storage)

        # Add 25 events spanning month/year rollovers (12 months per year).
        for i in range(25):
            year = 100 + (i // 12)
            month = (i % 12) + 1
            manager.add_event(make_event(year, month, f"Event {i}"))

        # Get first page (10 items)
        page1, cursor1, has_more1 = manager.get_events_paginated(limit=10)
        assert len(page1) == 10
        assert has_more1 is True
        assert cursor1 is not None

        # Events should be in descending order (newest first)
        assert page1[0].content == "Event 24"  # Newest
        assert page1[9].content == "Event 15"

        # Get second page
        page2, cursor2, has_more2 = manager.get_events_paginated(limit=10, cursor=cursor1)
        assert len(page2) == 10
        assert has_more2 is True

        # Get third page (only 5 remaining)
        page3, cursor3, has_more3 = manager.get_events_paginated(limit=10, cursor=cursor2)
        assert len(page3) == 5
        assert has_more3 is False
        assert cursor3 is None

        # Verify no duplicates across pages
        all_ids = {e.id for e in page1} | {e.id for e in page2} | {e.id for e in page3}
        assert len(all_ids) == 25

        manager.close()

    def test_pagination_with_avatar_filter(self, tmp_path):
        """Test pagination with avatar filter."""
        db_path = tmp_path / "events.db"
        storage = EventStorage(db_path)
        manager = EventManager(storage)

        avatar1_id = "avatar_1"
        avatar2_id = "avatar_2"

        # Add events alternating between avatars (10 each)
        for i in range(20):
            avatar_id = avatar1_id if i % 2 == 0 else avatar2_id
            manager.add_event(make_event(100, (i % 12) + 1, f"Event {i}", [avatar_id]))

        # Get avatar1's events (should be 10)
        page1, cursor, has_more = manager.get_events_paginated(
            avatar_id=avatar1_id,
            limit=5
        )
        assert len(page1) == 5
        assert has_more is True

        # All events should be for avatar1
        for e in page1:
            assert avatar1_id in e.related_avatars

        # Get remaining (5 of avatar1's 10 events are left)
        page2, _, _ = manager.get_events_paginated(
            avatar_id=avatar1_id,
            limit=10,
            cursor=cursor
        )
        assert len(page2) == 5

        manager.close()

    def test_pagination_cursor_format_stability(self, tmp_path):
        """Test that cursor format is stable and parseable."""
        db_path = tmp_path / "events.db"
        storage = EventStorage(db_path)

        # Add some events
        for i in range(5):
            storage.add_event(make_event(100, i + 1, f"Event {i}"))

        _, cursor = storage.get_events(limit=3)

        # Cursor should be in format: month_stamp_rowid
        assert cursor is not None
        parts = cursor.split("_")
        assert len(parts) == 2
        assert parts[0].isdigit()
        assert parts[1].isdigit()

        # Cursor should round-trip through the private parser.
        month_stamp, rowid = storage._parse_cursor(cursor)
        assert isinstance(month_stamp, int)
        assert isinstance(rowid, int)

        storage.close()
|
||||
|
||||
class TestEventStorageEdgeCases:
    """Edge case tests for event storage."""

    def test_concurrent_writes(self, tmp_path):
        """Test that concurrent writes don't corrupt data.

        NOTE(review): despite the name, this is 100 rapid *sequential*
        writes on one connection — no threads are involved. True
        concurrent access is not covered here.
        """
        db_path = tmp_path / "events.db"
        storage = EventStorage(db_path)

        # Simulate rapid writes (use make_event_by_index to handle month > 12)
        events = [make_event_by_index(i, f"Event {i}") for i in range(100)]

        for event in events:
            result = storage.add_event(event)
            assert result is True

        assert storage.count() == 100
        storage.close()

    def test_large_event_content(self, tmp_path):
        """Test handling of large event content."""
        db_path = tmp_path / "events.db"
        storage = EventStorage(db_path)

        # Create event with large content (10KB)
        large_content = "测试内容" * 2500  # ~10KB of Chinese characters
        event = make_event(100, 1, large_content, ["a1"])

        result = storage.add_event(event)
        assert result is True

        # Content must round-trip byte-for-byte.
        events, _ = storage.get_events()
        assert len(events) == 1
        assert events[0].content == large_content

        storage.close()

    def test_special_characters_in_avatar_id(self, tmp_path):
        """Test handling of special characters in avatar IDs.

        Hyphens matter because the pagination cursor uses "_" as its
        separator and avatar IDs flow through SQL query filters.
        """
        db_path = tmp_path / "events.db"
        storage = EventStorage(db_path)

        # UUID-style IDs with hyphens
        avatar_id = "550e8400-e29b-41d4-a716-446655440000"
        event = make_event(100, 1, "Test event", [avatar_id])

        storage.add_event(event)

        events = storage.get_events_by_avatar(avatar_id)
        assert len(events) == 1
        assert avatar_id in events[0].related_avatars

        storage.close()

    def test_empty_database_queries(self, tmp_path):
        """Test queries on empty database return sensible results."""
        db_path = tmp_path / "events.db"
        storage = EventStorage(db_path)

        # All queries should return empty lists, not errors
        # (get_events returns a (events, cursor) pair).
        assert storage.get_events() == ([], None)
        assert storage.get_events_by_avatar("nonexistent") == []
        assert storage.get_events_between("a1", "a2") == []
        assert storage.get_major_events_by_avatar("a1") == []
        assert storage.get_minor_events_by_avatar("a1") == []
        assert storage.get_recent_events() == []
        assert storage.count() == 0

        storage.close()
|
||||
|
||||
class TestEventManagerMemoryFallback:
    """Tests for EventManager memory fallback mode."""

    def test_memory_mode_basic_operations(self):
        """Memory mode supports add, count, recent and per-avatar queries."""
        manager = EventManager.create_in_memory()

        for month, content, avatars in ((1, "Event 1", ["a1"]), (2, "Event 2", ["a2"])):
            manager.add_event(make_event(100, month, content, avatars))

        assert manager.count() == 2
        assert len(manager.get_recent_events()) == 2
        assert len(manager.get_events_by_avatar("a1")) == 1

    def test_memory_mode_cleanup(self):
        """cleanup() removes every event held in memory and reports the count."""
        manager = EventManager.create_in_memory()
        manager.add_event(make_event(100, 1, "Event 1"))
        manager.add_event(make_event(100, 2, "Event 2"))

        assert manager.cleanup() == 2
        assert manager.count() == 0
|
||||
|
||||
|
||||
class TestEventStorageCleanup:
    """Tests for event cleanup functionality."""

    def test_cleanup_with_time_filter(self, tmp_path):
        """cleanup(before_month_stamp=...) removes only events older than the cutoff."""
        storage = EventStorage(tmp_path / "events.db")

        # Three minor events spread across three different years.
        storage.add_event(make_event(50, 1, "Very old", is_major=False))
        storage.add_event(make_event(100, 1, "Old", is_major=False))
        storage.add_event(make_event(150, 1, "Recent", is_major=False))

        cutoff = int(create_month_stamp(Year(100), Month.JANUARY))
        removed = storage.cleanup(keep_major=False, before_month_stamp=cutoff)

        assert removed == 1  # only "Very old" falls before the cutoff
        assert storage.count() == 2

        storage.close()

    def test_cleanup_preserves_major_events(self, tmp_path):
        """cleanup(keep_major=True) deletes minor events but keeps major ones."""
        storage = EventStorage(tmp_path / "events.db")

        storage.add_event(make_event(100, 1, "Minor 1", is_major=False))
        storage.add_event(make_event(100, 2, "Major 1", is_major=True))
        storage.add_event(make_event(100, 3, "Minor 2", is_major=False))

        removed = storage.cleanup(keep_major=True)

        assert removed == 2
        assert storage.count() == 1
        assert storage.get_recent_events()[0].content == "Major 1"

        storage.close()
|
||||
@@ -66,6 +66,34 @@ export interface LLMConfigDTO {
|
||||
mode: string;
|
||||
}
|
||||
|
||||
// --- Events Pagination ---
|
||||
|
||||
export interface EventDTO {
|
||||
id: string;
|
||||
text: string;
|
||||
content: string;
|
||||
year: number;
|
||||
month: number;
|
||||
month_stamp: number;
|
||||
related_avatar_ids: string[];
|
||||
is_major: boolean;
|
||||
is_story: boolean;
|
||||
}
|
||||
|
||||
export interface EventsResponseDTO {
|
||||
events: EventDTO[];
|
||||
next_cursor: string | null;
|
||||
has_more: boolean;
|
||||
}
|
||||
|
||||
export interface FetchEventsParams {
|
||||
avatar_id?: string;
|
||||
avatar_id_1?: string;
|
||||
avatar_id_2?: string;
|
||||
cursor?: string;
|
||||
limit?: number;
|
||||
}
|
||||
|
||||
export const gameApi = {
|
||||
// --- World State ---
|
||||
|
||||
@@ -165,5 +193,25 @@ export const gameApi = {
|
||||
|
||||
saveLLMConfig(config: LLMConfigDTO) {
|
||||
return httpClient.post<{ status: string; message: string }>('/api/config/llm/save', config);
|
||||
},
|
||||
|
||||
// --- Events Pagination ---
|
||||
|
||||
/**
 * Fetch one page of events from `GET /api/events`.
 * Optional filters: single avatar, avatar pair, cursor and page size.
 */
fetchEvents(params: FetchEventsParams = {}) {
  const { avatar_id, avatar_id_1, avatar_id_2, cursor, limit } = params;
  const search = new URLSearchParams();
  if (avatar_id) search.set('avatar_id', avatar_id);
  if (avatar_id_1) search.set('avatar_id_1', avatar_id_1);
  if (avatar_id_2) search.set('avatar_id_2', avatar_id_2);
  if (cursor) search.set('cursor', cursor);
  if (limit) search.set('limit', String(limit));
  const suffix = search.toString();
  return httpClient.get<EventsResponseDTO>(suffix ? `/api/events?${suffix}` : '/api/events');
},
|
||||
|
||||
/**
 * Trigger server-side event cleanup via `DELETE /api/events/cleanup`.
 * @param keepMajor        when true, major events are preserved.
 * @param beforeMonthStamp only delete events older than this stamp (optional).
 */
cleanupEvents(keepMajor = true, beforeMonthStamp?: number) {
  const search = new URLSearchParams({ keep_major: String(keepMajor) });
  if (beforeMonthStamp !== undefined) {
    search.set('before_month_stamp', String(beforeMonthStamp));
  }
  return httpClient.delete<{ deleted: number }>(`/api/events/cleanup?${search}`);
}
|
||||
};
|
||||
|
||||
@@ -1,60 +1,124 @@
|
||||
<script setup lang="ts">
|
||||
import { computed, ref, watch, nextTick } from 'vue'
|
||||
import { computed, ref, watch, nextTick, onMounted } from 'vue'
|
||||
import { useWorldStore } from '../../stores/world'
|
||||
import { NSelect } from 'naive-ui'
|
||||
import { NSelect, NSpin, NButton } from 'naive-ui'
|
||||
import { highlightAvatarNames, buildAvatarColorMap } from '../../utils/eventHelper'
|
||||
import type { GameEvent } from '../../types/core'
|
||||
|
||||
const worldStore = useWorldStore()
|
||||
const filterValue = ref('all')
|
||||
const filterValue1 = ref('all')
|
||||
const filterValue2 = ref<string | null>(null) // null 表示未启用双人筛选
|
||||
const eventListRef = ref<HTMLElement | null>(null)
|
||||
|
||||
const filterOptions = computed(() => [
|
||||
{ label: '所有人', value: 'all' },
|
||||
...worldStore.avatarList.map(avatar => ({
|
||||
label: (avatar.name ?? avatar.id) + (avatar.is_dead ? ' (已故)' : ''),
|
||||
value: avatar.id
|
||||
...worldStore.avatarList.map(avatar => ({
|
||||
label: (avatar.name ?? avatar.id) + (avatar.is_dead ? ' (已故)' : ''),
|
||||
value: avatar.id
|
||||
}))
|
||||
])
|
||||
|
||||
const filteredEvents = computed(() => {
|
||||
const allEvents = worldStore.events || []
|
||||
if (filterValue.value === 'all') {
|
||||
return allEvents
|
||||
// 第二人的选项(排除第一人和"所有人")
|
||||
const filterOptions2 = computed(() =>
|
||||
worldStore.avatarList
|
||||
.filter(avatar => avatar.id !== filterValue1.value)
|
||||
.map(avatar => ({
|
||||
label: (avatar.name ?? avatar.id) + (avatar.is_dead ? ' (已故)' : ''),
|
||||
value: avatar.id
|
||||
}))
|
||||
)
|
||||
|
||||
// 直接使用 store 中的事件(已由 API 过滤)
|
||||
const displayEvents = computed(() => worldStore.events || [])
|
||||
|
||||
// 向上滚动加载更多
|
||||
function handleScroll(e: Event) {
|
||||
const el = e.target as HTMLElement
|
||||
if (!el) return
|
||||
|
||||
// 当滚动到顶部附近时,加载更多
|
||||
if (el.scrollTop < 100 && worldStore.eventsHasMore && !worldStore.eventsLoading) {
|
||||
const oldScrollHeight = el.scrollHeight
|
||||
worldStore.loadMoreEvents().then(() => {
|
||||
// 保持滚动位置(在顶部加载了新内容后)
|
||||
nextTick(() => {
|
||||
const newScrollHeight = el.scrollHeight
|
||||
el.scrollTop = newScrollHeight - oldScrollHeight + el.scrollTop
|
||||
})
|
||||
})
|
||||
}
|
||||
return allEvents.filter(event => event.relatedAvatarIds.includes(filterValue.value))
|
||||
}
|
||||
|
||||
// 构建筛选参数
|
||||
function buildFilter() {
|
||||
if (filterValue2.value && filterValue1.value !== 'all') {
|
||||
// 双人筛选
|
||||
return { avatar_id_1: filterValue1.value, avatar_id_2: filterValue2.value }
|
||||
} else if (filterValue1.value !== 'all') {
|
||||
// 单人筛选
|
||||
return { avatar_id: filterValue1.value }
|
||||
}
|
||||
return {}
|
||||
}
|
||||
|
||||
// 加载事件并滚动到底部
|
||||
async function reloadEvents() {
|
||||
await worldStore.resetEvents(buildFilter())
|
||||
nextTick(() => {
|
||||
if (eventListRef.value) {
|
||||
eventListRef.value.scrollTop = eventListRef.value.scrollHeight
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// 切换第一人筛选
|
||||
watch(filterValue1, async (newVal) => {
|
||||
// 如果选了"所有人",清除第二人筛选
|
||||
if (newVal === 'all') {
|
||||
filterValue2.value = null
|
||||
}
|
||||
await reloadEvents()
|
||||
})
|
||||
|
||||
// 智能滚动:仅当用户处于底部时才自动跟随滚动
|
||||
watch(filteredEvents, () => {
|
||||
const el = eventListRef.value
|
||||
let shouldAutoScroll = false
|
||||
// 切换第二人筛选
|
||||
watch(filterValue2, async () => {
|
||||
await reloadEvents()
|
||||
})
|
||||
|
||||
if (!el) {
|
||||
// 之前没有元素(列表为空),现在有新数据进入,应当默认滚动到底部
|
||||
shouldAutoScroll = true
|
||||
} else {
|
||||
// 元素存在,判断当前是否处于底部
|
||||
// 1. 如果内容不满一页,视为“在底部”
|
||||
// 2. 如果已溢出,判断距离底部的位置(阈值 50px)
|
||||
const isScrollable = el.scrollHeight > el.clientHeight
|
||||
const isAtBottom = !isScrollable || (el.scrollHeight - el.scrollTop - el.clientHeight < 50)
|
||||
shouldAutoScroll = isAtBottom
|
||||
// 添加第二人
|
||||
function addSecondFilter() {
|
||||
// 默认选择列表中的第一个(排除当前第一人)
|
||||
const options = filterOptions2.value
|
||||
if (options.length > 0) {
|
||||
filterValue2.value = options[0].value
|
||||
}
|
||||
}
|
||||
|
||||
if (shouldAutoScroll) {
|
||||
// 移除第二人筛选
|
||||
function removeSecondFilter() {
|
||||
filterValue2.value = null
|
||||
}
|
||||
|
||||
// 智能滚动:仅当用户处于底部时才自动跟随滚动(用于实时推送的新事件)
|
||||
watch(displayEvents, () => {
|
||||
const el = eventListRef.value
|
||||
if (!el) return
|
||||
|
||||
const isScrollable = el.scrollHeight > el.clientHeight
|
||||
const isAtBottom = !isScrollable || (el.scrollHeight - el.scrollTop - el.clientHeight < 50)
|
||||
|
||||
if (isAtBottom) {
|
||||
nextTick(() => {
|
||||
// DOM更新后再次获取元素
|
||||
const updatedEl = eventListRef.value
|
||||
if (updatedEl) {
|
||||
updatedEl.scrollTop = updatedEl.scrollHeight
|
||||
if (eventListRef.value) {
|
||||
eventListRef.value.scrollTop = eventListRef.value.scrollHeight
|
||||
}
|
||||
})
|
||||
}
|
||||
}, { deep: true })
|
||||
|
||||
// 切换筛选对象时,强制滚动到底部
|
||||
watch(filterValue, () => {
|
||||
// 初始加载
|
||||
onMounted(async () => {
|
||||
await worldStore.resetEvents({})
|
||||
nextTick(() => {
|
||||
if (eventListRef.value) {
|
||||
eventListRef.value.scrollTop = eventListRef.value.scrollHeight
|
||||
@@ -62,9 +126,11 @@ watch(filterValue, () => {
|
||||
})
|
||||
})
|
||||
|
||||
const emptyEventMessage = computed(() => (
|
||||
filterValue.value === 'all' ? '暂无事件' : '该修士暂无事件'
|
||||
))
|
||||
const emptyEventMessage = computed(() => {
|
||||
if (filterValue2.value) return '这两人之间暂无事件'
|
||||
if (filterValue1.value !== 'all') return '该修士暂无事件'
|
||||
return '暂无事件'
|
||||
})
|
||||
|
||||
function formatEventDate(event: { year: number; month: number }) {
|
||||
return `${event.year}年${event.month}月`
|
||||
@@ -84,16 +150,49 @@ function renderEventContent(event: GameEvent): string {
|
||||
<section class="sidebar-section">
|
||||
<div class="sidebar-header">
|
||||
<h3>事件记录</h3>
|
||||
<n-select
|
||||
v-model:value="filterValue"
|
||||
:options="filterOptions"
|
||||
size="tiny"
|
||||
class="event-filter"
|
||||
/>
|
||||
<div class="filter-group">
|
||||
<n-select
|
||||
v-model:value="filterValue1"
|
||||
:options="filterOptions"
|
||||
size="tiny"
|
||||
class="event-filter"
|
||||
/>
|
||||
<!-- 双人筛选 -->
|
||||
<template v-if="filterValue2 !== null">
|
||||
<n-select
|
||||
v-model:value="filterValue2"
|
||||
:options="filterOptions2"
|
||||
size="tiny"
|
||||
class="event-filter"
|
||||
/>
|
||||
<n-button size="tiny" quaternary @click="removeSecondFilter" class="remove-btn">
|
||||
×
|
||||
</n-button>
|
||||
</template>
|
||||
<!-- 添加第二人按钮(仅当选择了单人时显示) -->
|
||||
<n-button
|
||||
v-else-if="filterValue1 !== 'all'"
|
||||
size="tiny"
|
||||
quaternary
|
||||
@click="addSecondFilter"
|
||||
class="add-btn"
|
||||
>
|
||||
+ 添加第二人
|
||||
</n-button>
|
||||
</div>
|
||||
</div>
|
||||
<div v-if="filteredEvents.length === 0" class="empty">{{ emptyEventMessage }}</div>
|
||||
<div v-else class="event-list" ref="eventListRef">
|
||||
<div v-for="event in filteredEvents" :key="event.id" class="event-item">
|
||||
<div v-if="worldStore.eventsLoading && displayEvents.length === 0" class="loading">
|
||||
<n-spin size="small" />
|
||||
<span>加载中...</span>
|
||||
</div>
|
||||
<div v-else-if="displayEvents.length === 0" class="empty">{{ emptyEventMessage }}</div>
|
||||
<div v-else class="event-list" ref="eventListRef" @scroll="handleScroll">
|
||||
<!-- 顶部加载指示器 -->
|
||||
<div v-if="worldStore.eventsHasMore" class="load-more-hint">
|
||||
<span v-if="worldStore.eventsLoading">加载中...</span>
|
||||
<span v-else>向上滚动加载更多</span>
|
||||
</div>
|
||||
<div v-for="event in displayEvents" :key="event.id" class="event-item">
|
||||
<div class="event-date">{{ formatEventDate(event) }}</div>
|
||||
<div class="event-content" v-html="renderEventContent(event)"></div>
|
||||
</div>
|
||||
@@ -124,8 +223,34 @@ function renderEventContent(event: GameEvent): string {
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.filter-group {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 4px;
|
||||
}
|
||||
|
||||
.event-filter {
|
||||
width: 200px;
|
||||
width: 120px;
|
||||
}
|
||||
|
||||
.add-btn {
|
||||
color: #888;
|
||||
font-size: 11px;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.add-btn:hover {
|
||||
color: #aaa;
|
||||
}
|
||||
|
||||
.remove-btn {
|
||||
color: #888;
|
||||
font-size: 16px;
|
||||
padding: 0 4px;
|
||||
}
|
||||
|
||||
.remove-btn:hover {
|
||||
color: #f66;
|
||||
}
|
||||
|
||||
.event-list {
|
||||
@@ -160,10 +285,25 @@ function renderEventContent(event: GameEvent): string {
|
||||
white-space: pre-line;
|
||||
}
|
||||
|
||||
.empty {
|
||||
.empty, .loading {
|
||||
padding: 20px;
|
||||
text-align: center;
|
||||
color: #666;
|
||||
font-size: 12px;
|
||||
}
|
||||
|
||||
.loading {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.load-more-hint {
|
||||
text-align: center;
|
||||
padding: 8px;
|
||||
color: #666;
|
||||
font-size: 11px;
|
||||
border-bottom: 1px solid #2a2a2a;
|
||||
}
|
||||
</style>
|
||||
|
||||
@@ -39,6 +39,11 @@ export const useUiStore = defineStore('ui', () => {
|
||||
detailError.value = null;
|
||||
}
|
||||
|
||||
function clearHoverCache() {
|
||||
// 清除详情缓存,强制下次选择时重新加载。
|
||||
detailData.value = null;
|
||||
}
|
||||
|
||||
async function refreshDetail() {
|
||||
if (!selectedTarget.value) return;
|
||||
|
||||
@@ -78,9 +83,10 @@ export const useUiStore = defineStore('ui', () => {
|
||||
detailData,
|
||||
isLoadingDetail,
|
||||
detailError,
|
||||
|
||||
|
||||
select,
|
||||
clearSelection,
|
||||
clearHoverCache,
|
||||
refreshDetail
|
||||
};
|
||||
});
|
||||
|
||||
@@ -2,6 +2,7 @@ import { defineStore } from 'pinia';
|
||||
import { ref, shallowRef, computed } from 'vue';
|
||||
import type { AvatarSummary, GameEvent, MapMatrix, RegionSummary, CelestialPhenomenon } from '../types/core';
|
||||
import type { TickPayloadDTO, InitialStateDTO } from '../types/api';
|
||||
import type { FetchEventsParams } from '../api/game';
|
||||
import { gameApi } from '../api/game';
|
||||
import { processNewEvents, mergeAndSortEvents } from '../utils/eventHelper';
|
||||
|
||||
@@ -16,7 +17,13 @@ export const useWorldStore = defineStore('world', () => {
|
||||
const avatars = shallowRef<Map<string, AvatarSummary>>(new Map());
|
||||
|
||||
const events = shallowRef<GameEvent[]>([]);
|
||||
|
||||
|
||||
// 分页状态
|
||||
const eventsCursor = ref<string | null>(null);
|
||||
const eventsHasMore = ref(false);
|
||||
const eventsLoading = ref(false);
|
||||
const eventsFilter = ref<FetchEventsParams>({});
|
||||
|
||||
const mapData = shallowRef<MapMatrix>([]);
|
||||
const regions = shallowRef<Map<string | number, RegionSummary>>(new Map());
|
||||
|
||||
@@ -66,9 +73,31 @@ export const useWorldStore = defineStore('world', () => {
|
||||
|
||||
function addEvents(rawEvents: any[]) {
|
||||
if (!rawEvents || rawEvents.length === 0) return;
|
||||
|
||||
const newEvents = processNewEvents(rawEvents, year.value, month.value);
|
||||
events.value = mergeAndSortEvents(events.value, newEvents);
|
||||
|
||||
let newEvents = processNewEvents(rawEvents, year.value, month.value);
|
||||
|
||||
// 根据当前筛选条件过滤(数据在 SQLite 中不会丢失)
|
||||
const filter = eventsFilter.value;
|
||||
if (filter.avatar_id) {
|
||||
newEvents = newEvents.filter(e =>
|
||||
e.relatedAvatarIds?.includes(filter.avatar_id!)
|
||||
);
|
||||
} else if (filter.avatar_id_1 && filter.avatar_id_2) {
|
||||
newEvents = newEvents.filter(e =>
|
||||
e.relatedAvatarIds?.includes(filter.avatar_id_1!) &&
|
||||
e.relatedAvatarIds?.includes(filter.avatar_id_2!)
|
||||
);
|
||||
}
|
||||
|
||||
if (newEvents.length === 0) return;
|
||||
|
||||
// WebSocket 推送的新事件直接追加到末尾(最新事件在底部)
|
||||
// 使用 Set 去重(基于 id)
|
||||
const existingIds = new Set(events.value.map(e => e.id));
|
||||
const uniqueNewEvents = newEvents.filter(e => !existingIds.has(e.id));
|
||||
if (uniqueNewEvents.length > 0) {
|
||||
events.value = [...events.value, ...uniqueNewEvents];
|
||||
}
|
||||
}
|
||||
|
||||
function handleTick(payload: TickPayloadDTO) {
|
||||
@@ -90,8 +119,11 @@ export const useWorldStore = defineStore('world', () => {
|
||||
stateRes.avatars.forEach(av => avatarMap.set(av.id, av));
|
||||
}
|
||||
avatars.value = avatarMap;
|
||||
// 事件通过 resetEvents() 从分页 API 加载,这里只重置状态。
|
||||
events.value = [];
|
||||
if (stateRes.events) addEvents(stateRes.events);
|
||||
eventsCursor.value = null;
|
||||
eventsHasMore.value = false;
|
||||
eventsFilter.value = {};
|
||||
currentPhenomenon.value = stateRes.phenomenon || null;
|
||||
isLoaded.value = true;
|
||||
}
|
||||
@@ -112,6 +144,9 @@ export const useWorldStore = defineStore('world', () => {
|
||||
regions.value = regionMap;
|
||||
|
||||
applyStateSnapshot(stateRes);
|
||||
|
||||
// 从分页 API 加载事件。
|
||||
await resetEvents({});
|
||||
} catch (e) {
|
||||
console.error('Failed to initialize world', e);
|
||||
}
|
||||
@@ -131,10 +166,76 @@ export const useWorldStore = defineStore('world', () => {
|
||||
month.value = 0;
|
||||
avatars.value = new Map();
|
||||
events.value = [];
|
||||
eventsCursor.value = null;
|
||||
eventsHasMore.value = false;
|
||||
eventsFilter.value = {};
|
||||
isLoaded.value = false;
|
||||
currentPhenomenon.value = null;
|
||||
}
|
||||
|
||||
// --- 事件分页 ---
|
||||
|
||||
async function loadEvents(filter: FetchEventsParams = {}, append = false) {
|
||||
if (eventsLoading.value) return;
|
||||
eventsLoading.value = true;
|
||||
|
||||
try {
|
||||
const params: FetchEventsParams = { ...filter, limit: 100 };
|
||||
if (append && eventsCursor.value) {
|
||||
params.cursor = eventsCursor.value;
|
||||
}
|
||||
|
||||
const res = await gameApi.fetchEvents(params);
|
||||
|
||||
// 转换为 GameEvent 格式
|
||||
const newEvents: GameEvent[] = res.events.map(e => ({
|
||||
id: e.id,
|
||||
text: e.text,
|
||||
content: e.content,
|
||||
year: e.year,
|
||||
month: e.month,
|
||||
monthStamp: e.month_stamp,
|
||||
relatedAvatarIds: e.related_avatar_ids,
|
||||
isMajor: e.is_major,
|
||||
isStory: e.is_story,
|
||||
}));
|
||||
|
||||
// API 返回倒序(最新在前),反转成时间正序(最旧在前,最新在后)
|
||||
const sortedNewEvents = newEvents.reverse();
|
||||
|
||||
if (append) {
|
||||
// 加载更旧的事件,添加到顶部。
|
||||
events.value = [...sortedNewEvents, ...events.value];
|
||||
} else {
|
||||
// 切换筛选条件:直接用 API 数据替换,不做 merge。
|
||||
// TODO: API 请求期间 WebSocket 推送的事件可能丢失,用户可手动刷新。
|
||||
events.value = sortedNewEvents;
|
||||
eventsFilter.value = filter;
|
||||
}
|
||||
|
||||
eventsCursor.value = res.next_cursor;
|
||||
eventsHasMore.value = res.has_more;
|
||||
} catch (e) {
|
||||
console.error('Failed to load events', e);
|
||||
} finally {
|
||||
eventsLoading.value = false;
|
||||
}
|
||||
}
|
||||
|
||||
async function loadMoreEvents() {
|
||||
if (!eventsHasMore.value || eventsLoading.value) return;
|
||||
await loadEvents(eventsFilter.value, true);
|
||||
}
|
||||
|
||||
async function resetEvents(filter: FetchEventsParams = {}) {
|
||||
eventsLoading.value = false; // 强制允许新请求,避免被旧请求阻塞。
|
||||
eventsCursor.value = null;
|
||||
eventsHasMore.value = false;
|
||||
events.value = []; // 清空旧数据,避免筛选切换时显示残留。
|
||||
eventsFilter.value = filter; // 立即更新筛选条件,让 addEvents 也能正确过滤。
|
||||
await loadEvents(filter, false);
|
||||
}
|
||||
|
||||
async function getPhenomenaList() {
|
||||
if (phenomenaList.value.length > 0) return phenomenaList.value;
|
||||
try {
|
||||
@@ -163,17 +264,24 @@ export const useWorldStore = defineStore('world', () => {
|
||||
avatars,
|
||||
avatarList,
|
||||
events,
|
||||
eventsCursor,
|
||||
eventsHasMore,
|
||||
eventsLoading,
|
||||
eventsFilter,
|
||||
mapData,
|
||||
regions,
|
||||
isLoaded,
|
||||
frontendConfig,
|
||||
currentPhenomenon,
|
||||
phenomenaList,
|
||||
|
||||
|
||||
initialize,
|
||||
fetchState,
|
||||
handleTick,
|
||||
reset,
|
||||
loadEvents,
|
||||
loadMoreEvents,
|
||||
resetEvents,
|
||||
getPhenomenaList,
|
||||
changePhenomenon
|
||||
};
|
||||
|
||||
Reference in New Issue
Block a user