Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion docker/requirements-full.txt
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@ nvidia-cusparselt-cu12==0.6.3
nvidia-nccl-cu12==2.26.2
nvidia-nvjitlink-cu12==12.6.85
nvidia-nvtx-cu12==12.6.77
ollama==0.4.9
ollama==0.5.0
onnxruntime==1.22.1
openai==1.97.0
openapi-pydantic==0.5.1
Expand Down Expand Up @@ -184,3 +184,4 @@ py-key-value-aio==0.2.8
py-key-value-shared==0.2.8
PyJWT==2.10.1
pytest==9.0.2
alibabacloud-oss-v2==1.2.2
3 changes: 2 additions & 1 deletion docker/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ mdurl==0.1.2
more-itertools==10.8.0
neo4j==5.28.1
numpy==2.3.4
ollama==0.4.9
ollama==0.5.0
openai==1.109.1
openapi-pydantic==0.5.1
orjson==3.11.4
Expand Down Expand Up @@ -123,3 +123,4 @@ uvicorn==0.38.0
uvloop==0.22.1; sys_platform != 'win32'
watchfiles==1.1.1
websockets==15.0.1
alibabacloud-oss-v2==1.2.2
139 changes: 134 additions & 5 deletions poetry.lock

Large diffs are not rendered by default.

8 changes: 7 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ classifiers = [
]
dependencies = [
"openai (>=1.77.0,<2.0.0)",
"ollama (>=0.4.8,<0.5.0)",
"ollama (>=0.5.0,<0.5.1)",
"transformers (>=4.51.3,<5.0.0)",
"tenacity (>=9.1.2,<10.0.0)", # Error handling and retrying library
"fastapi[all] (>=0.115.12,<0.116.0)", # Web framework for building APIs
Expand Down Expand Up @@ -97,6 +97,11 @@ pref-mem = [
"datasketch (>=1.6.5,<2.0.0)", # MinHash library
]

# SkillMemory
skill-mem = [
"alibabacloud-oss-v2 (>=1.2.2,<1.2.3)",
]

# All optional dependencies
# Allow users to install with `pip install MemoryOS[all]`
all = [
Expand All @@ -123,6 +128,7 @@ all = [
"volcengine-python-sdk (>=4.0.4,<5.0.0)",
"nltk (>=3.9.1,<4.0.0)",
"rake-nltk (>=1.0.6,<1.1.0)",
"alibabacloud-oss-v2 (>=1.2.2,<1.2.3)",

# Uncategorized dependencies
]
Expand Down
34 changes: 34 additions & 0 deletions src/memos/api/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -467,6 +467,35 @@ def get_reader_config() -> dict[str, Any]:
}

@staticmethod
def get_oss_config() -> dict[str, Any] | None:
"""Get OSS configuration and validate connection."""

config = {
"endpoint": os.getenv("OSS_ENDPOINT", "http://oss-cn-shanghai.aliyuncs.com"),
"access_key_id": os.getenv("OSS_ACCESS_KEY_ID", ""),
"access_key_secret": os.getenv("OSS_ACCESS_KEY_SECRET", ""),
"region": os.getenv("OSS_REGION", ""),
"bucket_name": os.getenv("OSS_BUCKET_NAME", ""),
}

# Validate that all required fields have values
required_fields = [
"endpoint",
"access_key_id",
"access_key_secret",
"region",
"bucket_name",
]
missing_fields = [field for field in required_fields if not config.get(field)]

if missing_fields:
logger.warning(
f"OSS configuration incomplete. Missing fields: {', '.join(missing_fields)}"
)
return None

return config

def get_internet_config() -> dict[str, Any]:
"""Get embedder configuration."""
reader_config = APIConfig.get_reader_config()
Expand Down Expand Up @@ -746,6 +775,11 @@ def get_product_default_config() -> dict[str, Any]:
).split(",")
if h.strip()
],
"oss_config": APIConfig.get_oss_config(),
"skills_dir_config": {
"skills_oss_dir": os.getenv("SKILLS_OSS_DIR", "skill_memory/"),
"skills_local_dir": os.getenv("SKILLS_LOCAL_DIR", "/tmp/skill_memory/"),
},
},
},
"enable_textual_memory": True,
Expand Down
3 changes: 3 additions & 0 deletions src/memos/api/handlers/component_init.py
Original file line number Diff line number Diff line change
Expand Up @@ -307,6 +307,9 @@ def init_server() -> dict[str, Any]:
)
logger.debug("Searcher created")

# Set searcher to mem_reader
mem_reader.set_searcher(searcher)

# Initialize feedback server
feedback_server = SimpleMemFeedback(
llm=llm,
Expand Down
13 changes: 12 additions & 1 deletion src/memos/api/handlers/formatters_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -112,13 +112,17 @@ def post_process_textual_mem(
fact_mem = [
mem
for mem in text_formatted_mem
if mem["metadata"]["memory_type"] not in ["ToolSchemaMemory", "ToolTrajectoryMemory"]
if mem["metadata"]["memory_type"]
in ["WorkingMemory", "LongTermMemory", "UserMemory", "OuterMemory"]
]
tool_mem = [
mem
for mem in text_formatted_mem
if mem["metadata"]["memory_type"] in ["ToolSchemaMemory", "ToolTrajectoryMemory"]
]
skill_mem = [
mem for mem in text_formatted_mem if mem["metadata"]["memory_type"] == "SkillMemory"
]

memories_result["text_mem"].append(
{
Expand All @@ -134,6 +138,13 @@ def post_process_textual_mem(
"total_nodes": len(tool_mem),
}
)
memories_result["skill_mem"].append(
{
"cube_id": mem_cube_id,
"memories": skill_mem,
"total_nodes": len(skill_mem),
}
)
return memories_result


Expand Down
5 changes: 4 additions & 1 deletion src/memos/api/handlers/memory_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -213,7 +213,7 @@ def handle_get_memory(memory_id: str, naive_mem_cube: NaiveMemCube) -> GetMemory
def handle_get_memories(
get_mem_req: GetMemoryRequest, naive_mem_cube: NaiveMemCube
) -> GetMemoryResponse:
results: dict[str, Any] = {"text_mem": [], "pref_mem": [], "tool_mem": []}
results: dict[str, Any] = {"text_mem": [], "pref_mem": [], "tool_mem": [], "skill_mem": []}
memories = naive_mem_cube.text_mem.get_all(
user_name=get_mem_req.mem_cube_id,
user_id=get_mem_req.user_id,
Expand All @@ -226,6 +226,8 @@ def handle_get_memories(

if not get_mem_req.include_tool_memory:
results["tool_mem"] = []
if not get_mem_req.include_skill_memory:
results["skill_mem"] = []

preferences: list[TextualMemoryItem] = []

Expand Down Expand Up @@ -270,6 +272,7 @@ def handle_get_memories(
"text_mem": results.get("text_mem", []),
"pref_mem": results.get("pref_mem", []),
"tool_mem": results.get("tool_mem", []),
"skill_mem": results.get("skill_mem", []),
}

return GetMemoryResponse(message="Memories retrieved successfully", data=filtered_results)
Expand Down
17 changes: 15 additions & 2 deletions src/memos/api/product_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -358,6 +358,18 @@ class APISearchRequest(BaseRequest):
description="Number of tool memories to retrieve (top-K). Default: 6.",
)

include_skill_memory: bool = Field(
True,
description="Whether to retrieve skill memories along with general memories. "
"If enabled, the system will automatically recall skill memories "
"relevant to the query. Default: True.",
)
skill_mem_top_k: int = Field(
3,
ge=0,
description="Number of skill memories to retrieve (top-K). Default: 3.",
)

# ==== Filter conditions ====
# TODO: maybe add detailed description later
filter: dict[str, Any] | None = Field(
Expand Down Expand Up @@ -393,7 +405,7 @@ class APISearchRequest(BaseRequest):
# Internal field for search memory type
search_memory_type: str = Field(
"All",
description="Type of memory to search: All, WorkingMemory, LongTermMemory, UserMemory, OuterMemory, ToolSchemaMemory, ToolTrajectoryMemory",
description="Type of memory to search: All, WorkingMemory, LongTermMemory, UserMemory, OuterMemory, ToolSchemaMemory, ToolTrajectoryMemory, SkillMemory",
)

# ==== Context ====
Expand Down Expand Up @@ -772,7 +784,8 @@ class GetMemoryRequest(BaseRequest):
mem_cube_id: str = Field(..., description="Cube ID")
user_id: str | None = Field(None, description="User ID")
include_preference: bool = Field(True, description="Whether to return preference memory")
include_tool_memory: bool = Field(False, description="Whether to return tool memory")
include_tool_memory: bool = Field(True, description="Whether to return tool memory")
include_skill_memory: bool = Field(True, description="Whether to return skill memory")
filter: dict[str, Any] | None = Field(None, description="Filter for the memory")
page: int | None = Field(
None,
Expand Down
9 changes: 9 additions & 0 deletions src/memos/configs/mem_reader.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,15 @@ class MultiModalStructMemReaderConfig(BaseMemReaderConfig):
"If None, reads from FILE_PARSER_DIRECT_MARKDOWN_HOSTNAMES environment variable.",
)

oss_config: dict[str, Any] | None = Field(
default=None,
description="OSS configuration for the MemReader",
)
skills_dir_config: dict[str, Any] | None = Field(
default=None,
description="Skills directory for the MemReader",
)


class StrategyStructMemReaderConfig(BaseMemReaderConfig):
"""StrategyStruct MemReader configuration class."""
Expand Down
7 changes: 7 additions & 0 deletions src/memos/mem_reader/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@

if TYPE_CHECKING:
from memos.graph_dbs.base import BaseGraphDB
from memos.memories.textual.tree_text_memory.retrieve.searcher import Searcher


class BaseMemReader(ABC):
Expand All @@ -33,6 +34,12 @@ def set_graph_db(self, graph_db: "BaseGraphDB | None") -> None:
graph_db: The graph database instance, or None to disable recall operations.
"""

@abstractmethod
def set_searcher(self, searcher: "Searcher | None") -> None:
    """
    Set the searcher instance used for recall operations.

    Args:
        searcher: The searcher instance, or None to disable searcher-based
            recall. Companion to ``set_graph_db`` on this interface; concrete
            readers store the reference for later use.
    """

@abstractmethod
def get_memory(
self, scene_data: list, type: str, info: dict[str, Any], mode: str = "fast"
Expand Down
5 changes: 5 additions & 0 deletions src/memos/mem_reader/factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@

if TYPE_CHECKING:
from memos.graph_dbs.base import BaseGraphDB
from memos.memories.textual.tree_text_memory.retrieve.searcher import Searcher


class MemReaderFactory(BaseMemReader):
Expand All @@ -27,6 +28,7 @@ def from_config(
cls,
config_factory: MemReaderConfigFactory,
graph_db: Optional["BaseGraphDB | None"] = None,
searcher: Optional["Searcher | None"] = None,
) -> BaseMemReader:
"""
Create a MemReader instance from configuration.
Expand All @@ -50,4 +52,7 @@ def from_config(
if graph_db is not None:
reader.set_graph_db(graph_db)

if searcher is not None:
reader.set_searcher(searcher)

return reader
Loading