Skip to content

Commit 968fecb

Browse files
SonAIengine and claude committed
refactor: 팩토리 함수, 타입 스텁, 코드 중복 제거 — 사용자 편의성 개선
- SynapticGraph.memory() / .sqlite() / .full() 팩토리 클래스메서드 추가 → 신규 사용자가 preset 기반으로 즉시 시작 가능 - __init__.pyi 타입 스텁 생성 → lazy import 클래스 IDE 자동완성 지원 - _rank_to_score() 헬퍼 추출 (search.py 내 rank→score 변환 중복 제거) - _EmbedFromBatchMixin 도입 → OpenAI/Ollama EmbeddingProvider embed() 중복 제거 - __init__.py Quick Start docstring 추가 (3단계 예제 + 백엔드 목록) Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
1 parent 5ef8da2 commit 968fecb

File tree

5 files changed

+238
-16
lines changed

5 files changed

+238
-16
lines changed

src/synaptic/__init__.py

Lines changed: 39 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,42 @@
1-
"""Synaptic Memory — Brain-inspired knowledge graph for LLM agents."""
1+
"""Synaptic Memory — Brain-inspired knowledge graph for LLM agents.
2+
3+
Quick Start
4+
-----------
5+
6+
1. In-memory (zero dependencies)::
7+
8+
from synaptic import SynapticGraph
9+
10+
graph = SynapticGraph.memory()
11+
await graph.add("API 장애 대응", "서버 재시작 후 복구", kind=NodeKind.LESSON)
12+
result = await graph.search("장애 대응")
13+
14+
2. SQLite (경량 프로덕션)::
15+
16+
graph = SynapticGraph.sqlite("knowledge.db")
17+
await graph.backend.connect()
18+
await graph.add("배포 정책", "PR 머지 후 자동 배포", kind=NodeKind.RULE)
19+
20+
3. Full-featured (LLM 분류 + 임베딩 + 관계 탐지)::
21+
22+
from synaptic.backends.sqlite import SQLiteBackend
23+
from synaptic.extensions.llm_provider import OllamaLLMProvider
24+
25+
graph = SynapticGraph.full(
26+
SQLiteBackend("knowledge.db"),
27+
llm=OllamaLLMProvider(model="gemma3:4b"),
28+
embed_api_base="http://localhost:8080/v1",
29+
)
30+
await graph.backend.connect()
31+
32+
Backends
33+
--------
34+
- ``MemoryBackend`` — 테스트/개발 (zero-dep)
35+
- ``SQLiteBackend`` — 경량 프로덕션 (``pip install synaptic-memory[sqlite]``)
36+
- ``PostgreSQLBackend`` — 프로덕션 (``pip install synaptic-memory[postgresql]``)
37+
- ``Neo4jBackend`` — 그래프 탐색 (``pip install synaptic-memory[neo4j]``)
38+
- ``CompositeBackend`` — Neo4j + Qdrant + MinIO 조합 (``pip install synaptic-memory[scale]``)
39+
"""
240

341
from __future__ import annotations
442

src/synaptic/__init__.pyi

Lines changed: 53 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,53 @@
1+
"""Type stubs for synaptic — IDE autocomplete for lazy-imported classes."""
2+
3+
from synaptic.activity import ActivityTracker as ActivityTracker
4+
from synaptic.agent_search import AgentSearch as AgentSearch
5+
from synaptic.agent_search import SearchIntent as SearchIntent
6+
from synaptic.agent_search import suggest_intent as suggest_intent
7+
from synaptic.evidence import EvidenceAssembler as EvidenceAssembler
8+
from synaptic.extensions.classifier_hybrid import HybridClassifier as HybridClassifier
9+
from synaptic.extensions.classifier_llm import ClassificationResult as ClassificationResult
10+
from synaptic.extensions.classifier_llm import LLMClassifier as LLMClassifier
11+
from synaptic.extensions.classifier_rules import RuleBasedClassifier as RuleBasedClassifier
12+
from synaptic.extensions.embedder import EmbeddingProvider as EmbeddingProvider
13+
from synaptic.extensions.embedder import MockEmbeddingProvider as MockEmbeddingProvider
14+
from synaptic.extensions.embedder import OllamaEmbeddingProvider as OllamaEmbeddingProvider
15+
from synaptic.extensions.embedder import OpenAIEmbeddingProvider as OpenAIEmbeddingProvider
16+
from synaptic.extensions.llm_provider import OllamaLLMProvider as OllamaLLMProvider
17+
from synaptic.extensions.llm_provider import OpenAILLMProvider as OpenAILLMProvider
18+
from synaptic.extensions.phrase_extractor import PhraseExtractor as PhraseExtractor
19+
from synaptic.extensions.relation_detector import (
20+
EmbeddingRelationDetector as EmbeddingRelationDetector,
21+
)
22+
from synaptic.extensions.relation_detector import (
23+
RuleBasedRelationDetector as RuleBasedRelationDetector,
24+
)
25+
from synaptic.extensions.relation_detector_llm import LLMRelationDetector as LLMRelationDetector
26+
from synaptic.graph import SynapticGraph as SynapticGraph
27+
from synaptic.models import ActivatedNode as ActivatedNode
28+
from synaptic.models import ConsolidationLevel as ConsolidationLevel
29+
from synaptic.models import DigestResult as DigestResult
30+
from synaptic.models import Edge as Edge
31+
from synaptic.models import EdgeKind as EdgeKind
32+
from synaptic.models import EvidenceChain as EvidenceChain
33+
from synaptic.models import EvidenceStep as EvidenceStep
34+
from synaptic.models import Node as Node
35+
from synaptic.models import NodeKind as NodeKind
36+
from synaptic.models import SearchResult as SearchResult
37+
from synaptic.ontology import OntologyRegistry as OntologyRegistry
38+
from synaptic.ontology import PropertyDef as PropertyDef
39+
from synaptic.ontology import RelationConstraint as RelationConstraint
40+
from synaptic.ontology import TypeDef as TypeDef
41+
from synaptic.ontology import build_agent_ontology as build_agent_ontology
42+
from synaptic.ppr import personalized_pagerank as personalized_pagerank
43+
from synaptic.protocols import Digester as Digester
44+
from synaptic.protocols import GraphTraversal as GraphTraversal
45+
from synaptic.protocols import KindClassifier as KindClassifier
46+
from synaptic.protocols import QueryRewriter as QueryRewriter
47+
from synaptic.protocols import RelationDetector as RelationDetector
48+
from synaptic.protocols import StorageBackend as StorageBackend
49+
from synaptic.protocols import TagExtractor as TagExtractor
50+
from synaptic.resonance import ResonanceWeights as ResonanceWeights
51+
52+
__version__: str
53+
__all__: list[str]

src/synaptic/extensions/embedder.py

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,14 @@ async def embed(self, text: str) -> list[float]: ...
2323
async def embed_batch(self, texts: list[str]) -> list[list[float]]: ...
2424

2525

26+
class _EmbedFromBatchMixin:
27+
"""embed()를 embed_batch() 위임으로 기본 구현하는 mixin."""
28+
29+
async def embed(self, text: str) -> list[float]:
30+
results = await self.embed_batch([text]) # type: ignore[attr-defined]
31+
return results[0]
32+
33+
2634
class MockEmbeddingProvider:
2735
"""Mock embedding provider for testing. Returns deterministic vectors."""
2836

@@ -39,7 +47,7 @@ async def embed_batch(self, texts: list[str]) -> list[list[float]]:
3947
return [await self.embed(t) for t in texts]
4048

4149

42-
class OpenAIEmbeddingProvider:
50+
class OpenAIEmbeddingProvider(_EmbedFromBatchMixin):
4351
"""OpenAI-compatible embedding provider.
4452
4553
Works with any server implementing the /v1/embeddings endpoint:
@@ -67,10 +75,6 @@ def __init__(
6775
self._model = model
6876
self._timeout = timeout
6977

70-
async def embed(self, text: str) -> list[float]:
71-
results = await self.embed_batch([text])
72-
return results[0]
73-
7478
async def embed_batch(self, texts: list[str]) -> list[list[float]]:
7579
import aiohttp # noqa: PLC0415
7680

@@ -96,7 +100,7 @@ async def embed_batch(self, texts: list[str]) -> list[list[float]]:
96100
return embeddings
97101

98102

99-
class OllamaEmbeddingProvider:
103+
class OllamaEmbeddingProvider(_EmbedFromBatchMixin):
100104
"""Ollama native embedding endpoint (/api/embed).
101105
102106
For Ollama servers that don't expose /v1/embeddings.
@@ -121,10 +125,6 @@ def __init__(
121125
self._model = model
122126
self._timeout = timeout
123127

124-
async def embed(self, text: str) -> list[float]:
125-
results = await self.embed_batch([text])
126-
return results[0]
127-
128128
async def embed_batch(self, texts: list[str]) -> list[list[float]]:
129129
import aiohttp # noqa: PLC0415
130130

src/synaptic/graph.py

Lines changed: 129 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@
55
import json
66
from difflib import SequenceMatcher
77
from time import time
8+
from typing import TYPE_CHECKING, Literal
89

910
from synaptic.agent_search import AgentSearch, SearchIntent, suggest_intent
1011
from synaptic.cache import NodeCache
@@ -24,7 +25,7 @@
2425
NodeKind,
2526
SearchResult,
2627
)
27-
from synaptic.ontology import OntologyRegistry
28+
from synaptic.ontology import OntologyRegistry, build_agent_ontology
2829
from synaptic.protocols import (
2930
Digester,
3031
KindClassifier,
@@ -36,9 +37,24 @@
3637
from synaptic.search import HybridSearch
3738
from synaptic.store import Store
3839

40+
if TYPE_CHECKING:
41+
from synaptic.extensions.llm_provider import LLMProvider
42+
3943

4044
class SynapticGraph:
41-
"""Facade over the synaptic memory system."""
45+
"""Facade over the synaptic memory system.
46+
47+
Quick Start::
48+
49+
# 1. In-memory (zero-dep, 테스트/프로토타이핑)
50+
graph = SynapticGraph.memory()
51+
52+
# 2. SQLite (경량 프로덕션)
53+
graph = SynapticGraph.sqlite("knowledge.db")
54+
55+
# 3. Full preset with custom backend
56+
graph = SynapticGraph(backend, classifier=..., embedder=...)
57+
"""
4258

4359
__slots__ = (
4460
"_agent_search",
@@ -85,6 +101,117 @@ def __init__(
85101
self._phrase_extractor = phrase_extractor
86102
self._agent_search = AgentSearch(hybrid=self._search)
87103

104+
# --- Factory methods ---
105+
106+
@classmethod
def memory(cls, *, cache_size: int = 256) -> SynapticGraph:
    """In-memory backend — zero dependencies, for tests and prototyping.

    Uses ``MemoryBackend`` with a ``RuleBasedClassifier``; no connect()
    step is required before use.

    Args:
        cache_size: Forwarded to the constructor's ``cache_size``
            (presumably the node-cache capacity — confirm against
            ``__init__``).

    Example::

        graph = SynapticGraph.memory()
        await graph.add("Hello", "World")
    """
    # Lazy imports keep optional backends out of module import time.
    from synaptic.backends.memory import MemoryBackend  # noqa: PLC0415
    from synaptic.extensions.classifier_rules import RuleBasedClassifier  # noqa: PLC0415

    return cls(
        MemoryBackend(),
        classifier=RuleBasedClassifier(),
        cache_size=cache_size,
    )
123+
124+
@classmethod
def sqlite(
    cls,
    db_path: str = "synaptic.db",
    *,
    cache_size: int = 256,
) -> SynapticGraph:
    """SQLite backend — lightweight production preset with FTS5 search.

    Wires ``SQLiteBackend`` with rule-based classification, rule-based
    relation detection, and the agent ontology. Callers must
    ``await graph.backend.connect()`` before first use (see example).

    Args:
        db_path: Path of the SQLite database file.
        cache_size: Forwarded to the constructor's ``cache_size``.

    Example::

        graph = SynapticGraph.sqlite("knowledge.db")
        await graph.backend.connect()
        await graph.add("Hello", "World")
    """
    # Lazy imports: the sqlite extra is optional at install time.
    from synaptic.backends.sqlite import SQLiteBackend  # noqa: PLC0415
    from synaptic.extensions.classifier_rules import RuleBasedClassifier  # noqa: PLC0415
    from synaptic.extensions.relation_detector import RuleBasedRelationDetector  # noqa: PLC0415

    return cls(
        SQLiteBackend(db_path),
        classifier=RuleBasedClassifier(),
        relation_detector=RuleBasedRelationDetector(),
        ontology=build_agent_ontology(),
        cache_size=cache_size,
    )
150+
151+
@classmethod
def full(
    cls,
    backend: StorageBackend,
    *,
    llm: LLMProvider | None = None,
    embed_api_base: str = "",
    embed_model: str = "default",
    embed_api_key: str = "",
    cache_size: int = 512,
) -> SynapticGraph:
    """Full-featured preset — LLM classification, embeddings, relation
    detection, and the agent ontology over a caller-supplied backend.

    Behavior:
        * With ``llm`` set: hybrid (LLM + rule) classification and an
          LLM relation detector, each falling back to the rule-based
          implementation.
        * Without ``llm``: rule-based classifier and relation detector.
        * With a non-empty ``embed_api_base``: an OpenAI-compatible
          embedding provider is attached; otherwise no embedder.

    Args:
        backend: Any ``StorageBackend`` (connect it before use if the
            backend requires it).
        llm: Optional LLM provider powering classification/relations.
        embed_api_base: Base URL of a ``/v1/embeddings``-compatible API;
            empty string disables embeddings.
        embed_model: Embedding model name passed to the provider.
        embed_api_key: API key for the embedding endpoint (may be empty
            for local servers).
        cache_size: Forwarded to the constructor's ``cache_size``.

    Example::

        from synaptic.backends.sqlite import SQLiteBackend
        from synaptic.extensions.llm_provider import OllamaLLMProvider

        graph = SynapticGraph.full(
            SQLiteBackend("knowledge.db"),
            llm=OllamaLLMProvider(model="gemma3:4b"),
            embed_api_base="http://localhost:8080/v1",
        )
    """
    # Rule-based fallbacks are always needed, so import them up front.
    from synaptic.extensions.classifier_rules import RuleBasedClassifier  # noqa: PLC0415
    from synaptic.extensions.relation_detector import RuleBasedRelationDetector  # noqa: PLC0415

    classifier: KindClassifier
    relation_detector: RelationDetector
    embedder: EmbeddingProvider | None = None

    if llm is not None:
        # LLM-backed components are only imported when actually used.
        from synaptic.extensions.classifier_hybrid import HybridClassifier  # noqa: PLC0415
        from synaptic.extensions.classifier_llm import LLMClassifier  # noqa: PLC0415
        from synaptic.extensions.relation_detector_llm import LLMRelationDetector  # noqa: PLC0415

        classifier = HybridClassifier(
            llm=LLMClassifier(llm, fallback=RuleBasedClassifier()),
            rule=RuleBasedClassifier(),
        )
        relation_detector = LLMRelationDetector(llm, fallback=RuleBasedRelationDetector())
    else:
        classifier = RuleBasedClassifier()
        relation_detector = RuleBasedRelationDetector()

    if embed_api_base:
        from synaptic.extensions.embedder import OpenAIEmbeddingProvider  # noqa: PLC0415

        embedder = OpenAIEmbeddingProvider(
            api_base=embed_api_base,
            model=embed_model,
            api_key=embed_api_key,
        )

    # NOTE(review): PhraseExtractor and EmbeddingProvider are not imported
    # in this method — presumably module-level imports in graph.py; confirm
    # against the file's import block (not visible in this hunk).
    return cls(
        backend,
        classifier=classifier,
        relation_detector=relation_detector,
        embedder=embedder,
        ontology=build_agent_ontology(),
        phrase_extractor=PhraseExtractor(),
        cache_size=cache_size,
    )
214+
88215
@property
89216
def backend(self) -> StorageBackend:
90217
return self._backend

src/synaptic/search.py

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -37,6 +37,11 @@
3737
_KIND_BOOST = 0.05 # kind 매칭 시 search_score 부스트량 (보수적)
3838

3939

40+
def _rank_to_score(rank: int, *, top: float = 0.95, step: float = 0.05, floor: float = 0.3) -> float:
41+
"""순위 기반 점수 변환: 1위=top, 순위마다 step 감소, floor 이하 방지."""
42+
return max(floor, top - rank * step)
43+
44+
4045
def _cosine_sim(a: list[float], b: list[float]) -> float:
4146
"""두 벡터의 코사인 유사도."""
4247
dot = sum(x * y for x, y in zip(a, b))
@@ -83,8 +88,7 @@ async def search(
8388
fts_nodes = await backend.search_fts(query, limit=limit * 2)
8489
stages_used.append("fts")
8590
for rank, node in enumerate(fts_nodes):
86-
# FTS 순위 기반 점수: 1위=0.95, 감소율 0.05
87-
score = max(0.3, 0.95 - rank * 0.05)
91+
score = _rank_to_score(rank)
8892
fts_scores[node.id] = score
8993
all_nodes[node.id] = (node, score)
9094

@@ -94,7 +98,7 @@ async def search(
9498
stages_used.append("vector")
9599
for rank, node in enumerate(vec_nodes):
96100
# Vector 순위 기반 점수 + 실제 cosine similarity 반영
97-
rank_score = max(0.3, 0.95 - rank * 0.05)
101+
rank_score = _rank_to_score(rank)
98102
# cosine similarity 직접 계산 (가능한 경우)
99103
if node.embedding and embedding:
100104
sim = _cosine_sim(embedding, node.embedding)

0 commit comments

Comments
 (0)