-
Notifications
You must be signed in to change notification settings - Fork 20
Expand file tree
/
Copy pathchroma_store.py
More file actions
248 lines (207 loc) · 8.56 KB
/
chroma_store.py
File metadata and controls
248 lines (207 loc) · 8.56 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
"""Chroma vector database memory store implementation."""
import logging
import uuid
from collections import Counter, deque
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Dict, List, Optional

from app.adapters.base import MemoryStore
from app.adapters.json_store import RawJsonStore
from app.config import settings
from app.models import MemoryStats, RetrievalHit, Schematic, SearchResult
class ChromaMemoryStore(MemoryStore):
    """Memory store backed by the Chroma vector database.

    Local JSON (via ``RawJsonStore``) remains the source of truth for
    schematic records; Chroma only holds the semantic index used for
    vector search. All reads of schematic data go through the JSON store.
    """

    # Module-style logger kept as a private class attribute so the class
    # remains self-contained.
    _logger = logging.getLogger(__name__)

    def __init__(
        self,
        chroma_path: Optional[Path] = None,
        json_path: Optional[Path] = None,
    ):
        """Initialize the Chroma store.

        Args:
            chroma_path: Path to the Chroma persist directory. Defaults to
                ``settings.chroma_path``.
            json_path: Path to the source JSON file for schematics.
        """
        self.chroma_path = chroma_path or settings.chroma_path
        self.json_store = RawJsonStore(json_path)
        # Bounded ring buffer of recent retrieval telemetry (newest last).
        self._hits: deque[RetrievalHit] = deque(maxlen=100)
        self._collection = None
        self._client = None
        self._initialized = False

    async def _ensure_initialized(self) -> None:
        """Lazily create the Chroma client and collection on first use.

        Raises:
            ImportError: If the optional ``chromadb`` dependency is missing.
        """
        if self._initialized:
            return
        # Keep the try body minimal: only the imports can legitimately raise
        # ImportError. The original wrapped the whole setup, which would have
        # misreported an unrelated ImportError as a missing dependency.
        try:
            import chromadb
            from chromadb.config import Settings as ChromaSettings
        except ImportError as exc:
            raise ImportError(
                "chromadb is required for ChromaMemoryStore. "
                "Install with: poetry install"
            ) from exc
        self.chroma_path.mkdir(parents=True, exist_ok=True)
        self._client = chromadb.PersistentClient(
            path=str(self.chroma_path),
            settings=ChromaSettings(
                anonymized_telemetry=False,
                allow_reset=True,
            ),
        )
        self._collection = self._client.get_or_create_collection(
            name="warnerco_schematics",
            metadata={"description": "WARNERCO robot schematics embeddings"},
        )
        self._initialized = True

    async def list_schematics(
        self,
        filters: Optional[Dict[str, Any]] = None,
        limit: int = 100,
        offset: int = 0,
    ) -> List[Schematic]:
        """List schematics from the JSON source of truth."""
        return await self.json_store.list_schematics(filters, limit, offset)

    async def get_schematic(self, schematic_id: str) -> Optional[Schematic]:
        """Get a single schematic from the JSON source of truth."""
        return await self.json_store.get_schematic(schematic_id)

    async def upsert_schematic(self, schematic: Schematic) -> Schematic:
        """Write to the JSON source, then re-index the record in Chroma."""
        result = await self.json_store.upsert_schematic(schematic)
        # Keep the vector index in sync with the source of truth.
        await self.embed_and_index(schematic.id)
        return result

    async def delete_schematic(self, schematic_id: str) -> bool:
        """Delete a schematic from both Chroma and the JSON source.

        Returns:
            True if the JSON store deleted the record; the Chroma removal is
            best-effort (the id may never have been indexed).
        """
        await self._ensure_initialized()
        try:
            self._collection.delete(ids=[schematic_id])
        except Exception:
            # Best-effort removal: log instead of silently swallowing so a
            # systemic Chroma failure is still visible in debug logs.
            self._logger.debug(
                "Chroma delete skipped for %s", schematic_id, exc_info=True
            )
        return await self.json_store.delete_schematic(schematic_id)

    async def embed_and_index(self, schematic_id: str) -> bool:
        """Embed and index a schematic in Chroma.

        Returns:
            True on success, False if the schematic does not exist or
            indexing failed.
        """
        await self._ensure_initialized()
        schematic = await self.json_store.get_schematic(schematic_id)
        if not schematic:
            return False
        try:
            # Document text comes from the model's own embedding projection.
            document = schematic.to_embed_text()
            metadata = {
                "id": schematic.id,
                "model": schematic.model,
                "name": schematic.name,
                "component": schematic.component,
                "category": schematic.category,
                "status": schematic.status.value,
                "version": schematic.version,
            }
            # Upsert into Chroma (uses Chroma's built-in embedding function).
            self._collection.upsert(
                ids=[schematic.id],
                documents=[document],
                metadatas=[metadata],
            )
            return True
        except Exception:
            # Indexing is non-fatal: log with traceback instead of print().
            self._logger.exception("Error indexing schematic %s", schematic_id)
            return False

    async def semantic_search(
        self,
        query: str,
        filters: Optional[Dict[str, Any]] = None,
        top_k: int = 5,
    ) -> List[SearchResult]:
        """Perform semantic search using Chroma.

        Args:
            query: Free-text search query.
            filters: Optional equality filters; only ``category``, ``model``
                and ``status`` keys are honored.
            top_k: Maximum number of results.

        Returns:
            Results ordered by Chroma relevance, with scores normalized to
            [0, 1]. Falls back to the JSON keyword search on any Chroma error.
        """
        await self._ensure_initialized()
        start_time = datetime.now(timezone.utc)
        try:
            # Build a Chroma "where" clause from the supported filter keys.
            # String values are lowercased to match the indexed metadata.
            where = None
            if filters:
                conditions = [
                    {key: {"$eq": value.lower() if isinstance(value, str) else value}}
                    for key, value in filters.items()
                    if key in ("category", "model", "status")
                ]
                if len(conditions) == 1:
                    where = conditions[0]
                elif len(conditions) > 1:
                    where = {"$and": conditions}
            results = self._collection.query(
                query_texts=[query],
                n_results=top_k,
                where=where,
                include=["documents", "metadatas", "distances"],
            )
            # Chroma returns one inner list per query text; guard against an
            # empty/None outer list rather than indexing blindly.
            ids = (results.get("ids") or [[]])[0]
            distances = (results.get("distances") or [[]])[0]
            search_results = []
            for i, schematic_id in enumerate(ids):
                schematic = await self.json_store.get_schematic(schematic_id)
                if schematic:
                    # Chroma reports L2 distance: lower distance means a
                    # better match, so map to a 0-1 similarity score.
                    distance = distances[i] if i < len(distances) else 1.0
                    score = max(0.0, 1.0 - (distance / 2.0))
                    search_results.append(
                        SearchResult(
                            schematic=schematic,
                            score=score,
                            chunk_id=schematic_id,
                        )
                    )
            # Record telemetry. All three id/score lists are derived from
            # search_results so they stay parallel even when a Chroma hit has
            # no matching JSON record (the original used the raw `ids` list
            # for chunk_ids, which could be longer than the other two).
            duration_ms = (
                datetime.now(timezone.utc) - start_time
            ).total_seconds() * 1000
            hit = RetrievalHit(
                id=str(uuid.uuid4()),
                timestamp=datetime.now(timezone.utc).isoformat(),
                query=query,
                robot_ids=[r.schematic.id for r in search_results],
                chunk_ids=[r.chunk_id for r in search_results],
                scores=[r.score for r in search_results],
                duration_ms=duration_ms,
                backend=self.backend_name,
            )
            self._hits.append(hit)
            return search_results
        except Exception:
            # Degrade gracefully: fall back to the JSON keyword search.
            self._logger.exception("Chroma search error; falling back to JSON")
            return await self.json_store.semantic_search(query, filters, top_k)

    async def get_memory_stats(self) -> MemoryStats:
        """Get statistics about the Chroma store (counts from Chroma,
        category/status breakdowns from the JSON source)."""
        await self._ensure_initialized()
        json_stats = await self.json_store.get_memory_stats()
        try:
            collection_count = self._collection.count()
        except Exception:
            # Treat an unreachable collection as empty rather than failing.
            collection_count = 0
        return MemoryStats(
            backend=self.backend_name,
            total_schematics=json_stats.total_schematics,
            indexed_count=collection_count,
            chunk_count=collection_count,
            categories=json_stats.categories,
            status_counts=json_stats.status_counts,
            last_update=json_stats.last_update,
        )

    async def get_recent_hits(self, limit: int = 20) -> List[RetrievalHit]:
        """Get recent retrieval telemetry, newest first."""
        return list(self._hits)[-limit:][::-1]

    @property
    def backend_name(self) -> str:
        """Name of this backend implementation."""
        return "chroma"

    async def index_all(self) -> int:
        """Index all schematics from the JSON source.

        Returns:
            The number of schematics successfully indexed.
        """
        await self._ensure_initialized()
        schematics = await self.json_store.list_schematics(limit=1000)
        count = 0
        for schematic in schematics:
            if await self.embed_and_index(schematic.id):
                count += 1
        return count