From affa7bdc77bd830295bd298e36a6966aaa52b8ef Mon Sep 17 00:00:00 2001
From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>
Date: Fri, 4 Oct 2024 13:18:44 -0500
Subject: [PATCH 01/56] swap sqlite_vec for chroma in requirements

---
 docker/main/requirements-wheels.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docker/main/requirements-wheels.txt b/docker/main/requirements-wheels.txt
index 41bbcba098..f7717c808e 100644
--- a/docker/main/requirements-wheels.txt
+++ b/docker/main/requirements-wheels.txt
@@ -32,7 +32,7 @@ unidecode == 1.3.*
 # OpenVino (ONNX installed in wheels-post)
 openvino == 2024.3.*
 # Embeddings
-chromadb == 0.5.7
+sqlite_vec == 0.1.3
 onnx_clip == 4.0.*
 # Generative AI
 google-generativeai == 0.6.*

From 139c8652a9505fd1786a73f387352c9f6c36f861 Mon Sep 17 00:00:00 2001
From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>
Date: Fri, 4 Oct 2024 13:20:02 -0500
Subject: [PATCH 02/56] load sqlite_vec in embeddings manager

---
 frigate/embeddings/__init__.py | 25 ++++++++++++++++++-------
 1 file changed, 18 insertions(+), 7 deletions(-)

diff --git a/frigate/embeddings/__init__.py b/frigate/embeddings/__init__.py
index b3ad228745..00b02b1aca 100644
--- a/frigate/embeddings/__init__.py
+++ b/frigate/embeddings/__init__.py
@@ -1,13 +1,15 @@
-"""ChromaDB embeddings database."""
+"""SQLite-vec embeddings database."""
 
 import json
 import logging
 import multiprocessing as mp
+import os
 import signal
 import threading
 from types import FrameType
 from typing import Optional
 
+import sqlite_vec
 from playhouse.sqliteq import SqliteQueueDatabase
 from setproctitle import setproctitle
 
@@ -53,13 +55,19 @@ def receiveSignal(signalNumber: int, frame: Optional[FrameType]) -> None:
     models = [Event]
     db.bind(models)
 
-    embeddings = Embeddings()
+    conn = db.connection()
+    conn.enable_load_extension(True)
+    sqlite_vec.load(conn)
+    conn.enable_load_extension(False)
+
+    embeddings = Embeddings(db)
 
     # Check if we need to re-index events
     if config.semantic_search.reindex:
         embeddings.reindex()
 
     maintainer = EmbeddingMaintainer(
+        db,
         config,
         stop_event,
     )
@@ -67,14 +75,17 @@ def receiveSignal(signalNumber: int, frame: Optional[FrameType]) -> None:
 
 
 class EmbeddingsContext:
-    def __init__(self):
-        self.embeddings = Embeddings()
+    def __init__(self, db: SqliteQueueDatabase):
+        self.db = db
+        self.embeddings = Embeddings(db)
         self.thumb_stats = ZScoreNormalization()
         self.desc_stats = ZScoreNormalization()
 
+        logger.info(f"Initializing db: {self.db}")
+
         # load stats from disk
         try:
-            with open(f"{CONFIG_DIR}/.search_stats.json", "r") as f:
+            with open(os.path.join(CONFIG_DIR, ".search_stats.json"), "r") as f:
                 data = json.loads(f.read())
                 self.thumb_stats.from_dict(data["thumb_stats"])
                 self.desc_stats.from_dict(data["desc_stats"])
@@ -87,5 +98,5 @@ def save_stats(self):
             "thumb_stats": self.thumb_stats.to_dict(),
             "desc_stats": self.desc_stats.to_dict(),
         }
-        with open(f"{CONFIG_DIR}/.search_stats.json", "w") as f:
-            f.write(json.dumps(contents))
+        with open(os.path.join(CONFIG_DIR, ".search_stats.json"), "w") as f:
+            json.dump(contents, f)

From 654efe6be19908e3efabf8d18d1bc5ed2bca9f78 Mon Sep 17 00:00:00 2001
From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>
Date: Fri, 4 Oct 2024 13:20:24 -0500
Subject: [PATCH 03/56] remove chroma and revamp Embeddings class for sqlite_vec

---
 frigate/embeddings/embeddings.py | 218 +++++++++++++++++++++----------
 1 file changed, 151 insertions(+), 67 deletions(-)

diff --git a/frigate/embeddings/embeddings.py b/frigate/embeddings/embeddings.py
index 540764c1be..52c4290250 100644
--- a/frigate/embeddings/embeddings.py
+++ b/frigate/embeddings/embeddings.py
@@ -1,37 +1,20 @@
-"""ChromaDB embeddings database."""
+"""SQLite-vec embeddings database."""
 
 import base64
 import io
 import logging
-import sys
+import struct
 import time
+from typing import List, Tuple
 
-import numpy as np
 from PIL import Image
 from playhouse.shortcuts import model_to_dict
+from playhouse.sqliteq import SqliteQueueDatabase
 
 from frigate.models import Event
 
-# Squelch posthog logging
-logging.getLogger("chromadb.telemetry.product.posthog").setLevel(logging.CRITICAL)
-
-# Hot-swap the sqlite3 module for Chroma compatibility
-try:
-    from chromadb import Collection
-    from chromadb import HttpClient as ChromaClient
-    from chromadb.config import Settings
-
-    from .functions.clip import ClipEmbedding
-    from .functions.minilm_l6_v2 import MiniLMEmbedding
-except RuntimeError:
-    __import__("pysqlite3")
-    sys.modules["sqlite3"] = sys.modules.pop("pysqlite3")
-    from chromadb import Collection
-    from chromadb import HttpClient as ChromaClient
-    from chromadb.config import Settings
-
-    from .functions.clip import ClipEmbedding
-    from .functions.minilm_l6_v2 import MiniLMEmbedding
+from .functions.clip import ClipEmbedding
+from .functions.minilm_l6_v2 import MiniLMEmbedding
 
 logger = logging.getLogger(__name__)
 
@@ -67,34 +50,158 @@ def get_metadata(event: Event) -> dict:
     )
 
 
+def serialize(vector: List[float]) -> bytes:
+    """Serializes a list of floats into a compact "raw bytes" format"""
+    return struct.pack("%sf" % len(vector), *vector)
+
+
+def deserialize(bytes_data: bytes) -> List[float]:
+    """Deserializes a compact "raw bytes" format into a list of floats"""
+    return list(struct.unpack("%sf" % (len(bytes_data) // 4), bytes_data))
+
+
 class Embeddings:
-    """ChromaDB embeddings database."""
+    """SQLite-vec embeddings database."""
+
+    def __init__(self, db: SqliteQueueDatabase) -> None:
+        self.conn = db.connection()  # Store the database connection instance
+
+        # create tables if they don't exist
+        self._create_tables()
+
+        self.clip_embedding = ClipEmbedding(model="ViT-B/32")
+        self.minilm_embedding = MiniLMEmbedding(
+            preferred_providers=["CPUExecutionProvider"],
+        )
+
+    def _create_tables(self):
+        # Create vec0 virtual table for thumbnail embeddings
+        self.conn.execute("""
+            CREATE VIRTUAL TABLE IF NOT EXISTS vec_thumbnails USING vec0(
+                id TEXT PRIMARY KEY,
+                thumbnail_embedding FLOAT[512]
+            );
+        """)
+
+        # Create vec0 virtual table for description embeddings
+        self.conn.execute("""
+            CREATE VIRTUAL TABLE IF NOT EXISTS vec_descriptions USING vec0(
+                id TEXT PRIMARY KEY,
+                description_embedding FLOAT[384]
+            );
+        """)
+
+    def upsert_thumbnail(self, event_id: str, thumbnail: bytes):
+        # Convert thumbnail bytes to PIL Image
+        image = Image.open(io.BytesIO(thumbnail)).convert("RGB")
+        # Generate embedding using CLIP
+        embedding = self.clip_embedding([image])[0]
+
+        # sqlite_vec virtual tables don't support upsert, check if event_id exists
+        cursor = self.conn.execute(
+            "SELECT 1 FROM vec_thumbnails WHERE id = ?", (event_id,)
+        )
+        row = cursor.fetchone()
+
+        if row is None:
+            # Insert if the event_id does not exist
+            self.conn.execute(
+                "INSERT INTO vec_thumbnails(id, thumbnail_embedding) VALUES(?, ?)",
+                [event_id, serialize(embedding)],
+            )
+        else:
+            # Update if the event_id already exists
+            self.conn.execute(
+                "UPDATE vec_thumbnails SET thumbnail_embedding = ? WHERE id = ?",
+                [serialize(embedding), event_id],
+            )
+
+    def upsert_description(self, event_id: str, description: str):
+        # Generate embedding using MiniLM
+        embedding = self.minilm_embedding([description])[0]
 
-    def __init__(self) -> None:
-        self.client: ChromaClient = ChromaClient(
-            host="127.0.0.1",
-            settings=Settings(anonymized_telemetry=False),
+        # sqlite_vec virtual tables don't support upsert, check if event_id exists
+        cursor = self.conn.execute(
+            "SELECT 1 FROM vec_descriptions WHERE id = ?", (event_id,)
         )
+        row = cursor.fetchone()
+
+        if row is None:
+            # Insert if the event_id does not exist
+            self.conn.execute(
+                "INSERT INTO vec_descriptions(id, description_embedding) VALUES(?, ?)",
+                [event_id, serialize(embedding)],
+            )
+        else:
+            # Update if the event_id already exists
+            self.conn.execute(
+                "UPDATE vec_descriptions SET description_embedding = ? WHERE id = ?",
+                [serialize(embedding), event_id],
+            )
+
+    def delete_thumbnail(self, event_ids: List[str]) -> None:
+        ids = ", ".join("?" for _ in event_ids)
 
-    @property
-    def thumbnail(self) -> Collection:
-        return self.client.get_or_create_collection(
-            name="event_thumbnail", embedding_function=ClipEmbedding()
+        self.conn.execute(
+            f"DELETE FROM vec_thumbnails WHERE id IN ({ids})", tuple(event_ids)
         )
 
-    @property
-    def description(self) -> Collection:
-        return self.client.get_or_create_collection(
-            name="event_description",
-            embedding_function=MiniLMEmbedding(
-                preferred_providers=["CPUExecutionProvider"]
-            ),
+    def delete_description(self, event_ids: List[str]) -> None:
+        ids = ", ".join("?" for _ in event_ids)
+
+        self.conn.execute(
+            f"DELETE FROM vec_descriptions WHERE id IN ({ids})", tuple(event_ids)
         )
 
+    def search_thumbnail(self, event_id: str, limit=10) -> List[Tuple[str, float]]:
+        # check if it's already embedded
+        cursor = self.conn.execute(
+            "SELECT thumbnail_embedding FROM vec_thumbnails WHERE id = ?", (event_id,)
+        )
+        row = cursor.fetchone()
+        if row:
+            query_embedding = deserialize(row[0])
+        else:
+            # If not embedded, fetch the thumbnail from the Event table and embed it
+            event = Event.get_by_id(event_id)
+            thumbnail = base64.b64decode(event.thumbnail)
+            image = Image.open(io.BytesIO(thumbnail)).convert("RGB")
+            query_embedding = self.clip_embedding([image])[0]
+            self.upsert_thumbnail(event_id, thumbnail)
+
+        cursor = self.conn.execute(
+            """
+            SELECT
+                vec_thumbnails.id,
+                distance
+            FROM vec_thumbnails
+            WHERE thumbnail_embedding MATCH ?
+                AND k = ?
+            ORDER BY distance
+            """,
+            [serialize(query_embedding), limit],
+        )
+        return cursor.fetchall()
+
+    def search_description(self, query_text: str, limit=10) -> List[Tuple[str, float]]:
+        query_embedding = self.minilm_embedding([query_text])[0]
+        cursor = self.conn.execute(
+            """
+            SELECT
+                vec_descriptions.id,
+                distance
+            FROM vec_descriptions
+            WHERE description_embedding MATCH ?
+                AND k = ?
+            ORDER BY distance
+            """,
+            [serialize(query_embedding), limit],
+        )
+        return cursor.fetchall()
+
     def reindex(self) -> None:
         """Reindex all event embeddings."""
         logger.info("Indexing event embeddings...")
-        self.client.reset()
 
         st = time.time()
 
         totals = {
@@ -115,37 +222,14 @@ def reindex(self) -> None:
         )
 
         while len(events) > 0:
-            thumbnails = {"ids": [], "images": [], "metadatas": []}
-            descriptions = {"ids": [], "documents": [], "metadatas": []}
-
-            event: Event
             for event in events:
-                metadata = get_metadata(event)
                 thumbnail = base64.b64decode(event.thumbnail)
-                img = np.array(Image.open(io.BytesIO(thumbnail)).convert("RGB"))
-                thumbnails["ids"].append(event.id)
-                thumbnails["images"].append(img)
-                thumbnails["metadatas"].append(metadata)
+                self.upsert_thumbnail(event.id, thumbnail)
+                totals["thumb"] += 1
 
                 if description := event.data.get("description", "").strip():
-                    descriptions["ids"].append(event.id)
-                    descriptions["documents"].append(description)
-                    descriptions["metadatas"].append(metadata)
-
-            if len(thumbnails["ids"]) > 0:
-                totals["thumb"] += len(thumbnails["ids"])
-                self.thumbnail.upsert(
-                    images=thumbnails["images"],
-                    metadatas=thumbnails["metadatas"],
-                    ids=thumbnails["ids"],
-                )
-
-            if len(descriptions["ids"]) > 0:
-                totals["desc"] += len(descriptions["ids"])
-                self.description.upsert(
-                    documents=descriptions["documents"],
-                    metadatas=descriptions["metadatas"],
-                    ids=descriptions["ids"],
-                )
+                    totals["desc"] += 1
+                    self.upsert_description(event.id, description)
 
             current_page += 1
             events = (

From 4444d820892b18ae75b3504f06e2b1d763e580de Mon Sep 17 00:00:00 2001
From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>
Date: Fri, 4 Oct 2024 13:21:14 -0500
Subject: [PATCH 04/56] manual minilm onnx inference

---
 frigate/embeddings/functions/minilm_l6_v2.py | 104 ++++++++++++++++++-
 1 file changed, 100 insertions(+), 4 deletions(-)

diff --git a/frigate/embeddings/functions/minilm_l6_v2.py b/frigate/embeddings/functions/minilm_l6_v2.py
index f90060fdb3..651a640939 100644
--- a/frigate/embeddings/functions/minilm_l6_v2.py
+++ b/frigate/embeddings/functions/minilm_l6_v2.py
@@ -1,11 +1,107 @@
-"""Embedding function for ONNX MiniLM-L6 model used in Chroma."""
+"""Embedding function for ONNX MiniLM-L6 model."""
 
-from chromadb.utils.embedding_functions import ONNXMiniLM_L6_V2
+import errno
+import logging
+import os
+from pathlib import Path
+from typing import List
+
+import numpy as np
+import onnxruntime as ort
+import requests
+from transformers import AutoTokenizer
 
 from frigate.const import MODEL_CACHE_DIR
 
 
-class MiniLMEmbedding(ONNXMiniLM_L6_V2):
-    """Override DOWNLOAD_PATH to download to cache directory."""
+class MiniLMEmbedding:
+    """Embedding function for ONNX MiniLM-L6 model."""
 
     DOWNLOAD_PATH = f"{MODEL_CACHE_DIR}/all-MiniLM-L6-v2"
+    MODEL_NAME = "sentence-transformers/all-MiniLM-L6-v2"
+    IMAGE_MODEL_FILE = "model.onnx"
+    TOKENIZER_FILE = "tokenizer"
+
+    def __init__(self, preferred_providers=None):
+        """Initialize MiniLM Embedding function."""
+        self.tokenizer = self._load_tokenizer()
+
+        model_path = os.path.join(self.DOWNLOAD_PATH, self.IMAGE_MODEL_FILE)
+        if not os.path.exists(model_path):
+            self._download_model()
+
+        if preferred_providers is None:
+            preferred_providers = ["CPUExecutionProvider"]
+
+        self.session = self._load_model(model_path)
+
+    def _load_tokenizer(self):
+        """Load the tokenizer from the local path or download it if not available."""
+        tokenizer_path = os.path.join(self.DOWNLOAD_PATH, self.TOKENIZER_FILE)
+        if os.path.exists(tokenizer_path):
+            return AutoTokenizer.from_pretrained(tokenizer_path)
+        else:
+            return AutoTokenizer.from_pretrained(self.MODEL_NAME)
+
+    def _download_model(self):
+        """Download the ONNX model and tokenizer from a remote source if they don't exist."""
+        logging.info(f"Downloading {self.MODEL_NAME} ONNX model and tokenizer...")
+
+        # Download the tokenizer
+        tokenizer = AutoTokenizer.from_pretrained(self.MODEL_NAME)
+        os.makedirs(self.DOWNLOAD_PATH, exist_ok=True)
+        tokenizer.save_pretrained(os.path.join(self.DOWNLOAD_PATH, self.TOKENIZER_FILE))
+
+        # Download the ONNX model
+        s3_url = f"https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2/resolve/main/onnx/{self.IMAGE_MODEL_FILE}"
+        model_path = os.path.join(self.DOWNLOAD_PATH, self.IMAGE_MODEL_FILE)
+        self._download_from_url(s3_url, model_path)
+
+        logging.info(f"Model and tokenizer saved to {self.DOWNLOAD_PATH}")
+
+    def _download_from_url(self, url: str, save_path: str):
+        """Download a file from a URL and save it to a specified path."""
+        temporary_filename = Path(save_path).with_name(
+            os.path.basename(save_path) + ".part"
+        )
+        temporary_filename.parent.mkdir(parents=True, exist_ok=True)
+        with requests.get(url, stream=True, allow_redirects=True) as r:
+            # if the content type is HTML, it's not the actual model file
+            if "text/html" in r.headers.get("Content-Type", ""):
+                raise ValueError(
+                    f"Expected an ONNX file but received HTML from the URL: {url}"
+                )
+
+            # Ensure the download is successful
+            r.raise_for_status()
+
+            # Write the model to a temporary file first
+            with open(temporary_filename, "wb") as f:
+                for chunk in r.iter_content(chunk_size=8192):
+                    f.write(chunk)
+
+        temporary_filename.rename(save_path)
+
+    def _load_model(self, path: str):
+        """Load the ONNX model from a given path."""
+        providers = ["CPUExecutionProvider"]
+        if os.path.exists(path):
+            return ort.InferenceSession(path, providers=providers)
+        else:
+            raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), path)
+
+    def __call__(self, texts: List[str]) -> List[np.ndarray]:
+        """Generate embeddings for the given texts."""
+        inputs = self.tokenizer(
+            texts, padding=True, truncation=True, return_tensors="np"
+        )
+
+        input_names = [input.name for input in self.session.get_inputs()]
+        onnx_inputs = {name: inputs[name] for name in input_names if name in inputs}
+
+        # Run inference
+        outputs = self.session.run(None, onnx_inputs)
+
+        embeddings = outputs[0].mean(axis=1)
+
+        return [embedding for embedding in embeddings]

From 3c334175c7f61062dac966a520aa78ff40c2570e Mon Sep 17 00:00:00 2001
From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>
Date: Fri, 4 Oct 2024 13:21:29 -0500
Subject: [PATCH 05/56] remove chroma in clip model

---
 frigate/embeddings/functions/clip.py | 31 +++++++++++++---------------
 1 file changed, 14 insertions(+), 17 deletions(-)

diff --git a/frigate/embeddings/functions/clip.py b/frigate/embeddings/functions/clip.py
index a4e75a510d..55cdb3b472 100644
--- a/frigate/embeddings/functions/clip.py
+++ b/frigate/embeddings/functions/clip.py
@@ -4,18 +4,13 @@
 import logging
 import os
 from pathlib import Path
-from typing import Tuple, Union
+from typing import List, Union
 
+import numpy as np
 import onnxruntime as ort
 import requests
-from chromadb import EmbeddingFunction, Embeddings
-from chromadb.api.types import (
-    Documents,
-    Images,
-    is_document,
-    is_image,
-)
 from onnx_clip import OnnxClip
+from PIL import Image
 
 from frigate.const import MODEL_CACHE_DIR
 
@@ -27,7 +22,7 @@ class Clip(OnnxClip):
     def _load_models(
         model: str,
         silent: bool,
-    ) -> Tuple[ort.InferenceSession, ort.InferenceSession]:
+    ) -> tuple[ort.InferenceSession, ort.InferenceSession]:
         """
         These models are a part of the container. Treat as as such.
         """
@@ -87,20 +82,22 @@ def _load_model(path: str, silent: bool):
         return ort.InferenceSession(path, providers=providers)
 
 
-class ClipEmbedding(EmbeddingFunction):
-    """Embedding function for CLIP model used in Chroma."""
+class ClipEmbedding:
+    """Embedding function for CLIP model."""
 
     def __init__(self, model: str = "ViT-B/32"):
         """Initialize CLIP Embedding function."""
         self.model = Clip(model)
 
-    def __call__(self, input: Union[Documents, Images]) -> Embeddings:
-        embeddings: Embeddings = []
+    def __call__(self, input: Union[List[str], List[Image.Image]]) -> List[np.ndarray]:
+        embeddings = []
         for item in input:
-            if is_image(item):
+            if isinstance(item, Image.Image):
                 result = self.model.get_image_embeddings([item])
-                embeddings.append(result[0, :].tolist())
-            elif is_document(item):
+                embeddings.append(result[0])
+            elif isinstance(item, str):
                 result = self.model.get_text_embeddings([item])
-                embeddings.append(result[0, :].tolist())
+                embeddings.append(result[0])
+            else:
+                raise ValueError(f"Unsupported input type: {type(item)}")
         return embeddings

From 3e5420c4109cedd7544ccf7075bd411ff426c5b4 Mon Sep 17 00:00:00 2001
From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>
Date: Fri, 4 Oct 2024 13:22:01 -0500
Subject: [PATCH 06/56] migrate api from chroma to sqlite_vec

---
 frigate/api/event.py | 85 +++++++++++---------------------------------
 1 file changed, 20 insertions(+), 65 deletions(-)

diff --git a/frigate/api/event.py b/frigate/api/event.py
index 3c861f901a..9457d01488 100644
--- a/frigate/api/event.py
+++ b/frigate/api/event.py
@@ -1,8 +1,6 @@
 """Event apis."""
 
-import base64
 import datetime
-import io
 import logging
 import os
 from functools import reduce
@@ -10,12 +8,10 @@
 from urllib.parse import unquote
 
 import cv2
-import numpy as np
 from fastapi import APIRouter, Request
 from fastapi.params import Depends
 from fastapi.responses import JSONResponse
 from peewee import JOIN, DoesNotExist, fn, operator
-from PIL import Image
 from playhouse.shortcuts import model_to_dict
 
 from frigate.api.defs.events_body import (
@@ -39,7 +35,6 @@
     CLIPS_DIR,
 )
 from frigate.embeddings import EmbeddingsContext
-from frigate.embeddings.embeddings import get_metadata
 from frigate.models import Event, ReviewSegment, Timeline
 from frigate.object_processing import TrackedObject
 from frigate.util.builtin import get_tz_modifiers
@@ -389,6 +384,8 @@ def events_search(request: Request, params: EventsSearchQueryParams = Depends())
 
     context: EmbeddingsContext = request.app.embeddings
 
+    logger.info(f"context: {context.embeddings}, conn: {context.embeddings.conn}")
+
     selected_columns = [
         Event.id,
         Event.camera,
@@ -484,14 +481,10 @@ def events_search(request: Request, params: EventsSearchQueryParams = Depends())
     else:
         event_ids = []
 
-    # Build the Chroma where clause based on the event IDs
-    where = {"id": {"$in": event_ids}} if event_ids else {}
-
-    thumb_ids = {}
-    desc_ids = {}
+    thumb_results = []
+    desc_results = []
 
     if search_type == "similarity":
-        # Grab the ids of events that match the thumbnail image embeddings
        try:
             search_event: Event = Event.get(Event.id == event_id)
         except DoesNotExist:
@@ -504,62 +497,25 @@ def events_search(request: Request, params: EventsSearchQueryParams = Depends())
                 ),
                 status_code=404,
             )
-        thumbnail = base64.b64decode(search_event.thumbnail)
-        img = np.array(Image.open(io.BytesIO(thumbnail)).convert("RGB"))
-        thumb_result = context.embeddings.thumbnail.query(
-            query_images=[img],
-            n_results=limit,
-            where=where,
-        )
-        thumb_ids = dict(
-            zip(
-                thumb_result["ids"][0],
-                context.thumb_stats.normalize(thumb_result["distances"][0]),
-            )
-        )
+        thumb_results = context.embeddings.search_thumbnail(search_event.id, limit)
     else:
         search_types = search_type.split(",")
 
         if "thumbnail" in search_types:
-            thumb_result = context.embeddings.thumbnail.query(
-                query_texts=[query],
-                n_results=limit,
-                where=where,
-            )
-            # Do a rudimentary normalization of the difference in distances returned by CLIP and MiniLM.
-            thumb_ids = dict(
-                zip(
-                    thumb_result["ids"][0],
-                    context.thumb_stats.normalize(thumb_result["distances"][0]),
-                )
-            )
+            thumb_results = context.embeddings.search_thumbnail(query, limit)
 
         if "description" in search_types:
-            desc_result = context.embeddings.description.query(
-                query_texts=[query],
-                n_results=limit,
-                where=where,
-            )
-            desc_ids = dict(
-                zip(
-                    desc_result["ids"][0],
-                    context.desc_stats.normalize(desc_result["distances"][0]),
-                )
-            )
+            desc_results = context.embeddings.search_description(query, limit)
 
     results = {}
-    for event_id in thumb_ids.keys() | desc_ids:
-        min_distance = min(
-            i
-            for i in (thumb_ids.get(event_id), desc_ids.get(event_id))
-            if i is not None
-        )
-        results[event_id] = {
-            "distance": min_distance,
-            "source": "thumbnail"
-            if min_distance == thumb_ids.get(event_id)
-            else "description",
-        }
+    for result in thumb_results + desc_results:
+        event_id, distance = result[0], result[1]
+        if event_id in event_ids or not event_ids:
+            if event_id not in results or distance < results[event_id]["distance"]:
+                results[event_id] = {
+                    "distance": distance,
+                    "source": "thumbnail" if result in thumb_results else "description",
+                }
 
     if not results:
         return JSONResponse(content=[])
@@ -975,10 +931,9 @@ def set_description(
     # If semantic search is enabled, update the index
     if request.app.frigate_config.semantic_search.enabled:
         context: EmbeddingsContext = request.app.embeddings
-        context.embeddings.description.upsert(
-            documents=[new_description],
-            metadatas=[get_metadata(event)],
-            ids=[event_id],
+        context.embeddings.upsert_description(
+            event_id=event_id,
+            description=new_description,
         )
 
     response_message = (
@@ -1065,8 +1020,8 @@ def delete_event(request: Request, event_id: str):
     # If semantic search is enabled, update the index
     if request.app.frigate_config.semantic_search.enabled:
         context: EmbeddingsContext = request.app.embeddings
-        context.embeddings.thumbnail.delete(ids=[event_id])
-        context.embeddings.description.delete(ids=[event_id])
+        context.embeddings.delete_thumbnail(id=[event_id])
+        context.embeddings.delete_description(id=[event_id])
 
     return JSONResponse(
         content=({"success": True, "message": "Event " + event_id + " deleted"}),
        status_code=200,

From f5aceece73d39142a1987e9cd919294578b1cf5b Mon Sep 17 00:00:00 2001
From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>
Date: Fri, 4 Oct 2024 13:22:32 -0500
Subject: [PATCH 07/56] migrate event cleanup from chroma to sqlite_vec

---
 frigate/events/cleanup.py | 22 ++++++++++------------
 1 file changed, 10 insertions(+), 12 deletions(-)

diff --git a/frigate/events/cleanup.py b/frigate/events/cleanup.py
index 7404495262..74f4a59ac7 100644
--- a/frigate/events/cleanup.py
+++ b/frigate/events/cleanup.py
@@ -8,6 +8,8 @@
 from multiprocessing.synchronize import Event as MpEvent
 from pathlib import Path
 
+from playhouse.sqliteq import SqliteQueueDatabase
+
 from frigate.config import FrigateConfig
 from frigate.const import CLIPS_DIR
 from frigate.embeddings.embeddings import Embeddings
@@ -22,16 +24,19 @@ class EventCleanupType(str, Enum):
 
 
 class EventCleanup(threading.Thread):
-    def __init__(self, config: FrigateConfig, stop_event: MpEvent):
+    def __init__(
+        self, config: FrigateConfig, stop_event: MpEvent, db: SqliteQueueDatabase
+    ):
         super().__init__(name="event_cleanup")
         self.config = config
         self.stop_event = stop_event
+        self.db = db
         self.camera_keys = list(self.config.cameras.keys())
         self.removed_camera_labels: list[str] = None
         self.camera_labels: dict[str, dict[str, any]] = {}
 
         if self.config.semantic_search.enabled:
-            self.embeddings = Embeddings()
+            self.embeddings = Embeddings(self.db)
 
     def get_removed_camera_labels(self) -> list[Event]:
         """Get a list of distinct labels for removed cameras."""
@@ -229,15 +234,8 @@ def run(self) -> None:
                 Event.delete().where(Event.id << chunk).execute()
 
                 if self.config.semantic_search.enabled:
-                    for collection in [
-                        self.embeddings.thumbnail,
-                        self.embeddings.description,
-                    ]:
-                        existing_ids = collection.get(ids=chunk, include=[])["ids"]
-                        if existing_ids:
-                            collection.delete(ids=existing_ids)
-                            logger.debug(
-                                f"Deleted {len(existing_ids)} embeddings from {collection.__class__.__name__}"
-                            )
+                    self.embeddings.delete_description(chunk)
+                    self.embeddings.delete_thumbnail(chunk)
+                    logger.debug(f"Deleted {len(events_to_delete)} embeddings")
 
         logger.info("Exiting event cleanup...")

From cb5b982b61398a5707b7904f3bcde37ed3447ff7 Mon Sep 17 00:00:00 2001
From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>
Date: Fri, 4 Oct 2024 13:23:06 -0500
Subject: [PATCH 08/56] migrate embedding maintainer from chroma to sqlite_vec

---
 frigate/embeddings/maintainer.py | 44 ++++++++++----------------------
 1 file changed, 14 insertions(+), 30 deletions(-)

diff --git a/frigate/embeddings/maintainer.py b/frigate/embeddings/maintainer.py
index 3c3d956c89..4cb6a3bca3 100644
--- a/frigate/embeddings/maintainer.py
+++ b/frigate/embeddings/maintainer.py
@@ -1,7 +1,6 @@
-"""Maintain embeddings in Chroma."""
+"""Maintain embeddings in SQLite-vec."""
 
 import base64
-import io
 import logging
 import os
 import threading
@@ -11,7 +10,7 @@
 import cv2
 import numpy as np
 from peewee import DoesNotExist
-from PIL import Image
+from playhouse.sqliteq import SqliteQueueDatabase
 
 from frigate.comms.event_metadata_updater import (
     EventMetadataSubscriber,
@@ -26,7 +25,7 @@
 from frigate.models import Event
 from frigate.util.image import SharedMemoryFrameManager, calculate_region
 
-from .embeddings import Embeddings, get_metadata
+from .embeddings import Embeddings
 
 logger = logging.getLogger(__name__)
 
@@ -36,13 +35,14 @@ class EmbeddingMaintainer(threading.Thread):
 
     def __init__(
         self,
+        db: SqliteQueueDatabase,
         config: FrigateConfig,
         stop_event: MpEvent,
     ) -> None:
         threading.Thread.__init__(self)
         self.name = "embeddings_maintainer"
         self.config = config
-        self.embeddings = Embeddings()
+        self.embeddings = Embeddings(db)
         self.event_subscriber = EventUpdateSubscriber()
         self.event_end_subscriber = EventEndSubscriber()
         self.event_metadata_subscriber = EventMetadataSubscriber(
@@ -56,7 +56,7 @@ def __init__(
         self.genai_client = get_genai_client(config.genai)
 
     def run(self) -> None:
-        """Maintain a Chroma vector database for semantic search."""
+        """Maintain a SQLite-vec database for semantic search."""
         while not self.stop_event.is_set():
             self._process_updates()
             self._process_finalized()
@@ -117,12 +117,11 @@ def _process_finalized(self) -> None:
                 if event.data.get("type") != "object":
                     continue
 
-                # Extract valid event metadata
-                metadata = get_metadata(event)
+                # Extract valid thumbnail
                 thumbnail = base64.b64decode(event.thumbnail)
 
                 # Embed the thumbnail
-                self._embed_thumbnail(event_id, thumbnail, metadata)
+                self._embed_thumbnail(event_id, thumbnail)
 
                 if (
                     camera_config.genai.enabled
@@ -183,7 +182,6 @@ def _process_finalized(self) -> None:
                     args=(
                         event,
                         embed_image,
-                        metadata,
                     ),
                 ).start()
 
@@ -219,25 +217,16 @@ def _create_thumbnail(self, yuv_frame, box, height=500) -> Optional[bytes]:
 
         return None
 
-    def _embed_thumbnail(self, event_id: str, thumbnail: bytes, metadata: dict) -> None:
+    def _embed_thumbnail(self, event_id: str, thumbnail: bytes) -> None:
         """Embed the thumbnail for an event."""
+        self.embeddings.upsert_thumbnail(event_id, thumbnail)
 
-        # Encode the thumbnail
-        img = np.array(Image.open(io.BytesIO(thumbnail)).convert("RGB"))
-        self.embeddings.thumbnail.upsert(
-            images=[img],
-            metadatas=[metadata],
-            ids=[event_id],
-        )
-
-    def _embed_description(
-        self, event: Event, thumbnails: list[bytes], metadata: dict
-    ) -> None:
+    def _embed_description(self, event: Event, thumbnails: list[bytes]) -> None:
         """Embed the description for an event."""
         camera_config = self.config.cameras[event.camera]
         description = self.genai_client.generate_description(
-            camera_config, thumbnails, metadata
+            camera_config, thumbnails, event.label
         )
 
         if not description:
@@ -251,11 +240,7 @@ def _embed_description(
             )
 
         # Encode the description
-        self.embeddings.description.upsert(
-            documents=[description],
-            metadatas=[metadata],
-            ids=[event.id],
-        )
+        self.embeddings.upsert_description(event.id, description)
 
         logger.debug(
             "Generated description for %s (%d images): %s",
@@ -276,7 +261,6 @@ def handle_regenerate_description(self, event_id: str, source: str) -> None:
             logger.error(f"GenAI not enabled for camera {event.camera}")
             return
 
-        metadata = get_metadata(event)
         thumbnail = base64.b64decode(event.thumbnail)
 
         logger.debug(
@@ -315,4 +299,4 @@ def handle_regenerate_description(self, event_id: str, source: str) -> None:
             )
         )
 
-        self._embed_description(event, embed_image, metadata)
+        self._embed_description(event, embed_image)

From b31216da7435eb04381dafd41f8fd9a4a9084820 Mon Sep 17 00:00:00 2001
From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>
Date: Fri, 4 Oct 2024 13:23:26 -0500
Subject: [PATCH 09/56] genai description for sqlite_vec

---
 frigate/genai/__init__.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/frigate/genai/__init__.py b/frigate/genai/__init__.py
index afc7830214..b413017c8b 100644
--- a/frigate/genai/__init__.py
+++ b/frigate/genai/__init__.py
@@ -31,12 +31,12 @@ def generate_description(
         self,
         camera_config: CameraConfig,
         thumbnails: list[bytes],
-        metadata: dict[str, any],
+        label: str,
     ) -> Optional[str]:
         """Generate a description for the frame."""
         prompt = camera_config.genai.object_prompts.get(
-            metadata["label"], camera_config.genai.prompt
-        ).format(**metadata)
+            label, camera_config.genai.prompt
+        )
         return self._send(prompt, thumbnails)
 
     def _init_provider(self):

From 5181ea7b3de494347946f4c04c2147a08773a305 Mon Sep 17 00:00:00 2001
From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>
Date: Fri, 4 Oct 2024 13:23:46 -0500
Subject: [PATCH 10/56] load sqlite_vec in main thread db

---
 frigate/app.py | 23 +++++++++++++++++++----
 1 file changed, 19 insertions(+), 4 deletions(-)

diff --git a/frigate/app.py b/frigate/app.py
index ca419e045d..daebae8bb1 100644
--- a/frigate/app.py
+++ b/frigate/app.py
@@ -9,6 +9,7 @@
 from typing import Any, Optional
 
 import psutil
+import sqlite_vec
 import uvicorn
 from peewee_migrate import Router
 from playhouse.sqlite_ext import SqliteExtDatabase
@@ -223,11 +224,8 @@ def init_review_segment_manager(self) -> None:
 
     def init_embeddings_manager(self) -> None:
         if not self.config.semantic_search.enabled:
-            self.embeddings = None
             return
 
-        # Create a client for other processes to use
-        self.embeddings = EmbeddingsContext()
         embedding_process = util.Process(
             target=manage_embeddings,
             name="embeddings_manager",
@@ -266,6 +264,14 @@ def bind_database(self) -> None:
         ]
         self.db.bind(models)
 
+        if self.config.semantic_search.enabled:
+            # use existing db connection to load sqlite_vec extension
+            conn = self.db.connection()
+            conn.enable_load_extension(True)
+            sqlite_vec.load(conn)
+            conn.enable_load_extension(False)
+            logger.info(f"main connection: {self.db}")
+
     def check_db_data_migrations(self) -> None:
         # check if vacuum needs to be run
         if not os.path.exists(f"{CONFIG_DIR}/.exports"):
@@ -277,6 +283,14 @@ def check_db_data_migrations(self) -> None:
 
         migrate_exports(self.config.ffmpeg, list(self.config.cameras.keys()))
 
+    def init_embeddings_client(self) -> None:
+        if not self.config.semantic_search.enabled:
+            self.embeddings = None
+            return
+
+        # Create a client for other processes to use
+        self.embeddings = EmbeddingsContext(self.db)
+
     def init_external_event_processor(self) -> None:
         self.external_event_processor = ExternalEventProcessor(self.config)
 
@@ -467,7 +481,7 @@ def start_event_processor(self) -> None:
         self.event_processor.start()
 
     def start_event_cleanup(self) -> None:
-        self.event_cleanup = EventCleanup(self.config, self.stop_event)
+        self.event_cleanup = EventCleanup(self.config, self.stop_event, self.db)
         self.event_cleanup.start()
 
     def start_record_cleanup(self) -> None:
@@ -591,6 +605,7 @@ def start(self) -> None:
         self.init_go2rtc()
         self.bind_database()
         self.check_db_data_migrations()
+        self.init_embeddings_client()
         self.init_inter_process_communicator()
         self.init_dispatcher()
         self.start_detectors()

From 1b7f469daf5c92611d8b21867e252583460a43fc Mon Sep 17 00:00:00 2001
From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>
Date: Fri, 4 Oct 2024 15:40:53 -0500
Subject: [PATCH 11/56] extend the SqliteQueueDatabase class and use peewee db.execute_sql

---
 frigate/api/event.py | 3 +-
 frigate/app.py | 23 ++---
 frigate/embeddings/__init__.py | 15 ++--
 frigate/embeddings/embeddings.py | 139 ++++++++++++++-----------------
 frigate/embeddings/sqlitevecq.py | 19 +++++
 5 files changed, 94 insertions(+), 105 deletions(-)
 create mode 100644 frigate/embeddings/sqlitevecq.py

diff --git a/frigate/api/event.py b/frigate/api/event.py
index 9457d01488..7d48023551 100644
--- a/frigate/api/event.py
+++ b/frigate/api/event.py
@@ -384,8 +384,6 @@ def events_search(request: Request, params: EventsSearchQueryParams = Depends())
 
     context: EmbeddingsContext = request.app.embeddings
 
-    logger.info(f"context: {context.embeddings}, conn: {context.embeddings.conn}")
-
     selected_columns = [
         Event.id,
         Event.camera,
@@ -503,6 +501,7 @@ def events_search(request: Request, params: EventsSearchQueryParams = Depends())
 
         if "thumbnail" in search_types:
             thumb_results = context.embeddings.search_thumbnail(query, limit)
+            logger.info(f"thumb results: {thumb_results}")
 
         if "description" in search_types:
             desc_results = context.embeddings.search_description(query, limit)

diff --git a/frigate/app.py b/frigate/app.py
index daebae8bb1..255e0c1a95 100644
--- a/frigate/app.py
+++ b/frigate/app.py
@@ -9,11 +9,9 @@
 from typing import Any, Optional
 
 import psutil
-import sqlite_vec
 import uvicorn
 from peewee_migrate import Router
 from playhouse.sqlite_ext import SqliteExtDatabase
-from playhouse.sqliteq import SqliteQueueDatabase
 
 import frigate.util as util
 from frigate.api.auth import hash_password
@@ -41,6 +39,7 @@
     RECORD_DIR,
 )
 from frigate.embeddings import EmbeddingsContext, manage_embeddings
+from frigate.embeddings.sqlitevecq import SqliteVecQueueDatabase
 from frigate.events.audio import AudioProcessor
 from frigate.events.cleanup import EventCleanup
 from frigate.events.external import ExternalEventProcessor
@@ -240,7 +239,7 @@ def init_embeddings_manager(self) -> None:
     def bind_database(self) -> None:
         """Bind db to the main process."""
         # NOTE: all db accessing processes need to be created before the db can be bound to the main process
-        self.db = SqliteQueueDatabase(
+        self.db = SqliteVecQueueDatabase(
            self.config.database.path,
             pragmas={
                 "auto_vacuum": "FULL",  # Does not defragment database
@@ -250,6 +249,7 @@ def bind_database(self) -> None:
             timeout=max(
                 60, 10 * len([c for c in self.config.cameras.values() if c.enabled])
             ),
+            load_vec_extension=self.config.semantic_search.enabled,
         )
         models = [
             Event,
@@ -264,14 +264,6 @@ def bind_database(self) -> None:
         ]
         self.db.bind(models)
 
-        if self.config.semantic_search.enabled:
-            # use existing db connection to load sqlite_vec extension
-            conn = self.db.connection()
-            conn.enable_load_extension(True)
-            sqlite_vec.load(conn)
-            conn.enable_load_extension(False)
-            logger.info(f"main connection: {self.db}")
-
     def check_db_data_migrations(self) -> None:
         # check if vacuum needs to be run
         if not os.path.exists(f"{CONFIG_DIR}/.exports"):
@@ -284,12 +276,9 @@ def check_db_data_migrations(self) -> None:
 
         migrate_exports(self.config.ffmpeg, list(self.config.cameras.keys()))
 
     def init_embeddings_client(self) -> None:
-        if not self.config.semantic_search.enabled:
-            self.embeddings = None
-            return
-
-        # Create a client for other processes to use
-        self.embeddings = EmbeddingsContext(self.db)
+        if self.config.semantic_search.enabled:
+            # Create a client for other processes to use
+            self.embeddings = EmbeddingsContext(self.db)
 
     def init_external_event_processor(self) -> None:
         self.external_event_processor = ExternalEventProcessor(self.config)

diff --git a/frigate/embeddings/__init__.py b/frigate/embeddings/__init__.py
index 00b02b1aca..970060eb44 100644
--- a/frigate/embeddings/__init__.py
+++ b/frigate/embeddings/__init__.py
@@ -9,12 +9,11 @@
 from types import FrameType
 from typing import Optional
 
-import sqlite_vec
-from playhouse.sqliteq import SqliteQueueDatabase
 from setproctitle import setproctitle
 
 from frigate.config import FrigateConfig
 from frigate.const import CONFIG_DIR
+from frigate.embeddings.sqlitevecq import SqliteVecQueueDatabase
 from frigate.models import Event
 from frigate.util.services import listen
 
@@ -43,7 +42,7 @@ def receiveSignal(signalNumber: int, frame: Optional[FrameType]) -> None:
     listen()
 
     # Configure Frigate DB
-    db = SqliteQueueDatabase(
+    db = SqliteVecQueueDatabase(
         config.database.path,
         pragmas={
             "auto_vacuum": "FULL",  # Does not defragment database
@@ -51,15 +50,11 @@ def receiveSignal(signalNumber: int, frame: Optional[FrameType]) -> None:
             "synchronous": "NORMAL",  # Safe when using WAL https://www.sqlite.org/pragma.html#pragma_synchronous
         },
         timeout=max(60, 10 * len([c for c in config.cameras.values() if c.enabled])),
+        load_vec_extension=True,
     )
     models = [Event]
     db.bind(models)
 
-    conn = db.connection()
-    conn.enable_load_extension(True)
-    sqlite_vec.load(conn)
-    conn.enable_load_extension(False)
-
     embeddings = Embeddings(db)
 
     # Check if we need to re-index events
@@ -75,9 +70,9 @@ def receiveSignal(signalNumber: int, frame: Optional[FrameType]) -> None:
 
 
 class EmbeddingsContext:
-    def __init__(self, db: SqliteQueueDatabase):
+    def __init__(self, db: SqliteVecQueueDatabase):
         self.db = db
-        self.embeddings = Embeddings(db)
+        self.embeddings = Embeddings(self.db)
         self.thumb_stats = ZScoreNormalization()
         self.desc_stats = ZScoreNormalization()

diff --git a/frigate/embeddings/embeddings.py b/frigate/embeddings/embeddings.py
index 52c4290250..a1f0b96868 100644
--- a/frigate/embeddings/embeddings.py
+++ b/frigate/embeddings/embeddings.py
@@ -5,12 +5,12 @@
 import logging
 import struct
 import time
-from typing import List, Tuple
+from typing import List, Tuple, Union
 
 from PIL import Image
 from playhouse.shortcuts import model_to_dict
-from playhouse.sqliteq import SqliteQueueDatabase
 
+from frigate.embeddings.sqlitevecq import SqliteVecQueueDatabase
 from frigate.models import Event
 
 from .functions.clip import ClipEmbedding
@@ -63,10 +63,10 @@ class Embeddings:
     """SQLite-vec embeddings database."""
 
-    def __init__(self, db: SqliteQueueDatabase) -> None:
-        self.conn = db.connection()  # Store the database connection instance
+    def __init__(self, db: SqliteVecQueueDatabase) -> None:
+        self.db = db
 
-        # create tables if they don't exist
+        # Create tables if they don't exist
         self._create_tables()
 
         self.clip_embedding = ClipEmbedding(model="ViT-B/32")
@@ -76,7 +76,7 @@ def __init__(self, db: SqliteQueueDatabase) -> None:
 
     def _create_tables(self):
         # Create vec0 virtual table for thumbnail embeddings
-        self.conn.execute("""
+        self.db.execute_sql("""
             CREATE VIRTUAL TABLE IF NOT EXISTS vec_thumbnails USING vec0(
                 id TEXT PRIMARY KEY,
                 thumbnail_embedding FLOAT[512]
@@ -84,7 +84,7 @@ def _create_tables(self):
         """)
 
         # Create vec0 virtual table for description embeddings
-        self.conn.execute("""
+        self.db.execute_sql("""
             CREATE VIRTUAL TABLE IF NOT EXISTS vec_descriptions USING vec0(
                 id TEXT PRIMARY KEY,
                 description_embedding FLOAT[384]
@@ -97,79 +97,65 @@ def upsert_thumbnail(self, event_id: str, thumbnail: bytes):
         # Generate embedding using CLIP
         embedding = self.clip_embedding([image])[0]
 
-        # sqlite_vec virtual tables don't support upsert, check if event_id exists
-        cursor = self.conn.execute(
-            "SELECT 1 FROM vec_thumbnails WHERE id = ?", (event_id,)
+        self.db.execute_sql(
+            """
+            INSERT OR REPLACE INTO vec_thumbnails(id, thumbnail_embedding)
+            VALUES(?, ?)
+            """,
+            (event_id, serialize(embedding)),
         )
-        row = cursor.fetchone()
-
-        if row is None:
-            # Insert if the event_id does not exist
-            self.conn.execute(
-                "INSERT INTO vec_thumbnails(id, thumbnail_embedding) VALUES(?, ?)",
-                [event_id, serialize(embedding)],
-            )
-        else:
-            # Update if the event_id already exists
-            self.conn.execute(
-                "UPDATE vec_thumbnails SET thumbnail_embedding = ? WHERE id = ?",
-                [serialize(embedding), event_id],
-            )
 
     def upsert_description(self, event_id: str, description: str):
         # Generate embedding using MiniLM
         embedding = self.minilm_embedding([description])[0]
 
-        # sqlite_vec virtual tables don't support upsert, check if event_id exists
-        cursor = self.conn.execute(
-            "SELECT 1 FROM vec_descriptions WHERE id = ?", (event_id,)
+        self.db.execute_sql(
+            """
+            INSERT OR REPLACE INTO vec_descriptions(id, description_embedding)
+            VALUES(?, ?)
+            """,
+            (event_id, serialize(embedding)),
         )
-        row = cursor.fetchone()
-
-        if row is None:
-            # Insert if the event_id does not exist
-            self.conn.execute(
-                "INSERT INTO vec_descriptions(id, description_embedding) VALUES(?, ?)",
-                [event_id, serialize(embedding)],
-            )
-        else:
-            # Update if the event_id already exists
-            self.conn.execute(
-                "UPDATE vec_descriptions SET description_embedding = ? WHERE id = ?",
-                [serialize(embedding), event_id],
-            )
 
     def delete_thumbnail(self, event_ids: List[str]) -> None:
-        ids = ", ".join("?" for _ in event_ids)
-
-        self.conn.execute(
-            f"DELETE FROM vec_thumbnails WHERE id IN ({ids})", tuple(event_ids)
+        ids = ",".join(["?" for _ in event_ids])
+        self.db.execute_sql(
+            f"DELETE FROM vec_thumbnails WHERE id IN ({ids})", event_ids
         )
 
     def delete_description(self, event_ids: List[str]) -> None:
-        ids = ", ".join("?" for _ in event_ids)
-
-        self.conn.execute(
-            f"DELETE FROM vec_descriptions WHERE id IN ({ids})", tuple(event_ids)
+        ids = ",".join(["?" for _ in event_ids])
+        self.db.execute_sql(
+            f"DELETE FROM vec_descriptions WHERE id IN ({ids})", event_ids
         )
 
-    def search_thumbnail(self, event_id: str, limit=10) -> List[Tuple[str, float]]:
-        # check if it's already embedded
-        cursor = self.conn.execute(
-            "SELECT thumbnail_embedding FROM vec_thumbnails WHERE id = ?", (event_id,)
-        )
-        row = cursor.fetchone()
-        if row:
-            query_embedding = deserialize(row[0])
-        else:
-            # If not embedded, fetch the thumbnail from the Event table and embed it
-            event = Event.get_by_id(event_id)
-            thumbnail = base64.b64decode(event.thumbnail)
-            image = Image.open(io.BytesIO(thumbnail)).convert("RGB")
-            query_embedding = self.clip_embedding([image])[0]
-            self.upsert_thumbnail(event_id, thumbnail)
-
-        cursor = self.conn.execute(
+    def search_thumbnail(
+        self, query: Union[Event, str], limit=10
+    ) -> List[Tuple[str, float]]:
+        if isinstance(query, Event):
+            cursor = self.db.execute_sql(
+                """
+                SELECT thumbnail_embedding FROM vec_thumbnails WHERE id = ?
+                """,
+                [query.id],
+            )
+
+            row = cursor.fetchone() if cursor else None
+
+            if row:
+                query_embedding = deserialize(
+                    row[0]
+                )  # Deserialize the thumbnail embedding
+            else:
+                # If no embedding found, generate it
+                thumbnail = base64.b64decode(query.thumbnail)
+                self.upsert_thumbnail(query.id, thumbnail)
+                image = Image.open(io.BytesIO(thumbnail)).convert("RGB")
+                query = self.clip_embedding([image])[0]
+
+        query_embedding = self.clip_embedding([query])[0]
+
+        results = self.db.execute_sql(
             """
             SELECT
                 vec_thumbnails.id,
                 distance
             FROM vec_thumbnails
             WHERE thumbnail_embedding MATCH ?
                 AND k = ?
             ORDER BY distance
-            """,
-            [serialize(query_embedding), limit],
-        )
-        return cursor.fetchall()
+            """,
+            (serialize(query_embedding), limit),
+        ).fetchall()
+
+        return results
 
     def search_description(self, query_text: str, limit=10) -> List[Tuple[str, float]]:
         query_embedding = self.minilm_embedding([query_text])[0]
-        cursor = self.conn.execute(
+        results = self.db.execute_sql(
             """
             SELECT
                 vec_descriptions.id,
                 distance
             FROM vec_descriptions
             WHERE description_embedding MATCH ?
                 AND k = ?
             ORDER BY distance
-            """,
-            [serialize(query_embedding), limit],
-        )
-        return cursor.fetchall()
+            """,
+            (serialize(query_embedding), limit),
+        ).fetchall()
+
+        return results
 
     def reindex(self) -> None:
-        """Reindex all event embeddings."""
         logger.info("Indexing event embeddings...")
 
         st = time.time()

diff --git a/frigate/embeddings/sqlitevecq.py b/frigate/embeddings/sqlitevecq.py
new file mode 100644
index 0000000000..2053640108
--- /dev/null
+++ b/frigate/embeddings/sqlitevecq.py
@@ -0,0 +1,19 @@
+import sqlite_vec
+from playhouse.sqliteq import SqliteQueueDatabase
+
+
+class SqliteVecQueueDatabase(SqliteQueueDatabase):
+    def __init__(self, *args, load_vec_extension=False, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.load_vec_extension = load_vec_extension
+
+    def _connect(self, *args, **kwargs):
+        conn = super()._connect(*args, **kwargs)
+        if self.load_vec_extension:
+            self._load_vec_extension(conn)
+        return conn
+
+    def _load_vec_extension(self, conn):
+        conn.enable_load_extension(True)
+        sqlite_vec.load(conn)
+        conn.enable_load_extension(False)

From fc242e200133c3450b907539fc68309ce6991e2e Mon Sep 17 00:00:00 2001
From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>
Date: Fri, 4 Oct 2024 15:44:28 -0500
Subject: [PATCH 12/56] search with Event type for similarity

---
 frigate/api/event.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/frigate/api/event.py b/frigate/api/event.py
index 7d48023551..f0f846b006 100644
--- a/frigate/api/event.py
+++ b/frigate/api/event.py
@@ -495,7 +495,7 @@ def events_search(request: Request, params: EventsSearchQueryParams = Depends())
                 ),
                 status_code=404,
             )
-        thumb_results = context.embeddings.search_thumbnail(search_event.id, limit)
+        thumb_results = context.embeddings.search_thumbnail(search_event, limit)
     else:
         search_types = search_type.split(",")

From afb36a5ce969b58c7fbbe595358aef08fefea91d Mon Sep 17 00:00:00 2001
From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>
Date: Fri, 4 Oct 2024 15:59:00 -0500
Subject: [PATCH 13/56] fix similarity search

---
 frigate/embeddings/embeddings.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/frigate/embeddings/embeddings.py b/frigate/embeddings/embeddings.py
index a1f0b96868..90e97efa70 100644
--- a/frigate/embeddings/embeddings.py
+++ b/frigate/embeddings/embeddings.py
@@ -132,7 +132,7 @@ def delete_description(self, event_ids: List[str]) -> None:
     def search_thumbnail(
         self, query: Union[Event, str], limit=10
     ) -> List[Tuple[str, float]]:
-        if isinstance(query, Event):
+        if query.__class__ == Event:
             cursor = self.db.execute_sql(
                 """
                 SELECT thumbnail_embedding FROM vec_thumbnails WHERE id = ?
@@ -151,9 +151,9 @@ def search_thumbnail(
                 thumbnail = base64.b64decode(query.thumbnail)
                 self.upsert_thumbnail(query.id, thumbnail)
                 image = Image.open(io.BytesIO(thumbnail)).convert("RGB")
-                query = self.clip_embedding([image])[0]
-
-        query_embedding = self.clip_embedding([query])[0]
+                query_embedding = self.clip_embedding([image])[0]
+        else:
+            query_embedding = self.clip_embedding([query])[0]
 
         results = self.db.execute_sql(
             """

From 980a889001ee82d913b08d9ecd5dd3df7d508ab0 Mon Sep 17 00:00:00 2001
From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>
Date: Fri, 4 Oct 2024 16:06:04 -0500
Subject: [PATCH 14/56] install and add comment about transformers

---
 docker/main/requirements-wheels.txt | 1 +
 frigate/embeddings/functions/minilm_l6_v2.py | 4 ++++
 2 files changed, 5 insertions(+)

diff --git a/docker/main/requirements-wheels.txt b/docker/main/requirements-wheels.txt
index f7717c808e..3148848cad 100644
--- a/docker/main/requirements-wheels.txt
+++ b/docker/main/requirements-wheels.txt
@@ -33,6 +33,7 @@ unidecode == 1.3.*
 openvino == 2024.3.*
 # Embeddings
 sqlite_vec == 0.1.3
+transformers == 4.45.*
 onnx_clip == 4.0.*
 # Generative AI
 google-generativeai == 0.6.*

diff --git a/frigate/embeddings/functions/minilm_l6_v2.py b/frigate/embeddings/functions/minilm_l6_v2.py
index 651a640939..6a0e2d5efb 100644
--- a/frigate/embeddings/functions/minilm_l6_v2.py
+++ b/frigate/embeddings/functions/minilm_l6_v2.py
@@ -9,6 +9,10 @@
 import numpy as np
 import onnxruntime as ort
 import requests
+
+# importing this without pytorch or others causes a warning
+# https://github.com/huggingface/transformers/issues/27214
+# suppressed by setting env TRANSFORMERS_NO_ADVISORY_WARNINGS=1
 from transformers import AutoTokenizer
 
 from frigate.const import MODEL_CACHE_DIR

From df94a941fc3ce3b6cf8333d1690d9cc7e2cb7d62 Mon Sep 17 00:00:00 2001
From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>
Date: Fri, 4 Oct 2024 17:18:50 -0500
Subject: [PATCH 15/56] fix normalization

---
 frigate/api/event.py | 73 ++++++++++++++++++++++++++++++++------------
 1 file changed, 53 insertions(+), 20 deletions(-)

diff --git a/frigate/api/event.py b/frigate/api/event.py
index f0f846b006..a225028495 100644
--- a/frigate/api/event.py
+++ b/frigate/api/event.py
@@ -479,42 +479,75 @@ def events_search(request: Request, params: EventsSearchQueryParams = Depends())
     else:
         event_ids = []
 
-    thumb_results = []
-    desc_results = []
+    thumb_ids = {}
+    desc_ids = {}
 
     if search_type == "similarity":
         try:
             search_event: Event = Event.get(Event.id == event_id)
         except DoesNotExist:
             return JSONResponse(
-                content=(
-                    {
-                        "success": False,
-                        "message": "Event not found",
-                    }
-                ),
+                content={
+                    "success": False,
+                    "message": "Event not found",
+                },
                 status_code=404,
             )
-        thumb_results = context.embeddings.search_thumbnail(search_event, limit)
+
+        # Get thumbnail results for the specific event
+        thumb_result = context.embeddings.search_thumbnail(
+            search_event, event_ids, limit
+        )
+
+        thumb_ids = dict(
+            zip(
+                [result[0] for result in thumb_result],
+                context.thumb_stats.normalize([result[1] for result in thumb_result]),
+            )
+        )
     else:
         search_types = search_type.split(",")
 
         if "thumbnail" in search_types:
-            thumb_results = context.embeddings.search_thumbnail(query, limit)
-            logger.info(f"thumb results: {thumb_results}")
+            thumb_result = context.embeddings.search_thumbnail(query, event_ids, limit)
+
+            thumb_ids = dict(
+                zip(
+                    [result[0] for result in thumb_result],
+                    context.thumb_stats.normalize(
+                        [result[1] for result in thumb_result]
+                    ),
+                )
+            )
 
         if "description" in search_types:
-            desc_results = context.embeddings.search_description(query, limit)
+            desc_result = context.embeddings.search_description(query, event_ids, limit)
+
+            desc_ids = dict(
+                zip(
+                    [result[0] for result in desc_result],
+                    context.desc_stats.normalize([result[1] for result in desc_result]),
+                )
+            )
 
     results = {}
-    for result in thumb_results + desc_results:
-        event_id, distance = result[0], result[1]
-        if event_id in event_ids or not event_ids:
-            if event_id not in results or distance < results[event_id]["distance"]:
-                results[event_id] = {
-                    "distance": distance,
-                    "source": "thumbnail" if result in thumb_results else "description",
-                }
+    for event_id in thumb_ids.keys() | desc_ids.keys():
+        thumb_distance = thumb_ids.get(event_id)
+        desc_distance = desc_ids.get(event_id)
+
+        # Select the minimum distance from the available results
+        if thumb_distance is not None and (
+            desc_distance is None or thumb_distance < desc_distance
+        ):
+            results[event_id] = {
+                "distance": thumb_distance,
+                "source": "thumbnail",
+            }
+        elif desc_distance is not None:
+            results[event_id] = {
+                "distance": desc_distance,
+                "source": "description",
+            }
 
     if not results:
         return JSONResponse(content=[])

From cdafb318fd11de33a6a049cf344a2dd02bcf16f4 Mon Sep 17 00:00:00 2001
From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>
Date: Fri, 4 Oct 2024 17:18:56 -0500
Subject: [PATCH 16/56] add id filter

---
 frigate/embeddings/embeddings.py | 55 ++++++++++++++++++++++++--------
 1 file changed, 41 insertions(+), 14 deletions(-)

diff --git a/frigate/embeddings/embeddings.py b/frigate/embeddings/embeddings.py
index 90e97efa70..55ac6447bc 100644
--- a/frigate/embeddings/embeddings.py
+++ b/frigate/embeddings/embeddings.py
@@ -130,7 +130,7 @@ def delete_description(self, event_ids: List[str]) -> None:
         )
 
     def search_thumbnail(
-        self, query: Union[Event, str], limit=10
+        self, query: Union[Event, str], event_ids: List[str] = None, limit=10
     ) -> List[Tuple[str, float]]:
         if query.__class__ == Event:
             cursor = self.db.execute_sql(
                 """
@@ -155,35 +155,62 @@ def search_thumbnail(
         else:
             query_embedding = self.clip_embedding([query])[0]
 
-        results = self.db.execute_sql(
-            """
+        sql_query = """
             SELECT
                 vec_thumbnails.id,
                 distance
             FROM vec_thumbnails
             WHERE thumbnail_embedding MATCH ?
                 AND k = ?
-            ORDER BY distance
-            """,
-            (serialize(query_embedding), limit),
-        ).fetchall()
+        """
+
+        # Add the IN clause if event_ids is provided and not empty
+        # this is the only filter supported by sqlite-vec as of 0.1.3
+        if event_ids:
+            sql_query += " AND id IN ({})".format(",".join("?" * len(event_ids)))
+
+        sql_query += " ORDER BY distance"
+        logger.info(f"thumb query: {sql_query}")
+
+        parameters = (
+            [serialize(query_embedding), limit] + event_ids
+            if event_ids
+            else [serialize(query_embedding), limit]
+        )
+
+        results = self.db.execute_sql(sql_query, parameters).fetchall()
 
         return results
 
-    def search_description(self, query_text: str, limit=10) -> List[Tuple[str, float]]:
+    def search_description(
+        self, query_text: str, event_ids: List[str] = None, limit=10
+    ) -> List[Tuple[str, float]]:
         query_embedding = self.minilm_embedding([query_text])[0]
-        results = self.db.execute_sql(
-            """
+
+        # Prepare the base SQL query
+        sql_query = """
             SELECT
                 vec_descriptions.id,
                 distance
             FROM vec_descriptions
             WHERE description_embedding MATCH ?
                 AND k = ?
-            ORDER BY distance
-            """,
-            (serialize(query_embedding), limit),
-        ).fetchall()
+        """
+
+        # Add the IN clause if event_ids is provided and not empty
+        # this is the only filter supported by sqlite-vec as of 0.1.3
+        if event_ids:
+            sql_query += " AND id IN ({})".format(",".join("?" * len(event_ids)))
+
+        sql_query += " ORDER BY distance"
+
+        parameters = (
+            [serialize(query_embedding), limit] + event_ids
+            if event_ids
+            else [serialize(query_embedding), limit]
+        )
+
+        results = self.db.execute_sql(sql_query, parameters).fetchall()
 
         return results

From e74d35a17c76a946b29420392c9f6c36f861 Mon Sep 17 00:00:00 2001
From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>
Date: Fri, 4 Oct 2024 17:22:03 -0500
Subject: [PATCH 17/56] clean up

---
 frigate/embeddings/__init__.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/frigate/embeddings/__init__.py b/frigate/embeddings/__init__.py
index 970060eb44..b26e1328ee 100644
--- a/frigate/embeddings/__init__.py
+++ b/frigate/embeddings/__init__.py
@@ -76,8 +76,6 @@ def __init__(self, db: SqliteVecQueueDatabase):
         self.thumb_stats = ZScoreNormalization()
         self.desc_stats = ZScoreNormalization()
 
-        logger.info(f"Initializing db: {self.db}")
-
         # load stats from disk
         try:
             with open(os.path.join(CONFIG_DIR, ".search_stats.json"), "r") as f:

From ac9f9042219e1fd6d4b967e546b04e0af53a6129 Mon Sep 17 00:00:00 2001
From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>
Date: Fri, 4 Oct 2024 17:23:39 -0500
Subject: [PATCH 18/56] clean up

---
 frigate/embeddings/embeddings.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/frigate/embeddings/embeddings.py b/frigate/embeddings/embeddings.py
index 55ac6447bc..1b12bdc5f9 100644
--- a/frigate/embeddings/embeddings.py
+++ b/frigate/embeddings/embeddings.py
@@ -170,7 +170,6 @@ def search_thumbnail(
            sql_query += " AND id IN ({})".format(",".join("?" * len(event_ids)))
 
         sql_query += " ORDER BY distance"
-        logger.info(f"thumb query: {sql_query}")
 
         parameters = (
             [serialize(query_embedding), limit] + event_ids
             if event_ids

From 6607ad297091d4515469e38ced3802ef38216f1b Mon Sep 17 00:00:00 2001
From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>
Date: Fri, 4 Oct 2024 18:03:40 -0500
Subject: [PATCH 19/56] fully remove chroma and add transformers env var

---
 docker/main/Dockerfile | 8 ++---
 .../s6-rc.d/chroma-log/consumer-for | 1 -
 .../chroma-log/dependencies.d/log-prepare | 0
 .../s6-rc.d/chroma-log/pipeline-name | 1 -
 .../etc/s6-overlay/s6-rc.d/chroma-log/run | 4 ---
 .../etc/s6-overlay/s6-rc.d/chroma-log/type | 1 -
 .../s6-rc.d/chroma/dependencies.d/base | 0
 .../etc/s6-overlay/s6-rc.d/chroma/finish | 28 -----------------
 .../s6-overlay/s6-rc.d/chroma/producer-for | 1 -
 .../rootfs/etc/s6-overlay/s6-rc.d/chroma/run | 27 -----------------
 .../s6-overlay/s6-rc.d/chroma/timeout-kill | 1 -
 .../rootfs/etc/s6-overlay/s6-rc.d/chroma/type | 1 -
 .../s6-rc.d/frigate/dependencies.d/chroma | 0
 .../etc/s6-overlay/s6-rc.d/log-prepare/run | 2 +-
 docker/main/rootfs/usr/local/chroma | 14 ---------
 .../semantic_search/get_search_settings.py | 30 -------------------
 16 files changed, 3 insertions(+), 116 deletions(-)
 delete mode 100644 docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma-log/consumer-for
 delete mode 100644 docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma-log/dependencies.d/log-prepare
 delete mode 100644 docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma-log/pipeline-name
 delete mode 100755 docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma-log/run
 delete mode 100644 docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma-log/type
 delete mode 100644 docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma/dependencies.d/base
 delete mode 100644 docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma/finish
 delete mode 100644 docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma/producer-for
 delete mode 100644 docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma/run
 delete mode 100644 docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma/timeout-kill
 delete mode 100644 docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma/type
 delete mode 100644 docker/main/rootfs/etc/s6-overlay/s6-rc.d/frigate/dependencies.d/chroma
 delete mode 100755 docker/main/rootfs/usr/local/chroma
 delete mode 100644 docker/main/rootfs/usr/local/semantic_search/get_search_settings.py

diff --git a/docker/main/Dockerfile b/docker/main/Dockerfile
index 342727de76..3e0dc83db5 100644
--- a/docker/main/Dockerfile
+++ b/docker/main/Dockerfile
@@ -197,12 +197,8 @@ ARG APT_KEY_DONT_WARN_ON_DANGEROUS_USAGE=DontWarn
 ENV NVIDIA_VISIBLE_DEVICES=all
 ENV NVIDIA_DRIVER_CAPABILITIES="compute,video,utility"
 
-# Turn off Chroma Telemetry: https://docs.trychroma.com/telemetry#opting-out
-ENV ANONYMIZED_TELEMETRY=False
-# Allow resetting the chroma database
-ENV ALLOW_RESET=True
-# Disable tokenizer parallelism warning
-ENV TOKENIZERS_PARALLELISM=true
+# https://github.com/huggingface/transformers/issues/27214
+ENV TRANSFORMERS_NO_ADVISORY_WARNINGS=1
 
 ENV PATH="/usr/local/go2rtc/bin:/usr/local/tempio/bin:/usr/local/nginx/sbin:${PATH}"
 ENV LIBAVFORMAT_VERSION_MAJOR=60

diff --git a/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma-log/consumer-for b/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma-log/consumer-for
deleted file mode 100644
index 4b935d3cb5..0000000000
--- a/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma-log/consumer-for
+++ /dev/null
@@ -1 +0,0 @@
-chroma
\ No newline at end of file

diff --git a/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma-log/dependencies.d/log-prepare b/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma-log/dependencies.d/log-prepare
deleted file mode 100644
index e69de29bb2..0000000000

diff --git a/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma-log/pipeline-name b/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma-log/pipeline-name
deleted file mode 100644
index 71256e9ed9..0000000000
--- a/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma-log/pipeline-name
+++ /dev/null
@@ -1 +0,0 @@
-chroma-pipeline
\ No newline at end of file

diff --git a/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma-log/run b/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma-log/run
deleted file mode 100755
index 2e47fd3ebe..0000000000
--- a/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma-log/run
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/command/with-contenv bash
-# shellcheck shell=bash
-
-exec logutil-service /dev/shm/logs/chroma

diff --git a/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma-log/type b/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma-log/type
deleted file mode 100644
index 5883cff0cd..0000000000
--- a/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma-log/type
+++ /dev/null
@@ -1 +0,0 @@
-longrun

diff --git a/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma/dependencies.d/base b/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma/dependencies.d/base
deleted file mode 100644
index e69de29bb2..0000000000

diff --git a/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma/finish b/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma/finish
deleted file mode 100644
index b6206b4ccf..0000000000
--- a/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma/finish
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/command/with-contenv bash
-# shellcheck shell=bash
-# Take down the S6 supervision tree when the service exits
-
-set -o errexit -o nounset -o pipefail
-
-# Logs should be sent to stdout so that s6 can collect them
-
-declare exit_code_container
-exit_code_container=$(cat /run/s6-linux-init-container-results/exitcode)
-readonly exit_code_container
-readonly exit_code_service="${1}"
-readonly exit_code_signal="${2}"
-readonly service="ChromaDB"
-
-echo "[INFO] Service ${service} exited with code ${exit_code_service} (by signal ${exit_code_signal})"
-
-if [[ "${exit_code_service}" -eq 256 ]]; then
-    if [[ "${exit_code_container}" -eq 0 ]]; then
-        echo $((128 + exit_code_signal)) >/run/s6-linux-init-container-results/exitcode
-    fi
-elif [[ "${exit_code_service}" -ne 0 ]]; then
-    if [[ "${exit_code_container}" -eq 0 ]]; then
-        echo "${exit_code_service}" >/run/s6-linux-init-container-results/exitcode
-    fi
-fi
-
-exec /run/s6/basedir/bin/halt

diff --git a/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma/producer-for b/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma/producer-for
deleted file mode 100644
index c17b71e87a..0000000000
--- a/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma/producer-for
+++ /dev/null
@@ -1 +0,0 @@
-chroma-log

diff --git a/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma/run b/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma/run
deleted file mode 100644
index ef477d8a28..0000000000
--- a/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma/run
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/command/with-contenv bash
-# shellcheck shell=bash
-# Start the Frigate service
-
-set -o errexit -o nounset -o pipefail
-
-# Logs should be sent to stdout so that s6 can collect them
-
-# Tell S6-Overlay not to restart this service
-s6-svc -O .
-
-search_enabled=`python3 /usr/local/semantic_search/get_search_settings.py | jq -r .enabled`
-
-# Replace the bash process with the Frigate process, redirecting stderr to stdout
-exec 2>&1
-
-if [[ "$search_enabled" == 'true' ]]; then
-    echo "[INFO] Starting ChromaDB..."
-    exec /usr/local/chroma run --path /config/chroma --host 127.0.0.1
-else
-    while true
-    do
-        sleep 9999
-        continue
-    done
-    exit 0
-fi

diff --git a/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma/timeout-kill b/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma/timeout-kill
deleted file mode 100644
index 6f4f418441..0000000000
--- a/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma/timeout-kill
+++ /dev/null
@@ -1 +0,0 @@
-120000

diff --git a/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma/type b/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma/type
deleted file mode 100644
index 5883cff0cd..0000000000
--- a/docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma/type
+++ /dev/null
@@ -1 +0,0 @@
-longrun

diff --git a/docker/main/rootfs/etc/s6-overlay/s6-rc.d/frigate/dependencies.d/chroma b/docker/main/rootfs/etc/s6-overlay/s6-rc.d/frigate/dependencies.d/chroma
deleted file mode 100644
index e69de29bb2..0000000000

diff --git a/docker/main/rootfs/etc/s6-overlay/s6-rc.d/log-prepare/run b/docker/main/rootfs/etc/s6-overlay/s6-rc.d/log-prepare/run
index 0661f01c2f..c493e320ee 100755
--- a/docker/main/rootfs/etc/s6-overlay/s6-rc.d/log-prepare/run
+++ b/docker/main/rootfs/etc/s6-overlay/s6-rc.d/log-prepare/run
@@ -4,7 +4,7 @@
 
 set -o errexit -o nounset -o pipefail
 
-dirs=(/dev/shm/logs/frigate /dev/shm/logs/go2rtc /dev/shm/logs/nginx /dev/shm/logs/certsync /dev/shm/logs/chroma)
+dirs=(/dev/shm/logs/frigate /dev/shm/logs/go2rtc /dev/shm/logs/nginx /dev/shm/logs/certsync)
 
 mkdir -p "${dirs[@]}"
 chown nobody:nogroup "${dirs[@]}"

diff --git a/docker/main/rootfs/usr/local/chroma b/docker/main/rootfs/usr/local/chroma
deleted file mode 100755
index 5147db3877..0000000000
--- a/docker/main/rootfs/usr/local/chroma
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/usr/bin/python3
-# -*- coding: utf-8 -*-s
-__import__("pysqlite3")
-
-import re
-import sys
-
-sys.modules["sqlite3"] = sys.modules.pop("pysqlite3")
-
-from chromadb.cli.cli import app
-
-if __name__ == "__main__":
-    sys.argv[0] = re.sub(r"(-script\.pyw|\.exe)?$", "", sys.argv[0])
-    sys.exit(app())

diff --git a/docker/main/rootfs/usr/local/semantic_search/get_search_settings.py b/docker/main/rootfs/usr/local/semantic_search/get_search_settings.py
deleted file mode 100644
index ec3c9c1fa7..0000000000
--- a/docker/main/rootfs/usr/local/semantic_search/get_search_settings.py
+++ /dev/null
@@ -1,30 +0,0 @@
-"""Prints the semantic_search config as json to stdout."""
-
-import json
-import os
-
-from ruamel.yaml import YAML
-
-yaml = YAML()
-
-config_file = os.environ.get("CONFIG_FILE", "/config/config.yml")
-
-# Check if we can use .yaml instead of .yml
-config_file_yaml = config_file.replace(".yml", ".yaml")
-if os.path.isfile(config_file_yaml):
-    config_file = config_file_yaml
-
-try:
-    with open(config_file) as f:
-        raw_config = f.read()
-
-    if config_file.endswith((".yaml", ".yml")):
-        config: dict[str, any] = yaml.load(raw_config)
-    elif config_file.endswith(".json"):
-        config: dict[str, any] = json.loads(raw_config)
-except FileNotFoundError:
-    config: dict[str, any] = {}
-
-search_config: dict[str, any] = config.get("semantic_search", {"enabled": False})
-
-print(json.dumps(search_config))

From ddfabbb78cfa13f60f98352ddd2c426a672f9bba Mon Sep 17 00:00:00 2001
From: Josh Hawkins
<32435876+hawkeye217@users.noreply.github.com> Date: Fri, 4 Oct 2024 18:14:27 -0500 Subject: [PATCH 20/56] readd uvicorn for fastapi --- docker/main/requirements-wheels.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/docker/main/requirements-wheels.txt b/docker/main/requirements-wheels.txt index 3148848cad..7a36f9e5f7 100644 --- a/docker/main/requirements-wheels.txt +++ b/docker/main/requirements-wheels.txt @@ -2,6 +2,7 @@ click == 8.1.* # FastAPI starlette-context == 0.3.6 fastapi == 0.115.0 +uvicorn == 0.30.* slowapi == 0.1.9 imutils == 0.5.* joserfc == 1.0.* From e28767b107c2cddd55ff310f8fe5bb7c8d6c9a9b Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Fri, 4 Oct 2024 19:15:18 -0500 Subject: [PATCH 21/56] readd tokenizer parallelism env var --- docker/main/Dockerfile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docker/main/Dockerfile b/docker/main/Dockerfile index 3e0dc83db5..62effdcaf1 100644 --- a/docker/main/Dockerfile +++ b/docker/main/Dockerfile @@ -197,6 +197,9 @@ ARG APT_KEY_DONT_WARN_ON_DANGEROUS_USAGE=DontWarn ENV NVIDIA_VISIBLE_DEVICES=all ENV NVIDIA_DRIVER_CAPABILITIES="compute,video,utility" +# Disable tokenizer parallelism warning +# https://stackoverflow.com/questions/62691279/how-to-disable-tokenizers-parallelism-true-false-warning/72926996#72926996 +ENV TOKENIZERS_PARALLELISM=true # https://github.com/huggingface/transformers/issues/27214 ENV TRANSFORMERS_NO_ADVISORY_WARNINGS=1 From a4d43a97c46c43949da22b668ee887c496968efe Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Fri, 4 Oct 2024 19:17:47 -0500 Subject: [PATCH 22/56] remove chroma from docs --- docker/main/Dockerfile | 2 +- docs/docs/configuration/semantic_search.md | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docker/main/Dockerfile b/docker/main/Dockerfile index 62effdcaf1..ace7e7d065 100644 --- a/docker/main/Dockerfile +++ b/docker/main/Dockerfile @@ -163,7 +163,7 @@ RUN wget -q https://bootstrap.pypa.io/get-pip.py -O get-pip.py \ COPY docker/main/requirements.txt /requirements.txt RUN pip3 install -r /requirements.txt -# Build pysqlite3 from source to support ChromaDB +# Build pysqlite3 from source COPY docker/main/build_pysqlite3.sh /build_pysqlite3.sh RUN /build_pysqlite3.sh diff --git a/docs/docs/configuration/semantic_search.md b/docs/docs/configuration/semantic_search.md index a82b9cccaa..9e88f25963 100644 --- a/docs/docs/configuration/semantic_search.md +++ b/docs/docs/configuration/semantic_search.md @@ -5,7 +5,7 @@ title: Using Semantic Search Semantic Search in Frigate allows you to find tracked objects within your review items using either the image itself, a user-defined text description, or an automatically generated one. This feature works by creating _embeddings_ — numerical vector representations — for both the images and text descriptions of your tracked objects. By comparing these embeddings, Frigate assesses their similarities to deliver relevant search results. -Frigate has support for two models to create embeddings, both of which run locally: [OpenAI CLIP](https://openai.com/research/clip) and [all-MiniLM-L6-v2](https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2). Embeddings are then saved to a local instance of [ChromaDB](https://trychroma.com). 
+Frigate has support for two models to create embeddings, both of which run locally: [OpenAI CLIP](https://openai.com/research/clip) and [all-MiniLM-L6-v2](https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2). Embeddings are then saved to Frigate's database. Semantic Search is accessed via the _Explore_ view in the Frigate UI. @@ -29,7 +29,7 @@ If you are enabling the Search feature for the first time, be advised that Friga ### OpenAI CLIP -This model is able to embed both images and text into the same vector space, which allows `image -> image` and `text -> image` similarity searches. Frigate uses this model on tracked objects to encode the thumbnail image and store it in Chroma. When searching for tracked objects via text in the search box, Frigate will perform a `text -> image` similarity search against this embedding. When clicking "Find Similar" in the tracked object detail pane, Frigate will perform an `image -> image` similarity search to retrieve the closest matching thumbnails. +This model is able to embed both images and text into the same vector space, which allows `image -> image` and `text -> image` similarity searches. Frigate uses this model on tracked objects to encode the thumbnail image and store it in the database. When searching for tracked objects via text in the search box, Frigate will perform a `text -> image` similarity search against this embedding. When clicking "Find Similar" in the tracked object detail pane, Frigate will perform an `image -> image` similarity search to retrieve the closest matching thumbnails. ### all-MiniLM-L6-v2 From 3b78908ef381d8d38a80ec5d6bd306dc38d3cf0c Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Fri, 4 Oct 2024 19:19:49 -0500 Subject: [PATCH 23/56] remove chroma from UI --- frigate/api/app.py | 5 ++--- web/src/pages/Explore.tsx | 2 +- web/src/types/log.ts | 2 +- web/src/utils/logUtil.ts | 40 --------------------------------------- 4 files changed, 4 insertions(+), 45 deletions(-) diff --git a/frigate/api/app.py b/frigate/api/app.py index 78201dcd85..2e16f4fcba 100644 --- a/frigate/api/app.py +++ b/frigate/api/app.py @@ -384,12 +384,12 @@ def vainfo(): @router.get("/logs/{service}", tags=[Tags.logs]) def logs( - service: str = Path(enum=["frigate", "nginx", "go2rtc", "chroma"]), + service: str = Path(enum=["frigate", "nginx", "go2rtc"]), download: Optional[str] = None, start: Optional[int] = 0, end: Optional[int] = None, ): - """Get logs for the requested service (frigate/nginx/go2rtc/chroma)""" + """Get logs for the requested service (frigate/nginx/go2rtc)""" def download_logs(service_location: str): try: @@ -408,7 +408,6 @@ def download_logs(service_location: str): "frigate": "/dev/shm/logs/frigate/current", "go2rtc": "/dev/shm/logs/go2rtc/current", "nginx": "/dev/shm/logs/nginx/current", - "chroma": "/dev/shm/logs/chroma/current", } service_location = log_locations.get(service) diff --git a/web/src/pages/Explore.tsx b/web/src/pages/Explore.tsx index cfbbe96e04..4af6e1f194 100644 --- a/web/src/pages/Explore.tsx +++ b/web/src/pages/Explore.tsx @@ -168,7 +168,7 @@ export default function Explore() { if (searchQuery) { const [url] = searchQuery; - // for chroma, only load 100 results for description and similarity + // for embeddings, only load 100 results for description and similarity if (url === "events/search" && searchResults.length >= 100) { return; } diff --git a/web/src/types/log.ts b/web/src/types/log.ts index 235a6ea933..407f67e6d2 100644 --- a/web/src/types/log.ts 
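As the revised docs put it, CLIP embeds images and text into one vector space, so a text query can be scored directly against stored thumbnail embeddings. A minimal illustration of that ranking step, assuming unit-normalized vectors held in memory as numpy arrays (all names here are hypothetical):

```python
import numpy as np


def cosine_rank(
    query: np.ndarray, embeddings: dict[str, np.ndarray]
) -> list[tuple[str, float]]:
    # with L2-normalized vectors, the dot product is the cosine similarity
    scores = {eid: float(np.dot(query, vec)) for eid, vec in embeddings.items()}
    return sorted(scores.items(), key=lambda kv: kv[1], reverse=True)


# e.g. a text-tower vector scored against image-tower thumbnails:
# cosine_rank(text_vec, {"event_a": thumb_a, "event_b": thumb_b})
```

The shipped queries rank by sqlite-vec's L2 distance instead; for unit-length vectors the two orderings coincide, since ‖a − b‖² = 2 − 2·cos θ.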
+++ b/web/src/types/log.ts @@ -12,5 +12,5 @@ export type LogLine = { content: string; }; -export const logTypes = ["frigate", "go2rtc", "nginx", "chroma"] as const; +export const logTypes = ["frigate", "go2rtc", "nginx"] as const; export type LogType = (typeof logTypes)[number]; diff --git a/web/src/utils/logUtil.ts b/web/src/utils/logUtil.ts index 5c787c3960..569d417beb 100644 --- a/web/src/utils/logUtil.ts +++ b/web/src/utils/logUtil.ts @@ -128,46 +128,6 @@ export function parseLogLines(logService: LogType, logs: string[]) { }; }) .filter((value) => value != null) as LogLine[]; - } else if (logService == "chroma") { - return logs - .map((line) => { - const match = frigateDateStamp.exec(line); - - if (!match) { - const infoIndex = line.indexOf("[INFO]"); - - if (infoIndex != -1) { - return { - dateStamp: line.substring(0, 19), - severity: "info", - section: "startup", - content: line.substring(infoIndex + 6).trim(), - }; - } - - return null; - } - - const startup = - line.indexOf("Starting component") !== -1 || - line.indexOf("startup") !== -1 || - line.indexOf("Started") !== -1 || - line.indexOf("Uvicorn") !== -1; - const api = !!httpMethods.exec(line); - const tag = startup ? "startup" : api ? "API" : "server"; - - return { - dateStamp: match.toString().slice(1, -1), - severity: pythonSeverity - .exec(line) - ?.at(0) - ?.toString() - ?.toLowerCase() as LogSeverity, - section: tag, - content: line.substring(match.index + match[0].length).trim(), - }; - }) - .filter((value) => value != null) as LogLine[]; } return []; From d189893459ca831a8d1e7839d6bbd206fa441ee9 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Sat, 5 Oct 2024 05:44:39 -0500 Subject: [PATCH 24/56] try removing custom pysqlite3 build --- docker/main/Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker/main/Dockerfile b/docker/main/Dockerfile index ace7e7d065..a6174e4fe3 100644 --- a/docker/main/Dockerfile +++ b/docker/main/Dockerfile @@ -164,8 +164,8 @@ COPY docker/main/requirements.txt /requirements.txt RUN pip3 install -r /requirements.txt # Build pysqlite3 from source -COPY docker/main/build_pysqlite3.sh /build_pysqlite3.sh -RUN /build_pysqlite3.sh +# COPY docker/main/build_pysqlite3.sh /build_pysqlite3.sh +# RUN /build_pysqlite3.sh COPY docker/main/requirements-wheels.txt /requirements-wheels.txt RUN pip3 wheel --wheel-dir=/wheels -r /requirements-wheels.txt From 92298e6578e66cdcdc9fa1df7c23acd5ef8f48ee Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Sat, 5 Oct 2024 05:45:06 -0500 Subject: [PATCH 25/56] hard code limit --- frigate/embeddings/embeddings.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/frigate/embeddings/embeddings.py b/frigate/embeddings/embeddings.py index 1b12bdc5f9..faf4f81d35 100644 --- a/frigate/embeddings/embeddings.py +++ b/frigate/embeddings/embeddings.py @@ -130,7 +130,7 @@ def delete_description(self, event_ids: List[str]) -> None: ) def search_thumbnail( - self, query: Union[Event, str], event_ids: List[str] = None, limit=10 + self, query: Union[Event, str], event_ids: List[str] = None ) -> List[Tuple[str, float]]: if query.__class__ == Event: cursor = self.db.execute_sql( @@ -157,11 +157,11 @@ def search_thumbnail( sql_query = """ SELECT - vec_thumbnails.id, + id, distance FROM vec_thumbnails WHERE thumbnail_embedding MATCH ? - AND k = ? 
+ AND k = 100 """ # Add the IN clause if event_ids is provided and not empty @@ -172,9 +172,9 @@ def search_thumbnail( sql_query += " ORDER BY distance" parameters = ( - [serialize(query_embedding), limit] + event_ids + [serialize(query_embedding)] + event_ids if event_ids - else [serialize(query_embedding), limit] + else [serialize(query_embedding)] ) results = self.db.execute_sql(sql_query, parameters).fetchall() @@ -182,18 +182,18 @@ def search_thumbnail( return results def search_description( - self, query_text: str, event_ids: List[str] = None, limit=10 + self, query_text: str, event_ids: List[str] = None ) -> List[Tuple[str, float]]: query_embedding = self.minilm_embedding([query_text])[0] # Prepare the base SQL query sql_query = """ SELECT - vec_descriptions.id, + id, distance FROM vec_descriptions WHERE description_embedding MATCH ? - AND k = ? + AND k = 100 """ # Add the IN clause if event_ids is provided and not empty @@ -204,9 +204,9 @@ def search_description( sql_query += " ORDER BY distance" parameters = ( - [serialize(query_embedding), limit] + event_ids + [serialize(query_embedding)] + event_ids if event_ids - else [serialize(query_embedding), limit] + else [serialize(query_embedding)] ) results = self.db.execute_sql(sql_query, parameters).fetchall() From fa6fda42958084905324f41faf2ed91103a37725 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Sat, 5 Oct 2024 05:45:25 -0500 Subject: [PATCH 26/56] optimize queries --- frigate/api/event.py | 154 ++++++++++++++++++------------------------- 1 file changed, 64 insertions(+), 90 deletions(-) diff --git a/frigate/api/event.py b/frigate/api/event.py index a225028495..0815e7c66c 100644 --- a/frigate/api/event.py +++ b/frigate/api/event.py @@ -11,7 +11,7 @@ from fastapi import APIRouter, Request from fastapi.params import Depends from fastapi.responses import JSONResponse -from peewee import JOIN, DoesNotExist, fn, operator +from peewee import JOIN, DoesNotExist, Window, fn, operator from playhouse.shortcuts import model_to_dict from frigate.api.defs.events_body import ( @@ -259,7 +259,7 @@ def events(params: EventsQueryParams = Depends()): @router.get("/events/explore") def events_explore(limit: int = 10): - subquery = Event.select( + ranked_events = Event.select( Event.id, Event.camera, Event.label, @@ -275,38 +275,37 @@ def events_explore(limit: int = 10): Event.false_positive, Event.box, Event.data, - fn.rank() + fn.COUNT(Event.id).over(partition_by=[Event.label]).alias("event_count"), + Window.row_number() .over(partition_by=[Event.label], order_by=[Event.start_time.desc()]) .alias("rank"), - fn.COUNT(Event.id).over(partition_by=[Event.label]).alias("event_count"), - ).alias("subquery") + ).alias("ranked_events") query = ( - Event.select( - subquery.c.id, - subquery.c.camera, - subquery.c.label, - subquery.c.zones, - subquery.c.start_time, - subquery.c.end_time, - subquery.c.has_clip, - subquery.c.has_snapshot, - subquery.c.plus_id, - subquery.c.retain_indefinitely, - subquery.c.sub_label, - subquery.c.top_score, - subquery.c.false_positive, - subquery.c.box, - subquery.c.data, - subquery.c.event_count, + ranked_events.select( + ranked_events.c.id, + ranked_events.c.camera, + ranked_events.c.label, + ranked_events.c.zones, + ranked_events.c.start_time, + ranked_events.c.end_time, + ranked_events.c.has_clip, + ranked_events.c.has_snapshot, + ranked_events.c.plus_id, + ranked_events.c.retain_indefinitely, + ranked_events.c.sub_label, + ranked_events.c.top_score, + 
ranked_events.c.false_positive, + ranked_events.c.box, + ranked_events.c.data, + ranked_events.c.event_count, ) - .from_(subquery) - .where(subquery.c.rank <= limit) - .order_by(subquery.c.event_count.desc(), subquery.c.start_time.desc()) + .where(ranked_events.c.rank <= limit) + .order_by(ranked_events.c.event_count.desc(), ranked_events.c.start_time.desc()) .dicts() ) - events = list(query.iterator()) + events = list(query) processed_events = [ {k: v for k, v in event.items() if k != "data"} @@ -406,16 +405,12 @@ def events_search(request: Request, params: EventsSearchQueryParams = Depends()) event_filters = [] if cameras != "all": - camera_list = cameras.split(",") - event_filters.append((Event.camera << camera_list)) + event_filters.append((Event.camera << cameras.split(","))) if labels != "all": - label_list = labels.split(",") - event_filters.append((Event.label << label_list)) + event_filters.append((Event.label << labels.split(","))) if zones != "all": - # use matching so events with multiple zones - # still match on a search where any zone matches zone_clauses = [] filtered_zones = zones.split(",") @@ -426,8 +421,7 @@ def events_search(request: Request, params: EventsSearchQueryParams = Depends()) for zone in filtered_zones: zone_clauses.append((Event.zones.cast("text") % f'*"{zone}"*')) - zone_clause = reduce(operator.or_, zone_clauses) - event_filters.append((zone_clause)) + event_filters.append((reduce(operator.or_, zone_clauses))) if after: event_filters.append((Event.start_time > after)) @@ -436,13 +430,11 @@ def events_search(request: Request, params: EventsSearchQueryParams = Depends()) event_filters.append((Event.start_time < before)) if time_range != DEFAULT_TIME_RANGE: - # get timezone arg to ensure browser times are used tz_name = params.timezone hour_modifier, minute_modifier, _ = get_tz_modifiers(tz_name) times = time_range.split(",") - time_after = times[0] - time_before = times[1] + time_after, time_before = times start_hour_fun = fn.strftime( "%H:%M", @@ -465,23 +457,8 @@ def events_search(request: Request, params: EventsSearchQueryParams = Depends()) event_filters.append((start_hour_fun > time_after)) event_filters.append((start_hour_fun < time_before)) - if event_filters: - filtered_event_ids = ( - Event.select(Event.id) - .where(reduce(operator.and_, event_filters)) - .tuples() - .iterator() - ) - event_ids = [event_id[0] for event_id in filtered_event_ids] - - if not event_ids: - return JSONResponse(content=[]) # No events to search on - else: - event_ids = [] - - thumb_ids = {} - desc_ids = {} - + # Perform semantic search + search_results = {} if search_type == "similarity": try: search_event: Event = Event.get(Event.id == event_id) @@ -494,23 +471,22 @@ def events_search(request: Request, params: EventsSearchQueryParams = Depends()) status_code=404, ) - # Get thumbnail results for the specific event - thumb_result = context.embeddings.search_thumbnail( - search_event, event_ids, limit - ) - + thumb_result = context.embeddings.search_thumbnail(search_event) thumb_ids = dict( zip( [result[0] for result in thumb_result], context.thumb_stats.normalize([result[1] for result in thumb_result]), ) ) + search_results = { + event_id: {"distance": distance, "source": "thumbnail"} + for event_id, distance in thumb_ids.items() + } else: search_types = search_type.split(",") if "thumbnail" in search_types: - thumb_result = context.embeddings.search_thumbnail(query, event_ids, limit) - + thumb_result = context.embeddings.search_thumbnail(query) thumb_ids = dict( zip( 
[result[0] for result in thumb_result], @@ -519,40 +495,35 @@ def events_search(request: Request, params: EventsSearchQueryParams = Depends()) ), ) ) + search_results.update( + { + event_id: {"distance": distance, "source": "thumbnail"} + for event_id, distance in thumb_ids.items() + } + ) if "description" in search_types: - desc_result = context.embeddings.search_description(query, event_ids, limit) - + desc_result = context.embeddings.search_description(query) desc_ids = dict( zip( [result[0] for result in desc_result], context.desc_stats.normalize([result[1] for result in desc_result]), ) ) - - results = {} - for event_id in thumb_ids.keys() | desc_ids.keys(): - thumb_distance = thumb_ids.get(event_id) - desc_distance = desc_ids.get(event_id) - - # Select the minimum distance from the available results - if thumb_distance is not None and ( - desc_distance is None or thumb_distance < desc_distance - ): - results[event_id] = { - "distance": thumb_distance, - "source": "thumbnail", - } - elif desc_distance is not None: - results[event_id] = { - "distance": desc_distance, - "source": "description", - } - - if not results: + for event_id, distance in desc_ids.items(): + if ( + event_id not in search_results + or distance < search_results[event_id]["distance"] + ): + search_results[event_id] = { + "distance": distance, + "source": "description", + } + + if not search_results: return JSONResponse(content=[]) - # Get the event data + # Fetch events in a single query events = ( Event.select(*selected_columns) .join( @@ -560,11 +531,14 @@ def events_search(request: Request, params: EventsSearchQueryParams = Depends()) JOIN.LEFT_OUTER, on=(fn.json_extract(ReviewSegment.data, "$.detections").contains(Event.id)), ) - .where(Event.id << list(results.keys())) + .where( + (Event.id << list(search_results.keys())) + & reduce(operator.and_, event_filters) + if event_filters + else True + ) .dicts() - .iterator() ) - events = list(events) events = [ {k: v for k, v in event.items() if k != "data"} @@ -576,8 +550,8 @@ def events_search(request: Request, params: EventsSearchQueryParams = Depends()) } } | { - "search_distance": results[event["id"]]["distance"], - "search_source": results[event["id"]]["source"], + "search_distance": search_results[event["id"]]["distance"], + "search_source": search_results[event["id"]]["source"], } for event in events ] From 6bfee9993e166503d9dac56ead6c1a2886968a76 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Sat, 5 Oct 2024 06:07:33 -0500 Subject: [PATCH 27/56] revert explore query --- frigate/api/event.py | 51 ++++++++++++++++++++++---------------------- 1 file changed, 26 insertions(+), 25 deletions(-) diff --git a/frigate/api/event.py b/frigate/api/event.py index 0815e7c66c..6da4a824eb 100644 --- a/frigate/api/event.py +++ b/frigate/api/event.py @@ -11,7 +11,7 @@ from fastapi import APIRouter, Request from fastapi.params import Depends from fastapi.responses import JSONResponse -from peewee import JOIN, DoesNotExist, Window, fn, operator +from peewee import JOIN, DoesNotExist, fn, operator from playhouse.shortcuts import model_to_dict from frigate.api.defs.events_body import ( @@ -259,7 +259,7 @@ def events(params: EventsQueryParams = Depends()): @router.get("/events/explore") def events_explore(limit: int = 10): - ranked_events = Event.select( + subquery = Event.select( Event.id, Event.camera, Event.label, @@ -275,37 +275,38 @@ def events_explore(limit: int = 10): Event.false_positive, Event.box, Event.data, - 
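The events_search rework above merges the two candidate sets by normalizing each source's distances and keeping the smaller score per event id. The ZScoreNormalization helper itself isn't shown here; a speculative sketch of the behavior the call sites rely on — a running mean and variance (Welford's algorithm) so thumbnail and description distances land on a comparable scale:

```python
import math


class ZScoreNormalization:
    """Sketch only, not Frigate's implementation: running z-score."""

    def __init__(self) -> None:
        self.n = 0
        self.mean = 0.0
        self.m2 = 0.0  # running sum of squared deviations

    def normalize(self, values: list[float]) -> list[float]:
        for x in values:  # fold new observations into the running stats
            self.n += 1
            delta = x - self.mean
            self.mean += delta / self.n
            self.m2 += delta * (x - self.mean)
        stddev = math.sqrt(self.m2 / self.n) if self.n > 1 else 0.0
        if stddev == 0.0:
            return [0.0 for _ in values]
        return [(x - self.mean) / stddev for x in values]
```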
fn.COUNT(Event.id).over(partition_by=[Event.label]).alias("event_count"), - Window.row_number() + fn.rank() .over(partition_by=[Event.label], order_by=[Event.start_time.desc()]) .alias("rank"), - ).alias("ranked_events") + fn.COUNT(Event.id).over(partition_by=[Event.label]).alias("event_count"), + ).alias("subquery") query = ( - ranked_events.select( - ranked_events.c.id, - ranked_events.c.camera, - ranked_events.c.label, - ranked_events.c.zones, - ranked_events.c.start_time, - ranked_events.c.end_time, - ranked_events.c.has_clip, - ranked_events.c.has_snapshot, - ranked_events.c.plus_id, - ranked_events.c.retain_indefinitely, - ranked_events.c.sub_label, - ranked_events.c.top_score, - ranked_events.c.false_positive, - ranked_events.c.box, - ranked_events.c.data, - ranked_events.c.event_count, + Event.select( + subquery.c.id, + subquery.c.camera, + subquery.c.label, + subquery.c.zones, + subquery.c.start_time, + subquery.c.end_time, + subquery.c.has_clip, + subquery.c.has_snapshot, + subquery.c.plus_id, + subquery.c.retain_indefinitely, + subquery.c.sub_label, + subquery.c.top_score, + subquery.c.false_positive, + subquery.c.box, + subquery.c.data, + subquery.c.event_count, ) - .where(ranked_events.c.rank <= limit) - .order_by(ranked_events.c.event_count.desc(), ranked_events.c.start_time.desc()) + .from_(subquery) + .where(subquery.c.rank <= limit) + .order_by(subquery.c.event_count.desc(), subquery.c.start_time.desc()) .dicts() ) - events = list(query) + events = list(query.iterator()) processed_events = [ {k: v for k, v in event.items() if k != "data"} From 18e1eca914b44712013d54a98c6f66c6f73687d9 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Sat, 5 Oct 2024 06:46:34 -0500 Subject: [PATCH 28/56] fix query --- frigate/api/event.py | 49 +++++++++++++++++++++++++++----------------- 1 file changed, 30 insertions(+), 19 deletions(-) diff --git a/frigate/api/event.py b/frigate/api/event.py index 6da4a824eb..b68872658f 100644 --- a/frigate/api/event.py +++ b/frigate/api/event.py @@ -525,22 +525,23 @@ def events_search(request: Request, params: EventsSearchQueryParams = Depends()) return JSONResponse(content=[]) # Fetch events in a single query - events = ( - Event.select(*selected_columns) - .join( - ReviewSegment, - JOIN.LEFT_OUTER, - on=(fn.json_extract(ReviewSegment.data, "$.detections").contains(Event.id)), - ) - .where( - (Event.id << list(search_results.keys())) - & reduce(operator.and_, event_filters) - if event_filters - else True - ) - .dicts() + events_query = Event.select(*selected_columns).join( + ReviewSegment, + JOIN.LEFT_OUTER, + on=(fn.json_extract(ReviewSegment.data, "$.detections").contains(Event.id)), ) + # Apply filters, if any + if event_filters: + events_query = events_query.where(reduce(operator.and_, event_filters)) + + # If we did a similarity search, limit events to those in search_results + if search_results: + events_query = events_query.where(Event.id << list(search_results.keys())) + + events = events_query.dicts() + + # Build the final event list events = [ {k: v for k, v in event.items() if k != "data"} | { @@ -550,13 +551,23 @@ def events_search(request: Request, params: EventsSearchQueryParams = Depends()) if k in ["type", "score", "top_score", "description"] } } - | { - "search_distance": search_results[event["id"]]["distance"], - "search_source": search_results[event["id"]]["source"], - } + | ( + { + "search_distance": search_results[event["id"]]["distance"], + "search_source": 
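One reason these two patches flip between window functions: RANK() assigns tied rows the same number, so when several events of a label share a start_time, `rank <= limit` can return more than `limit` rows per label, whereas ROW_NUMBER() is strictly sequential. Both are reachable through peewee's generic `fn` factory; a sketch against the same model:

```python
from peewee import fn

from frigate.models import Event

# strictly sequential: at most `limit` newest events per label
row_number = fn.ROW_NUMBER().over(
    partition_by=[Event.label], order_by=[Event.start_time.desc()]
)

# ties share a rank, so `rank <= limit` may overshoot on equal start times
rank = fn.RANK().over(
    partition_by=[Event.label], order_by=[Event.start_time.desc()]
)
```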
search_results[event["id"]]["source"], + } + if event["id"] in search_results + else {} + ) for event in events ] - events = sorted(events, key=lambda x: x["search_distance"])[:limit] + + # Sort by search distance if search_results are available + if search_results: + events = sorted(events, key=lambda x: x.get("search_distance", float("inf"))) + + # Limit the number of events returned + events = events[:limit] return JSONResponse(content=events) From 57d0aabd5674e801a8ccae7f21fb3e608976c74d Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Sat, 5 Oct 2024 12:18:33 -0500 Subject: [PATCH 29/56] keep building pysqlite3 --- docker/main/Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker/main/Dockerfile b/docker/main/Dockerfile index a6174e4fe3..ace7e7d065 100644 --- a/docker/main/Dockerfile +++ b/docker/main/Dockerfile @@ -164,8 +164,8 @@ COPY docker/main/requirements.txt /requirements.txt RUN pip3 install -r /requirements.txt # Build pysqlite3 from source -# COPY docker/main/build_pysqlite3.sh /build_pysqlite3.sh -# RUN /build_pysqlite3.sh +COPY docker/main/build_pysqlite3.sh /build_pysqlite3.sh +RUN /build_pysqlite3.sh COPY docker/main/requirements-wheels.txt /requirements-wheels.txt RUN pip3 wheel --wheel-dir=/wheels -r /requirements-wheels.txt From 3e58a6dfaf83bd2b0421aa05dfc4a8e94afbbe06 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Sat, 5 Oct 2024 12:18:49 -0500 Subject: [PATCH 30/56] single pass fetch and process --- frigate/api/event.py | 45 ++++++++++++++++++++------------------------ 1 file changed, 20 insertions(+), 25 deletions(-) diff --git a/frigate/api/event.py b/frigate/api/event.py index b68872658f..a87d099407 100644 --- a/frigate/api/event.py +++ b/frigate/api/event.py @@ -539,37 +539,32 @@ def events_search(request: Request, params: EventsSearchQueryParams = Depends()) if search_results: events_query = events_query.where(Event.id << list(search_results.keys())) - events = events_query.dicts() - - # Build the final event list - events = [ - {k: v for k, v in event.items() if k != "data"} - | { - "data": { - k: v - for k, v in event["data"].items() - if k in ["type", "score", "top_score", "description"] - } + # Fetch events and process them in a single pass + processed_events = [] + for event in events_query.dicts(): + processed_event = {k: v for k, v in event.items() if k != "data"} + processed_event["data"] = { + k: v + for k, v in event["data"].items() + if k in ["type", "score", "top_score", "description"] } - | ( - { - "search_distance": search_results[event["id"]]["distance"], - "search_source": search_results[event["id"]]["source"], - } - if event["id"] in search_results - else {} - ) - for event in events - ] - # Sort by search distance if search_results are available + if event["id"] in search_results: + processed_event["search_distance"] = search_results[event["id"]]["distance"] + processed_event["search_source"] = search_results[event["id"]]["source"] + + processed_events.append(processed_event) + + # Sort by search distance if search_results are available, otherwise by start_time if search_results: - events = sorted(events, key=lambda x: x.get("search_distance", float("inf"))) + processed_events.sort(key=lambda x: x.get("search_distance", float("inf"))) + else: + processed_events.sort(key=lambda x: x["start_time"], reverse=True) # Limit the number of events returned - events = events[:limit] + processed_events = 
processed_events[:limit] - return JSONResponse(content=events) + return JSONResponse(content=processed_events) @router.get("/events/summary") From bc528123b38ba27a786c49016749ee24ff5d1bbc Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Sat, 5 Oct 2024 12:20:50 -0500 Subject: [PATCH 31/56] remove unnecessary re-embed --- frigate/embeddings/embeddings.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/frigate/embeddings/embeddings.py b/frigate/embeddings/embeddings.py index faf4f81d35..cfc96c2e73 100644 --- a/frigate/embeddings/embeddings.py +++ b/frigate/embeddings/embeddings.py @@ -105,6 +105,8 @@ def upsert_thumbnail(self, event_id: str, thumbnail: bytes): (event_id, serialize(embedding)), ) + return embedding + def upsert_description(self, event_id: str, description: str): # Generate embedding using MiniLM embedding = self.minilm_embedding([description])[0] @@ -117,6 +119,8 @@ def upsert_description(self, event_id: str, description: str): (event_id, serialize(embedding)), ) + return embedding + def delete_thumbnail(self, event_ids: List[str]) -> None: ids = ",".join(["?" for _ in event_ids]) self.db.execute_sql( @@ -147,11 +151,9 @@ def search_thumbnail( row[0] ) # Deserialize the thumbnail embedding else: - # If no embedding found, generate it + # If no embedding found, generate it and return it thumbnail = base64.b64decode(query.thumbnail) - self.upsert_thumbnail(query.id, thumbnail) - image = Image.open(io.BytesIO(thumbnail)).convert("RGB") - query_embedding = self.clip_embedding([image])[0] + query_embedding = self.upsert_thumbnail(query.id, thumbnail) else: query_embedding = self.clip_embedding([query])[0] @@ -166,9 +168,12 @@ def search_thumbnail( # Add the IN clause if event_ids is provided and not empty # this is the only filter supported by sqlite-vec as of 0.1.3 + # but it seems to be broken in this version if event_ids: sql_query += " AND id IN ({})".format(",".join("?" * len(event_ids))) + # order by distance DESC is not implemented in this version of sqlite-vec + # when it's implemented, we can use cosine similarity sql_query += " ORDER BY distance" parameters = ( @@ -198,9 +203,12 @@ def search_description( # Add the IN clause if event_ids is provided and not empty # this is the only filter supported by sqlite-vec as of 0.1.3 + # but it seems to be broken in this version if event_ids: sql_query += " AND id IN ({})".format(",".join("?" 
* len(event_ids))) + # order by distance DESC is not implemented in this version of sqlite-vec + # when it's implemented, we can use cosine similarity sql_query += " ORDER BY distance" parameters = ( From 72e488362d1cd14be0ab076316d30ada239a5791 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Sat, 5 Oct 2024 12:24:10 -0500 Subject: [PATCH 32/56] update deps --- docker/main/requirements-wheels.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docker/main/requirements-wheels.txt b/docker/main/requirements-wheels.txt index 7a36f9e5f7..4db0a935a9 100644 --- a/docker/main/requirements-wheels.txt +++ b/docker/main/requirements-wheels.txt @@ -37,9 +37,9 @@ sqlite_vec == 0.1.3 transformers == 4.45.* onnx_clip == 4.0.* # Generative AI -google-generativeai == 0.6.* -ollama == 0.2.* -openai == 1.30.* +google-generativeai == 0.8.* +ollama == 0.3.* +openai == 1.51.* # push notifications py-vapid == 1.9.* pywebpush == 2.0.* From 87cc0079b12cd5c008739aa6494f749e265a0f8b Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Sat, 5 Oct 2024 12:44:24 -0500 Subject: [PATCH 33/56] move SqliteVecQueueDatabase to db directory --- frigate/app.py | 2 +- frigate/{embeddings => db}/sqlitevecq.py | 0 frigate/embeddings/__init__.py | 2 +- frigate/embeddings/embeddings.py | 2 +- 4 files changed, 3 insertions(+), 3 deletions(-) rename frigate/{embeddings => db}/sqlitevecq.py (100%) diff --git a/frigate/app.py b/frigate/app.py index 255e0c1a95..e370150bb9 100644 --- a/frigate/app.py +++ b/frigate/app.py @@ -38,8 +38,8 @@ MODEL_CACHE_DIR, RECORD_DIR, ) +from frigate.db.sqlitevecq import SqliteVecQueueDatabase from frigate.embeddings import EmbeddingsContext, manage_embeddings -from frigate.embeddings.sqlitevecq import SqliteVecQueueDatabase from frigate.events.audio import AudioProcessor from frigate.events.cleanup import EventCleanup from frigate.events.external import ExternalEventProcessor diff --git a/frigate/embeddings/sqlitevecq.py b/frigate/db/sqlitevecq.py similarity index 100% rename from frigate/embeddings/sqlitevecq.py rename to frigate/db/sqlitevecq.py diff --git a/frigate/embeddings/__init__.py b/frigate/embeddings/__init__.py index b26e1328ee..12fca7caf3 100644 --- a/frigate/embeddings/__init__.py +++ b/frigate/embeddings/__init__.py @@ -13,7 +13,7 @@ from frigate.config import FrigateConfig from frigate.const import CONFIG_DIR -from frigate.embeddings.sqlitevecq import SqliteVecQueueDatabase +from frigate.db.sqlitevecq import SqliteVecQueueDatabase from frigate.models import Event from frigate.util.services import listen diff --git a/frigate/embeddings/embeddings.py b/frigate/embeddings/embeddings.py index cfc96c2e73..2378ea64cb 100644 --- a/frigate/embeddings/embeddings.py +++ b/frigate/embeddings/embeddings.py @@ -10,7 +10,7 @@ from PIL import Image from playhouse.shortcuts import model_to_dict -from frigate.embeddings.sqlitevecq import SqliteVecQueueDatabase +from frigate.db.sqlitevecq import SqliteVecQueueDatabase from frigate.models import Event from .functions.clip import ClipEmbedding From cf7b27875e65d4ae898e7133904b2cb5c7b5ded9 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Sun, 6 Oct 2024 07:44:49 -0500 Subject: [PATCH 34/56] make search thumbnail take up full size of results box --- web/src/components/card/SearchThumbnail.tsx | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git 
a/web/src/components/card/SearchThumbnail.tsx b/web/src/components/card/SearchThumbnail.tsx index c595cc85fa..620c445016 100644 --- a/web/src/components/card/SearchThumbnail.tsx +++ b/web/src/components/card/SearchThumbnail.tsx @@ -52,12 +52,11 @@ export default function SearchThumbnail({ className="absolute inset-0" imgLoaded={imgLoaded} /> - [three removed JSX lines lost in extraction: per the commit subject and diffstat, a wrapper around the thumbnail image is dropped so it fills the results box]
+ [two added JSX lines lost in extraction, along with the mbox From lines of the following patch] From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>
Date: Sun, 6 Oct 2024 07:45:04 -0500 Subject: [PATCH 35/56] improve typing --- frigate/db/sqlitevecq.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/frigate/db/sqlitevecq.py b/frigate/db/sqlitevecq.py index 2053640108..26d7e921c7 100644 --- a/frigate/db/sqlitevecq.py +++ b/frigate/db/sqlitevecq.py @@ -1,19 +1,21 @@ +import sqlite3 + import sqlite_vec from playhouse.sqliteq import SqliteQueueDatabase class SqliteVecQueueDatabase(SqliteQueueDatabase): - def __init__(self, *args, load_vec_extension=False, **kwargs): + def __init__(self, *args, load_vec_extension: bool = False, **kwargs) -> None: super().__init__(*args, **kwargs) - self.load_vec_extension = load_vec_extension + self.load_vec_extension: bool = load_vec_extension - def _connect(self, *args, **kwargs): - conn = super()._connect(*args, **kwargs) + def _connect(self, *args, **kwargs) -> sqlite3.Connection: + conn: sqlite3.Connection = super()._connect(*args, **kwargs) if self.load_vec_extension: self._load_vec_extension(conn) return conn - def _load_vec_extension(self, conn): + def _load_vec_extension(self, conn: sqlite3.Connection) -> None: conn.enable_load_extension(True) sqlite_vec.load(conn) conn.enable_load_extension(False) From 52a2bbc489c8a2fe5ba2e7bf2a796aeafed64d22 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 7 Oct 2024 08:05:59 -0500 Subject: [PATCH 36/56] improve model downloading and add status screen --- frigate/app.py | 4 +- frigate/comms/dispatcher.py | 11 ++ frigate/const.py | 1 + frigate/embeddings/__init__.py | 3 +- frigate/embeddings/embeddings.py | 24 ++- frigate/embeddings/functions/clip.py | 165 +++++++++++++------ frigate/embeddings/functions/minilm_l6_v2.py | 132 +++++++-------- frigate/types.py | 8 + frigate/util/downloader.py | 119 +++++++++++++ web/src/api/ws.tsx | 36 ++++ web/src/pages/Explore.tsx | 128 +++++++++++--- web/src/types/ws.ts | 6 + 12 files changed, 494 insertions(+), 143 deletions(-) create mode 100644 frigate/util/downloader.py diff --git a/frigate/app.py b/frigate/app.py index e370150bb9..65272f2be2 100644 --- a/frigate/app.py +++ b/frigate/app.py @@ -590,13 +590,13 @@ def start(self) -> None: self.init_onvif() self.init_recording_manager() self.init_review_segment_manager() - self.init_embeddings_manager() self.init_go2rtc() self.bind_database() self.check_db_data_migrations() - self.init_embeddings_client() self.init_inter_process_communicator() self.init_dispatcher() + self.init_embeddings_manager() + self.init_embeddings_client() self.start_detectors() self.start_video_output_processor() self.start_ptz_autotracker() diff --git a/frigate/comms/dispatcher.py b/frigate/comms/dispatcher.py index a987f6a382..1605d645af 100644 --- a/frigate/comms/dispatcher.py +++ b/frigate/comms/dispatcher.py @@ -16,10 +16,12 @@ REQUEST_REGION_GRID, UPDATE_CAMERA_ACTIVITY, UPDATE_EVENT_DESCRIPTION, + UPDATE_MODEL_STATE, UPSERT_REVIEW_SEGMENT, ) from frigate.models import Event, Previews, Recordings, ReviewSegment from frigate.ptz.onvif import OnvifCommandEnum, OnvifController +from frigate.types import ModelStatusTypesEnum from frigate.util.object import get_camera_regions_grid from frigate.util.services import restart_frigate @@ -83,6 +85,7 @@ def __init__( comm.subscribe(self._receive) self.camera_activity = {} + self.model_state = {} def _receive(self, topic: str, payload: str) -> Optional[Any]: """Handle receiving of payload from communicators.""" @@ -144,6 +147,14 @@ def _receive(self, topic: str, payload: str) 
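The typed subclass keeps SqliteVecQueueDatabase a drop-in replacement for playhouse's SqliteQueueDatabase, with the extension loaded on every new connection. Wiring it up stays a one-liner; a hedged usage sketch (the database path is illustrative):

```python
from frigate.db.sqlitevecq import SqliteVecQueueDatabase

# each connection the queue database opens loads sqlite-vec before any
# vec0 query runs, so worker threads never see an unextended connection
db = SqliteVecQueueDatabase("/config/frigate.db", load_vec_extension=True)
```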
-> Optional[Any]: "event_update", json.dumps({"id": event.id, "description": event.data["description"]}), ) + elif topic == UPDATE_MODEL_STATE: + model = payload["model"] + state = payload["state"] + self.model_state[model] = ModelStatusTypesEnum[state] + self.publish("model_state", json.dumps(self.model_state)) + elif topic == "modelState": + model_state = self.model_state.copy() + self.publish("model_state", json.dumps(model_state)) elif topic == "onConnect": camera_status = self.camera_activity.copy() diff --git a/frigate/const.py b/frigate/const.py index b37ca662e1..e8e841f4f2 100644 --- a/frigate/const.py +++ b/frigate/const.py @@ -84,6 +84,7 @@ CLEAR_ONGOING_REVIEW_SEGMENTS = "clear_ongoing_review_segments" UPDATE_CAMERA_ACTIVITY = "update_camera_activity" UPDATE_EVENT_DESCRIPTION = "update_event_description" +UPDATE_MODEL_STATE = "update_model_state" # Stats Values diff --git a/frigate/embeddings/__init__.py b/frigate/embeddings/__init__.py index 12fca7caf3..381d95ed19 100644 --- a/frigate/embeddings/__init__.py +++ b/frigate/embeddings/__init__.py @@ -71,8 +71,7 @@ def receiveSignal(signalNumber: int, frame: Optional[FrameType]) -> None: class EmbeddingsContext: def __init__(self, db: SqliteVecQueueDatabase): - self.db = db - self.embeddings = Embeddings(self.db) + self.embeddings = Embeddings(db) self.thumb_stats = ZScoreNormalization() self.desc_stats = ZScoreNormalization() diff --git a/frigate/embeddings/embeddings.py b/frigate/embeddings/embeddings.py index 2378ea64cb..c763bf304f 100644 --- a/frigate/embeddings/embeddings.py +++ b/frigate/embeddings/embeddings.py @@ -10,8 +10,11 @@ from PIL import Image from playhouse.shortcuts import model_to_dict +from frigate.comms.inter_process import InterProcessRequestor +from frigate.const import UPDATE_MODEL_STATE from frigate.db.sqlitevecq import SqliteVecQueueDatabase from frigate.models import Event +from frigate.types import ModelStatusTypesEnum from .functions.clip import ClipEmbedding from .functions.minilm_l6_v2 import MiniLMEmbedding @@ -65,11 +68,30 @@ class Embeddings: def __init__(self, db: SqliteVecQueueDatabase) -> None: self.db = db + self.requestor = InterProcessRequestor() # Create tables if they don't exist self._create_tables() - self.clip_embedding = ClipEmbedding(model="ViT-B/32") + models = [ + "sentence-transformers/all-MiniLM-L6-v2-model.onnx", + "sentence-transformers/all-MiniLM-L6-v2-tokenizer", + "clip-clip_image_model_vitb32.onnx", + "clip-clip_text_model_vitb32.onnx", + ] + + for model in models: + self.requestor.send_data( + UPDATE_MODEL_STATE, + { + "model": model, + "state": ModelStatusTypesEnum.not_downloaded, + }, + ) + + self.clip_embedding = ClipEmbedding( + preferred_providers=["CPUExecutionProvider"] + ) self.minilm_embedding = MiniLMEmbedding( preferred_providers=["CPUExecutionProvider"], ) diff --git a/frigate/embeddings/functions/clip.py b/frigate/embeddings/functions/clip.py index 55cdb3b472..a997bcb6f6 100644 --- a/frigate/embeddings/functions/clip.py +++ b/frigate/embeddings/functions/clip.py @@ -1,30 +1,59 @@ -"""CLIP Embeddings for Frigate.""" - -import errno import logging import os -from pathlib import Path -from typing import List, Union +from typing import List, Optional, Union import numpy as np import onnxruntime as ort -import requests -from onnx_clip import OnnxClip +from onnx_clip import OnnxClip, Preprocessor, Tokenizer from PIL import Image -from frigate.const import MODEL_CACHE_DIR +from frigate.const import MODEL_CACHE_DIR, UPDATE_MODEL_STATE +from frigate.types import 
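The dispatcher changes above turn download progress into a small pub/sub flow: worker processes push UPDATE_MODEL_STATE through the inter-process requestor, the dispatcher caches each model's state, and re-publishes the accumulated dict on the model_state topic for websocket clients. The producer side, mirroring the calls this patch adds:

```python
from frigate.comms.inter_process import InterProcessRequestor
from frigate.const import UPDATE_MODEL_STATE
from frigate.types import ModelStatusTypesEnum

requestor = InterProcessRequestor()
requestor.send_data(
    UPDATE_MODEL_STATE,
    {
        "model": "clip-clip_image_model_vitb32.onnx",
        "state": ModelStatusTypesEnum.downloading,
    },
)
```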
ModelStatusTypesEnum +from frigate.util.downloader import ModelDownloader + +logger = logging.getLogger(__name__) class Clip(OnnxClip): - """Override load models to download to cache directory.""" + """Override load models to use pre-downloaded models from cache directory.""" + + def __init__( + self, + model: str = "ViT-B/32", + batch_size: Optional[int] = None, + providers: List[str] = ["CPUExecutionProvider"], + ): + """ + Instantiates the model and required encoding classes. + + Args: + model: The model to utilize. Currently ViT-B/32 and RN50 are + allowed. + batch_size: If set, splits the lists in `get_image_embeddings` + and `get_text_embeddings` into batches of this size before + passing them to the model. The embeddings are then concatenated + back together before being returned. This is necessary when + passing large amounts of data (perhaps ~100 or more). + """ + allowed_models = ["ViT-B/32", "RN50"] + if model not in allowed_models: + raise ValueError(f"`model` must be in {allowed_models}. Got {model}.") + if model == "ViT-B/32": + self.embedding_size = 512 + elif model == "RN50": + self.embedding_size = 1024 + self.image_model, self.text_model = self._load_models(model, providers) + self._tokenizer = Tokenizer() + self._preprocessor = Preprocessor() + self._batch_size = batch_size @staticmethod def _load_models( model: str, - silent: bool, + providers: List[str], ) -> tuple[ort.InferenceSession, ort.InferenceSession]: """ - These models are a part of the container. Treat as as such. + Load models from cache directory. """ if model == "ViT-B/32": IMAGE_MODEL_FILE = "clip_image_model_vitb32.onnx" @@ -38,58 +67,92 @@ def _load_models( models = [] for model_file in [IMAGE_MODEL_FILE, TEXT_MODEL_FILE]: path = os.path.join(MODEL_CACHE_DIR, "clip", model_file) - models.append(Clip._load_model(path, silent)) + models.append(Clip._load_model(path, providers)) return models[0], models[1] @staticmethod - def _load_model(path: str, silent: bool): - providers = ["CPUExecutionProvider"] - - try: - if os.path.exists(path): - return ort.InferenceSession(path, providers=providers) - else: - raise FileNotFoundError( - errno.ENOENT, - os.strerror(errno.ENOENT), - path, - ) - except Exception: - s3_url = f"https://lakera-clip.s3.eu-west-1.amazonaws.com/{os.path.basename(path)}" - if not silent: - logging.info( - f"The model file ({path}) doesn't exist " - f"or it is invalid. Downloading it from the public S3 " - f"bucket: {s3_url}." 
# noqa: E501 - ) - - # Download from S3 - # Saving to a temporary file first to avoid corrupting the file - temporary_filename = Path(path).with_name(os.path.basename(path) + ".part") - - # Create any missing directories in the path - temporary_filename.parent.mkdir(parents=True, exist_ok=True) - - with requests.get(s3_url, stream=True) as r: - r.raise_for_status() - with open(temporary_filename, "wb") as f: - for chunk in r.iter_content(chunk_size=8192): - f.write(chunk) - f.flush() - # Finally move the temporary file to the correct location - temporary_filename.rename(path) + def _load_model(path: str, providers: List[str]): + if os.path.exists(path): return ort.InferenceSession(path, providers=providers) + else: + logger.warning(f"CLIP model file {path} not found.") + return None class ClipEmbedding: """Embedding function for CLIP model.""" - def __init__(self, model: str = "ViT-B/32"): - """Initialize CLIP Embedding function.""" - self.model = Clip(model) + def __init__( + self, + model: str = "ViT-B/32", + silent: bool = False, + preferred_providers: List[str] = ["CPUExecutionProvider"], + ): + self.model_name = model + self.silent = silent + self.preferred_providers = preferred_providers + self.model_files = self._get_model_files() + self.model = None + + self.downloader = ModelDownloader( + model_name="clip", + download_path=os.path.join(MODEL_CACHE_DIR, "clip"), + file_names=self.model_files, + download_func=self._download_model, + silent=self.silent, + ) + self.downloader.ensure_model_files() + + def _get_model_files(self): + if self.model_name == "ViT-B/32": + return ["clip_image_model_vitb32.onnx", "clip_text_model_vitb32.onnx"] + elif self.model_name == "RN50": + return ["clip_image_model_rn50.onnx", "clip_text_model_rn50.onnx"] + else: + raise ValueError( + f"Unexpected model {self.model_name}. No `.onnx` file found." + ) + + def _download_model(self, path: str): + s3_url = ( + f"https://lakera-clip.s3.eu-west-1.amazonaws.com/{os.path.basename(path)}" + ) + try: + ModelDownloader.download_from_url(s3_url, path, self.silent) + self.downloader.requestor.send_data( + UPDATE_MODEL_STATE, + { + "model": f"{self.model_name}-{os.path.basename(path)}", + "state": ModelStatusTypesEnum.downloaded, + }, + ) + except Exception: + self.downloader.requestor.send_data( + UPDATE_MODEL_STATE, + { + "model": f"{self.model_name}-{os.path.basename(path)}", + "state": ModelStatusTypesEnum.error, + }, + ) + + def _load_model(self): + if self.model is None: + self.downloader.wait_for_download() + self.model = Clip(self.model_name, providers=self.preferred_providers) def __call__(self, input: Union[List[str], List[Image.Image]]) -> List[np.ndarray]: + self._load_model() + if ( + self.model is None + or self.model.image_model is None + or self.model.text_model is None + ): + logger.info( + "CLIP model is not fully loaded. Please wait for the download to complete." 
+ ) + return [] + embeddings = [] for item in input: if isinstance(item, Image.Image): diff --git a/frigate/embeddings/functions/minilm_l6_v2.py b/frigate/embeddings/functions/minilm_l6_v2.py index 6a0e2d5efb..a3a8b45b3a 100644 --- a/frigate/embeddings/functions/minilm_l6_v2.py +++ b/frigate/embeddings/functions/minilm_l6_v2.py @@ -1,21 +1,20 @@ -"""Embedding function for ONNX MiniLM-L6 model.""" - -import errno import logging import os -from pathlib import Path from typing import List import numpy as np import onnxruntime as ort -import requests # importing this without pytorch or others causes a warning # https://github.com/huggingface/transformers/issues/27214 # suppressed by setting env TRANSFORMERS_NO_ADVISORY_WARNINGS=1 from transformers import AutoTokenizer -from frigate.const import MODEL_CACHE_DIR +from frigate.const import MODEL_CACHE_DIR, UPDATE_MODEL_STATE +from frigate.types import ModelStatusTypesEnum +from frigate.util.downloader import ModelDownloader + +logger = logging.getLogger(__name__) class MiniLMEmbedding: @@ -26,86 +25,83 @@ class MiniLMEmbedding: IMAGE_MODEL_FILE = "model.onnx" TOKENIZER_FILE = "tokenizer" - def __init__(self, preferred_providers=None): - """Initialize MiniLM Embedding function.""" - self.tokenizer = self._load_tokenizer() - - model_path = os.path.join(self.DOWNLOAD_PATH, self.IMAGE_MODEL_FILE) - if not os.path.exists(model_path): - self._download_model() + def __init__(self, preferred_providers=["CPUExecutionProvider"]): + self.preferred_providers = preferred_providers + self.tokenizer = None + self.session = None - if preferred_providers is None: - preferred_providers = ["CPUExecutionProvider"] - - self.session = self._load_model(model_path) + self.downloader = ModelDownloader( + model_name=self.MODEL_NAME, + download_path=self.DOWNLOAD_PATH, + file_names=[self.IMAGE_MODEL_FILE, self.TOKENIZER_FILE], + download_func=self._download_model, + ) + self.downloader.ensure_model_files() + + def _download_model(self, path: str): + try: + if os.path.basename(path) == self.IMAGE_MODEL_FILE: + s3_url = f"https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2/resolve/main/onnx/{self.IMAGE_MODEL_FILE}" + ModelDownloader.download_from_url(s3_url, path) + elif os.path.basename(path) == self.TOKENIZER_FILE: + logger.info("Downloading MiniLM tokenizer") + tokenizer = AutoTokenizer.from_pretrained( + self.MODEL_NAME, clean_up_tokenization_spaces=False + ) + tokenizer.save_pretrained(path) + + self.downloader.requestor.send_data( + UPDATE_MODEL_STATE, + { + "model": f"{self.MODEL_NAME}-{os.path.basename(path)}", + "state": ModelStatusTypesEnum.downloaded, + }, + ) + except Exception: + self.downloader.requestor.send_data( + UPDATE_MODEL_STATE, + { + "model": f"{self.MODEL_NAME}-{os.path.basename(path)}", + "state": ModelStatusTypesEnum.error, + }, + ) + + def _load_model_and_tokenizer(self): + if self.tokenizer is None or self.session is None: + self.downloader.wait_for_download() + self.tokenizer = self._load_tokenizer() + self.session = self._load_model( + os.path.join(self.DOWNLOAD_PATH, self.IMAGE_MODEL_FILE), + self.preferred_providers, + ) def _load_tokenizer(self): - """Load the tokenizer from the local path or download it if not available.""" tokenizer_path = os.path.join(self.DOWNLOAD_PATH, self.TOKENIZER_FILE) - if os.path.exists(tokenizer_path): - return AutoTokenizer.from_pretrained(tokenizer_path) - else: - return AutoTokenizer.from_pretrained(self.MODEL_NAME) - - def _download_model(self): - """Download the ONNX model and tokenizer from a 
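With both embedders lazy-loading behind the downloader, call sites stay trivial and simply get empty lists back until the model files are on disk, per the guards above. A usage sketch (the image path and query text are illustrative):

```python
from PIL import Image

from frigate.embeddings.functions.clip import ClipEmbedding
from frigate.embeddings.functions.minilm_l6_v2 import MiniLMEmbedding

clip = ClipEmbedding(preferred_providers=["CPUExecutionProvider"])
minilm = MiniLMEmbedding(preferred_providers=["CPUExecutionProvider"])

# image -> 512-d CLIP vector, text -> 384-d MiniLM vector
thumb_vecs = clip([Image.open("/tmp/thumb.jpg").convert("RGB")])
desc_vecs = minilm(["a person walking across the driveway"])
```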
remote source if they don't exist.""" - logging.info(f"Downloading {self.MODEL_NAME} ONNX model and tokenizer...") - - # Download the tokenizer - tokenizer = AutoTokenizer.from_pretrained(self.MODEL_NAME) - os.makedirs(self.DOWNLOAD_PATH, exist_ok=True) - tokenizer.save_pretrained(os.path.join(self.DOWNLOAD_PATH, self.TOKENIZER_FILE)) - - # Download the ONNX model - s3_url = f"https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2/resolve/main/onnx/{self.IMAGE_MODEL_FILE}" - model_path = os.path.join(self.DOWNLOAD_PATH, self.IMAGE_MODEL_FILE) - self._download_from_url(s3_url, model_path) - - logging.info(f"Model and tokenizer saved to {self.DOWNLOAD_PATH}") - - def _download_from_url(self, url: str, save_path: str): - """Download a file from a URL and save it to a specified path.""" - temporary_filename = Path(save_path).with_name( - os.path.basename(save_path) + ".part" + return AutoTokenizer.from_pretrained( + tokenizer_path, clean_up_tokenization_spaces=False ) - temporary_filename.parent.mkdir(parents=True, exist_ok=True) - with requests.get(url, stream=True, allow_redirects=True) as r: - # if the content type is HTML, it's not the actual model file - if "text/html" in r.headers.get("Content-Type", ""): - raise ValueError( - f"Expected an ONNX file but received HTML from the URL: {url}" - ) - - # Ensure the download is successful - r.raise_for_status() - - # Write the model to a temporary file first - with open(temporary_filename, "wb") as f: - for chunk in r.iter_content(chunk_size=8192): - f.write(chunk) - temporary_filename.rename(save_path) - - def _load_model(self, path: str): - """Load the ONNX model from a given path.""" - providers = ["CPUExecutionProvider"] + def _load_model(self, path: str, providers: List[str]): if os.path.exists(path): return ort.InferenceSession(path, providers=providers) else: - raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), path) + logger.warning(f"MiniLM model file {path} not found.") + return None def __call__(self, texts: List[str]) -> List[np.ndarray]: - """Generate embeddings for the given texts.""" + self._load_model_and_tokenizer() + + if self.session is None or self.tokenizer is None: + logger.error("MiniLM model or tokenizer is not loaded.") + return [] + inputs = self.tokenizer( texts, padding=True, truncation=True, return_tensors="np" ) - input_names = [input.name for input in self.session.get_inputs()] onnx_inputs = {name: inputs[name] for name in input_names if name in inputs} - # Run inference outputs = self.session.run(None, onnx_inputs) - embeddings = outputs[0].mean(axis=1) return [embedding for embedding in embeddings] diff --git a/frigate/types.py b/frigate/types.py index 21f55e5023..3e6ad46ccf 100644 --- a/frigate/types.py +++ b/frigate/types.py @@ -1,3 +1,4 @@ +from enum import Enum from typing import TypedDict from frigate.camera import CameraMetrics @@ -11,3 +12,10 @@ class StatsTrackingTypes(TypedDict): latest_frigate_version: str last_updated: int processes: dict[str, int] + + +class ModelStatusTypesEnum(str, Enum): + not_downloaded = "not_downloaded" + downloading = "downloading" + downloaded = "downloaded" + error = "error" diff --git a/frigate/util/downloader.py b/frigate/util/downloader.py new file mode 100644 index 0000000000..65b45425a8 --- /dev/null +++ b/frigate/util/downloader.py @@ -0,0 +1,119 @@ +import logging +import os +import threading +import time +from pathlib import Path +from typing import Callable, List + +import requests + +from frigate.comms.inter_process import InterProcessRequestor 
+from frigate.const import UPDATE_MODEL_STATE +from frigate.types import ModelStatusTypesEnum + +logger = logging.getLogger(__name__) + + +class FileLock: + def __init__(self, path): + self.path = path + self.lock_file = f"{path}.lock" + + def acquire(self): + parent_dir = os.path.dirname(self.lock_file) + os.makedirs(parent_dir, exist_ok=True) + + while True: + try: + with open(self.lock_file, "x"): + return + except FileExistsError: + time.sleep(0.1) + + def release(self): + try: + os.remove(self.lock_file) + except FileNotFoundError: + pass + + +class ModelDownloader: + def __init__( + self, + model_name: str, + download_path: str, + file_names: List[str], + download_func: Callable[[str], None], + silent: bool = False, + ): + self.model_name = model_name + self.download_path = download_path + self.file_names = file_names + self.download_func = download_func + self.silent = silent + self.requestor = InterProcessRequestor() + self.download_thread = None + self.download_complete = threading.Event() + + def ensure_model_files(self): + for file in self.file_names: + self.requestor.send_data( + UPDATE_MODEL_STATE, + { + "model": f"{self.model_name}-{file}", + "state": ModelStatusTypesEnum.downloading, + }, + ) + self.download_thread = threading.Thread(target=self._download_models) + self.download_thread.start() + + def _download_models(self): + for file_name in self.file_names: + path = os.path.join(self.download_path, file_name) + lock = FileLock(path) + + if not os.path.exists(path): + lock.acquire() + try: + if not os.path.exists(path): + self.download_func(path) + finally: + lock.release() + + self.requestor.send_data( + UPDATE_MODEL_STATE, + { + "model": f"{self.model_name}-{file_name}", + "state": ModelStatusTypesEnum.downloaded, + }, + ) + + self.download_complete.set() + + @staticmethod + def download_from_url(url: str, save_path: str, silent: bool = False): + temporary_filename = Path(save_path).with_name( + os.path.basename(save_path) + ".part" + ) + temporary_filename.parent.mkdir(parents=True, exist_ok=True) + + if not silent: + logger.info(f"Downloading model file from: {url}") + + try: + with requests.get(url, stream=True, allow_redirects=True) as r: + r.raise_for_status() + with open(temporary_filename, "wb") as f: + for chunk in r.iter_content(chunk_size=8192): + f.write(chunk) + + temporary_filename.rename(save_path) + except Exception as e: + logger.error(f"Error downloading model: {str(e)}") + raise + + if not silent: + logger.info(f"Downloading complete: {url}") + + def wait_for_download(self): + self.download_complete.wait() diff --git a/web/src/api/ws.tsx b/web/src/api/ws.tsx index 79d2bd3b4b..a78722b66a 100644 --- a/web/src/api/ws.tsx +++ b/web/src/api/ws.tsx @@ -5,6 +5,7 @@ import { FrigateCameraState, FrigateEvent, FrigateReview, + ModelState, ToggleableSetting, } from "@/types/ws"; import { FrigateStats } from "@/types/stats"; @@ -266,6 +267,41 @@ export function useInitialCameraState( return { payload: data ? 
data[camera] : undefined }; } +export function useModelState( + model: string, + revalidateOnFocus: boolean = true, +): { payload: ModelState } { + const { + value: { payload }, + send: sendCommand, + } = useWs("model_state", "modelState"); + + const data = useDeepMemo(JSON.parse(payload as string)); + + useEffect(() => { + let listener = undefined; + if (revalidateOnFocus) { + sendCommand("modelState"); + listener = () => { + if (document.visibilityState == "visible") { + sendCommand("modelState"); + } + }; + addEventListener("visibilitychange", listener); + } + + return () => { + if (listener) { + removeEventListener("visibilitychange", listener); + } + }; + // we know that these deps are correct + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [revalidateOnFocus]); + + return { payload: data ? data[model] : undefined }; +} + export function useMotionActivity(camera: string): { payload: string } { const { value: { payload }, diff --git a/web/src/pages/Explore.tsx b/web/src/pages/Explore.tsx index 4af6e1f194..7fc0f92862 100644 --- a/web/src/pages/Explore.tsx +++ b/web/src/pages/Explore.tsx @@ -1,11 +1,15 @@ -import { useEventUpdate } from "@/api/ws"; +import { useEventUpdate, useModelState } from "@/api/ws"; +import ActivityIndicator from "@/components/indicators/activity-indicator"; import { useApiFilterArgs } from "@/hooks/use-api-filter"; import { useTimezone } from "@/hooks/use-date-utils"; import { FrigateConfig } from "@/types/frigateConfig"; import { SearchFilter, SearchQuery, SearchResult } from "@/types/search"; +import { ModelState } from "@/types/ws"; import SearchView from "@/views/search/SearchView"; import { useCallback, useEffect, useMemo, useState } from "react"; +import { LuCheck, LuExternalLink, LuX } from "react-icons/lu"; import { TbExclamationCircle } from "react-icons/tb"; +import { Link } from "react-router-dom"; import useSWR from "swr"; import useSWRInfinite from "swr/infinite"; @@ -111,14 +115,10 @@ export default function Explore() { // paging - // usually slow only on first run while downloading models - const [isSlowLoading, setIsSlowLoading] = useState(false); - const getKey = ( pageIndex: number, previousPageData: SearchResult[] | null, ): SearchQuery => { - if (isSlowLoading && !similaritySearch) return null; if (previousPageData && !previousPageData.length) return null; // reached the end if (!searchQuery) return null; @@ -143,12 +143,6 @@ export default function Explore() { revalidateFirstPage: true, revalidateOnFocus: true, revalidateAll: false, - onLoadingSlow: () => { - if (!similaritySearch) { - setIsSlowLoading(true); - } - }, - loadingTimeout: 15000, }); const searchResults = useMemo( @@ -188,17 +182,113 @@ export default function Explore() { // eslint-disable-next-line react-hooks/exhaustive-deps }, [eventUpdate]); + // model states + + const { payload: minilmModelState } = useModelState( + "sentence-transformers/all-MiniLM-L6-v2-model.onnx", + ); + const { payload: minilmTokenizerState } = useModelState( + "sentence-transformers/all-MiniLM-L6-v2-tokenizer", + ); + const { payload: clipImageModelState } = useModelState( + "clip-clip_image_model_vitb32.onnx", + ); + const { payload: clipTextModelState } = useModelState( + "clip-clip_text_model_vitb32.onnx", + ); + + const allModelsLoaded = useMemo(() => { + return ( + minilmModelState === "downloaded" && + minilmTokenizerState === "downloaded" && + clipImageModelState === "downloaded" && + clipTextModelState === "downloaded" + ); + }, [ + minilmModelState, + minilmTokenizerState, + 
+    clipImageModelState,
+    clipTextModelState,
+  ]);
+
+  const renderModelStateIcon = (modelState: ModelState) => {
+    if (modelState === "downloading") {
+      return <ActivityIndicator />;
+    }
+    if (modelState === "downloaded") {
+      return <LuCheck />;
+    }
+    if (modelState === "not_downloaded" || modelState === "error") {
+      return <LuX />;
+    }
+    return null;
+  };
+
+  if (
+    !minilmModelState ||
+    !minilmTokenizerState ||
+    !clipImageModelState ||
+    !clipTextModelState
+  ) {
+    return (
+      <ActivityIndicator />
+    );
+  }
+
   return (
     <>
-      {isSlowLoading && !similaritySearch ? (
+      {!allModelsLoaded ? (
-        <div>
-          <TbExclamationCircle />
-          <div>Search Unavailable</div>
-          <div>
-            If this is your first time using Search, be patient while Frigate
-            downloads the necessary embeddings models. Check Frigate logs.
-          </div>
-        </div>
+        <div>
+          <div>
+            <TbExclamationCircle />
+            <div>Search Unavailable</div>
+          </div>
+          <div>
+            Frigate is downloading the necessary embeddings models to support
+            semantic searching. This may take several minutes depending on the
+            speed of your network connection.
+          </div>
+          <div>
+            <div>
+              {renderModelStateIcon(clipImageModelState)}
+              CLIP image model
+            </div>
+            <div>
+              {renderModelStateIcon(clipTextModelState)}
+              CLIP text model
+            </div>
+            <div>
+              {renderModelStateIcon(minilmModelState)}
+              MiniLM sentence model
+            </div>
+            <div>
+              {renderModelStateIcon(minilmTokenizerState)}
+              MiniLM tokenizer
+            </div>
+          </div>
+          {(minilmModelState === "error" ||
+            clipImageModelState === "error" ||
+            clipTextModelState === "error") && (
+            <div>
+              An error has occurred. Check Frigate logs.
+            </div>
+          )}
+          <div>
+            You may want to reindex the embeddings of your tracked objects
+            once the models are downloaded.
+          </div>
+          <Link to="https://docs.frigate.video/configuration/semantic_search">
+            Read the documentation{" "}
+            <LuExternalLink />
+          </Link>
+        </div>
) : ( diff --git a/web/src/types/ws.ts b/web/src/types/ws.ts index 0fae44b07b..a8211d269c 100644 --- a/web/src/types/ws.ts +++ b/web/src/types/ws.ts @@ -56,4 +56,10 @@ export interface FrigateCameraState { objects: ObjectType[]; } +export type ModelState = + | "not_downloaded" + | "downloading" + | "downloaded" + | "error"; + export type ToggleableSetting = "ON" | "OFF"; From 96795200f70ac5d1a83b0b4411fe0db2a7f8547b Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 7 Oct 2024 08:18:51 -0500 Subject: [PATCH 37/56] daemon downloading thread --- frigate/util/downloader.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/frigate/util/downloader.py b/frigate/util/downloader.py index 65b45425a8..6c7dc423b7 100644 --- a/frigate/util/downloader.py +++ b/frigate/util/downloader.py @@ -64,7 +64,11 @@ def ensure_model_files(self): "state": ModelStatusTypesEnum.downloading, }, ) - self.download_thread = threading.Thread(target=self._download_models) + self.download_thread = threading.Thread( + target=self._download_models, + name=f"_download_model_{self.model_name}", + daemon=True, + ).start() self.download_thread.start() def _download_models(self): From d7df5c9068fafac03dec6813c1bf565b16d416e5 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 7 Oct 2024 08:41:39 -0500 Subject: [PATCH 38/56] catch case when semantic search is disabled --- frigate/app.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/frigate/app.py b/frigate/app.py index 65272f2be2..3c430f7d34 100644 --- a/frigate/app.py +++ b/frigate/app.py @@ -279,6 +279,8 @@ def init_embeddings_client(self) -> None: if self.config.semantic_search.enabled: # Create a client for other processes to use self.embeddings = EmbeddingsContext(self.db) + else: + self.embeddings = None def init_external_event_processor(self) -> None: self.external_event_processor = ExternalEventProcessor(self.config) From 41d1f0463356b3711b203a2b786da3d317850471 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 7 Oct 2024 09:23:20 -0500 Subject: [PATCH 39/56] fix typing --- docker/main/build_sqlite_vec.sh | 17 +++++++++++++++++ frigate/app.py | 3 +-- 2 files changed, 18 insertions(+), 2 deletions(-) create mode 100644 docker/main/build_sqlite_vec.sh diff --git a/docker/main/build_sqlite_vec.sh b/docker/main/build_sqlite_vec.sh new file mode 100644 index 0000000000..d9a872a078 --- /dev/null +++ b/docker/main/build_sqlite_vec.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +set -euxo pipefail + +SQLITE_VEC_VERSION="0.1.3" + +mkdir /tmp/sqlite_vec +# Grab the sqlite_vec source code. 
+wget -nv https://github.com/asg017/sqlite-vec/archive/refs/tags/v${SQLITE_VEC_VERSION}.tar.gz +tar -zxf v${SQLITE_VEC_VERSION}.tar.gz -C /tmp/sqlite_vec + +cd /tmp/sqlite_vec/sqlite-vec-${SQLITE_VEC_VERSION} +# build loadable module +make loadable + +# install it + diff --git a/frigate/app.py b/frigate/app.py index 3c430f7d34..ac35de0a07 100644 --- a/frigate/app.py +++ b/frigate/app.py @@ -93,6 +93,7 @@ def __init__(self, config: Any) -> None: self.processes: dict[str, int] = {} self.region_grids: dict[str, list[list[dict[str, int]]]] = {} self.config: FrigateConfig = config + self.embeddings: Optional[EmbeddingsContext] = None def ensure_dirs(self) -> None: for d in [ @@ -279,8 +280,6 @@ def init_embeddings_client(self) -> None: if self.config.semantic_search.enabled: # Create a client for other processes to use self.embeddings = EmbeddingsContext(self.db) - else: - self.embeddings = None def init_external_event_processor(self) -> None: self.external_event_processor = ExternalEventProcessor(self.config) From d5f6a14f210cc46a61f1810b4c1ad9aef87e4b5c Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 7 Oct 2024 09:25:22 -0500 Subject: [PATCH 40/56] build sqlite_vec from source --- docker/main/Dockerfile | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docker/main/Dockerfile b/docker/main/Dockerfile index ace7e7d065..a5e42aec91 100644 --- a/docker/main/Dockerfile +++ b/docker/main/Dockerfile @@ -167,6 +167,10 @@ RUN pip3 install -r /requirements.txt COPY docker/main/build_pysqlite3.sh /build_pysqlite3.sh RUN /build_pysqlite3.sh +# Build sqlite_vec from source +COPY docker/main/build_sqlite_vec.sh /build_sqlite_vec.sh +RUN /build_sqlite_vec.sh + COPY docker/main/requirements-wheels.txt /requirements-wheels.txt RUN pip3 wheel --wheel-dir=/wheels -r /requirements-wheels.txt From 4391e9cc26b65a7b88d1af492724cd836c1b5bcb Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 7 Oct 2024 09:30:49 -0500 Subject: [PATCH 41/56] resolve conflict --- frigate/app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frigate/app.py b/frigate/app.py index ac35de0a07..6b0e499499 100644 --- a/frigate/app.py +++ b/frigate/app.py @@ -91,9 +91,9 @@ def __init__(self, config: Any) -> None: self.camera_metrics: dict[str, CameraMetrics] = {} self.ptz_metrics: dict[str, PTZMetrics] = {} self.processes: dict[str, int] = {} + self.embeddings: Optional[EmbeddingsContext] = None self.region_grids: dict[str, list[list[dict[str, int]]]] = {} self.config: FrigateConfig = config - self.embeddings: Optional[EmbeddingsContext] = None def ensure_dirs(self) -> None: for d in [ From 8f9f25f20667e55c20c575247db1b21cbfeebaef Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 7 Oct 2024 09:37:46 -0500 Subject: [PATCH 42/56] file permissions --- docker/main/build_sqlite_vec.sh | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 docker/main/build_sqlite_vec.sh diff --git a/docker/main/build_sqlite_vec.sh b/docker/main/build_sqlite_vec.sh old mode 100644 new mode 100755 From afac9788cf20483d0f720f72dc17c27461c8e72a Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 7 Oct 2024 11:09:39 -0500 Subject: [PATCH 43/56] try build deps --- docker/main/build_sqlite_vec.sh | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/docker/main/build_sqlite_vec.sh b/docker/main/build_sqlite_vec.sh 
index d9a872a078..3f100330e3 100755 --- a/docker/main/build_sqlite_vec.sh +++ b/docker/main/build_sqlite_vec.sh @@ -4,6 +4,12 @@ set -euxo pipefail SQLITE_VEC_VERSION="0.1.3" +cp /etc/apt/sources.list /etc/apt/sources.list.d/sources-src.list +sed -i 's|deb http|deb-src http|g' /etc/apt/sources.list.d/sources-src.list +apt-get update + +apt-get -yqq build-dep sqlite3 gettext + mkdir /tmp/sqlite_vec # Grab the sqlite_vec source code. wget -nv https://github.com/asg017/sqlite-vec/archive/refs/tags/v${SQLITE_VEC_VERSION}.tar.gz @@ -14,4 +20,5 @@ cd /tmp/sqlite_vec/sqlite-vec-${SQLITE_VEC_VERSION} make loadable # install it +cp dist/vec0 /usr/local/lib From 3f289d587bddd0346998d25537f0d4e55cf8ad86 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 7 Oct 2024 11:14:38 -0500 Subject: [PATCH 44/56] remove sources --- docker/main/build_sqlite_vec.sh | 3 --- 1 file changed, 3 deletions(-) diff --git a/docker/main/build_sqlite_vec.sh b/docker/main/build_sqlite_vec.sh index 3f100330e3..289e953654 100755 --- a/docker/main/build_sqlite_vec.sh +++ b/docker/main/build_sqlite_vec.sh @@ -4,10 +4,7 @@ set -euxo pipefail SQLITE_VEC_VERSION="0.1.3" -cp /etc/apt/sources.list /etc/apt/sources.list.d/sources-src.list -sed -i 's|deb http|deb-src http|g' /etc/apt/sources.list.d/sources-src.list apt-get update - apt-get -yqq build-dep sqlite3 gettext mkdir /tmp/sqlite_vec From ce80aeba7820c8006ecb1cb47d1835ffe4c18fb2 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 7 Oct 2024 11:25:44 -0500 Subject: [PATCH 45/56] sources --- docker/main/Dockerfile | 11 +++++++---- docker/main/build_sqlite_vec.sh | 2 ++ 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/docker/main/Dockerfile b/docker/main/Dockerfile index a5e42aec91..b446662c61 100644 --- a/docker/main/Dockerfile +++ b/docker/main/Dockerfile @@ -30,6 +30,13 @@ RUN --mount=type=tmpfs,target=/tmp --mount=type=tmpfs,target=/var/cache/apt \ --mount=type=cache,target=/root/.ccache \ /deps/build_nginx.sh +# Build sqlite_vec from source +COPY docker/main/build_sqlite_vec.sh /deps/build_sqlite_vec.sh +RUN --mount=type=tmpfs,target=/tmp --mount=type=tmpfs,target=/var/cache/apt \ + --mount=type=bind,source=docker/main/build_sqlite_vec.sh,target=/deps/build_sqlite_vec.sh \ + --mount=type=cache,target=/root/.ccache \ + /deps/build_sqlite_vec.sh + FROM scratch AS go2rtc ARG TARGETARCH WORKDIR /rootfs/usr/local/go2rtc/bin @@ -167,10 +174,6 @@ RUN pip3 install -r /requirements.txt COPY docker/main/build_pysqlite3.sh /build_pysqlite3.sh RUN /build_pysqlite3.sh -# Build sqlite_vec from source -COPY docker/main/build_sqlite_vec.sh /build_sqlite_vec.sh -RUN /build_sqlite_vec.sh - COPY docker/main/requirements-wheels.txt /requirements-wheels.txt RUN pip3 wheel --wheel-dir=/wheels -r /requirements-wheels.txt diff --git a/docker/main/build_sqlite_vec.sh b/docker/main/build_sqlite_vec.sh index 289e953654..cf1a38ede3 100755 --- a/docker/main/build_sqlite_vec.sh +++ b/docker/main/build_sqlite_vec.sh @@ -4,6 +4,8 @@ set -euxo pipefail SQLITE_VEC_VERSION="0.1.3" +cp /etc/apt/sources.list /etc/apt/sources.list.d/sources-src.list +sed -i 's|deb http|deb-src http|g' /etc/apt/sources.list.d/sources-src.list apt-get update apt-get -yqq build-dep sqlite3 gettext From e34f94851f6ce74ef61ab3820cd3d9f5d6bd5ac3 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 7 Oct 2024 11:29:29 -0500 Subject: [PATCH 46/56] fix thread start --- 
frigate/util/downloader.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frigate/util/downloader.py b/frigate/util/downloader.py index 6c7dc423b7..642dc7c8f0 100644 --- a/frigate/util/downloader.py +++ b/frigate/util/downloader.py @@ -68,7 +68,7 @@ def ensure_model_files(self): target=self._download_models, name=f"_download_model_{self.model_name}", daemon=True, - ).start() + ) self.download_thread.start() def _download_models(self): From 28d48dc60c452670c78f9c5ed99d2f6fd4ce83c1 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 7 Oct 2024 11:31:50 -0500 Subject: [PATCH 47/56] include git in build --- docker/main/build_sqlite_vec.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker/main/build_sqlite_vec.sh b/docker/main/build_sqlite_vec.sh index cf1a38ede3..eb7d89522a 100755 --- a/docker/main/build_sqlite_vec.sh +++ b/docker/main/build_sqlite_vec.sh @@ -7,7 +7,7 @@ SQLITE_VEC_VERSION="0.1.3" cp /etc/apt/sources.list /etc/apt/sources.list.d/sources-src.list sed -i 's|deb http|deb-src http|g' /etc/apt/sources.list.d/sources-src.list apt-get update -apt-get -yqq build-dep sqlite3 gettext +apt-get -yqq build-dep sqlite3 gettext git mkdir /tmp/sqlite_vec # Grab the sqlite_vec source code. @@ -19,5 +19,5 @@ cd /tmp/sqlite_vec/sqlite-vec-${SQLITE_VEC_VERSION} make loadable # install it -cp dist/vec0 /usr/local/lib +cp dist/vec0.so /usr/local/lib From 2eaf16035e73a52d3240d5c1b6957fa624aaf705 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 7 Oct 2024 11:52:15 -0500 Subject: [PATCH 48/56] reorder embeddings after detectors are started --- frigate/app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frigate/app.py b/frigate/app.py index 6b0e499499..2da6d62586 100644 --- a/frigate/app.py +++ b/frigate/app.py @@ -596,9 +596,9 @@ def start(self) -> None: self.check_db_data_migrations() self.init_inter_process_communicator() self.init_dispatcher() + self.start_detectors() self.init_embeddings_manager() self.init_embeddings_client() - self.start_detectors() self.start_video_output_processor() self.start_ptz_autotracker() self.init_historical_regions() From f0893665ce0d906acd31a1232c5851cbecf6705e Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 7 Oct 2024 11:54:17 -0500 Subject: [PATCH 49/56] build with sqlite amalgamation --- docker/main/build_sqlite_vec.sh | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/docker/main/build_sqlite_vec.sh b/docker/main/build_sqlite_vec.sh index eb7d89522a..fa3272ad6c 100755 --- a/docker/main/build_sqlite_vec.sh +++ b/docker/main/build_sqlite_vec.sh @@ -15,9 +15,18 @@ wget -nv https://github.com/asg017/sqlite-vec/archive/refs/tags/v${SQLITE_VEC_VE tar -zxf v${SQLITE_VEC_VERSION}.tar.gz -C /tmp/sqlite_vec cd /tmp/sqlite_vec/sqlite-vec-${SQLITE_VEC_VERSION} + +mkdir -p vendor +curl -o sqlite-amalgamation.zip https://www.sqlite.org/2024/sqlite-amalgamation-3450300.zip +unzip -d +unzip sqlite-amalgamation.zip +mv sqlite-amalgamation-3450300/* vendor/ +rmdir sqlite-amalgamation-3450300 +rm sqlite-amalgamation.zip + # build loadable module make loadable # install it -cp dist/vec0.so /usr/local/lib +cp dist/vec.o /usr/local/lib From f5f28202f0b4eeed12599cf3b21789ff2b98cc6a Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 7 Oct 2024 11:55:14 -0500 Subject: [PATCH 50/56] 
non-platform specific --- docker/main/build_sqlite_vec.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/main/build_sqlite_vec.sh b/docker/main/build_sqlite_vec.sh index fa3272ad6c..518ca885f4 100755 --- a/docker/main/build_sqlite_vec.sh +++ b/docker/main/build_sqlite_vec.sh @@ -28,5 +28,5 @@ rm sqlite-amalgamation.zip make loadable # install it -cp dist/vec.o /usr/local/lib +cp dist/vec0.* /usr/local/lib From 8b6bd30bba60528dc583c2df8a37e6f37fc3b16f Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 7 Oct 2024 12:01:30 -0500 Subject: [PATCH 51/56] use wget instead of curl --- docker/main/build_sqlite_vec.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/main/build_sqlite_vec.sh b/docker/main/build_sqlite_vec.sh index 518ca885f4..c93ec9920a 100755 --- a/docker/main/build_sqlite_vec.sh +++ b/docker/main/build_sqlite_vec.sh @@ -17,7 +17,7 @@ tar -zxf v${SQLITE_VEC_VERSION}.tar.gz -C /tmp/sqlite_vec cd /tmp/sqlite_vec/sqlite-vec-${SQLITE_VEC_VERSION} mkdir -p vendor -curl -o sqlite-amalgamation.zip https://www.sqlite.org/2024/sqlite-amalgamation-3450300.zip +wget -O sqlite-amalgamation.zip https://www.sqlite.org/2024/sqlite-amalgamation-3450300.zip unzip -d unzip sqlite-amalgamation.zip mv sqlite-amalgamation-3450300/* vendor/ From 2e72f6f8cdf428f3649bcd087ac0c351aca8d2ff Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 7 Oct 2024 12:05:36 -0500 Subject: [PATCH 52/56] remove unzip -d --- docker/main/build_sqlite_vec.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/docker/main/build_sqlite_vec.sh b/docker/main/build_sqlite_vec.sh index c93ec9920a..3dc28bcbf7 100755 --- a/docker/main/build_sqlite_vec.sh +++ b/docker/main/build_sqlite_vec.sh @@ -18,7 +18,6 @@ cd /tmp/sqlite_vec/sqlite-vec-${SQLITE_VEC_VERSION} mkdir -p vendor wget -O sqlite-amalgamation.zip https://www.sqlite.org/2024/sqlite-amalgamation-3450300.zip -unzip -d unzip sqlite-amalgamation.zip mv sqlite-amalgamation-3450300/* vendor/ rmdir sqlite-amalgamation-3450300 From dbdbbc02713ea516dbf8ea241d4bd17808c50dce Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 7 Oct 2024 12:15:55 -0500 Subject: [PATCH 53/56] remove sqlite_vec from requirements and load the compiled version --- docker/main/requirements-wheels.txt | 1 - frigate/db/sqlitevecq.py | 6 ++++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/docker/main/requirements-wheels.txt b/docker/main/requirements-wheels.txt index 4db0a935a9..5db2e8886f 100644 --- a/docker/main/requirements-wheels.txt +++ b/docker/main/requirements-wheels.txt @@ -33,7 +33,6 @@ unidecode == 1.3.* # OpenVino (ONNX installed in wheels-post) openvino == 2024.3.* # Embeddings -sqlite_vec == 0.1.3 transformers == 4.45.* onnx_clip == 4.0.* # Generative AI diff --git a/frigate/db/sqlitevecq.py b/frigate/db/sqlitevecq.py index 26d7e921c7..8f5b3dae71 100644 --- a/frigate/db/sqlitevecq.py +++ b/frigate/db/sqlitevecq.py @@ -1,6 +1,5 @@ import sqlite3 -import sqlite_vec from playhouse.sqliteq import SqliteQueueDatabase @@ -9,6 +8,9 @@ def __init__(self, *args, load_vec_extension: bool = False, **kwargs) -> None: super().__init__(*args, **kwargs) self.load_vec_extension: bool = load_vec_extension + # no extension necessary, sqlite will load correctly for each platform + self.sqlite_vec_path = "/usr/local/lib/vec0" + def _connect(self, *args, **kwargs) -> sqlite3.Connection: conn: 
sqlite3.Connection = super()._connect(*args, **kwargs) if self.load_vec_extension: @@ -17,5 +19,5 @@ def _connect(self, *args, **kwargs) -> sqlite3.Connection: def _load_vec_extension(self, conn: sqlite3.Connection) -> None: conn.enable_load_extension(True) - sqlite_vec.load(conn) + conn.load_extension(self.sqlite_vec_path) conn.enable_load_extension(False) From 78a5075cd0196e280e0d6ea9023c83b82c339b87 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 7 Oct 2024 13:44:35 -0500 Subject: [PATCH 54/56] fix build --- docker/main/Dockerfile | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docker/main/Dockerfile b/docker/main/Dockerfile index b446662c61..128159d5f2 100644 --- a/docker/main/Dockerfile +++ b/docker/main/Dockerfile @@ -30,6 +30,9 @@ RUN --mount=type=tmpfs,target=/tmp --mount=type=tmpfs,target=/var/cache/apt \ --mount=type=cache,target=/root/.ccache \ /deps/build_nginx.sh +FROM wget AS sqlite-vec +ARG DEBIAN_FRONTEND + # Build sqlite_vec from source COPY docker/main/build_sqlite_vec.sh /deps/build_sqlite_vec.sh RUN --mount=type=tmpfs,target=/tmp --mount=type=tmpfs,target=/var/cache/apt \ @@ -184,6 +187,7 @@ RUN pip3 wheel --no-deps --wheel-dir=/wheels-post -r /requirements-wheels-post.t # Collect deps in a single layer FROM scratch AS deps-rootfs COPY --from=nginx /usr/local/nginx/ /usr/local/nginx/ +COPY --from=sqlite-vec /usr/local/lib/ /usr/local/lib/ COPY --from=go2rtc /rootfs/ / COPY --from=libusb-build /usr/local/lib /usr/local/lib COPY --from=tempio /rootfs/ / From 5cda95f5bfaede037e87e3de5ae421e88e9fdafd Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 7 Oct 2024 14:18:56 -0500 Subject: [PATCH 55/56] avoid race in db connection --- frigate/db/sqlitevecq.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frigate/db/sqlitevecq.py b/frigate/db/sqlitevecq.py index 8f5b3dae71..51035ee4f2 100644 --- a/frigate/db/sqlitevecq.py +++ b/frigate/db/sqlitevecq.py @@ -5,8 +5,8 @@ class SqliteVecQueueDatabase(SqliteQueueDatabase): def __init__(self, *args, load_vec_extension: bool = False, **kwargs) -> None: - super().__init__(*args, **kwargs) self.load_vec_extension: bool = load_vec_extension + super().__init__(*args, **kwargs) # no extension necessary, sqlite will load correctly for each platform self.sqlite_vec_path = "/usr/local/lib/vec0" From e3a81db0bb67748b074159768dc92c9eb3d8e5e0 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 7 Oct 2024 15:20:45 -0500 Subject: [PATCH 56/56] add scale_factor and bias to description zscore normalization --- frigate/embeddings/__init__.py | 2 +- frigate/embeddings/functions/minilm_l6_v2.py | 4 +-- frigate/embeddings/util.py | 14 ++++++++--- web/src/views/search/SearchView.tsx | 26 +++----------------- 4 files changed, 17 insertions(+), 29 deletions(-) diff --git a/frigate/embeddings/__init__.py b/frigate/embeddings/__init__.py index 381d95ed19..aa7590994f 100644 --- a/frigate/embeddings/__init__.py +++ b/frigate/embeddings/__init__.py @@ -73,7 +73,7 @@ class EmbeddingsContext: def __init__(self, db: SqliteVecQueueDatabase): self.embeddings = Embeddings(db) self.thumb_stats = ZScoreNormalization() - self.desc_stats = ZScoreNormalization() + self.desc_stats = ZScoreNormalization(scale_factor=2.5, bias=0.5) # load stats from disk try: diff --git a/frigate/embeddings/functions/minilm_l6_v2.py b/frigate/embeddings/functions/minilm_l6_v2.py index a3a8b45b3a..5245edcdc9 100644 
--- a/frigate/embeddings/functions/minilm_l6_v2.py
+++ b/frigate/embeddings/functions/minilm_l6_v2.py
@@ -46,7 +46,7 @@ def _download_model(self, path: str):
             elif os.path.basename(path) == self.TOKENIZER_FILE:
                 logger.info("Downloading MiniLM tokenizer")
                 tokenizer = AutoTokenizer.from_pretrained(
-                    self.MODEL_NAME, clean_up_tokenization_spaces=False
+                    self.MODEL_NAME, clean_up_tokenization_spaces=True
                 )
                 tokenizer.save_pretrained(path)
 
@@ -78,7 +78,7 @@ def _load_model_and_tokenizer(self):
     def _load_tokenizer(self):
         tokenizer_path = os.path.join(self.DOWNLOAD_PATH, self.TOKENIZER_FILE)
         return AutoTokenizer.from_pretrained(
-            tokenizer_path, clean_up_tokenization_spaces=False
+            tokenizer_path, clean_up_tokenization_spaces=True
         )
 
     def _load_model(self, path: str, providers: List[str]):
diff --git a/frigate/embeddings/util.py b/frigate/embeddings/util.py
index 7550716c93..0b2acd4d67 100644
--- a/frigate/embeddings/util.py
+++ b/frigate/embeddings/util.py
@@ -4,12 +4,15 @@
 
 
 class ZScoreNormalization:
-    """Running Z-score normalization for search distance."""
-
-    def __init__(self):
+    def __init__(self, scale_factor: float = 1.0, bias: float = 0.0):
+        """Initialize with optional scaling and bias adjustments.
+        scale_factor adjusts the magnitude of each score;
+        bias artificially shifts the entire distribution upwards."""
         self.n = 0
         self.mean = 0
         self.m2 = 0
+        self.scale_factor = scale_factor
+        self.bias = bias
 
     @property
     def variance(self):
@@ -23,7 +26,10 @@
     def normalize(self, distances: list[float]):
         self._update(distances)
         if self.stddev == 0:
             return distances
-        return [(x - self.mean) / self.stddev for x in distances]
+        return [
+            (x - self.mean) / self.stddev * self.scale_factor + self.bias
+            for x in distances
+        ]
 
     def _update(self, distances: list[float]):
         for x in distances:
diff --git a/web/src/views/search/SearchView.tsx b/web/src/views/search/SearchView.tsx
index 7e77c20b8d..27090bb825 100644
--- a/web/src/views/search/SearchView.tsx
+++ b/web/src/views/search/SearchView.tsx
@@ -189,19 +189,9 @@ export default function SearchView({
 
   // confidence score - probably needs tweaking
 
-  const zScoreToConfidence = (score: number, source: string) => {
-    let midpoint, scale;
-
-    if (source === "thumbnail") {
-      midpoint = 2;
-      scale = 0.5;
-    } else {
-      midpoint = 0.5;
-      scale = 1.5;
-    }
-
+  const zScoreToConfidence = (score: number) => {
     // Sigmoid function: 1 / (1 + e^x)
-    const confidence = 1 / (1 + Math.exp((score - midpoint) * scale));
+    const confidence = 1 / (1 + Math.exp(score));
 
     return Math.round(confidence * 100);
   };
@@ -412,21 +402,13 @@
                        ) : (
                        )}
-                        {zScoreToConfidence(
-                          value.search_distance,
-                          value.search_source,
-                        )}
-                        %
+                        {zScoreToConfidence(value.search_distance)}%
                         Matched {value.search_source} at{" "}
-                        {zScoreToConfidence(
-                          value.search_distance,
-                          value.search_source,
-                        )}
-                        %
+                        {zScoreToConfidence(value.search_distance)}%
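For illustration, a minimal standalone sketch (not part of the patch series) of how the scaled z-score added in PATCH 56 combines with the sigmoid confidence mapping in SearchView.tsx. The running statistics used here (mean, stddev) are assumed example values, not Frigate's persisted stats:

    import math

    def normalize(distances, mean, stddev, scale_factor=2.5, bias=0.5):
        # Same formula as frigate/embeddings/util.py after PATCH 56:
        # (x - mean) / stddev * scale_factor + bias
        return [(x - mean) / stddev * scale_factor + bias for x in distances]

    def z_score_to_confidence(score):
        # Mirrors zScoreToConfidence in SearchView.tsx: 1 / (1 + e^score), as a percentage
        return round(100 / (1 + math.exp(score)))

    # Assumed stats over recent description distances: mean=0.8, stddev=0.1
    for s in normalize([0.65, 0.80, 0.95], mean=0.8, stddev=0.1):
        print(z_score_to_confidence(s))  # smaller distance -> higher confidence

Likewise, a quick sanity check, assuming the install path used by the build script, that the compiled sqlite-vec extension loads the same way SqliteVecQueueDatabase does in PATCH 53:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.enable_load_extension(True)
    conn.load_extension("/usr/local/lib/vec0")  # no file extension; sqlite resolves it per platform
    conn.enable_load_extension(False)
    print(conn.execute("SELECT vec_version()").fetchone())  # e.g. ('v0.1.3',)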