Merge pull request #227 from Kiln-AI/logger_staging
Merge Logger work into Main
scosman authored Feb 26, 2025
2 parents d78563e + 1610915 commit 64c580c
Showing 12 changed files with 143 additions and 31 deletions.
7 changes: 5 additions & 2 deletions app/desktop/custom_tray.py
@@ -1,8 +1,11 @@
 import io
+import logging
 import sys
 
 import pystray
 
+logger = logging.getLogger(__name__)
+
 
 class KilnTray(pystray.Icon):
     # Special handling for Mac to support dark/light mode and retina icons
@@ -33,7 +36,7 @@ def _assert_image(self):
             self._icon_image.setTemplate_(True)
             # set the logical size of the image, which will be scaled for retina
             self._icon_image.setSize_(logical_size)
-        except Exception as e:
+        except Exception:
             # Continue, this shouldn't be fatal
-            print("Mac Tray Error", e)
+            logger.error("Mac Tray Error", exc_info=True)
         self._status_item.button().setImage_(self._icon_image)  # type: ignore
3 changes: 2 additions & 1 deletion app/desktop/desktop_server.py
@@ -8,6 +8,7 @@
 import uvicorn
 from fastapi import FastAPI
 
+from app.desktop.log_config import log_config
 from app.desktop.studio_server.data_gen_api import connect_data_gen_api
 from app.desktop.studio_server.finetune_api import connect_fine_tune_api
 from app.desktop.studio_server.prompt_api import connect_prompt_api
@@ -46,8 +47,8 @@ def server_config(port=8757):
         make_app(),
         host="127.0.0.1",
         port=port,
-        log_level="warning",
         use_colors=False,
+        log_config=log_config(),
     )


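For context on the change above: uvicorn's Config accepts a full logging dictConfig through its log_config parameter, which is why the coarse log_level="warning" flag can be dropped. A minimal sketch of the resulting wiring, outside the diff (the bare FastAPI app stands in for the real make_app()):

import uvicorn
from fastapi import FastAPI

from app.desktop.log_config import log_config

app = FastAPI()  # placeholder for make_app()

config = uvicorn.Config(
    app,
    host="127.0.0.1",
    port=8757,
    use_colors=False,
    log_config=log_config(),  # dictConfig dict replaces log_level="warning"
)
server = uvicorn.Server(config)
# server.run() would start the app with the shared logging config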
101 changes: 101 additions & 0 deletions app/desktop/log_config.py
@@ -0,0 +1,101 @@
import os
from enum import Enum
from typing import List

from kiln_ai.utils.config import Config


class LogDestination(Enum):
CONSOLE = "console"
FILE = "file"
ALL = "all"


def get_log_level() -> str:
return os.getenv("KILN_LOG_LEVEL", "WARNING")


def get_log_file_path() -> str:
"""Get the path to the log file, using environment override if specified.
Returns:
str: The path to the log file
"""
log_path_default = os.path.join(Config.settings_dir(), "logs", "kiln_desktop.log")
log_path = os.getenv("KILN_LOG_FILE", log_path_default)

# Ensure the log directory exists
os.makedirs(os.path.dirname(log_path), exist_ok=True)
return log_path


def get_max_file_bytes() -> int:
"""
The maximum number of bytes to write to the log file.
When the file reaches this size, it will be rotated.
"""
default_max_bytes = 20971520 # 20MB
return int(os.getenv("KILN_LOG_MAX_BYTES", default_max_bytes))


def get_max_backup_count() -> int:
"""
The number of backup files to keep in the log directory.
Past that, the oldest files are deleted.
"""
default_backup_count = 3
return int(os.getenv("KILN_LOG_BACKUP_COUNT", default_backup_count))


def get_default_formatter() -> str:
return "%(asctime)s.%(msecs)03d - %(levelname)s - %(name)s - %(message)s"


def get_handlers() -> List[str]:
destination = os.getenv("KILN_LOG_DESTINATION", "all")
handlers = {
LogDestination.FILE: ["logfile"],
LogDestination.CONSOLE: ["logconsole"],
LogDestination.ALL: ["logfile", "logconsole"],
}
return handlers[LogDestination(destination)]


def log_config():
return {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
# uvicorn expects a "default" formatter
"default": {
"format": get_default_formatter(),
},
# uvicorn expects an "access" formatter
"access": {
"format": get_default_formatter(),
},
"logformatter": {
"format": get_default_formatter(),
},
"console": {
"format": "%(levelname)s: %(message)s",
},
},
"handlers": {
"logfile": {
"class": "logging.handlers.RotatingFileHandler",
"level": get_log_level(),
"formatter": "logformatter",
"filename": get_log_file_path(),
"mode": "a",
"maxBytes": get_max_file_bytes(),
"backupCount": get_max_backup_count(),
},
"logconsole": {
"class": "logging.StreamHandler",
"level": get_log_level(),
"formatter": "console",
},
},
"root": {"level": get_log_level(), "handlers": get_handlers()},
}
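The dict returned above follows the standard logging.config.dictConfig schema (version 1), so the same configuration can be applied outside uvicorn, and each knob is overridable through the KILN_LOG_* environment variables read when log_config() is called. A minimal usage sketch, not part of the diff (values illustrative):

import logging.config
import os

# Overrides must be in place before log_config() runs
os.environ["KILN_LOG_LEVEL"] = "DEBUG"
os.environ["KILN_LOG_DESTINATION"] = "file"  # "console", "file", or "all"

from app.desktop.log_config import log_config

logging.config.dictConfig(log_config())
logging.getLogger(__name__).debug("written to the rotating kiln_desktop.log")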
24 changes: 14 additions & 10 deletions app/desktop/studio_server/provider_api.py
@@ -1,3 +1,4 @@
+import logging
 import os
 from dataclasses import dataclass
 from datetime import datetime, timedelta
@@ -19,16 +20,15 @@
     ollama_base_url,
     parse_ollama_tags,
 )
-from kiln_ai.adapters.provider_tools import (
-    provider_name_from_id,
-    provider_warnings,
-)
+from kiln_ai.adapters.provider_tools import provider_name_from_id, provider_warnings
 from kiln_ai.datamodel.registry import all_projects
 from kiln_ai.utils.config import Config
 from kiln_ai.utils.exhaustive_error import raise_exhaustive_enum_error
 from langchain_aws import ChatBedrockConverse
 from pydantic import BaseModel, Field
 
+logger = logging.getLogger(__name__)
+
 
 async def connect_ollama(custom_ollama_url: str | None = None) -> OllamaConnection:
     # Tags is a list of Ollama models. Proves Ollama is running, and models are available.
@@ -598,9 +598,9 @@ def custom_models() -> AvailableModels | None:
                     untested_model=True,
                 )
             )
-        except Exception as e:
+        except Exception:
             # Continue on to the rest
-            print(f"Error processing custom model {model_id}: {e}")
+            logger.error("Error processing custom model %s", model_id, exc_info=True)
 
     return AvailableModels(
         provider_name="Custom Models",
@@ -698,11 +698,13 @@ def openai_compatible_providers_load_cache() -> OpenAICompatibleProviderCache |
             models: List[ModelDetails] = []
             base_url = provider.get("base_url")
             if not base_url or not base_url.startswith("http"):
-                print(f"No base URL for OpenAI compatible provider {provider} - {base_url}")
+                logger.warning(
+                    "No base URL for OpenAI compatible provider %s - %s", provider, base_url
+                )
                 continue
             name = provider.get("name")
             if not name:
-                print(f"No name for OpenAI compatible provider {provider}")
+                logger.warning("No name for OpenAI compatible provider %s", provider)
                 continue
 
             # API key is optional, as some providers don't require it
@@ -736,8 +738,10 @@ def openai_compatible_providers_load_cache() -> OpenAICompatibleProviderCache |
                     models=models,
                 )
             )
-        except Exception as e:
-            print(f"Error connecting to OpenAI compatible provider {name}: {e}")
+        except Exception:
+            logger.error(
+                "Error connecting to OpenAI compatible provider %s", name, exc_info=True
+            )
             has_error = True
             continue
 
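The replacements above all follow one pattern, shown here in isolation as a sketch (the process() helper and the model id are hypothetical, not from this PR): %-style arguments defer string formatting until a handler actually emits the record, and exc_info=True attaches the active traceback, so the except clause no longer needs to bind the exception as e.

import logging

logger = logging.getLogger(__name__)

def process(model_id: str) -> None:  # hypothetical helper
    try:
        raise ValueError("bad model config")  # stand-in for real work
    except Exception:
        # Lazy %-formatting, plus the full traceback via exc_info
        logger.error("Error processing custom model %s", model_id, exc_info=True)

process("some-model-id")  # hypothetical id; logs the message and traceback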
2 changes: 1 addition & 1 deletion app/desktop/studio_server/test_provider_api.py
@@ -1463,4 +1463,4 @@ async def test_disconnect_api_key_unsupported_provider(client, provider_id):
     )
 
     assert response.status_code == 400
-    assert response.json() == {"message": f"Provider not supported"}
+    assert response.json() == {"message": "Provider not supported"}
@@ -1,4 +1,5 @@
 import json
+import logging
 import tempfile
 from pathlib import Path
 from unittest.mock import Mock
@@ -27,6 +28,8 @@
     TaskRun,
 )
 
+logger = logging.getLogger(__name__)
+
 
 @pytest.fixture
 def mock_task():
@@ -474,7 +477,7 @@ def test_generate_vertex_template_thinking():
 
     result = generate_vertex_gemini_1_5(training_data)
 
-    print(result)
+    logger.info(result)
 
     assert result == {
         "systemInstruction": {
@@ -324,11 +324,6 @@ async def test_langchain_adapter_model_no_structured_output_support(tmp_path):
 
 import pytest
 
-from kiln_ai.adapters.ml_model_list import KilnModelProvider, ModelProviderName
-from kiln_ai.adapters.model_adapters.langchain_adapters import (
-    langchain_model_from_provider,
-)
-
 
 @pytest.mark.parametrize(
     "provider_name",
1 change: 0 additions & 1 deletion libs/core/kiln_ai/adapters/ollama_tools.py
@@ -1,4 +1,3 @@
-import os
 from typing import Any, List
 
 import httpx
12 changes: 6 additions & 6 deletions libs/core/kiln_ai/adapters/test_generate_docs.py
@@ -1,13 +1,13 @@
+import logging
 from typing import List
 
 import pytest
 
-from libs.core.kiln_ai.adapters.ml_model_list import (
-    KilnModelProvider,
-    built_in_models,
-)
+from libs.core.kiln_ai.adapters.ml_model_list import KilnModelProvider, built_in_models
 from libs.core.kiln_ai.adapters.provider_tools import provider_name_from_id
 
+logger = logging.getLogger(__name__)
+
 
 def _all_providers_support(providers: List[KilnModelProvider], attribute: str) -> bool:
     """Check if all providers support a given feature"""
@@ -58,8 +58,8 @@ def test_generate_model_table():
         table.append(row)
 
     # Print the table (useful for documentation)
-    print("\nModel Capability Matrix:\n")
-    print("\n".join(table))
+    logger.info("\nModel Capability Matrix:\n")
+    logger.info("\n".join(table))
 
     # Basic assertions to ensure the table is well-formed
     assert len(table) > 2, "Table should have header and at least one row"
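One behavioral note on moving test output from print to logger.info: pytest captures log records rather than echoing them, so the table only appears on failure or when live logging is enabled (for example, pytest -o log_cli=true --log-cli-level=INFO). A minimal sketch of asserting on such output with pytest's built-in caplog fixture, separate from this diff:

import logging

logger = logging.getLogger(__name__)

def test_logs_are_captured(caplog):
    with caplog.at_level(logging.INFO):
        logger.info("Model Capability Matrix")
    assert "Model Capability Matrix" in caplog.text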
1 change: 0 additions & 1 deletion libs/core/kiln_ai/adapters/test_ollama_tools.py
@@ -10,7 +10,6 @@
 def test_parse_ollama_tags_no_models():
     json_response = '{"models":[{"name":"scosman_net","model":"scosman_net:latest"},{"name":"phi3.5:latest","model":"phi3.5:latest","modified_at":"2024-10-02T12:04:35.191519822-04:00","size":2176178843,"digest":"61819fb370a3c1a9be6694869331e5f85f867a079e9271d66cb223acb81d04ba","details":{"parent_model":"","format":"gguf","family":"phi3","families":["phi3"],"parameter_size":"3.8B","quantization_level":"Q4_0"}},{"name":"gemma2:2b","model":"gemma2:2b","modified_at":"2024-09-09T16:46:38.64348929-04:00","size":1629518495,"digest":"8ccf136fdd5298f3ffe2d69862750ea7fb56555fa4d5b18c04e3fa4d82ee09d7","details":{"parent_model":"","format":"gguf","family":"gemma2","families":["gemma2"],"parameter_size":"2.6B","quantization_level":"Q4_0"}},{"name":"llama3.1:latest","model":"llama3.1:latest","modified_at":"2024-09-01T17:19:43.481523695-04:00","size":4661230720,"digest":"f66fc8dc39ea206e03ff6764fcc696b1b4dfb693f0b6ef751731dd4e6269046e","details":{"parent_model":"","format":"gguf","family":"llama","families":["llama"],"parameter_size":"8.0B","quantization_level":"Q4_0"}}]}'
     tags = json.loads(json_response)
-    print(json.dumps(tags, indent=2))
     conn = parse_ollama_tags(tags)
     assert "phi3.5:latest" in conn.supported_models
     assert "gemma2:2b" in conn.supported_models
6 changes: 4 additions & 2 deletions libs/core/kiln_ai/adapters/test_prompt_builders.py
@@ -1,4 +1,5 @@
 import json
+import logging
 
 import pytest
 
@@ -33,6 +34,8 @@
     TaskRun,
 )
 
+logger = logging.getLogger(__name__)
+
 
 def test_simple_prompt_builder(tmp_path):
     task = build_test_task(tmp_path)
@@ -269,7 +272,6 @@ def test_few_shot_prompt_builder(tmp_path):
                 rating=TaskOutputRating(value=4 + (i % 2), reason="Good joke"),
             ),
         )
-        print("RATING", "Joke Initial Output ", i + 1, " - RATED:", 4 + (i % 2), "\n")
         if i < 2:
             run = run.model_copy(
                 update={
@@ -290,7 +292,7 @@
     prompt = prompt_builder.build_prompt(include_json_instructions=False)
     assert prompt.count("## Example") == 4
 
-    print("PROMPT", prompt)
+    logger.info("PROMPT: %s", prompt)
     # Verify the order of examples (2 repaired, then 2 highest-rated)
     assert "Repaired Joke 1" in prompt
     assert "Repaired Joke 2" in prompt
7 changes: 6 additions & 1 deletion libs/core/kiln_ai/utils/config.py
@@ -142,10 +142,15 @@ def __setattr__(self, name, value):
             raise AttributeError(f"Config has no attribute '{name}'")
 
     @classmethod
-    def settings_path(cls, create=True):
+    def settings_dir(cls, create=True):
         settings_dir = os.path.join(Path.home(), ".kiln_ai")
         if create and not os.path.exists(settings_dir):
             os.makedirs(settings_dir)
+        return settings_dir
+
+    @classmethod
+    def settings_path(cls, create=True):
+        settings_dir = cls.settings_dir(create)
         return os.path.join(settings_dir, "settings.yaml")
 
     @classmethod
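The refactor above splits directory resolution out of settings_path() so other components can anchor files under the same per-user directory; the new app/desktop/log_config.py uses it for the log folder. A short usage sketch (paths shown assume the default home layout):

import os

from kiln_ai.utils.config import Config

settings_file = Config.settings_path()  # ~/.kiln_ai/settings.yaml
logs_dir = os.path.join(Config.settings_dir(), "logs")  # ~/.kiln_ai/logs
os.makedirs(logs_dir, exist_ok=True)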
