Commit a0e771a (v3.10.1)
Default logging refactoring / improvements:
- make it possible to configure the logging output method and the request/response formatters separately (usage sketch below)
- fix a color reset bug (the terminal color is now reset after each logged prompt/response)
Nayjest committed Aug 4, 2024
1 parent edb2543 commit a0e771a
Showing 2 changed files with 38 additions and 19 deletions.
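
A minimal usage sketch of the new output hook (not part of this commit's diff; it assumes the microcore.logging module path and the LoggingConfig attributes introduced below, and routes the LLM request/response log lines to the standard logging module instead of print):

import logging

from microcore.logging import LoggingConfig, use_logging

logging.basicConfig(level=logging.INFO)
llm_logger = logging.getLogger("llm")

# OUTPUT_METHOD may be any callable that accepts a single string.
LoggingConfig.OUTPUT_METHOD = llm_logger.info
LoggingConfig.DENSE = True  # optional: more compact output without indentation

use_logging()  # enable LLM request/response logging as before
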
microcore/__init__.py (2 changes: 1 addition & 1 deletion)

@@ -144,4 +144,4 @@ def delete(self, collection: str, what: str | list[str] | dict):
     # "wrappers",
 ]

-__version__ = "3.10.0"
+__version__ = "3.10.1"
microcore/logging.py (55 changes: 37 additions & 18 deletions)

@@ -1,25 +1,19 @@
 import dataclasses
-from colorama import Fore, Style, init
+from colorama import Fore, init

 from .configuration import ApiType
 from ._env import env, config
 from ._prepare_llm_args import prepare_chat_messages, prepare_prompt
 from .utils import is_chat_model, is_notebook


-class LoggingConfig:
-    PROMPT_COLOR = Fore.LIGHTGREEN_EX
-    RESPONSE_COLOR = Fore.CYAN
-    INDENT: str = "\t"
-    DENSE: bool = False
-
-
-def _log_request(prompt, **kwargs):
+def _format_request_log_str(prompt, **kwargs) -> str:
     nl = "\n" if LoggingConfig.DENSE else "\n" + LoggingConfig.INDENT
     model = _resolve_model(**kwargs)
-    print(
-        f"{Fore.RESET}Requesting LLM {Fore.MAGENTA}{model}{Style.RESET_ALL}:",
-        end=" " if LoggingConfig.DENSE else "\n",
+    out = (
+        f"{LoggingConfig.COLOR_RESET}Requesting LLM "
+        f"{Fore.MAGENTA}{model}{LoggingConfig.COLOR_RESET}:"
+        + (" " if LoggingConfig.DENSE else "\n")
     )
     if is_chat_model(model, env().config):
         for msg in prepare_chat_messages(prompt):
@@ -32,17 +26,20 @@ def _log_request(prompt, **kwargs):
             content = (" " if LoggingConfig.DENSE else nl2) + nl2.join(
                 content.split("\n")
             )
-            print(
-                f'{"" if LoggingConfig.DENSE else LoggingConfig.INDENT}'
-                f"{LoggingConfig.PROMPT_COLOR}[{role.capitalize()}]:{content}"
+            out += (
+                f"{'' if LoggingConfig.DENSE else LoggingConfig.INDENT}"
+                f"{LoggingConfig.PROMPT_COLOR}[{role.capitalize()}]:"
+                f"{content}{LoggingConfig.COLOR_RESET}"
             )
     else:
         lines = prepare_prompt(prompt).split("\n")
-        print(
+        out = (
             LoggingConfig.PROMPT_COLOR
             + (" " if LoggingConfig.DENSE else LoggingConfig.INDENT)
             + nl.join(lines)
+            + LoggingConfig.COLOR_RESET
         )
+    return out


 def _resolve_model(**kwargs):
@@ -53,12 +50,34 @@ def _resolve_model(**kwargs):
     return model


-def _log_response(out):
+def _format_response_log_str(out) -> str:
     nl = "\n" if LoggingConfig.DENSE else "\n" + LoggingConfig.INDENT
     out_indented = (" " if LoggingConfig.DENSE else nl) + nl.join(
         (out or "").split("\n")
     )
-    print(f"{Fore.RESET}LLM Response:{LoggingConfig.RESPONSE_COLOR}{out_indented}")
+    return (
+        f"{LoggingConfig.COLOR_RESET}LLM Response:"
+        f"{LoggingConfig.RESPONSE_COLOR}{out_indented}{LoggingConfig.COLOR_RESET}"
+    )
+
+
+class LoggingConfig:
+    PROMPT_COLOR = Fore.LIGHTGREEN_EX
+    RESPONSE_COLOR = Fore.CYAN
+    COLOR_RESET = Fore.RESET
+    INDENT: str = "\t"
+    DENSE: bool = False
+    OUTPUT_METHOD: callable = print
+    REQUEST_FORMATTER: callable = _format_request_log_str
+    RESPONSE_FORMATTER: callable = _format_response_log_str
+
+
+def _log_request(prompt, **kwargs):
+    LoggingConfig.OUTPUT_METHOD(_format_request_log_str(prompt, **kwargs))
+
+
+def _log_response(out):
+    LoggingConfig.OUTPUT_METHOD(_format_response_log_str(out))
+
+
 def use_logging():
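
A hedged sketch for the formatter side of the first commit-message bullet: the new LoggingConfig exposes REQUEST_FORMATTER and RESPONSE_FORMATTER, so a project can in principle swap in its own formatting. Note that in the hunk above _log_request and _log_response still call the default formatters directly, so whether these attributes are consulted depends on code outside the visible diff; the function names below are illustrative only.

from microcore.logging import LoggingConfig


def plain_request_formatter(prompt, **kwargs) -> str:
    # Hypothetical formatter without ANSI colors, e.g. for file-based logs.
    return f"LLM request: {prompt!r}"


def plain_response_formatter(out) -> str:
    return f"LLM response: {out!r}"


# Matches the default formatter signatures shown in the diff above.
LoggingConfig.REQUEST_FORMATTER = plain_request_formatter
LoggingConfig.RESPONSE_FORMATTER = plain_response_formatter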
