diff options
| author | Fuwn <[email protected]> | 2025-07-27 20:40:53 +0200 |
|---|---|---|
| committer | Fuwn <[email protected]> | 2025-07-27 20:40:53 +0200 |
| commit | 2e6e229eed6d72330ab2d535d389987f80c614a3 (patch) | |
| tree | c90683a70f566f61bd6af526cc7039dcce87da61 /src | |
| parent | feat: Initial commit (diff) | |
| download | umapyai-2e6e229eed6d72330ab2d535d389987f80c614a3.tar.xz umapyai-2e6e229eed6d72330ab2d535d389987f80c614a3.zip | |
feat: Pretty logging
Diffstat (limited to 'src')
| -rw-r--r-- | src/umapyai/__init__.py | 54 |
1 file changed, 35 insertions, 19 deletions
diff --git a/src/umapyai/__init__.py b/src/umapyai/__init__.py index 4898488..021807c 100644 --- a/src/umapyai/__init__.py +++ b/src/umapyai/__init__.py @@ -6,7 +6,18 @@ from sentence_transformers import SentenceTransformer import requests import subprocess import psutil -from .constants import ARTICLES_DIRECTORY, CHROMA_DIRECTORY, CHROMA_COLLECTION, CHUNK_SIZE, EMBEDDING_MODEL, OLLAMA_MODEL, TOP_K, OLLAMA_URL +from loguru import logger +from .constants import (ARTICLES_DIRECTORY, CHROMA_DIRECTORY, CHROMA_COLLECTION, + CHUNK_SIZE, EMBEDDING_MODEL, OLLAMA_MODEL, TOP_K, + OLLAMA_URL) + +logger.remove() +logger.add( + sys.stderr, + level="INFO", + colorize=True, + format="<green>{time:YYYY-MM-DD HH:mm:ss}</green> | <level>{level: <8}</level> | <cyan>{function}</cyan> | <level>{message}</level>" +) def is_ollama_live(): @@ -19,7 +30,7 @@ def is_ollama_live(): def start_ollama_server(): - print("Starting Ollama server with OLLAMA_ORIGINS='*' ...") + logger.info("Starting Ollama server with OLLAMA_ORIGINS='*' ...") environment = os.environ.copy() environment["OLLAMA_ORIGINS"] = "*" @@ -31,19 +42,19 @@ def start_ollama_server(): for _ in range(30): if is_ollama_live(): - print("Ollama is now live.") + logger.success("Ollama is now live.") return process time.sleep(1) - print("ERROR: Ollama server did not start after 30 seconds.") + logger.error("Ollama server did not start after 30 seconds.") process.terminate() sys.exit(1) def kill_ollama(process): - print("Killing Ollama ...") + logger.info("Killing Ollama ...") try: parent_process = psutil.Process(process.pid) @@ -53,7 +64,7 @@ def kill_ollama(process): parent_process.terminate() except Exception as error: - print("Error killing Ollama:", error) + logger.error(f"Error killing Ollama: {error}") def ensure_model_pulled(model): @@ -61,13 +72,13 @@ def ensure_model_pulled(model): tags = requests.get(f"{OLLAMA_URL}/api/tags").json().get("models", []) if not any(model in m.get("name", "") for m in tags): - print(f"Pulling model '{model}' ...") + logger.info(f"Pulling model '{model}' ...") subprocess.run(["ollama", "pull", model], check=True) else: - print(f"Model '{model}' already pulled.") + logger.success(f"Model '{model}' already pulled.") except Exception as e: - print("Couldn't check/pull Ollama model:", e) - print("Proceeding anyway ...") + logger.warning(f"Couldn't check/pull Ollama model: {e}") + logger.warning("Proceeding anyway ...") def main(): @@ -79,11 +90,11 @@ def main(): ollama_process = start_ollama_server() started_ollama = True else: - print("Ollama is already running.") + logger.info("Ollama is already running.") ensure_model_pulled(OLLAMA_MODEL) - print("Chunking articles ...") + logger.info("Chunking articles ...") chunks = [] @@ -102,23 +113,23 @@ def main(): if chunk.strip(): chunks.append({"source": file_name, "chunk": chunk}) - print(f"Total chunks: {len(chunks)}") - print("Generating embeddings ...") + logger.success(f"Total chunks: {len(chunks)}") + logger.info("Generating embeddings ...") model = SentenceTransformer(EMBEDDING_MODEL) for chunk in chunks: chunk["embedding"] = model.encode(chunk["chunk"]) - print("Storing embeddings in ChromaDB ...") + logger.info("Storing embeddings in ChromaDB ...") chroma_client = chromadb.PersistentClient(path=CHROMA_DIRECTORY) if CHROMA_COLLECTION in [ collection.name for collection in chroma_client.list_collections() ]: - print("Collection exists, deleting and recreating for fresh import ...") - + logger.warning( + "Collection exists, deleting and recreating for fresh import ...") chroma_client.delete_collection(CHROMA_COLLECTION) collection = chroma_client.get_or_create_collection(CHROMA_COLLECTION) @@ -159,7 +170,8 @@ def main(): except Exception as error: return f"Error communicating with Ollama: {error}" - print("\nReady! Ask your Uma Musume build questions (type 'exit' to quit):") + logger.success( + "Ready! Ask your Uma Musume build questions (type 'exit' to quit).") while True: user_query = input("\n> ") @@ -184,4 +196,8 @@ def main(): finally: if started_ollama and ollama_process is not None: kill_ollama(ollama_process) - print("Ollama server stopped.") + logger.info("Ollama server stopped.") + + +if __name__ == "__main__": + main()