diff --git a/cllm/main.py b/cllm/main.py index de35d6b..d015ecd 100644 --- a/cllm/main.py +++ b/cllm/main.py @@ -6,9 +6,8 @@ import tomllib from langchain_openai import ChatOpenAI from langchain_core.prompts import (ChatPromptTemplate, FewShotChatMessagePromptTemplate) - +from rich.progress import Progress, SpinnerColumn, TextColumn from typing import Optional -from typing_extensions import Annotated # version of the CLI app = typer.Typer(help="Empower your CLI experience with a command search tool driven by LLM magic!", @@ -27,60 +26,66 @@ def version_callback(value: bool): @app.command() def search(query : str): """Search a command from the LLM model""" - home_dir = os.path.expanduser("~") - save_dir = os.path.join(home_dir, ".cllm") - - if not os.path.exists(save_dir): - typer.echo("Please set the API key first.") - raise typer.Exit(code=1) - - filepath = os.path.join(save_dir, "credentials.json") - - with open(filepath, "r", encoding="utf-8") as file: - data = json.load(file) - api_key = data["OPEN_AI"] + with Progress( + SpinnerColumn(), + TextColumn("[progress.description]{task.description}"), + transient=True, + ) as progress: + progress.add_task(description="Searching...", total=None) + home_dir = os.path.expanduser("~") + save_dir = os.path.join(home_dir, ".cllm") + + if not os.path.exists(save_dir): + typer.echo("Please set the API key first.") + raise typer.Exit(code=1) + + filepath = os.path.join(save_dir, "credentials.json") + + with open(filepath, "r", encoding="utf-8") as file: + data = json.load(file) + api_key = data["OPEN_AI"] - os.environ["OPENAI_API_KEY"] = api_key + os.environ["OPENAI_API_KEY"] = api_key - examples = [ - {"input" : "Show all pods in k8s", - "output" : "kubectl get pods"}, - {"input" : "Find all files recursively within the current directory that contain 'a' in their filenames.", - "output" : "find . 
-type f -name '*a*' -print"}, - {"input" : "Provide the command to build and push a Docker image from the current directory.", - "output" : "docker build -t myapp:latest . -—push"}, - ] - - example_prompt = ChatPromptTemplate.from_messages( - [ - ("human", "{input}"), - ("ai", "{output}"), + examples = [ + {"input" : "Show all pods in k8s", + "output" : "kubectl get pods"}, + {"input" : "Find all files recursively within the current directory that contain 'a' in their filenames.", + "output" : "find . -type f -name '*a*' -print"}, + {"input" : "Provide the command to build and push a Docker image from the current directory.", + "output" : "docker build -t myapp:latest . --push"}, ] - example_prompt = ChatPromptTemplate.from_messages( + [ + ("human", "{input}"), + ("ai", "{output}"), + ] + ) - chat_template = ChatPromptTemplate.from_messages( + [ + ("system", "I want you to act as generating a command for request tasks on {os_name}. 
\ + Also please don't explain the commands, just generate the command."), + FewShotChatMessagePromptTemplate(example_prompt=example_prompt, examples=examples).format(), + ("system", "Only generate the command, don't explain it."), + ("human", "task : {task}"), - prompt = chat_template.format_messages( - task = query, - os_name = platform.system() - ) + ] + ) - model = ChatOpenAI() - - output = model.invoke(prompt) + prompt = chat_template.format_messages( + task = query, + os_name = platform.system() + ) - if output.content[:4] == "AI: ": - output.content = output.content[4:] + model = ChatOpenAI() + output = model.invoke(prompt) + + if output.content[:4] == "AI: ": + output.content = output.content[4:] + typer.echo(output.content) @app.callback()