Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 7 additions & 3 deletions Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,12 +1,16 @@
[package]
name = "cllm"
version = "0.1.0"
version = "0.1.1"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
clap = { version = "4.5.1", features = ["derive"] }
tokio = { version = "1.36.0", features = ["full"] }
llm-chain = "0.12.0"
llm-chain-openai = "0.12.0"
llm-chain = "0.13.0"
llm-chain-openai = "0.13.0"
serde_json = "1.0"
dirs = "5.0"
spinners = "4.1.1"
cli-clipboard = "0.4.0"
83 changes: 81 additions & 2 deletions src/commands/search/mod.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,13 @@
use clap::Parser;

use std::env;
use spinners::{Spinner, Spinners};
use llm_chain::{
chains::conversation::Chain, executor, parameters, prompt, step::Step,
options::{ModelRef, OptionsBuilder},
prompt::{Conversation, ChatMessageCollection},
};
use llm_chain_openai::chatgpt::Model;
use cli_clipboard::{ClipboardContext, ClipboardProvider};
#[derive(Debug, Parser)]
#[clap(
name = "search",
Expand All @@ -11,7 +19,78 @@ pub struct Search {
qeury: String,
}

/// Builds a few-shot example prompt from `(user, assistant)` message pairs.
///
/// Each pair is appended in order to a fresh `Conversation`, so the model
/// sees alternating user/assistant turns demonstrating the expected output
/// format before the real query is sent.
///
/// Takes ownership of `list`; the strings are moved into the conversation
/// instead of being cloned per element (the old version borrowed the Vec
/// and paid a `to_string()` allocation for every string).
pub fn few_shot_template(list: Vec<(String, String)>) -> ChatMessageCollection<String> {
    let mut ret_prompt = Conversation::new();

    for (user, assistant) in list {
        ret_prompt = ret_prompt.with_user(user).with_assistant(assistant);
    }

    ret_prompt
}

/// Handles the `search` subcommand: asks the OpenAI chat model to generate a
/// shell command for the user's natural-language task, copies the result to
/// the clipboard, and prints it.
///
/// Exits early (successfully, with a hint) when `OPENAI_API_KEY` is unset.
///
/// # Errors
/// Returns an error if the executor cannot be created, the LLM request
/// fails, or the clipboard cannot be accessed.
pub async fn handle_search(search: Search) -> Result<(), Box<dyn std::error::Error>> {
    println!("Searching for: {}", search.qeury);

    // No key configured: point the user at `set key` instead of failing.
    if env::var("OPENAI_API_KEY").is_err() {
        println!("Please set your OpenAI API key using the `set key` command.");
        return Ok(());
    }

    let mut spinner = Spinner::new(Spinners::Dots9, "Searching for the command...".into());

    // Pin the model explicitly rather than relying on the executor default.
    let model = ModelRef::from_model_name(Model::Gpt35Turbo.to_string());

    let mut option_builder = OptionsBuilder::new();
    option_builder.add_option(llm_chain::options::Opt::Model(model));
    let options = option_builder.build();

    let exec = executor!(chatgpt, options)?;

    // Few-shot examples teaching the model to answer with a bare command.
    // (Old third example used `docker build --path`, which is not a valid
    // docker flag and never pushed — it taught the model broken commands.)
    let few_shot_examples: Vec<(String, String)> = vec![
        ("Show all pods in k8s".to_string(), "kubectl get pods".to_string()),
        ("Find all files recursively within the current directory that contain 'a' in their filenames.".to_string(), "find . -type f -name '*a*' -print".to_string()),
        ("Provide the command to build and push a Docker image from the current directory.".to_string(), "docker build -t myapp:latest . && docker push myapp:latest".to_string()),
    ];

    let mut conversation = Conversation::new()
        .with_system_template(
            "I want you to act as generating a command for request tasks on {{os_name}}. Also please don't explain the commands, just generate the command.",
            &parameters!{"os_name" => env::consts::OS}
        ).unwrap();

    conversation.append(few_shot_template(few_shot_examples));

    let conversation = conversation
        .with_system("Only generate the command, don't explain it".to_string());

    let mut chain = Chain::new_with_message_collection(&conversation);

    let step = Step::for_prompt_template(prompt!(user: "task : {{query}}"));
    let parameters = parameters!().with("query", search.qeury);
    let res = chain.send_message(step, &parameters, &exec).await?;
    let res = res.to_immediate().await?.as_content().to_chat().to_string();

    // The transcript renders as "... Assistant: <command>"; keep only the
    // text after the first marker. `split_once` avoids the panic the old
    // `collect::<Vec<_>>()[1]` indexing caused when the marker was absent.
    let res = match res.split_once("Assistant: ") {
        Some((_, tail)) => tail.trim().to_string(),
        None => res.trim().to_string(),
    };

    // Propagate clipboard failures as errors instead of panicking.
    let mut ctx: ClipboardContext = ClipboardProvider::new()?;
    ctx.set_contents(res.clone())?;

    spinner.stop_and_persist("✔", "Finished searching for the command and copied to your clipboard :)".into());

    println!("{}", res);
    Ok(())
}
28 changes: 27 additions & 1 deletion src/commands/set/key/mod.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,8 @@
use clap::Parser;
use dirs;
use std::env;
use std::fs::File;
use std::io::prelude::*;

#[derive(Debug, Parser)]
#[clap(
Expand All @@ -12,6 +16,28 @@ pub struct Key {
}

/// Handles the `set key` subcommand: persists the OpenAI API key to
/// `~/.cllm/credentials.json` (preserving any other config entries) and
/// exports it into the current process environment so `search` works
/// immediately without a restart.
///
/// # Errors
/// Returns an error if the home directory cannot be determined, or the
/// config file cannot be read, parsed, or written.
pub async fn handle_key(key: Key) -> Result<(), Box<dyn std::error::Error>> {
    // Do NOT echo the key itself: it is a secret and the old
    // `println!("Setting API Key: {}", ...)` leaked it into terminal
    // scrollback and shell logs.
    println!("Setting API key...");

    // Error out cleanly instead of panicking when $HOME is unavailable.
    let home_dir = dirs::home_dir().ok_or("could not determine the home directory")?;
    let save_dir = home_dir.join(".cllm");
    let config_path = save_dir.join("credentials.json");

    if !save_dir.exists() {
        std::fs::create_dir_all(&save_dir)?;
    }

    // Load the existing config so other keys are preserved; start from an
    // empty JSON object on first run.
    let mut config: serde_json::Value = if config_path.exists() {
        serde_json::from_str(&std::fs::read_to_string(&config_path)?)?
    } else {
        serde_json::json!({})
    };

    config["OPEN_AI"] = key.api_key.clone().into();
    let config = serde_json::to_string_pretty(&config)?;
    File::create(&config_path)?.write_all(config.as_bytes())?;
    env::set_var("OPENAI_API_KEY", key.api_key);

    println!("API key set successfully.");
    Ok(())
}
22 changes: 21 additions & 1 deletion src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,19 +1,39 @@
pub(crate) mod commands;

use std::env;
use clap::Parser;
use commands::{Commands, handle_command};
use dirs;

/// Top-level CLI definition (library entry): clap's derive API parses the
/// binary's arguments into one of the `Commands` subcommand variants.
#[derive(Debug, Parser)]
#[clap(
version,
author,
about="Empower your CLI experience with a command search tool driven by LLM magic!\n\
Github: https://github.com/dev-backpack/cllm\n\
If you have any questions or suggestions, feel free to open an issue on the github repo."
)]
struct Cli {
// The subcommand to dispatch (e.g. `search`, `set key`).
#[clap(subcommand)]
pub commands: Commands,
}

pub async fn run() -> Result<(), Box<dyn std::error::Error>> {

// Set the OPENAI_API_KEY environment variable
let home_dir = dirs::home_dir().unwrap();
let save_dir = home_dir.join(".cllm");
let config_path = save_dir.join("credentials.json");

if config_path.exists() {
let config = std::fs::read_to_string(config_path).unwrap();
let config: serde_json::Value = serde_json::from_str(&config).unwrap();

if config["OPEN_AI"].is_string() {
let api_key = config["OPEN_AI"].as_str().unwrap();
env::set_var("OPENAI_API_KEY", api_key);
}
}

let cli: Cli = Cli::parse();

if let Err(_error) = handle_command(cli.commands).await {
Expand Down
24 changes: 22 additions & 2 deletions src/main.rs
Original file line number Diff line number Diff line change
@@ -1,21 +1,41 @@
pub mod commands;

use std::env;
use clap::Parser;
use commands::{Commands, handle_command};
use dirs;

/// Top-level CLI definition (binary entry): clap's derive API parses the
/// binary's arguments into one of the `Commands` subcommand variants.
#[derive(Debug, Parser)]
#[clap(
version,
author,
about="Empower your CLI experience with a command search tool driven by LLM magic!\n\
Github: https://github.com/dev-backpack/cllm\n\
If you have any questions or suggestions, feel free to open an issue on the github repo."
)]
struct Cli {
// The subcommand to dispatch (e.g. `search`, `set key`).
#[clap(subcommand)]
pub commands: Commands,
}


#[tokio::main]
async fn main() {

// Set the OPENAI_API_KEY environment variable
let home_dir = dirs::home_dir().unwrap();
let save_dir = home_dir.join(".cllm");
let config_path = save_dir.join("credentials.json");

if config_path.exists() {
let config = std::fs::read_to_string(config_path).unwrap();
let config: serde_json::Value = serde_json::from_str(&config).unwrap();

if config["OPEN_AI"].is_string() {
let api_key = config["OPEN_AI"].as_str().unwrap();
env::set_var("OPENAI_API_KEY", api_key);
}
}

// Parse the command line arguments
let cli = Cli::parse();

if let Err(_error) = handle_command(cli.commands).await {
Expand Down