feat: support `-e/--execute` to execute shell command (#318)

pull/319/head
sigoden committed 7638412128 (parent 6c0204e696)

@@ -47,6 +47,7 @@ Download it from [GitHub Releases](https://github.com/sigoden/aichat/releases),
- Gemini: gemini-pro/gemini-pro-vision/gemini-ultra
- LocalAI: opensource LLMs and other openai-compatible LLMs
- Ollama: opensource LLMs
- VertexAI: gemini-1.0-pro/gemini-1.0-pro-vision/gemini-1.0-ultra/gemini-1.0-ultra-vision
- Azure-OpenAI: user deployed gpt-3.5/gpt-4
- Ernie: ernie-bot-turbo/ernie-bot/ernie-bot-8k/ernie-bot-4
- Qianwen: qwen-turbo/qwen-plus/qwen-max/qwen-max-longcontext/qwen-vl-plus
@@ -57,6 +58,7 @@ Download it from [GitHub Releases](https://github.com/sigoden/aichat/releases),
- Support [Roles](#roles)
- Support context-aware conversation (session)
- Support multimodal models (vision)
- Support executing commands using natural language
- Syntax highlighting for markdown and 200+ languages in code blocks
- Stream output
- Support proxy
@@ -308,6 +310,7 @@ Options:
-m, --model <MODEL> Choose a LLM model
-r, --role <ROLE> Choose a role
-s, --session [<SESSION>] Create or reuse a session
-e, --execute Execute commands using natural language
-f, --file <FILE>... Attach files to the message to be sent
-H, --no-highlight Disable syntax highlighting
-S, --no-stream No stream output
@@ -352,10 +355,48 @@ aichat -r shell --info # Show role info
$(echo "$data" | aichat -S -H to json) # Use aichat in a script
```
### Execute commands using natural language
Simply describe what you want to do in natural language, and aichat will suggest a shell command and prompt you before running it.
```
aichat -e <text>...
```
![aichat-execute](https://github.com/sigoden/aichat/assets/4012553/9bc89a3f-c366-4f46-b4b8-94ac2e4213cb)
Aichat is aware of the OS and `$SHELL` you are using, and it will provide a shell command tailored to your specific system. For instance, if you ask `aichat` to update your system, it will return a command based on your OS. Here's an example using macOS:
```sh
aichat -e update my system
# sudo softwareupdate -i -a
# ? [e]xecute, [d]escribe, [a]bort: (e)
```
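Press `e` (the default) to run the command, `d` to get a short description of what it does, or `a` to abort.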
The same prompt, when used on Ubuntu, will generate a different suggestion:
```sh
aichat -e update my system
# sudo apt update && sudo apt upgrade -y
# ? [e]xecute, [d]escribe, [a]bort: (e)
```
We can also use pipes to pass input to aichat and generate shell commands:
```sh
aichat -e POST localhost with < data.json
# curl -X POST -H "Content-Type: application/json" -d '{"a": 1, "b": 2}' localhost
# ? [e]xecute, [d]escribe, [a]bort: (e)
```
We can also pipe the output of aichat, which disables the interactive prompt:
```sh
aichat -e find all json files in current folder | pbcopy
```
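To only preview the suggested command without being prompted, combine `-e` with the dry-run flag (a small sketch; the `--dry-run` spelling is inferred from the `cli.dry_run` field in this change):
```sh
aichat -e --dry-run find the largest file in this folder
# prints the suggested command and exits without the execute prompt
```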
## License
Copyright (c) 2023 aichat-developers.
aichat is made available under the terms of either the MIT License or the Apache License 2.0, at your option.
Aichat is made available under the terms of either the MIT License or the Apache License 2.0, at your option.
See the LICENSE-APACHE and LICENSE-MIT files for license details.

@@ -12,6 +12,9 @@ pub struct Cli {
/// Create or reuse a session
#[clap(short = 's', long)]
pub session: Option<Option<String>>,
/// Execute commands using natural language
#[clap(short = 'e', long)]
pub execute: bool,
/// Attach files to the message to be sent.
#[clap(short = 'f', long, num_args = 1.., value_name = "FILE")]
pub file: Option<Vec<String>>,
@@ -43,6 +46,7 @@ pub struct Cli {
#[clap(long)]
pub list_sessions: bool,
/// Input text
#[clap(trailing_var_arg = true)]
text: Vec<String>,
}

@@ -158,7 +158,7 @@ impl Config {
Ok(config)
}
pub fn onstart(&mut self) -> Result<()> {
pub fn prelude(&mut self) -> Result<()> {
let prelude = self.prelude.clone();
let err_msg = || format!("Invalid prelude '{}'", prelude);
match prelude.split_once(':') {
@@ -275,6 +275,20 @@ impl Config {
pub fn set_role(&mut self, name: &str) -> Result<()> {
let role = self.retrieve_role(name)?;
self.set_role_obj(role)
}
pub fn set_execute_role(&mut self) -> Result<()> {
let role = Role::for_execute();
self.set_role_obj(role)
}
pub fn set_describe_role(&mut self) -> Result<()> {
let role = Role::for_describe();
self.set_role_obj(role)
}
pub fn set_role_obj(&mut self, role: Role) -> Result<()> {
if let Some(session) = self.session.as_mut() {
session.update_role(Some(role.clone()))?;
}

@@ -1,4 +1,7 @@
use crate::client::{Message, MessageContent, MessageRole};
use crate::{
client::{Message, MessageContent, MessageRole},
utils::{detect_os, detect_shell},
};
use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
@@ -18,6 +21,39 @@ pub struct Role {
}
impl Role {
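    /// Built-in role that instructs the model to reply with a single plain-text shell command for the detected OS and shell.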
pub fn for_execute() -> Self {
let os = detect_os();
let shell = detect_shell();
let shell = match shell.rsplit_once('/') {
Some((_, v)) => v,
None => &shell,
};
Self {
name: "__builtin__".into(),
prompt: format!(
r#"Provide only {shell} commands for {os} without any description.
If there is a lack of details, provide most logical solution.
Ensure the output is a valid shell command.
If multiple steps required try to combine them together using &&.
Provide only plain text without Markdown formatting.
Do not provide markdown formatting such as ```"#
),
temperature: None,
}
}
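    /// Built-in role that asks the model for a terse Markdown description of a given shell command.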
pub fn for_describe() -> Self {
Self {
name: "__builtin__".into(),
prompt: r#"Provide a terse, single sentence description of the given shell command.
Describe each argument and option of the command.
Provide short responses in about 80 words.
APPLY MARKDOWN formatting when possible."#
.into(),
temperature: None,
}
}
pub fn info(&self) -> Result<String> {
let output = serde_yaml::to_string(&self)
.with_context(|| format!("Unable to show info about role {}", &self.name))?;

@@ -11,16 +11,20 @@ mod utils;
use crate::cli::Cli;
use crate::config::{Config, GlobalConfig};
use crate::utils::{prompt_op_err, run_command};
use anyhow::Result;
use anyhow::{bail, Result};
use clap::Parser;
use client::{ensure_model_capabilities, init_client, list_models};
use config::Input;
use inquire::validator::Validation;
use inquire::Text;
use is_terminal::IsTerminal;
use parking_lot::RwLock;
use render::{render_error, render_stream, MarkdownRender};
use repl::Repl;
use std::io::{stderr, stdin, stdout, Read};
use std::process;
use std::sync::Arc;
use utils::{cl100k_base_singleton, create_abort_signal};
@@ -56,13 +60,17 @@ fn main() -> Result<()> {
if cli.dry_run {
config.write().dry_run = true;
}
if let Some(name) = &cli.role {
config.write().set_role(name)?;
}
if let Some(session) = &cli.session {
config
.write()
.start_session(session.as_ref().map(|v| v.as_str()))?;
if cli.execute {
config.write().set_execute_role()?;
} else {
if let Some(name) = &cli.role {
config.write().set_role(name)?;
}
if let Some(session) = &cli.session {
config
.write()
.start_session(session.as_ref().map(|v| v.as_str()))?;
}
}
if let Some(model) = &cli.model {
config.write().set_model(model)?;
@@ -75,35 +83,27 @@ fn main() -> Result<()> {
println!("{}", info);
return Ok(());
}
config.write().onstart()?;
if let Err(err) = start(&config, text, cli.file, cli.no_stream) {
let text = aggregate_text(text)?;
if cli.execute {
match text {
Some(text) => {
execute(&config, &text)?;
return Ok(());
}
None => bail!("No input text"),
}
}
config.write().prelude()?;
if let Err(err) = match text {
Some(text) => start_directive(&config, &text, cli.file, cli.no_stream),
None => start_interactive(&config),
} {
let highlight = stderr().is_terminal() && config.read().highlight;
render_error(err, highlight)
}
Ok(())
}
fn start(
config: &GlobalConfig,
text: Option<String>,
include: Option<Vec<String>>,
no_stream: bool,
) -> Result<()> {
if stdin().is_terminal() {
match text {
Some(text) => start_directive(config, &text, include, no_stream),
None => start_interactive(config),
}
} else {
let mut input = String::new();
stdin().read_to_string(&mut input)?;
if let Some(text) = text {
input = format!("{text}\n{input}");
}
start_directive(config, &input, include, no_stream)
}
}
fn start_directive(
config: &GlobalConfig,
text: &str,
@@ -139,3 +139,73 @@ fn start_interactive(config: &GlobalConfig) -> Result<()> {
let mut repl: Repl = Repl::init(config)?;
repl.run()
}
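/// Send the natural-language request to the model, print the suggested command,
/// and let the user execute, describe, or abort it.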
fn execute(config: &GlobalConfig, text: &str) -> Result<()> {
let input = Input::from_str(text);
let client = init_client(config)?;
config.read().maybe_print_send_tokens(&input);
let eval_str = client.send_message(input.clone())?;
let render_options = config.read().get_render_options()?;
let mut markdown_render = MarkdownRender::init(render_options)?;
if config.read().dry_run {
println!("{}", markdown_render.render(&eval_str).trim());
return Ok(());
}
if stdout().is_terminal() {
println!("{}", markdown_render.render(&eval_str).trim());
let mut describe = false;
loop {
let anwser = Text::new("[e]xecute, [d]escribe, [a]bort: ") // variable name kept as in source
.with_default("e")
.with_validator(|input: &str| {
match matches!(input, "E" | "e" | "D" | "d" | "A" | "a") {
true => Ok(Validation::Valid),
false => Ok(Validation::Invalid(
"Invalid input, choice one of e, d or a".into(),
)),
}
})
.prompt()
.map_err(prompt_op_err)?;
match anwser.as_str() {
"E" | "e" => {
let code = run_command(&eval_str)?;
if code != 0 {
process::exit(code);
}
}
"D" | "d" => {
if !describe {
config.write().set_describe_role()?;
}
let input = Input::from_str(&eval_str);
let abort = create_abort_signal();
render_stream(&input, client.as_ref(), config, abort)?;
describe = true;
continue;
}
_ => {}
}
break;
}
} else {
println!("{}", eval_str);
}
Ok(())
}
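/// Merge command-line text with piped stdin: when stdin is not a terminal,
/// its contents are appended to any provided text.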
fn aggregate_text(text: Option<String>) -> Result<Option<String>> {
let text = if stdin().is_terminal() {
text
} else {
let mut stdin_text = String::new();
stdin().read_to_string(&mut stdin_text)?;
if let Some(text) = text {
Some(format!("{text}\n{stdin_text}"))
} else {
Some(stdin_text)
}
};
Ok(text)
}

@@ -11,6 +11,8 @@ pub use self::render_prompt::render_prompt;
pub use self::tiktoken::cl100k_base_singleton;
use sha2::{Digest, Sha256};
use std::env;
use std::process::Command;
pub fn now() -> String {
let now = chrono::Local::now();
@@ -87,6 +89,50 @@ pub fn sha256sum(input: &str) -> String {
format!("{:x}", result)
}
pub fn detect_os() -> String {
let os = env::consts::OS;
if os == "linux" {
if let Ok(contents) = std::fs::read_to_string("/etc/os-release") {
for line in contents.lines() {
if let Some(id) = line.strip_prefix("ID=") {
return format!("{os}/{id}");
}
}
}
}
os.to_string()
}
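/// Detect the user's shell. On Windows, a PSModulePath with three or more entries is taken
/// as a sign of PowerShell (otherwise cmd.exe); elsewhere, fall back to $SHELL or /bin/sh.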
pub fn detect_shell() -> String {
let os = env::consts::OS;
if os == "windows" {
if let Some(true) = env::var("PSModulePath")
.ok()
.map(|v| v.split(';').count() >= 3)
{
"powershell.exe".into()
} else {
"cmd.exe".into()
}
} else {
env::var("SHELL").unwrap_or_else(|_| "/bin/sh".to_string())
}
}
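/// Run the command string with the detected shell, passing it via the flag that shell
/// expects (-Command, /c, or -c), and return the exit code.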
pub fn run_command(eval_str: &str) -> anyhow::Result<i32> {
let shell = detect_shell();
let mut command = Command::new(&shell);
if shell == "powershell.exe" {
command.arg("-Command").arg(eval_str);
} else if shell == "cmd.exe" {
command.arg("/c").arg(eval_str);
} else {
command.arg("-c").arg(eval_str);
};
let status = command.status()?;
Ok(status.code().unwrap_or_default())
}
#[cfg(test)]
mod tests {
use super::*;
