Initial commit

master
Tymon 1 year ago
commit 1925a2dba7

3
.gitignore vendored

@ -0,0 +1,3 @@
/target
/history
config.ron

3250
Cargo.lock generated

File diff suppressed because it is too large Load Diff

@ -0,0 +1,18 @@
[package]
name = "aigui"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
anyhow = "1.0.70"
chrono = { version = "0.4.24", features = ["serde"] }
eframe = "0.21.3"
egui-notify = "0.6.0"
once_cell = "1.17.1"
reqwest = { version = "0.11.15", features = ["json"] }
ron = "0.8.0"
serde = { version = "1.0.158", features = ["derive"] }
serde_json = "1.0.94"
tokio = { version = "1.26.0", features = ["full"] }

@ -0,0 +1,22 @@
This software is licensed under the "Anyone But Richard M Stallman"
(ABRMS) license, described below. No other licenses may apply.
--------------------------------------------
The "Anyone But Richard M Stallman" license
--------------------------------------------
Do anything you want with this program, with the exceptions listed
below under "EXCEPTIONS".
THIS SOFTWARE IS PROVIDED "AS IS" WITH NO WARRANTY OF ANY KIND.
In the unlikely event that you happen to make a zillion bucks off of
this, then good for you; consider buying a homeless person a meal.
EXCEPTIONS
----------
Richard M Stallman (the guy behind GNU, etc.) may not make use of or
redistribute this program or any of its derivatives.

@ -0,0 +1,3 @@
# AI Gui
Crazy configurable AI gui made with egui/eframe for fun

@ -0,0 +1,8 @@
# TODO
## Features
- Add config editor
- Add custom fetch header support to AI config
## Important
- Make config file be in an actual config directory like ~/.config

@ -0,0 +1,27 @@
(
app: App(
// Path to directory to save history files in. If set to `None` then history is disabled.
history_dir: Some("./history")
),
ai: [
AI(
// Name of the AI shown to the UI
name: "Example",
// Generation endpoint
url: "https://example.com/generate",
// Request parameters, example usage below
params: {
// Simple string
"model": String("example"),
// Prompt type, allows for appending a string to the end of the prompt. Useful for stuff like `<|endofprompt|>`.
"prompt": Prompt(Some("<|endofprompt|>")),
// Temperature type
"temperature": Temperature,
// Max tokens type
"max_tokens": MaxTokens
},
// JSON objects to get from response
retriever: [ "output" ]
)
]
)

@ -0,0 +1,105 @@
use std::{collections::HashMap, sync::mpsc::Sender};
use crate::config::{ParamType, AI};
use anyhow::{Context, Result};
use once_cell::sync::Lazy;
use reqwest::Client;
use serde::{Deserialize, Serialize};
use serde_json::Value;
// Shared HTTP client, lazily built on first use and reused for every request
// (reqwest clients pool connections, so one global instance is the cheap path).
static CLIENT: Lazy<Client> = Lazy::new(Client::new);
/// Runtime generation parameters supplied by the UI and persisted in history.
#[derive(Clone, Serialize, Deserialize)]
pub struct Params {
    /// The user-entered prompt text.
    pub prompt: String,
    /// Sampling temperature forwarded to the backend (UI slider range 0.0..=2.0).
    pub temperature: f32,
    /// Maximum number of tokens the backend may generate.
    pub max_tokens: i32,
}
impl Default for Params {
fn default() -> Self {
Self {
prompt: Default::default(),
temperature: 1.0,
max_tokens: 1000,
}
}
}
/// Sends one generation request to `ai.url` and extracts the output string.
///
/// The JSON body is built by substituting runtime values from `params` into the
/// config-defined parameter map, then the configured retriever path is walked
/// through the JSON response (array indices or object keys, whichever each
/// step deserializes into).
///
/// # Errors
/// Fails on network/JSON errors, when a retriever step does not match the
/// response shape, or when the final value is not a string.
pub async fn generate(ai: AI, params: Params) -> Result<String> {
    // Build the request body from the config-defined parameter map.
    let mut body: HashMap<String, Value> = HashMap::new();
    for (key, kind) in ai.params {
        let value = match kind {
            ParamType::String(text) => Value::from(text),
            ParamType::Prompt(suffix) => {
                // Optional suffix (e.g. `<|endofprompt|>`) goes straight after the prompt.
                Value::from(format!("{}{}", params.prompt, suffix.unwrap_or_default()))
            }
            ParamType::Temperature => Value::from(params.temperature),
            ParamType::MaxTokens => Value::from(params.max_tokens),
        };
        body.insert(key, value);
    }
    let response: serde_json::Value = CLIENT
        .post(ai.url)
        .json(&body)
        .send()
        .await?
        .json()
        .await?;
    // Walk the retriever path one step at a time. A step that parses as a
    // usize indexes an array; otherwise it is treated as an object key.
    let mut cursor = &response;
    for step in ai.retriever {
        cursor = if let Ok(index) = step.clone().into_rust::<usize>() {
            cursor.get(index).context("Failed to execute retriever")?
        } else {
            let key = step.into_rust::<String>()?;
            cursor.get(key).context("Failed to execute retriever")?
        };
    }
    Ok(cursor
        .as_str()
        .context("Output is not a valid string")?
        .to_string())
}
/// Whether a generation attempt succeeded or failed.
#[derive(PartialEq)]
pub enum ResponseType {
    Error,
    Success,
}
/// Result of a generation task, sent back to the UI over an mpsc channel.
pub struct Response {
    pub response_type: ResponseType,
    /// The generated text on success, or the error text on failure.
    pub output: String,
}
/// Runs `generate` on a spawned tokio task and delivers the outcome —
/// generated text or error message — through `tx` as a `Response`, so the
/// UI thread never blocks on the network.
pub fn generate_with_mpsc(ai: AI, params: Params, tx: Sender<Response>) {
    tokio::spawn(async move {
        let response = match generate(ai, params).await {
            Ok(text) => Response {
                response_type: ResponseType::Success,
                output: text,
            },
            Err(err) => Response {
                response_type: ResponseType::Error,
                output: err.to_string(),
            },
        };
        // NOTE(review): the receiver lives inside the App; if the app were torn
        // down mid-generation this send would panic — acceptable for this tool.
        tx.send(response)
            .expect("Failed to send output, this should never happen!")
    });
}

@ -0,0 +1,238 @@
use std::{sync::mpsc::{self, Receiver, Sender}, time::Duration};
use chrono::Local;
use eframe::{
egui::{
self, CentralPanel, ComboBox, DragValue, Grid, Layout, ScrollArea, Sense, Slider, TextEdit,
Window,
},
emath::Align,
epaint::Vec2,
};
use egui_notify::Toasts;
use crate::{
ai::{generate_with_mpsc, Params, Response, ResponseType},
config::{AI, CONFIG},
history::{read_all_history, write_history, HistoryEntry},
};
/// Top-level application state for the egui/eframe UI.
pub struct App {
    /// Toast notifications (errors, etc.).
    toasts: Toasts,
    /// Text shown in the output pane.
    output: String,
    /// The AI currently picked in the combo box.
    selected_ai: AI,
    /// Current generation parameters bound to the UI widgets.
    params: Params,
    /// Whether the history window is open.
    history_open: bool,
    /// History entries loaded from disk when the History button is pressed.
    history: Vec<HistoryEntry>,
    /// The history entry opened for detail view, if any.
    open_history_entry: Option<HistoryEntry>,
    // Channel pair: tx is cloned into generation tasks, rx is polled each frame.
    tx: Sender<Response>,
    rx: Receiver<Response>,
}
impl Default for App {
fn default() -> Self {
let (tx, rx) = mpsc::channel();
Self {
toasts: Default::default(),
output: "Awaiting generation...".to_string(),
selected_ai: CONFIG.ai[0].clone(),
params: Default::default(),
history_open: false,
history: vec![],
open_history_entry: None,
tx,
rx,
}
}
}
impl eframe::App for App {
    /// Called every frame: drains a finished generation from the mpsc channel
    /// (if any), then draws the history window and the main generation UI.
    fn update(&mut self, ctx: &egui::Context, _frame: &mut eframe::Frame) {
        // Non-blocking poll for a completed generation from the worker task.
        if let Ok(recieved) = self.rx.try_recv() {
            if recieved.response_type == ResponseType::Success {
                self.output = recieved.output.trim().to_string();
            } else {
                // Surface a toast and dump the error details to the console.
                self.output = "Failed to generate!".to_string();
                self.toasts
                    .error("Failed to generate! Error printed to console.");
                eprintln!("{}", recieved.output);
            }
            // Persist the run; failed runs record the error text as the result.
            match write_history(HistoryEntry {
                date: Local::now(),
                selected_ai: self.selected_ai.name.clone(),
                params: self.params.clone(),
                result: self.output.clone(),
            }) {
                Ok(_) => {}
                Err(error) => {
                    self.toasts
                        .error(format!("Failed to write history! {}", error));
                }
            }
        }
        CentralPanel::default().show(ctx, |ui| {
            // History browser: shows either one opened entry or the list.
            Window::new("History")
                .open(&mut self.history_open)
                .collapsible(false)
                .hscroll(true)
                .show(ctx, |ui| {
                    if let Some(entry) = self.open_history_entry.clone() {
                        ui.set_max_width(580.0);
                        ui.horizontal(|ui| {
                            // maybe make these param indicators look more like the main ui somehow
                            ui.label(format!("{} - AI", entry.selected_ai));
                            ui.separator();
                            ui.label(format!(
                                "{} - Temperature",
                                entry.params.temperature.to_string()
                            ));
                            ui.separator();
                            ui.label(format!(
                                "{} - Max Tokens",
                                entry.params.max_tokens.to_string()
                            ));
                            ui.with_layout(Layout::right_to_left(Align::Max), |ui| {
                                if ui.button("Back").clicked() {
                                    self.open_history_entry = None;
                                }
                            });
                        });
                        // Prompt and result panes; `&mut ...as_str()` makes the
                        // TextEdits effectively read-only.
                        ui.with_layout(Layout::left_to_right(Align::Min), |ui| {
                            ScrollArea::vertical()
                                .id_source("history_prompt_scroll")
                                .show(ui, |ui| {
                                    ui.add(TextEdit::multiline(&mut entry.params.prompt.as_str()));
                                });
                            ScrollArea::vertical()
                                .id_source("history_output_scroll")
                                .show(ui, |ui| {
                                    ui.add(TextEdit::multiline(&mut entry.result.as_str()));
                                });
                        })
                        .response
                        .rect
                        .width();
                    } else {
                        ScrollArea::vertical()
                            .id_source("history_scroll")
                            .show(ui, |ui| {
                                Grid::new("history_grid").num_columns(1).show(
                                    ui,
                                    |ui| {
                                        for entry in &self.history {
                                            let response = ui
                                                .horizontal(|ui| {
                                                    ui.label(&entry.selected_ai);
                                                    ui.label(
                                                        entry
                                                            .date
                                                            .format("%Y-%m-%d %H:%M:%S")
                                                            .to_string(),
                                                    )
                                                })
                                                .response;
                                            // Overlay a click sense on the whole row so
                                            // the entry opens wherever it is clicked.
                                            let clickable =
                                                ui.allocate_rect(response.rect, Sense::click());
                                            if clickable.clicked() {
                                                self.open_history_entry = Some(entry.clone());
                                            }
                                            ui.end_row()
                                        }
                                    },
                                );
                            });
                    }
                });
            ui.vertical(|ui| {
                if ui.button("Generate!").clicked() {
                    // Kick off generation on a background task; the result
                    // arrives later through self.rx.
                    self.output = "Generating...".to_string();
                    generate_with_mpsc(
                        self.selected_ai.clone(),
                        self.params.clone(),
                        self.tx.clone(),
                    )
                }
                ui.horizontal(|ui| {
                    ComboBox::from_label("AI")
                        .selected_text(&self.selected_ai.name)
                        .show_ui(ui, |ui| {
                            for ai in &CONFIG.ai {
                                ui.selectable_value(&mut self.selected_ai, ai.clone(), &ai.name);
                            }
                        });
                    ui.separator();
                    ui.add(
                        Slider::new(&mut self.params.temperature, 0.0..=2.0)
                            .text("Temperature")
                            .show_value(true),
                    );
                    ui.separator();
                    ui.add(DragValue::new(&mut self.params.max_tokens));
                    ui.label("Max Tokens");
                    ui.with_layout(Layout::right_to_left(Align::Max), |ui| {
                        // Hidden entirely when history is disabled in the config.
                        ui.add_visible_ui(CONFIG.app.history_dir.is_some(), |ui| {
                            if ui.button("History").clicked() {
                                // add proper error handling
                                self.history = read_all_history().unwrap();
                                self.history_open = true;
                            }
                        });
                    });
                });
                // Prompt editor (left) and read-only output (right), half width each.
                ui.with_layout(Layout::left_to_right(Align::Min), |ui| {
                    let size = ui.available_size();
                    ScrollArea::vertical()
                        .id_source("prompt_scroll")
                        .show(ui, |ui| {
                            ui.add_sized(
                                Vec2::new(size.x / 2.0, size.y),
                                TextEdit::multiline(&mut self.params.prompt),
                            );
                        });
                    ScrollArea::vertical()
                        .id_source("output_scroll")
                        .show(ui, |ui| {
                            ui.add_sized(
                                Vec2::new(size.x / 2.0, size.y),
                                TextEdit::multiline(&mut self.output.as_str()),
                            );
                        });
                });
            });
            self.toasts.show(ctx);
            // Repaint at least once per second so channel results are picked
            // up even without user interaction.
            ctx.request_repaint_after(Duration::from_secs(1))
        });
    }
}

@ -0,0 +1,39 @@
use std::{fs, collections::HashMap};
use once_cell::sync::Lazy;
use ron::Value;
use serde::Deserialize;
/// Global application config, parsed from `./config.ron` (relative to the
/// process working directory) on first access. Panics via `expect` if the
/// file is missing or malformed — see the TODO about a real config directory.
pub static CONFIG: Lazy<Config> = Lazy::new(|| {
    let config = fs::read_to_string("./config.ron").expect("Failed to read config.ron");
    ron::from_str(&config).expect("Failed to parse config")
});
/// Top-level shape of `config.ron`.
#[derive(Deserialize, Debug)]
pub struct Config {
    /// Application-wide settings.
    pub app: App,
    /// Configured AI backends; the UI assumes at least one entry (indexes [0]).
    pub ai: Vec<AI>,
}
/// Application-wide settings (distinct from the UI's `app::App` state).
#[derive(Deserialize, Debug)]
pub struct App {
    /// Directory history files are written to; `None` disables history.
    pub history_dir: Option<String>,
}
/// Configuration for a single AI backend.
#[derive(Deserialize, Clone, PartialEq, Debug)]
pub struct AI {
    /// Display name shown in the UI combo box.
    pub name: String,
    /// Generation endpoint the request body is POSTed to.
    pub url: String,
    /// Request body parameters; see `ParamType` for how each value is filled in.
    pub params: HashMap<String, ParamType>,
    /// Path of steps (object keys or array indices, as RON values) used to
    /// extract the output string from the JSON response.
    pub retriever: Vec<Value>,
}
/// Kinds of values a configured request parameter can take.
#[derive(Deserialize, Clone, PartialEq, Debug)]
pub enum ParamType {
    /// A fixed string sent as-is.
    String(String),
    /// The user's prompt, with an optional suffix appended (e.g. `<|endofprompt|>`).
    Prompt(Option<String>),
    /// Substituted with the UI's temperature value.
    Temperature,
    /// Substituted with the UI's max-tokens value.
    MaxTokens
}

@ -0,0 +1,55 @@
use anyhow::Result;
use chrono::{DateTime, Local};
use serde::{Deserialize, Serialize};
use std::fs;
use crate::{ai::Params, config::CONFIG};
/// One persisted generation run, stored as a JSON file on disk.
#[derive(Serialize, Deserialize, Clone)]
pub struct HistoryEntry {
    /// When the generation result was received (local time).
    pub date: DateTime<Local>,
    /// Name of the AI that produced the result.
    pub selected_ai: String,
    /// The parameters the run was made with.
    pub params: Params,
    /// The generated output (or the error text shown in the UI on failure).
    pub result: String,
}
/// Serializes `entry` to JSON and writes it into the configured history
/// directory, creating the directory first if needed.
///
/// # Errors
/// Returns an error if the directory cannot be created, serialization fails,
/// or the file cannot be written.
///
/// # Panics
/// Panics if `history_dir` is `None` — callers only invoke this when history
/// is enabled.
pub fn write_history(entry: HistoryEntry) -> Result<()> {
    // should be safe to unwrap as this function will never be called without it
    let history_dir = CONFIG.app.history_dir.as_ref().unwrap();
    fs::create_dir_all(history_dir)?;
    let json = serde_json::to_string(&entry)?;
    // Use millisecond precision: with second-resolution `timestamp()`, two
    // generations finishing within the same second produced the same filename
    // and the earlier entry was silently overwritten. `read_all_history`
    // never parses filenames, so this change is backward-compatible.
    fs::write(
        format!("{}/{}.json", history_dir, entry.date.timestamp_millis()),
        json,
    )?;
    Ok(())
}
/*
This should probably be async, all things considered; however, losing proper
error handling with `?` is not worth it. Here's hoping that Rust's `fs` (and
`serde_json`, and therefore `serde`) is fast enough to read ~10,000 files.
*/
/// Loads every history file from the configured directory and returns the
/// entries sorted newest-first.
///
/// # Errors
/// Propagates directory/file I/O errors and JSON parse failures for any file
/// in the directory.
///
/// # Panics
/// Panics if `history_dir` is `None` — callers only invoke this when history
/// is enabled.
pub fn read_all_history() -> Result<Vec<HistoryEntry>> {
    // should be safe to unwrap as this function will never be called without it
    let history_dir = CONFIG.app.history_dir.as_ref().unwrap();
    let mut entries: Vec<HistoryEntry> = Vec::new();
    for item in fs::read_dir(history_dir)? {
        let item = item?;
        // Skip subdirectories and other non-file entries.
        if !item.file_type()?.is_file() {
            continue;
        }
        let contents = fs::read_to_string(item.path())?;
        entries.push(serde_json::from_str(&contents)?);
    }
    // Newest first.
    entries.sort_by(|a, b| b.date.cmp(&a.date));
    Ok(entries)
}

@ -0,0 +1,23 @@
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]
mod ai;
mod app;
mod config;
mod history;
use once_cell::sync::Lazy;
use crate::app::App;
use crate::config::CONFIG;
#[tokio::main]
async fn main() {
    // Touch the lazily-initialized config up front so a bad config.ron
    // fails fast at startup rather than mid-session.
    Lazy::force(&CONFIG);
    let native_options = eframe::NativeOptions::default();
    eframe::run_native(
        "AI Gui",
        native_options,
        Box::new(|_| Box::new(App::default())),
    )
    .unwrap();
}