diff --git a/aish b/aish
new file mode 100644
index 0000000..ca70c29
--- /dev/null
+++ b/aish
@@ -0,0 +1,315 @@
+#!/usr/bin/env node
+
+import fs from 'fs'
+
+const settings = {
+  config_file: (process.env.HOME || '.') + '/.config/aish/config.json',
+}
+const args = {
+  model: null,
+  cmd: 'query',
+  output: 'txt',
+  query: '',
+  memory: null,
+}
+
+const showHelp = () => {
+  console.log('aish')
+  console.log('---')
+  console.log('args:')
+  console.log('  --help')
+  console.log('  --list-models')
+  console.log('  --model name')
+  console.log('  --list-chats')
+  console.log('  --show-chat name')
+  console.log('  --delete-chat name')
+  console.log('  -c | --config configFile')
+  console.log('  -m | --chat-memory name')
+  console.log('  -o | --output txt|json|debug')
+}
+
+const parseArgs = () => {
+  const pargs = process.argv.slice(2)
+  let error_flag = false
+  for(let i = 0; i < pargs.length; i++) {
+    switch(pargs[i]){
+      case '--help':
+        showHelp()
+        process.exit()
+        break
+      case '-c':
+      case '--config':
+        settings.config_file = pargs[i+1]
+        if(!fs.existsSync(settings.config_file)) {
+          console.log('Config file not found!')
+          process.exit(1)
+        }
+        i++
+        break
+      case '-m':
+      case '--chat-memory':
+        args.memory = pargs[i+1]
+        i++
+        break
+      case '--show-chat':
+        args.cmd = 'show-chat'
+        args.memory = pargs[i+1]
+        i++
+        break
+      case '--delete-chat':
+        args.cmd = 'delete-chat'
+        args.memory = pargs[i+1]
+        i++
+        break
+      case '-o':
+      case '--output':
+        switch(pargs[i+1]) {
+          case 'txt':
+          case 'json':
+          case 'debug':
+            args.output = pargs[i+1]
+            break
+          default:
+            console.log('Unknown output format: ' + pargs[i+1] + '!')
+            process.exit(1)
+        }
+        i++
+        break
+      case '--list-models':
+        args.cmd = 'list-models'
+        break
+      case '--model':
+        args.model = pargs[i+1]
+        i++
+        break
+      case '--list-chats':
+        args.cmd = 'list-chats'
+        break
+      default:
+        if(i < pargs.length - 1) {
+          error_flag = true
+          console.log('Unknown arg: ' + pargs[i])
+        } else {
+          args.query = pargs[i]
+        }
+    }
+  }
+  if(error_flag == true) {
+    showHelp()
+    process.exit(1)
+  }
+}
+const loadConfig = () => {
+  try {
+    if(!fs.existsSync(settings.config_file)) {
+      console.log('Config file not found!')
+      process.exit(1)
+    }
+    return JSON.parse(fs.readFileSync(settings.config_file, 'utf8'))
+  } catch (e) {
+    console.log('Config file error!')
+    process.exit(1)
+  }
+}
+parseArgs()
+const config = loadConfig()
+if(args.model) config.model = args.model
+
+const chat_memory_exist = (name) => {
+  return fs.existsSync(config.data + '/chats/' + name)
+}
+const chat_memory_load = (name) => {
+  if(!fs.existsSync(config.data + '/chats/' + name)) {
+    console.log('Memory not found')
+    process.exit(1)
+  }
+  try {
+    return JSON.parse(fs.readFileSync(config.data + '/chats/' + name))
+  } catch(e) {
+    console.log('Memory read error')
+    process.exit(1)
+  }
+}
+const chat_memory_save = (name, value) => {
+  try {
+    if(!fs.existsSync(config.data + '/chats')) {
+      fs.mkdirSync(config.data + '/chats', { recursive: true })
+    }
+    fs.writeFileSync(config.data + '/chats/' + name, JSON.stringify(value))
+  } catch(e) {
+    console.log('Memory write error')
+    process.exit(1)
+  }
+}
+const chat_memory_delete = (name) => {
+  if(!fs.existsSync(config.data + '/chats/' + name)) {
+    console.log('Memory not found')
+    process.exit(1)
+  }
+  try {
+    fs.rmSync(config.data + '/chats/' + name)
+    console.log(name + ' deleted')
+  } catch(e) {
+    console.log('Delete memory error')
+    process.exit(1)
+  }
+}
+const ai_request = async (path, method, data) => {
+  const options = {
+    method: method,
+    headers: {
+      'Authorization': 'Bearer ' + config.api_key,
+      'Content-Type': 'application/json',
+    },
+  }
+  if(data) options.body = JSON.stringify(data)
+  const result = await fetch(config.base_url + path, options)
+  return result.json()
+}
+
+const model_list = async () => {
+  const models = await ai_request(config.path.models, 'GET')
+  switch(args.output) {
+    case 'txt':
+      models.models.forEach(model => {
+        console.log(model.name)
+      })
+      break
+    case 'json':
+      console.log(models.models)
+      break
+    case 'debug':
+      console.log(models)
+      break
+    default:
+      process.exit(1)
+  }
+}
+const chat_list = async () => {
+  if(!fs.existsSync(config.data)) {
+    console.log('Data dir not found!')
+    process.exit(1)
+  }
+  switch(args.output) {
+    case 'txt':
+      fs.readdirSync(config.data + '/chats').forEach(dir => {
+        console.log(dir)
+      })
+      break
+    case 'json':
+      console.log(fs.readdirSync(config.data + '/chats'))
+      break
+    case 'debug':
+      console.log(fs.readdirSync(config.data + '/chats'))
+      break
+    default:
+      process.exit(1)
+  }
+}
+const chat_show = async () => {
+  const chat = chat_memory_load(args.memory)
+  switch(args.output) {
+    case 'txt':
+      console.log('Model: ' + chat.model)
+      console.log()
+      chat.messages.forEach(message => {
+        console.log('\n=== @' + message.role + ' =============')
+        console.log(message.content)
+      })
+      break
+    case 'json':
+    case 'debug':
+      console.log(chat)
+      break
+  }
+}
+const chat_delete = async () => {
+  chat_memory_delete(args.memory)
+}
+const chat_query = async () => {
+  const data = {
+    "model": config.model,
+    "messages": [
+      {
+        "role": "system",
+        "content": config.prompt,
+      },
+      {
+        "role": "user",
+        "content": args.query,
+      },
+    ],
+    "options": config.options,
+  }
+  if(args.memory) {
+    if(chat_memory_exist(args.memory) == true) {
+      const memory = chat_memory_load(args.memory)
+      data.messages = memory.messages
+      data.messages.push(
+        {
+          "role": "user",
+          "content": args.query,
+        },
+      )
+    }
+  }
+  const response = await ai_request(config.path.completion, 'POST', data)
+  if(args.memory) {
+    data.messages.push({
+      role: 'assistant',
+      content: response.choices[0].message.content,
+    })
+    chat_memory_save(args.memory, data)
+  }
+  switch(args.output) {
+    case 'txt':
+      if(response.choices.length == 1) {
+        let message = response.choices[0].message.content.trim()
+        if(message.substr(0,3) == '```' && message.substr(-3) == '```') {
+          message = message.substr(message.indexOf('\n') + 1)
+          message = message.substring(0, message.lastIndexOf('\n'))
+        }
+        console.log(message)
+      } else {
+        response.choices.forEach(item => {
+          console.log(item.message.content)
+        })
+      }
+      break
+    case 'json':
+      console.log(response.choices)
+      break
+    case 'debug':
+      console.log(response)
+      break
+    default:
+      process.exit(1)
+  }
+}
+
+const read_stdin = async () => {
+  let stdin = "";
+  if(process.stdin.isTTY !== true) {
+    for await (const chunk of process.stdin) stdin += chunk
+    args.query = args.query + '\n\n' + stdin
+  }
+}
+await read_stdin()
+
+switch(args.cmd) {
+  case 'list-models':
+    model_list()
+    break
+  case 'list-chats':
+    chat_list()
+    break
+  case 'show-chat':
+    chat_show()
+    break
+  case 'delete-chat':
+    chat_delete()
+    break
+  case 'query':
+    chat_query()
+    break
+}
diff --git a/config.sample.json b/config.sample.json
new file mode 100644
index 0000000..36728fc
--- /dev/null
+++ b/config.sample.json
@@ -0,0 +1,46 @@
+{
+  "data": "./data",
+  "base_url": "https://...",
+  "api_key": "sk-...",
+  "path": {
+    "models": "/ollama/api/tags",
+    "completion": "/api/chat/completions"
+  },
+  "model": "default",
+  "prompt": "You are a brilliant assistant who answers using every possible perspective on the subject.\nYou don't use interjections.\nWhen I ask to convert or respond in a specific format, you only display the result.\nYou think, speak and answer in French.\n\n",
+  "options": {
+    "top_k": 20,
+    "top_p": 0.9,
+    "temperature": 0
+  },
+  "options_example": {
+    "num_keep": 5,
+    "seed": 42,
+    "num_predict": 100,
+    "top_k": 20,
+    "top_p": 0.9,
+    "min_p": 0.0,
+    "tfs_z": 0.5,
+    "typical_p": 0.7,
+    "repeat_last_n": 33,
+    "temperature": 0.8,
+    "repeat_penalty": 1.2,
+    "presence_penalty": 1.5,
+    "frequency_penalty": 1.0,
+    "mirostat": 1,
+    "mirostat_tau": 0.8,
+    "mirostat_eta": 0.6,
+    "penalize_newline": true,
+    "stop": ["\n", "user:"],
+    "numa": false,
+    "num_ctx": 1024,
+    "num_batch": 2,
+    "num_gpu": 1,
+    "main_gpu": 0,
+    "low_vram": false,
+    "vocab_only": false,
+    "use_mmap": true,
+    "use_mlock": false,
+    "num_thread": 8
+  }
+}
diff --git a/makerelease.sh b/makerelease.sh
new file mode 100755
index 0000000..b0d6b33
--- /dev/null
+++ b/makerelease.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+
+declare -r VERSION=${1}
+declare -r MESSAGE=${2}
+declare -r TAGBRANCH=main
+declare CURRENTBRANCH=""
+
+showHelp() {
+  echo "Usage: makerelease version [message]"
+}
+
+if [ "${VERSION}" == "" ]; then
+  showHelp
+  echo ""
+  echo "No version provided!"
+  exit 1
+fi
+
+CURRENTBRANCH=$(git rev-parse --abbrev-ref HEAD)
+
+if [ "${CURRENTBRANCH}" != "dev" ]; then
+  echo "You are not on the dev branch!"
+  echo "Use the dev branch to make a release!"
+  exit 1
+fi
+
+git checkout "${TAGBRANCH}"
+git merge "${CURRENTBRANCH}"
+git push
+git tag -a "${VERSION}" -m "${MESSAGE}"
+git push --tags
+git checkout "${CURRENTBRANCH}"
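For reference, a possible usage sketch for the aish script added above, based on the flags it parses and its stdin handling. The model name, file names, and chat-memory name are placeholder examples, not values taken from this change; the script must be executable and a valid config file must exist (the default path, or one passed with -c).

    # list the models exposed by the configured endpoint
    ./aish --list-models

    # one-shot query with an explicit model and JSON output
    ./aish --model some-model -o json "Convert 12 March 2024 to ISO 8601"

    # piped input is appended to the query after a blank line
    cat notes.txt | ./aish "Summarize the following notes"

    # keep a conversation in a named chat memory, then inspect and delete it
    ./aish -m mychat "First question"
    ./aish -m mychat "Follow-up question"
    ./aish --show-chat mychat
    ./aish --delete-chat mychat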