diff --git a/README.md b/README.md index 3b26757..c8a01de 100644 --- a/README.md +++ b/README.md @@ -3,8 +3,8 @@ It's like dotfiles, but no, it's just Lua scripts I find useful. ## Scripts - `2webm.lua`: Converts everything in the working directory to .webm files. -- `popen-command-test.lua`: A badly-named WIP script for working with LLMs through WSL using [ollama](https://github.com/jmorganca/ollama). -- `print-arguments.lua`: For testing how a Lua script receives arguments. (It can be platform-specific.) +- `llm.lua`: (Windows only!) A very WIP script for working with LLMs through WSL using [ollama](https://github.com/jmorganca/ollama). +- `print-arguments.lua`: For testing how a Lua script receives arguments, because this can be platform-specific. - `test.lua`: (Dev Test) Used repeatedly while working on these scripts to verify minor details I'm forgetful about. - `utility-functions.lua`: (Library) Required for many of these scripts to run. - `video-dl.lua`: A few premade command lines for using `yt-dlp` to download what I want quicker. diff --git a/llm.lua b/llm.lua new file mode 100644 index 0000000..aeb53db --- /dev/null +++ b/llm.lua @@ -0,0 +1,144 @@ +#!/usr/bin/env luajit + +local help = [[Usage: + + llm.lua <action> [...] + +<action>: What is desired. + create, pull, download, install <model>: Creates a model from local + Modelfile. If that doesn't exist, uses ollama pull to + download the specified <model>. + query, run [model] [input]: Runs a model. Defaults to using + dolphin-mixtral when [model] is not specified. If [input] is not + specified, opens in interactive mode. [input] cannot be the name + of an existing model alone (that would just open the model + interactively). Cannot download a model (a non-existing model + name is treated as input). + help: Print this helptext. +]] + +if arg[1] and arg[1]:find("help") then + print(help) + return false +end + +local error_occurred, utility = pcall(function() return dofile((arg[0]:match("@?(.*/)") or arg[0]:match("@?(.*\\)")) .. 
"utility-functions.lua") end) if not error_occurred then error("\n\nThis script is installed improperly. Follow instructions at:\n\thttps://github.com/TangentFoxy/.lua-files#installation\n") end +-- util.required_program("wsl") -- This fails on my system, necessitating a special function to run commands in WSL. +-- I have no idea how to check for ollama being installed through WSL, else that check would be here. +utility.required_program("pwsh") -- Apparently this is AND isn't PowerShell. Isn't the future amazing? + +local action = arg[1] + + + +-- On my system, it is impossible to call wsl directly from Lua. No idea why. +local function wsl_command(command, get_output) + local command = "pwsh -Command wsl --exec \"" .. utility.escape_quotes(command) .. "\"" + + local output + if get_output or (get_output == nil) then + output = os.capture_safe(command) + return output:trim() + else + os.execute(command) + end +end + +-- TODO make this check against existent models and error if you try to query a non-existent model +local function query_model(model, prompt) + if prompt then + return wsl_command("ollama run " .. model .. " \"" .. utility.escape_quotes(prompt) .. "\"") + else + return wsl_command("ollama run " .. 
model, false) + end +end + +local function get_models() + local raw_list = wsl_command("ollama list") + + -- TODO export to utility if this works + local function lines(text, fn) + for line in text:gmatch("[^\r\n]+") do + fn(line) + end + end + + local list = {} + lines(raw_list, function(line) + local name = line:gmatch("%S+")() -- thanks to https://lua-users.org/wiki/SplitJoin + if name ~= "NAME" then -- crude way to skip the header row ("NAME" is the first column label in ollama list output) + table.insert(list, name) + end + end) + + return list +end + +local function model_exists(model) + local models = get_models() + for _, name in ipairs(models) do + if model == name then + return true + end + end + return false +end + + + +local execute = { + create = function() + -- check for conflicts, then search local modelfiles -> create from local, then try a pull command, else return false + end, + query = function() + local model = arg[2] + local query = {} + for i = 3, #arg do + table.insert(query, arg[i]) + end + query = table.concat(query, " ") + + -- verify we've selected a model + if model then + if not model_exists(model) then + query = model .. " " .. query + model = "dolphin-mixtral" + end + else + model = "dolphin-mixtral" + end + + -- enter interactive mode or send prompt? 
+ if query == "" then + return query_model(model) + else + print(query_model(model, query)) + return true + end + end, +} +execute.pull = execute.create +execute.download = execute.create +execute.install = execute.create +execute.run = execute.query + +if execute[action] then + execute[action]() +else + print("Invalid <action>") + print("Received:", "action", action) +end + + + +-- ollama install command: curl https://ollama.ai/install.sh | sh + +local function query_dolphin(prompt) + query_model("dolphin-mixtral", prompt) +end +-- print(query_dolphin("Say only the word 'cheese'.")) + +-- TEMPORARY creation, need to make this system able to manage models automatically or semi-automatically +-- wsl_command("ollama create curt --file ") + +-- print(query_model("curt", "How are you?")) diff --git a/popen-command-test.lua b/popen-command-test.lua deleted file mode 100644 index dc61e7c..0000000 --- a/popen-command-test.lua +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env luajit - --- ollama install command: curl https://ollama.ai/install.sh | sh - -local error_occurred, utility = pcall(function() return dofile(arg[0]:match("@?(.*/)") or arg[0]:match("@?(.*\\)") .. "utility-functions.lua") end) if not error_occurred then error("\n\nThis script is installed improperly. Follow instructions at:\n\thttps://github.com/TangentFoxy/.lua-files#installation\n") end --- util.required_program("wsl") -- This fails on my system, necessitating a special function to run commands in WSL. -utility.required_program("pwsh") -- Apparently this is and isn't PowerShell. Isn't the future amazing? - --- On my system, it is impossible to call wsl directly from Lua. No idea why. -local function wsl_command(command, get_output) - local file_name = utility.tmp_file_name() - local output - - command = "pwsh -Command wsl --exec \"" .. utility.escape_quotes(command) .. "\"" - - if get_output then - command = command .. " > " .. 
file_name - end - - os.execute(command) - - if get_output then - local file = io.open(file_name, "r") - local output = file:read("*all") - file:close() - os.execute("rm " .. file_name) -- TODO replace with version I know works from somewhere else - return output:trim() - end -end - -local function query_model(model, prompt) - local command = "ollama run " .. model .. " \"" .. utility.escape_quotes(prompt) .. "\"" - return wsl_command(command, true) -end - -local function query_dolphin(prompt) - query_model("dolphin-mixtral", prompt) -end --- print(query_dolphin("Say only the word 'cheese'.")) - --- TEMPORARY creation, need to make this system able to manage models automatically or semi-automatically -wsl_command("ollama create curt --file ") - -print(query_model("curt", "How are you?")) diff --git a/utility-functions.lua b/utility-functions.lua index 6c5d954..565f65c 100644 --- a/utility-functions.lua +++ b/utility-functions.lua @@ -87,7 +87,7 @@ utility.ls = function(path) local output = os.capture_safe(command, tmp_file_name) return function(fn) - for line in output:gmatch("[^\r\n]+") do + for line in output:gmatch("[^\r\n]+") do -- thanks to https://stackoverflow.com/a/32847589 if line ~= tmp_file_name then -- exclude temporary file name fn(line) end diff --git a/video-dl.lua b/video-dl.lua index f5342fd..6788404 100644 --- a/video-dl.lua +++ b/video-dl.lua @@ -56,6 +56,6 @@ execute.meta = execute.metadata if execute[action] then execute[action]() else - print("Invalid ") + print("Invalid [action]") print("Received:", "action", action, "url", url) end