diff --git a/llm.lua b/llm.lua
index 26c9f1f..48d1eac 100644
--- a/llm.lua
+++ b/llm.lua
@@ -32,7 +32,8 @@
 local action = arg[1]

 -- On my system, it is impossible to call wsl directly from Lua. No idea why.
-local function wsl_command(command, get_output)
+local function wsl_command(command, get_output) -- defaults to getting the output
+  -- print("wsl running:", command)
   local command = "pwsh -Command wsl --exec \"" .. utility.escape_quotes(command) .. "\""
   local output
@@ -77,7 +78,7 @@ end
 local function model_exists(model)
   local models = get_models()
   for _, name in ipairs(models) do
-    if model == name then
+    if model == name:match("^[^:]*") then -- ignore tags!
       return true
     end
   end
@@ -88,7 +89,46 @@ end

 local execute = {
   create = function()
+    local model = arg[2]
+    if not model then print("A model name must be specified.") return false end
     -- check for conflicts, then search local modelfiles -> create from local, then try a pull command, else return false
+
+    if model_exists(model) then
+      print("A model called \"" .. model .. "\" already exists.")
+      return false
+    end
+
+    local search_path = utility.path .. "ollama-modelfiles"
+    if utility.OS == "Windows" then
+      search_path = search_path .. "\\"
+    else
+      search_path = search_path .. "/"
+    end
+
+    local success
+    utility.ls(search_path)(function(file_name)
+      if model == file_name then
+        -- WSL can't comprehend a Windows path and treats it as a relative path, so we must convert it to a /mnt/ path
+        search_path = search_path:gsub("\\", "/"):gsub("^(%a):", function(capture) return "/mnt/" .. capture:lower() end)
+        local output = wsl_command("ollama create " .. model .. " --file " .. search_path .. model)
+        print(output)
+
+        if output:find("success") then
+          success = true
+        else
+          success = false
+        end
+      end
+    end)
+    if success ~= nil then return success end -- a local modelfile was found; report its result
+
+    local output = (wsl_command("ollama pull " .. model)):trim()
+    print(output)
+    if output:find("Error") then
+      return false
+    else
+      return true
+    end
   end,
   query = function()
     local model = arg[2]
@@ -135,7 +175,4 @@ end

 -- print(query_dolphin("Say only the word 'cheese'."))

--- TEMPORARY creation, need to make this system able to manage models automatically or semi-automatically
--- wsl_command("ollama create curt --file ")
-
 -- print(query_model("curt", "How are you?"))
diff --git a/utility-functions.lua b/utility-functions.lua
index 565f65c..d998241 100644
--- a/utility-functions.lua
+++ b/utility-functions.lua
@@ -8,6 +8,8 @@ else
   utility.OS = "UNIX-like"
 end

+utility.path = arg[0]:match("@?(.*/)") or arg[0]:match("@?(.*\\)") -- related to discussion in https://stackoverflow.com/q/6380820
+
 -- always uses outputting to a temporary file to guarantee safety
 function os.capture_safe(command, tmp_file_name)
   local file_name = tmp_file_name or utility.tmp_file_name()
@@ -80,7 +82,7 @@ utility.ls = function(path)
     command = "ls -1"
   end
   if path then
-    command = command .. " \"" .. path .. "\""
+    command = command .. " \"" .. path .. "\""
   end

   local tmp_file_name = utility.tmp_file_name()
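
A minimal sketch of the path conversion used by "create" above, shown standalone for clarity. This is illustrative only and not part of the patch; the helper name to_wsl_path and the example path are hypothetical.

-- Illustrative sketch (not part of the patch): converting a Windows path to its WSL mount path.
local function to_wsl_path(path)
  -- flip backslashes to forward slashes, then map the leading drive letter to /mnt/<lowercase letter>
  return (path:gsub("\\", "/"):gsub("^(%a):", function(drive) return "/mnt/" .. drive:lower() end))
end

print(to_wsl_path("C:\\Users\\me\\ollama-modelfiles\\"))  --> /mnt/c/Users/me/ollama-modelfiles/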