#!/usr/bin/env luajit

-- ollama install command: curl https://ollama.ai/install.sh | sh

local success, utility = pcall(function()
  return dofile((arg[0]:match("@?(.*/)") or arg[0]:match("@?(.*\\)")) .. "utility-functions.lua")
end)
if not success then
  error("\n\nThis script is installed improperly. Follow instructions at:\n\thttps://github.com/TangentFoxy/.lua-files#installation\n")
end

-- utility.required_program("wsl") -- This fails on my system, necessitating a special function to run commands in WSL.
utility.required_program("pwsh") -- Apparently this is and isn't PowerShell. Isn't the future amazing?

-- On my system, it is impossible to call wsl directly from Lua. No idea why.
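-- Runs a command inside WSL by going through pwsh. If get_output is true,
-- the command's output is redirected to a temporary file and returned (trimmed).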
local function wsl_command(command, get_output)
  local file_name = utility.tmp_file_name()
  local output

  -- wrap the command so pwsh hands it to WSL; inner quotes must be escaped
  command = "pwsh -Command wsl --exec \"" .. utility.escape_quotes(command) .. "\""
  if get_output then
    command = command .. " > " .. file_name
  end

  os.execute(command)

  if get_output then
    local file = io.open(file_name, "r")
    output = file:read("*all")
    file:close()
    os.remove(file_name) -- standard Lua removal, no external rm needed
    return output:trim() -- :trim() is expected to come from utility-functions.lua
  end
end
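
-- Sends a prompt to an Ollama model inside WSL and returns the model's reply as a string.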
local function query_model(model, prompt)
  local command = "ollama run " .. model .. " \"" .. utility.escape_quotes(prompt) .. "\""
  return wsl_command(command, true)
end
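
-- Convenience wrapper for querying the dolphin-mixtral model.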
local function query_dolphin(prompt)
  return query_model("dolphin-mixtral", prompt)
end

-- print(query_dolphin("Say only the word 'cheese'."))

-- TEMPORARY creation, need to make this system able to manage models automatically or semi-automatically
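-- TODO: the Modelfile path is missing from this command.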
wsl_command("ollama create curt --file ")

print(query_model("curt", "How are you?"))