fixed llm

This commit is contained in:
Tangent / Rose / Nebula Rosa 2024-01-13 21:16:31 -07:00
parent e9ecceb0b4
commit 93561b460b

View File

@ -112,7 +112,7 @@ local execute = {
if query == "" then
	return query_model(model)
else
	print(query_model(model, query))
	return true
end
end,
@ -133,9 +133,6 @@ end
-- ollama install command: curl https://ollama.ai/install.sh | sh
--- Send a prompt to the locally installed "dolphin-mixtral" model.
-- Thin convenience wrapper around query_model with the model name fixed.
-- The wrapper itself returns nothing; any value from query_model is
-- intentionally discarded here (presumably query_model handles output —
-- TODO confirm against its definition).
-- @tparam string text the prompt text to send to the model
local function query_dolphin(text)
	query_model("dolphin-mixtral", text)
end
-- print(query_dolphin("Say only the word 'cheese'."))
-- TEMPORARY creation, need to make this system able to manage models automatically or semi-automatically