Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Added dropdown select for locally installed models #658

Open
wants to merge 4 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 13 additions & 2 deletions src/api.js
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,10 @@ const {
generate,
stop,
serve,
list,
} = require("./service/ollama/ollama.js");

let model = "mistral";
let model = "mistral:latest";

function debugLog(msg) {
if (global.debug) {
Expand All @@ -36,7 +37,7 @@ async function runOllamaModel(event, msg) {
await run(model, (json) => {
// status will be set if the model is downloading
if (json.status) {
if (json.status.includes("downloading")) {
if (json.status.includes("downloading") || json.status.includes("pulling")) {
const percent = Math.round((json.completed / json.total) * 100);
const content = isNaN(percent)
? "Downloading AI model..."
Expand Down Expand Up @@ -151,6 +152,15 @@ async function serveOllama(event) {
}
}

/**
 * Fetch the list of locally installed Ollama models and reply to the
 * renderer on the "ollama:list" channel.
 *
 * @param {Electron.IpcMainEvent} event - IPC event used to send the reply.
 *   On success `content` is the parsed /api/tags payload (e.g.
 *   `{ models: [...] }`); on failure `content` is the error message.
 */
async function listLocalModels(event) {
  try {
    // `const` declaration — the original assigned `modelList` without
    // declaring it, leaking an implicit global (and throwing in strict mode).
    const modelList = await list();
    event.reply("ollama:list", { success: true, content: modelList });
  } catch (err) {
    event.reply("ollama:list", { success: false, content: err.message });
  }
}

// IPC handler for "ollama:stop": forwards the request straight to the
// Ollama service layer; the event argument is unused.
function stopOllama(_event) {
  stop();
}
Expand All @@ -164,4 +174,5 @@ module.exports = {
serveOllama,
runOllamaModel,
stopOllama,
listLocalModels,
};
46 changes: 44 additions & 2 deletions src/client.js
Original file line number Diff line number Diff line change
Expand Up @@ -18,17 +18,21 @@ const settingsView = document.getElementById("settings-view");
const settingsCancelBtn = document.getElementById("cancel-btn");
const settingsCloseBtn = document.getElementById("settings-close-btn");
const settingsSaveBtn = document.getElementById("save-btn");
const settingsDownloadBtn = document.getElementById("download-btn");
const modelSelectInput = document.getElementById("model-select");
const modelSelectDownloadInput = document.getElementById("model-select-download");
const downloadEmptyWarning = document.getElementById("download-empty-warning");

let responseElem;

/**
* This is the initial chain of events that must run on start-up.
* 1. Start the Ollama server.
* 1. Start the Ollama server
* 2. Run the model. This will load the model into memory so that first chat is not slow.
* This step will also download the model if it is not already downloaded.
* 3. Monitor the run status
* 4. Load the chat
 * 5. List locally available models
*/

// 1. Start the Ollama server
Expand Down Expand Up @@ -66,6 +70,21 @@ window.electronAPI.onOllamaRun((event, data) => {
statusMsg.textContent = data.content;
});

// 5. List available models: populate the settings dropdown with every
// model installed locally, as reported by the main process.
window.electronAPI.listLocalModels((event, data) => {
  // The main process replies { success: false, content: <error string> }
  // when listing fails; guard so we do not read `.models` off a string.
  if (!data.success) {
    console.error("Failed to list local models:", data.content);
    return;
  }
  for (const model of data.content.models) {
    const opt = document.createElement("option");
    opt.value = model.name;
    // textContent (not innerHTML): model names are plain text.
    opt.textContent = model.name;
    modelSelectInput.appendChild(opt);
  }
});

// Update the display when a document is loaded
window.electronAPI.onDocumentLoaded((event, data) => {
document.getElementById("file-spinner").style.display = "none";
Expand Down Expand Up @@ -190,7 +209,6 @@ window.electronAPI.onChatReply((event, data) => {
historyContainer.scrollTop = historyContainer.scrollHeight;
}
});

// Open file dialog
openFileButton.addEventListener("click", () => {
document.getElementById("file-open-icon").style.display = "none";
Expand Down Expand Up @@ -253,3 +271,27 @@ userInput.addEventListener("input", function () {
this.style.height = "auto";
this.style.height = this.scrollHeight + "px";
});
// Download button in the settings menu: validate the typed model id,
// default its tag to ":latest", add it to the model dropdown, start the
// pull via the main process, and switch back to the initial loading view.
settingsDownloadBtn.addEventListener("click", () => {
  // Trim so a whitespace-only entry is treated as empty.
  let selectedModel = modelSelectDownloadInput.value.trim();

  if (selectedModel.length === 0) {
    alert("Input the model name from ollama.com/library to download a new model!");
    return;
  }
  if (!selectedModel.includes(":")) {
    // Select the latest model version if none is specified.
    selectedModel = `${selectedModel}:latest`;
  }

  const opt = document.createElement("option");
  opt.value = selectedModel;
  // textContent (not innerHTML): this value is user-typed plain text.
  opt.textContent = selectedModel;
  modelSelectInput.appendChild(opt);

  window.electronAPI.setModel(selectedModel);
  window.electronAPI.runOllama();

  modelSelectDownloadInput.value = "";
  chatView.style.display = "none";
  settingsView.style.display = "none";
  document.getElementById("initial-view").style.display = "flex";
});
12 changes: 12 additions & 0 deletions src/index.css
Original file line number Diff line number Diff line change
Expand Up @@ -219,6 +219,15 @@ button:focus {
font-size: 1rem;
width: 100%;
}
/* Text input for typing a model id to download.
   Mirrors the #model-select styling so the settings form looks uniform. */
#model-select-download {
padding: 10px;
background-color: var(--response-background);
border: 1px solid var(--primary-text);
color: var(--primary-text);
border-radius: 4px;
font-size: 1rem;
width: 100%;
}

#model-select:focus {
outline: none;
Expand Down Expand Up @@ -460,4 +469,7 @@ button:focus {
#user-input-text {
flex-grow: 1;
margin-right: 10px;
}
/* Hidden by default.
   NOTE(review): the element is looked up in client.js but nothing visible
   in this diff ever unhides it — confirm it is shown elsewhere or drop it. */
#download-empty-warning{
display: none;
}
10 changes: 8 additions & 2 deletions src/index.html
Original file line number Diff line number Diff line change
Expand Up @@ -89,12 +89,18 @@ <h4>Settings</h4>
<button id="settings-close-btn">X</button>
</div>
<div class="settings-content">
<label for="model-select" id="model-label">Model Name</label>
<input id="model-select" placeholder="Select a model" autofocus>
<label for="model-select" id="model-label">Locally Installed Models</label>
<select id="model-select">
</select>
<div class="button-group">
<button id="cancel-btn">Cancel</button>
<button id="save-btn">Save</button>
</div>
<label for="model-select-download" id="model-download-label">Download new model</label>
<input id="model-select-download" placeholder="Model id from ollama.com/library">
<div class="button-group">
<button id="download-btn">Download</button>
</div>
</div>
</div>
</div>
Expand Down
2 changes: 2 additions & 0 deletions src/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ const {
stopOllama,
loadDocument,
runOllamaModel,
listLocalModels,
} = require("./api.js");

// When debug is set to true, the app will log debug messages to the console
Expand Down Expand Up @@ -74,6 +75,7 @@ app.on("ready", () => {
ipcMain.on("ollama:serve", serveOllama);
ipcMain.on("ollama:run", runOllamaModel);
ipcMain.on("ollama:stop", stopOllama);
ipcMain.on("ollama:list", listLocalModels);

if (app.isPackaged) {
// Check app location
Expand Down
6 changes: 6 additions & 0 deletions src/preload.js
Original file line number Diff line number Diff line change
Expand Up @@ -37,5 +37,11 @@ contextBridge.exposeInMainWorld("electronAPI", {
callback(event, data);
});
},
// Ask the main process for the locally installed models and invoke
// `callback` with each "ollama:list" reply.
// NOTE(review): this registers a new ipcRenderer.on listener every call,
// so invoking it more than once stacks listeners and fires the callback
// multiple times per reply — confirm it is only called once (at start-up)
// or consider ipcRenderer.once.
listLocalModels: (callback) => {
ipcRenderer.send("ollama:list")
ipcRenderer.on("ollama:list", (event, data) => {
callback(event, data);
});
},
setModel: (model) => ipcRenderer.send("model:set", model),
});
46 changes: 44 additions & 2 deletions src/service/ollama/ollama.js
Original file line number Diff line number Diff line change
Expand Up @@ -155,8 +155,8 @@ class Ollama {

if (done) {
// We break before reaching here
// This means the prompt is not finished (maybe crashed?)
throw new Error("Failed to fulfill prompt");
// This means the downloading of the model failed
throw new Error("Failed to download model: "+model);
}

// Parse responses as they are received from the Ollama server
Expand All @@ -173,6 +173,43 @@ class Ollama {
}
}

async listModels() {

const response = await fetch(this.host + "/api/tags", {
method: "GET",
cache: "no-store",
});

if (response.status !== 200) {
let err = `HTTP Error (${response.status}): `;
err += await response.text();

throw new Error(err);
}

const reader = response.body.getReader();

//Reads the stream until list of models is returned
while (true) {
const { done, value } = await reader.read();

if (done) {
// We break before reaching here
// This means the prompt is not finished (maybe crashed?)
throw new Error("Failed to fulfill prompt");
}

// Parse responses are they are received from the Ollama server
for (const buffer of this.parse(value)) {
const json = JSON.parse(buffer);

// done
return json;

}
}
}

async run(model, fn) {
await this.pull(model, fn);
await this.generate(model, "", fn);
Expand Down Expand Up @@ -359,6 +396,10 @@ function serve() {
const ollama = Ollama.getOllama();
return ollama.serve();
}
// Thin module-level wrapper: delegate to the Ollama singleton's
// listModels() and return its promise unchanged.
function list() {
  return Ollama.getOllama().listModels();
}

module.exports = {
run,
Expand All @@ -368,4 +409,5 @@ module.exports = {
clearHistory,
stop,
serve,
list,
};