modellist: revert a buggy change from #2086
Signed-off-by: Jared Van Bortel <jared@nomic.ai>
cebtenzzre committed Jul 3, 2024
1 parent cd100c8 commit 8545522
Showing 1 changed file with 31 additions and 15 deletions.
46 changes: 31 additions & 15 deletions gpt4all-chat/modellist.cpp
@@ -903,7 +903,22 @@ void ModelList::updateData(const QString &id, const QVector<QPair<int, QVariant>
             break;
         }
         case IsEmbeddingModelRole:
-            info->isEmbeddingModel = value.toBool(); break;
+            {
+                if (value.isValid()) {
+                    info->isEmbeddingModel = value.toBool();
+                } else if (!info->checkedEmbeddingModel) {
+                    auto filename = info->filename();
+                    if (!info->installed || info->isOnline) {
+                        info->isEmbeddingModel = false; // can only check installed offline models
+                    } else {
+                        // read GGUF and decide based on model architecture
+                        auto path = (info->dirpath + filename).toStdString();
+                        info->isEmbeddingModel = LLModel::Implementation::isEmbeddingModel(path);
+                    }
+                }
+                info->checkedEmbeddingModel = true;
+                break;
+            }
         case TemperatureRole:
             info->setTemperature(value.toDouble()); break;
         case TopPRole:
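The rewritten case dispatches on QVariant validity: a default-constructed QVariant is invalid, so call sites can append { IsEmbeddingModelRole, QVariant() } as a sentinel meaning "probe the GGUF file once if this model has not been checked yet", while a valid boolean still sets the flag directly. A minimal self-contained sketch of that dispatch, with a hypothetical Info struct and detectFromGguf() placeholder standing in for ModelInfo and LLModel::Implementation::isEmbeddingModel():

```cpp
#include <QVariant>
#include <iostream>

// Hypothetical stand-ins for the relevant ModelInfo fields and the GGUF probe.
struct Info {
    bool isEmbeddingModel = false;
    bool checkedEmbeddingModel = false;
};

// Placeholder for LLModel::Implementation::isEmbeddingModel(path).
static bool detectFromGguf() { return true; }

void applyEmbeddingRole(Info &info, const QVariant &value) {
    if (value.isValid()) {
        info.isEmbeddingModel = value.toBool();   // explicit boolean always wins
    } else if (!info.checkedEmbeddingModel) {
        info.isEmbeddingModel = detectFromGguf(); // invalid QVariant: detect once
    }
    info.checkedEmbeddingModel = true;            // never probe the same file twice
}

int main() {
    Info info;
    applyEmbeddingRole(info, QVariant());     // sentinel: triggers detection
    applyEmbeddingRole(info, QVariant());     // already checked: no re-probe
    applyEmbeddingRole(info, QVariant(true)); // explicit value: applied directly
    std::cout << info.isEmbeddingModel << '\n';
    return 0;
}
```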
@@ -956,21 +971,11 @@ void ModelList::updateData(const QString &id, const QVector<QPair<int, QVariant>
         }
 
         // Extra guarantee that these always remains in sync with filesystem
-        QString modelPath = info->dirpath + info->filename();
-        const QFileInfo fileInfo(modelPath);
+        const QFileInfo fileInfo(info->dirpath + info->filename());
         info->installed = fileInfo.exists();
         const QFileInfo incompleteInfo(incompleteDownloadPath(info->filename()));
         info->isIncomplete = incompleteInfo.exists();
 
-        // check installed, discovered/sideloaded models only (including clones)
-        if (!info->checkedEmbeddingModel && !info->isEmbeddingModel && info->installed
-            && (info->isDiscovered() || info->description().isEmpty()))
-        {
-            // read GGUF and decide based on model architecture
-            info->isEmbeddingModel = LLModel::Implementation::isEmbeddingModel(modelPath.toStdString());
-            info->checkedEmbeddingModel = true;
-        }
-
         if (shouldSort) {
             auto s = m_discoverSort;
             auto d = m_discoverSortDirection;
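With the eager probe removed, the filesystem-sync step above is again a pure existence check: installed and isIncomplete are derived from what is on disk, and isEmbeddingModel is only touched through the role-based path. A hedged sketch of that retained sync logic (the struct and path arguments are illustrative stand-ins, not the real ModelList members):

```cpp
#include <QFileInfo>
#include <QString>

// Illustrative stand-in for the two flags ModelList keeps in sync with disk.
struct FileFlags {
    bool installed;    // the model file exists on disk
    bool isIncomplete; // a partial download exists alongside it
};

FileFlags syncWithFilesystem(const QString &dirpath, const QString &filename,
                             const QString &incompleteDownloadPath) {
    return {
        QFileInfo(dirpath + filename).exists(),
        QFileInfo(incompleteDownloadPath).exists(),
    };
}
```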
@@ -999,9 +1004,6 @@ void ModelList::resortModel()
 
 void ModelList::updateDataByFilename(const QString &filename, QVector<QPair<int, QVariant>> data)
 {
-    if (data.isEmpty())
-        return; // no-op
-
     QVector<QString> modelsById;
     {
         QMutexLocker locker(&m_mutex);
@@ -1015,6 +1017,12 @@ void ModelList::updateDataByFilename(const QString &filename, QVector<QPair<int,
         return;
     }
 
+    if (data.isEmpty())
+        return;
+
+    if (data.constLast().first != IsEmbeddingModelRole)
+        data.append({ IsEmbeddingModelRole, QVariant() });
+
     for (const QString &id : modelsById)
         updateData(id, data);
 }
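updateDataByFilename now performs its empty-batch check after the ID lookup and then forces the sentinel to be the last entry, so updateData evaluates IsEmbeddingModelRole only after the other fields in the same batch have been applied. A minimal sketch of that append-last guard (the role enum is a stand-in, not the real ModelList roles):

```cpp
#include <QPair>
#include <QVariant>
#include <QVector>

enum Roles { FilenameRole, InstalledRole, IsEmbeddingModelRole }; // stand-ins

// Ensure the lazy-detection sentinel terminates the batch so it is
// processed after every other field update in the same updateData() call.
void terminateWithSentinel(QVector<QPair<int, QVariant>> &data) {
    if (data.isEmpty())
        return; // nothing to update
    if (data.constLast().first != IsEmbeddingModelRole)
        data.append({ IsEmbeddingModelRole, QVariant() });
}
```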
@@ -1071,6 +1079,7 @@ QString ModelList::clone(const ModelInfo &model)
         { ModelList::RepeatPenaltyTokensRole, model.repeatPenaltyTokens() },
         { ModelList::PromptTemplateRole, model.promptTemplate() },
         { ModelList::SystemPromptRole, model.systemPrompt() },
+        { ModelList::IsEmbeddingModelRole, QVariant() },
     };
     updateData(id, data);
     return id;
@@ -1491,6 +1500,7 @@ void ModelList::parseModelsJsonFile(const QByteArray &jsonData, bool save)
             data.append({ ModelList::PromptTemplateRole, obj["promptTemplate"].toString() });
         if (obj.contains("systemPrompt"))
             data.append({ ModelList::SystemPromptRole, obj["systemPrompt"].toString() });
+        data.append({ ModelList::IsEmbeddingModelRole, QVariant() });
         updateData(id, data);
     }
 
@@ -1521,6 +1531,7 @@ void ModelList::parseModelsJsonFile(const QByteArray &jsonData, bool save)
             { ModelList::QuantRole, "NA" },
             { ModelList::TypeRole, "GPT" },
             { ModelList::UrlRole, "https://api.openai.com/v1/chat/completions"},
+            { ModelList::IsEmbeddingModelRole, QVariant() },
         };
         updateData(id, data);
     }
@@ -1549,6 +1560,7 @@ void ModelList::parseModelsJsonFile(const QByteArray &jsonData, bool save)
             { ModelList::QuantRole, "NA" },
             { ModelList::TypeRole, "GPT" },
             { ModelList::UrlRole, "https://api.openai.com/v1/chat/completions"},
+            { ModelList::IsEmbeddingModelRole, QVariant() },
         };
         updateData(id, data);
     }
@@ -1580,6 +1592,7 @@ void ModelList::parseModelsJsonFile(const QByteArray &jsonData, bool save)
             { ModelList::QuantRole, "NA" },
             { ModelList::TypeRole, "Mistral" },
             { ModelList::UrlRole, "https://api.mistral.ai/v1/chat/completions"},
+            { ModelList::IsEmbeddingModelRole, QVariant() },
         };
         updateData(id, data);
     }
@@ -1605,6 +1618,7 @@ void ModelList::parseModelsJsonFile(const QByteArray &jsonData, bool save)
             { ModelList::QuantRole, "NA" },
             { ModelList::TypeRole, "Mistral" },
             { ModelList::UrlRole, "https://api.mistral.ai/v1/chat/completions"},
+            { ModelList::IsEmbeddingModelRole, QVariant() },
         };
         updateData(id, data);
     }
@@ -1631,6 +1645,7 @@ void ModelList::parseModelsJsonFile(const QByteArray &jsonData, bool save)
             { ModelList::QuantRole, "NA" },
             { ModelList::TypeRole, "Mistral" },
             { ModelList::UrlRole, "https://api.mistral.ai/v1/chat/completions"},
+            { ModelList::IsEmbeddingModelRole, QVariant() },
         };
         updateData(id, data);
     }
@@ -1766,6 +1781,7 @@ void ModelList::updateModelsFromSettings()
             const QString systemPrompt = settings.value(g + "/systemPrompt").toString();
             data.append({ ModelList::SystemPromptRole, systemPrompt });
         }
+        data.append({ ModelList::IsEmbeddingModelRole, QVariant() });
         updateData(id, data);
     }
 }
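Every remaining hunk applies the same pattern: each call site ends the batch it hands to updateData with the sentinel entry, so detection runs for built-in, OpenAI, Mistral, and settings-derived models alike. A representative, self-contained batch using stand-in role names and hypothetical values:

```cpp
#include <QPair>
#include <QString>
#include <QVariant>
#include <QVector>

// Stand-in roles; the real ones live on ModelList.
enum Roles { NameRole, FilenameRole, IsEmbeddingModelRole };

int main() {
    QVector<QPair<int, QVariant>> data {
        { NameRole, QStringLiteral("example-model") },  // hypothetical values
        { FilenameRole, QStringLiteral("example.gguf") },
        { IsEmbeddingModelRole, QVariant() },            // sentinel last: detect lazily
    };
    // The batch is well-formed if the sentinel terminates it.
    return data.constLast().first == IsEmbeddingModelRole ? 0 : 1;
}
```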