Multiple fixes for ModelList/Download #3034

Draft · wants to merge 15 commits into base: main
1 change: 1 addition & 0 deletions gpt4all-chat/CHANGELOG.md
@@ -22,6 +22,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
- Fix the local server rejecting min\_p/top\_p less than 1 ([#2996](https://github.com/nomic-ai/gpt4all/pull/2996))
- Fix "regenerate" always forgetting the most recent message ([#3011](https://github.com/nomic-ai/gpt4all/pull/3011))
- Fix loaded chats forgetting context when there is a system prompt ([#3015](https://github.com/nomic-ai/gpt4all/pull/3015))
+- Fix scroll reset upon download, model removal sometimes not working, and models.json cache location ([#3034](https://github.com/nomic-ai/gpt4all/pull/3034))

## [3.3.1] - 2024-09-27 ([v3.3.y](https://github.com/nomic-ai/gpt4all/tree/v3.3.y))

4 changes: 1 addition & 3 deletions gpt4all-chat/qml/AddModelView.qml
@@ -441,9 +441,7 @@ Rectangle {
Layout.alignment: Qt.AlignTop | Qt.AlignHCenter
visible: !isDownloading && (installed || isIncomplete)
Accessible.description: qsTr("Remove model from filesystem")
-onClicked: {
-    Download.removeModel(filename);
-}
+onClicked: Download.removeModel(id)
}

MySettingsButton {
2 changes: 1 addition & 1 deletion gpt4all-chat/qml/ModelSettings.qml
@@ -92,7 +92,7 @@ MySettingsTab {
enabled: root.currentModelInfo.isClone
text: qsTr("Remove")
onClicked: {
-    ModelList.removeClone(root.currentModelInfo);
+    ModelList.uninstall(root.currentModelInfo);
comboBox.currentIndex = 0;
}
}
4 changes: 1 addition & 3 deletions gpt4all-chat/qml/ModelsView.qml
@@ -221,9 +221,7 @@ Rectangle {
Layout.alignment: Qt.AlignTop | Qt.AlignHCenter
visible: !isDownloading && (installed || isIncomplete)
Accessible.description: qsTr("Remove model from filesystem")
-onClicked: {
-    Download.removeModel(filename);
-}
+onClicked: Download.removeModel(id)
}

MySettingsButton {
2 changes: 1 addition & 1 deletion gpt4all-chat/src/database.cpp
@@ -1659,7 +1659,7 @@ void Database::scanQueue()
if (info.isPdf()) {
QPdfDocument doc;
if (doc.load(document_path) != QPdfDocument::Error::None) {
qWarning() << "ERROR: Could not load pdf" << document_id << document_path;;
qWarning() << "ERROR: Could not load pdf" << document_id << document_path;
return updateFolderToIndex(folder_id, countForFolder);
}
title = doc.metaData(QPdfDocument::MetaDataField::Title).toString();
50 changes: 21 additions & 29 deletions gpt4all-chat/src/download.cpp
@@ -328,38 +328,30 @@ void Download::installCompatibleModel(const QString &modelName, const QString &apiKey, const QString &baseUrl)
ModelList::globalInstance()->updateDataByFilename(modelFile, {{ ModelList::InstalledRole, true }});
}

-void Download::removeModel(const QString &modelFile)
+// FIXME(jared): With the current implementation, it is not possible to remove a duplicate
+// model file (same filename, different subdirectory) from within GPT4All
+// without restarting it.
+void Download::removeModel(const QString &id)
{
-    const QString filePath = MySettings::globalInstance()->modelPath() + modelFile;
-    QFile incompleteFile(ModelList::globalInstance()->incompleteDownloadPath(modelFile));
-    if (incompleteFile.exists()) {
-        incompleteFile.remove();
-    }
+    auto *modelList = ModelList::globalInstance();

-    bool shouldRemoveInstalled = false;
-    QFile file(filePath);
-    if (file.exists()) {
-        const ModelInfo info = ModelList::globalInstance()->modelInfoByFilename(modelFile);
-        MySettings::globalInstance()->eraseModel(info);
-        shouldRemoveInstalled = info.installed && !info.isClone() && (info.isDiscovered() || info.isCompatibleApi || info.description() == "" /*indicates sideloaded*/);
-        if (shouldRemoveInstalled)
-            ModelList::globalInstance()->removeInstalled(info);
-        Network::globalInstance()->trackEvent("remove_model", { {"model", modelFile} });
-        file.remove();
-        emit toastMessage(tr("Model \"%1\" is removed.").arg(info.name()));
-    }
+    auto info = modelList->modelInfo(id);
+    if (info.id().isEmpty())
+        return;

-    if (!shouldRemoveInstalled) {
-        QVector<QPair<int, QVariant>> data {
-            { ModelList::InstalledRole, false },
-            { ModelList::BytesReceivedRole, 0 },
-            { ModelList::BytesTotalRole, 0 },
-            { ModelList::TimestampRole, 0 },
-            { ModelList::SpeedRole, QString() },
-            { ModelList::DownloadErrorRole, QString() },
-        };
-        ModelList::globalInstance()->updateDataByFilename(modelFile, data);
-    }
+    Network::globalInstance()->trackEvent("remove_model", { {"model", info.filename()} });

+    // remove incomplete download
+    QFile(modelList->incompleteDownloadPath(info.filename())).remove();

+    // remove file, if this is a real model
+    if (!info.isClone())
+        QFile(info.path()).remove();

+    // remove model list entry
+    modelList->uninstall(info);

+    emit toastMessage(tr("Model \"%1\" is removed.").arg(info.name()));
}

void Download::handleSslErrors(QNetworkReply *reply, const QList<QSslError> &errors)
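The FIXME above, and the switch of the public API from removeModel(filename) to removeModel(id), appear to come down to the same point: a bare filename is not a unique key once two models with the same filename live in different subdirectories. A minimal standalone Qt sketch (hypothetical paths, not part of this PR) of that ambiguity:

// Standalone sketch, not project code: two hypothetical model paths that
// collide when addressed by filename alone, which is why removal is now
// keyed on the model's id instead of its filename.
#include <QDebug>
#include <QFileInfo>
#include <QString>

int main()
{
    const QString a = QStringLiteral("models/llama/model.gguf");   // hypothetical path
    const QString b = QStringLiteral("models/mistral/model.gguf"); // hypothetical path

    // Same filename, two different models: a filename-based lookup such as
    // the old modelInfoByFilename() call cannot tell them apart, but an id can.
    qDebug() << (QFileInfo(a).fileName() == QFileInfo(b).fileName()); // true
    return 0;
}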
2 changes: 1 addition & 1 deletion gpt4all-chat/src/download.h
@@ -65,7 +65,7 @@ class Download : public QObject
Q_INVOKABLE void cancelDownload(const QString &modelFile);
Q_INVOKABLE void installModel(const QString &modelFile, const QString &apiKey);
Q_INVOKABLE void installCompatibleModel(const QString &modelName, const QString &apiKey, const QString &baseUrl);
-Q_INVOKABLE void removeModel(const QString &modelFile);
+Q_INVOKABLE void removeModel(const QString &id);
Q_INVOKABLE bool isFirstStart(bool writeVersion = false) const;

public Q_SLOTS:
2 changes: 0 additions & 2 deletions gpt4all-chat/src/localdocsmodel.cpp
@@ -20,7 +20,6 @@ LocalDocsCollectionsModel::LocalDocsCollectionsModel(QObject *parent)
connect(this, &LocalDocsCollectionsModel::rowsInserted, this, &LocalDocsCollectionsModel::countChanged);
connect(this, &LocalDocsCollectionsModel::rowsRemoved, this, &LocalDocsCollectionsModel::countChanged);
connect(this, &LocalDocsCollectionsModel::modelReset, this, &LocalDocsCollectionsModel::countChanged);
-connect(this, &LocalDocsCollectionsModel::layoutChanged, this, &LocalDocsCollectionsModel::countChanged);
}

bool LocalDocsCollectionsModel::filterAcceptsRow(int sourceRow,
@@ -67,7 +66,6 @@ LocalDocsModel::LocalDocsModel(QObject *parent)
connect(this, &LocalDocsModel::rowsInserted, this, &LocalDocsModel::countChanged);
connect(this, &LocalDocsModel::rowsRemoved, this, &LocalDocsModel::countChanged);
connect(this, &LocalDocsModel::modelReset, this, &LocalDocsModel::countChanged);
-connect(this, &LocalDocsModel::layoutChanged, this, &LocalDocsModel::countChanged);
}

int LocalDocsModel::rowCount(const QModelIndex &parent) const
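The two connections removed above re-emitted countChanged whenever layoutChanged fired, even though a layout change only reorders rows and leaves the row count untouched, which is presumably why they were dropped. A minimal standalone Qt sketch (not project code) showing that distinction:

// Standalone sketch, not project code: sorting a proxy model emits
// layoutChanged, yet rowCount() is unchanged, because the signal reorders
// rows rather than adding or removing them.
#include <QCoreApplication>
#include <QDebug>
#include <QSortFilterProxyModel>
#include <QStringListModel>

int main(int argc, char *argv[])
{
    QCoreApplication app(argc, argv);

    QStringListModel source({"b", "c", "a"});
    QSortFilterProxyModel proxy;
    proxy.setSourceModel(&source);

    QObject::connect(&proxy, &QAbstractItemModel::layoutChanged, [&] {
        // Rows were reordered, but the count did not change.
        qDebug() << "layoutChanged; rowCount is still" << proxy.rowCount();
    });

    qDebug() << "before sort:" << proxy.rowCount(); // 3
    proxy.sort(0);                                  // triggers layoutChanged
    qDebug() << "after sort:"  << proxy.rowCount(); // still 3
    return 0;
}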