Mirror of https://git.mirrors.martin98.com/https://github.com/Ultimaker/Cura, synced 2025-08-16 10:05:56 +08:00
Merge pull request #20467 from Ultimaker/CURA-12156_dont_zip_downloadable_plugins
[CURA-12156] Don't zip re-downloadable plugins.
Commit a90b7fb59b
@@ -1,4 +1,4 @@
-# Copyright (c) 2018 Ultimaker B.V.
+# Copyright (c) 2025 UltiMaker
 # Cura is released under the terms of the LGPLv3 or higher.
 
 from typing import Tuple, Optional, TYPE_CHECKING, Dict, Any
@@ -9,14 +9,10 @@ if TYPE_CHECKING:
 
 
 class Backups:
-    """The back-ups API provides a version-proof bridge between Cura's
-
-    BackupManager and plug-ins that hook into it.
+    """The back-ups API provides a version-proof bridge between Cura's BackupManager and plug-ins that hook into it.
 
     Usage:
 
     .. code-block:: python
 
         from cura.API import CuraAPI
         api = CuraAPI()
         api.backups.createBackup()
@@ -26,19 +22,22 @@ class Backups:
     def __init__(self, application: "CuraApplication") -> None:
         self.manager = BackupsManager(application)
 
-    def createBackup(self) -> Tuple[Optional[bytes], Optional[Dict[str, Any]]]:
+    def createBackup(self, available_remote_plugins: frozenset[str] = frozenset()) -> Tuple[Optional[bytes], Optional[Dict[str, Any]]]:
         """Create a new back-up using the BackupsManager.
 
         :return: Tuple containing a ZIP file with the back-up data and a dict with metadata about the back-up.
         """
 
-        return self.manager.createBackup()
+        return self.manager.createBackup(available_remote_plugins)
 
-    def restoreBackup(self, zip_file: bytes, meta_data: Dict[str, Any]) -> None:
+    def restoreBackup(self, zip_file: bytes, meta_data: Dict[str, Any], auto_close: bool = True) -> None:
         """Restore a back-up using the BackupsManager.
 
         :param zip_file: A ZIP file containing the actual back-up data.
         :param meta_data: Some metadata needed for restoring a back-up, like the Cura version number.
         """
 
-        return self.manager.restoreBackup(zip_file, meta_data)
+        return self.manager.restoreBackup(zip_file, meta_data, auto_close=auto_close)
+
+    def shouldReinstallDownloadablePlugins(self) -> bool:
+        return self.manager.shouldReinstallDownloadablePlugins()
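Taken together, the Backups API wrapper now lets a caller pass the set of Marketplace package ids whose plug-ins can be left out of the archive, and restore a backup without forcing Cura to close. A minimal sketch of how a plug-in might drive this (the package ids are hypothetical; in the real flow they come from the Marketplace query in CreateBackupJob further down):

from cura.API import CuraAPI

api = CuraAPI()
# Hypothetical ids; in practice these are the package_ids returned by the Marketplace.
remote_plugins = frozenset({"ExamplePostProcessing", "ExampleReader"})
zip_data, meta_data = api.backups.createBackup(available_remote_plugins = remote_plugins)

# Restore later without closing Cura right away (zip_data/meta_data may be None if the backup failed).
if zip_data and meta_data:
    api.backups.restoreBackup(zip_data, meta_data, auto_close = False)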

@@ -1,5 +1,8 @@
-# Copyright (c) 2021 Ultimaker B.V.
+# Copyright (c) 2025 UltiMaker
 # Cura is released under the terms of the LGPLv3 or higher.
+import tempfile
 
+import json
+
 import io
 import os
@@ -7,12 +10,13 @@ import re
 import shutil
 from copy import deepcopy
 from zipfile import ZipFile, ZIP_DEFLATED, BadZipfile
-from typing import Dict, Optional, TYPE_CHECKING, List
+from typing import Callable, Dict, Optional, TYPE_CHECKING, List
 
 from UM import i18nCatalog
 from UM.Logger import Logger
 from UM.Message import Message
 from UM.Platform import Platform
+from UM.PluginRegistry import PluginRegistry
 from UM.Resources import Resources
 from UM.Version import Version
 
@@ -30,6 +34,7 @@ class Backup:
     """These files should be ignored when making a backup."""
 
     IGNORED_FOLDERS = []  # type: List[str]
+    """These folders should be ignored when making a backup."""
 
     SECRETS_SETTINGS = ["general/ultimaker_auth_data"]
     """Secret preferences that need to obfuscated when making a backup of Cura"""
@@ -42,7 +47,7 @@ class Backup:
         self.zip_file = zip_file  # type: Optional[bytes]
         self.meta_data = meta_data  # type: Optional[Dict[str, str]]
 
-    def makeFromCurrent(self) -> None:
+    def makeFromCurrent(self, available_remote_plugins: frozenset[str] = frozenset()) -> None:
         """Create a back-up from the current user config folder."""
 
         cura_release = self._application.getVersion()
@@ -68,7 +73,7 @@ class Backup:
 
         # Create an empty buffer and write the archive to it.
         buffer = io.BytesIO()
-        archive = self._makeArchive(buffer, version_data_dir)
+        archive = self._makeArchive(buffer, version_data_dir, available_remote_plugins)
         if archive is None:
             return
         files = archive.namelist()
@@ -77,9 +82,7 @@ class Backup:
         machine_count = max(len([s for s in files if "machine_instances/" in s]) - 1, 0)  # If people delete their profiles but not their preferences, it can still make a backup, and report -1 profiles. Server crashes on this.
         material_count = max(len([s for s in files if "materials/" in s]) - 1, 0)
         profile_count = max(len([s for s in files if "quality_changes/" in s]) - 1, 0)
-        # We don't store plugins anymore, since if you can make backups, you have an account (and the plugins are
-        # on the marketplace anyway)
-        plugin_count = 0
+        plugin_count = len([s for s in files if "plugin.json" in s])
         # Store the archive and metadata so the BackupManager can fetch them when needed.
         self.zip_file = buffer.getvalue()
         self.meta_data = {
@@ -92,22 +95,72 @@ class Backup:
         # Restore the obfuscated settings
         self._illuminate(**secrets)
 
-    def _makeArchive(self, buffer: "io.BytesIO", root_path: str) -> Optional[ZipFile]:
+    def _fillToInstallsJson(self, file_path: str, reinstall_on_restore: frozenset[str], add_to_archive: Callable[[str, str], None]) -> Optional[str]:
+        """ Moves all plugin-data (in a config-file) for plugins that could be (re)installed from the Marketplace from
+        'installed' to 'to_installs' before adding that file to the archive.
+
+        Note that the 'filename'-entry in the package-data (of the plugins) might not be valid anymore on restore.
+        We'll replace it on restore instead, as that's the time when the new package is downloaded.
+
+        :param file_path: Absolute path to the packages-file.
+        :param reinstall_on_restore: A set of plugins that _can_ be reinstalled from the Marketplace.
+        :param add_to_archive: A function/lambda that takes a filename and adds it to the archive (as the 2nd name).
+        """
+        with open(file_path, "r") as file:
+            data = json.load(file)
+            reinstall, keep_in = {}, {}
+            for install_id, install_info in data["installed"].items():
+                (reinstall if install_id in reinstall_on_restore else keep_in)[install_id] = install_info
+            data["installed"] = keep_in
+            data["to_install"].update(reinstall)
+            if data is not None:
+                tmpfile = tempfile.NamedTemporaryFile(delete_on_close=False)
+                with open(tmpfile.name, "w") as outfile:
+                    json.dump(data, outfile)
+                add_to_archive(tmpfile.name, file_path)
+                return tmpfile.name
+        return None
+
+    def _findRedownloadablePlugins(self, available_remote_plugins: frozenset) -> (frozenset[str], frozenset[str]):
+        """ Find all plugins that should be able to be reinstalled from the Marketplace.
+
+        :param plugins_path: Path to all plugins in the user-space.
+        :return: Tuple of a set of plugin-ids and a set of plugin-paths.
+        """
+        plugin_reg = PluginRegistry.getInstance()
+        id = "id"
+        plugins = [v for v in plugin_reg.getAllMetaData()
+                   if v[id] in available_remote_plugins and not plugin_reg.isBundledPlugin(v[id])]
+        return frozenset([v[id] for v in plugins]), frozenset([v["location"] for v in plugins])
+
+    def _makeArchive(self, buffer: "io.BytesIO", root_path: str, available_remote_plugins: frozenset) -> Optional[ZipFile]:
         """Make a full archive from the given root path with the given name.
 
         :param root_path: The root directory to archive recursively.
         :return: The archive as bytes.
         """
         ignore_string = re.compile("|".join(self.IGNORED_FILES + self.IGNORED_FOLDERS))
+        reinstall_instead_ids, reinstall_instead_paths = self._findRedownloadablePlugins(available_remote_plugins)
+        tmpfiles = []
         try:
             archive = ZipFile(buffer, "w", ZIP_DEFLATED)
-            for root, folders, files in os.walk(root_path):
+            add_path_to_archive = lambda path, alt_path: archive.write(path, alt_path[len(root_path) + len(os.sep):])
+            for root, folders, files in os.walk(root_path, topdown=True):
                 for item_name in folders + files:
                     absolute_path = os.path.join(root, item_name)
-                    if ignore_string.search(absolute_path):
+                    if ignore_string.search(absolute_path) or any([absolute_path.startswith(x) for x in reinstall_instead_paths]):
                         continue
-                    archive.write(absolute_path, absolute_path[len(root_path) + len(os.sep):])
+                    if item_name == "packages.json":
+                        tmpfiles.append(
+                            self._fillToInstallsJson(absolute_path, reinstall_instead_ids, add_path_to_archive))
+                    else:
+                        add_path_to_archive(absolute_path, absolute_path)
             archive.close()
+            for tmpfile_path in tmpfiles:
+                try:
+                    os.remove(tmpfile_path)
+                except IOError as ex:
+                    Logger.warning(f"Couldn't remove temporary file '{tmpfile_path}' because '{ex}'.")
             return archive
         except (IOError, OSError, BadZipfile) as error:
             Logger.log("e", "Could not create archive from user data directory: %s", error)
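The core of this change is the 'installed' to 'to_install' move that _fillToInstallsJson performs on the packages file before it is archived. A rough, self-contained illustration of that move (the package ids and entry contents are made up; only the "installed"/"to_install" handling mirrors the code above):

# Before: both plug-ins live under "installed".
data = {
    "installed": {
        "BundledExample": {"package_info": {}},      # hypothetical: not on the Marketplace, stays in the backup
        "MarketplaceExample": {"package_info": {}},  # hypothetical: re-downloadable, should not be zipped
    },
    "to_install": {},
}
reinstall_on_restore = frozenset({"MarketplaceExample"})

reinstall, keep_in = {}, {}
for install_id, install_info in data["installed"].items():
    (reinstall if install_id in reinstall_on_restore else keep_in)[install_id] = install_info
data["installed"] = keep_in
data["to_install"].update(reinstall)
print(data)  # {'installed': {'BundledExample': ...}, 'to_install': {'MarketplaceExample': ...}}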

@@ -1,4 +1,4 @@
-# Copyright (c) 2018 Ultimaker B.V.
+# Copyright (c) 2025 UltiMaker
 # Cura is released under the terms of the LGPLv3 or higher.
 
 from typing import Dict, Optional, Tuple, TYPE_CHECKING
@@ -22,7 +22,10 @@ class BackupsManager:
     def __init__(self, application: "CuraApplication") -> None:
         self._application = application
 
-    def createBackup(self) -> Tuple[Optional[bytes], Optional[Dict[str, str]]]:
+    def shouldReinstallDownloadablePlugins(self) -> bool:
+        return True
+
+    def createBackup(self, available_remote_plugins: frozenset[str] = frozenset()) -> Tuple[Optional[bytes], Optional[Dict[str, str]]]:
         """
         Get a back-up of the current configuration.
 
@@ -31,17 +34,18 @@ class BackupsManager:
 
         self._disableAutoSave()
         backup = Backup(self._application)
-        backup.makeFromCurrent()
+        backup.makeFromCurrent(available_remote_plugins if self.shouldReinstallDownloadablePlugins() else frozenset())
         self._enableAutoSave()
         # We don't return a Backup here because we want plugins only to interact with our API and not full objects.
         return backup.zip_file, backup.meta_data
 
-    def restoreBackup(self, zip_file: bytes, meta_data: Dict[str, str]) -> None:
+    def restoreBackup(self, zip_file: bytes, meta_data: Dict[str, str], auto_close: bool = True) -> None:
         """
         Restore a back-up from a given ZipFile.
 
         :param zip_file: A bytes object containing the actual back-up.
         :param meta_data: A dict containing some metadata that is needed to restore the back-up correctly.
+        :param auto_close: Normally, Cura will need to close immediately after restoring the back-up.
         """
 
         if not meta_data.get("cura_release", None):
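shouldReinstallDownloadablePlugins() is a simple hook that currently always returns True; when it returns False, createBackup() passes an empty frozenset down, _findRedownloadablePlugins() matches nothing, and every plug-in is zipped into the backup as before. Purely as a sketch (this subclass is hypothetical and not part of the change), a variant that wanted the old keep-everything behaviour could override the hook:

class KeepEverythingBackupsManager(BackupsManager):
    def shouldReinstallDownloadablePlugins(self) -> bool:
        return False  # fall back to zipping all plug-ins into the backup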
@@ -54,7 +58,7 @@ class BackupsManager:
         backup = Backup(self._application, zip_file = zip_file, meta_data = meta_data)
         restored = backup.restore()
 
-        if restored:
+        if restored and auto_close:
             # At this point, Cura will need to restart for the changes to take effect.
             # We don't want to store the data at this point as that would override the just-restored backup.
             self._application.windowClosed(save_data = False)

@@ -1,4 +1,4 @@
-# Copyright (c) 2020 Ultimaker B.V.
+# Copyright (c) 2025 UltiMaker
 # Cura is released under the terms of the LGPLv3 or higher.
 import json
 import threading
@@ -13,11 +13,14 @@ from UM.Message import Message
 from UM.TaskManagement.HttpRequestManager import HttpRequestManager
 from UM.TaskManagement.HttpRequestScope import JsonDecoratorScope
 from UM.i18n import i18nCatalog
+from cura.ApplicationMetadata import CuraSDKVersion
 from cura.CuraApplication import CuraApplication
 from cura.UltimakerCloud.UltimakerCloudScope import UltimakerCloudScope
+import cura.UltimakerCloud.UltimakerCloudConstants as UltimakerCloudConstants
 
 catalog = i18nCatalog("cura")
 
+PACKAGES_URL = f"{UltimakerCloudConstants.CuraCloudAPIRoot}/cura-packages/v{UltimakerCloudConstants.CuraCloudAPIVersion}/cura/v{CuraSDKVersion}/packages"
 
 class CreateBackupJob(Job):
     """Creates backup zip, requests upload url and uploads the backup file to cloud storage."""
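PACKAGES_URL is assembled from the cloud constants and the current SDK version; run() below appends a package_type filter when asking the Marketplace which plug-ins exist. Illustrative only (the concrete host and version numbers come from UltimakerCloudConstants and CuraSDKVersion at runtime):

request_url = f"{PACKAGES_URL}?package_type=plugin"  # the query used below to list remotely available plug-ins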
@@ -40,23 +43,54 @@ class CreateBackupJob(Job):
         self._job_done = threading.Event()
         """Set when the job completes. Does not indicate success."""
         self.backup_upload_error_message = ""
-        """After the job completes, an empty string indicates success. Othrerwise, the value is a translated message."""
+        """After the job completes, an empty string indicates success. Otherwise, the value is a translated message."""
 
+    def _setPluginFetchErrorMessage(self, error_msg: str) -> None:
+        Logger.error(f"Fetching plugins for backup resulted in error: {error_msg}")
+        self.backup_upload_error_message = "Couldn't update currently available plugins, backup stopped."
+        self._upload_message.hide()
+        self._job_done.set()
+
     def run(self) -> None:
-        upload_message = Message(catalog.i18nc("@info:backup_status", "Creating your backup..."),
+        self._upload_message = Message(catalog.i18nc("@info:backup_status", "Fetch re-downloadable package-ids..."),
                                  title = self.MESSAGE_TITLE,
                                  progress = -1)
-        upload_message.show()
+        self._upload_message.show()
+        CuraApplication.getInstance().processEvents()
+
+        if CuraApplication.getInstance().getCuraAPI().backups.shouldReinstallDownloadablePlugins():
+            request_url = f"{PACKAGES_URL}?package_type=plugin"
+            scope = JsonDecoratorScope(UltimakerCloudScope(CuraApplication.getInstance()))
+            HttpRequestManager.getInstance().get(
+                request_url,
+                scope=scope,
+                callback=self._continueRun,
+                error_callback=lambda reply, error: self._setPluginFetchErrorMessage(str(error)),
+            )
+        else:
+            self._continueRun()
+
+    def _continueRun(self, reply: "QNetworkReply" = None) -> None:
+        if reply is not None:
+            response_data = HttpRequestManager.readJSON(reply)
+            if "data" not in response_data:
+                self._setPluginFetchErrorMessage(f"Missing 'data' from response. Keys in response: {response_data.keys()}")
+                return
+            available_remote_plugins = frozenset({v["package_id"] for v in response_data["data"]})
+        else:
+            available_remote_plugins = frozenset()
+
+        self._upload_message.setText(catalog.i18nc("@info:backup_status", "Creating your backup..."))
         CuraApplication.getInstance().processEvents()
         cura_api = CuraApplication.getInstance().getCuraAPI()
-        self._backup_zip, backup_meta_data = cura_api.backups.createBackup()
+        self._backup_zip, backup_meta_data = cura_api.backups.createBackup(available_remote_plugins)
 
         if not self._backup_zip or not backup_meta_data:
             self.backup_upload_error_message = catalog.i18nc("@info:backup_status", "There was an error while creating your backup.")
-            upload_message.hide()
+            self._upload_message.hide()
             return
 
-        upload_message.setText(catalog.i18nc("@info:backup_status", "Uploading your backup..."))
+        self._upload_message.setText(catalog.i18nc("@info:backup_status", "Uploading your backup..."))
         CuraApplication.getInstance().processEvents()
 
         # Create an upload entry for the backup.
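For reference, a hypothetical Marketplace response as consumed by _continueRun(); only the "data" list and its "package_id" fields are taken from the code above, the concrete ids are made up:

response_data = {
    "data": [
        {"package_id": "ExamplePostProcessing"},
        {"package_id": "ExampleReader"},
    ]
}
available_remote_plugins = frozenset({v["package_id"] for v in response_data["data"]})
# -> frozenset({'ExamplePostProcessing', 'ExampleReader'}), handed to createBackup() so these plug-ins are not zipped.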
@@ -64,13 +98,18 @@ class CreateBackupJob(Job):
         backup_meta_data["description"] = "{}.backup.{}.cura.zip".format(timestamp, backup_meta_data["cura_release"])
         self._requestUploadSlot(backup_meta_data, len(self._backup_zip))
 
-        self._job_done.wait()
+        # Note: One 'process events' call wasn't enough with the changed situation somehow.
+        for _ in range(5000):
+            CuraApplication.getInstance().processEvents()
+            if self._job_done.wait(0.02):
+                break
 
         if self.backup_upload_error_message == "":
-            upload_message.setText(catalog.i18nc("@info:backup_status", "Your backup has finished uploading."))
-            upload_message.setProgress(None)  # Hide progress bar
+            self._upload_message.setText(catalog.i18nc("@info:backup_status", "Your backup has finished uploading."))
+            self._upload_message.setProgress(None)  # Hide progress bar
         else:
             # some error occurred. This error is presented to the user by DrivePluginExtension
-            upload_message.hide()
+            self._upload_message.hide()
 
     def _requestUploadSlot(self, backup_metadata: Dict[str, Any], backup_size: int) -> None:
         """Request a backup upload slot from the API.
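The bounded poll loop above replaces the indefinite self._job_done.wait() so the Qt event loop keeps being pumped while the upload completes. The same pattern, factored into a stand-alone helper purely as a sketch (the helper and its name are not part of the change):

import threading

from cura.CuraApplication import CuraApplication

def wait_while_pumping(event: threading.Event, iterations: int = 5000, time_slice: float = 0.02) -> bool:
    """Wait for the event while keeping the Qt event loop responsive; gives up after roughly iterations * time_slice seconds."""
    for _ in range(iterations):
        CuraApplication.getInstance().processEvents()
        if event.wait(time_slice):
            return True
    return False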
@@ -83,7 +122,6 @@ class CreateBackupJob(Job):
                                        "metadata": backup_metadata
                                        }
                              }).encode()
-
         HttpRequestManager.getInstance().put(
             self._api_backup_url,
             data = payload,

@@ -1,8 +1,9 @@
-# Copyright (c) 2021 Ultimaker B.V.
+# Copyright (c) 2025 UltiMaker
 # Cura is released under the terms of the LGPLv3 or higher.
 
 import base64
 import hashlib
+import json
+import os
 import threading
 from tempfile import NamedTemporaryFile
 from typing import Optional, Any, Dict
@@ -12,9 +13,16 @@ from PyQt6.QtNetwork import QNetworkReply, QNetworkRequest
 from UM.Job import Job
 from UM.Logger import Logger
 from UM.PackageManager import catalog
+from UM.Resources import Resources
 from UM.TaskManagement.HttpRequestManager import HttpRequestManager
-from cura.CuraApplication import CuraApplication
+from UM.Version import Version
+
+from cura.ApplicationMetadata import CuraSDKVersion
+from cura.CuraApplication import CuraApplication
+from cura.UltimakerCloud.UltimakerCloudScope import UltimakerCloudScope
+import cura.UltimakerCloud.UltimakerCloudConstants as UltimakerCloudConstants
 
+PACKAGES_URL_TEMPLATE = f"{UltimakerCloudConstants.CuraCloudAPIRoot}/cura-packages/v{UltimakerCloudConstants.CuraCloudAPIVersion}/cura/v{{0}}/packages/{{1}}/download"
 
 class RestoreBackupJob(Job):
     """Downloads a backup and overwrites local configuration with the backup.
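PACKAGES_URL_TEMPLATE carries two placeholders that _onRestoreRequestCompleted() below fills per plug-in with the SDK version and package id read from the restored packages.json. Illustrative only; both values here are made up:

download_url = PACKAGES_URL_TEMPLATE.format("8.7.0", "ExamplePostProcessing")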
@@ -38,7 +46,6 @@ class RestoreBackupJob(Job):
         self.restore_backup_error_message = ""
 
     def run(self) -> None:
-
         url = self._backup.get("download_url")
         assert url is not None
 
@@ -48,7 +55,11 @@ class RestoreBackupJob(Job):
             error_callback = self._onRestoreRequestCompleted
         )
 
-        self._job_done.wait()  # A job is considered finished when the run function completes
+        # Note: Just to be sure, use the same structure here as in CreateBackupJob.
+        for _ in range(5000):
+            CuraApplication.getInstance().processEvents()
+            if self._job_done.wait(0.02):
+                break
 
     def _onRestoreRequestCompleted(self, reply: QNetworkReply, error: Optional["QNetworkReply.NetworkError"] = None) -> None:
         if not HttpRequestManager.replyIndicatesSuccess(reply, error):
@@ -60,8 +71,8 @@ class RestoreBackupJob(Job):
 
         # We store the file in a temporary path fist to ensure integrity.
         try:
-            temporary_backup_file = NamedTemporaryFile(delete = False)
-            with open(temporary_backup_file.name, "wb") as write_backup:
+            self._temporary_backup_file = NamedTemporaryFile(delete_on_close = False)
+            with open(self._temporary_backup_file.name, "wb") as write_backup:
                 app = CuraApplication.getInstance()
                 bytes_read = reply.read(self.DISK_WRITE_BUFFER_SIZE)
                 while bytes_read:
@@ -69,23 +80,98 @@ class RestoreBackupJob(Job):
                     bytes_read = reply.read(self.DISK_WRITE_BUFFER_SIZE)
                     app.processEvents()
         except EnvironmentError as e:
-            Logger.log("e", f"Unable to save backed up files due to computer limitations: {str(e)}")
+            Logger.error(f"Unable to save backed up files due to computer limitations: {str(e)}")
             self.restore_backup_error_message = self.DEFAULT_ERROR_MESSAGE
             self._job_done.set()
             return
 
-        if not self._verifyMd5Hash(temporary_backup_file.name, self._backup.get("md5_hash", "")):
+        if not self._verifyMd5Hash(self._temporary_backup_file.name, self._backup.get("md5_hash", "")):
             # Don't restore the backup if the MD5 hashes do not match.
             # This can happen if the download was interrupted.
-            Logger.log("w", "Remote and local MD5 hashes do not match, not restoring backup.")
+            Logger.error("Remote and local MD5 hashes do not match, not restoring backup.")
             self.restore_backup_error_message = self.DEFAULT_ERROR_MESSAGE
+            self._job_done.set()
+            return
 
         # Tell Cura to place the backup back in the user data folder.
-        with open(temporary_backup_file.name, "rb") as read_backup:
+        metadata = self._backup.get("metadata", {})
+        with open(self._temporary_backup_file.name, "rb") as read_backup:
             cura_api = CuraApplication.getInstance().getCuraAPI()
-            cura_api.backups.restoreBackup(read_backup.read(), self._backup.get("metadata", {}))
+            cura_api.backups.restoreBackup(read_backup.read(), metadata, auto_close=False)
 
-        self._job_done.set()
+        # Read packages data-file, to get the 'to_install' plugin-ids.
+        version_to_restore = Version(metadata.get("cura_release", "dev"))
+        version_str = f"{version_to_restore.getMajor()}.{version_to_restore.getMinor()}"
+        packages_path = os.path.abspath(os.path.join(os.path.abspath(
+            Resources.getConfigStoragePath()), "..", version_str, "packages.json"))
+        if not os.path.exists(packages_path):
+            Logger.error(f"Can't find path '{packages_path}' to tell what packages should be redownloaded.")
+            self.restore_backup_error_message = self.DEFAULT_ERROR_MESSAGE
+            self._job_done.set()
+            return
+
+        to_install = {}
+        try:
+            with open(packages_path, "r") as packages_file:
+                packages_json = json.load(packages_file)
+                if "to_install" in packages_json:
+                    for package_data in packages_json["to_install"].values():
+                        if "package_info" not in package_data:
+                            continue
+                        package_info = package_data["package_info"]
+                        if "package_id" in package_info and "sdk_version_semver" in package_info:
+                            to_install[package_info["package_id"]] = package_info["sdk_version_semver"]
+        except IOError as ex:
+            Logger.error(f"Couldn't open '{packages_path}' because '{str(ex)}' to get packages to re-install.")
+            self.restore_backup_error_message = self.DEFAULT_ERROR_MESSAGE
+            self._job_done.set()
+            return
+
+        if len(to_install) < 1:
+            Logger.info("No packages to reinstall, early out.")
+            self._job_done.set()
+            return
+
+        # Download all re-installable plugins packages, so they can be put back on start-up.
+        redownload_errors = []
+        def packageDownloadCallback(package_id: str, msg: "QNetworkReply", err: "QNetworkReply.NetworkError" = None) -> None:
+            if err is not None or HttpRequestManager.safeHttpStatus(msg) != 200:
+                redownload_errors.append(err)
+            del to_install[package_id]
+
+            try:
+                with NamedTemporaryFile(mode="wb", suffix=".curapackage", delete=False) as temp_file:
+                    bytes_read = msg.read(self.DISK_WRITE_BUFFER_SIZE)
+                    while bytes_read:
+                        temp_file.write(bytes_read)
+                        bytes_read = msg.read(self.DISK_WRITE_BUFFER_SIZE)
+                        CuraApplication.getInstance().processEvents()
+                    temp_file.close()
+                    if not CuraApplication.getInstance().getPackageManager().installPackage(temp_file.name):
+                        redownload_errors.append(f"Couldn't install package '{package_id}'.")
+            except IOError as ex:
+                redownload_errors.append(f"Couldn't process package '{package_id}' because '{ex}'.")
+
+            if len(to_install) < 1:
+                if len(redownload_errors) == 0:
+                    Logger.info("All packages redownloaded!")
+                    self._job_done.set()
+                else:
+                    msgs = "\n - ".join(redownload_errors)
+                    Logger.error(f"Couldn't re-install at least one package(s) because: {msgs}")
+                    self.restore_backup_error_message = self.DEFAULT_ERROR_MESSAGE
+                    self._job_done.set()
+
+        self._package_download_scope = UltimakerCloudScope(CuraApplication.getInstance())
+        for package_id, package_api_version in to_install.items():
+            def handlePackageId(package_id: str = package_id):
+                HttpRequestManager.getInstance().get(
+                    PACKAGES_URL_TEMPLATE.format(package_api_version, package_id),
+                    scope=self._package_download_scope,
+                    callback=lambda msg: packageDownloadCallback(package_id, msg),
+                    error_callback=lambda msg, err: packageDownloadCallback(package_id, msg, err)
+                )
+            handlePackageId(package_id)
 
     @staticmethod
     def _verifyMd5Hash(file_path: str, known_hash: str) -> bool:
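To summarise the restore side: the to_install mapping is rebuilt from the restored packages.json and then drives one download request per plug-in. A hypothetical slice of that file, shaped after the keys read above ("to_install", "package_info", "package_id", "sdk_version_semver"); the concrete values are made up:

packages_json = {
    "to_install": {
        "ExamplePostProcessing": {
            "package_info": {"package_id": "ExamplePostProcessing", "sdk_version_semver": "8.7.0"}
        }
    }
}
to_install = {}
for package_data in packages_json["to_install"].values():
    info = package_data.get("package_info", {})
    if "package_id" in info and "sdk_version_semver" in info:
        to_install[info["package_id"]] = info["sdk_version_semver"]
print(to_install)  # {'ExamplePostProcessing': '8.7.0'}, one download request per entry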