diff --git a/.github/workflows/conan-package-create.yml b/.github/workflows/conan-package-create.yml
index e8329fa7b1..18e2600e1d 100644
--- a/.github/workflows/conan-package-create.yml
+++ b/.github/workflows/conan-package-create.yml
@@ -119,7 +119,7 @@ jobs:
sudo apt upgrade
sudo apt install build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config flex bison -y
- - name: Install GCC-132 on ubuntu
+ - name: Install GCC-13 on ubuntu
if: ${{ startsWith(inputs.runs_on, 'ubuntu') }}
run: |
sudo apt install g++-13 gcc-13 -y
diff --git a/.github/workflows/installers.yml b/.github/workflows/installers.yml
index ead3b7a87a..dae53b76dc 100644
--- a/.github/workflows/installers.yml
+++ b/.github/workflows/installers.yml
@@ -24,9 +24,28 @@ on:
default: false
required: true
type: boolean
+ nightly:
+ description: 'Upload to nightly release'
+ default: false
+ required: true
+ type: boolean
+ os_list:
+ description: 'List of OS(-variant)s to build for'
+ default: "windows, linux-modern, macos-x64, macos-arm64"
+ required: true
+ type: string
+ schedule:
+ # Daily at 5:20 CET
+ - cron: '20 4 * * *'
+
+env:
+ CURA_CONAN_VERSION: ${{ inputs.cura_conan_version }}
+ ENTERPRISE: ${{ inputs.enterprise }}
+ STAGING: ${{ inputs.staging }}
jobs:
windows-installer:
+ if: ${{ contains(inputs.os_list, 'windows') || github.event_name == 'schedule' }}
uses: ./.github/workflows/windows.yml
with:
cura_conan_version: ${{ inputs.cura_conan_version }}
@@ -38,6 +57,7 @@ jobs:
secrets: inherit
linux-modern-installer:
+ if: ${{ contains(inputs.os_list, 'linux-modern') || github.event_name == 'schedule' }}
uses: ./.github/workflows/linux.yml
with:
cura_conan_version: ${{ inputs.cura_conan_version }}
@@ -48,18 +68,8 @@ jobs:
operating_system: ubuntu-22.04
secrets: inherit
- linux-legacy-installer:
- uses: ./.github/workflows/linux.yml
- with:
- cura_conan_version: ${{ inputs.cura_conan_version }}
- conan_args: ${{ inputs.conan_args }}
- enterprise: ${{ inputs.enterprise }}
- staging: ${{ inputs.staging }}
- architecture: X64
- operating_system: ubuntu-20.04
- secrets: inherit
-
macos-installer:
+ if: ${{ contains(inputs.os_list, 'macos-x64') }}
uses: ./.github/workflows/macos.yml
with:
cura_conan_version: ${{ inputs.cura_conan_version }}
@@ -71,6 +81,7 @@ jobs:
secrets: inherit
macos-arm-installer:
+ if: ${{ contains(inputs.os_list, 'macos-arm64') }}
uses: ./.github/workflows/macos.yml
with:
cura_conan_version: ${{ inputs.cura_conan_version }}
@@ -79,4 +90,189 @@ jobs:
staging: ${{ inputs.staging }}
architecture: ARM64
operating_system: self-hosted
- secrets: inherit
\ No newline at end of file
+ secrets: inherit
+
+ # Run and update nightly release when the nightly input is set to true or if the schedule is triggered
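+  # It only runs when at least one installer job succeeded and none of them failed.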
+ update-nightly-release:
+ if: ${{ always() && (! cancelled()) && contains(needs.*.result, 'success') && (! contains(needs.*.result, 'failure')) && (inputs.nightly || github.event_name == 'schedule') }}
+ runs-on: ubuntu-latest
+ needs: [ windows-installer, linux-modern-installer, macos-installer, macos-arm-installer ]
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+
+      # It's not strictly necessary to download all three run-info artifacts, but doing so guarantees we have at least one even when an OS is skipped.
+
+      - name: Download the run info (macOS)
+ if: ${{ contains(inputs.os_list, 'macos-x64') || contains(inputs.os_list, 'macos-arm64') }}
+ uses: actions/download-artifact@v2
+ with:
+ name: macos-run-info
+
+      - name: Download the run info (Linux)
+ if: ${{ contains(inputs.os_list, 'linux-modern') || github.event_name == 'schedule' }}
+ uses: actions/download-artifact@v2
+ with:
+ name: linux-run-info
+
+      - name: Download the run info (Windows)
+ if: ${{ contains(inputs.os_list, 'windows') || github.event_name == 'schedule' }}
+ uses: actions/download-artifact@v2
+ with:
+ name: windows-run-info
+
+ - name: Set the run info as environment variables
+ run: |
+ . run_info.sh
+
+      - name: Output the file name and extension
+ id: filename
+ shell: python
+ run: |
+ import os
+ import datetime
+ enterprise = "-Enterprise" if "${{ inputs.enterprise }}" == "true" else ""
+ os_list = [x.strip() for x in "${{ inputs.os_list }}".split(",")]
+ scheduled = "${{ github.event_name == 'schedule' }}" == "true"
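+          # On a scheduled run the workflow inputs are empty, so the Windows and Linux (modern) builds are always included.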
+ if 'linux-modern' in os_list or scheduled:
+ linux_modern = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-linux-modern-X64"
+ if 'macos-x64' in os_list:
+ mac_x64_dmg = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-macos-X64"
+ mac_x64_pkg = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-macos-X64"
+ if 'macos-arm64' in os_list:
+ mac_arm_dmg = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-macos-ARM64"
+ mac_arm_pkg = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-macos-ARM64"
+ if 'windows' in os_list or scheduled:
+            win_msi = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-win64-X64"
+            win_exe = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-win64-X64"
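+          # Strip any build metadata (the part after '+') from the version for the nightly asset names.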
+ nightly_name = "UltiMaker-Cura-" + os.getenv('CURA_VERSION_FULL').split("+")[0]
+ nightly_creation_time = str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
+ output_env = os.environ["GITHUB_OUTPUT"]
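+          # Preserve anything already written to GITHUB_OUTPUT before appending our own keys.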
+ content = ""
+ if os.path.exists(output_env):
+ with open(output_env, "r") as f:
+ content = f.read()
+ with open(output_env, "w") as f:
+ f.write(content)
+ if 'linux-modern' in os_list or scheduled:
+ f.writelines(f"LINUX_MODERN={linux_modern}\n")
+ if 'macos-x64' in os_list:
+ f.writelines(f"MAC_X64_DMG={mac_x64_dmg}\n")
+ f.writelines(f"MAC_X64_PKG={mac_x64_pkg}\n")
+ if 'macos-arm64' in os_list:
+ f.writelines(f"MAC_ARM_DMG={mac_arm_dmg}\n")
+ f.writelines(f"MAC_ARM_PKG={mac_arm_pkg}\n")
+ if 'windows' in os_list or scheduled:
+ f.writelines(f"WIN_MSI={win_msi}\n")
+ f.writelines(f"WIN_EXE={win_exe}\n")
+ f.writelines(f"NIGHTLY_NAME={nightly_name}\n")
+ f.writelines(f"NIGHTLY_TIME={nightly_creation_time}\n")
+
+ - name: Download linux modern installer jobs artifacts
+ if: ${{ contains(inputs.os_list, 'linux-modern') || github.event_name == 'schedule' }}
+ uses: actions/download-artifact@v2
+ with:
+ name: ${{ steps.filename.outputs.LINUX_MODERN }}-AppImage
+ path: installers
+
+ - name: Download mac x64 dmg installer jobs artifacts
+ if: ${{ contains(inputs.os_list, 'macos-x64') }}
+ uses: actions/download-artifact@v2
+ with:
+ name: ${{ steps.filename.outputs.MAC_X64_DMG }}-dmg
+ path: installers
+
+ - name: Download mac x64 pkg installer jobs artifacts
+ if: ${{ contains(inputs.os_list, 'macos-x64') }}
+ uses: actions/download-artifact@v2
+ with:
+ name: ${{ steps.filename.outputs.MAC_X64_PKG }}-pkg
+ path: installers
+
+ - name: Download mac arm dmg installer jobs artifacts
+ if: ${{ contains(inputs.os_list, 'macos-arm64') }}
+ uses: actions/download-artifact@v2
+ with:
+ name: ${{ steps.filename.outputs.MAC_ARM_DMG }}-dmg
+ path: installers
+
+ - name: Download mac arm pkg installer jobs artifacts
+ if: ${{ contains(inputs.os_list, 'macos-arm64') }}
+ uses: actions/download-artifact@v2
+ with:
+ name: ${{ steps.filename.outputs.MAC_ARM_PKG }}-pkg
+ path: installers
+
+ - name: Download win msi installer jobs artifacts
+ if: ${{ contains(inputs.os_list, 'windows') || github.event_name == 'schedule' }}
+ uses: actions/download-artifact@v2
+ with:
+ name: ${{ steps.filename.outputs.WIN_MSI }}-msi
+ path: installers
+
+ - name: Download win exe installer jobs artifacts
+ if: ${{ contains(inputs.os_list, 'windows') || github.event_name == 'schedule' }}
+ uses: actions/download-artifact@v2
+ with:
+ name: ${{ steps.filename.outputs.WIN_EXE }}-exe
+ path: installers
+
+ - name: Rename Linux (modern) installer to nightlies
+ if: ${{ contains(inputs.os_list, 'linux-modern') || github.event_name == 'schedule' }}
+ run: |
+ mv installers/${{ steps.filename.outputs.LINUX_MODERN }}.AppImage installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-linux-modern-X64.AppImage
+
+ - name: Rename MacOS (X64) installers to nightlies
+ if: ${{ contains(inputs.os_list, 'macos-x64') }}
+ run: |
+ mv installers/${{ steps.filename.outputs.MAC_X64_DMG }}.dmg installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-X64.dmg
+ mv installers/${{ steps.filename.outputs.MAC_X64_PKG }}.pkg installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-X64.pkg
+
+ - name: Rename MacOS (ARM-64) installers to nightlies
+ if: ${{ contains(inputs.os_list, 'macos-arm64') }}
+ run: |
+ mv installers/${{ steps.filename.outputs.MAC_ARM_DMG }}.dmg installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-ARM64.dmg
+ mv installers/${{ steps.filename.outputs.MAC_ARM_PKG }}.pkg installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-ARM64.pkg
+
+ - name: Rename Windows installers to nightlies
+ if: ${{ contains(inputs.os_list, 'windows') || github.event_name == 'schedule' }}
+ run: |
+ mv installers/${{ steps.filename.outputs.WIN_MSI }}.msi installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-win64-X64.msi
+ mv installers/${{ steps.filename.outputs.WIN_EXE }}.exe installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-win64-X64.exe
+
+ - name: Update nightly release for Linux (modern)
+ if: ${{ contains(inputs.os_list, 'linux-modern') || github.event_name == 'schedule' }}
+ run: |
+ gh release upload nightly installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-linux-modern-X64.AppImage --clobber
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Update nightly release for MacOS (X64)
+ if: ${{ contains(inputs.os_list, 'macos-x64') }}
+ run: |
+ gh release upload nightly installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-X64.dmg --clobber
+ gh release upload nightly installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-X64.pkg --clobber
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Update nightly release for MacOS (ARM-64)
+ if: ${{ contains(inputs.os_list, 'macos-arm64') }}
+ run: |
+ gh release upload nightly installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-ARM64.dmg --clobber
+ gh release upload nightly installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-ARM64.pkg --clobber
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Update nightly release for Windows
+ if: ${{ contains(inputs.os_list, 'windows') || github.event_name == 'schedule' }}
+ run: |
+ gh release upload nightly installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-win64-X64.msi --clobber
+ gh release upload nightly installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-win64-X64.exe --clobber
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Update nightly release description (with date)
+ run: |
+ gh release edit nightly --title "${{ steps.filename.outputs.NIGHTLY_NAME }}" --notes "Nightly release created on: ${{ steps.filename.outputs.NIGHTLY_TIME }}"
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml
index 2e15584299..375d753006 100644
--- a/.github/workflows/linux.yml
+++ b/.github/workflows/linux.yml
@@ -119,10 +119,20 @@ jobs:
sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
sudo apt update
sudo apt upgrade
- sudo apt install build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config -y
+ sudo apt install build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config binutils coreutils desktop-file-utils fakeroot fuse libgdk-pixbuf2.0-dev patchelf squashfs-tools strace util-linux zsync -y
+
+ # Get the AppImage tool
wget --no-check-certificate --quiet https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-x86_64.AppImage -O $GITHUB_WORKSPACE/appimagetool
chmod +x $GITHUB_WORKSPACE/appimagetool
echo "APPIMAGETOOL_LOCATION=$GITHUB_WORKSPACE/appimagetool" >> $GITHUB_ENV
+
+ # Get the AppImage builder
+ wget --no-check-certificate --quiet -O $GITHUB_WORKSPACE/appimage-builder-x86_64.AppImage https://github.com/AppImageCrafters/appimage-builder/releases/download/v1.1.0/appimage-builder-1.1.0-x86_64.AppImage
+ chmod +x appimage-builder-x86_64.AppImage
+ echo "APPIMAGEBUILDER_LOCATION=$GITHUB_WORKSPACE/appimage-builder-x86_64.AppImage" >> $GITHUB_ENV
+
+ # Make sure these tools can be found on the path
+ echo "$GITHUB_WORKSPACE" >> $GITHUB_PATH
- name: Install GCC-13
run: |
@@ -179,10 +189,7 @@ jobs:
run: |
import os
enterprise = "-Enterprise" if "${{ inputs.enterprise }}" == "true" else ""
- if "${{ inputs.operating_system }}" == "ubuntu-22.04":
- installer_filename = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-linux-modern-${{ inputs.architecture }}"
- else:
- installer_filename = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-linux-${{ inputs.architecture }}"
+ installer_filename = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-linux-${{ inputs.architecture }}"
output_env = os.environ["GITHUB_OUTPUT"]
content = ""
if os.path.exists(output_env):
@@ -238,7 +245,7 @@ jobs:
- name: Create the Linux AppImage (Bash)
run: |
- python ../cura_inst/packaging/AppImage/create_appimage.py ./UltiMaker-Cura $CURA_VERSION_FULL "${{ steps.filename.outputs.INSTALLER_FILENAME }}.AppImage"
+ python ../cura_inst/packaging/AppImage-builder/create_appimage.py ./UltiMaker-Cura $CURA_VERSION_FULL "${{ steps.filename.outputs.INSTALLER_FILENAME }}.AppImage"
chmod +x "${{ steps.filename.outputs.INSTALLER_FILENAME }}.AppImage"
working-directory: dist
@@ -250,6 +257,20 @@ jobs:
dist/${{ steps.filename.outputs.INSTALLER_FILENAME }}.AppImage
retention-days: 5
+ - name: Write the run info
+ shell: python
+ run: |
+ import os
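+          # run_info.sh is sourced by the update-nightly-release job in installers.yml to recover CURA_VERSION_FULL.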
+ with open("run_info.sh", "w") as f:
+ f.writelines(f'echo "CURA_VERSION_FULL={os.environ["CURA_VERSION_FULL"]}" >> $GITHUB_ENV\n')
+ - name: Upload the run info
+ uses: actions/upload-artifact@v3
+ with:
+ name: linux-run-info
+ path: |
+ run_info.sh
+ retention-days: 5
+
notify-export:
if: ${{ always() }}
needs: [ cura-installer-create ]
diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml
index af1fc3d12b..3667705952 100644
--- a/.github/workflows/macos.yml
+++ b/.github/workflows/macos.yml
@@ -264,6 +264,21 @@ jobs:
dist/${{ steps.filename.outputs.INSTALLER_FILENAME }}.pkg
retention-days: 5
+ - name: Write the run info
+ shell: python
+ run: |
+ import os
+ with open("run_info.sh", "w") as f:
+ f.writelines(f'echo "CURA_VERSION_FULL={os.environ["CURA_VERSION_FULL"]}" >> $GITHUB_ENV\n')
+
+ - name: Upload the run info
+ uses: actions/upload-artifact@v3
+ with:
+ name: macos-run-info
+ path: |
+ run_info.sh
+ retention-days: 5
+
notify-export:
if: ${{ always() }}
diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml
index df9056d454..f6de818eb4 100644
--- a/.github/workflows/windows.yml
+++ b/.github/workflows/windows.yml
@@ -256,6 +256,23 @@ jobs:
dist/${{steps.filename.outputs.INSTALLER_FILENAME }}.exe
retention-days: 5
+      # NOTE: The extension is .sh because this file is not used in the Windows build environment; it is sourced later by the Linux-based nightly-release job.
+ - name: Write the run info
+ shell: python
+ run: |
+ import os
+ with open("run_info.sh", "w") as f:
+ f.writelines(f'echo "CURA_VERSION_FULL={os.environ["CURA_VERSION_FULL"]}" >> $GITHUB_ENV\n')
+
+ - name: Upload the run info
+ uses: actions/upload-artifact@v3
+ with:
+ name: windows-run-info
+ path: |
+ run_info.sh
+ retention-days: 5
+
notify-export:
if: ${{ always() }}
needs: [ cura-installer-create ]
diff --git a/.gitignore b/.gitignore
index 048bb915c7..8fe6978fe8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -102,3 +102,5 @@ Ultimaker-Cura.spec
.run/
/printer-linter/src/printerlinter.egg-info/
/resources/qml/Dialogs/AboutDialogVersionsList.qml
+/plugins/CuraEngineGradualFlow
+/resources/bundled_packages/bundled_*.json
diff --git a/conandata.yml b/conandata.yml
index efbb7f8349..1bdd4f824a 100644
--- a/conandata.yml
+++ b/conandata.yml
@@ -19,6 +19,14 @@ pyinstaller:
package: "cura"
src: "plugins"
dst: "share/cura/plugins"
+ curaengine_gradual_flow_plugin:
+ package: "curaengine_plugin_gradual_flow"
+ src: "res/plugins/CuraEngineGradualFlow"
+ dst: "share/cura/plugins/CuraEngineGradualFlow"
+ curaengine_gradual_flow_plugin_bundled:
+ package: "curaengine_plugin_gradual_flow"
+ src: "res/bundled_packages"
+ dst: "share/cura/resources/bundled_packages"
cura_resources:
package: "cura"
src: "resources"
diff --git a/conanfile.py b/conanfile.py
index 9004658c29..aff8805b62 100644
--- a/conanfile.py
+++ b/conanfile.py
@@ -4,7 +4,7 @@ from pathlib import Path
from jinja2 import Template
from conan import ConanFile
-from conan.tools.files import copy, rmdir, save, mkdir
+from conan.tools.files import copy, rmdir, save, mkdir, rm
from conan.tools.microsoft import unix_path
from conan.tools.env import VirtualRunEnv, Environment, VirtualBuildEnv
from conan.tools.scm import Version
@@ -21,12 +21,11 @@ class CuraConan(ConanFile):
description = "3D printer / slicing GUI built on top of the Uranium framework"
topics = ("conan", "python", "pyqt6", "qt", "qml", "3d-printing", "slicer")
build_policy = "missing"
- exports = "LICENSE*", "UltiMaker-Cura.spec.jinja", "CuraVersion.py.jinja", "AboutDialogVersionsList.qml.jinja"
+ exports = "LICENSE*", "*.jinja"
settings = "os", "compiler", "build_type", "arch"
# FIXME: Remove specific branch once merged to main
- python_requires = "umbase/[>=0.1.7]@ultimaker/stable", "translationextractor/[>=2.1.1]@ultimaker/stable"
- python_requires_extend = "umbase.UMBaseConanfile"
+ python_requires = "translationextractor/[>=2.1.1]@ultimaker/stable"
options = {
"enterprise": ["True", "False", "true", "false"], # Workaround for GH Action passing boolean as lowercase string
@@ -210,8 +209,8 @@ class CuraConan(ConanFile):
src_path = os.path.join(self.source_folder, data["src"])
else:
src_path = os.path.join(self.deps_cpp_info[data["package"]].rootpath, data["src"])
- elif "root" in data: # get the paths relative from the sourcefolder
- src_path = os.path.join(self.source_folder, data["root"], data["src"])
+        elif "root" in data:  # get the paths relative to the install folder
+ src_path = os.path.join(self.install_folder, data["root"], data["src"])
else:
continue
if Path(src_path).exists():
@@ -222,7 +221,9 @@ class CuraConan(ConanFile):
if "package" in binary: # get the paths from conan package
src_path = os.path.join(self.deps_cpp_info[binary["package"]].rootpath, binary["src"])
elif "root" in binary: # get the paths relative from the sourcefolder
- src_path = os.path.join(self.source_folder, binary["root"], binary["src"])
+ src_path = str(self.source_path.joinpath(binary["root"], binary["src"]))
+ if self.settings.os == "Windows":
+ src_path = src_path.replace("\\", "\\\\")
else:
continue
if not Path(src_path).exists():
@@ -294,6 +295,8 @@ class CuraConan(ConanFile):
self.options["pynest2d"].shared = True
self.options["cpython"].shared = True
self.options["boost"].header_only = True
+ if self.settings.os == "Linux":
+ self.options["curaengine_grpc_definitions"].shared = True
def validate(self):
version = self.conf_info.get("user.cura:version", default = self.version, check_type = str)
@@ -302,10 +305,13 @@ class CuraConan(ConanFile):
def requirements(self):
self.requires("boost/1.82.0")
- self.requires("pyarcus/(latest)@ultimaker/cura_10951")
+ self.requires("curaengine_grpc_definitions/latest@ultimaker/testing")
+ self.requires("zlib/1.2.13")
+ self.requires("pyarcus/(latest)@ultimaker/testing")
self.requires("curaengine/(latest)@ultimaker/cura_10475")
- self.requires("pysavitar/(latest)@ultimaker/cura_10951")
- self.requires("pynest2d/(latest)@ultimaker/cura_10951")
+ self.requires("pysavitar/(latest)@ultimaker/testing")
+ self.requires("pynest2d/(latest)@ultimaker/testing")
+ self.requires("curaengine_plugin_gradual_flow/(latest)@ultimaker/testing")
self.requires("uranium/(latest)@ultimaker/cura_10475")
self.requires("cura_binary_data/(latest)@ultimaker/testing")
self.requires("cpython/3.10.4")
@@ -348,30 +354,39 @@ class CuraConan(ConanFile):
copy(self, "CuraEngine.exe", curaengine.bindirs[0], self.source_folder, keep_path = False)
copy(self, "CuraEngine", curaengine.bindirs[0], self.source_folder, keep_path = False)
- # Copy resources of cura_binary_data
- cura_binary_data = self.dependencies["cura_binary_data"].cpp_info
- copy(self, "*", cura_binary_data.resdirs[0], str(self._share_dir.joinpath("cura")), keep_path = True)
- copy(self, "*", cura_binary_data.resdirs[1], str(self._share_dir.joinpath("uranium")), keep_path = True)
- if self.settings.os == "Windows":
- copy(self, "*", cura_binary_data.resdirs[2], str(self._share_dir.joinpath("windows")), keep_path = True)
+ # Copy the external plugins that we want to bundle with Cura
+        rmdir(self, str(self.source_path.joinpath("plugins", "CuraEngineGradualFlow")))
+ curaengine_plugin_gradual_flow = self.dependencies["curaengine_plugin_gradual_flow"].cpp_info
+ copy(self, "*.py", curaengine_plugin_gradual_flow.resdirs[0], str(self.source_path.joinpath("plugins", "CuraEngineGradualFlow")), keep_path = True)
+ ext = ".exe" if self.settings.os == "Windows" else ""
+ copy(self, f"curaengine_plugin_gradual_flow{ext}", curaengine_plugin_gradual_flow.resdirs[0], str(self.source_path.joinpath("plugins", "CuraEngineGradualFlow")), keep_path = True)
+ copy(self, "*.json", curaengine_plugin_gradual_flow.resdirs[0], str(self.source_path.joinpath("plugins", "CuraEngineGradualFlow")), keep_path = True)
+ copy(self, "bundled_*.json", curaengine_plugin_gradual_flow.resdirs[1], str(self.source_path.joinpath("resources", "bundled_packages")), keep_path = False)
- for dependency in self.dependencies.host.values():
- for bindir in dependency.cpp_info.bindirs:
- copy(self, "*.dll", bindir, str(self._site_packages), keep_path = False)
- for libdir in dependency.cpp_info.libdirs:
- copy(self, "*.pyd", libdir, str(self._site_packages), keep_path = False)
- copy(self, "*.pyi", libdir, str(self._site_packages), keep_path = False)
- copy(self, "*.dylib", libdir, str(self._base_dir.joinpath("lib")), keep_path = False)
+ # Copy resources of cura_binary_data
+ cura_binary_data = self.dependencies["cura_binary_data"].cpp_info
+ copy(self, "*", cura_binary_data.resdirs[0], str(self._share_dir.joinpath("cura")), keep_path = True)
+ copy(self, "*", cura_binary_data.resdirs[1], str(self._share_dir.joinpath("uranium")), keep_path = True)
+ if self.settings.os == "Windows":
+ copy(self, "*", cura_binary_data.resdirs[2], str(self._share_dir.joinpath("windows")), keep_path = True)
- # Copy materials (flat)
- rmdir(self, os.path.join(self.source_folder, "resources", "materials"))
- fdm_materials = self.dependencies["fdm_materials"].cpp_info
- copy(self, "*", fdm_materials.resdirs[0], self.source_folder)
+ for dependency in self.dependencies.host.values():
+ for bindir in dependency.cpp_info.bindirs:
+ copy(self, "*.dll", bindir, str(self._site_packages), keep_path = False)
+ for libdir in dependency.cpp_info.libdirs:
+ copy(self, "*.pyd", libdir, str(self._site_packages), keep_path = False)
+ copy(self, "*.pyi", libdir, str(self._site_packages), keep_path = False)
+ copy(self, "*.dylib", libdir, str(self._base_dir.joinpath("lib")), keep_path = False)
- # Copy internal resources
- if self.options.internal:
- cura_private_data = self.dependencies["cura_private_data"].cpp_info
- copy(self, "*", cura_private_data.resdirs[0], str(self._share_dir.joinpath("cura")))
+ # Copy materials (flat)
+ rmdir(self, os.path.join(self.source_folder, "resources", "materials"))
+ fdm_materials = self.dependencies["fdm_materials"].cpp_info
+ copy(self, "*", fdm_materials.resdirs[0], self.source_folder)
+
+ # Copy internal resources
+ if self.options.internal:
+ cura_private_data = self.dependencies["cura_private_data"].cpp_info
+ copy(self, "*", cura_private_data.resdirs[0], str(self._share_dir.joinpath("cura")))
if self.options.devtools:
entitlements_file = "'{}'".format(os.path.join(self.source_folder, "packaging", "MacOS", "cura.entitlements"))
@@ -402,56 +417,20 @@ class CuraConan(ConanFile):
self.run(f"{cpp_info.bindirs[0]}/msgfmt {po_file} -o {mo_file} -f", env="conanbuild", ignore_errors=True)
def deploy(self):
- # Copy CuraEngine.exe to bindirs of Virtual Python Environment
- curaengine = self.dependencies["curaengine"].cpp_info
- copy(self, "CuraEngine.exe", curaengine.bindirs[0], str(self._base_dir), keep_path = False)
- copy(self, "CuraEngine", curaengine.bindirs[0], str(self._base_dir), keep_path = False)
+ copy(self, "*", os.path.join(self.package_folder, self.cpp.package.resdirs[2]), os.path.join(self.install_folder, "packaging"), keep_path = True)
- # Copy resources of Cura (keep folder structure)
+ # Copy resources of Cura (keep folder structure) needed by pyinstaller to determine the module structure
copy(self, "*", os.path.join(self.package_folder, self.cpp_info.bindirs[0]), str(self._base_dir), keep_path = False)
copy(self, "*", os.path.join(self.package_folder, self.cpp_info.libdirs[0]), str(self._site_packages.joinpath("cura")), keep_path = True)
copy(self, "*", os.path.join(self.package_folder, self.cpp_info.resdirs[0]), str(self._share_dir.joinpath("cura", "resources")), keep_path = True)
copy(self, "*", os.path.join(self.package_folder, self.cpp_info.resdirs[1]), str(self._share_dir.joinpath("cura", "plugins")), keep_path = True)
- # Copy materials (flat)
- fdm_materials = self.dependencies["fdm_materials"].cpp_info
- copy(self, "*", fdm_materials.resdirs[0], str(self._share_dir.joinpath("cura")))
-
- # Copy internal resources
- if self.options.internal:
- cura_private_data = self.dependencies["cura_private_data"].cpp_info
- copy(self, "*", cura_private_data.resdirs[0], str(self._share_dir.joinpath("cura")))
-
# Copy resources of Uranium (keep folder structure)
uranium = self.dependencies["uranium"].cpp_info
copy(self, "*", uranium.resdirs[0], str(self._share_dir.joinpath("uranium", "resources")), keep_path = True)
copy(self, "*", uranium.resdirs[1], str(self._share_dir.joinpath("uranium", "plugins")), keep_path = True)
copy(self, "*", uranium.libdirs[0], str(self._site_packages.joinpath("UM")), keep_path = True)
- # TODO: figure out if this is still needed
- copy(self, "*", os.path.join(uranium.libdirs[0], "Qt", "qml", "UM"), str(self._site_packages.joinpath("PyQt6", "Qt6", "qml", "UM")), keep_path = True)
-
- # Copy resources of cura_binary_data
- cura_binary_data = self.dependencies["cura_binary_data"].cpp_info
- copy(self, "*", cura_binary_data.resdirs[0], str(self._share_dir.joinpath("cura")), keep_path = True)
- copy(self, "*", cura_binary_data.resdirs[1], str(self._share_dir.joinpath("uranium")), keep_path = True)
- if self.settings.os == "Windows":
- copy(self, "*", cura_binary_data.resdirs[2], str(self._share_dir.joinpath("windows")), keep_path = True)
-
- for dependency in self.dependencies.host.values():
- for bindir in dependency.cpp_info.bindirs:
- copy(self, "*.dll", bindir, str(self._site_packages), keep_path = False)
- for libdir in dependency.cpp_info.libdirs:
- copy(self, "*.pyd", libdir, str(self._site_packages), keep_path = False)
- copy(self, "*.pyi", libdir, str(self._site_packages), keep_path = False)
- copy(self, "*.dylib", libdir, str(self._base_dir.joinpath("lib")), keep_path = False)
-
- # Copy packaging scripts
- copy(self, "*", os.path.join(self.package_folder, self.cpp_info.resdirs[2]), str(self._base_dir.joinpath("packaging")), keep_path = True)
-
- # Copy requirements.txt's
- copy(self, "*.txt", os.path.join(self.package_folder, self.cpp_info.resdirs[-1]), str(self._base_dir.joinpath("pip_requirements")), keep_path = False)
-
# Generate the GitHub Action version info Environment
version = self.conf_info.get("user.cura:version", default = self.version, check_type = str)
cura_version = Version(version)
@@ -482,7 +461,6 @@ echo "CURA_APP_NAME={{ cura_app_name }}" >> ${{ env_prefix }}GITHUB_ENV
icon_path = "'{}'".format(os.path.join(self.package_folder, self.cpp_info.resdirs[2], self.conan_data["pyinstaller"]["icon"][str(self.settings.os)])).replace("\\", "\\\\"),
entitlements_file = entitlements_file if self.settings.os == "Macos" else "None")
-
def package(self):
copy(self, "cura_app.py", src = self.source_folder, dst = os.path.join(self.package_folder, self.cpp.package.bindirs[0]))
copy(self, "*", src = os.path.join(self.source_folder, "cura"), dst = os.path.join(self.package_folder, self.cpp.package.libdirs[0]))
@@ -492,6 +470,13 @@ echo "CURA_APP_NAME={{ cura_app_name }}" >> ${{ env_prefix }}GITHUB_ENV
copy(self, "requirement*.txt", src = self.source_folder, dst = os.path.join(self.package_folder, self.cpp.package.resdirs[-1]))
copy(self, "*", src = os.path.join(self.source_folder, "packaging"), dst = os.path.join(self.package_folder, self.cpp.package.resdirs[2]))
+ # Remove the CuraEngineGradualFlow plugin from the package
+ rmdir(self, os.path.join(self.package_folder, self.cpp.package.resdirs[1], "CuraEngineGradualFlow"))
+ rm(self, "bundled_*.json", os.path.join(self.package_folder, self.cpp.package.resdirs[0], "bundled_packages"), recursive = False)
+
+ # Remove the fdm_materials from the package
+ rmdir(self, os.path.join(self.package_folder, self.cpp.package.resdirs[0], "materials"))
+
def package_info(self):
self.user_info.pip_requirements = "requirements.txt"
self.user_info.pip_requirements_git = "requirements-ultimaker.txt"
diff --git a/cura/Arranging/GridArrange.py b/cura/Arranging/GridArrange.py
index 493c81b27c..4caf472b5d 100644
--- a/cura/Arranging/GridArrange.py
+++ b/cura/Arranging/GridArrange.py
@@ -1,11 +1,13 @@
import math
-from typing import List, TYPE_CHECKING, Tuple, Set
+from typing import List, TYPE_CHECKING, Tuple, Set, Union
if TYPE_CHECKING:
from UM.Scene.SceneNode import SceneNode
from cura.BuildVolume import BuildVolume
from UM.Application import Application
+from UM.Math.AxisAlignedBox import AxisAlignedBox
+from UM.Math.Polygon import Polygon
from UM.Math.Vector import Vector
from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation
from UM.Operations.GroupedOperation import GroupedOperation
@@ -34,10 +36,18 @@ class GridArrange(Arranger):
self._grid_width += self._margin_x
self._grid_height += self._margin_y
- # Round up the grid size to the nearest cm
+        # Round up the grid size to the nearest cm; this ensures that new objects will
+        # be placed at integer offsets from each other
grid_precision = 10 # 1cm
- self._grid_width = math.ceil(self._grid_width / grid_precision) * grid_precision
- self._grid_height = math.ceil(self._grid_height / grid_precision) * grid_precision
+ rounded_grid_width = math.ceil(self._grid_width / grid_precision) * grid_precision
+ rounded_grid_height = math.ceil(self._grid_height / grid_precision) * grid_precision
+
+ # The space added by the "grid precision rounding up" of the grid size
+ self._grid_round_margin_x = rounded_grid_width - self._grid_width
+ self._grid_round_margin_y = rounded_grid_height - self._grid_height
+
+ self._grid_width = rounded_grid_width
+ self._grid_height = rounded_grid_height
self._offset_x = 0
self._offset_y = 0
@@ -56,10 +66,9 @@ class GridArrange(Arranger):
self._fixed_nodes_grid_ids = self._fixed_nodes_grid_ids.union(
self._intersectingGridIdxInclusive(node.getBoundingBox()))
- #grid indexes that are in disallowed area
+        # Grid indexes that are in a disallowed area
for polygon in self._build_volume.getDisallowedAreas():
- self._fixed_nodes_grid_ids = self._fixed_nodes_grid_ids.union(
- self._getIntersectingGridIdForPolygon(polygon))
+ self._fixed_nodes_grid_ids = self._fixed_nodes_grid_ids.union(self._intersectingGridIdxInclusive(polygon))
self._build_plate_grid_ids = self._intersectingGridIdxExclusive(self._build_volume_bounding_box)
@@ -240,51 +249,58 @@ class GridArrange(Arranger):
return TranslateOperation(node, Vector(delta_x, 0, delta_y))
- def _getGridCornerPoints(self, bounding_box: "BoundingVolume") -> Tuple[float, float, float, float]:
- coord_x1 = bounding_box.left
- coord_x2 = bounding_box.right
- coord_y1 = bounding_box.back
- coord_y2 = bounding_box.front
+ def _getGridCornerPoints(
+ self,
+ bounds: Union[AxisAlignedBox, Polygon],
+ *,
+ margin_x: float = 0.0,
+ margin_y: float = 0.0
+ ) -> Tuple[float, float, float, float]:
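+        # A positive margin grows the bounds before they are mapped to grid space; a negative margin shrinks them.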
+        if isinstance(bounds, AxisAlignedBox):
+            coord_x1 = bounds.left
+            coord_x2 = bounds.right
+            coord_y1 = bounds.back
+            coord_y2 = bounds.front
+ elif isinstance(bounds, Polygon):
+ coord_x1 = float('inf')
+ coord_y1 = float('inf')
+ coord_x2 = float('-inf')
+ coord_y2 = float('-inf')
+ for x, y in bounds.getPoints():
+ coord_x1 = min(coord_x1, x)
+ coord_y1 = min(coord_y1, y)
+ coord_x2 = max(coord_x2, x)
+ coord_y2 = max(coord_y2, y)
+ else:
+ raise TypeError("bounds must be either an AxisAlignedBox or a Polygon")
+
+ coord_x1 -= margin_x
+ coord_x2 += margin_x
+ coord_y1 -= margin_y
+ coord_y2 += margin_y
+
grid_x1, grid_y1 = self._coordSpaceToGridSpace(coord_x1, coord_y1)
grid_x2, grid_y2 = self._coordSpaceToGridSpace(coord_x2, coord_y2)
return grid_x1, grid_y1, grid_x2, grid_y2
- def _getIntersectingGridIdForPolygon(self, polygon)-> Set[Tuple[int, int]]:
- # (x0, y0)
- # |
- # v
- # ┌─────────────┐
- # │ │
- # │ │
- # └─────────────┘ < (x1, y1)
- x0 = float('inf')
- y0 = float('inf')
- x1 = float('-inf')
- y1 = float('-inf')
- grid_idx = set()
- for [x, y] in polygon.getPoints():
- x0 = min(x0, x)
- y0 = min(y0, y)
- x1 = max(x1, x)
- y1 = max(y1, y)
- grid_x1, grid_y1 = self._coordSpaceToGridSpace(x0, y0)
- grid_x2, grid_y2 = self._coordSpaceToGridSpace(x1, y1)
-
- for grid_x in range(math.floor(grid_x1), math.ceil(grid_x2)):
- for grid_y in range(math.floor(grid_y1), math.ceil(grid_y2)):
- grid_idx.add((grid_x, grid_y))
- return grid_idx
-
- def _intersectingGridIdxInclusive(self, bounding_box: "BoundingVolume") -> Set[Tuple[int, int]]:
- grid_x1, grid_y1, grid_x2, grid_y2 = self._getGridCornerPoints(bounding_box)
+ def _intersectingGridIdxInclusive(self, bounds: Union[AxisAlignedBox, Polygon]) -> Set[Tuple[int, int]]:
+ grid_x1, grid_y1, grid_x2, grid_y2 = self._getGridCornerPoints(
+ bounds,
+ margin_x=-(self._margin_x + self._grid_round_margin_x) * 0.5,
+ margin_y=-(self._margin_y + self._grid_round_margin_y) * 0.5,
+ )
grid_idx = set()
for grid_x in range(math.floor(grid_x1), math.ceil(grid_x2)):
for grid_y in range(math.floor(grid_y1), math.ceil(grid_y2)):
grid_idx.add((grid_x, grid_y))
return grid_idx
- def _intersectingGridIdxExclusive(self, bounding_box: "BoundingVolume") -> Set[Tuple[int, int]]:
- grid_x1, grid_y1, grid_x2, grid_y2 = self._getGridCornerPoints(bounding_box)
+ def _intersectingGridIdxExclusive(self, bounds: Union[AxisAlignedBox, Polygon]) -> Set[Tuple[int, int]]:
+ grid_x1, grid_y1, grid_x2, grid_y2 = self._getGridCornerPoints(
+ bounds,
+ margin_x=(self._margin_x + self._grid_round_margin_x) * 0.5,
+ margin_y=(self._margin_y + self._grid_round_margin_y) * 0.5,
+ )
grid_idx = set()
for grid_x in range(math.ceil(grid_x1), math.floor(grid_x2)):
for grid_y in range(math.ceil(grid_y1), math.floor(grid_y2)):
diff --git a/cura/CuraApplication.py b/cura/CuraApplication.py
index cda4356e7f..e075fe92f5 100755
--- a/cura/CuraApplication.py
+++ b/cura/CuraApplication.py
@@ -50,6 +50,7 @@ from UM.Settings.Validator import Validator
from UM.View.SelectionPass import SelectionPass # For typing.
from UM.Workspace.WorkspaceReader import WorkspaceReader
from UM.i18n import i18nCatalog
+from UM.Version import Version
from cura import ApplicationMetadata
from cura.API import CuraAPI
from cura.API.Account import Account
@@ -618,6 +619,16 @@ class CuraApplication(QtApplication):
def _onEngineCreated(self):
self._qml_engine.addImageProvider("print_job_preview", PrintJobPreviewImageProvider.PrintJobPreviewImageProvider())
+ version = Version(self.getVersion())
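+        # For frozen (packaged) builds whose version carries a non-beta postfix (i.e. development/nightly builds), show the full version in the window title and warn the user.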
+ if hasattr(sys, "frozen") and version.hasPostFix() and "beta" not in version.getPostfixType():
+ self._qml_engine.rootObjects()[0].setTitle(f"{ApplicationMetadata.CuraAppDisplayName} {ApplicationMetadata.CuraVersion}")
+ message = Message(
+ self._i18n_catalog.i18nc("@info:warning",
+ f"This version is not intended for production use. If you encounter any issues, please report them on our GitHub page, mentioning the full version {self.getVersion()}"),
+ lifetime = 0,
+ title = self._i18n_catalog.i18nc("@info:title", "Nightly build"),
+ message_type = Message.MessageType.WARNING)
+ message.show()
@pyqtProperty(bool)
def needToShowUserAgreement(self) -> bool:
diff --git a/packaging/AppImage-builder/AppImageBuilder.yml.jinja b/packaging/AppImage-builder/AppImageBuilder.yml.jinja
new file mode 100644
index 0000000000..fd8b4e6485
--- /dev/null
+++ b/packaging/AppImage-builder/AppImageBuilder.yml.jinja
@@ -0,0 +1,141 @@
+version: 1
+
+AppDir:
+ path: {{ app_dir }}
+ app_info:
+ id: com.ultimaker.cura
+ name: UltiMaker Cura
+ icon: {{ icon }}
+ version: {{ version }}
+ exec: UltiMaker-Cura
+ exec_args: $@
+ apt:
+ arch:
+ - amd64
+ allow_unauthenticated: true
+ sources:
+ - sourceline: deb http://nl.archive.ubuntu.com/ubuntu/ jammy main restricted
+ - sourceline: deb http://nl.archive.ubuntu.com/ubuntu/ jammy-updates main restricted
+ - sourceline: deb http://nl.archive.ubuntu.com/ubuntu/ jammy universe
+ - sourceline: deb http://nl.archive.ubuntu.com/ubuntu/ jammy-updates universe
+ - sourceline: deb http://nl.archive.ubuntu.com/ubuntu/ jammy multiverse
+ - sourceline: deb http://nl.archive.ubuntu.com/ubuntu/ jammy-updates multiverse
+ - sourceline: deb http://nl.archive.ubuntu.com/ubuntu/ jammy-backports main restricted
+ universe multiverse
+ - sourceline: deb http://security.ubuntu.com/ubuntu jammy-security main restricted
+ - sourceline: deb http://security.ubuntu.com/ubuntu jammy-security universe
+ - sourceline: deb http://security.ubuntu.com/ubuntu jammy-security multiverse
+ - sourceline: deb https://releases.jfrog.io/artifactory/jfrog-debs xenial contrib
+ - sourceline: deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-14 main
+ - sourceline: deb https://ppa.launchpadcontent.net/ubuntu-toolchain-r/test/ubuntu/
+ jammy main
+ - sourceline: deb https://ppa.launchpadcontent.net/deadsnakes/ppa/ubuntu/ jammy
+ main
+ - sourceline: deb [arch=amd64] https://packages.microsoft.com/repos/ms-teams stable
+ main
+ - sourceline: deb https://ppa.launchpadcontent.net/ppa-verse/cling/ubuntu/ jammy
+ main
+ - sourceline: deb [arch=amd64] https://dl.google.com/linux/chrome/deb/ stable
+ main
+ - sourceline: deb [signed-by=/usr/share/keyrings/nodesource.gpg] https://deb.nodesource.com/node_14.x
+ jammy main
+ - sourceline: deb [arch=amd64 signed-by=/usr/share/keyrings/transip-stack.gpg]
+ https://mirror.transip.net/stack/software/deb/Ubuntu_22.04/ ./
+ - sourceline: deb http://repository.spotify.com stable non-free
+ - sourceline: deb [arch=amd64,arm64,armhf] http://packages.microsoft.com/repos/code
+ stable main
+ - sourceline: deb https://packagecloud.io/slacktechnologies/slack/debian/ jessie
+ main
+ include:
+ - libc6:amd64
+ - xdg-desktop-portal-kde:amd64
+ - libcap2:amd64
+ - libcom-err2:amd64
+ - libdbus-1-3:amd64
+ - libgpg-error0:amd64
+ - libgtk-3-common
+ - libkeyutils1:amd64
+ - libllvm13
+ - liblzma5:amd64
+ - libpcre3:amd64
+ - libqt6gui6
+ - libqt6qml6
+ - libqt6qmlworkerscript6
+ - libqt6quick6
+ - libselinux1:amd64
+ - libtinfo6:amd64
+ - qml6-module-qtqml-workerscript:amd64
+ - qml6-module-qtquick:amd64
+ - qt6-gtk-platformtheme:amd64
+ - qt6-qpa-plugins:amd64
+ # x11
+ - libx11-6
+ - libx11-xcb1
+ - libxcb1
+ - libxcb-render0
+ - libxcb-xfixes0
+ - libxcb-shape0
+ - libxcb-dri2-0
+ - libxcb-shm0
+ - libxcb-glx0
+ - libxcb-present0
+ - libxcb-dri3-0
+ # graphic libraries interface (safe graphics bundle including drivers, acceleration may not work in some systems)
+ - libglvnd0
+ - libglx0
+ - libglapi-mesa
+ - libgl1
+ - libegl1
+ - libgbm1
+ - libdrm2
+ - libglx-mesa0
+ - libgl1-amber-dri
+ - libgl1-mesa-dri
+ - mesa-utils
+ - libgl1-mesa-glx
+ - libdrm-amdgpu1
+ - libdrm-nouveau2
+ exclude:
+ - hicolor-icon-theme
+ - adwaita-icon-theme
+ - humanity-icon-theme
+ files:
+ include: []
+ exclude:
+ - usr/share/man
+ - usr/share/doc/*/README.*
+ - usr/share/doc/*/changelog.*
+ - usr/share/doc/*/NEWS.*
+ - usr/share/doc/*/TODO.*
+ runtime:
+ env:
+ APPDIR_LIBRARY_PATH: "$APPDIR/usr/lib/x86_64-linux-gnu:$APPDIR/lib/x86_64-linux-gnu:$APPDIR/usr/lib:$APPDIR/usr/lib/x86_64-linux-gnu/gdk-pixbuf-2.0/2.10.0/loaders"
+ PYTHONPATH: "$APPDIR"
+ QT_PLUGIN_PATH: "$APPDIR/qt/plugins"
+ QML2_IMPORT_PATH: "$APPDIR/qt/qml"
+ QT_QPA_PLATFORMTHEME: xdgdesktopportal
+ test:
+ fedora-30:
+ image: appimagecrafters/tests-env:fedora-30
+ command: ./AppRun
+ use_host_x: True
+ debian-stable:
+ image: appimagecrafters/tests-env:debian-stable
+ command: ./AppRun
+ use_host_x: True
+ archlinux-latest:
+ image: appimagecrafters/tests-env:archlinux-latest
+ command: ./AppRun
+ use_host_x: True
+ centos-7:
+ image: appimagecrafters/tests-env:centos-7
+ command: ./AppRun
+ use_host_x: True
+ ubuntu-xenial:
+ image: appimagecrafters/tests-env:ubuntu-xenial
+ command: ./AppRun
+ use_host_x: True
+AppImage:
+ arch: {{ arch }}
+ file_name: {{ file_name }}
+ update-information: guess
diff --git a/packaging/AppImage-builder/create_appimage.py b/packaging/AppImage-builder/create_appimage.py
new file mode 100644
index 0000000000..819ec0b766
--- /dev/null
+++ b/packaging/AppImage-builder/create_appimage.py
@@ -0,0 +1,102 @@
+# Copyright (c) 2023 UltiMaker
+# Cura is released under the terms of the LGPLv3 or higher.
+
+import argparse
+import os
+import shutil
+import subprocess
+
+from pathlib import Path
+
+from jinja2 import Template
+
+
+def prepare_workspace(dist_path, appimage_filename):
+ """
+ Prepare the workspace for building the AppImage.
+ :param dist_path: Path to the distribution of Cura created with pyinstaller.
+ :param appimage_filename: name of the AppImage file.
+ :return:
+ """
+ if not os.path.exists(dist_path):
+ raise RuntimeError(f"The dist_path {dist_path} does not exist.")
+
+ if os.path.exists(os.path.join(dist_path, appimage_filename)):
+ os.remove(os.path.join(dist_path, appimage_filename))
+
+ if not os.path.exists("AppDir"):
+ shutil.move(dist_path, "AppDir")
+ else:
+        print("AppDir already exists, assuming it is already prepared.")
+
+ copy_files("AppDir")
+
+
+def build_appimage(dist_path, version, appimage_filename):
+ """
+ Creates an AppImage file from the build artefacts created so far.
+ """
+ generate_appimage_builder_config(dist_path, version, appimage_filename)
+ create_appimage()
+ sign_appimage(dist_path, appimage_filename)
+
+
+def generate_appimage_builder_config(dist_path, version, appimage_filename):
+ with open(os.path.join(Path(__file__).parent, "AppImageBuilder.yml.jinja"), "r") as appimage_builder_file:
+ appimage_builder = appimage_builder_file.read()
+
+ template = Template(appimage_builder)
+ appimage_builder = template.render(app_dir = "./AppDir",
+ icon = "cura-icon.png",
+ version = version,
+ arch = "x86_64",
+ file_name = appimage_filename)
+
+ with open(os.path.join(Path(__file__).parent, "AppImageBuilder.yml"), "w") as appimage_builder_file:
+ appimage_builder_file.write(appimage_builder)
+
+
+def copy_files(dist_path):
+ """
+ Copy metadata files for the metadata of the AppImage.
+ """
+    copied_files = [
+        (os.path.join("..", "icons", "cura-icon.svg"), os.path.join("usr", "share", "icons", "hicolor", "scalable", "apps", "cura-icon.svg")),
+        (os.path.join("..", "icons", "cura-icon_64x64.png"), os.path.join("usr", "share", "icons", "hicolor", "64x64", "apps", "cura-icon.png")),
+        (os.path.join("..", "icons", "cura-icon_128x128.png"), os.path.join("usr", "share", "icons", "hicolor", "128x128", "apps", "cura-icon.png")),
+        (os.path.join("..", "icons", "cura-icon_256x256.png"), os.path.join("usr", "share", "icons", "hicolor", "256x256", "apps", "cura-icon.png")),
+        # The 256x256 icon is also used as the top-level AppDir icon; a dict keyed on the source path would silently drop one of these two entries.
+        (os.path.join("..", "icons", "cura-icon_256x256.png"), "cura-icon.png"),
+    ]
+
+ # TODO: openssl.cnf ???
+
+ packaging_dir = os.path.dirname(__file__)
+    for source, dest in copied_files:
+ dest_file_path = os.path.join(dist_path, dest)
+ os.makedirs(os.path.dirname(dest_file_path), exist_ok = True)
+ shutil.copyfile(os.path.join(packaging_dir, source), dest_file_path)
+
+
+def create_appimage():
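+    # APPIMAGEBUILDER_LOCATION is exported by the Linux workflow; otherwise fall back to a local appimage-builder AppImage.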
+    appimage_builder = os.getenv("APPIMAGEBUILDER_LOCATION", "appimage-builder-x86_64.AppImage")
+    command = [appimage_builder, "--recipe", os.path.join(Path(__file__).parent, "AppImageBuilder.yml"), "--skip-test"]
+    result = subprocess.call(command)
+    if result != 0:
+        raise RuntimeError(f"The appimage-builder command returned non-zero: {result}")
+
+
+def sign_appimage(dist_path, appimage_filename):
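+    # Assumes a GPG signing key is already available in the build environment.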
+ command = ["gpg", "--yes", "--armor", "--detach-sig", appimage_filename]
+ result = subprocess.call(command)
+ if result != 0:
+ raise RuntimeError(f"The GPG command returned non-zero: {result}")
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(description = "Create AppImages of Cura.")
+ parser.add_argument("dist_path", type = str, help = "Path to where PyInstaller installed the distribution of Cura.")
+ parser.add_argument("version", type = str, help = "Full version number of Cura (e.g. '5.1.0-beta')")
+ parser.add_argument("filename", type = str, help = "Filename of the AppImage (e.g. 'UltiMaker-Cura-5.1.0-beta-Linux-X64.AppImage')")
+ args = parser.parse_args()
+ prepare_workspace(args.dist_path, args.filename)
+ build_appimage(args.dist_path, args.version, args.filename)
diff --git a/packaging/msi/ExcludeComponents.xslt b/packaging/msi/ExcludeComponents.xslt
index b964484922..dc782e2035 100644
--- a/packaging/msi/ExcludeComponents.xslt
+++ b/packaging/msi/ExcludeComponents.xslt
@@ -20,10 +20,15 @@
...but we can use this longer `substring` expression instead (see https://github.com/wixtoolset/issues/issues/5609 )
-->
+ />
+
@@ -32,6 +37,7 @@
-
-
+
+
+
\ No newline at end of file
diff --git a/plugins/PostProcessingPlugin/scripts/LimitXYAccelJerk.py b/plugins/PostProcessingPlugin/scripts/LimitXYAccelJerk.py
new file mode 100644
index 0000000000..43aceb7793
--- /dev/null
+++ b/plugins/PostProcessingPlugin/scripts/LimitXYAccelJerk.py
@@ -0,0 +1,326 @@
+# Limit XY Accel: Authored by: Greg Foresi (GregValiant)
+# July 2023
+# Sometimes bed-slinger printers need different Accel and Jerk values for the Y but Cura always makes them the same.
+# This script changes the Accel and/or Jerk from the beginning of the 'Start Layer' to the end of the 'End Layer'.
+# The existing M201 Max Accel will be changed to limit the Y (and/or X) accel at the printer.
+# If you have Accel enabled in Cura and the XY Accel is set to 3000, then setting the Y limit to 1000 will result in the printer limiting the Y to 1000.
+# This can keep tall skinny prints from breaking loose of the bed and failing. The script was not tested with Junction Deviation.
+# If enabled - the Jerk setting is changed line-by-line within the gcode as there is no "limit" on Jerk.
+# If 'Gradual ACCEL change' is enabled, the Accel is changed gradually from the Start to the End layer, and that will be the final Accel setting in the file.
+# If 'Gradual' is enabled, the Jerk settings will continue to be changed to the end of the file (rather than ending at the End layer).
+# This post is intended for printers with moving beds (bed slingers) so UltiMaker printers are excluded.
+# When setting an accel limit on multi-extruder printers, ALL extruders are affected.
+# This post does not distinguish between Print Accel and Travel Accel; the limit applies to both. Example: Skin Accel = 1000 and Outer Wall accel = 500. If the limit is set to 300 then both Skin and Outer Wall will be Accel = 300.
+
+from ..Script import Script
+from cura.CuraApplication import CuraApplication
+import re
+from UM.Message import Message
+
+class LimitXYAccelJerk(Script):
+
+ def initialize(self) -> None:
+ super().initialize()
+ # Get the Accel and Jerk and set the values in the setting boxes--
+ mycura = CuraApplication.getInstance().getGlobalContainerStack()
+ extruder = mycura.extruderList
+ accel_print = extruder[0].getProperty("acceleration_print", "value")
+ accel_travel = extruder[0].getProperty("acceleration_travel", "value")
+ jerk_print_old = extruder[0].getProperty("jerk_print", "value")
+ jerk_travel_old = extruder[0].getProperty("jerk_travel", "value")
+ self._instance.setProperty("x_accel_limit", "value", round(accel_print))
+ self._instance.setProperty("y_accel_limit", "value", round(accel_print))
+ self._instance.setProperty("x_jerk", "value", jerk_print_old)
+ self._instance.setProperty("y_jerk", "value", jerk_print_old)
+ ext_count = int(mycura.getProperty("machine_extruder_count", "value"))
+ machine_name = str(mycura.getProperty("machine_name", "value"))
+
+ # Warn the user if the printer is an Ultimaker-------------------------
+ if "Ultimaker" in machine_name:
+ Message(text = " [Limit the X-Y Accel/Jerk] DID NOT RUN because Ultimaker printers don't have sliding beds.").show()
+
+ # Warn the user if the printer is multi-extruder------------------
+ if ext_count > 1:
+ Message(text = " 'Limit the X-Y Accel/Jerk': The post processor treats all extruders the same. If you have multiple extruders they will all be subject to the same Accel and Jerk limits imposed. If you have different Travel and Print Accel they will also be subject to the same limits. If that is not acceptable then you should not use this Post Processor.").show()
+
+ def getSettingDataString(self):
+ return """{
+ "name": "Limit the X-Y Accel/Jerk (all extruders equal)",
+ "key": "LimitXYAccelJerk",
+ "metadata": {},
+ "version": 2,
+ "settings":
+ {
+ "type_of_change":
+ {
+ "label": "Immediate or Gradual change",
+ "description": "An 'Immediate' change will insert the new numbers immediately at the Start Layer. A 'Gradual' change will transition from the starting Accel to the new Accel limit across a range of layers.",
+ "type": "enum",
+ "options": {
+ "immediate_change": "Immediate",
+ "gradual_change": "Gradual"},
+ "default_value": "immediate_change"
+ },
+ "x_accel_limit":
+ {
+ "label": "X MAX Acceleration",
+ "description": "If this number is lower than the 'X Print Accel' in Cura then this will limit the Accel on the X axis. Enter the Maximum Acceleration value for the X axis. This will affect both Print and Travel Accel. If you enable an End Layer then at the end of that layer the Accel Limit will be reset (unless you choose 'Gradual' in which case the new limit goes to the top layer).",
+ "type": "int",
+ "enabled": true,
+ "minimum_value": 50,
+ "unit": "mm/sec² ",
+ "default_value": 500
+ },
+ "y_accel_limit":
+ {
+ "label": "Y MAX Acceleration",
+ "description": "If this number is lower than the Y accel in Cura then this will limit the Accel on the Y axis. Enter the Maximum Acceleration value for the Y axis. This will affect both Print and Travel Accel. If you enable an End Layer then at the end of that layer the Accel Limit will be reset (unless you choose 'Gradual' in which case the new limit goes to the top layer).",
+ "type": "int",
+ "enabled": true,
+ "minimum_value": 50,
+ "unit": "mm/sec² ",
+ "default_value": 500
+ },
+ "jerk_enable":
+ {
+ "label": "Change the Jerk",
+ "description": "Whether to change the Jerk values.",
+ "type": "bool",
+ "enabled": true,
+ "default_value": false
+ },
+ "x_jerk":
+ {
+ "label": " X jerk",
+ "description": "Enter the Jerk value for the X axis. Enter '0' to use the existing X Jerk. This setting will affect both the Print and Travel jerk.",
+ "type": "int",
+ "enabled": "jerk_enable",
+ "unit": "mm/sec ",
+ "default_value": 8
+ },
+ "y_jerk":
+ {
+ "label": " Y jerk",
+ "description": "Enter the Jerk value for the Y axis. Enter '0' to use the existing Y Jerk. This setting will affect both the Print and Travel jerk.",
+ "type": "int",
+ "enabled": "jerk_enable",
+ "unit": "mm/sec ",
+ "default_value": 8
+ },
+ "start_layer":
+ {
+ "label": "From Start of Layer:",
+ "description": "Use the Cura Preview numbers. Enter the Layer to start the changes at. The minimum is Layer 1.",
+ "type": "int",
+ "default_value": 1,
+ "minimum_value": 1,
+ "unit": "Lay# ",
+ "enabled": "type_of_change == 'immediate_change'"
+ },
+ "end_layer":
+ {
+ "label": "To End of Layer",
+ "description": "Use the Cura Preview numbers. Enter '-1' for the entire file or enter a layer number. The changes will end at your 'End Layer' and revert back to the original numbers.",
+ "type": "int",
+ "default_value": -1,
+ "minimum_value": -1,
+ "unit": "Lay# ",
+ "enabled": "type_of_change == 'immediate_change'"
+ },
+ "gradient_start_layer":
+ {
+ "label": " Gradual From Layer:",
+ "description": "Use the Cura Preview numbers. Enter the Layer to start the changes at. The minimum is Layer 1.",
+ "type": "int",
+ "default_value": 1,
+ "minimum_value": 1,
+ "unit": "Lay# ",
+ "enabled": "type_of_change == 'gradual_change'"
+ },
+ "gradient_end_layer":
+ {
+ "label": " Gradual To Layer",
+ "description": "Use the Cura Preview numbers. Enter '-1' for the top layer or enter a layer number. The last 'Gradual' change will continue to the end of the file.",
+ "type": "int",
+ "default_value": -1,
+ "minimum_value": -1,
+ "unit": "Lay# ",
+ "enabled": "type_of_change == 'gradual_change'"
+ }
+ }
+ }"""
+
+ def execute(self, data):
+ mycura = CuraApplication.getInstance().getGlobalContainerStack()
+ extruder = mycura.extruderList
+ machine_name = str(mycura.getProperty("machine_name", "value"))
+ print_sequence = str(mycura.getProperty("print_sequence", "value"))
+
+ # Exit if 'one_at_a_time' is enabled-------------------------
+ if print_sequence == "one_at_a_time":
+ Message(text = " [Limit the X-Y Accel/Jerk] DID NOT RUN. This post processor is not compatible with 'One-at-a-Time' mode.").show()
+ data[0] += "; [LimitXYAccelJerk] DID NOT RUN because Cura is set to 'One-at-a-Time' mode.\n"
+ return data
+
+ # Exit if the printer is an Ultimaker-------------------------
+ if "Ultimaker" in machine_name:
+ Message(text = " [Limit the X-Y Accel/Jerk] DID NOT RUN. This post processor is for bed slinger printers only.").show()
+ data[0] += "; [LimitXYAccelJerk] DID NOT RUN because the printer doesn't have a sliding bed.\n"
+ return data
+
+ type_of_change = str(self.getSettingValueByKey("type_of_change"))
+ accel_print_enabled = bool(extruder[0].getProperty("acceleration_enabled", "value"))
+ accel_travel_enabled = bool(extruder[0].getProperty("acceleration_travel_enabled", "value"))
+ accel_print = extruder[0].getProperty("acceleration_print", "value")
+ accel_travel = extruder[0].getProperty("acceleration_travel", "value")
+ jerk_print_enabled = str(extruder[0].getProperty("jerk_enabled", "value"))
+ jerk_travel_enabled = str(extruder[0].getProperty("jerk_travel_enabled", "value"))
+ jerk_print_old = extruder[0].getProperty("jerk_print", "value")
+ jerk_travel_old = extruder[0].getProperty("jerk_travel", "value")
+
+ if int(accel_print) >= int(accel_travel):
+ accel_old = accel_print
+ else:
+ accel_old = accel_travel
+ jerk_travel = str(extruder[0].getProperty("jerk_travel", "value"))
+ if int(jerk_print_old) >= int(jerk_travel_old):
+ jerk_old = jerk_print_old
+ else:
+ jerk_old = jerk_travel_old
+
+ #Set the new Accel values----------------------------------------------------------
+ x_accel = str(self.getSettingValueByKey("x_accel_limit"))
+ y_accel = str(self.getSettingValueByKey("y_accel_limit"))
+ x_jerk = int(self.getSettingValueByKey("x_jerk"))
+ y_jerk = int(self.getSettingValueByKey("y_jerk"))
+
+ # Put the strings together-------------------------------------------
+ m201_limit_new = "M201 X" + x_accel + " Y" + y_accel
+ m201_limit_old = "M201 X" + str(round(accel_old)) + " Y" + str(round(accel_old))
+        if x_jerk == 0:
+            m205_jerk_pattern = r"Y(\d*)"
+            m205_jerk_new = "Y" + str(y_jerk)
+        if y_jerk == 0:
+            m205_jerk_pattern = r"X(\d*)"
+            m205_jerk_new = "X" + str(x_jerk)
+        if x_jerk != 0 and y_jerk != 0:
+            m205_jerk_pattern = r"M205 X(\d*) Y(\d*)"
+            m205_jerk_new = "M205 X" + str(x_jerk) + " Y" + str(y_jerk)
+ m205_jerk_old = "M205 X" + str(jerk_old) + " Y" + str(jerk_old)
+ type_of_change = self.getSettingValueByKey("type_of_change")
+
+ #Get the indexes of the start and end layers----------------------------------------
+ if type_of_change == 'immediate_change':
+ start_layer = int(self.getSettingValueByKey("start_layer"))-1
+ end_layer = int(self.getSettingValueByKey("end_layer"))
+ else:
+ start_layer = int(self.getSettingValueByKey("gradient_start_layer"))-1
+ end_layer = int(self.getSettingValueByKey("gradient_end_layer"))
+ start_index = 2
+ end_index = len(data)-2
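+        # Fall back to (nearly) the whole file; these defaults are narrowed below when the start/end layers are found.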
+ for num in range(2,len(data)-1):
+ if ";LAYER:" + str(start_layer) + "\n" in data[num]:
+ start_index = num
+ break
+ if int(end_layer) > 0:
+ for num in range(3,len(data)-1):
+ try:
+ if ";LAYER:" + str(end_layer) + "\n" in data[num]:
+ end_index = num
+ break
+ except:
+ end_index = len(data)-2
+
+ #Add Accel limit and new Jerk at start layer-----------------------------------------------------
+ if type_of_change == "immediate_change":
+ layer = data[start_index]
+ lines = layer.split("\n")
+ for index, line in enumerate(lines):
+ if lines[index].startswith(";LAYER:"):
+ lines.insert(index+1,m201_limit_new)
+ if self.getSettingValueByKey("jerk_enable"):
+ lines.insert(index+2,m205_jerk_new)
+ data[start_index] = "\n".join(lines)
+ break
+
+ #Alter any existing jerk lines. Accel lines can be ignored-----------------------------------
+ for num in range(start_index,end_index,1):
+ layer = data[num]
+ lines = layer.split("\n")
+ for index, line in enumerate(lines):
+ if line.startswith("M205"):
+ lines[index] = re.sub(m205_jerk_pattern, m205_jerk_new, line)
+ data[num] = "\n".join(lines)
+ if end_layer != -1:
+ try:
+ layer = data[end_index-1]
+ lines = layer.split("\n")
+ lines.insert(len(lines)-2,m201_limit_old)
+ lines.insert(len(lines)-2,m205_jerk_old)
+ data[end_index-1] = "\n".join(lines)
+ except:
+ pass
+ else:
+ data[len(data)-1] = m201_limit_old + "\n" + m205_jerk_old + "\n" + data[len(data)-1]
+ return data
+
+ elif type_of_change == "gradual_change":
+ layer_spread = end_index - start_index
+ if accel_old >= int(x_accel):
+ x_accel_hyst = round((accel_old - int(x_accel)) / layer_spread)
+ else:
+ x_accel_hyst = round((int(x_accel) - accel_old) / layer_spread)
+ if accel_old >= int(y_accel):
+ y_accel_hyst = round((accel_old - int(y_accel)) / layer_spread)
+ else:
+ y_accel_hyst = round((int(y_accel) - accel_old) / layer_spread)
+
+ if accel_old >= int(x_accel):
+ x_accel_start = round(round((accel_old - x_accel_hyst)/25)*25)
+ else:
+ x_accel_start = round(round((x_accel_hyst + accel_old)/25)*25)
+ if accel_old >= int(y_accel):
+ y_accel_start = round(round((accel_old - y_accel_hyst)/25)*25)
+ else:
+ y_accel_start = round(round((y_accel_hyst + accel_old)/25)*25)
+ m201_limit_new = "M201 X" + str(x_accel_start) + " Y" + str(y_accel_start)
+ #Add Accel limit and new Jerk at start layer-------------------------------------------------------------
+ layer = data[start_index]
+ lines = layer.split("\n")
+ for index, line in enumerate(lines):
+ if lines[index].startswith(";LAYER:"):
+ lines.insert(index+1,m201_limit_new)
+ if self.getSettingValueByKey("jerk_enable"):
+ lines.insert(index+2,m205_jerk_new)
+ data[start_index] = "\n".join(lines)
+ break
+ for num in range(start_index + 1, end_index,1):
+ layer = data[num]
+ lines = layer.split("\n")
+ if accel_old >= int(x_accel):
+ x_accel_start -= x_accel_hyst
+ if x_accel_start < int(x_accel): x_accel_start = int(x_accel)
+ else:
+ x_accel_start += x_accel_hyst
+ if x_accel_start > int(x_accel): x_accel_start = int(x_accel)
+ if accel_old >= int(y_accel):
+ y_accel_start -= y_accel_hyst
+ if y_accel_start < int(y_accel): y_accel_start = int(y_accel)
+ else:
+ y_accel_start += y_accel_hyst
+ if y_accel_start > int(y_accel): y_accel_start = int(y_accel)
+ m201_limit_new = "M201 X" + str(round(round(x_accel_start/25)*25)) + " Y" + str(round(round(y_accel_start/25)*25))
+ for index, line in enumerate(lines):
+ if line.startswith(";LAYER:"):
+ lines.insert(index+1, m201_limit_new)
+ continue
+ data[num] = "\n".join(lines)
+
+ #Alter any existing jerk lines. Accel lines can be ignored---------------
+ if self.getSettingValueByKey("jerk_enable"):
+ for num in range(start_index,len(data)-1,1):
+ layer = data[num]
+ lines = layer.split("\n")
+ for index, line in enumerate(lines):
+ if line.startswith("M205"):
+ lines[index] = re.sub(m205_jerk_pattern, m205_jerk_new, line)
+ data[num] = "\n".join(lines)
+ data[len(data)-1] = m201_limit_old + "\n" + m205_jerk_old + "\n" + data[len(data)-1]
+ return data
\ No newline at end of file