Merge remote-tracking branch 'origin/main' into CURA-10004_fix_crash_small_rotations

Erwan MATHIEU 2023-09-11 15:56:32 +02:00
commit 80a8d863d6
25 changed files with 1918 additions and 785 deletions

View File

@@ -119,7 +119,7 @@ jobs:
sudo apt upgrade
sudo apt install build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config flex bison -y
- name: Install GCC-132 on ubuntu
- name: Install GCC-13 on ubuntu
if: ${{ startsWith(inputs.runs_on, 'ubuntu') }}
run: |
sudo apt install g++-13 gcc-13 -y

View File

@@ -2,81 +2,233 @@ name: All installers
run-name: ${{ inputs.cura_conan_version }} by @${{ github.actor }}
on:
workflow_dispatch:
inputs:
cura_conan_version:
description: 'Cura Conan Version'
default: 'cura/latest@ultimaker/testing'
required: true
type: string
conan_args:
description: 'Conan args: e.g.: --require-override'
default: ''
required: false
type: string
enterprise:
description: 'Build Cura as an Enterprise edition'
default: false
required: true
type: boolean
staging:
description: 'Use staging API'
default: false
required: true
type: boolean
workflow_dispatch:
inputs:
cura_conan_version:
description: 'Cura Conan Version'
default: 'cura/latest@ultimaker/testing'
required: true
type: string
conan_args:
description: 'Conan args: e.g.: --require-override'
default: ''
required: false
type: string
enterprise:
description: 'Build Cura as an Enterprise edition'
default: false
required: true
type: boolean
staging:
description: 'Use staging API'
default: false
required: true
type: boolean
nightly:
description: 'Upload to nightly release'
default: false
required: true
type: boolean
schedule:
# Daily at 5:20 CET
- cron: '20 4 * * *'
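# GitHub Actions evaluates cron schedules in UTC; 04:20 UTC corresponds to 05:20 CET.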
env:
CURA_CONAN_VERSION: ${{ inputs.cura_conan_version || 'cura/latest@ultimaker/testing' }}
CONAN_ARGS: ${{ inputs.conan_args || '' }}
ENTERPRISE: ${{ inputs.enterprise || false }}
STAGING: ${{ inputs.staging || false }}
jobs:
windows-installer:
uses: ./.github/workflows/windows.yml
with:
cura_conan_version: ${{ inputs.cura_conan_version }}
conan_args: ${{ inputs.conan_args }}
enterprise: ${{ inputs.enterprise }}
staging: ${{ inputs.staging }}
architecture: X64
operating_system: windows-2022
secrets: inherit
windows-installer:
uses: ./.github/workflows/windows.yml
with:
cura_conan_version: ${{ github.event.inputs.CURA_CONAN_VERSION }}
conan_args: ${{ github.event.inputs.conan_args }}
enterprise: ${{ github.event.inputs.enterprise == 'true' }}
staging: ${{ github.event.inputs.staging == 'true' }}
architecture: X64
operating_system: windows-2022
secrets: inherit
linux-modern-installer:
uses: ./.github/workflows/linux.yml
with:
cura_conan_version: ${{ inputs.cura_conan_version }}
conan_args: ${{ inputs.conan_args }}
enterprise: ${{ inputs.enterprise }}
staging: ${{ inputs.staging }}
architecture: X64
operating_system: ubuntu-22.04
secrets: inherit
linux-installer:
uses: ./.github/workflows/linux.yml
with:
cura_conan_version: ${{ github.event.inputs.CURA_CONAN_VERSION }}
conan_args: ${{ github.event.inputs.conan_args }}
enterprise: ${{ github.event.inputs.enterprise == 'true' }}
staging: ${{ github.event.inputs.staging == 'true' }}
architecture: X64
operating_system: ubuntu-22.04
secrets: inherit
linux-legacy-installer:
uses: ./.github/workflows/linux.yml
with:
cura_conan_version: ${{ inputs.cura_conan_version }}
conan_args: ${{ inputs.conan_args }}
enterprise: ${{ inputs.enterprise }}
staging: ${{ inputs.staging }}
architecture: X64
operating_system: ubuntu-20.04
secrets: inherit
macos-installer:
uses: ./.github/workflows/macos.yml
with:
cura_conan_version: ${{ github.event.inputs.CURA_CONAN_VERSION }}
conan_args: ${{ github.event.inputs.conan_args }}
enterprise: ${{ github.event.inputs.enterprise == 'true' }}
staging: ${{ github.event.inputs.staging == 'true' }}
architecture: X64
operating_system: macos-11.0
secrets: inherit
macos-installer:
uses: ./.github/workflows/macos.yml
with:
cura_conan_version: ${{ inputs.cura_conan_version }}
conan_args: ${{ inputs.conan_args }}
enterprise: ${{ inputs.enterprise }}
staging: ${{ inputs.staging }}
architecture: X64
operating_system: macos-11.0
secrets: inherit
macos-arm-installer:
uses: ./.github/workflows/macos.yml
with:
cura_conan_version: ${{ github.event.inputs.CURA_CONAN_VERSION }}
conan_args: ${{ github.event.inputs.conan_args }}
enterprise: ${{ github.event.inputs.enterprise == 'true' }}
staging: ${{ github.event.inputs.staging == 'true' }}
architecture: ARM64
operating_system: self-hosted
secrets: inherit
macos-arm-installer:
uses: ./.github/workflows/macos.yml
# Run and update nightly release when the nightly input is set to true or if the schedule is triggered
update-nightly-release:
if: ${{ always() && (! cancelled()) && contains(needs.*.result, 'success') && (! contains(needs.*.result, 'failure')) && (inputs.nightly || github.event_name == 'schedule') }}
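# always() plus the needs checks let this job run even when some installer jobs were skipped, as long as none failed and at least one succeeded.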
runs-on: ubuntu-latest
needs: [ windows-installer, linux-installer, macos-installer, macos-arm-installer ]
steps:
- name: Checkout
uses: actions/checkout@v3
# It's not necessary to download all three, but it does make sure we have at least one if an OS is skipped.
- name: Download the run info
uses: actions/download-artifact@v2
with:
cura_conan_version: ${{ inputs.cura_conan_version }}
conan_args: ${{ inputs.conan_args }}
enterprise: ${{ inputs.enterprise }}
staging: ${{ inputs.staging }}
architecture: ARM64
operating_system: self-hosted
secrets: inherit
name: linux-run-info
- name: Set the run info as environment variables
run: |
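# run_info.sh (written and uploaded by the installer jobs) echoes CURA_VERSION_FULL into $GITHUB_ENV so later steps can use the full Cura version.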
. run_info.sh
- name: Output the file name and extension
id: filename
shell: python
run: |
import os
import datetime
enterprise = "-Enterprise" if "${{ github.event.inputs.enterprise }}" == "true" else ""
linux = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-linux-X64"
mac_x64_dmg = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-macos-X64"
mac_x64_pkg = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-macos-X64"
mac_arm_dmg = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-macos-ARM64"
mac_arm_pkg = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-macos-ARM64"
win_msi = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-win64-X64"
win_exe = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-win64-X64"
nightly_name = "UltiMaker-Cura-" + os.getenv('CURA_VERSION_FULL').split("+")[0]
nightly_creation_time = str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
output_env = os.environ["GITHUB_OUTPUT"]
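# Preserve anything earlier steps already wrote to GITHUB_OUTPUT, then append our own key=value pairs.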
content = ""
if os.path.exists(output_env):
with open(output_env, "r") as f:
content = f.read()
with open(output_env, "w") as f:
f.write(content)
f.writelines(f"LINUX={linux}\n")
f.writelines(f"MAC_X64_DMG={mac_x64_dmg}\n")
f.writelines(f"MAC_X64_PKG={mac_x64_pkg}\n")
f.writelines(f"MAC_ARM_DMG={mac_arm_dmg}\n")
f.writelines(f"MAC_ARM_PKG={mac_arm_pkg}\n")
f.writelines(f"WIN_MSI={win_msi}\n")
f.writelines(f"WIN_EXE={win_exe}\n")
f.writelines(f"NIGHTLY_NAME={nightly_name}\n")
f.writelines(f"NIGHTLY_TIME={nightly_creation_time}\n")
- name: Download linux installer jobs artifacts
uses: actions/download-artifact@v2
with:
name: ${{ steps.filename.outputs.LINUX }}-AppImage
path: installers
- name: Rename Linux installer to nightlies
run: |
mv installers/${{ steps.filename.outputs.LINUX }}.AppImage installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-linux-X64.AppImage
- name: Update nightly release for Linux
run: |
gh release upload nightly installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-linux-X64.AppImage --clobber
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Download win msi installer jobs artifacts
uses: actions/download-artifact@v2
with:
name: ${{ steps.filename.outputs.WIN_MSI }}-msi
path: installers
- name: Download win exe installer jobs artifacts
uses: actions/download-artifact@v2
with:
name: ${{ steps.filename.outputs.WIN_EXE }}-exe
path: installers
- name: Rename Windows installers to nightlies
run: |
mv installers/${{ steps.filename.outputs.WIN_MSI }}.msi installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-win64-X64.msi
mv installers/${{ steps.filename.outputs.WIN_EXE }}.exe installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-win64-X64.exe
- name: Update nightly release for Windows
run: |
gh release upload nightly installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-win64-X64.msi --clobber
gh release upload nightly installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-win64-X64.exe --clobber
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Download MacOS (X64) dmg installer jobs artifacts
uses: actions/download-artifact@v2
with:
name: ${{ steps.filename.outputs.MAC_X64_DMG }}-dmg
path: installers
- name: Download MacOS (X64) pkg installer jobs artifacts
uses: actions/download-artifact@v2
with:
name: ${{ steps.filename.outputs.MAC_X64_PKG }}-pkg
path: installers
- name: Rename MacOS (X64) installers to nightlies
run: |
mv installers/${{ steps.filename.outputs.MAC_X64_DMG }}.dmg installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-X64.dmg
mv installers/${{ steps.filename.outputs.MAC_X64_PKG }}.pkg installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-X64.pkg
- name: Update nightly release for MacOS (X64)
run: |
gh release upload nightly installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-X64.dmg --clobber
gh release upload nightly installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-X64.pkg --clobber
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Download MacOS (ARM-64) dmg installer jobs artifacts
uses: actions/download-artifact@v2
with:
name: ${{ steps.filename.outputs.MAC_ARM_DMG }}-dmg
path: installers
- name: Download MacOS (ARM-64) pkg installer jobs artifacts
uses: actions/download-artifact@v2
with:
name: ${{ steps.filename.outputs.MAC_ARM_PKG }}-pkg
path: installers
- name: Rename MacOS (ARM-64) installers to nightlies
run: |
mv installers/${{ steps.filename.outputs.MAC_ARM_DMG }}.dmg installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-ARM64.dmg
mv installers/${{ steps.filename.outputs.MAC_ARM_PKG }}.pkg installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-ARM64.pkg
- name: Update nightly release for MacOS (ARM-64)
run: |
gh release upload nightly installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-ARM64.dmg --clobber
gh release upload nightly installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-ARM64.pkg --clobber
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Update nightly release description (with date)
if: always()
run: |
gh release edit nightly --title "${{ steps.filename.outputs.NIGHTLY_NAME }}" --notes "Nightly release created on: ${{ steps.filename.outputs.NIGHTLY_TIME }}"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -38,7 +38,7 @@ on:
type: choice
options:
- ubuntu-22.04
- ubuntu-20.04
workflow_call:
inputs:
cura_conan_version:
@@ -119,10 +119,20 @@ jobs:
sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
sudo apt update
sudo apt upgrade
sudo apt install build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config -y
sudo apt install build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config binutils coreutils desktop-file-utils fakeroot fuse libgdk-pixbuf2.0-dev patchelf squashfs-tools strace util-linux zsync -y
# Get the AppImage tool
wget --no-check-certificate --quiet https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-x86_64.AppImage -O $GITHUB_WORKSPACE/appimagetool
chmod +x $GITHUB_WORKSPACE/appimagetool
echo "APPIMAGETOOL_LOCATION=$GITHUB_WORKSPACE/appimagetool" >> $GITHUB_ENV
# Get the AppImage builder
wget --no-check-certificate --quiet -O $GITHUB_WORKSPACE/appimage-builder-x86_64.AppImage https://github.com/AppImageCrafters/appimage-builder/releases/download/v1.1.0/appimage-builder-1.1.0-x86_64.AppImage
chmod +x appimage-builder-x86_64.AppImage
echo "APPIMAGEBUILDER_LOCATION=$GITHUB_WORKSPACE/appimage-builder-x86_64.AppImage" >> $GITHUB_ENV
# Make sure these tools can be found on the path
echo "$GITHUB_WORKSPACE" >> $GITHUB_PATH
- name: Install GCC-13
run: |
@@ -179,10 +189,7 @@ jobs:
run: |
import os
enterprise = "-Enterprise" if "${{ inputs.enterprise }}" == "true" else ""
if "${{ inputs.operating_system }}" == "ubuntu-22.04":
installer_filename = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-linux-modern-${{ inputs.architecture }}"
else:
installer_filename = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-linux-${{ inputs.architecture }}"
installer_filename = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-linux-${{ inputs.architecture }}"
output_env = os.environ["GITHUB_OUTPUT"]
content = ""
if os.path.exists(output_env):
@@ -238,7 +245,7 @@ jobs:
- name: Create the Linux AppImage (Bash)
run: |
python ../cura_inst/packaging/AppImage/create_appimage.py ./UltiMaker-Cura $CURA_VERSION_FULL "${{ steps.filename.outputs.INSTALLER_FILENAME }}.AppImage"
python ../cura_inst/packaging/AppImage-builder/create_appimage.py ./UltiMaker-Cura $CURA_VERSION_FULL "${{ steps.filename.outputs.INSTALLER_FILENAME }}.AppImage"
chmod +x "${{ steps.filename.outputs.INSTALLER_FILENAME }}.AppImage"
working-directory: dist
@@ -250,6 +257,20 @@ jobs:
dist/${{ steps.filename.outputs.INSTALLER_FILENAME }}.AppImage
retention-days: 5
- name: Write the run info
shell: python
run: |
import os
with open("run_info.sh", "w") as f:
f.writelines(f'echo "CURA_VERSION_FULL={os.environ["CURA_VERSION_FULL"]}" >> $GITHUB_ENV\n')
- name: Upload the run info
uses: actions/upload-artifact@v3
with:
name: linux-run-info
path: |
run_info.sh
retention-days: 5
notify-export:
if: ${{ always() }}
needs: [ cura-installer-create ]

View File

@@ -2,278 +2,293 @@ name: Macos Installer
run-name: ${{ inputs.cura_conan_version }} for Macos-${{ inputs.architecture }} by @${{ github.actor }}
on:
workflow_dispatch:
inputs:
cura_conan_version:
description: 'Cura Conan Version'
default: 'cura/latest@ultimaker/testing'
required: true
type: string
conan_args:
description: 'Conan args: e.g.: --require-override'
default: ''
required: false
type: string
enterprise:
description: 'Build Cura as an Enterprise edition'
default: false
required: true
type: boolean
staging:
description: 'Use staging API'
default: false
required: true
type: boolean
architecture:
description: 'Architecture'
required: true
default: 'X64'
type: choice
options:
- X64
- ARM64
operating_system:
description: 'OS'
required: true
default: 'macos-11'
type: choice
options:
- self-hosted
- macos-11
- macos-12
workflow_call:
inputs:
cura_conan_version:
description: 'Cura Conan Version'
default: 'cura/latest@ultimaker/testing'
required: true
type: string
conan_args:
description: 'Conan args: e.g.: --require-override'
default: ''
required: false
type: string
enterprise:
description: 'Build Cura as an Enterprise edition'
default: false
required: true
type: boolean
staging:
description: 'Use staging API'
default: false
required: true
type: boolean
architecture:
description: 'Architecture'
required: true
default: 'X64'
type: string
operating_system:
description: 'OS'
required: true
default: 'macos-11'
type: string
workflow_dispatch:
inputs:
cura_conan_version:
description: 'Cura Conan Version'
default: 'cura/latest@ultimaker/testing'
required: true
type: string
conan_args:
description: 'Conan args: e.g.: --require-override'
default: ''
required: false
type: string
enterprise:
description: 'Build Cura as an Enterprise edition'
default: false
required: true
type: boolean
staging:
description: 'Use staging API'
default: false
required: true
type: boolean
architecture:
description: 'Architecture'
required: true
default: 'X64'
type: choice
options:
- X64
- ARM64
operating_system:
description: 'OS'
required: true
default: 'macos-11'
type: choice
options:
- self-hosted
- macos-11
- macos-12
workflow_call:
inputs:
cura_conan_version:
description: 'Cura Conan Version'
default: 'cura/latest@ultimaker/testing'
required: true
type: string
conan_args:
description: 'Conan args: e.g.: --require-override'
default: ''
required: false
type: string
enterprise:
description: 'Build Cura as an Enterprise edition'
default: false
required: true
type: boolean
staging:
description: 'Use staging API'
default: false
required: true
type: boolean
architecture:
description: 'Architecture'
required: true
default: 'X64'
type: string
operating_system:
description: 'OS'
required: true
default: 'macos-11'
type: string
env:
CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
CODESIGN_IDENTITY: ${{ secrets.CODESIGN_IDENTITY }}
MAC_NOTARIZE_USER: ${{ secrets.MAC_NOTARIZE_USER }}
MAC_NOTARIZE_PASS: ${{ secrets.MAC_NOTARIZE_PASS }}
MACOS_CERT_P12: ${{ secrets.MACOS_CERT_P12 }}
MACOS_CERT_INSTALLER_P12: ${{ secrets.MACOS_CERT_INSTALLER_P12 }}
MACOS_CERT_USER: ${{ secrets.MACOS_CERT_USER }}
MACOS_CERT_PASSPHRASE: ${{ secrets.MACOS_CERT_PASSPHRASE }}
CURA_CONAN_VERSION: ${{ inputs.cura_conan_version }}
ENTERPRISE: ${{ inputs.enterprise }}
STAGING: ${{ inputs.staging }}
CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
CODESIGN_IDENTITY: ${{ secrets.CODESIGN_IDENTITY }}
MAC_NOTARIZE_USER: ${{ secrets.MAC_NOTARIZE_USER }}
MAC_NOTARIZE_PASS: ${{ secrets.MAC_NOTARIZE_PASS }}
MACOS_CERT_P12: ${{ secrets.MACOS_CERT_P12 }}
MACOS_CERT_INSTALLER_P12: ${{ secrets.MACOS_CERT_INSTALLER_P12 }}
MACOS_CERT_USER: ${{ secrets.MACOS_CERT_USER }}
MACOS_CERT_PASSPHRASE: ${{ secrets.MACOS_CERT_PASSPHRASE }}
CURA_CONAN_VERSION: ${{ inputs.cura_conan_version }}
ENTERPRISE: ${{ inputs.enterprise }}
STAGING: ${{ inputs.staging }}
jobs:
cura-installer-create:
runs-on: ${{ inputs.operating_system }}
cura-installer-create:
runs-on: ${{ inputs.operating_system }}
outputs:
INSTALLER_FILENAME: ${{ steps.filename.outputs.INSTALLER_FILENAME }}
outputs:
INSTALLER_FILENAME: ${{ steps.filename.outputs.INSTALLER_FILENAME }}
steps:
- name: Checkout
uses: actions/checkout@v3
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: '3.10.x'
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements for runner
run: pip install -r .github/workflows/requirements-conan-package.txt
- name: Cache Conan local repository packages (Bash)
uses: actions/cache@v3
with:
path: |
$HOME/.conan/data
$HOME/.conan/conan_download_cache
key: conan-${{ runner.os }}-${{ runner.arch }}-installer-cache
- name: Install MacOS system requirements
run: brew install cmake autoconf automake ninja create-dmg
- name: Create the default Conan profile
run: conan profile new default --detect --force
- name: Remove Macos keychain (Bash)
run: security delete-keychain signing_temp.keychain || true
- name: Configure Macos keychain Developer Cert(Bash)
id: macos-keychain-developer-cert
uses: apple-actions/import-codesign-certs@v1
with:
keychain-password: ${{ secrets.MACOS_KEYCHAIN_PASSWORD }}
p12-file-base64: ${{ secrets.MACOS_CERT_P12 }}
p12-password: ${{ secrets.MACOS_CERT_PASSPHRASE }}
- name: Configure Macos keychain Installer Cert (Bash)
id: macos-keychain-installer-cert
uses: apple-actions/import-codesign-certs@v1
with:
keychain-password: ${{ secrets.MACOS_KEYCHAIN_PASSWORD }}
create-keychain: false # keychain is created in previous use of action.
p12-file-base64: ${{ secrets.MACOS_CERT_INSTALLER_P12 }}
p12-password: ${{ secrets.MACOS_CERT_PASSPHRASE }}
- name: Get Conan configuration
run: |
conan config install https://github.com/Ultimaker/conan-config.git
conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
- name: Use Conan download cache (Bash)
run: conan config set storage.download_cache="$HOME/.conan/conan_download_cache"
- name: Create the Packages (Bash)
run: conan install $CURA_CONAN_VERSION ${{ inputs.conan_args }} --build=missing --update -if cura_inst -g VirtualPythonEnv -o cura:enterprise=$ENTERPRISE -o cura:staging=$STAGING --json "cura_inst/conan_install_info.json"
- name: Upload the Package(s)
if: always()
run: |
conan upload "*" -r cura --all -c
- name: Set Environment variables for Cura (bash)
run: |
. ./cura_inst/bin/activate_github_actions_env.sh
. ./cura_inst/bin/activate_github_actions_version_env.sh
- name: Unlock Macos keychain (Bash)
run: security unlock -p $TEMP_KEYCHAIN_PASSWORD signing_temp.keychain
env:
TEMP_KEYCHAIN_PASSWORD: ${{ steps.macos-keychain-developer-cert.outputs.keychain-password }}
# FIXME: This is a workaround to ensure that we use and pack a shared library for OpenSSL 1.1.1l. We currently compile
# OpenSSL statically for CPython, but our Python Dependencies (such as PyQt6) require a shared library.
# Because Conan won't allow for building the same library with two different options (easily) we need to install it explicitly
# and do a manual copy to the VirtualEnv, such that Pyinstaller can find it.
- name: Install OpenSSL shared
run: conan install openssl/1.1.1l@_/_ --build=missing --update -o openssl:shared=True -g deploy
- name: Copy OpenSSL shared (Bash)
run: |
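# Only one of the two patterns exists per platform; '|| true' keeps the step from failing on the other.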
cp ./openssl/lib/*.so* ./cura_inst/bin/ || true
cp ./openssl/lib/*.dylib* ./cura_inst/bin/ || true
- name: Create the Cura dist
run: pyinstaller ./cura_inst/UltiMaker-Cura.spec
- name: Output the file name and extension
id: filename
shell: python
run: |
import os
enterprise = "-Enterprise" if "${{ inputs.enterprise }}" == "true" else ""
installer_filename = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-macos-${{ inputs.architecture }}"
output_env = os.environ["GITHUB_OUTPUT"]
content = ""
if os.path.exists(output_env):
with open(output_env, "r") as f:
content = f.read()
with open(output_env, "w") as f:
f.write(content)
f.writelines(f"INSTALLER_FILENAME={installer_filename}\n")
- name: Summarize the used Conan dependencies
shell: python
run: |
import os
import json
from pathlib import Path
conan_install_info_path = Path("cura_inst/conan_install_info.json")
conan_info = {"installed": []}
if os.path.exists(conan_install_info_path):
with open(conan_install_info_path, "r") as f:
conan_info = json.load(f)
sorted_deps = sorted([dep["recipe"]["id"].replace('#', r' rev: ') for dep in conan_info["installed"]])
summary_env = os.environ["GITHUB_STEP_SUMMARY"]
content = ""
if os.path.exists(summary_env):
with open(summary_env, "r") as f:
content = f.read()
with open(summary_env, "w") as f:
f.write(content)
f.writelines("# ${{ steps.filename.outputs.INSTALLER_FILENAME }}\n")
f.writelines("## Conan packages:\n")
for dep in sorted_deps:
f.writelines(f"`{dep}`\n")
- name: Summarize the used Python modules
shell: python
run: |
import os
import pkg_resources
summary_env = os.environ["GITHUB_STEP_SUMMARY"]
content = ""
if os.path.exists(summary_env):
with open(summary_env, "r") as f:
content = f.read()
with open(summary_env, "w") as f:
f.write(content)
f.writelines("## Python modules:\n")
for package in pkg_resources.working_set:
f.writelines(f"`{package.key}/{package.version}`\n")
- name: Create the Macos dmg (Bash)
run: python ../cura_inst/packaging/MacOS/build_macos.py --source_path ../cura_inst --dist_path . --cura_conan_version $CURA_CONAN_VERSION --filename "${{ steps.filename.outputs.INSTALLER_FILENAME }}" --build_dmg --build_pkg --app_name "$CURA_APP_NAME"
working-directory: dist
- name: Upload the dmg
uses: actions/upload-artifact@v3
with:
name: ${{ steps.filename.outputs.INSTALLER_FILENAME }}-dmg
path: |
dist/${{ steps.filename.outputs.INSTALLER_FILENAME }}.dmg
retention-days: 5
- name: Upload the pkg
uses: actions/upload-artifact@v3
with:
name: ${{ steps.filename.outputs.INSTALLER_FILENAME }}-pkg
path: |
dist/${{ steps.filename.outputs.INSTALLER_FILENAME }}.pkg
retention-days: 5
notify-export:
if: ${{ always() }}
needs: [ cura-installer-create ]
uses: ultimaker/cura/.github/workflows/notify.yml@main
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
success: ${{ contains(join(needs.*.result, ','), 'success') }}
success_title: "Create the Cura distributions"
success_body: "Installers for ${{ inputs.cura_conan_version }}"
failure_title: "Failed to create the Cura distributions"
failure_body: "Failed to create at least 1 installer for ${{ inputs.cura_conan_version }}"
secrets: inherit
python-version: '3.10.x'
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements for runner
run: pip install -r .github/workflows/requirements-conan-package.txt
- name: Cache Conan local repository packages (Bash)
uses: actions/cache@v3
with:
path: |
$HOME/.conan/data
$HOME/.conan/conan_download_cache
key: conan-${{ runner.os }}-${{ runner.arch }}-installer-cache
- name: Install MacOS system requirements
run: brew install cmake autoconf automake ninja create-dmg
- name: Create the default Conan profile
run: conan profile new default --detect --force
- name: Remove Macos keychain (Bash)
run: security delete-keychain signing_temp.keychain || true
- name: Configure Macos keychain Developer Cert(Bash)
id: macos-keychain-developer-cert
uses: apple-actions/import-codesign-certs@v1
with:
keychain-password: ${{ secrets.MACOS_KEYCHAIN_PASSWORD }}
p12-file-base64: ${{ secrets.MACOS_CERT_P12 }}
p12-password: ${{ secrets.MACOS_CERT_PASSPHRASE }}
- name: Configure Macos keychain Installer Cert (Bash)
id: macos-keychain-installer-cert
uses: apple-actions/import-codesign-certs@v1
with:
keychain-password: ${{ secrets.MACOS_KEYCHAIN_PASSWORD }}
create-keychain: false # keychain is created in previous use of action.
p12-file-base64: ${{ secrets.MACOS_CERT_INSTALLER_P12 }}
p12-password: ${{ secrets.MACOS_CERT_PASSPHRASE }}
- name: Get Conan configuration
run: |
conan config install https://github.com/Ultimaker/conan-config.git
conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
- name: Use Conan download cache (Bash)
run: conan config set storage.download_cache="$HOME/.conan/conan_download_cache"
- name: Create the Packages (Bash)
run: conan install $CURA_CONAN_VERSION ${{ inputs.conan_args }} --build=missing --update -if cura_inst -g VirtualPythonEnv -o cura:enterprise=$ENTERPRISE -o cura:staging=$STAGING --json "cura_inst/conan_install_info.json"
- name: Upload the Package(s)
if: always()
run: |
conan upload "*" -r cura --all -c
- name: Set Environment variables for Cura (bash)
run: |
. ./cura_inst/bin/activate_github_actions_env.sh
. ./cura_inst/bin/activate_github_actions_version_env.sh
- name: Unlock Macos keychain (Bash)
run: security unlock -p $TEMP_KEYCHAIN_PASSWORD signing_temp.keychain
env:
TEMP_KEYCHAIN_PASSWORD: ${{ steps.macos-keychain-developer-cert.outputs.keychain-password }}
# FIXME: This is a workaround to ensure that we use and pack a shared library for OpenSSL 1.1.1l. We currently compile
# OpenSSL statically for CPython, but our Python Dependencies (such as PyQt6) require a shared library.
# Because Conan won't allow for building the same library with two different options (easily) we need to install it explicitly
# and do a manual copy to the VirtualEnv, such that Pyinstaller can find it.
- name: Install OpenSSL shared
run: conan install openssl/1.1.1l@_/_ --build=missing --update -o openssl:shared=True -g deploy
- name: Copy OpenSSL shared (Bash)
run: |
cp ./openssl/lib/*.so* ./cura_inst/bin/ || true
cp ./openssl/lib/*.dylib* ./cura_inst/bin/ || true
- name: Create the Cura dist
run: pyinstaller ./cura_inst/UltiMaker-Cura.spec
- name: Output the file name and extension
id: filename
shell: python
run: |
import os
enterprise = "-Enterprise" if "${{ inputs.enterprise }}" == "true" else ""
installer_filename = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-macos-${{ inputs.architecture }}"
output_env = os.environ["GITHUB_OUTPUT"]
content = ""
if os.path.exists(output_env):
with open(output_env, "r") as f:
content = f.read()
with open(output_env, "w") as f:
f.write(content)
f.writelines(f"INSTALLER_FILENAME={installer_filename}\n")
- name: Summarize the used Conan dependencies
shell: python
run: |
import os
import json
from pathlib import Path
conan_install_info_path = Path("cura_inst/conan_install_info.json")
conan_info = {"installed": []}
if os.path.exists(conan_install_info_path):
with open(conan_install_info_path, "r") as f:
conan_info = json.load(f)
sorted_deps = sorted([dep["recipe"]["id"].replace('#', r' rev: ') for dep in conan_info["installed"]])
summary_env = os.environ["GITHUB_STEP_SUMMARY"]
content = ""
if os.path.exists(summary_env):
with open(summary_env, "r") as f:
content = f.read()
with open(summary_env, "w") as f:
f.write(content)
f.writelines("# ${{ steps.filename.outputs.INSTALLER_FILENAME }}\n")
f.writelines("## Conan packages:\n")
for dep in sorted_deps:
f.writelines(f"`{dep}`\n")
- name: Summarize the used Python modules
shell: python
run: |
import os
import pkg_resources
summary_env = os.environ["GITHUB_STEP_SUMMARY"]
content = ""
if os.path.exists(summary_env):
with open(summary_env, "r") as f:
content = f.read()
with open(summary_env, "w") as f:
f.write(content)
f.writelines("## Python modules:\n")
for package in pkg_resources.working_set:
f.writelines(f"`{package.key}/{package.version}`\n")
- name: Create the Macos dmg (Bash)
run: python ../cura_inst/packaging/MacOS/build_macos.py --source_path ../cura_inst --dist_path . --cura_conan_version $CURA_CONAN_VERSION --filename "${{ steps.filename.outputs.INSTALLER_FILENAME }}" --build_dmg --build_pkg --app_name "$CURA_APP_NAME"
working-directory: dist
- name: Upload the dmg
uses: actions/upload-artifact@v3
with:
name: ${{ steps.filename.outputs.INSTALLER_FILENAME }}-dmg
path: |
dist/${{ steps.filename.outputs.INSTALLER_FILENAME }}.dmg
retention-days: 5
- name: Upload the pkg
uses: actions/upload-artifact@v3
with:
name: ${{ steps.filename.outputs.INSTALLER_FILENAME }}-pkg
path: |
dist/${{ steps.filename.outputs.INSTALLER_FILENAME }}.pkg
retention-days: 5
- name: Write the run info
shell: python
run: |
import os
with open("run_info.sh", "w") as f:
f.writelines(f'echo "CURA_VERSION_FULL={os.environ["CURA_VERSION_FULL"]}" >> $GITHUB_ENV\n')
- name: Upload the run info
uses: actions/upload-artifact@v3
with:
name: macos-run-info
path: |
run_info.sh
retention-days: 5
notify-export:
if: ${{ always() }}
needs: [ cura-installer-create ]
uses: ultimaker/cura/.github/workflows/notify.yml@main
with:
success: ${{ contains(join(needs.*.result, ','), 'success') }}
success_title: "Create the Cura distributions"
success_body: "Installers for ${{ inputs.cura_conan_version }}"
failure_title: "Failed to create the Cura distributions"
failure_body: "Failed to create at least 1 installer for ${{ inputs.cura_conan_version }}"
secrets: inherit

View File

@@ -2,269 +2,286 @@ name: Windows Installer
run-name: ${{ inputs.cura_conan_version }} for Windows-${{ inputs.architecture }} by @${{ github.actor }}
on:
workflow_dispatch:
inputs:
cura_conan_version:
description: 'Cura Conan Version'
default: 'cura/latest@ultimaker/testing'
required: true
type: string
conan_args:
description: 'Conan args: e.g.: --require-override'
default: ''
required: false
type: string
enterprise:
description: 'Build Cura as an Enterprise edition'
default: false
required: true
type: boolean
staging:
description: 'Use staging API'
default: false
required: true
type: boolean
architecture:
description: 'Architecture'
required: true
default: 'X64'
type: choice
options:
- X64
operating_system:
description: 'OS'
required: true
default: 'windows-2022'
type: choice
options:
- windows-2022
workflow_call:
inputs:
cura_conan_version:
description: 'Cura Conan Version'
default: 'cura/latest@ultimaker/testing'
required: true
type: string
conan_args:
description: 'Conan args: e.g.: --require-override'
default: ''
required: false
type: string
enterprise:
description: 'Build Cura as an Enterprise edition'
default: false
required: true
type: boolean
staging:
description: 'Use staging API'
default: false
required: true
type: boolean
architecture:
description: 'Architecture'
required: true
default: 'X64'
type: string
operating_system:
description: 'OS'
required: true
default: 'windows-2022'
type: string
workflow_dispatch:
inputs:
cura_conan_version:
description: 'Cura Conan Version'
default: 'cura/latest@ultimaker/testing'
required: true
type: string
conan_args:
description: 'Conan args: e.g.: --require-override'
default: ''
required: false
type: string
enterprise:
description: 'Build Cura as an Enterprise edition'
default: false
required: true
type: boolean
staging:
description: 'Use staging API'
default: false
required: true
type: boolean
architecture:
description: 'Architecture'
required: true
default: 'X64'
type: choice
options:
- X64
operating_system:
description: 'OS'
required: true
default: 'windows-2022'
type: choice
options:
- windows-2022
workflow_call:
inputs:
cura_conan_version:
description: 'Cura Conan Version'
default: 'cura/latest@ultimaker/testing'
required: true
type: string
conan_args:
description: 'Conan args: e.g.: --require-override'
default: ''
required: false
type: string
enterprise:
description: 'Build Cura as an Enterprise edition'
default: false
required: true
type: boolean
staging:
description: 'Use staging API'
default: false
required: true
type: boolean
architecture:
description: 'Architecture'
required: true
default: 'X64'
type: string
operating_system:
description: 'OS'
required: true
default: 'windows-2022'
type: string
env:
CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
WIN_CERT_INSTALLER_CER: ${{ secrets.WIN_CERT_INSTALLER_CER }}
WIN_CERT_INSTALLER_CER_PASS: ${{ secrets.WIN_CERT_INSTALLER_CER_PASS }}
CURA_CONAN_VERSION: ${{ inputs.cura_conan_version }}
ENTERPRISE: ${{ inputs.enterprise }}
STAGING: ${{ inputs.staging }}
CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
WIN_CERT_INSTALLER_CER: ${{ secrets.WIN_CERT_INSTALLER_CER }}
WIN_CERT_INSTALLER_CER_PASS: ${{ secrets.WIN_CERT_INSTALLER_CER_PASS }}
CURA_CONAN_VERSION: ${{ inputs.cura_conan_version }}
ENTERPRISE: ${{ inputs.enterprise }}
STAGING: ${{ inputs.staging }}
jobs:
cura-installer-create:
runs-on: ${{ inputs.operating_system }}
cura-installer-create:
runs-on: ${{ inputs.operating_system }}
outputs:
INSTALLER_FILENAME: ${{ steps.filename.outputs.INSTALLER_FILENAME }}
outputs:
INSTALLER_FILENAME: ${{ steps.filename.outputs.INSTALLER_FILENAME }}
steps:
- name: Checkout
uses: actions/checkout@v3
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: '3.10.x'
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements for runner
run: pip install -r .github/workflows/requirements-conan-package.txt
- name: Cache Conan local repository packages (Powershell)
uses: actions/cache@v3
with:
path: |
C:\Users\runneradmin\.conan\data
C:\.conan
C:\Users\runneradmin\.conan\conan_download_cache
key: conan-${{ runner.os }}-${{ runner.arch }}-installer-cache
- name: Create the default Conan profile
run: conan profile new default --detect --force
- name: Get Conan configuration
run: |
conan config install https://github.com/Ultimaker/conan-config.git
conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
- name: Use Conan download cache (Powershell)
run: conan config set storage.download_cache="C:\Users\runneradmin\.conan\conan_download_cache"
- name: Create the Packages (Powershell)
run: conan install $Env:CURA_CONAN_VERSION ${{ inputs.conan_args }} --build=missing --update -if cura_inst -g VirtualPythonEnv -o cura:enterprise=$Env:ENTERPRISE -o cura:staging=$Env:STAGING --json "cura_inst/conan_install_info.json"
- name: Upload the Package(s)
if: always()
run: |
conan upload "*" -r cura --all -c
- name: Set Environment variables for Cura (Powershell)
run: |
echo "${Env:WIX}\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
.\cura_inst\Scripts\activate_github_actions_env.ps1
.\cura_inst\Scripts\activate_github_actions_version_env.ps1
- name: Install OpenSSL shared
run: conan install openssl/1.1.1l@_/_ --build=missing --update -o openssl:shared=True -g deploy
- name: Copy OpenSSL shared (Powershell)
run: |
cp openssl/bin/*.dll ./cura_inst/Scripts/
cp openssl/lib/*.lib ./cura_inst/Lib/
- name: Create the Cura dist
run: pyinstaller ./cura_inst/UltiMaker-Cura.spec
- name: Output the file name and extension
id: filename
shell: python
run: |
import os
enterprise = "-Enterprise" if "${{ inputs.enterprise }}" == "true" else ""
installer_filename = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-win64-${{ inputs.architecture }}"
output_env = os.environ["GITHUB_OUTPUT"]
content = ""
if os.path.exists(output_env):
with open(output_env, "r") as f:
content = f.read()
with open(output_env, "w") as f:
f.write(content)
f.writelines(f"INSTALLER_FILENAME={installer_filename}\n")
- name: Summarize the used Conan dependencies
shell: python
run: |
import os
import json
from pathlib import Path
conan_install_info_path = Path("cura_inst/conan_install_info.json")
conan_info = {"installed": []}
if os.path.exists(conan_install_info_path):
with open(conan_install_info_path, "r") as f:
conan_info = json.load(f)
sorted_deps = sorted([dep["recipe"]["id"].replace('#', r' rev: ') for dep in conan_info["installed"]])
summary_env = os.environ["GITHUB_STEP_SUMMARY"]
content = ""
if os.path.exists(summary_env):
with open(summary_env, "r") as f:
content = f.read()
with open(summary_env, "w") as f:
f.write(content)
f.writelines("# ${{ steps.filename.outputs.INSTALLER_FILENAME }}\n")
f.writelines("## Conan packages:\n")
for dep in sorted_deps:
f.writelines(f"`{dep}`\n")
- name: Summarize the used Python modules
shell: python
run: |
import os
import pkg_resources
summary_env = os.environ["GITHUB_STEP_SUMMARY"]
content = ""
if os.path.exists(summary_env):
with open(summary_env, "r") as f:
content = f.read()
with open(summary_env, "w") as f:
f.write(content)
f.writelines("## Python modules:\n")
for package in pkg_resources.working_set:
f.writelines(f"`{package.key}/{package.version}`\n")
- name: Create PFX certificate from BASE64_PFX_CONTENT secret
id: create-pfx
env:
PFX_CONTENT: ${{ secrets.WIN_CERT_INSTALLER_CER }}
run: |
$pfxPath = Join-Path -Path $env:RUNNER_TEMP -ChildPath "cert.pfx";
$encodedBytes = [System.Convert]::FromBase64String($env:PFX_CONTENT);
Set-Content $pfxPath -Value $encodedBytes -AsByteStream;
echo "PFX_PATH=$pfxPath" >> $env:GITHUB_OUTPUT;
- name: Create the Windows msi installer (Powershell)
run: |
python ..\cura_inst\packaging\msi\create_windows_msi.py ..\cura_inst .\UltiMaker-Cura "${{steps.filename.outputs.INSTALLER_FILENAME }}.msi" "$Env:CURA_APP_NAME"
working-directory: dist
- name: Sign the Windows msi installer (Powershell)
env:
PFX_PATH: ${{ steps.create-pfx.outputs.PFX_PATH }}
run: |
& "C:/Program Files (x86)/Windows Kits/10/bin/10.0.17763.0/x86/signtool.exe" sign /f $Env:PFX_PATH /p "$Env:WIN_CERT_INSTALLER_CER_PASS" /fd SHA256 /t http://timestamp.digicert.com "${{steps.filename.outputs.INSTALLER_FILENAME }}.msi"
working-directory: dist
- name: Create the Windows exe installer (Powershell)
run: |
python ..\cura_inst\packaging\NSIS\create_windows_installer.py ../cura_inst . "${{steps.filename.outputs.INSTALLER_FILENAME }}.exe"
working-directory: dist
- name: Sign the Windows exe installer (Powershell)
env:
PFX_PATH: ${{ steps.create-pfx.outputs.PFX_PATH }}
run: |
& "C:/Program Files (x86)/Windows Kits/10/bin/10.0.17763.0/x86/signtool.exe" sign /f $Env:PFX_PATH /p "$Env:WIN_CERT_INSTALLER_CER_PASS" /fd SHA256 /t http://timestamp.digicert.com "${{steps.filename.outputs.INSTALLER_FILENAME }}.exe"
working-directory: dist
- name: Upload the msi
uses: actions/upload-artifact@v3
with:
name: ${{steps.filename.outputs.INSTALLER_FILENAME }}-msi
path: |
dist/${{steps.filename.outputs.INSTALLER_FILENAME }}.msi
retention-days: 5
- name: Upload the exe
uses: actions/upload-artifact@v3
with:
name: ${{steps.filename.outputs.INSTALLER_FILENAME }}-exe
path: |
dist/${{steps.filename.outputs.INSTALLER_FILENAME }}.exe
retention-days: 5
notify-export:
if: ${{ always() }}
needs: [ cura-installer-create ]
uses: ultimaker/cura/.github/workflows/notify.yml@main
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
success: ${{ contains(join(needs.*.result, ','), 'success') }}
success_title: "Create the Cura distributions"
success_body: "Installers for ${{ inputs.cura_conan_version }}"
failure_title: "Failed to create the Cura distributions"
failure_body: "Failed to create at least 1 installer for ${{ inputs.cura_conan_version }}"
secrets: inherit
python-version: '3.10.x'
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements for runner
run: pip install -r .github/workflows/requirements-conan-package.txt
- name: Cache Conan local repository packages (Powershell)
uses: actions/cache@v3
with:
path: |
C:\Users\runneradmin\.conan\data
C:\.conan
C:\Users\runneradmin\.conan\conan_download_cache
key: conan-${{ runner.os }}-${{ runner.arch }}-installer-cache
- name: Create the default Conan profile
run: conan profile new default --detect --force
- name: Get Conan configuration
run: |
conan config install https://github.com/Ultimaker/conan-config.git
conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
- name: Use Conan download cache (Powershell)
run: conan config set storage.download_cache="C:\Users\runneradmin\.conan\conan_download_cache"
- name: Create the Packages (Powershell)
run: conan install $Env:CURA_CONAN_VERSION ${{ inputs.conan_args }} --build=missing --update -if cura_inst -g VirtualPythonEnv -o cura:enterprise=$Env:ENTERPRISE -o cura:staging=$Env:STAGING --json "cura_inst/conan_install_info.json"
- name: Upload the Package(s)
if: always()
run: |
conan upload "*" -r cura --all -c
- name: Set Environment variables for Cura (Powershell)
run: |
echo "${Env:WIX}\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
.\cura_inst\Scripts\activate_github_actions_env.ps1
.\cura_inst\Scripts\activate_github_actions_version_env.ps1
- name: Install OpenSSL shared
run: conan install openssl/1.1.1l@_/_ --build=missing --update -o openssl:shared=True -g deploy
- name: Copy OpenSSL shared (Powershell)
run: |
cp openssl/bin/*.dll ./cura_inst/Scripts/
cp openssl/lib/*.lib ./cura_inst/Lib/
- name: Create the Cura dist
run: pyinstaller ./cura_inst/UltiMaker-Cura.spec
- name: Output the file name and extension
id: filename
shell: python
run: |
import os
enterprise = "-Enterprise" if "${{ inputs.enterprise }}" == "true" else ""
installer_filename = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-win64-${{ inputs.architecture }}"
output_env = os.environ["GITHUB_OUTPUT"]
content = ""
if os.path.exists(output_env):
with open(output_env, "r") as f:
content = f.read()
with open(output_env, "w") as f:
f.write(content)
f.writelines(f"INSTALLER_FILENAME={installer_filename}\n")
- name: Summarize the used Conan dependencies
shell: python
run: |
import os
import json
from pathlib import Path
conan_install_info_path = Path("cura_inst/conan_install_info.json")
conan_info = {"installed": []}
if os.path.exists(conan_install_info_path):
with open(conan_install_info_path, "r") as f:
conan_info = json.load(f)
sorted_deps = sorted([dep["recipe"]["id"].replace('#', r' rev: ') for dep in conan_info["installed"]])
summary_env = os.environ["GITHUB_STEP_SUMMARY"]
content = ""
if os.path.exists(summary_env):
with open(summary_env, "r") as f:
content = f.read()
with open(summary_env, "w") as f:
f.write(content)
f.writelines("# ${{ steps.filename.outputs.INSTALLER_FILENAME }}\n")
f.writelines("## Conan packages:\n")
for dep in sorted_deps:
f.writelines(f"`{dep}`\n")
- name: Summarize the used Python modules
shell: python
run: |
import os
import pkg_resources
summary_env = os.environ["GITHUB_STEP_SUMMARY"]
content = ""
if os.path.exists(summary_env):
with open(summary_env, "r") as f:
content = f.read()
with open(summary_env, "w") as f:
f.write(content)
f.writelines("## Python modules:\n")
for package in pkg_resources.working_set:
f.writelines(f"`{package.key}/{package.version}`\n")
- name: Create PFX certificate from BASE64_PFX_CONTENT secret
id: create-pfx
env:
PFX_CONTENT: ${{ secrets.WIN_CERT_INSTALLER_CER }}
run: |
$pfxPath = Join-Path -Path $env:RUNNER_TEMP -ChildPath "cert.pfx";
$encodedBytes = [System.Convert]::FromBase64String($env:PFX_CONTENT);
Set-Content $pfxPath -Value $encodedBytes -AsByteStream;
echo "PFX_PATH=$pfxPath" >> $env:GITHUB_OUTPUT;
- name: Create the Windows msi installer (Powershell)
run: |
python ..\cura_inst\packaging\msi\create_windows_msi.py ..\cura_inst .\UltiMaker-Cura "${{steps.filename.outputs.INSTALLER_FILENAME }}.msi" "$Env:CURA_APP_NAME"
working-directory: dist
- name: Sign the Windows msi installer (Powershell)
env:
PFX_PATH: ${{ steps.create-pfx.outputs.PFX_PATH }}
run: |
& "C:/Program Files (x86)/Windows Kits/10/bin/10.0.17763.0/x86/signtool.exe" sign /f $Env:PFX_PATH /p "$Env:WIN_CERT_INSTALLER_CER_PASS" /fd SHA256 /t http://timestamp.digicert.com "${{steps.filename.outputs.INSTALLER_FILENAME }}.msi"
working-directory: dist
- name: Create the Windows exe installer (Powershell)
run: |
python ..\cura_inst\packaging\NSIS\create_windows_installer.py ../cura_inst . "${{steps.filename.outputs.INSTALLER_FILENAME }}.exe"
working-directory: dist
- name: Sign the Windows exe installer (Powershell)
env:
PFX_PATH: ${{ steps.create-pfx.outputs.PFX_PATH }}
run: |
& "C:/Program Files (x86)/Windows Kits/10/bin/10.0.17763.0/x86/signtool.exe" sign /f $Env:PFX_PATH /p "$Env:WIN_CERT_INSTALLER_CER_PASS" /fd SHA256 /t http://timestamp.digicert.com "${{steps.filename.outputs.INSTALLER_FILENAME }}.exe"
working-directory: dist
- name: Upload the msi
uses: actions/upload-artifact@v3
with:
name: ${{steps.filename.outputs.INSTALLER_FILENAME }}-msi
path: |
dist/${{steps.filename.outputs.INSTALLER_FILENAME }}.msi
retention-days: 5
- name: Upload the exe
uses: actions/upload-artifact@v3
with:
name: ${{steps.filename.outputs.INSTALLER_FILENAME }}-exe
path: |
dist/${{steps.filename.outputs.INSTALLER_FILENAME }}.exe
retention-days: 5
# NOTE: The extension is .sh because this script is not meant for the Windows build image; it is consumed outside this build environment.
- name: Write the run info
shell: python
run: |
import os
with open("run_info.sh", "w") as f:
f.writelines(f'echo "CURA_VERSION_FULL={os.environ["CURA_VERSION_FULL"]}" >> $GITHUB_ENV\n')
# NOTE: The extension is .sh because this script is not meant for the Windows build image; it is consumed outside this build environment.
- name: Upload the run info
uses: actions/upload-artifact@v3
with:
name: windows-run-info
path: |
run_info.sh
retention-days: 5
notify-export:
if: ${{ always() }}
needs: [ cura-installer-create ]
uses: ultimaker/cura/.github/workflows/notify.yml@main
with:
success: ${{ contains(join(needs.*.result, ','), 'success') }}
success_title: "Create the Cura distributions"
success_body: "Installers for ${{ inputs.cura_conan_version }}"
failure_title: "Failed to create the Cura distributions"
failure_body: "Failed to create at least 1 installer for ${{ inputs.cura_conan_version }}"
secrets: inherit

.gitignore (vendored), 2 changes
View File

@@ -102,3 +102,5 @@ Ultimaker-Cura.spec
.run/
/printer-linter/src/printerlinter.egg-info/
/resources/qml/Dialogs/AboutDialogVersionsList.qml
/plugins/CuraEngineGradualFlow
/resources/bundled_packages/bundled_*.json

View File

@@ -19,6 +19,14 @@ pyinstaller:
package: "cura"
src: "plugins"
dst: "share/cura/plugins"
curaengine_gradual_flow_plugin:
package: "curaengine_plugin_gradual_flow"
src: "res/plugins/CuraEngineGradualFlow"
dst: "share/cura/plugins/CuraEngineGradualFlow"
curaengine_gradual_flow_plugin_bundled:
package: "curaengine_plugin_gradual_flow"
src: "res/bundled_packages"
dst: "share/cura/resources/bundled_packages"
cura_resources:
package: "cura"
src: "resources"

View File

@@ -4,7 +4,7 @@ from pathlib import Path
from jinja2 import Template
from conan import ConanFile
from conan.tools.files import copy, rmdir, save, mkdir
from conan.tools.files import copy, rmdir, save, mkdir, rm
from conan.tools.microsoft import unix_path
from conan.tools.env import VirtualRunEnv, Environment, VirtualBuildEnv
from conan.tools.scm import Version
@@ -21,12 +21,11 @@ class CuraConan(ConanFile):
description = "3D printer / slicing GUI built on top of the Uranium framework"
topics = ("conan", "python", "pyqt6", "qt", "qml", "3d-printing", "slicer")
build_policy = "missing"
exports = "LICENSE*", "UltiMaker-Cura.spec.jinja", "CuraVersion.py.jinja", "AboutDialogVersionsList.qml.jinja"
exports = "LICENSE*", "*.jinja"
settings = "os", "compiler", "build_type", "arch"
# FIXME: Remove specific branch once merged to main
python_requires = "umbase/[>=0.1.7]@ultimaker/stable", "translationextractor/[>=2.1.1]@ultimaker/stable"
python_requires_extend = "umbase.UMBaseConanfile"
python_requires = "translationextractor/[>=2.1.1]@ultimaker/stable"
options = {
"enterprise": ["True", "False", "true", "false"], # Workaround for GH Action passing boolean as lowercase string
@@ -210,8 +209,8 @@ class CuraConan(ConanFile):
src_path = os.path.join(self.source_folder, data["src"])
else:
src_path = os.path.join(self.deps_cpp_info[data["package"]].rootpath, data["src"])
elif "root" in data: # get the paths relative from the sourcefolder
src_path = os.path.join(self.source_folder, data["root"], data["src"])
elif "root" in data: # get the paths relative from the install folder
src_path = os.path.join(self.install_folder, data["root"], data["src"])
else:
continue
if Path(src_path).exists():
@@ -222,7 +221,9 @@ class CuraConan(ConanFile):
if "package" in binary: # get the paths from conan package
src_path = os.path.join(self.deps_cpp_info[binary["package"]].rootpath, binary["src"])
elif "root" in binary: # get the paths relative from the sourcefolder
src_path = os.path.join(self.source_folder, binary["root"], binary["src"])
src_path = str(self.source_path.joinpath(binary["root"], binary["src"]))
if self.settings.os == "Windows":
src_path = src_path.replace("\\", "\\\\")
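# Presumably needed because this path is interpolated into the generated PyInstaller spec, where a lone backslash would act as an escape character.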
else:
continue
if not Path(src_path).exists():
@@ -294,6 +295,8 @@ class CuraConan(ConanFile):
self.options["pynest2d"].shared = True
self.options["cpython"].shared = True
self.options["boost"].header_only = True
if self.settings.os == "Linux":
self.options["curaengine_grpc_definitions"].shared = True
def validate(self):
version = self.conf_info.get("user.cura:version", default = self.version, check_type = str)
@@ -302,10 +305,13 @@ class CuraConan(ConanFile):
def requirements(self):
self.requires("boost/1.82.0")
self.requires("pyarcus/(latest)@ultimaker/cura_10951")
self.requires("curaengine_grpc_definitions/latest@ultimaker/testing")
self.requires("zlib/1.2.13")
self.requires("pyarcus/5.3.0")
self.requires("curaengine/(latest)@ultimaker/testing")
self.requires("pysavitar/(latest)@ultimaker/cura_10951")
self.requires("pynest2d/(latest)@ultimaker/cura_10951")
self.requires("pysavitar/5.3.0")
self.requires("pynest2d/5.3.0")
self.requires("curaengine_plugin_gradual_flow/(latest)@ultimaker/testing")
self.requires("uranium/(latest)@ultimaker/testing")
self.requires("cura_binary_data/(latest)@ultimaker/testing")
self.requires("cpython/3.10.4")
@@ -348,30 +354,39 @@ class CuraConan(ConanFile):
copy(self, "CuraEngine.exe", curaengine.bindirs[0], self.source_folder, keep_path = False)
copy(self, "CuraEngine", curaengine.bindirs[0], self.source_folder, keep_path = False)
# Copy resources of cura_binary_data
cura_binary_data = self.dependencies["cura_binary_data"].cpp_info
copy(self, "*", cura_binary_data.resdirs[0], str(self._share_dir.joinpath("cura")), keep_path = True)
copy(self, "*", cura_binary_data.resdirs[1], str(self._share_dir.joinpath("uranium")), keep_path = True)
if self.settings.os == "Windows":
copy(self, "*", cura_binary_data.resdirs[2], str(self._share_dir.joinpath("windows")), keep_path = True)
# Copy the external plugins that we want to bundle with Cura
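# Clear any stale checked-out copy first; the plugin files copied below come from the curaengine_plugin_gradual_flow Conan package.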
rmdir(self,str(self.source_path.joinpath("plugins", "CuraEngineGradualFlow")))
curaengine_plugin_gradual_flow = self.dependencies["curaengine_plugin_gradual_flow"].cpp_info
copy(self, "*.py", curaengine_plugin_gradual_flow.resdirs[0], str(self.source_path.joinpath("plugins", "CuraEngineGradualFlow")), keep_path = True)
ext = ".exe" if self.settings.os == "Windows" else ""
copy(self, f"curaengine_plugin_gradual_flow{ext}", curaengine_plugin_gradual_flow.resdirs[0], str(self.source_path.joinpath("plugins", "CuraEngineGradualFlow")), keep_path = True)
copy(self, "*.json", curaengine_plugin_gradual_flow.resdirs[0], str(self.source_path.joinpath("plugins", "CuraEngineGradualFlow")), keep_path = True)
copy(self, "bundled_*.json", curaengine_plugin_gradual_flow.resdirs[1], str(self.source_path.joinpath("resources", "bundled_packages")), keep_path = False)
for dependency in self.dependencies.host.values():
for bindir in dependency.cpp_info.bindirs:
copy(self, "*.dll", bindir, str(self._site_packages), keep_path = False)
for libdir in dependency.cpp_info.libdirs:
copy(self, "*.pyd", libdir, str(self._site_packages), keep_path = False)
copy(self, "*.pyi", libdir, str(self._site_packages), keep_path = False)
copy(self, "*.dylib", libdir, str(self._base_dir.joinpath("lib")), keep_path = False)
# Copy resources of cura_binary_data
cura_binary_data = self.dependencies["cura_binary_data"].cpp_info
copy(self, "*", cura_binary_data.resdirs[0], str(self._share_dir.joinpath("cura")), keep_path = True)
copy(self, "*", cura_binary_data.resdirs[1], str(self._share_dir.joinpath("uranium")), keep_path = True)
if self.settings.os == "Windows":
copy(self, "*", cura_binary_data.resdirs[2], str(self._share_dir.joinpath("windows")), keep_path = True)
# Copy materials (flat)
rmdir(self, os.path.join(self.source_folder, "resources", "materials"))
fdm_materials = self.dependencies["fdm_materials"].cpp_info
copy(self, "*", fdm_materials.resdirs[0], self.source_folder)
for dependency in self.dependencies.host.values():
for bindir in dependency.cpp_info.bindirs:
copy(self, "*.dll", bindir, str(self._site_packages), keep_path = False)
for libdir in dependency.cpp_info.libdirs:
copy(self, "*.pyd", libdir, str(self._site_packages), keep_path = False)
copy(self, "*.pyi", libdir, str(self._site_packages), keep_path = False)
copy(self, "*.dylib", libdir, str(self._base_dir.joinpath("lib")), keep_path = False)
# Copy internal resources
if self.options.internal:
cura_private_data = self.dependencies["cura_private_data"].cpp_info
copy(self, "*", cura_private_data.resdirs[0], str(self._share_dir.joinpath("cura")))
# Copy materials (flat)
rmdir(self, os.path.join(self.source_folder, "resources", "materials"))
fdm_materials = self.dependencies["fdm_materials"].cpp_info
copy(self, "*", fdm_materials.resdirs[0], self.source_folder)
# Copy internal resources
if self.options.internal:
cura_private_data = self.dependencies["cura_private_data"].cpp_info
copy(self, "*", cura_private_data.resdirs[0], str(self._share_dir.joinpath("cura")))
if self.options.devtools:
entitlements_file = "'{}'".format(os.path.join(self.source_folder, "packaging", "MacOS", "cura.entitlements"))
@ -402,56 +417,20 @@ class CuraConan(ConanFile):
self.run(f"{cpp_info.bindirs[0]}/msgfmt {po_file} -o {mo_file} -f", env="conanbuild", ignore_errors=True)
def deploy(self):
# Copy CuraEngine.exe to bindirs of Virtual Python Environment
curaengine = self.dependencies["curaengine"].cpp_info
copy(self, "CuraEngine.exe", curaengine.bindirs[0], str(self._base_dir), keep_path = False)
copy(self, "CuraEngine", curaengine.bindirs[0], str(self._base_dir), keep_path = False)
copy(self, "*", os.path.join(self.package_folder, self.cpp.package.resdirs[2]), os.path.join(self.install_folder, "packaging"), keep_path = True)
# Copy resources of Cura (keep folder structure)
# Copy resources of Cura (keep folder structure); these are needed by pyinstaller to determine the module structure
copy(self, "*", os.path.join(self.package_folder, self.cpp_info.bindirs[0]), str(self._base_dir), keep_path = False)
copy(self, "*", os.path.join(self.package_folder, self.cpp_info.libdirs[0]), str(self._site_packages.joinpath("cura")), keep_path = True)
copy(self, "*", os.path.join(self.package_folder, self.cpp_info.resdirs[0]), str(self._share_dir.joinpath("cura", "resources")), keep_path = True)
copy(self, "*", os.path.join(self.package_folder, self.cpp_info.resdirs[1]), str(self._share_dir.joinpath("cura", "plugins")), keep_path = True)
# Copy materials (flat)
fdm_materials = self.dependencies["fdm_materials"].cpp_info
copy(self, "*", fdm_materials.resdirs[0], str(self._share_dir.joinpath("cura")))
# Copy internal resources
if self.options.internal:
cura_private_data = self.dependencies["cura_private_data"].cpp_info
copy(self, "*", cura_private_data.resdirs[0], str(self._share_dir.joinpath("cura")))
# Copy resources of Uranium (keep folder structure)
uranium = self.dependencies["uranium"].cpp_info
copy(self, "*", uranium.resdirs[0], str(self._share_dir.joinpath("uranium", "resources")), keep_path = True)
copy(self, "*", uranium.resdirs[1], str(self._share_dir.joinpath("uranium", "plugins")), keep_path = True)
copy(self, "*", uranium.libdirs[0], str(self._site_packages.joinpath("UM")), keep_path = True)
# TODO: figure out if this is still needed
copy(self, "*", os.path.join(uranium.libdirs[0], "Qt", "qml", "UM"), str(self._site_packages.joinpath("PyQt6", "Qt6", "qml", "UM")), keep_path = True)
# Copy resources of cura_binary_data
cura_binary_data = self.dependencies["cura_binary_data"].cpp_info
copy(self, "*", cura_binary_data.resdirs[0], str(self._share_dir.joinpath("cura")), keep_path = True)
copy(self, "*", cura_binary_data.resdirs[1], str(self._share_dir.joinpath("uranium")), keep_path = True)
if self.settings.os == "Windows":
copy(self, "*", cura_binary_data.resdirs[2], str(self._share_dir.joinpath("windows")), keep_path = True)
for dependency in self.dependencies.host.values():
for bindir in dependency.cpp_info.bindirs:
copy(self, "*.dll", bindir, str(self._site_packages), keep_path = False)
for libdir in dependency.cpp_info.libdirs:
copy(self, "*.pyd", libdir, str(self._site_packages), keep_path = False)
copy(self, "*.pyi", libdir, str(self._site_packages), keep_path = False)
copy(self, "*.dylib", libdir, str(self._base_dir.joinpath("lib")), keep_path = False)
# Copy packaging scripts
copy(self, "*", os.path.join(self.package_folder, self.cpp_info.resdirs[2]), str(self._base_dir.joinpath("packaging")), keep_path = True)
# Copy requirements.txt's
copy(self, "*.txt", os.path.join(self.package_folder, self.cpp_info.resdirs[-1]), str(self._base_dir.joinpath("pip_requirements")), keep_path = False)
# Generate the GitHub Action version info Environment
version = self.conf_info.get("user.cura:version", default = self.version, check_type = str)
cura_version = Version(version)
@ -482,7 +461,6 @@ echo "CURA_APP_NAME={{ cura_app_name }}" >> ${{ env_prefix }}GITHUB_ENV
icon_path = "'{}'".format(os.path.join(self.package_folder, self.cpp_info.resdirs[2], self.conan_data["pyinstaller"]["icon"][str(self.settings.os)])).replace("\\", "\\\\"),
entitlements_file = entitlements_file if self.settings.os == "Macos" else "None")
def package(self):
copy(self, "cura_app.py", src = self.source_folder, dst = os.path.join(self.package_folder, self.cpp.package.bindirs[0]))
copy(self, "*", src = os.path.join(self.source_folder, "cura"), dst = os.path.join(self.package_folder, self.cpp.package.libdirs[0]))
@ -492,6 +470,13 @@ echo "CURA_APP_NAME={{ cura_app_name }}" >> ${{ env_prefix }}GITHUB_ENV
copy(self, "requirement*.txt", src = self.source_folder, dst = os.path.join(self.package_folder, self.cpp.package.resdirs[-1]))
copy(self, "*", src = os.path.join(self.source_folder, "packaging"), dst = os.path.join(self.package_folder, self.cpp.package.resdirs[2]))
# Remove the CuraEngineGradualFlow plugin from the package
rmdir(self, os.path.join(self.package_folder, self.cpp.package.resdirs[1], "CuraEngineGradualFlow"))
rm(self, "bundled_*.json", os.path.join(self.package_folder, self.cpp.package.resdirs[0], "bundled_packages"), recursive = False)
# Remove the fdm_materials from the package
rmdir(self, os.path.join(self.package_folder, self.cpp.package.resdirs[0], "materials"))
def package_info(self):
self.user_info.pip_requirements = "requirements.txt"
self.user_info.pip_requirements_git = "requirements-ultimaker.txt"

cura/BackendPlugin.py Normal file
View File

@ -0,0 +1,110 @@
# Copyright (c) 2023 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
import subprocess
from typing import Optional, List
from UM.Logger import Logger
from UM.Message import Message
from UM.Settings.AdditionalSettingDefinitionAppender import AdditionalSettingDefinitionsAppender
from UM.PluginObject import PluginObject
from UM.i18n import i18nCatalog
from UM.Platform import Platform
class BackendPlugin(AdditionalSettingDefinitionsAppender, PluginObject):
catalog = i18nCatalog("cura")
def __init__(self) -> None:
super().__init__()
self.__port: int = 0
self._plugin_address: str = "127.0.0.1"
self._plugin_command: Optional[List[str]] = None
self._process = None
self._is_running = False
self._supported_slots: List[int] = []
def getSupportedSlots(self) -> List[int]:
return self._supported_slots
def isRunning(self):
return self._is_running
def setPort(self, port: int) -> None:
self.__port = port
def getPort(self) -> int:
return self.__port
def getAddress(self) -> str:
return self._plugin_address
def _validatePluginCommand(self) -> list[str]:
"""
Validate the plugin command and append the address and port arguments if no port is specified yet.
:return: A list of strings containing the validated plugin command.
"""
if not self._plugin_command or "--port" in self._plugin_command:
return self._plugin_command or []
return self._plugin_command + ["--address", self.getAddress(), "--port", str(self.__port)]
def start(self) -> bool:
"""
Starts the backend_plugin process.
:return: True if the plugin process started successfully, False otherwise.
"""
try:
# STDIN needs to be None because we provide no input, but communicate via a local socket instead.
# The NUL device sometimes doesn't exist on some computers.
Logger.info(f"Starting backend_plugin [{self._plugin_id}] with command: {self._validatePluginCommand()}")
popen_kwargs = {"stdin": None}
if Platform.isWindows():
popen_kwargs["creationflags"] = subprocess.CREATE_NO_WINDOW
self._process = subprocess.Popen(self._validatePluginCommand(), **popen_kwargs)
self._is_running = True
return True
except PermissionError:
Logger.log("e", f"Couldn't start EnginePlugin: {self._plugin_id} No permission to execute process.")
self._showMessage(self.catalog.i18nc("@info:plugin_failed",
f"Couldn't start EnginePlugin: {self._plugin_id}\nNo permission to execute process."),
message_type = Message.MessageType.ERROR)
except FileNotFoundError:
Logger.logException("e", f"Unable to find local EnginePlugin server executable for: {self._plugin_id}")
self._showMessage(self.catalog.i18nc("@info:plugin_failed",
f"Unable to find local EnginePlugin server executable for: {self._plugin_id}"),
message_type = Message.MessageType.ERROR)
except BlockingIOError:
Logger.logException("e", f"Couldn't start EnginePlugin: {self._plugin_id} Resource is temporarily unavailable")
self._showMessage(self.catalog.i18nc("@info:plugin_failed",
f"Couldn't start EnginePlugin: {self._plugin_id}\nResource is temporarily unavailable"),
message_type = Message.MessageType.ERROR)
except OSError as e:
Logger.logException("e", f"Couldn't start EnginePlugin {self._plugin_id} Operating system is blocking it (antivirus?)")
self._showMessage(self.catalog.i18nc("@info:plugin_failed",
f"Couldn't start EnginePlugin: {self._plugin_id}\nOperating system is blocking it (antivirus?)"),
message_type = Message.MessageType.ERROR)
return False
def stop(self) -> bool:
if not self._process:
self._is_running = False
return True # Nothing to stop
try:
self._process.terminate()
return_code = self._process.wait()
self._is_running = False
Logger.log("d", f"EnginePlugin: {self._plugin_id} was killed. Received return code {return_code}")
return True
except PermissionError:
Logger.log("e", f"Unable to kill running EnginePlugin: {self._plugin_id} Access is denied.")
self._showMessage(self.catalog.i18nc("@info:plugin_failed",
f"Unable to kill running EnginePlugin: {self._plugin_id}\nAccess is denied."),
message_type = Message.MessageType.ERROR)
return False
def _showMessage(self, message: str, message_type: Message.MessageType = Message.MessageType.ERROR) -> None:
Message(message, title=self.catalog.i18nc("@info:title", "EnginePlugin"), message_type = message_type).show()
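As an illustrative note (not part of this change-set): a minimal, hypothetical sketch of a concrete backend plugin built on the class above. The class name, executable name and slot number are assumptions; only the attributes come from BackendPlugin itself.
from cura.BackendPlugin import BackendPlugin

class ExampleEnginePlugin(BackendPlugin):
    def __init__(self) -> None:
        super().__init__()
        # Command used to launch the local plugin server. _validatePluginCommand()
        # appends "--address 127.0.0.1 --port <n>" when no "--port" is present.
        self._plugin_command = ["example_engine_plugin"]
        # Slot ids correspond to the SlotID enum in Cura.proto (101 == POSTPROCESS_MODIFY).
        self._supported_slots = [101]
CuraEngineBackend.startPlugins() would later assign a free port and call start(), which spawns the command as a subprocess (without a console window on Windows).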

View File

@ -50,6 +50,7 @@ from UM.Settings.Validator import Validator
from UM.View.SelectionPass import SelectionPass # For typing.
from UM.Workspace.WorkspaceReader import WorkspaceReader
from UM.i18n import i18nCatalog
from UM.Version import Version
from cura import ApplicationMetadata
from cura.API import CuraAPI
from cura.API.Account import Account
@ -206,6 +207,8 @@ class CuraApplication(QtApplication):
self._cura_scene_controller = None
self._machine_error_checker = None
self._backend_plugins: List[BackendPlugin] = []
self._machine_settings_manager = MachineSettingsManager(self, parent = self)
self._material_management_model = None
self._quality_management_model = None
@ -616,6 +619,16 @@ class CuraApplication(QtApplication):
def _onEngineCreated(self):
self._qml_engine.addImageProvider("print_job_preview", PrintJobPreviewImageProvider.PrintJobPreviewImageProvider())
version = Version(self.getVersion())
if hasattr(sys, "frozen") and version.hasPostFix() and "beta" not in version.getPostfixType():
self._qml_engine.rootObjects()[0].setTitle(f"{ApplicationMetadata.CuraAppDisplayName} {ApplicationMetadata.CuraVersion}")
message = Message(
self._i18n_catalog.i18nc("@info:warning",
f"This version is not intended for production use. If you encounter any issues, please report them on our GitHub page, mentioning the full version {self.getVersion()}"),
lifetime = 0,
title = self._i18n_catalog.i18nc("@info:title", "Nightly build"),
message_type = Message.MessageType.WARNING)
message.show()
@pyqtProperty(bool)
def needToShowUserAgreement(self) -> bool:
@ -799,6 +812,7 @@ class CuraApplication(QtApplication):
self._plugin_registry.addType("profile_reader", self._addProfileReader)
self._plugin_registry.addType("profile_writer", self._addProfileWriter)
self._plugin_registry.addType("backend_plugin", self._addBackendPlugin)
if Platform.isLinux():
lib_suffixes = {"", "64", "32", "x32"} # A few common ones on different distributions.
@ -1754,6 +1768,13 @@ class CuraApplication(QtApplication):
def _addProfileWriter(self, profile_writer):
pass
def _addBackendPlugin(self, backend_plugin: "BackendPlugin") -> None:
self._container_registry.addAdditionalSettingDefinitionsAppender(backend_plugin)
self._backend_plugins.append(backend_plugin)
def getBackendPlugins(self) -> List["BackendPlugin"]:
return self._backend_plugins
@pyqtSlot("QSize")
def setMinimumWindowSize(self, size):
main_window = self.getMainWindow()
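For context, a hedged sketch of how a plugin package could expose such a backend plugin so that _addBackendPlugin() picks it up; the module and class names are hypothetical and follow the usual Uranium register() convention.
# __init__.py of a hypothetical plugin package; ExampleEnginePlugin is the made-up
# BackendPlugin subclass from the sketch above.
from . import ExampleEnginePlugin

def getMetaData():
    return {}

def register(app):
    # Returning the instance under the "backend_plugin" type routes it to
    # CuraApplication._addBackendPlugin(), which registers its extra setting
    # definitions and keeps it for CuraEngineBackend.startPlugins().
    return {"backend_plugin": ExampleEnginePlugin.ExampleEnginePlugin()}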

View File

@ -0,0 +1,141 @@
version: 1
AppDir:
path: {{ app_dir }}
app_info:
id: com.ultimaker.cura
name: UltiMaker Cura
icon: {{ icon }}
version: {{ version }}
exec: UltiMaker-Cura
exec_args: $@
apt:
arch:
- amd64
allow_unauthenticated: true
sources:
- sourceline: deb http://nl.archive.ubuntu.com/ubuntu/ jammy main restricted
- sourceline: deb http://nl.archive.ubuntu.com/ubuntu/ jammy-updates main restricted
- sourceline: deb http://nl.archive.ubuntu.com/ubuntu/ jammy universe
- sourceline: deb http://nl.archive.ubuntu.com/ubuntu/ jammy-updates universe
- sourceline: deb http://nl.archive.ubuntu.com/ubuntu/ jammy multiverse
- sourceline: deb http://nl.archive.ubuntu.com/ubuntu/ jammy-updates multiverse
- sourceline: deb http://nl.archive.ubuntu.com/ubuntu/ jammy-backports main restricted
universe multiverse
- sourceline: deb http://security.ubuntu.com/ubuntu jammy-security main restricted
- sourceline: deb http://security.ubuntu.com/ubuntu jammy-security universe
- sourceline: deb http://security.ubuntu.com/ubuntu jammy-security multiverse
- sourceline: deb https://releases.jfrog.io/artifactory/jfrog-debs xenial contrib
- sourceline: deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-14 main
- sourceline: deb https://ppa.launchpadcontent.net/ubuntu-toolchain-r/test/ubuntu/
jammy main
- sourceline: deb https://ppa.launchpadcontent.net/deadsnakes/ppa/ubuntu/ jammy
main
- sourceline: deb [arch=amd64] https://packages.microsoft.com/repos/ms-teams stable
main
- sourceline: deb https://ppa.launchpadcontent.net/ppa-verse/cling/ubuntu/ jammy
main
- sourceline: deb [arch=amd64] https://dl.google.com/linux/chrome/deb/ stable
main
- sourceline: deb [signed-by=/usr/share/keyrings/nodesource.gpg] https://deb.nodesource.com/node_14.x
jammy main
- sourceline: deb [arch=amd64 signed-by=/usr/share/keyrings/transip-stack.gpg]
https://mirror.transip.net/stack/software/deb/Ubuntu_22.04/ ./
- sourceline: deb http://repository.spotify.com stable non-free
- sourceline: deb [arch=amd64,arm64,armhf] http://packages.microsoft.com/repos/code
stable main
- sourceline: deb https://packagecloud.io/slacktechnologies/slack/debian/ jessie
main
include:
- libc6:amd64
- xdg-desktop-portal-kde:amd64
- libcap2:amd64
- libcom-err2:amd64
- libdbus-1-3:amd64
- libgpg-error0:amd64
- libgtk-3-common
- libkeyutils1:amd64
- libllvm13
- liblzma5:amd64
- libpcre3:amd64
- libqt6gui6
- libqt6qml6
- libqt6qmlworkerscript6
- libqt6quick6
- libselinux1:amd64
- libtinfo6:amd64
- qml6-module-qtqml-workerscript:amd64
- qml6-module-qtquick:amd64
- qt6-gtk-platformtheme:amd64
- qt6-qpa-plugins:amd64
# x11
- libx11-6
- libx11-xcb1
- libxcb1
- libxcb-render0
- libxcb-xfixes0
- libxcb-shape0
- libxcb-dri2-0
- libxcb-shm0
- libxcb-glx0
- libxcb-present0
- libxcb-dri3-0
# graphic libraries interface (safe graphics bundle including drivers, acceleration may not work in some systems)
- libglvnd0
- libglx0
- libglapi-mesa
- libgl1
- libegl1
- libgbm1
- libdrm2
- libglx-mesa0
- libgl1-amber-dri
- libgl1-mesa-dri
- mesa-utils
- libgl1-mesa-glx
- libdrm-amdgpu1
- libdrm-nouveau2
exclude:
- hicolor-icon-theme
- adwaita-icon-theme
- humanity-icon-theme
files:
include: []
exclude:
- usr/share/man
- usr/share/doc/*/README.*
- usr/share/doc/*/changelog.*
- usr/share/doc/*/NEWS.*
- usr/share/doc/*/TODO.*
runtime:
env:
APPDIR_LIBRARY_PATH: "$APPDIR/usr/lib/x86_64-linux-gnu:$APPDIR/lib/x86_64-linux-gnu:$APPDIR/usr/lib:$APPDIR/usr/lib/x86_64-linux-gnu/gdk-pixbuf-2.0/2.10.0/loaders"
PYTHONPATH: "$APPDIR"
QT_PLUGIN_PATH: "$APPDIR/qt/plugins"
QML2_IMPORT_PATH: "$APPDIR/qt/qml"
QT_QPA_PLATFORMTHEME: xdgdesktopportal
test:
fedora-30:
image: appimagecrafters/tests-env:fedora-30
command: ./AppRun
use_host_x: True
debian-stable:
image: appimagecrafters/tests-env:debian-stable
command: ./AppRun
use_host_x: True
archlinux-latest:
image: appimagecrafters/tests-env:archlinux-latest
command: ./AppRun
use_host_x: True
centos-7:
image: appimagecrafters/tests-env:centos-7
command: ./AppRun
use_host_x: True
ubuntu-xenial:
image: appimagecrafters/tests-env:ubuntu-xenial
command: ./AppRun
use_host_x: True
AppImage:
arch: {{ arch }}
file_name: {{ file_name }}
update-information: guess

View File

@ -0,0 +1,102 @@
# Copyright (c) 2023 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.
import argparse
import os
import shutil
import subprocess
from pathlib import Path
from jinja2 import Template
def prepare_workspace(dist_path, appimage_filename):
"""
Prepare the workspace for building the AppImage.
:param dist_path: Path to the distribution of Cura created with pyinstaller.
:param appimage_filename: name of the AppImage file.
:return:
"""
if not os.path.exists(dist_path):
raise RuntimeError(f"The dist_path {dist_path} does not exist.")
if os.path.exists(os.path.join(dist_path, appimage_filename)):
os.remove(os.path.join(dist_path, appimage_filename))
if not os.path.exists("AppDir"):
shutil.move(dist_path, "AppDir")
else:
print(f"AppDir already exists, assuming it is already prepared.")
copy_files("AppDir")
def build_appimage(dist_path, version, appimage_filename):
"""
Creates an AppImage file from the build artefacts created so far.
"""
generate_appimage_builder_config(dist_path, version, appimage_filename)
create_appimage()
sign_appimage(dist_path, appimage_filename)
def generate_appimage_builder_config(dist_path, version, appimage_filename):
with open(os.path.join(Path(__file__).parent, "AppImageBuilder.yml.jinja"), "r") as appimage_builder_file:
appimage_builder = appimage_builder_file.read()
template = Template(appimage_builder)
appimage_builder = template.render(app_dir = "./AppDir",
icon = "cura-icon.png",
version = version,
arch = "x86_64",
file_name = appimage_filename)
with open(os.path.join(Path(__file__).parent, "AppImageBuilder.yml"), "w") as appimage_builder_file:
appimage_builder_file.write(appimage_builder)
def copy_files(dist_path):
"""
Copy metadata files for the metadata of the AppImage.
"""
copied_files = [
(os.path.join("..", "icons", "cura-icon.svg"), os.path.join("usr", "share", "icons", "hicolor", "scalable", "apps", "cura-icon.svg")),
(os.path.join("..", "icons", "cura-icon_64x64.png"), os.path.join("usr", "share", "icons", "hicolor", "64x64", "apps", "cura-icon.png")),
(os.path.join("..", "icons", "cura-icon_128x128.png"), os.path.join("usr", "share", "icons", "hicolor", "128x128", "apps", "cura-icon.png")),
(os.path.join("..", "icons", "cura-icon_256x256.png"), os.path.join("usr", "share", "icons", "hicolor", "256x256", "apps", "cura-icon.png")),
(os.path.join("..", "icons", "cura-icon_256x256.png"), "cura-icon.png"),
]
# A list of pairs rather than a dict: the 256x256 icon is copied to two destinations, and duplicate dict keys would silently drop one of them.
# TODO: openssl.cnf ???
packaging_dir = os.path.dirname(__file__)
for source, dest in copied_files:
dest_file_path = os.path.join(dist_path, dest)
os.makedirs(os.path.dirname(dest_file_path), exist_ok = True)
shutil.copyfile(os.path.join(packaging_dir, source), dest_file_path)
def create_appimage():
appimagetool = os.getenv("APPIMAGEBUILDER_LOCATION", "appimage-builder-x86_64.AppImage")
command = [appimagetool, "--recipe", os.path.join(Path(__file__).parent, "AppImageBuilder.yml"), "--skip-test"]
result = subprocess.call(command)
if result != 0:
raise RuntimeError(f"The AppImageTool command returned non-zero: {result}")
def sign_appimage(dist_path, appimage_filename):
command = ["gpg", "--yes", "--armor", "--detach-sig", appimage_filename]
result = subprocess.call(command)
if result != 0:
raise RuntimeError(f"The GPG command returned non-zero: {result}")
if __name__ == "__main__":
parser = argparse.ArgumentParser(description = "Create AppImages of Cura.")
parser.add_argument("dist_path", type = str, help = "Path to where PyInstaller installed the distribution of Cura.")
parser.add_argument("version", type = str, help = "Full version number of Cura (e.g. '5.1.0-beta')")
parser.add_argument("filename", type = str, help = "Filename of the AppImage (e.g. 'UltiMaker-Cura-5.1.0-beta-Linux-X64.AppImage')")
args = parser.parse_args()
prepare_workspace(args.dist_path, args.filename)
build_appimage(args.dist_path, args.version, args.filename)
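As a usage note, a hedged example of driving these helpers directly; the distribution path, version and AppImage name below are placeholders, not values taken from the CI workflow.
# Hypothetical stand-alone use of the helpers defined above.
dist = "dist/UltiMaker-Cura"                                # PyInstaller output folder (placeholder)
version = "5.5.0-beta.1"                                    # full Cura version string (placeholder)
name = f"UltiMaker-Cura-{version}-linux-X64.AppImage"       # output file name (placeholder)

prepare_workspace(dist, name)         # move the PyInstaller output into ./AppDir and copy the icons
build_appimage(dist, version, name)   # render AppImageBuilder.yml, run appimage-builder, then GPG-sign the result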

View File

@ -20,10 +20,15 @@
...but we can use this longer `substring` expression instead (see https://github.com/wixtoolset/issues/issues/5609 )
-->
<xsl:key
name="ExeToRemove"
match="wix:Component[ substring( wix:File/@Source, string-length( wix:File/@Source ) - 3 ) = '.exe' ]"
name="UltiMaker_Cura_exe_ToRemove"
match="wix:Component[ substring( wix:File/@Source, string-length( wix:File/@Source ) - 17 ) = 'UltiMaker-Cura.exe' ]"
use="@Id"
/> <!-- Get the last 4 characters of a string using `substring( s, len(s) - 3 )`, it uses -3 and not -4 because XSLT uses 1-based indexes, not 0-based indexes. -->
/>
<xsl:key
name="CuraEngine_exe_ToRemove"
match="wix:Component[ substring( wix:File/@Source, string-length( wix:File/@Source ) - 17 ) = 'CuraEngine.exe' ]"
use="@Id"
/>
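<!-- substring( s, string-length( s ) - (n - 1) ) yields the last n characters of s, because XSLT indexes are 1-based: the offset is 17 for the 18-character 'UltiMaker-Cura.exe' and 13 for the 14-character 'CuraEngine.exe'. -->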
<!-- By default, copy all elements and nodes into the output... -->
<xsl:template match="@*|node()">
@ -32,6 +37,7 @@
</xsl:copy>
</xsl:template>
<!-- ...but if the element has the "ExeToRemove" key then don't render anything (i.e. removing it from the output) -->
<xsl:template match="*[ self::wix:Component or self::wix:ComponentRef ][ key( 'ExeToRemove', @Id ) ]"/>
<!-- ...but if the element has the "UltiMaker_Cura_exe_ToRemove" or "CuraEngine_exe_ToRemove" key then don't render anything (i.e. removing it from the output) -->
<xsl:template match="*[ self::wix:Component or self::wix:ComponentRef ][ key( 'UltiMaker_Cura_exe_ToRemove', @Id ) ]"/>
<xsl:template match="*[ self::wix:Component or self::wix:ComponentRef ][ key( 'CuraEngine_exe_ToRemove', @Id ) ]"/>
</xsl:stylesheet>

View File

@ -408,26 +408,27 @@ class WorkspaceDialog(QObject):
@pyqtSlot()
def showMissingMaterialsWarning(self) -> None:
result_message = Message(
i18n_catalog.i18nc("@info:status", "The material used in this project relies on some material definitions not available in Cura, this might produce undesirable print results. We highly recommend installing the full material package from the Marketplace."),
i18n_catalog.i18nc("@info:status",
"Some of the packages used in the project file are currently not installed in Cura, this might produce undesirable print results. We highly recommend installing the all required packages from the Marketplace."),
lifetime=0,
title=i18n_catalog.i18nc("@info:title", "Material profiles not installed"),
title=i18n_catalog.i18nc("@info:title", "Some required packages are not installed"),
message_type=Message.MessageType.WARNING
)
result_message.addAction(
"learn_more",
name=i18n_catalog.i18nc("@action:button", "Learn more"),
icon="",
description="Learn more about project materials.",
button_align=Message.ActionButtonAlignment.ALIGN_LEFT,
button_style=Message.ActionButtonStyle.LINK
"learn_more",
name=i18n_catalog.i18nc("@action:button", "Learn more"),
icon="",
description=i18n_catalog.i18nc("@label", "Learn more about project packages."),
button_align=Message.ActionButtonAlignment.ALIGN_LEFT,
button_style=Message.ActionButtonStyle.LINK
)
result_message.addAction(
"install_materials",
name=i18n_catalog.i18nc("@action:button", "Install Materials"),
icon="",
description="Install missing materials from project file.",
button_align=Message.ActionButtonAlignment.ALIGN_RIGHT,
button_style=Message.ActionButtonStyle.DEFAULT
"install_packages",
name=i18n_catalog.i18nc("@action:button", "Install Packages"),
icon="",
description=i18n_catalog.i18nc("@label", "Install missing packages from project file."),
button_align=Message.ActionButtonAlignment.ALIGN_RIGHT,
button_style=Message.ActionButtonStyle.DEFAULT
)
result_message.actionTriggered.connect(self._onMessageActionTriggered)
result_message.show()

View File

@ -364,7 +364,7 @@ UM.Dialog
UM.Label
{
id: warningText
text: catalog.i18nc("@label", "The material used in this project is currently not installed in Cura.<br/>Install the material profile and reopen the project.")
text: catalog.i18nc("@label", "This project contains materials or plugins that are currently not installed in Cura.<br/>Install the missing packages and reopen the project.")
}
}
@ -404,7 +404,7 @@ UM.Dialog
Cura.PrimaryButton
{
visible: warning
text: catalog.i18nc("@action:button", "Install missing material")
text: catalog.i18nc("@action:button", "Install missing packages")
onClicked: manager.installMissingPackages()
}
]

View File

@ -1,8 +1,9 @@
# Copyright (c) 2015-2022 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
import json
import re
from typing import Optional, cast, List, Dict
from typing import Optional, cast, List, Dict, Pattern, Set
from UM.Mesh.MeshWriter import MeshWriter
from UM.Math.Vector import Vector
@ -17,6 +18,7 @@ from UM.Settings.EmptyInstanceContainer import EmptyInstanceContainer
from cura.CuraApplication import CuraApplication
from cura.CuraPackageManager import CuraPackageManager
from cura.Settings import CuraContainerStack
from cura.Utils.Threading import call_on_qt_thread
from cura.Snapshot import Snapshot
@ -177,13 +179,15 @@ class ThreeMFWriter(MeshWriter):
archive.writestr(thumbnail_file, thumbnail_buffer.data())
# Add PNG to content types file
thumbnail_type = ET.SubElement(content_types, "Default", Extension = "png", ContentType = "image/png")
thumbnail_type = ET.SubElement(content_types, "Default", Extension="png", ContentType="image/png")
# Add thumbnail relation to _rels/.rels file
thumbnail_relation_element = ET.SubElement(relations_element, "Relationship", Target = "/" + THUMBNAIL_PATH, Id = "rel1", Type = "http://schemas.openxmlformats.org/package/2006/relationships/metadata/thumbnail")
thumbnail_relation_element = ET.SubElement(relations_element, "Relationship",
Target="/" + THUMBNAIL_PATH, Id="rel1",
Type="http://schemas.openxmlformats.org/package/2006/relationships/metadata/thumbnail")
# Write material metadata
material_metadata = self._getMaterialPackageMetadata()
self._storeMetadataJson({"packages": material_metadata}, archive, PACKAGE_METADATA_PATH)
packages_metadata = self._getMaterialPackageMetadata() + self._getPluginPackageMetadata()
self._storeMetadataJson({"packages": packages_metadata}, archive, PACKAGE_METADATA_PATH)
savitar_scene = Savitar.Scene()
@ -255,7 +259,64 @@ class ThreeMFWriter(MeshWriter):
metadata_file = zipfile.ZipInfo(path)
# We have to set the compress type of each file as well (it doesn't keep the type of the entire archive)
metadata_file.compress_type = zipfile.ZIP_DEFLATED
archive.writestr(metadata_file, json.dumps(metadata, separators=(", ", ": "), indent=4, skipkeys=True, ensure_ascii=False))
archive.writestr(metadata_file,
json.dumps(metadata, separators=(", ", ": "), indent=4, skipkeys=True, ensure_ascii=False))
@staticmethod
def _getPluginPackageMetadata() -> List[Dict[str, str]]:
"""Get metadata for all backend plugins that are used in the project.
:return: List of plugin metadata dictionaries.
"""
backend_plugin_enum_value_regex = re.compile(
r"PLUGIN::(?P<plugin_id>\w+)@(?P<version>\d+.\d+.\d+)::(?P<value>\w+)")
# This regex parses enum values to find if they contain custom
# backend engine values. These custom enum values are in the format
# PLUGIN::<plugin_id>@<version>::<value>
# where
# - plugin_id is the id of the plugin
# - version is in the semver format
# - value is the value of the enum
plugin_ids = set()
def addPluginIdsInStack(stack: CuraContainerStack) -> None:
for key in stack.getAllKeys():
value = str(stack.getProperty(key, "value"))
for plugin_id, _version, _value in backend_plugin_enum_value_regex.findall(value):
plugin_ids.add(plugin_id)
# Go through all stacks and find all the plugin id contained in the project
global_stack = CuraApplication.getInstance().getMachineManager().activeMachine
addPluginIdsInStack(global_stack)
for container in global_stack.getContainers():
addPluginIdsInStack(container)
for extruder_stack in global_stack.extruderList:
addPluginIdsInStack(extruder_stack)
for container in extruder_stack.getContainers():
addPluginIdsInStack(container)
metadata = {}
package_manager = cast(CuraPackageManager, CuraApplication.getInstance().getPackageManager())
for plugin_id in plugin_ids:
package_data = package_manager.getInstalledPackageInfo(plugin_id)
metadata[plugin_id] = {
"id": plugin_id,
"display_name": package_data.get("display_name") if package_data.get("display_name") else "",
"package_version": package_data.get("package_version") if package_data.get("package_version") else "",
"sdk_version_semver": package_data.get("sdk_version_semver") if package_data.get(
"sdk_version_semver") else "",
"type": "plugin",
}
# Storing in a dict and fetching values to avoid duplicates
return list(metadata.values())
@staticmethod
def _getMaterialPackageMetadata() -> List[Dict[str, str]]:
@ -280,7 +341,8 @@ class ThreeMFWriter(MeshWriter):
# Don't export bundled materials
continue
package_id = package_manager.getMaterialFilePackageId(extruder.material.getFileName(), extruder.material.getMetaDataEntry("GUID"))
package_id = package_manager.getMaterialFilePackageId(extruder.material.getFileName(),
extruder.material.getMetaDataEntry("GUID"))
package_data = package_manager.getInstalledPackageInfo(package_id)
# We failed to find the package for this material
@ -288,10 +350,14 @@ class ThreeMFWriter(MeshWriter):
Logger.info(f"Could not find package for material in extruder {extruder.id}, skipping.")
continue
material_metadata = {"id": package_id,
"display_name": package_data.get("display_name") if package_data.get("display_name") else "",
"package_version": package_data.get("package_version") if package_data.get("package_version") else "",
"sdk_version_semver": package_data.get("sdk_version_semver") if package_data.get("sdk_version_semver") else ""}
material_metadata = {
"id": package_id,
"display_name": package_data.get("display_name") if package_data.get("display_name") else "",
"package_version": package_data.get("package_version") if package_data.get("package_version") else "",
"sdk_version_semver": package_data.get("sdk_version_semver") if package_data.get(
"sdk_version_semver") else "",
"type": "material",
}
metadata[package_id] = material_metadata
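To make the setting format concrete, a small self-contained illustration of the enum value that _getPluginPackageMetadata() scans for; the value string mirrors the one used in the unit test below.
import re

backend_plugin_enum_value_regex = re.compile(
    r"PLUGIN::(?P<plugin_id>\w+)@(?P<version>\d+\.\d+\.\d+)::(?P<value>\w+)")

setting_value = "PLUGIN::my_plugin@0.0.1::custom_value"  # same shape as in the test fixture
match = backend_plugin_enum_value_regex.search(setting_value)
assert match is not None
print(match.group("plugin_id"), match.group("version"), match.group("value"))
# -> my_plugin 0.0.1 custom_value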

View File

@ -0,0 +1,60 @@
import sys
import os.path
from typing import Dict, Optional
import pytest
from unittest.mock import patch, MagicMock, PropertyMock
from UM.PackageManager import PackageManager
from cura.CuraApplication import CuraApplication
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), ".."))
import ThreeMFWriter
PLUGIN_ID = "my_plugin"
DISPLAY_NAME = "MyPlugin"
PACKAGE_VERSION = "0.0.1"
SDK_VERSION = "8.0.0"
@pytest.fixture
def package_manager() -> MagicMock:
pm = MagicMock(spec=PackageManager)
pm.getInstalledPackageInfo.return_value = {
"display_name": DISPLAY_NAME,
"package_version": PACKAGE_VERSION,
"sdk_version_semver": SDK_VERSION
}
return pm
@pytest.fixture
def machine_manager() -> MagicMock:
mm = MagicMock(spec=PackageManager)
active_machine = MagicMock()
active_machine.getAllKeys.return_value = ["infill_pattern", "layer_height", "material_bed_temperature"]
active_machine.getProperty.return_value = f"PLUGIN::{PLUGIN_ID}@{PACKAGE_VERSION}::custom_value"
active_machine.getContainers.return_value = []
active_machine.extruderList = []
mm.activeMachine = active_machine
return mm
@pytest.fixture
def application(package_manager, machine_manager):
app = MagicMock()
app.getPackageManager.return_value = package_manager
app.getMachineManager.return_value = machine_manager
return app
def test_enumParsing(application):
with patch("cura.CuraApplication.CuraApplication.getInstance", MagicMock(return_value=application)):
packages_metadata = ThreeMFWriter.ThreeMFWriter._getPluginPackageMetadata()[0]
assert packages_metadata.get("id") == PLUGIN_ID
assert packages_metadata.get("display_name") == DISPLAY_NAME
assert packages_metadata.get("package_version") == PACKAGE_VERSION
assert packages_metadata.get("sdk_version_semver") == SDK_VERSION
assert packages_metadata.get("type") == "plugin"

View File

@ -8,12 +8,31 @@ message ObjectList
repeated Setting settings = 2; // meshgroup settings (for one-at-a-time printing)
}
enum SlotID {
SETTINGS_BROADCAST = 0;
SIMPLIFY_MODIFY = 100;
POSTPROCESS_MODIFY = 101;
INFILL_MODIFY = 102;
GCODE_PATHS_MODIFY = 103;
INFILL_GENERATE = 200;
}
message EnginePlugin
{
SlotID id = 1;
string address = 2;
uint32 port = 3;
string plugin_name = 4;
string plugin_version = 5;
}
message Slice
{
repeated ObjectList object_lists = 1; // The meshgroups to be printed one after another
SettingList global_settings = 2; // The global settings used for the whole print job
repeated Extruder extruders = 3; // The settings sent to each extruder object
repeated SettingExtruder limit_to_extruder = 4; // From which stack the setting would inherit if not defined per object
repeated EnginePlugin engine_plugins = 5;
}
message Extruder

View File

@ -46,6 +46,19 @@ catalog = i18nCatalog("cura")
class CuraEngineBackend(QObject, Backend):
backendError = Signal()
printDurationMessage = Signal()
"""Emitted when we get a message containing print duration and material amount.
This also implies the slicing has finished.
:param time: The amount of time the print will take.
:param material_amount: The amount of material the print will use.
"""
slicingStarted = Signal()
"""Emitted when the slicing process starts."""
slicingCancelled = Signal()
"""Emitted when the slicing process is aborted forcefully."""
def __init__(self) -> None:
"""Starts the back-end plug-in.
@ -70,7 +83,7 @@ class CuraEngineBackend(QObject, Backend):
os.path.join(CuraApplication.getInstallPrefix(), "bin"),
os.path.dirname(os.path.abspath(sys.executable)),
]
self._last_backend_plugin_port = self._port + 1000
for path in search_path:
engine_path = os.path.join(path, executable_name)
if os.path.isfile(engine_path):
@ -86,9 +99,9 @@ class CuraEngineBackend(QObject, Backend):
self._default_engine_location = execpath
break
application = CuraApplication.getInstance() #type: CuraApplication
self._multi_build_plate_model = None #type: Optional[MultiBuildPlateModel]
self._machine_error_checker = None #type: Optional[MachineErrorChecker]
application: CuraApplication = CuraApplication.getInstance()
self._multi_build_plate_model: Optional[MultiBuildPlateModel] = None
self._machine_error_checker: Optional[MachineErrorChecker] = None
if not self._default_engine_location:
raise EnvironmentError("Could not find CuraEngine")
@ -99,13 +112,15 @@ class CuraEngineBackend(QObject, Backend):
application.getPreferences().addPreference("backend/location", self._default_engine_location)
# Workaround to disable layer view processing if layer view is not active.
self._layer_view_active = False #type: bool
self._layer_view_active: bool = False
self._onActiveViewChanged()
self._stored_layer_data = [] # type: List[Arcus.PythonMessage]
self._stored_optimized_layer_data = {} # type: Dict[int, List[Arcus.PythonMessage]] # key is build plate number, then arrays are stored until they go to the ProcessSlicesLayersJob
self._stored_layer_data: List[Arcus.PythonMessage] = []
self._scene = application.getController().getScene() #type: Scene
# key is build plate number, then arrays are stored until they go to the ProcessSlicesLayersJob
self._stored_optimized_layer_data: Dict[int, List[Arcus.PythonMessage]] = {}
self._scene: Scene = application.getController().getScene()
self._scene.sceneChanged.connect(self._onSceneChanged)
# Triggers for auto-slicing. Auto-slicing is triggered as follows:
@ -116,7 +131,7 @@ class CuraEngineBackend(QObject, Backend):
# If there is an error check, stop the auto-slicing timer, and only wait for the error check to be finished
# to start the auto-slicing timer again.
#
self._global_container_stack = None #type: Optional[ContainerStack]
self._global_container_stack: Optional[ContainerStack] = None
# Listeners for receiving messages from the back-end.
self._message_handlers["cura.proto.Layer"] = self._onLayerMessage
@ -128,31 +143,34 @@ class CuraEngineBackend(QObject, Backend):
self._message_handlers["cura.proto.PrintTimeMaterialEstimates"] = self._onPrintTimeMaterialEstimates
self._message_handlers["cura.proto.SlicingFinished"] = self._onSlicingFinishedMessage
self._start_slice_job = None #type: Optional[StartSliceJob]
self._start_slice_job_build_plate = None #type: Optional[int]
self._slicing = False #type: bool # Are we currently slicing?
self._restart = False #type: bool # Back-end is currently restarting?
self._tool_active = False #type: bool # If a tool is active, some tasks do not have to do anything
self._always_restart = True #type: bool # Always restart the engine when starting a new slice. Don't keep the process running. TODO: Fix engine statelessness.
self._process_layers_job = None #type: Optional[ProcessSlicedLayersJob] # The currently active job to process layers, or None if it is not processing layers.
self._build_plates_to_be_sliced = [] #type: List[int] # what needs slicing?
self._engine_is_fresh = True #type: bool # Is the newly started engine used before or not?
self._start_slice_job: Optional[StartSliceJob] = None
self._start_slice_job_build_plate: Optional[int] = None
self._slicing: bool = False # Are we currently slicing?
self._restart: bool = False # Back-end is currently restarting?
self._tool_active: bool = False # If a tool is active, some tasks do not have to do anything
self._always_restart: bool = True # Always restart the engine when starting a new slice. Don't keep the process running. TODO: Fix engine statelessness.
self._process_layers_job: Optional[ProcessSlicedLayersJob] = None # The currently active job to process layers, or None if it is not processing layers.
self._build_plates_to_be_sliced: List[int] = [] # what needs slicing?
self._engine_is_fresh: bool = True # Is the newly started engine used before or not?
self._backend_log_max_lines = 20000 #type: int # Maximum number of lines to buffer
self._error_message = None #type: Optional[Message] # Pop-up message that shows errors.
self._last_num_objects = defaultdict(int) #type: Dict[int, int] # Count number of objects to see if there is something changed
self._postponed_scene_change_sources = [] #type: List[SceneNode] # scene change is postponed (by a tool)
self._backend_log_max_lines: int = 20000 # Maximum number of lines to buffer
self._error_message: Optional[Message] = None # Pop-up message that shows errors.
self._time_start_process = None #type: Optional[float]
self._is_disabled = False #type: bool
# Count number of objects to see if there is something changed
self._last_num_objects: Dict[int, int] = defaultdict(int)
self._postponed_scene_change_sources: List[SceneNode] = [] # scene change is postponed (by a tool)
self._time_start_process: Optional[float] = None
self._is_disabled: bool = False
application.getPreferences().addPreference("general/auto_slice", False)
self._use_timer = False #type: bool
# When you update a setting and other settings get changed through inheritance, many propertyChanged signals are fired.
# This timer will group them up, and only slice for the last setting changed signal.
self._use_timer: bool = False
# When you update a setting and other settings get changed through inheritance, many propertyChanged
# signals are fired. This timer will group them up, and only slice for the last setting changed signal.
# TODO: Properly group propertyChanged signals by whether they are triggered by the same user interaction.
self._change_timer = QTimer() #type: QTimer
self._change_timer: QTimer = QTimer()
self._change_timer.setSingleShot(True)
self._change_timer.setInterval(500)
self.determineAutoSlicing()
@ -172,10 +190,34 @@ class CuraEngineBackend(QObject, Backend):
self._slicing_error_message.actionTriggered.connect(self._reportBackendError)
self._resetLastSliceTimeStats()
self._snapshot = None #type: Optional[QImage]
self._snapshot: Optional[QImage] = None
application.initializationFinished.connect(self.initialize)
def startPlugins(self) -> None:
"""
Ensure that all backend plugins are started
It assigns unique ports to each plugin to avoid conflicts.
:return:
"""
self.stopPlugins()
backend_plugins = CuraApplication.getInstance().getBackendPlugins()
for backend_plugin in backend_plugins:
# Set the port to prevent plugins from using the same one.
if backend_plugin.getPort() < 1:
backend_plugin.setPort(self._last_backend_plugin_port)
self._last_backend_plugin_port += 1
backend_plugin.start()
def stopPlugins(self) -> None:
"""
Ensure that all backend plugins will be terminated.
"""
backend_plugins = CuraApplication.getInstance().getBackendPlugins()
for backend_plugin in backend_plugins:
if backend_plugin.isRunning():
backend_plugin.stop()
def _resetLastSliceTimeStats(self) -> None:
self._time_start_process = None
self._time_send_message = None
@ -202,7 +244,8 @@ class CuraEngineBackend(QObject, Backend):
application.getMachineManager().globalContainerChanged.connect(self._onGlobalStackChanged)
self._onGlobalStackChanged()
# extruder enable / disable. Actually wanted to use machine manager here, but the initialization order causes it to crash
# Extruder enable / disable. Actually wanted to use machine manager here,
# but the initialization order causes it to crash
ExtruderManager.getInstance().extrudersChanged.connect(self._extruderChanged)
self.backendQuit.connect(self._onBackendQuit)
@ -239,26 +282,14 @@ class CuraEngineBackend(QObject, Backend):
command += ["connect", "127.0.0.1:{0}".format(self._port), ""]
parser = argparse.ArgumentParser(prog = "cura", add_help = False)
parser.add_argument("--debug", action = "store_true", default = False, help = "Turn on the debug mode by setting this option.")
parser.add_argument("--debug", action = "store_true", default = False,
help = "Turn on the debug mode by setting this option.")
known_args = vars(parser.parse_known_args()[0])
if known_args["debug"]:
command.append("-vvv")
return command
printDurationMessage = Signal()
"""Emitted when we get a message containing print duration and material amount.
This also implies the slicing has finished.
:param time: The amount of time the print will take.
:param material_amount: The amount of material the print will use.
"""
slicingStarted = Signal()
"""Emitted when the slicing process starts."""
slicingCancelled = Signal()
"""Emitted when the slicing process is aborted forcefully."""
@pyqtSlot()
def stopSlicing(self) -> None:
self.setState(BackendState.NotStarted)
@ -266,7 +297,8 @@ class CuraEngineBackend(QObject, Backend):
self._terminate()
self._createSocket()
if self._process_layers_job is not None: # We were processing layers. Stop that, the layers are going to change soon.
if self._process_layers_job is not None:
# We were processing layers. Stop that, the layers are going to change soon.
Logger.log("i", "Aborting process layers job...")
self._process_layers_job.abort()
self._process_layers_job = None
@ -281,7 +313,7 @@ class CuraEngineBackend(QObject, Backend):
self.markSliceAll()
self.slice()
@call_on_qt_thread # must be called from the main thread because of OpenGL
@call_on_qt_thread # Must be called from the main thread because of OpenGL
def _createSnapshot(self) -> None:
self._snapshot = None
if not CuraApplication.getInstance().isVisible:
@ -290,7 +322,7 @@ class CuraEngineBackend(QObject, Backend):
Logger.log("i", "Creating thumbnail image (just before slice)...")
try:
self._snapshot = Snapshot.snapshot(width = 300, height = 300)
except:
except Exception:
Logger.logException("w", "Failed to create snapshot image")
self._snapshot = None # Failing to create thumbnail should not fail creation of UFP
@ -302,6 +334,8 @@ class CuraEngineBackend(QObject, Backend):
self._createSnapshot()
self.startPlugins()
Logger.log("i", "Starting to slice...")
self._time_start_process = time()
if not self._build_plates_to_be_sliced:
@ -315,7 +349,8 @@ class CuraEngineBackend(QObject, Backend):
return
if not hasattr(self._scene, "gcode_dict"):
self._scene.gcode_dict = {} #type: ignore #Because we are creating the missing attribute here.
self._scene.gcode_dict = {} # type: ignore
# We need to ignore type because we are creating the missing attribute here.
# see if we really have to slice
application = CuraApplication.getInstance()
@ -326,9 +361,9 @@ class CuraEngineBackend(QObject, Backend):
self._stored_layer_data = []
if build_plate_to_be_sliced not in num_objects or num_objects[build_plate_to_be_sliced] == 0:
self._scene.gcode_dict[build_plate_to_be_sliced] = [] #type: ignore #Because we created this attribute above.
self._scene.gcode_dict[build_plate_to_be_sliced] = [] # type: ignore
# We need to ignore the type because we created this attribute above.
Logger.log("d", "Build plate %s has no objects to be sliced, skipping", build_plate_to_be_sliced)
if self._build_plates_to_be_sliced:
self.slice()
@ -337,7 +372,7 @@ class CuraEngineBackend(QObject, Backend):
if application.getPrintInformation() and build_plate_to_be_sliced == active_build_plate:
application.getPrintInformation().setToZeroPrintInformation(build_plate_to_be_sliced)
if self._process is None: # type: ignore
if self._process is None: # type: ignore
self._createSocket()
self.stopSlicing()
self._engine_is_fresh = False # Yes we're going to use the engine
@ -345,7 +380,7 @@ class CuraEngineBackend(QObject, Backend):
self.processingProgress.emit(0.0)
self.backendStateChange.emit(BackendState.NotStarted)
self._scene.gcode_dict[build_plate_to_be_sliced] = [] #type: ignore #[] indexed by build plate number
self._scene.gcode_dict[build_plate_to_be_sliced] = [] # type: ignore #[] indexed by build plate number
self._slicing = True
self.slicingStarted.emit()
@ -370,6 +405,8 @@ class CuraEngineBackend(QObject, Backend):
if self._start_slice_job is not None:
self._start_slice_job.cancel()
self.stopPlugins()
self.slicingCancelled.emit()
self.processingProgress.emit(0)
Logger.log("d", "Attempting to kill the engine process")
@ -377,14 +414,15 @@ class CuraEngineBackend(QObject, Backend):
if CuraApplication.getInstance().getUseExternalBackend():
return
if self._process is not None: # type: ignore
if self._process is not None: # type: ignore
Logger.log("d", "Killing engine process")
try:
self._process.terminate() # type: ignore
Logger.log("d", "Engine process is killed. Received return code %s", self._process.wait()) # type: ignore
self._process = None # type: ignore
self._process.terminate() # type: ignore
Logger.log("d", "Engine process is killed. Received return code %s", self._process.wait()) # type: ignore
self._process = None # type: ignore
except Exception as e: # terminating a process that is already terminating causes an exception, silently ignore this.
except Exception as e:
# Terminating a process that is already terminating causes an exception, silently ignore this.
Logger.log("d", "Exception occurred while trying to kill the engine %s", str(e))
def _onStartSliceCompleted(self, job: StartSliceJob) -> None:
@ -429,14 +467,14 @@ class CuraEngineBackend(QObject, Backend):
Logger.log("w", "Global container stack not assigned to CuraEngineBackend!")
return
extruders = ExtruderManager.getInstance().getActiveExtruderStacks()
error_keys = [] #type: List[str]
error_keys: List[str] = []
for extruder in extruders:
error_keys.extend(extruder.getErrorKeys())
if not extruders:
error_keys = self._global_container_stack.getErrorKeys()
error_labels = set()
for key in error_keys:
for stack in [self._global_container_stack] + extruders: #Search all container stacks for the definition of this setting. Some are only in an extruder stack.
for stack in [self._global_container_stack] + extruders: #Search all container stacks for the definition of this setting. Some are only in an extruder stack.
definitions = cast(DefinitionContainerInterface, stack.getBottom()).findDefinitions(key = key)
if definitions:
break #Found it! No need to continue search.
@ -524,7 +562,7 @@ class CuraEngineBackend(QObject, Backend):
# Preparation completed, send it to the backend.
self._socket.sendMessage(job.getSliceMessage())
# Notify the user that it's now up to the backend to do it's job
# Notify the user that it's now up to the backend to do its job
self.setState(BackendState.Processing)
# Handle time reporting.
@ -551,7 +589,8 @@ class CuraEngineBackend(QObject, Backend):
self._is_disabled = True
gcode_list = node.callDecoration("getGCodeList")
if gcode_list is not None:
self._scene.gcode_dict[node.callDecoration("getBuildPlateNumber")] = gcode_list #type: ignore #Because we generate this attribute dynamically.
self._scene.gcode_dict[node.callDecoration("getBuildPlateNumber")] = gcode_list # type: ignore
# We need to ignore type because we generate this attribute dynamically.
if self._use_timer == enable_timer:
return self._use_timer
@ -566,7 +605,7 @@ class CuraEngineBackend(QObject, Backend):
def _numObjectsPerBuildPlate(self) -> Dict[int, int]:
"""Return a dict with number of objects per build plate"""
num_objects = defaultdict(int) #type: Dict[int, int]
num_objects: Dict[int, int] = defaultdict(int)
for node in DepthFirstIterator(self._scene.getRoot()):
# Only count sliceable objects
if node.callDecoration("isSliceable"):
@ -646,11 +685,13 @@ class CuraEngineBackend(QObject, Backend):
self._terminate()
self._createSocket()
if error.getErrorCode() not in [Arcus.ErrorCode.BindFailedError, Arcus.ErrorCode.ConnectionResetError, Arcus.ErrorCode.Debug]:
if error.getErrorCode() not in [Arcus.ErrorCode.BindFailedError,
Arcus.ErrorCode.ConnectionResetError,
Arcus.ErrorCode.Debug]:
Logger.log("w", "A socket error caused the connection to be reset")
# The '_terminate()' function sets the job status to 'cancel'; after reconnecting to another port the job status
# needs to be updated. Otherwise backendState is "Unable To Slice"
# needs to be updated. Otherwise, backendState is "Unable To Slice"
if error.getErrorCode() == Arcus.ErrorCode.BindFailedError and self._start_slice_job is not None:
self._start_slice_job.setIsCancelled(False)
@ -672,7 +713,7 @@ class CuraEngineBackend(QObject, Backend):
for node in DepthFirstIterator(self._scene.getRoot()):
if node.callDecoration("getLayerData"):
if not build_plate_numbers or node.callDecoration("getBuildPlateNumber") in build_plate_numbers:
# We can assume that all nodes have a parent as we're looping through the scene (and filter out root)
# We can assume that all nodes have a parent as we're looping through the scene and filter out root
cast(SceneNode, node.getParent()).removeChild(node)
def markSliceAll(self) -> None:
@ -701,7 +742,7 @@ class CuraEngineBackend(QObject, Backend):
:param instance: The setting instance that has changed.
:param property: The property of the setting instance that has changed.
"""
if property == "value": # Only reslice if the value has changed.
if property == "value": # Only re-slice if the value has changed.
self.needsSlicing()
self._onChanged()
@ -765,13 +806,17 @@ class CuraEngineBackend(QObject, Backend):
:param message: The protobuf message signalling that slicing is finished.
"""
self.stopPlugins()
self.setState(BackendState.Done)
self.processingProgress.emit(1.0)
self._time_end_slice = time()
try:
gcode_list = self._scene.gcode_dict[self._start_slice_job_build_plate] #type: ignore #Because we generate this attribute dynamically.
except KeyError: # Can occur if the g-code has been cleared while a slice message is still arriving from the other end.
gcode_list = self._scene.gcode_dict[self._start_slice_job_build_plate] #type: ignore
# We need to ignore the type because it was generated dynamically.
except KeyError:
# Can occur if the g-code has been cleared while a slice message is still arriving from the other end.
gcode_list = []
application = CuraApplication.getInstance()
for index, line in enumerate(gcode_list):
@ -816,7 +861,8 @@ class CuraEngineBackend(QObject, Backend):
try:
self._scene.gcode_dict[self._start_slice_job_build_plate].append(message.data.decode("utf-8", "replace")) #type: ignore #Because we generate this attribute dynamically.
except KeyError: # Can occur if the g-code has been cleared while a slice message is still arriving from the other end.
except KeyError:
# Can occur if the g-code has been cleared while a slice message is still arriving from the other end.
pass # Throw the message away.
def _onGCodePrefixMessage(self, message: Arcus.PythonMessage) -> None:
@ -828,7 +874,8 @@ class CuraEngineBackend(QObject, Backend):
try:
self._scene.gcode_dict[self._start_slice_job_build_plate].insert(0, message.data.decode("utf-8", "replace")) #type: ignore #Because we generate this attribute dynamically.
except KeyError: # Can occur if the g-code has been cleared while a slice message is still arriving from the other end.
except KeyError:
# Can occur if the g-code has been cleared while a slice message is still arriving from the other end.
pass # Throw the message away.
def _onSliceUUIDMessage(self, message: Arcus.PythonMessage) -> None:
@ -955,7 +1002,8 @@ class CuraEngineBackend(QObject, Backend):
view = CuraApplication.getInstance().getController().getActiveView()
if view:
active_build_plate = CuraApplication.getInstance().getMultiBuildPlateModel().activeBuildPlate
if view.getPluginId() == "SimulationView": # If switching to layer view, we should process the layers if that hasn't been done yet.
if view.getPluginId() == "SimulationView":
# If switching to layer view, we should process the layers if that hasn't been done yet.
self._layer_view_active = True
# There is data and we're not slicing at the moment
# if we are slicing, there is no need to re-calculate the data as it will be invalid in a moment.
@ -974,7 +1022,6 @@ class CuraEngineBackend(QObject, Backend):
We should reset our state and start listening for new connections.
"""
if not self._restart:
if self._process: # type: ignore
return_code = self._process.wait()
@ -985,6 +1032,7 @@ class CuraEngineBackend(QObject, Backend):
self.stopSlicing()
else:
Logger.log("d", "Backend finished slicing. Resetting process and socket.")
self.stopPlugins()
self._process = None # type: ignore
def _reportBackendError(self, _message_id: str, _action_id: str) -> None:
@ -1007,7 +1055,8 @@ class CuraEngineBackend(QObject, Backend):
self._global_container_stack = CuraApplication.getInstance().getMachineManager().activeMachine
if self._global_container_stack:
self._global_container_stack.propertyChanged.connect(self._onSettingChanged) # Note: Only starts slicing when the value changed.
# Note: Only starts slicing when the value changed.
self._global_container_stack.propertyChanged.connect(self._onSettingChanged)
self._global_container_stack.containersChanged.connect(self._onChanged)
for extruder in self._global_container_stack.extruderList:

View File

@ -1,5 +1,6 @@
# Copyright (c) 2021-2022 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
import os
import numpy
from string import Formatter
@ -301,6 +302,21 @@ class StartSliceJob(Job):
for extruder_stack in global_stack.extruderList:
self._buildExtruderMessage(extruder_stack)
for plugin in CuraApplication.getInstance().getBackendPlugins():
for slot in plugin.getSupportedSlots():
# Right now we just send the message for every slot that the plugin supports; a single plugin can support multiple slots.
# In the future the frontend will need to decide which of the slots a plugin supports should actually be used.
# For instance, if two plugins each support a_generate and b_generate, only one plugin can be used per slot
# (e.g. plugin 1 does both, or plugin 1 does a_generate and plugin 2 does b_generate, etc.).
plugin_message = self._slice_message.addRepeatedMessage("engine_plugins")
plugin_message.id = slot
plugin_message.address = plugin.getAddress()
plugin_message.port = plugin.getPort()
plugin_message.plugin_name = plugin.getPluginId()
plugin_message.plugin_version = plugin.getVersion()
for group in filtered_object_groups:
group_message = self._slice_message.addRepeatedMessage("object_lists")
parent = group[0].getParent()

View File

@ -20,7 +20,6 @@ class MissingPackageList(RemotePackageList):
def __init__(self, packages_metadata: List[Dict[str, str]], parent: Optional["QObject"] = None) -> None:
super().__init__(parent)
self._packages_metadata: List[Dict[str, str]] = packages_metadata
self._package_type_filter = "material"
self._search_type = "package_ids"
self._requested_search_string = ",".join(map(lambda package: package["id"], packages_metadata))
@ -38,7 +37,14 @@ class MissingPackageList(RemotePackageList):
for package_metadata in self._packages_metadata:
if package_metadata["id"] not in returned_packages_ids:
package = PackageModel.fromIncompletePackageInformation(package_metadata["display_name"], package_metadata["package_version"], self._package_type_filter)
package_type = package_metadata["type"] if "type" in package_metadata else "material"
# When this feature was originally introduced only missing materials were detected. With the inclusion
# of backend plugins this system was extended to also detect missing plugins. With that change the type
# of the package was added to the metadata. Project files before this change do not have this type. So
# if the type is not present we assume it is a material.
package = PackageModel.fromIncompletePackageInformation(package_metadata["display_name"],
package_metadata["package_version"],
package_type)
self.appendItem({"package": package})
self.itemsChanged.emit()

View File

@@ -87,12 +87,22 @@ class PackageModel(QObject):
self._is_missing_package_information = False
@classmethod
def fromIncompletePackageInformation(cls, display_name: str, package_version: str, package_type: str) -> "PackageModel":
def fromIncompletePackageInformation(cls, display_name: str, package_version: str,
package_type: str) -> "PackageModel":
description = ""
match package_type:
case "material":
description = catalog.i18nc("@label:label Ultimaker Marketplace is a brand name, don't translate",
"The material package associated with the Cura project could not be found on the Ultimaker Marketplace. Use the partial material profile definition stored in the Cura project file at your own risk.")
case "plugin":
description = catalog.i18nc("@label:label Ultimaker Marketplace is a brand name, don't translate",
"The plugin associated with the Cura project could not be found on the Ultimaker Marketplace. As the plugin may be required to slice the project it might not be possible to correctly slice the file.")
package_data = {
"display_name": display_name,
"package_version": package_version,
"package_type": package_type,
"description": catalog.i18nc("@label:label Ultimaker Marketplace is a brand name, don't translate", "The material package associated with the Cura project could not be found on the Ultimaker Marketplace. Use the partial material profile definition stored in the Cura project file at your own risk.")
"description": description,
}
package_model = cls(package_data)
package_model.setIsMissingPackageInformation(True)

View File

@@ -12,7 +12,7 @@ import Cura 1.6 as Cura
Marketplace
{
modality: Qt.ApplicationModal
title: catalog.i18nc("@title", "Install missing Materials")
title: catalog.i18nc("@title", "Install missing packages")
pageContentsSource: "MissingPackages.qml"
showSearchHeader: false
showOnboadBanner: false

View File

@@ -5,7 +5,7 @@ import UM 1.4 as UM
Packages
{
pageTitle: catalog.i18nc("@header", "Install Materials")
pageTitle: catalog.i18nc("@header", "Install Packages")
bannerVisible: false
showUpdateButton: false

View File

@@ -0,0 +1,326 @@
# Limit XY Accel: Authored by: Greg Foresi (GregValiant)
# July 2023
# Sometimes bed-slinger printers need different Accel and Jerk values for the Y but Cura always makes them the same.
# This script changes the Accel and/or Jerk from the beginning of the 'Start Layer' to the end of the 'End Layer'.
# The existing M201 Max Accel will be changed to limit the Y (and/or X) accel at the printer. If you have Accel enabled in Cura and the XY Accel is set to 3000 then setting the Y limit to 1000 will result in the printer limiting the Y to 1000. This can keep tall skinny prints from breaking loose from the bed and failing. The script was not tested with Junction Deviation.
# If enabled, the Jerk setting is changed line-by-line within the gcode as there is no "limit" on Jerk.
# If 'Gradual ACCEL change' is enabled then the Accel is changed gradually from the Start to the End layer and that will be the final Accel setting in the file. If 'Gradual' is enabled then the Jerk settings will continue to be changed to the end of the file (rather than ending at the End layer).
# This post is intended for printers with moving beds (bed slingers) so UltiMaker printers are excluded.
# When setting an accel limit on multi-extruder printers ALL extruders are affected.
# This post does not distinguish between Print Accel and Travel Accel. The limit is the limit for all regardless. Example: Skin Accel = 1000 and Outer Wall accel = 500. If the limit is set to 300 then both Skin and Outer Wall will be Accel = 300.
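# Illustrative example (the numbers here are assumptions, not defaults): with Cura's accel at 3000
# and limits of X=500 / Y=1000, the script inserts "M201 X500 Y1000" right after the start layer's
# ";LAYER:" line, rewrites any existing "M205 X.. Y.." jerk lines within the affected range, and
# restores "M201 X3000 Y3000" (and the original jerk) after the end layer.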
from ..Script import Script
from cura.CuraApplication import CuraApplication
import re
from UM.Message import Message
class LimitXYAccelJerk(Script):
def initialize(self) -> None:
super().initialize()
# Get the Accel and Jerk and set the values in the setting boxes--
mycura = CuraApplication.getInstance().getGlobalContainerStack()
extruder = mycura.extruderList
accel_print = extruder[0].getProperty("acceleration_print", "value")
accel_travel = extruder[0].getProperty("acceleration_travel", "value")
jerk_print_old = extruder[0].getProperty("jerk_print", "value")
jerk_travel_old = extruder[0].getProperty("jerk_travel", "value")
self._instance.setProperty("x_accel_limit", "value", round(accel_print))
self._instance.setProperty("y_accel_limit", "value", round(accel_print))
self._instance.setProperty("x_jerk", "value", jerk_print_old)
self._instance.setProperty("y_jerk", "value", jerk_print_old)
ext_count = int(mycura.getProperty("machine_extruder_count", "value"))
machine_name = str(mycura.getProperty("machine_name", "value"))
# Warn the user if the printer is an Ultimaker-------------------------
if "Ultimaker" in machine_name:
Message(text = "<NOTICE> [Limit the X-Y Accel/Jerk] DID NOT RUN because Ultimaker printers don't have sliding beds.").show()
# Warn the user if the printer is multi-extruder------------------
if ext_count > 1:
Message(text = "<NOTICE> 'Limit the X-Y Accel/Jerk': The post processor treats all extruders the same. If you have multiple extruders they will all be subject to the same Accel and Jerk limits imposed. If you have different Travel and Print Accel they will also be subject to the same limits. If that is not acceptable then you should not use this Post Processor.").show()
def getSettingDataString(self):
return """{
"name": "Limit the X-Y Accel/Jerk (all extruders equal)",
"key": "LimitXYAccelJerk",
"metadata": {},
"version": 2,
"settings":
{
"type_of_change":
{
"label": "Immediate or Gradual change",
"description": "An 'Immediate' change will insert the new numbers immediately at the Start Layer. A 'Gradual' change will transition from the starting Accel to the new Accel limit across a range of layers.",
"type": "enum",
"options": {
"immediate_change": "Immediate",
"gradual_change": "Gradual"},
"default_value": "immediate_change"
},
"x_accel_limit":
{
"label": "X MAX Acceleration",
"description": "If this number is lower than the 'X Print Accel' in Cura then this will limit the Accel on the X axis. Enter the Maximum Acceleration value for the X axis. This will affect both Print and Travel Accel. If you enable an End Layer then at the end of that layer the Accel Limit will be reset (unless you choose 'Gradual' in which case the new limit goes to the top layer).",
"type": "int",
"enabled": true,
"minimum_value": 50,
"unit": "mm/sec² ",
"default_value": 500
},
"y_accel_limit":
{
"label": "Y MAX Acceleration",
"description": "If this number is lower than the Y accel in Cura then this will limit the Accel on the Y axis. Enter the Maximum Acceleration value for the Y axis. This will affect both Print and Travel Accel. If you enable an End Layer then at the end of that layer the Accel Limit will be reset (unless you choose 'Gradual' in which case the new limit goes to the top layer).",
"type": "int",
"enabled": true,
"minimum_value": 50,
"unit": "mm/sec² ",
"default_value": 500
},
"jerk_enable":
{
"label": "Change the Jerk",
"description": "Whether to change the Jerk values.",
"type": "bool",
"enabled": true,
"default_value": false
},
"x_jerk":
{
"label": " X jerk",
"description": "Enter the Jerk value for the X axis. Enter '0' to use the existing X Jerk. This setting will affect both the Print and Travel jerk.",
"type": "int",
"enabled": "jerk_enable",
"unit": "mm/sec ",
"default_value": 8
},
"y_jerk":
{
"label": " Y jerk",
"description": "Enter the Jerk value for the Y axis. Enter '0' to use the existing Y Jerk. This setting will affect both the Print and Travel jerk.",
"type": "int",
"enabled": "jerk_enable",
"unit": "mm/sec ",
"default_value": 8
},
"start_layer":
{
"label": "From Start of Layer:",
"description": "Use the Cura Preview numbers. Enter the Layer to start the changes at. The minimum is Layer 1.",
"type": "int",
"default_value": 1,
"minimum_value": 1,
"unit": "Lay# ",
"enabled": "type_of_change == 'immediate_change'"
},
"end_layer":
{
"label": "To End of Layer",
"description": "Use the Cura Preview numbers. Enter '-1' for the entire file or enter a layer number. The changes will end at your 'End Layer' and revert back to the original numbers.",
"type": "int",
"default_value": -1,
"minimum_value": -1,
"unit": "Lay# ",
"enabled": "type_of_change == 'immediate_change'"
},
"gradient_start_layer":
{
"label": " Gradual From Layer:",
"description": "Use the Cura Preview numbers. Enter the Layer to start the changes at. The minimum is Layer 1.",
"type": "int",
"default_value": 1,
"minimum_value": 1,
"unit": "Lay# ",
"enabled": "type_of_change == 'gradual_change'"
},
"gradient_end_layer":
{
"label": " Gradual To Layer",
"description": "Use the Cura Preview numbers. Enter '-1' for the top layer or enter a layer number. The last 'Gradual' change will continue to the end of the file.",
"type": "int",
"default_value": -1,
"minimum_value": -1,
"unit": "Lay# ",
"enabled": "type_of_change == 'gradual_change'"
}
}
}"""
def execute(self, data):
mycura = CuraApplication.getInstance().getGlobalContainerStack()
extruder = mycura.extruderList
machine_name = str(mycura.getProperty("machine_name", "value"))
print_sequence = str(mycura.getProperty("print_sequence", "value"))
# Exit if 'one_at_a_time' is enabled-------------------------
if print_sequence == "one_at_a_time":
Message(text = "<NOTICE> [Limit the X-Y Accel/Jerk] DID NOT RUN. This post processor is not compatible with 'One-at-a-Time' mode.").show()
data[0] += "; [LimitXYAccelJerk] DID NOT RUN because Cura is set to 'One-at-a-Time' mode.\n"
return data
# Exit if the printer is an Ultimaker-------------------------
if "Ultimaker" in machine_name:
Message(text = "<NOTICE> [Limit the X-Y Accel/Jerk] DID NOT RUN. This post processor is for bed slinger printers only.").show()
data[0] += "; [LimitXYAccelJerk] DID NOT RUN because the printer doesn't have a sliding bed.\n"
return data
type_of_change = str(self.getSettingValueByKey("type_of_change"))
accel_print_enabled = bool(extruder[0].getProperty("acceleration_enabled", "value"))
accel_travel_enabled = bool(extruder[0].getProperty("acceleration_travel_enabled", "value"))
accel_print = extruder[0].getProperty("acceleration_print", "value")
accel_travel = extruder[0].getProperty("acceleration_travel", "value")
jerk_print_enabled = str(extruder[0].getProperty("jerk_enabled", "value"))
jerk_travel_enabled = str(extruder[0].getProperty("jerk_travel_enabled", "value"))
jerk_print_old = extruder[0].getProperty("jerk_print", "value")
jerk_travel_old = extruder[0].getProperty("jerk_travel", "value")
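# Keep the larger of the print/travel values as the "old" accel and jerk; these are what get restored after the end layer.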
if int(accel_print) >= int(accel_travel):
accel_old = accel_print
else:
accel_old = accel_travel
jerk_travel = str(extruder[0].getProperty("jerk_travel", "value"))
if int(jerk_print_old) >= int(jerk_travel_old):
jerk_old = jerk_print_old
else:
jerk_old = jerk_travel_old
#Set the new Accel values----------------------------------------------------------
x_accel = str(self.getSettingValueByKey("x_accel_limit"))
y_accel = str(self.getSettingValueByKey("y_accel_limit"))
x_jerk = int(self.getSettingValueByKey("x_jerk"))
y_jerk = int(self.getSettingValueByKey("y_jerk"))
# Put the strings together-------------------------------------------
m201_limit_new = "M201 X" + x_accel + " Y" + y_accel
m201_limit_old = "M201 X" + str(round(accel_old)) + " Y" + str(round(accel_old))
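# Build the M205 substitution: if one jerk value is 0 only the other axis is rewritten; if both are non-zero the whole "M205 X.. Y.." pair is replaced.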
if x_jerk == 0:
m205_jerk_pattern = r"Y(\d*)"
m205_jerk_new = "Y" + str(y_jerk)
if y_jerk == 0:
m205_jerk_pattern = r"X(\d*)"
m205_jerk_new = "X" + str(x_jerk)
if x_jerk != 0 and y_jerk != 0:
m205_jerk_pattern = r"M205 X(\d*) Y(\d*)"
m205_jerk_new = "M205 X" + str(x_jerk) + " Y" + str(y_jerk)
m205_jerk_old = "M205 X" + str(jerk_old) + " Y" + str(jerk_old)
type_of_change = self.getSettingValueByKey("type_of_change")
#Get the indexes of the start and end layers----------------------------------------
if type_of_change == 'immediate_change':
start_layer = int(self.getSettingValueByKey("start_layer"))-1
end_layer = int(self.getSettingValueByKey("end_layer"))
else:
start_layer = int(self.getSettingValueByKey("gradient_start_layer"))-1
end_layer = int(self.getSettingValueByKey("gradient_end_layer"))
start_index = 2
end_index = len(data)-2
for num in range(2,len(data)-1):
if ";LAYER:" + str(start_layer) + "\n" in data[num]:
start_index = num
break
if int(end_layer) > 0:
for num in range(3,len(data)-1):
try:
if ";LAYER:" + str(end_layer) + "\n" in data[num]:
end_index = num
break
except Exception:
end_index = len(data)-2
#Add Accel limit and new Jerk at start layer-----------------------------------------------------
if type_of_change == "immediate_change":
layer = data[start_index]
lines = layer.split("\n")
for index, line in enumerate(lines):
if lines[index].startswith(";LAYER:"):
lines.insert(index+1,m201_limit_new)
if self.getSettingValueByKey("jerk_enable"):
lines.insert(index+2,m205_jerk_new)
data[start_index] = "\n".join(lines)
break
#Alter any existing jerk lines. Accel lines can be ignored-----------------------------------
for num in range(start_index,end_index,1):
layer = data[num]
lines = layer.split("\n")
for index, line in enumerate(lines):
if line.startswith("M205"):
lines[index] = re.sub(m205_jerk_pattern, m205_jerk_new, line)
data[num] = "\n".join(lines)
if end_layer != -1:
try:
layer = data[end_index-1]
lines = layer.split("\n")
lines.insert(len(lines)-2,m201_limit_old)
lines.insert(len(lines)-2,m205_jerk_old)
data[end_index-1] = "\n".join(lines)
except Exception:
pass
else:
data[len(data)-1] = m201_limit_old + "\n" + m205_jerk_old + "\n" + data[len(data)-1]
return data
elif type_of_change == "gradual_change":
layer_spread = end_index - start_index
if accel_old >= int(x_accel):
x_accel_hyst = round((accel_old - int(x_accel)) / layer_spread)
else:
x_accel_hyst = round((int(x_accel) - accel_old) / layer_spread)
if accel_old >= int(y_accel):
y_accel_hyst = round((accel_old - int(y_accel)) / layer_spread)
else:
y_accel_hyst = round((int(y_accel) - accel_old) / layer_spread)
if accel_old >= int(x_accel):
x_accel_start = round(round((accel_old - x_accel_hyst)/25)*25)
else:
x_accel_start = round(round((x_accel_hyst + accel_old)/25)*25)
if accel_old >= int(y_accel):
y_accel_start = round(round((accel_old - y_accel_hyst)/25)*25)
else:
y_accel_start = round(round((y_accel_hyst + accel_old)/25)*25)
m201_limit_new = "M201 X" + str(x_accel_start) + " Y" + str(y_accel_start)
#Add Accel limit and new Jerk at start layer-------------------------------------------------------------
layer = data[start_index]
lines = layer.split("\n")
for index, line in enumerate(lines):
if lines[index].startswith(";LAYER:"):
lines.insert(index+1,m201_limit_new)
if self.getSettingValueByKey("jerk_enable"):
lines.insert(index+2,m205_jerk_new)
data[start_index] = "\n".join(lines)
break
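# Step the accel toward the limit for each following layer, clamping at the target, and insert a fresh M201 after every ";LAYER:" marker.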
for num in range(start_index + 1, end_index,1):
layer = data[num]
lines = layer.split("\n")
if accel_old >= int(x_accel):
x_accel_start -= x_accel_hyst
if x_accel_start < int(x_accel): x_accel_start = int(x_accel)
else:
x_accel_start += x_accel_hyst
if x_accel_start > int(x_accel): x_accel_start = int(x_accel)
if accel_old >= int(y_accel):
y_accel_start -= y_accel_hyst
if y_accel_start < int(y_accel): y_accel_start = int(y_accel)
else:
y_accel_start += y_accel_hyst
if y_accel_start > int(y_accel): y_accel_start = int(y_accel)
m201_limit_new = "M201 X" + str(round(round(x_accel_start/25)*25)) + " Y" + str(round(round(y_accel_start/25)*25))
for index, line in enumerate(lines):
if line.startswith(";LAYER:"):
lines.insert(index+1, m201_limit_new)
continue
data[num] = "\n".join(lines)
#Alter any existing jerk lines. Accel lines can be ignored---------------
if self.getSettingValueByKey("jerk_enable"):
for num in range(start_index,len(data)-1,1):
layer = data[num]
lines = layer.split("\n")
for index, line in enumerate(lines):
if line.startswith("M205"):
lines[index] = re.sub(m205_jerk_pattern, m205_jerk_new, line)
data[num] = "\n".join(lines)
data[len(data)-1] = m201_limit_old + "\n" + m205_jerk_old + "\n" + data[len(data)-1]
return data