Merge branch 'main' into dependabot/pip/twisted-23.8.0

commit e9feee2747
Author: Erwan MATHIEU, 2024-06-11 10:36:26 +02:00 (committed by GitHub)
GPG Key ID: B5690EEEBB952194 (no known key found for this signature in database)
7788 changed files with 85827 additions and 29186 deletions

View File

@ -4,7 +4,13 @@ labels: ["Type: Bug", "Status: Triage", "Slicing Error :collision:"]
body:
- type: markdown
attributes:
value: |
### ✨Try our improved Cura 5.7✨
Before filling out the report below, we want you to try the latest Cura 5.7 Beta.
This version of Cura has become significantly more reliable and has an updated slicing engine that will automatically send a report to the Cura Team for analysis.
#### [You can find the downloads here](https://github.com/Ultimaker/Cura/releases/tag/5.7.0-beta.1) ####
If you still encounter a crash, you are welcome to report the issue so we can use your model as a test case; you can find instructions on how to do that below.
### Project File
**⚠️ Before you continue, we need your project file to troubleshoot a slicing crash.**
It contains the printer and settings we need for troubleshooting.
@ -23,14 +29,14 @@ body:
- type: input
attributes:
label: Cura Version
placeholder: 5.3.1
placeholder: 5.6.0
validations:
required: true
- type: markdown
attributes:
value: |
We work hard on improving our slicing crashes. Our most recent release is 5.3.1.
If you are not on the latest version of Cura, [you can download it here](https://github.com/Ultimaker/Cura/releases/tag/5.3.1)
We work hard on fixing slicing crashes. Our most recent release is 5.6.0.
If you are not on the latest version of Cura, [you can download it here](https://github.com/Ultimaker/Cura/releases/latest)
- type: input
attributes:
label: Operating System
@ -68,4 +74,3 @@ body:
description: You can add the zip file and additional information that is relevant to the issue in the comments below.
validations:
required: true

View File

@ -1,153 +0,0 @@
name: Create and Upload Conan package
on:
workflow_call:
inputs:
project_name:
required: true
type: string
recipe_id_full:
required: true
type: string
build_id:
required: true
type: number
build_info:
required: false
default: true
type: boolean
recipe_id_latest:
required: false
type: string
runs_on:
required: true
type: string
python_version:
required: true
type: string
conan_config_branch:
required: false
type: string
conan_logging_level:
required: false
type: string
conan_clean_local_cache:
required: false
type: boolean
default: false
conan_upload_community:
required: false
default: true
type: boolean
env:
CONAN_LOGIN_USERNAME: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD: ${{ secrets.CONAN_PASS }}
CONAN_LOG_RUN_TO_OUTPUT: 1
CONAN_LOGGING_LEVEL: ${{ inputs.conan_logging_level }}
CONAN_NON_INTERACTIVE: 1
jobs:
conan-package-create:
runs-on: ${{ inputs.runs_on }}
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: ${{ inputs.python_version }}
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements for runner
run: pip install -r https://raw.githubusercontent.com/Ultimaker/Cura/main/.github/workflows/requirements-conan-package.txt
# Note the runner requirements are always installed from the main branch in the Ultimaker/Cura repo
- name: Use Conan download cache (Bash)
if: ${{ runner.os != 'Windows' }}
run: conan config set storage.download_cache="$HOME/.conan/conan_download_cache"
- name: Use Conan download cache (Powershell)
if: ${{ runner.os == 'Windows' }}
run: conan config set storage.download_cache="C:\Users\runneradmin\.conan\conan_download_cache"
- name: Cache Conan local repository packages (Bash)
uses: actions/cache@v3
if: ${{ runner.os != 'Windows' }}
with:
path: |
$HOME/.conan/data
$HOME/.conan/conan_download_cache
key: conan-${{ inputs.runs_on }}-${{ runner.arch }}-create-cache
- name: Cache Conan local repository packages (Powershell)
uses: actions/cache@v3
if: ${{ runner.os == 'Windows' }}
with:
path: |
C:\Users\runneradmin\.conan\data
C:\.conan
C:\Users\runneradmin\.conan\conan_download_cache
key: conan-${{ inputs.runs_on }}-${{ runner.arch }}-create-cache
- name: Install MacOS system requirements
if: ${{ runner.os == 'Macos' }}
run: brew install autoconf automake ninja
# NOTE: Due to what are probably GitHub issues, we have to remove the cache and reconfigure before the rest.
# This may be because grub caches the disk it used last time, which is recreated each time.
- name: Install Linux system requirements
if: ${{ runner.os == 'Linux' }}
run: |
sudo rm /var/cache/debconf/config.dat
sudo dpkg --configure -a
sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
sudo apt update
sudo apt upgrade
sudo apt install build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config flex bison -y
- name: Install GCC-13 on ubuntu
if: ${{ startsWith(inputs.runs_on, 'ubuntu') }}
run: |
sudo apt install g++-13 gcc-13 -y
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 13
sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-13 13
- name: Create the default Conan profile
run: conan profile new default --detect
- name: Get Conan configuration from branch
if: ${{ inputs.conan_config_branch != '' }}
run: conan config install https://github.com/Ultimaker/conan-config.git -a "-b ${{ inputs.conan_config_branch }}"
- name: Get Conan configuration
run: |
conan config install https://github.com/Ultimaker/conan-config.git
conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
- name: Add Cura private Artifactory remote
run: conan remote add cura-private https://ultimaker.jfrog.io/artifactory/api/conan/cura-private True
- name: Create the Packages
run: conan install ${{ inputs.recipe_id_full }} --build=missing --update -c tools.build:skip_test=True
- name: Upload the Package(s)
if: ${{ always() && inputs.conan_upload_community }}
run: conan upload ${{ inputs.recipe_id_full }} -r cura --all -c
- name: Upload the Package(s) to the private Artifactory
if: ${{ always() && ! inputs.conan_upload_community }}
run: conan upload ${{ inputs.recipe_id_full }} -r cura-private --all -c

View File

@ -0,0 +1,40 @@
name: conan-package-resources
on:
push:
paths:
- '.github/workflows/conan-package-resources.yml'
- 'resources/definitions/**'
- 'resources/extruders/**'
- 'resources/images/**'
- 'resources/intent/**'
- 'resources/meshes/**'
- 'resources/quality/**'
- 'resources/variants/**'
- 'resources/conanfile.py'
branches:
- 'main'
- 'CURA-*'
- 'PP-*'
- 'NP-*'
- '[0-9].[0-9]*'
- '[0-9].[0-9][0-9]*'
env:
CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
jobs:
conan-recipe-version:
uses: ultimaker/cura-workflows/.github/workflows/conan-recipe-version.yml@main
with:
project_name: cura_resources
conan-package-export:
needs: [ conan-recipe-version ]
uses: ultimaker/cura-workflows/.github/workflows/conan-recipe-export.yml@main
with:
recipe_id_full: ${{ needs.conan-recipe-version.outputs.recipe_id_full }}
recipe_id_latest: ${{ needs.conan-recipe-version.outputs.recipe_id_latest }}
conan_recipe_root: "./resources/"
secrets: inherit

View File

@ -1,145 +1,59 @@
---
name: conan-package
# Exports the recipe, sources and binaries for Mac, Windows and Linux and upload these to the server such that these can
# be used downstream.
#
# It should run on pushes against main or CURA-* branches, but it will only create the binaries for main and release branches
on:
workflow_dispatch:
inputs:
create_binaries_windows:
required: true
default: false
description: 'create binaries Windows'
create_binaries_linux:
required: true
default: false
description: 'create binaries Linux'
create_binaries_macos:
required: true
default: false
description: 'create binaries Macos'
push:
paths:
- 'plugins/**'
- 'resources/**'
- 'cura/**'
- 'resources/bundled_packages/**'
- 'resources/i18n/**'
- 'resources/qml/**'
- 'resources/setting_visibility/**'
- 'resources/shaders/**'
- 'resources/texts/**'
- 'resources/themes/**'
- 'resources/public_key.pem'
- 'resources/README_resources.txt'
- 'icons/**'
- 'tests/**'
- 'packaging/**'
- '.github/workflows/conan-*.yml'
- '.github/workflows/conan-package.yml'
- '.github/workflows/notify.yml'
- '.github/workflows/requirements-conan-package.txt'
- '.github/workflows/requirements-runner.txt'
- 'requirements*.txt'
- 'conanfile.py'
- 'conandata.yml'
- 'GitVersion.yml'
- '*.jinja'
branches:
- main
- 'main'
- 'CURA-*'
- '[1-9].[0-9]'
- '[1-9].[0-9][0-9]'
tags:
- '[1-9].[0-9].[0-9]*'
- '[1-9].[0-9].[0-9]'
- '[1-9].[0-9][0-9].[0-9]*'
- 'PP-*'
- 'NP-*'
- '[0-9].[0-9]*'
- '[0-9].[0-9][0-9]*'
env:
CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
CONAN_LOGIN_USERNAME_CURA_CE: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD_CURA_CE: ${{ secrets.CONAN_PASS }}
CONAN_LOG_RUN_TO_OUTPUT: 1
CONAN_LOGGING_LEVEL: ${{ inputs.conan_logging_level }}
CONAN_NON_INTERACTIVE: 1
permissions: { }
jobs:
conan-recipe-version:
permissions:
contents: read
uses: ultimaker/cura/.github/workflows/conan-recipe-version.yml@main
uses: ultimaker/cura-workflows/.github/workflows/conan-recipe-version.yml@main
with:
project_name: cura
conan-package-create-linux:
conan-package-export:
needs: [ conan-recipe-version ]
runs-on: 'ubuntu-latest'
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Cache Conan data
id: cache-conan
uses: actions/cache@v3
with:
path: ~/.conan
key: ${{ runner.os }}-conan
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: '3.11.x'
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements for runner
run: pip install -r .github/workflows/requirements-conan-package.txt
# NOTE: Due to what are probably GitHub issues, we have to remove the cache and reconfigure before the rest.
# This may be because grub caches the disk it used last time, which is recreated each time.
- name: Install Linux system requirements
if: ${{ runner.os == 'Linux' }}
run: |
sudo rm /var/cache/debconf/config.dat
sudo dpkg --configure -a
sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
sudo apt update
sudo apt upgrade
sudo apt install efibootmgr build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config flex bison g++-12 gcc-12 -y
- name: Install GCC-13
run: |
sudo apt install g++-13 gcc-13 -y
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 13
sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-13 13
- name: Create the default Conan profile
run: conan profile new default --detect --force
- name: Get Conan configuration
run: |
conan config install https://github.com/Ultimaker/conan-config.git
conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
- name: Create the Packages
run: conan create . ${{ needs.conan-recipe-version.outputs.recipe_id_full }} --build=missing --update -o ${{ needs.conan-recipe-version.outputs.project_name }}:devtools=True -c tools.build:skip_test=True
- name: Create the latest alias
if: always()
run: conan alias ${{ needs.conan-recipe-version.outputs.recipe_id_latest }} ${{ needs.conan-recipe-version.outputs.recipe_id_full }}
- name: Upload the Package(s)
if: always()
run: |
conan upload ${{ needs.conan-recipe-version.outputs.recipe_id_full }} -r cura --all -c
conan upload ${{ needs.conan-recipe-version.outputs.recipe_id_latest }} -r cura -c
notify-create:
if: ${{ always() && (github.event_name == 'push' && (github.ref_name == 'main' || github.ref_name == 'master' || needs.conan-recipe-version.outputs.is_release_branch == 'true')) }}
needs: [ conan-recipe-version, conan-package-create-linux ]
uses: ultimaker/cura/.github/workflows/notify.yml@main
uses: ultimaker/cura-workflows/.github/workflows/conan-recipe-export.yml@main
with:
success: ${{ contains(join(needs.*.result, ','), 'success') }}
success_title: "New binaries created in ${{ github.repository }}"
success_body: "Created binaries for ${{ needs.conan-recipe-version.outputs.recipe_id_full }}"
failure_title: "Failed to create binaries in ${{ github.repository }}"
failure_body: "Failed to created binaries for ${{ needs.conan-recipe-version.outputs.recipe_id_full }}"
recipe_id_full: ${{ needs.conan-recipe-version.outputs.recipe_id_full }}
recipe_id_latest: ${{ needs.conan-recipe-version.outputs.recipe_id_latest }}
secrets: inherit
conan-package-create:
needs: [ conan-recipe-version, conan-package-export ]
uses: ultimaker/cura-workflows/.github/workflows/conan-package-create-linux.yml@main
with:
recipe_id_full: ${{ needs.conan-recipe-version.outputs.recipe_id_full }}
conan_extra_args: "-o cura:enable_i18n=True"
secrets: inherit

View File

@ -1,107 +0,0 @@
name: Export Conan Recipe to server
on:
workflow_call:
inputs:
recipe_id_full:
required: true
type: string
recipe_id_latest:
required: false
type: string
runs_on:
required: true
type: string
python_version:
required: true
type: string
conan_config_branch:
required: false
type: string
conan_logging_level:
required: false
type: string
conan_export_binaries:
required: false
type: boolean
conan_upload_community:
required: false
default: true
type: boolean
env:
CONAN_LOGIN_USERNAME: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD: ${{ secrets.CONAN_PASS }}
CONAN_LOG_RUN_TO_OUTPUT: 1
CONAN_LOGGING_LEVEL: ${{ inputs.conan_logging_level }}
CONAN_NON_INTERACTIVE: 1
jobs:
package-export:
runs-on: ${{ inputs.runs_on }}
steps:
- name: Checkout project
uses: actions/checkout@v3
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: ${{ inputs.python_version }}
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements and Create default Conan profile
run: |
pip install -r https://raw.githubusercontent.com/Ultimaker/Cura/main/.github/workflows/requirements-conan-package.txt
conan profile new default --detect
# Note the runner requirements are always installed from the main branch in the Ultimaker/Cura repo
- name: Cache Conan local repository packages
uses: actions/cache@v3
with:
path: $HOME/.conan/data
key: ${{ runner.os }}-conan-export-cache
- name: Get Conan configuration from branch
if: ${{ inputs.conan_config_branch != '' }}
run: conan config install https://github.com/Ultimaker/conan-config.git -a "-b ${{ inputs.conan_config_branch }}"
- name: Get Conan configuration
run: |
conan config install https://github.com/Ultimaker/conan-config.git
conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
- name: Add Cura private Artifactory remote
run: conan remote add cura-private https://ultimaker.jfrog.io/artifactory/api/conan/cura-private True
- name: Export the Package (binaries)
if: ${{ inputs.conan_export_binaries }}
run: conan create . ${{ inputs.recipe_id_full }} --build=missing --update -c tools.build:skip_test=True
- name: Export the Package
if: ${{ !inputs.conan_export_binaries }}
run: conan export . ${{ inputs.recipe_id_full }}
- name: Create the latest alias
if: always()
run: conan alias ${{ inputs.recipe_id_latest }} ${{ inputs.recipe_id_full }}
- name: Upload the Package(s)
if: ${{ always() && inputs.conan_upload_community }}
run: |
conan upload ${{ inputs.recipe_id_full }} -r cura --all -c
conan upload ${{ inputs.recipe_id_latest }} -r cura -c
- name: Upload the Package(s) to the private Artifactory
if: ${{ always() && ! inputs.conan_upload_community }}
run: |
conan upload ${{ inputs.recipe_id_full }} -r cura-private --all -c
conan upload ${{ inputs.recipe_id_latest }} -r cura-private -c

View File

@ -1,217 +0,0 @@
name: Get Conan Recipe Version
on:
workflow_call:
inputs:
project_name:
required: true
type: string
user:
required: false
default: ultimaker
type: string
additional_buildmetadata:
required: false
default: ""
type: string
outputs:
recipe_id_full:
description: "The full Conan recipe id: <name>/<version>@<user>/<channel>"
value: ${{ jobs.get-semver.outputs.recipe_id_full }}
recipe_id_latest:
description: "The full Conan recipe aliased (latest) id: <name>/(latest)@<user>/<channel>"
value: ${{ jobs.get-semver.outputs.recipe_id_latest }}
recipe_semver_full:
description: "The full semver <Major>.<Minor>.<Patch>-<PreReleaseTag>+<BuildMetaData>"
value: ${{ jobs.get-semver.outputs.semver_full }}
is_release_branch:
description: "is current branch a release branch?"
value: ${{ jobs.get-semver.outputs.release_branch }}
user:
description: "The conan user"
value: ${{ jobs.get-semver.outputs.user }}
channel:
description: "The conan channel"
value: ${{ jobs.get-semver.outputs.channel }}
project_name:
description: "The conan projectname"
value: ${{ inputs.project_name }}
jobs:
get-semver:
runs-on: ubuntu-latest
outputs:
recipe_id_full: ${{ steps.get-conan-broadcast-data.outputs.recipe_id_full }}
recipe_id_latest: ${{ steps.get-conan-broadcast-data.outputs.recipe_id_latest }}
semver_full: ${{ steps.get-conan-broadcast-data.outputs.semver_full }}
is_release_branch: ${{ steps.get-conan-broadcast-data.outputs.is_release_branch }}
user: ${{ steps.get-conan-broadcast-data.outputs.user }}
channel: ${{ steps.get-conan-broadcast-data.outputs.channel }}
steps:
- name: Checkout repo
uses: actions/checkout@v3
if: ${{ github.event.pull_request.head.repo.full_name == github.repository }}
with:
fetch-depth: 0
ref: ${{ github.head_ref }}
- name: Checkout repo PR
uses: actions/checkout@v3
if: ${{ github.event.pull_request.head.repo.full_name != github.repository }}
with:
fetch-depth: 0
ref: ${{ github.base_ref }}
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: "3.11.x"
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements and Create default Conan profile
run: |
pip install -r .github/workflows/requirements-conan-package.txt
pip install gitpython
- id: get-conan-broadcast-data
name: Get Conan broadcast data
run: |
import subprocess
import os
from conan.tools.scm import Version
from conan.errors import ConanException
from git import Repo
repo = Repo('.')
user = "${{ inputs.user }}".lower()
project_name = "${{ inputs.project_name }}"
event_name = "${{ github.event_name }}"
issue_number = "${{ github.ref }}".split('/')[2]
is_tag = "${{ github.ref_type }}" == "tag"
is_release_branch = False
ref_name = "${{ github.base_ref }}" if event_name == "pull_request" else "${{ github.ref_name }}"
buildmetadata = "" if "${{ inputs.additional_buildmetadata }}" == "" else "${{ inputs.additional_buildmetadata }}_"
# FIXME: for when we push a tag (such as an release)
channel = "testing"
if is_tag:
branch_version = Version(ref_name)
is_release_branch = True
channel = "_"
user = "_"
actual_version = f"{branch_version}"
else:
try:
branch_version = Version(repo.active_branch.name)
except ConanException:
branch_version = Version('0.0.0')
if ref_name == f"{branch_version.major}.{branch_version.minor}":
channel = 'stable'
is_release_branch = True
elif ref_name in ("main", "master"):
channel = 'testing'
else:
channel = "_".join(repo.active_branch.name.replace("-", "_").split("_")[:2]).lower()
if "pull_request" in event_name:
channel = f"pr_{issue_number}"
# %% Get the actual version
latest_branch_version = Version("0.0.0")
latest_branch_tag = None
for tag in repo.active_branch.repo.tags:
if str(tag).startswith("firmware") or str(tag).startswith("master"):
continue # Quick-fix for the versioning scheme name of the embedded team in fdm_materials(_private) repo
try:
version = Version(tag)
except ConanException:
continue
if version > latest_branch_version and version < Version("6.0.0"):
# FIXME: stupid old Cura tags 13.04 etc. keep popping up, also the fdm_material tags for firmware are messing with this
latest_branch_version = version
latest_branch_tag = repo.tag(tag)
if latest_branch_tag:
# %% Get the actual version
sha_commit = repo.commit().hexsha[:6]
latest_branch_version_prerelease = latest_branch_version.pre
if latest_branch_version.pre and not "." in str(latest_branch_version.pre):
# The prerelease did not contain a version number, default it to 1
latest_branch_version_prerelease = f"{latest_branch_version.pre}.1"
if event_name == "pull_request":
actual_version = f"{latest_branch_version.major}.{latest_branch_version.minor}.{latest_branch_version.patch}-{str(latest_branch_version_prerelease).lower()}+{buildmetadata}pr_{issue_number}_{sha_commit}"
channel_metadata = f"{channel}_{sha_commit}"
else:
if channel in ("stable", "_", ""):
channel_metadata = f"{sha_commit}"
else:
channel_metadata = f"{channel}_{sha_commit}"
if is_release_branch:
if (latest_branch_version.pre == "" or latest_branch_version.pre is None) and branch_version > latest_branch_version:
actual_version = f"{branch_version.major}.{branch_version.minor}.0-beta.1+{buildmetadata}{channel_metadata}"
elif latest_branch_version.pre == "":
# An actual full release has been created, we are working on a patch
bump_up_patch = int(str(latest_branch_version.patch)) + 1
actual_version = f"{latest_branch_version.major}.{latest_branch_version.minor}.{bump_up_patch}-beta.1+{buildmetadata}{channel_metadata}"
elif latest_branch_version.pre is None:
actual_version = f"{latest_branch_version.major}.{latest_branch_version.minor}.{int(latest_branch_version.patch.value) + 1}-beta.1+{buildmetadata}{channel_metadata}"
else:
# A beta release has been created; we are working toward the next beta or full release
bump_up_release_tag = int(str(latest_branch_version.pre).split('.')[1]) + 1
actual_version = f"{latest_branch_version.major}.{latest_branch_version.minor}.{latest_branch_version.patch}-{str(latest_branch_version.pre).split('.')[0]}.{bump_up_release_tag}+{buildmetadata}{channel_metadata}"
else:
max_branches_version = Version("0.0.0")
for branch in repo.references:
try:
if "remotes/origin" in branch.abspath:
b_version = Version(branch.name.split("/")[-1])
if b_version < Version("6.0.0") and b_version > max_branches_version:
max_branches_version = b_version
except:
pass
if max_branches_version > latest_branch_version:
actual_version = f"{max_branches_version.major}.{int(str(max_branches_version.minor)) + 1}.0-alpha+{buildmetadata}{channel}_{sha_commit}"
else:
actual_version = f"{latest_branch_version.major}.{int(str(latest_branch_version.minor)) + 1}.0-alpha+{buildmetadata}{channel_metadata}"
# %% Set the environment output
output_env = os.environ["GITHUB_OUTPUT"]
content = ""
if os.path.exists(output_env):
with open(output_env, "r") as f:
content = f.read()
with open(output_env, "w") as f:
f.write(content)
f.writelines(f"name={project_name}\n")
f.writelines(f"version={actual_version}\n")
f.writelines(f"channel={channel}\n")
f.writelines(f"recipe_id_full={project_name}/{actual_version}@{user}/{channel}\n")
f.writelines(f"recipe_id_latest={project_name}/latest@{user}/{channel}\n")
f.writelines(f"semver_full={actual_version}\n")
f.writelines(f"is_release_branch={str(is_release_branch).lower()}\n")
summary_env = os.environ["GITHUB_STEP_SUMMARY"]
with open(summary_env, "w") as f:
f.writelines(f"# {project_name}\n")
f.writelines(f"name={project_name}\n")
f.writelines(f"version={actual_version}\n")
f.writelines(f"channel={channel}\n")
f.writelines(f"recipe_id_full={project_name}/{actual_version}@{user}/{channel}\n")
f.writelines(f"recipe_id_latest={project_name}/latest@{user}/{channel}\n")
f.writelines(f"semver_full={actual_version}\n")
f.writelines(f"is_release_branch={str(is_release_branch).lower()}\n")
shell: python
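
For illustration, a minimal sketch of the branch-to-channel derivation performed by the (now removed) script above; the branch name is a hypothetical example, and the full script additionally handles tags, release branches and pull requests:

```python
# Minimal sketch, assuming a feature branch named "CURA-1234_fix_slicing"
# (hypothetical example); it mirrors only the single channel-derivation line above.
branch_name = "CURA-1234_fix_slicing"
channel = "_".join(branch_name.replace("-", "_").split("_")[:2]).lower()
print(channel)  # prints "cura_1234"
```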

View File

@ -31,54 +31,41 @@ on:
type: boolean
schedule:
# Daily at 5:15 CET
# Daily at 4:15 CET (main-branch) and 5:15 CET (release-branch)
- cron: '15 3 * * *'
- cron: '15 4 * * *'
env:
CURA_CONAN_VERSION: ${{ inputs.cura_conan_version || 'cura/latest@ultimaker/testing' }}
CONAN_ARGS: ${{ inputs.conan_args || '' }}
ENTERPRISE: ${{ inputs.enterprise || false }}
STAGING: ${{ inputs.staging || false }}
jobs:
default-values:
runs-on: ubuntu-latest
outputs:
cura_conan_version: ${{ steps.default.outputs.cura_conan_version }}
steps:
- name: Output default values
id: default
shell: python
run: |
import os
cura_conan_version = "cura/latest@ultimaker/testing" if "${{ github.event.inputs.cura_conan_version }}" == "" else "${{ github.event.inputs.cura_conan_version }}"
output_env = os.environ["GITHUB_OUTPUT"]
content = ""
if os.path.exists(output_env):
with open(output_env, "r") as f:
content = f.read()
with open(output_env, "w") as f:
f.write(content)
f.writelines(f"cura_conan_version={cura_conan_version}\n")
default_values:
uses: ultimaker/cura-workflows/.github/workflows/cura-installer-default-value.yml@main
with:
cura_conan_version: ${{ inputs.cura_conan_version }}
latest_release: '5.6'
latest_release_schedule_hour: 4
latest_release_tag: 'nightly'
windows-installer:
uses: ./.github/workflows/windows.yml
needs: [ default-values ]
uses: ultimaker/cura-workflows/.github/workflows/cura-installer-windows.yml@main
needs: [ default_values ]
with:
cura_conan_version: ${{ needs.default-values.outputs.cura_conan_version }}
cura_conan_version: ${{ needs.default_values.outputs.cura_conan_version }}
conan_args: ${{ github.event.inputs.conan_args }}
enterprise: ${{ github.event.inputs.enterprise == 'true' }}
staging: ${{ github.event.inputs.staging == 'true' }}
architecture: X64
operating_system: windows-2022
operating_system: self-hosted-Windows-X64
secrets: inherit
linux-installer:
uses: ./.github/workflows/linux.yml
needs: [ default-values ]
uses: ultimaker/cura-workflows/.github/workflows/cura-installer-linux.yml@main
needs: [ default_values ]
with:
cura_conan_version: ${{ needs.default-values.outputs.cura_conan_version }}
cura_conan_version: ${{ needs.default_values.outputs.cura_conan_version }}
conan_args: ${{ github.event.inputs.conan_args }}
enterprise: ${{ github.event.inputs.enterprise == 'true' }}
staging: ${{ github.event.inputs.staging == 'true' }}
@ -87,10 +74,10 @@ jobs:
secrets: inherit
macos-installer:
uses: ./.github/workflows/macos.yml
needs: [ default-values ]
uses: ultimaker/cura-workflows/.github/workflows/cura-installer-macos.yml@main
needs: [ default_values ]
with:
cura_conan_version: ${{ needs.default-values.outputs.cura_conan_version }}
cura_conan_version: ${{ needs.default_values.outputs.cura_conan_version }}
conan_args: ${{ github.event.inputs.conan_args }}
enterprise: ${{ github.event.inputs.enterprise == 'true' }}
staging: ${{ github.event.inputs.staging == 'true' }}
@ -99,10 +86,10 @@ jobs:
secrets: inherit
macos-arm-installer:
uses: ./.github/workflows/macos.yml
needs: [ default-values ]
uses: ultimaker/cura-workflows/.github/workflows/cura-installer-macos.yml@main
needs: [ default_values ]
with:
cura_conan_version: ${{ needs.default-values.outputs.cura_conan_version }}
cura_conan_version: ${{ needs.default_values.outputs.cura_conan_version }}
conan_args: ${{ github.event.inputs.conan_args }}
enterprise: ${{ github.event.inputs.enterprise == 'true' }}
staging: ${{ github.event.inputs.staging == 'true' }}
@ -114,12 +101,12 @@ jobs:
update-nightly-release:
if: ${{ inputs.nightly || github.event_name == 'schedule' }}
runs-on: ubuntu-latest
needs: [ windows-installer, linux-installer, macos-installer, macos-arm-installer ]
needs: [ default_values, windows-installer, linux-installer, macos-installer, macos-arm-installer ]
steps:
- name: Checkout
uses: actions/checkout@v3
# It's not necessary to download all three, but it does make sure we have at least one if an OS is skipped.
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Download the run info
uses: actions/download-artifact@v2
@ -168,7 +155,7 @@ jobs:
with:
name: ${{ steps.filename.outputs.LINUX }}-AppImage
path: installers
- name: Download linux installer jobs asc artifacts
uses: actions/download-artifact@v2
with:
@ -182,8 +169,8 @@ jobs:
- name: Update nightly release for Linux
run: |
gh release upload nightly installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-linux-X64.AppImage --clobber
gh release upload nightly installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-linux-X64.AppImage.asc --clobber
gh release upload ${{ needs.default_values.outputs.release_tag }} installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-linux-X64.AppImage --clobber
gh release upload ${{ needs.default_values.outputs.release_tag }} installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-linux-X64.AppImage.asc --clobber
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@ -206,8 +193,8 @@ jobs:
- name: Update nightly release for Windows
run: |
gh release upload nightly installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-win64-X64.msi --clobber
gh release upload nightly installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-win64-X64.exe --clobber
gh release upload ${{ needs.default_values.outputs.release_tag }} installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-win64-X64.msi --clobber
gh release upload ${{ needs.default_values.outputs.release_tag }} installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-win64-X64.exe --clobber
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@ -230,8 +217,8 @@ jobs:
- name: Update nightly release for MacOS (X64)
run: |
gh release upload nightly installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-X64.dmg --clobber
gh release upload nightly installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-X64.pkg --clobber
gh release upload ${{ needs.default_values.outputs.release_tag }} installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-X64.dmg --clobber
gh release upload ${{ needs.default_values.outputs.release_tag }} installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-X64.pkg --clobber
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@ -254,14 +241,32 @@ jobs:
- name: Update nightly release for MacOS (ARM-64)
run: |
gh release upload nightly installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-ARM64.dmg --clobber
gh release upload nightly installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-ARM64.pkg --clobber
gh release upload ${{ needs.default_values.outputs.release_tag }} installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-ARM64.dmg --clobber
gh release upload ${{ needs.default_values.outputs.release_tag }} installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-ARM64.pkg --clobber
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: create the release notes
shell: python
run: |
import os
import datetime
from jinja2 import Template
with open(".github/workflows/release_notes.md.jinja", "r") as f:
release_notes = Template(f.read())
current_nightly_beta = "${{ needs.default_values.outputs.release_tag }}".split("nightly-")[-1]
with open("release-notes.md", "w") as f:
f.write(release_notes.render(
timestamp="${{ steps.filename.outputs.NIGHTLY_TIME }}",
branch="" if "${{ needs.default-values.outputs.release_tag == 'nightly' }}" == 'true' else current_nightly_beta,
branch_specific="" if os.getenv("GITHUB_REF") == "refs/heads/main" else f"?branch={current_nightly_beta}",
))
- name: Update nightly release description (with date)
if: always()
run: |
gh release edit nightly --title "${{ steps.filename.outputs.NIGHTLY_NAME }}" --notes "Nightly release created on: ${{ steps.filename.outputs.NIGHTLY_TIME }}"
gh release edit ${{ needs.default_values.outputs.release_tag }} --title "${{ steps.filename.outputs.NIGHTLY_NAME }}" --notes-file release-notes.md
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@ -1,5 +1,5 @@
name: Linux Installer
run-name: ${{ inputs.cura_conan_version }} for Linux-${{ inputs.architecture }} by @${{ github.actor }}
run-name: ${{ inputs.cura_conan_version }} by @${{ github.actor }}
on:
workflow_dispatch:
@ -39,255 +39,14 @@ on:
options:
- ubuntu-22.04
workflow_call:
inputs:
cura_conan_version:
description: 'Cura Conan Version'
default: 'cura/latest@ultimaker/testing'
required: true
type: string
conan_args:
description: 'Conan args: e.g.: --require-override'
default: ''
required: false
type: string
enterprise:
description: 'Build Cura as an Enterprise edition'
default: false
required: true
type: boolean
staging:
description: 'Use staging API'
default: false
required: true
type: boolean
architecture:
description: 'Architecture'
required: true
default: 'X64'
type: string
operating_system:
description: 'OS'
required: true
default: 'ubuntu-22.04'
type: string
env:
CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
GPG_PRIVATE_KEY: ${{ secrets.GPG_PRIVATE_KEY }}
CURA_CONAN_VERSION: ${{ inputs.cura_conan_version }}
ENTERPRISE: ${{ inputs.enterprise }}
STAGING: ${{ inputs.staging }}
jobs:
cura-installer-create:
runs-on: ${{ inputs.operating_system }}
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: '3.10.x'
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements for runner
run: pip install -r .github/workflows/requirements-conan-package.txt
- name: Cache Conan local repository packages (Bash)
uses: actions/cache@v3
with:
path: |
$HOME/.conan/data
$HOME/.conan/conan_download_cache
key: conan-${{ runner.os }}-${{ runner.arch }}-installer-cache
- name: Hack needed specifically for ubuntu-22.04 from mid-Feb 2023 onwards
if: ${{ startsWith(inputs.operating_system, 'ubuntu-22.04') }}
run: sudo apt remove libodbc2 libodbcinst2 unixodbc-common -y
# NOTE: Due to what are probably GitHub issues, we have to remove the cache and reconfigure before the rest.
# This may be because grub caches the disk it used last time, which is recreated each time.
- name: Install Linux system requirements
run: |
sudo rm /var/cache/debconf/config.dat
sudo dpkg --configure -a
sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
sudo apt update
sudo apt upgrade
sudo apt install build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config binutils coreutils desktop-file-utils fakeroot fuse libgdk-pixbuf2.0-dev patchelf squashfs-tools strace util-linux zsync -y
# Get the AppImage tool
wget --no-check-certificate --quiet https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-x86_64.AppImage -O $GITHUB_WORKSPACE/appimagetool
chmod +x $GITHUB_WORKSPACE/appimagetool
echo "APPIMAGETOOL_LOCATION=$GITHUB_WORKSPACE/appimagetool" >> $GITHUB_ENV
# Get the AppImage builder
wget --no-check-certificate --quiet -O $GITHUB_WORKSPACE/appimage-builder-x86_64.AppImage https://github.com/AppImageCrafters/appimage-builder/releases/download/v1.1.0/appimage-builder-1.1.0-x86_64.AppImage
chmod +x appimage-builder-x86_64.AppImage
echo "APPIMAGEBUILDER_LOCATION=$GITHUB_WORKSPACE/appimage-builder-x86_64.AppImage" >> $GITHUB_ENV
# Make sure these tools can be found on the path
echo "$GITHUB_WORKSPACE" >> $GITHUB_PATH
- name: Install GCC-13
run: |
sudo apt install g++-13 gcc-13 -y
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 13
sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-13 13
- name: Create the default Conan profile
run: conan profile new default --detect --force
- name: Configure GPG Key Linux (Bash)
run: echo -n "$GPG_PRIVATE_KEY" | base64 --decode | gpg --import
- name: Get Conan configuration
run: |
conan config install https://github.com/Ultimaker/conan-config.git
conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
- name: Use Conan download cache (Bash)
run: conan config set storage.download_cache="$HOME/.conan/conan_download_cache"
- name: Create the Packages (Bash)
run: conan install $CURA_CONAN_VERSION ${{ inputs.conan_args }} --build=missing --update -if cura_inst -g VirtualPythonEnv -o cura:enterprise=$ENTERPRISE -o cura:staging=$STAGING --json "cura_inst/conan_install_info.json"
- name: Upload the Package(s)
if: always()
run: |
conan upload "*" -r cura --all -c
- name: Set Environment variables for Cura (bash)
run: |
. ./cura_inst/bin/activate_github_actions_env.sh
. ./cura_inst/bin/activate_github_actions_version_env.sh
# FIXME: This is a workaround to ensure that we use and pack a shared library for OpenSSL 1.1.1l. We currently compile
# OpenSSL statically for CPython, but our Python dependencies (such as PyQt6) require a shared library.
# Because Conan won't (easily) allow building the same library with two different options, we need to install it explicitly
# and do a manual copy to the VirtualEnv, such that Pyinstaller can find it.
- name: Install OpenSSL shared
run: conan install openssl/1.1.1l@_/_ --build=missing --update -o openssl:shared=True -g deploy
- name: Copy OpenSSL shared (Bash)
run: |
cp ./openssl/lib/*.so* ./cura_inst/bin/ || true
cp ./openssl/lib/*.dylib* ./cura_inst/bin/ || true
- name: Create the Cura dist
run: pyinstaller ./cura_inst/UltiMaker-Cura.spec
- name: Output the file name and extension
id: filename
shell: python
run: |
import os
enterprise = "-Enterprise" if "${{ inputs.enterprise }}" == "true" else ""
installer_filename = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-linux-${{ inputs.architecture }}"
output_env = os.environ["GITHUB_OUTPUT"]
content = ""
if os.path.exists(output_env):
with open(output_env, "r") as f:
content = f.read()
with open(output_env, "w") as f:
f.write(content)
f.writelines(f"INSTALLER_FILENAME={installer_filename}\n")
- name: Summarize the used Conan dependencies
shell: python
run: |
import os
import json
from pathlib import Path
conan_install_info_path = Path("cura_inst/conan_install_info.json")
conan_info = {"installed": []}
if os.path.exists(conan_install_info_path):
with open(conan_install_info_path, "r") as f:
conan_info = json.load(f)
sorted_deps = sorted([dep["recipe"]["id"].replace('#', r' rev: ') for dep in conan_info["installed"]])
summary_env = os.environ["GITHUB_STEP_SUMMARY"]
content = ""
if os.path.exists(summary_env):
with open(summary_env, "r") as f:
content = f.read()
with open(summary_env, "w") as f:
f.write(content)
f.writelines("# ${{ steps.filename.outputs.INSTALLER_FILENAME }}\n")
f.writelines("## Conan packages:\n")
for dep in sorted_deps:
f.writelines(f"`{dep}`\n")
- name: Summarize the used Python modules
shell: python
run: |
import os
import pkg_resources
summary_env = os.environ["GITHUB_STEP_SUMMARY"]
content = ""
if os.path.exists(summary_env):
with open(summary_env, "r") as f:
content = f.read()
with open(summary_env, "w") as f:
f.write(content)
f.writelines("## Python modules:\n")
for package in pkg_resources.working_set:
f.writelines(f"`{package.key}/{package.version}`\n")
- name: Create the Linux AppImage (Bash)
run: |
python ../cura_inst/packaging/AppImage-builder/create_appimage.py ./UltiMaker-Cura $CURA_VERSION_FULL "${{ steps.filename.outputs.INSTALLER_FILENAME }}.AppImage"
chmod +x "${{ steps.filename.outputs.INSTALLER_FILENAME }}.AppImage"
working-directory: dist
- name: Upload the AppImage
uses: actions/upload-artifact@v3
with:
name: ${{ steps.filename.outputs.INSTALLER_FILENAME }}-AppImage
path: |
dist/${{ steps.filename.outputs.INSTALLER_FILENAME }}.AppImage
retention-days: 5
- name: Upload the asc
uses: actions/upload-artifact@v3
with:
name: ${{ steps.filename.outputs.INSTALLER_FILENAME }}-asc
path: |
dist/${{ steps.filename.outputs.INSTALLER_FILENAME }}.AppImage.asc
retention-days: 5
- name: Write the run info
shell: python
run: |
import os
with open("run_info.sh", "w") as f:
f.writelines(f'echo "CURA_VERSION_FULL={os.environ["CURA_VERSION_FULL"]}" >> $GITHUB_ENV\n')
- name: Upload the run info
uses: actions/upload-artifact@v3
with:
name: linux-run-info
path: |
run_info.sh
retention-days: 5
notify-export:
if: ${{ always() }}
needs: [ cura-installer-create ]
uses: ultimaker/cura/.github/workflows/notify.yml@main
linux-installer:
uses: ultimaker/cura-workflows/.github/workflows/cura-installer-linux.yml@main
with:
success: ${{ contains(join(needs.*.result, ','), 'success') }}
success_title: "Create the Cura distributions"
success_body: "Installers for ${{ inputs.cura_conan_version }}"
failure_title: "Failed to create the Cura distributions"
failure_body: "Failed to create at least 1 installer for ${{ inputs.cura_conan_version }}"
secrets: inherit
cura_conan_version: ${{ inputs.cura_conan_version }}
conan_args: ${{ inputs.conan_args }}
enterprise: ${{ inputs.enterprise }}
staging: ${{ inputs.staging }}
architecture: ${{ inputs.architecture }}
operating_system: ${{ inputs.operating_system }}
secrets: inherit

View File

@ -1,5 +1,5 @@
name: Macos Installer
run-name: ${{ inputs.cura_conan_version }} for Macos-${{ inputs.architecture }} by @${{ github.actor }}
name: MacOS Installer
run-name: ${{ inputs.cura_conan_version }} by @${{ github.actor }}
on:
workflow_dispatch:
@ -42,257 +42,15 @@ on:
- self-hosted-ARM64
- macos-11
- macos-12
workflow_call:
inputs:
cura_conan_version:
description: 'Cura Conan Version'
default: 'cura/latest@ultimaker/testing'
required: true
type: string
conan_args:
description: 'Conan args: e.g.: --require-override'
default: ''
required: false
type: string
enterprise:
description: 'Build Cura as an Enterprise edition'
default: false
required: true
type: boolean
staging:
description: 'Use staging API'
default: false
required: true
type: boolean
architecture:
description: 'Architecture'
required: true
default: 'ARM64'
type: string
operating_system:
description: 'OS'
required: true
default: 'self-hosted-ARM64'
type: string
env:
CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
CODESIGN_IDENTITY: ${{ secrets.CODESIGN_IDENTITY }}
MAC_NOTARIZE_USER: ${{ secrets.MAC_NOTARIZE_USER }}
MAC_NOTARIZE_PASS: ${{ secrets.MAC_NOTARIZE_PASS }}
MACOS_CERT_P12: ${{ secrets.MACOS_CERT_P12 }}
MACOS_CERT_INSTALLER_P12: ${{ secrets.MACOS_CERT_INSTALLER_P12 }}
MACOS_CERT_USER: ${{ secrets.MACOS_CERT_USER }}
MACOS_CERT_PASSPHRASE: ${{ secrets.MACOS_CERT_PASSPHRASE }}
CURA_CONAN_VERSION: ${{ inputs.cura_conan_version }}
ENTERPRISE: ${{ inputs.enterprise }}
STAGING: ${{ inputs.staging }}
jobs:
cura-installer-create:
runs-on: ${{ inputs.operating_system }}
outputs:
INSTALLER_FILENAME: ${{ steps.filename.outputs.INSTALLER_FILENAME }}
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: '3.11.x'
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements for runner
run: pip install -r .github/workflows/requirements-conan-package.txt
- name: Cache Conan local repository packages (Bash)
uses: actions/cache@v3
with:
path: |
$HOME/.conan/data
$HOME/.conan/conan_download_cache
key: conan-${{ runner.os }}-${{ runner.arch }}-installer-cache
- name: Install MacOS system requirements
run: brew install cmake autoconf automake ninja create-dmg
- name: Create the default Conan profile
run: conan profile new default --detect --force
- name: Remove Macos keychain (Bash)
run: security delete-keychain signing_temp.keychain || true
- name: Configure Macos keychain Developer Cert(Bash)
id: macos-keychain-developer-cert
uses: apple-actions/import-codesign-certs@v1
with:
keychain-password: ${{ secrets.MACOS_KEYCHAIN_PASSWORD }}
p12-file-base64: ${{ secrets.MACOS_CERT_P12 }}
p12-password: ${{ secrets.MACOS_CERT_PASSPHRASE }}
- name: Configure Macos keychain Installer Cert (Bash)
id: macos-keychain-installer-cert
uses: apple-actions/import-codesign-certs@v1
with:
keychain-password: ${{ secrets.MACOS_KEYCHAIN_PASSWORD }}
create-keychain: false # keychain is created in previous use of action.
p12-file-base64: ${{ secrets.MACOS_CERT_INSTALLER_P12 }}
p12-password: ${{ secrets.MACOS_CERT_PASSPHRASE }}
- name: Remove private Artifactory
run: conan remote remove cura-conan-private || true
- name: Get Conan configuration
run: |
conan config install https://github.com/Ultimaker/conan-config.git
conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
- name: Use Conan download cache (Bash)
run: conan config set storage.download_cache="$HOME/.conan/conan_download_cache"
- name: Create the Packages (Bash)
run: conan install $CURA_CONAN_VERSION ${{ inputs.conan_args }} --build=missing --update -if cura_inst -g VirtualPythonEnv -o cura:enterprise=$ENTERPRISE -o cura:staging=$STAGING --json "cura_inst/conan_install_info.json"
- name: Upload the Package(s)
if: ${{ inputs.operating_system != 'self-hosted' }}
run: |
conan upload "*" -r cura --all -c
- name: Set Environment variables for Cura (bash)
run: |
. ./cura_inst/bin/activate_github_actions_env.sh
. ./cura_inst/bin/activate_github_actions_version_env.sh
- name: Unlock Macos keychain (Bash)
run: security unlock -p $TEMP_KEYCHAIN_PASSWORD signing_temp.keychain
env:
TEMP_KEYCHAIN_PASSWORD: ${{ steps.macos-keychain-developer-cert.outputs.keychain-password }}
# FIXME: This is a workaround to ensure that we use and pack a shared library for OpenSSL 1.1.1l. We currently compile
# OpenSSL statically for CPython, but our Python dependencies (such as PyQt6) require a shared library.
# Because Conan won't (easily) allow building the same library with two different options, we need to install it explicitly
# and do a manual copy to the VirtualEnv, such that Pyinstaller can find it.
- name: Install OpenSSL shared
run: conan install openssl/1.1.1l@_/_ --build=missing --update -o openssl:shared=True -g deploy
- name: Copy OpenSSL shared (Bash)
run: |
cp ./openssl/lib/*.so* ./cura_inst/bin/ || true
cp ./openssl/lib/*.dylib* ./cura_inst/bin/ || true
- name: Create the Cura dist
run: pyinstaller ./cura_inst/UltiMaker-Cura.spec
- name: Output the file name and extension
id: filename
shell: python
run: |
import os
enterprise = "-Enterprise" if "${{ inputs.enterprise }}" == "true" else ""
installer_filename = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-macos-${{ inputs.architecture }}"
output_env = os.environ["GITHUB_OUTPUT"]
content = ""
if os.path.exists(output_env):
with open(output_env, "r") as f:
content = f.read()
with open(output_env, "w") as f:
f.write(content)
f.writelines(f"INSTALLER_FILENAME={installer_filename}\n")
- name: Summarize the used Conan dependencies
shell: python
run: |
import os
import json
from pathlib import Path
conan_install_info_path = Path("cura_inst/conan_install_info.json")
conan_info = {"installed": []}
if os.path.exists(conan_install_info_path):
with open(conan_install_info_path, "r") as f:
conan_info = json.load(f)
sorted_deps = sorted([dep["recipe"]["id"].replace('#', r' rev: ') for dep in conan_info["installed"]])
summary_env = os.environ["GITHUB_STEP_SUMMARY"]
content = ""
if os.path.exists(summary_env):
with open(summary_env, "r") as f:
content = f.read()
with open(summary_env, "w") as f:
f.write(content)
f.writelines("# ${{ steps.filename.outputs.INSTALLER_FILENAME }}\n")
f.writelines("## Conan packages:\n")
for dep in sorted_deps:
f.writelines(f"`{dep}`\n")
- name: Summarize the used Python modules
shell: python
run: |
import os
import pkg_resources
summary_env = os.environ["GITHUB_STEP_SUMMARY"]
content = ""
if os.path.exists(summary_env):
with open(summary_env, "r") as f:
content = f.read()
with open(summary_env, "w") as f:
f.write(content)
f.writelines("## Python modules:\n")
for package in pkg_resources.working_set:
f.writelines(f"`{package.key}/{package.version}`\n")
- name: Create the Macos dmg (Bash)
run: python ../cura_inst/packaging/MacOS/build_macos.py --source_path ../cura_inst --dist_path . --cura_conan_version $CURA_CONAN_VERSION --filename "${{ steps.filename.outputs.INSTALLER_FILENAME }}" --build_dmg --build_pkg --app_name "$CURA_APP_NAME"
working-directory: dist
- name: Upload the dmg
uses: actions/upload-artifact@v3
with:
name: ${{ steps.filename.outputs.INSTALLER_FILENAME }}-dmg
path: |
dist/${{ steps.filename.outputs.INSTALLER_FILENAME }}.dmg
retention-days: 5
- name: Upload the pkg
uses: actions/upload-artifact@v3
with:
name: ${{ steps.filename.outputs.INSTALLER_FILENAME }}-pkg
path: |
dist/${{ steps.filename.outputs.INSTALLER_FILENAME }}.pkg
retention-days: 5
- name: Write the run info
shell: python
run: |
import os
with open("run_info.sh", "w") as f:
f.writelines(f'echo "CURA_VERSION_FULL={os.environ["CURA_VERSION_FULL"]}" >> $GITHUB_ENV\n')
- name: Upload the run info
uses: actions/upload-artifact@v3
with:
name: macos-run-info
path: |
run_info.sh
retention-days: 5
notify-export:
if: ${{ always() }}
needs: [ cura-installer-create ]
uses: ultimaker/cura/.github/workflows/notify.yml@main
macos-installer:
uses: ultimaker/cura-workflows/.github/workflows/cura-installer-macos.yml@main
with:
success: ${{ contains(join(needs.*.result, ','), 'success') }}
success_title: "Create the Cura distributions"
success_body: "Installers for ${{ inputs.cura_conan_version }}"
failure_title: "Failed to create the Cura distributions"
failure_body: "Failed to create at least 1 installer for ${{ inputs.cura_conan_version }}"
secrets: inherit
cura_conan_version: ${{ inputs.cura_conan_version }}
conan_args: ${{ inputs.conan_args }}
enterprise: ${{ inputs.enterprise }}
staging: ${{ inputs.staging }}
architecture: ${{ inputs.architecture }}
operating_system: ${{ inputs.operating_system }}
secrets: inherit

View File

@ -1,54 +0,0 @@
name: Get Conan Recipe Version
on:
workflow_call:
inputs:
success:
required: true
type: boolean
success_title:
required: true
type: string
success_body:
required: true
type: string
failure_title:
required: true
type: string
failure_body:
required: true
type: string
jobs:
slackNotification:
name: Slack Notification
runs-on: ubuntu-latest
steps:
- name: Slack notify on-success
if: ${{ inputs.success }}
uses: rtCamp/action-slack-notify@v2
env:
SLACK_USERNAME: ${{ github.repository }}
SLACK_COLOR: green
SLACK_ICON: https://github.com/Ultimaker/Cura/blob/main/icons/cura-128.png?raw=true
SLACK_TITLE: ${{ inputs.success_title }}
SLACK_MESSAGE: ${{ inputs.success_body }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
- name: Slack notify on-failure
if: ${{ !inputs.success }}
uses: rtCamp/action-slack-notify@v2
env:
SLACK_USERNAME: ${{ github.repository }}
SLACK_COLOR: red
SLACK_ICON: https://github.com/Ultimaker/Cura/blob/main/icons/cura-128.png?raw=true
SLACK_TITLE: ${{ inputs.failure_title }}
SLACK_MESSAGE: ${{ inputs.failure_body }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}

View File

@ -5,6 +5,9 @@ on:
path:
- "resources/**"
permissions:
contents: read
jobs:
printer-linter-diagnose:
name: Printer linter PR diagnose
@ -18,6 +21,7 @@ jobs:
- uses: technote-space/get-diff-action@v6
with:
DIFF_FILTER: AMRCD
PATTERNS: |
resources/+(extruders|definitions)/*.def.json
resources/+(intent|quality|variants)/**/*.inst.cfg
@ -41,11 +45,15 @@ jobs:
if: env.GIT_DIFF && !env.MATCHED_FILES
run: python printer-linter/src/terminal.py --diagnose --report printer-linter-result/fixes.yml ${{ env.GIT_DIFF_FILTERED }}
- name: Check Deleted Files(s)
if: env.GIT_DIFF
run: python printer-linter/src/terminal.py --deleted --report printer-linter-result/comment.md ${{ env.GIT_DIFF_FILTERED }}
- name: Save PR metadata
run: |
echo ${{ github.event.number }} > printer-linter-result/pr-id.txt
echo ${{ github.event.pull_request.head.repo.full_name }} > printer-linter-result/pr-head-repo.txt
echo ${{ github.event.pull_request.head.ref }} > printer-linter-result/pr-head-ref.txt
echo ${{ github.event.pull_request.head.sha }} > printer-linter-result/pr-head-sha.txt
- uses: actions/upload-artifact@v2
with:

View File

@ -6,76 +6,106 @@ on:
types: [completed]
jobs:
clang-tidy-results:
printer-linter-result:
# Trigger the job only if the previous (insecure) workflow completed successfully
if: ${{ github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'success' }}
runs-on: ubuntu-latest
permissions:
pull-requests: write
steps:
- name: Download analysis results
uses: actions/github-script@v3.1.0
uses: actions/github-script@v7
with:
script: |
let artifacts = await github.actions.listWorkflowRunArtifacts({
const artifacts = await github.rest.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: ${{github.event.workflow_run.id }},
});
let matchArtifact = artifacts.data.artifacts.filter((artifact) => {
const matchArtifact = artifacts.data.artifacts.filter((artifact) => {
return artifact.name == "printer-linter-result"
})[0];
let download = await github.actions.downloadArtifact({
const download = await github.rest.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: matchArtifact.id,
archive_format: "zip",
});
let fs = require("fs");
fs.writeFileSync("${{github.workspace}}/printer-linter-result.zip", Buffer.from(download.data));
- name: Set environment variables
run: |
mkdir printer-linter-result
unzip printer-linter-result.zip -d printer-linter-result
echo "pr_id=$(cat printer-linter-result/pr-id.txt)" >> $GITHUB_ENV
echo "pr_head_repo=$(cat printer-linter-result/pr-head-repo.txt)" >> $GITHUB_ENV
echo "pr_head_ref=$(cat printer-linter-result/pr-head-ref.txt)" >> $GITHUB_ENV
- uses: actions/checkout@v3
with:
repository: ${{ env.pr_head_repo }}
ref: ${{ env.pr_head_ref }}
persist-credentials: false
- name: Redownload analysis results
uses: actions/github-script@v3.1.0
with:
script: |
let artifacts = await github.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: ${{github.event.workflow_run.id }},
});
let matchArtifact = artifacts.data.artifacts.filter((artifact) => {
return artifact.name == "printer-linter-result"
})[0];
let download = await github.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: matchArtifact.id,
archive_format: "zip",
});
let fs = require("fs");
fs.writeFileSync("${{github.workspace}}/printer-linter-result.zip", Buffer.from(download.data));
const fs = require("fs");
fs.writeFileSync("${{ github.workspace }}/printer-linter-result.zip", Buffer.from(download.data));
- name: Extract analysis results
run: |
mkdir printer-linter-result
unzip printer-linter-result.zip -d printer-linter-result
unzip -j printer-linter-result.zip -d printer-linter-result
- name: Set PR details environment variables
uses: actions/github-script@v7
with:
script: |
const assert = require("node:assert").strict;
const fs = require("fs");
function exportVar(varName, fileName, regEx) {
const val = fs.readFileSync("${{ github.workspace }}/printer-linter-result/" + fileName, {
encoding: "ascii"
}).trimEnd();
assert.ok(regEx.test(val), "Invalid value format for " + varName);
core.exportVariable(varName, val);
}
exportVar("PR_ID", "pr-id.txt", /^[0-9]+$/);
exportVar("PR_HEAD_REPO", "pr-head-repo.txt", /^[-./0-9A-Z_a-z]+$/);
exportVar("PR_HEAD_SHA", "pr-head-sha.txt", /^[0-9A-Fa-f]+$/);
fs.access("${{ github.workspace }}/printer-linter-result/comment.md", fs.constants.F_OK, (err) => {
if (err) {
core.exportVariable("commentFileExists", "false");
} else {
core.exportVariable("commentFileExists", "true");
}
});
- uses: actions/checkout@v4
with:
repository: ${{ env.PR_HEAD_REPO }}
ref: ${{ env.PR_HEAD_SHA }}
persist-credentials: false
- name: Redownload analysis results
uses: actions/github-script@v7
with:
script: |
const artifacts = await github.rest.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: ${{github.event.workflow_run.id }},
});
const matchArtifact = artifacts.data.artifacts.filter((artifact) => {
return artifact.name == "printer-linter-result"
})[0];
const download = await github.rest.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: matchArtifact.id,
archive_format: "zip",
});
const fs = require("fs");
fs.writeFileSync("${{ github.workspace }}/printer-linter-result.zip", Buffer.from(download.data));
- name: Extract analysis results
run: |
mkdir printer-linter-result
unzip -j printer-linter-result.zip -d printer-linter-result
- name: Run PR Comments
if: env.commentFileExists == 'true'
uses: peter-evans/create-or-update-comment@v4
with:
issue-number: ${{ env.PR_ID }}
body-path: 'printer-linter-result/comment.md'
- name: Run clang-tidy-pr-comments action
uses: platisd/clang-tidy-pr-comments@bc0bb7da034a8317d54e7fe1e819159002f4cc40
uses: platisd/clang-tidy-pr-comments@v1
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
clang_tidy_fixes: printer-linter-result/fixes.yml
pull_request_id: ${{ env.pr_id }}
pull_request_id: ${{ env.PR_ID }}
request_changes: true
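The two github-script steps above follow the standard pattern for privileged `workflow_run` jobs: download the artifact produced by the unprivileged run, then validate every value it contains before exporting anything, so a malicious pull request cannot inject arbitrary strings into later steps. A minimal Python sketch of that validation idea, using the same file names and patterns as the step above (the helper itself is illustrative, not part of the workflow):

import re
from pathlib import Path

# Same files and patterns as the "Set PR details environment variables" step above.
PATTERNS = {
    "PR_ID": ("pr-id.txt", re.compile(r"^[0-9]+$")),
    "PR_HEAD_REPO": ("pr-head-repo.txt", re.compile(r"^[-./0-9A-Z_a-z]+$")),
    "PR_HEAD_SHA": ("pr-head-sha.txt", re.compile(r"^[0-9A-Fa-f]+$")),
}

def read_pr_metadata(artifact_dir: str) -> dict:
    """Read and validate the PR metadata files from the unpacked artifact."""
    values = {}
    for var_name, (file_name, pattern) in PATTERNS.items():
        value = Path(artifact_dir, file_name).read_text(encoding="ascii").rstrip()
        if not pattern.fullmatch(value):
            raise ValueError(f"Invalid value format for {var_name}: {value!r}")
        values[var_name] = value
    return values

if __name__ == "__main__":
    print(read_pr_metadata("printer-linter-result"))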


@ -1,15 +1,10 @@
name: process-pull-request
on:
pull_request_target:
types: [opened, reopened, edited, synchronize, review_requested, ready_for_review, assigned]
pull_request_target:
types: [ opened, reopened, edited, review_requested, ready_for_review, assigned ]
jobs:
add_label:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions-ecosystem/action-add-labels@v1
if: ${{ github.event.pull_request.head.repo.full_name != github.repository }}
with:
labels: 'PR: Community Contribution :crown:'
add_label:
uses: ultimaker/cura-workflows/.github/workflows/process-pull-request.yml@main
secrets: inherit


@ -0,0 +1,39 @@
# Nightlies
> :clock12: Created at: {{ timestamp }}
| | |
|--------------:|--------------------------------------------------------------------------------------------|
| **Nightlies** | [![nightly {{ branch }}](https://github.com/Ultimaker/Cura/actions/workflows/installers.yml/badge.svg{{ branch_specific }}?event=schedule)](https://github.com/Ultimaker/Cura/actions/workflows/installers.yml) |
# Unit Test results
| | |
|-------------------------------:|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| **Cura {{ branch }}** | [![unit-test](https://github.com/Ultimaker/Cura/actions/workflows/unit-test.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/Cura/actions/workflows/unit-test.yml) |
| **CuraEngine {{ branch }}** | [![unit-test](https://github.com/Ultimaker/CuraEngine/actions/workflows/unit-test.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/CuraEngine/actions/workflows/unit-test.yml) |
| **Uranium {{ branch }}** | [![unit-test](https://github.com/Ultimaker/Uranium/actions/workflows/unit-test.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/Uranium/actions/workflows/unit-test.yml) |
| **CuraEngine GradualFlow 0.1** | [![unit-test](https://github.com/Ultimaker/CuraEngine_plugin_gradual_flow/actions/workflows/unit-test.yml/badge.svg?branch=0.1)](https://github.com/Ultimaker/CuraEngine_plugin_gradual_flow/actions/workflows/unit-test.yml) |
| **synsepalum-dulcificum 0.1** | [![unit-test](https://github.com/Ultimaker/synsepalum-dulcificum/actions/workflows/unit-test.yml/badge.svg?branch=0.1)](https://github.com/Ultimaker/synsepalum-dulcificum/actions/workflows/unit-test.yml) |
| **libSavitar** | [![unit-test](https://github.com/Ultimaker/libSavitar/actions/workflows/unit-test.yml/badge.svg)](https://github.com/Ultimaker/libSavitar/actions/workflows/unit-test.yml) |
| **libnest2d** | [![unit-test](https://github.com/Ultimaker/libnest2d/actions/workflows/unit-test.yml/badge.svg)](https://github.com/Ultimaker/libnest2d/actions/workflows/unit-test.yml) |
# Conan packages
| | |
|------------------------------------:|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| **Cura {{ branch }}** | [![conan-package](https://github.com/Ultimaker/Cura/actions/workflows/conan-package.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/Cura/actions/workflows/conan-package.yml) |
| **CuraEngine {{ branch }}** | [![conan-package](https://github.com/Ultimaker/CuraEngine/actions/workflows/conan-package.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/CuraEngine/actions/workflows/conan-package.yml) |
| **Uranium {{ branch }}** | [![conan-package](https://github.com/Ultimaker/Uranium/actions/workflows/conan-package.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/Uranium/actions/workflows/conan-package.yml) |
| **fdm_materials {{ branch }}** | [![conan-package](https://github.com/Ultimaker/fdm_materials/actions/workflows/conan-package.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/fdm_materials/actions/workflows/conan-package.yml) |
| **cura-binary-data {{ branch }}** | [![conan-package](https://github.com/Ultimaker/cura-binary-data/actions/workflows/conan-package.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/cura-binary-data/actions/workflows/conan-package.yml) |
| **CuraEngine GradualFlow 0.1** | [![conan-package](https://github.com/Ultimaker/CuraEngine_plugin_gradual_flow/actions/workflows/conan-package.yml/badge.svg?branch=0.1)](https://github.com/Ultimaker/CuraEngine_plugin_gradual_flow/actions/workflows/conan-package.yml) |
| **synsepalum-dulcificum 0.1** | [![conan-package](https://github.com/Ultimaker/synsepalum-dulcificum/actions/workflows/conan-package.yml/badge.svg?branch=0.1)](https://github.com/Ultimaker/synsepalum-dulcificum/actions/workflows/conan-package.yml) |
| **CuraEngine gRPC definitions 0.1** | [![conan-package](https://github.com/Ultimaker/CuraEngine_grpc_definitions/actions/workflows/conan-package.yml/badge.svg?branch=0.1)](https://github.com/Ultimaker/CuraEngine_grpc_definitions/actions/workflows/conan-package.yml) |
| **libArcus** | [![conan-package](https://github.com/Ultimaker/libArcus/actions/workflows/conan-package.yml/badge.svg)](https://github.com/Ultimaker/libArcus/actions/workflows/conan-package.yml) |
| **pyArcus** | [![conan-package](https://github.com/Ultimaker/pyArcus/actions/workflows/conan-package.yml/badge.svg)](https://github.com/Ultimaker/pyArcus/actions/workflows/conan-package.yml) |
| **libSavitar** | [![conan-package](https://github.com/Ultimaker/libSavitar/actions/workflows/conan-package.yml/badge.svg)](https://github.com/Ultimaker/libSavitar/actions/workflows/conan-package.yml) |
| **pySavitar** | [![conan-package](https://github.com/Ultimaker/pySavitar/actions/workflows/conan-package.yml/badge.svg)](https://github.com/Ultimaker/pySavitar/actions/workflows/conan-package.yml) |
| **libnest2d** | [![conan-package](https://github.com/Ultimaker/libnest2d/actions/workflows/conan-package.yml/badge.svg)](https://github.com/Ultimaker/libnest2d/actions/workflows/conan-package.yml) |
| **pynest2d** | [![conan-package](https://github.com/Ultimaker/pynest2d/actions/workflows/conan-package.yml/badge.svg)](https://github.com/Ultimaker/pynest2d/actions/workflows/conan-package.yml) |
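This summary is a Jinja template: `{{ timestamp }}`, `{{ branch }}` and `{{ branch_specific }}` are filled in by whichever workflow publishes the nightly report. A self-contained Python sketch of how one badge row renders; the `branch` and `branch_specific` values shown are assumptions, not taken from the repository:

from datetime import datetime, timezone
from jinja2 import Template  # assumes Jinja2 is available

# One row from the summary above, reused as an inline template for illustration.
row = Template(
    "| **Cura {{ branch }}** | [![unit-test](https://github.com/Ultimaker/Cura/actions/"
    "workflows/unit-test.yml/badge.svg{{ branch_specific }})]"
    "(https://github.com/Ultimaker/Cura/actions/workflows/unit-test.yml) |"
)

print("> :clock12: Created at:", datetime.now(timezone.utc).isoformat(timespec="seconds"))
# branch_specific is assumed to be a query-string suffix such as "?branch=main" on
# branch-specific badges, or empty for the default branch.
print(row.render(branch="main", branch_specific="?branch=main"))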


@ -1,2 +1,2 @@
conan>=1.60.2,<2.0.0
sip
sip<=6.7.12


@ -1,5 +1,5 @@
# NOTE: Best to keep all of these remarks in, they might prove useful in the future.
# This is basically just the standard one that is sugested on 'new workflow'.
# This is basically just the standard one that is suggested on 'new workflow'.
name: Scorecard supply-chain security
on:
@ -21,51 +21,42 @@ jobs:
name: Scorecard analysis
runs-on: ubuntu-latest
permissions:
# Needed to upload the results to code-scanning dashboard.
# Needed for Code scanning upload
security-events: write
# Needed to publish results and get a badge (see publish_results below).
# Needed for GitHub OIDC token if publish_results is true
id-token: write
# Uncomment the permissions below if installing in a private repository.
# contents: read
# actions: read
steps:
- name: "Checkout code"
uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # v3.1.0
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
persist-credentials: false
- name: "Run analysis"
uses: ossf/scorecard-action@e38b1902ae4f44df626f11ba0734b14fb91f8f86 # v2.1.2
uses: ossf/scorecard-action@0864cf19026789058feabb7e87baa5f140aac736 # v2.3.1
with:
results_file: results.sarif
results_format: sarif
# (Optional) "write" PAT token. Uncomment the `repo_token` line below if:
# - you want to enable the Branch-Protection check on a *public* repository, or
# - you are installing Scorecard on a *private* repository
# To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-pat.
# repo_token: ${{ secrets.SCORECARD_TOKEN }}
# Public repositories:
# - Publish results to OpenSSF REST API for easy access by consumers
# - Allows the repository to include the Scorecard badge.
# - See https://github.com/ossf/scorecard-action#publishing-results.
# For private repositories:
# - `publish_results` will always be set to `false`, regardless
# of the value entered here.
# Scorecard team runs a weekly scan of public GitHub repos,
# see https://github.com/ossf/scorecard#public-data.
# Setting `publish_results: true` helps us scale by leveraging your workflow to
# extract the results instead of relying on our own infrastructure to run scans.
# And it's free for you!
publish_results: true
# Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
# format to the repository Actions tab.
# Upload the results as artifacts (optional). Commenting out will disable
# uploads of run results in SARIF format to the repository Actions tab.
# https://docs.github.com/en/actions/advanced-guides/storing-workflow-data-as-artifacts
- name: "Upload artifact"
uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # v3.1.0
uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
with:
name: SARIF file
path: results.sarif
retention-days: 5
# Upload the results to GitHub's code scanning dashboard.
# Upload the results to GitHub's code scanning dashboard (optional).
# Commenting out will disable upload of results to your repo's Code Scanning dashboard
- name: "Upload to code-scanning"
uses: github/codeql-action/upload-sarif@17573ee1cc1b9d061760f3a006fc4aac4f944fd5 # v2.2.4
uses: github/codeql-action/upload-sarif@83a02f7883b12e0e4e1a146174f5e2292a01e601 # v2.16.4
with:
sarif_file: results.sarif
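The `results.sarif` file uploaded above is a plain SARIF 2.1.0 JSON document, so it can be inspected locally before it reaches the code-scanning dashboard. A small sketch using only the standard library (the file name comes from the step above; the counting itself is illustrative):

import json
from collections import Counter

with open("results.sarif", encoding="utf-8") as f:
    sarif = json.load(f)

# Each SARIF run carries a list of results, each tagged with the rule that produced it.
counts = Counter(
    result.get("ruleId", "<unknown>")
    for run in sarif.get("runs", [])
    for result in run.get("results", [])
)
for rule, count in counts.most_common():
    print(f"{rule}: {count}")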


@ -1,82 +1,14 @@
name: unit-test-post
on:
workflow_run:
workflows: [ "unit-test" ]
types: [ completed ]
workflow_run:
workflows: [ "unit-test" ]
types: [ completed ]
jobs:
publish-test-results:
if: ${{ github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'success' }}
runs-on: ubuntu-latest
steps:
- name: Download analysis results
uses: actions/github-script@v3.1.0
with:
script: |
let artifacts = await github.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: ${{github.event.workflow_run.id }},
});
let matchArtifact = artifacts.data.artifacts.filter((artifact) => {
return artifact.name == "test-result"
})[0];
let download = await github.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: matchArtifact.id,
archive_format: "zip",
});
let fs = require("fs");
fs.writeFileSync("${{github.workspace}}/test-result.zip", Buffer.from(download.data));
- name: Set environment variables
run: |
mkdir pr_env
unzip test-result.zip -d pr_env
echo "pr_id=$(cat pr_env/pr-id.txt)" >> $GITHUB_ENV
echo "pr_head_repo=$(cat pr_env/pr-head-repo.txt)" >> $GITHUB_ENV
echo "pr_head_ref=$(cat pr_env/pr-head-ref.txt)" >> $GITHUB_ENV
- uses: actions/checkout@v3
with:
repository: ${{ env.pr_head_repo }}
ref: ${{ env.pr_head_ref }}
persist-credentials: false
- name: Redownload analysis results
uses: actions/github-script@v3.1.0
with:
script: |
let artifacts = await github.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: ${{github.event.workflow_run.id }},
});
let matchArtifact = artifacts.data.artifacts.filter((artifact) => {
return artifact.name == "test-result"
})[0];
let download = await github.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: matchArtifact.id,
archive_format: "zip",
});
let fs = require("fs");
fs.writeFileSync("${{github.workspace}}/test-result.zip", Buffer.from(download.data));
- name: Extract analysis results
run: |
mkdir -p tests
unzip test-result.zip -d tests
- name: Publish Unit Test Results
id: test-results
uses: EnricoMi/publish-unit-test-result-action@v1
with:
files: "tests/**/*.xml"
- name: Conclusion
run: echo "Conclusion is ${{ steps.test-results.outputs.json && fromJSON( steps.test-results.outputs.json ).conclusion }}"
publish-test-results:
uses: ultimaker/cura-workflows/.github/workflows/unit-test-post.yml@main
with:
event: ${{ github.event.workflow_run.event }}
conclusion: ${{ github.event.workflow_run.conclusion }}
secrets: inherit


@ -1,4 +1,3 @@
---
name: unit-test
on:
@ -9,23 +8,18 @@ on:
- 'cura/**'
- 'icons/**'
- 'tests/**'
- 'packaging/**'
- '.github/workflows/conan-*.yml'
- '.github/workflows/unit-test.yml'
- '.github/workflows/notify.yml'
- '.github/workflows/requirements-conan-package.txt'
- '.github/workflows/requirements-runner.txt'
- 'requirements*.txt'
- 'conanfile.py'
- 'conandata.yml'
- 'GitVersion.yml'
- '*.jinja'
branches:
- main
- 'CURA-*'
- '[1-9]+.[0-9]+'
tags:
- '[0-9]+.[0-9]+.[0-9]+'
- '[0-9]+.[0-9]+-beta'
- 'PP-*'
- '[0-9]+.[0-9]+'
pull_request:
paths:
- 'plugins/**'
@ -33,134 +27,36 @@ on:
- 'cura/**'
- 'icons/**'
- 'tests/**'
- 'packaging/**'
- '.github/workflows/conan-*.yml'
- '.github/workflows/unit-test.yml'
- '.github/workflows/notify.yml'
- '.github/workflows/requirements-conan-package.txt'
- '.github/workflows/requirements-runner.txt'
- 'requirements*.txt'
- 'conanfile.py'
- 'conandata.yml'
- 'GitVersion.yml'
- '*.jinja'
branches:
- main
- '[1-9]+.[0-9]+'
tags:
- '[0-9]+.[0-9]+.[0-9]+'
- '[0-9]+.[0-9]+-beta'
env:
CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
CONAN_LOGIN_USERNAME_CURA_CE: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD_CURA_CE: ${{ secrets.CONAN_PASS }}
CONAN_LOG_RUN_TO_OUTPUT: 1
CONAN_LOGGING_LEVEL: info
CONAN_NON_INTERACTIVE: 1
- '[0-9]+.[0-9]+'
permissions:
contents: read
env:
CONAN_LOGIN_USERNAME: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD: ${{ secrets.CONAN_PASS }}
jobs:
conan-recipe-version:
uses: ultimaker/cura/.github/workflows/conan-recipe-version.yml@main
uses: ultimaker/cura-workflows/.github/workflows/conan-recipe-version.yml@main
with:
project_name: cura
testing:
runs-on: ubuntu-22.04
uses: ultimaker/cura-workflows/.github/workflows/unit-test.yml@main
needs: [ conan-recipe-version ]
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 2
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: '3.11.x'
architecture: 'x64'
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements and Create default Conan profile
run: pip install -r requirements-conan-package.txt
working-directory: .github/workflows/
- name: Use Conan download cache (Bash)
if: ${{ runner.os != 'Windows' }}
run: conan config set storage.download_cache="$HOME/.conan/conan_download_cache"
- name: Cache Conan local repository packages (Bash)
uses: actions/cache@v3
if: ${{ runner.os != 'Windows' }}
with:
path: |
$HOME/.conan/data
$HOME/.conan/conan_download_cache
key: conan-${{ runner.os }}-${{ runner.arch }}-unit-cache
# NOTE: Due to what are probably github issues, we have to remove the cache and reconfigure before the rest.
# This is maybe because grub caches the disk it uses last time, which is recreated each time.
- name: Install Linux system requirements
if: ${{ runner.os == 'Linux' }}
run: |
sudo rm /var/cache/debconf/config.dat
sudo dpkg --configure -a
sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
sudo apt update
sudo apt upgrade
sudo apt install build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config -y
- name: Install GCC-13
run: |
sudo apt install g++-13 gcc-13 -y
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 13
sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-13 13
- name: Get Conan configuration
run: |
conan config install https://github.com/Ultimaker/conan-config.git
conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
- name: Get Conan profile
run: conan profile new default --detect --force
- name: Install dependencies
run: conan install . ${{ needs.conan-recipe-version.outputs.recipe_id_full }} --build=missing --update -o cura:devtools=True -g VirtualPythonEnv -if venv
- name: Upload the Dependency package(s)
run: conan upload "*" -r cura --all -c
- name: Set Environment variables for Cura (bash)
if: ${{ runner.os != 'Windows' }}
run: |
. ./venv/bin/activate_github_actions_env.sh
- name: Run Unit Test
id: run-test
run: |
pytest --junitxml=junit_cura.xml
working-directory: tests
- name: Save PR metadata
if: always()
run: |
echo ${{ github.event.number }} > pr-id.txt
echo ${{ github.event.pull_request.head.repo.full_name }} > pr-head-repo.txt
echo ${{ github.event.pull_request.head.ref }} > pr-head-ref.txt
working-directory: tests
- name: Upload Test Results
if: always()
uses: actions/upload-artifact@v3
with:
name: test-result
path: |
tests/**/*.xml
tests/pr-id.txt
tests/pr-head-repo.txt
tests/pr-head-ref.txt
with:
recipe_id_full: ${{ needs.conan-recipe-version.outputs.recipe_id_full }}
conan_extra_args: '-g VirtualPythonEnv -o cura:devtools=True -c tools.build:skip_test=False --options "*:enable_sentry=False"'
unit_test_cmd: 'pytest --junitxml=junit_cura.xml'
unit_test_dir: 'tests'
conan_generator_dir: './venv/bin'
secrets: inherit
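The reusable workflow runs `pytest --junitxml=junit_cura.xml` inside `tests/`, and the separate `unit-test-post` job only consumes the resulting JUnit XML. For a local run, the same report can be summarised with the standard library; a sketch under the assumption that the report sits at `tests/junit_cura.xml`:

import xml.etree.ElementTree as ET

def summarize_junit(path: str = "tests/junit_cura.xml") -> dict:
    """Collect the top-level counters from a JUnit XML report."""
    root = ET.parse(path).getroot()
    # pytest writes either a single <testsuite> root or a <testsuites> wrapper.
    suites = [root] if root.tag == "testsuite" else root.findall("testsuite")
    totals = {"tests": 0, "failures": 0, "errors": 0, "skipped": 0}
    for suite in suites:
        for key in totals:
            totals[key] += int(suite.get(key, 0))
    return totals

if __name__ == "__main__":
    print(summarize_junit())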


@ -1,5 +1,5 @@
name: Windows Installer
run-name: ${{ inputs.cura_conan_version }} for Windows-${{ inputs.architecture }} by @${{ github.actor }}
run-name: ${{ inputs.cura_conan_version }} by @${{ github.actor }}
on:
workflow_dispatch:
@ -34,254 +34,20 @@ on:
operating_system:
description: 'OS'
required: true
default: 'windows-2022'
default: 'self-hosted-Windows-X64'
type: choice
options:
- self-hosted-Windows-X64
- windows-2022
workflow_call:
inputs:
cura_conan_version:
description: 'Cura Conan Version'
default: 'cura/latest@ultimaker/testing'
required: true
type: string
conan_args:
description: 'Conan args: eq.: --require-override'
default: ''
required: false
type: string
enterprise:
description: 'Build Cura as an Enterprise edition'
default: false
required: true
type: boolean
staging:
description: 'Use staging API'
default: false
required: true
type: boolean
architecture:
description: 'Architecture'
required: true
default: 'X64'
type: string
operating_system:
description: 'OS'
required: true
default: 'windows-2022'
type: string
env:
CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
WIN_CERT_INSTALLER_CER: ${{ secrets.WIN_CERT_INSTALLER_CER }}
WIN_CERT_INSTALLER_CER_PASS: ${{ secrets.WIN_CERT_INSTALLER_CER_PASS }}
CURA_CONAN_VERSION: ${{ inputs.cura_conan_version }}
ENTERPRISE: ${{ inputs.enterprise }}
STAGING: ${{ inputs.staging }}
jobs:
cura-installer-create:
runs-on: ${{ inputs.operating_system }}
outputs:
INSTALLER_FILENAME: ${{ steps.filename.outputs.INSTALLER_FILENAME }}
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: '3.10.x'
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements for runner
run: pip install -r .github/workflows/requirements-conan-package.txt
- name: Cache Conan local repository packages (Powershell)
uses: actions/cache@v3
with:
path: |
C:\Users\runneradmin\.conan\data
C:\.conan
C:\Users\runneradmin\.conan\conan_download_cache
key: conan-${{ runner.os }}-${{ runner.arch }}-installer-cache
- name: Create the default Conan profile
run: conan profile new default --detect --force
- name: Get Conan configuration
run: |
conan config install https://github.com/Ultimaker/conan-config.git
conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
- name: Use Conan download cache (Powershell)
run: conan config set storage.download_cache="C:\Users\runneradmin\.conan\conan_download_cache"
- name: Create the Packages (Powershell)
run: conan install $Env:CURA_CONAN_VERSION ${{ inputs.conan_args }} --build=missing --update -if cura_inst -g VirtualPythonEnv -o cura:enterprise=$Env:ENTERPRISE -o cura:staging=$Env:STAGING --json "cura_inst/conan_install_info.json"
- name: Upload the Package(s)
if: always()
run: |
conan upload "*" -r cura --all -c
- name: Set Environment variables for Cura (Powershell)
run: |
echo "${Env:WIX}\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
.\cura_inst\Scripts\activate_github_actions_env.ps1
.\cura_inst\Scripts\activate_github_actions_version_env.ps1
- name: Install OpenSSL shared
run: conan install openssl/1.1.1l@_/_ --build=missing --update -o openssl:shared=True -g deploy
- name: Copy OpenSSL shared (Powershell)
run: |
cp openssl/bin/*.dll ./cura_inst/Scripts/
cp openssl/lib/*.lib ./cura_inst/Lib/
- name: Create the Cura dist
run: pyinstaller ./cura_inst/UltiMaker-Cura.spec
- name: Output the name file name and extension
id: filename
shell: python
run: |
import os
enterprise = "-Enterprise" if "${{ inputs.enterprise }}" == "true" else ""
installer_filename = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-win64-${{ inputs.architecture }}"
output_env = os.environ["GITHUB_OUTPUT"]
content = ""
if os.path.exists(output_env):
with open(output_env, "r") as f:
content = f.read()
with open(output_env, "w") as f:
f.write(content)
f.writelines(f"INSTALLER_FILENAME={installer_filename}\n")
- name: Summarize the used Conan dependencies
shell: python
run: |
import os
import json
from pathlib import Path
conan_install_info_path = Path("cura_inst/conan_install_info.json")
conan_info = {"installed": []}
if os.path.exists(conan_install_info_path):
with open(conan_install_info_path, "r") as f:
conan_info = json.load(f)
sorted_deps = sorted([dep["recipe"]["id"].replace('#', r' rev: ') for dep in conan_info["installed"]])
summary_env = os.environ["GITHUB_STEP_SUMMARY"]
content = ""
if os.path.exists(summary_env):
with open(summary_env, "r") as f:
content = f.read()
with open(summary_env, "w") as f:
f.write(content)
f.writelines("# ${{ steps.filename.outputs.INSTALLER_FILENAME }}\n")
f.writelines("## Conan packages:\n")
for dep in sorted_deps:
f.writelines(f"`{dep}`\n")
- name: Summarize the used Python modules
shell: python
run: |
import os
import pkg_resources
summary_env = os.environ["GITHUB_STEP_SUMMARY"]
content = ""
if os.path.exists(summary_env):
with open(summary_env, "r") as f:
content = f.read()
with open(summary_env, "w") as f:
f.write(content)
f.writelines("## Python modules:\n")
for package in pkg_resources.working_set:
f.writelines(f"`{package.key}/{package.version}`\n")
- name: Create PFX certificate from BASE64_PFX_CONTENT secret
id: create-pfx
env:
PFX_CONTENT: ${{ secrets.WIN_CERT_INSTALLER_CER }}
run: |
$pfxPath = Join-Path -Path $env:RUNNER_TEMP -ChildPath "cert.pfx";
$encodedBytes = [System.Convert]::FromBase64String($env:PFX_CONTENT);
Set-Content $pfxPath -Value $encodedBytes -AsByteStream;
echo "PFX_PATH=$pfxPath" >> $env:GITHUB_OUTPUT;
- name: Create the Windows msi installer (Powershell)
run: |
python ..\cura_inst\packaging\msi\create_windows_msi.py ..\cura_inst .\UltiMaker-Cura "${{steps.filename.outputs.INSTALLER_FILENAME }}.msi" "$Env:CURA_APP_NAME"
working-directory: dist
- name: Sign the Windows msi installer (Powershell)
env:
PFX_PATH: ${{ steps.create-pfx.outputs.PFX_PATH }}
run: |
& "C:/Program Files (x86)/Windows Kits/10/bin/10.0.17763.0/x86/signtool.exe" sign /f $Env:PFX_PATH /p "$Env:WIN_CERT_INSTALLER_CER_PASS" /fd SHA256 /t http://timestamp.digicert.com "${{steps.filename.outputs.INSTALLER_FILENAME }}.msi"
working-directory: dist
- name: Create the Windows exe installer (Powershell)
run: |
python ..\cura_inst\packaging\NSIS\create_windows_installer.py ../cura_inst . "${{steps.filename.outputs.INSTALLER_FILENAME }}.exe"
working-directory: dist
- name: Sign the Windows exe installer (Powershell)
env:
PFX_PATH: ${{ steps.create-pfx.outputs.PFX_PATH }}
run: |
& "C:/Program Files (x86)/Windows Kits/10/bin/10.0.17763.0/x86/signtool.exe" sign /f $Env:PFX_PATH /p "$Env:WIN_CERT_INSTALLER_CER_PASS" /fd SHA256 /t http://timestamp.digicert.com "${{steps.filename.outputs.INSTALLER_FILENAME }}.exe"
working-directory: dist
- name: Upload the msi
uses: actions/upload-artifact@v3
with:
name: ${{steps.filename.outputs.INSTALLER_FILENAME }}-msi
path: |
dist/${{steps.filename.outputs.INSTALLER_FILENAME }}.msi
retention-days: 5
- name: Upload the exe
uses: actions/upload-artifact@v3
with:
name: ${{steps.filename.outputs.INSTALLER_FILENAME }}-exe
path: |
dist/${{steps.filename.outputs.INSTALLER_FILENAME }}.exe
retention-days: 5
# NOTE: The extension is .sh, since this isn't going to build-environment, so not on the Win build image.
- name: Write the run info
shell: python
run: |
import os
with open("run_info.sh", "w") as f:
f.writelines(f'echo "CURA_VERSION_FULL={os.environ["CURA_VERSION_FULL"]}" >> $GITHUB_ENV\n')
# NOTE: The extension is .sh, since this isn't going to build-environment, so not on the Win build image.
- name: Upload the run info
uses: actions/upload-artifact@v3
with:
name: windows-run-info
path: |
run_info.sh
retention-days: 5
notify-export:
if: ${{ always() }}
needs: [ cura-installer-create ]
uses: ultimaker/cura/.github/workflows/notify.yml@main
windows-installer:
uses: ultimaker/cura-workflows/.github/workflows/cura-installer-windows.yml@main
with:
success: ${{ contains(join(needs.*.result, ','), 'success') }}
success_title: "Create the Cura distributions"
success_body: "Installers for ${{ inputs.cura_conan_version }}"
failure_title: "Failed to create the Cura distributions"
failure_body: "Failed to create at least 1 installer for ${{ inputs.cura_conan_version }}"
secrets: inherit
cura_conan_version: ${{ inputs.cura_conan_version }}
conan_args: ${{ inputs.conan_args }}
enterprise: ${{ inputs.enterprise }}
staging: ${{ inputs.staging }}
architecture: ${{ inputs.architecture }}
operating_system: ${{ inputs.operating_system }}
secrets: inherit
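The removed inline steps above update $GITHUB_OUTPUT and $GITHUB_STEP_SUMMARY by reading the whole file back and rewriting it. Both files also accept plain appends, which is the shorter route; a sketch of that variant, reusing the filename format from the removed step (the `X64` architecture is the workflow's default input, everything else is illustrative):

import os

def set_output(name: str, value: str) -> None:
    """Append a key=value line to the GitHub Actions step-output file."""
    with open(os.environ["GITHUB_OUTPUT"], "a", encoding="utf-8") as f:
        f.write(f"{name}={value}\n")

def add_summary(markdown: str) -> None:
    """Append Markdown to the job summary shown on the run page."""
    with open(os.environ["GITHUB_STEP_SUMMARY"], "a", encoding="utf-8") as f:
        f.write(markdown + "\n")

if __name__ == "__main__":
    enterprise = "-Enterprise" if os.getenv("ENTERPRISE") == "true" else ""
    installer_filename = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-win64-X64"
    set_output("INSTALLER_FILENAME", installer_filename)
    add_summary(f"# {installer_filename}")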

.gitignore (vendored)

@ -101,7 +101,6 @@ graph_info.json
Ultimaker-Cura.spec
.run/
/printer-linter/src/printerlinter.egg-info/
/resources/qml/Dialogs/AboutDialogVersionsList.qml
/plugins/CuraEngineGradualFlow
/resources/bundled_packages/bundled_*.json
curaengine_plugin_gradual_flow


@ -3,6 +3,10 @@ checks:
diagnostic-mesh-file-size: true
diagnostic-definition-redundant-override: true
diagnostic-resources-macos-app-directory-name: true
diagnostic-incorrect-formula: true
diagnostic-resource-file-deleted: true
diagnostic-material-temperature-defined: true
diagnostic-long-profile-names: true
fixes:
diagnostic-definition-redundant-override: true
format:


@ -1,61 +0,0 @@
import QtQuick 2.2
import QtQuick.Controls 2.9
import UM 1.6 as UM
import Cura 1.5 as Cura
ListView
{
id: projectBuildInfoList
visible: false
anchors.top: creditsNotes.bottom
anchors.topMargin: UM.Theme.getSize("default_margin").height
width: parent.width
height: base.height - y - (2 * UM.Theme.getSize("default_margin").height + closeButton.height)
ScrollBar.vertical: UM.ScrollBar
{
id: projectBuildInfoListScrollBar
}
delegate: Row
{
spacing: UM.Theme.getSize("narrow_margin").width
UM.Label
{
text: (model.name)
width: (projectBuildInfoList.width* 0.4) | 0
elide: Text.ElideRight
}
UM.Label
{
text: (model.version)
width: (projectBuildInfoList.width *0.6) | 0
elide: Text.ElideRight
}
}
model: ListModel
{
id: developerInfo
}
Component.onCompleted:
{
var conan_installs = {{ conan_installs }};
var python_installs = {{ python_installs }};
developerInfo.append({ name : "<H1>Conan Installs</H1>", version : '' });
for (var n in conan_installs)
{
developerInfo.append({ name : conan_installs[n][0], version : conan_installs[n][1] });
}
developerInfo.append({ name : '', version : '' });
developerInfo.append({ name : "<H1>Python Installs</H1>", version : '' });
for (var n in python_installs)
{
developerInfo.append({ name : python_installs[n][0], version : python_installs[n][1] });
}
}
}


@ -1,4 +1,4 @@
# Copyright (c) 2022 UltiMaker
# Copyright (c) 2023 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.
CuraAppName = "{{ cura_app_name }}"
@ -12,3 +12,6 @@ CuraCloudAccountAPIRoot = "{{ cura_cloud_account_api_root }}"
CuraMarketplaceRoot = "{{ cura_marketplace_root }}"
CuraDigitalFactoryURL = "{{ cura_digital_factory_url }}"
CuraLatestURL = "{{ cura_latest_url }}"
ConanInstalls = {{ conan_installs }}
PythonInstalls = {{ python_installs }}
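Once rendered, `ConanInstalls` and `PythonInstalls` are ordinary module attributes of `CuraVersion.py`, shaped like the dictionaries built by `_conan_installs()` and `_python_installs()` in the conanfile changes further down. A hypothetical rendered excerpt (the package names and versions are invented for illustration):

# Hypothetical rendered result of the two new template lines above.
ConanInstalls = {"curaengine": {"version": "5.7.0", "revision": "0" * 40}}
PythonInstalls = {"numpy": {"version": "1.26.4"}}

# Anything that needs the dependency lists (for example an about dialog) can iterate the dicts:
for name, info in ConanInstalls.items():
    print(f"conan: {name} {info['version']} (rev {info['revision']})")
for name, info in PythonInstalls.items():
    print(f"python: {name} {info['version']}")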


@ -12,7 +12,7 @@
[![Badge Test]][Test]
[![Badge Conan]][Conan]
![Badge Downloads]
[![Badge Downloads]][Downloads]
<br>
<br>
@ -26,7 +26,9 @@
*With hundreds of settings & community-managed print profiles,* <br>
*Ultimaker Cura is sure to lead your next project to a success.*
<br>
**Contribute Printer Profiles?** -- Please [look here](https://github.com/Ultimaker/Cura/wiki/Adding-new-machine-profiles-to-Cura) first. <br>
**Contribute Translations?** -- Please [look here](https://github.com/Ultimaker/Cura/wiki/Translating-Cura) first.
<br>
[![Button Building]][Building]
@ -67,6 +69,7 @@
[Issues]: https://github.com/Ultimaker/Cura/issues
[Conan]: https://github.com/Ultimaker/Cura/actions/workflows/conan-package.yml
[Test]: https://github.com/Ultimaker/Cura/actions/workflows/unit-test.yml
[Downloads]: https://github.com/Ultimaker/Cura/releases/latest
[License]: LICENSE
[Report]: docs/Report.md
@ -81,8 +84,8 @@
[Badge License]: https://img.shields.io/badge/License-LGPL3-336887.svg?style=for-the-badge&labelColor=458cb5&logoColor=white&logo=GNU
[Badge Closed]: https://img.shields.io/github/issues-closed/ultimaker/cura?style=for-the-badge&logoColor=white&labelColor=629944&color=446a30&logo=AddThis
[Badge Issues]: https://img.shields.io/github/issues/ultimaker/cura?style=for-the-badge&logoColor=white&labelColor=c34360&color=933349&logo=AdBlock
[Badge Conan]: https://img.shields.io/github/workflow/status/Ultimaker/Cura/conan-package?style=for-the-badge&logoColor=white&labelColor=6185aa&color=4c6987&logo=Conan&label=Conan%20Package
[Badge Test]: https://img.shields.io/github/workflow/status/Ultimaker/Cura/unit-test?style=for-the-badge&logoColor=white&labelColor=4a999d&color=346c6e&logo=Codacy&label=Unit%20Test
[Badge Conan]: https://img.shields.io/github/actions/workflow/status/Ultimaker/Cura/conan-package.yml?branch=main&style=for-the-badge&logoColor=white&labelColor=6185aa&color=4c6987&logo=Conan&label=Conan%20Package
[Badge Test]: https://img.shields.io/github/actions/workflow/status/Ultimaker/Cura/unit-test.yml?branch=main&style=for-the-badge&logoColor=white&labelColor=4a999d&color=346c6e&logo=Codacy&label=Unit%20Test
[Badge Size]: https://img.shields.io/github/repo-size/ultimaker/cura?style=for-the-badge&logoColor=white&labelColor=715a97&color=584674&logo=GoogleAnalytics
[Badge Downloads]: https://img.shields.io/github/downloads-pre/Ultimaker/Cura/latest/total?style=for-the-badge


@ -55,7 +55,8 @@ exe = EXE(
target_arch={{ target_arch }},
codesign_identity=os.getenv('CODESIGN_IDENTITY', None),
entitlements_file={{ entitlements_file }},
icon={{ icon }}
icon={{ icon }},
contents_directory='.'
)
coll = COLLECT(
@ -70,188 +71,7 @@ coll = COLLECT(
)
{% if macos == true %}
# PyInstaller seems to copy everything in the resource folder for the MacOS, this causes issues with codesigning and notarizing
# The folder structure should adhere to the one specified in Table 2-5
# https://developer.apple.com/library/archive/documentation/CoreFoundation/Conceptual/CFBundles/BundleTypes/BundleTypes.html#//apple_ref/doc/uid/10000123i-CH101-SW1
# The class below is basically ducktyping the BUNDLE class of PyInstaller and using our own `assemble` method for more fine-grain and specific
# control. Some code of the method below is copied from:
# https://github.com/pyinstaller/pyinstaller/blob/22d1d2a5378228744cc95f14904dae1664df32c4/PyInstaller/building/osx.py#L115
#-----------------------------------------------------------------------------
# Copyright (c) 2005-2022, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License (version 2
# or later) with exception for distributing the bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#
# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
#-----------------------------------------------------------------------------
import plistlib
import shutil
import PyInstaller.utils.osx as osxutils
from pathlib import Path
from PyInstaller.building.osx import BUNDLE
from PyInstaller.building.utils import (_check_path_overlap, _rmtree, add_suffix_to_extension, checkCache)
from PyInstaller.building.datastruct import logger
from PyInstaller.building.icon import normalize_icon_type
class UMBUNDLE(BUNDLE):
def assemble(self):
from PyInstaller.config import CONF
if _check_path_overlap(self.name) and os.path.isdir(self.name):
_rmtree(self.name)
logger.info("Building BUNDLE %s", self.tocbasename)
# Create a minimal Mac bundle structure.
macos_path = Path(self.name, "Contents", "MacOS")
resources_path = Path(self.name, "Contents", "Resources")
frameworks_path = Path(self.name, "Contents", "Frameworks")
os.makedirs(macos_path)
os.makedirs(resources_path)
os.makedirs(frameworks_path)
# Makes sure the icon exists and attempts to convert to the proper format if applicable
self.icon = normalize_icon_type(self.icon, ("icns",), "icns", CONF["workpath"])
# Ensure icon path is absolute
self.icon = os.path.abspath(self.icon)
# Copy icns icon to Resources directory.
shutil.copy(self.icon, os.path.join(self.name, 'Contents', 'Resources'))
# Key/values for a minimal Info.plist file
info_plist_dict = {
"CFBundleDisplayName": self.appname,
"CFBundleName": self.appname,
# Required by 'codesign' utility.
# The value for CFBundleIdentifier is used as the default unique name of your program for Code Signing
# purposes. It even identifies the APP for access to restricted OS X areas like Keychain.
#
# The identifier used for signing must be globally unique. The usual form for this identifier is a
# hierarchical name in reverse DNS notation, starting with the toplevel domain, followed by the company
# name, followed by the department within the company, and ending with the product name. Usually in the
# form: com.mycompany.department.appname
# CLI option --osx-bundle-identifier sets this value.
"CFBundleIdentifier": self.bundle_identifier,
"CFBundleExecutable": os.path.basename(self.exename),
"CFBundleIconFile": os.path.basename(self.icon),
"CFBundleInfoDictionaryVersion": "6.0",
"CFBundlePackageType": "APPL",
"CFBundleVersionString": self.version,
"CFBundleShortVersionString": self.version,
}
# Set some default values. But they still can be overwritten by the user.
if self.console:
# Setting EXE console=True implies LSBackgroundOnly=True.
info_plist_dict['LSBackgroundOnly'] = True
else:
# Let's use high resolution by default.
info_plist_dict['NSHighResolutionCapable'] = True
# Merge info_plist settings from spec file
if isinstance(self.info_plist, dict) and self.info_plist:
info_plist_dict.update(self.info_plist)
plist_filename = os.path.join(self.name, "Contents", "Info.plist")
with open(plist_filename, "wb") as plist_fh:
plistlib.dump(info_plist_dict, plist_fh)
links = []
_QT_BASE_PATH = {'PySide2', 'PySide6', 'PyQt5', 'PyQt6', 'PySide6'}
for inm, fnm, typ in self.toc:
# Adjust name for extensions, if applicable
inm, fnm, typ = add_suffix_to_extension(inm, fnm, typ)
inm = Path(inm)
fnm = Path(fnm)
# Copy files from cache. This ensures that are used files with relative paths to dynamic library
# dependencies (@executable_path)
if typ in ('EXTENSION', 'BINARY') or (typ == 'DATA' and inm.suffix == '.so'):
if any(['.' in p for p in inm.parent.parts]):
inm = Path(inm.name)
fnm = Path(checkCache(
str(fnm),
strip = self.strip,
upx = self.upx,
upx_exclude = self.upx_exclude,
dist_nm = str(inm),
target_arch = self.target_arch,
codesign_identity = self.codesign_identity,
entitlements_file = self.entitlements_file,
strict_arch_validation = (typ == 'EXTENSION'),
))
frame_dst = frameworks_path.joinpath(inm)
if not frame_dst.exists():
if frame_dst.is_dir():
os.makedirs(frame_dst, exist_ok = True)
else:
os.makedirs(frame_dst.parent, exist_ok = True)
shutil.copy(fnm, frame_dst, follow_symlinks = True)
macos_dst = macos_path.joinpath(inm)
if not macos_dst.exists():
if macos_dst.is_dir():
os.makedirs(macos_dst, exist_ok = True)
else:
os.makedirs(macos_dst.parent, exist_ok = True)
# Create relative symlink to the framework
symlink_to = Path(*[".." for p in macos_dst.relative_to(macos_path).parts], "Frameworks").joinpath(
frame_dst.relative_to(frameworks_path))
try:
macos_dst.symlink_to(symlink_to)
except FileExistsError:
pass
else:
if typ == 'DATA':
if any(['.' in p for p in inm.parent.parts]) or inm.suffix == '.so':
# Skip info dist egg and some not needed folders in tcl and tk, since they all contain dots in their files
logger.warning(f"Skipping DATA file {inm}")
continue
res_dst = resources_path.joinpath(inm)
if not res_dst.exists():
if res_dst.is_dir():
os.makedirs(res_dst, exist_ok = True)
else:
os.makedirs(res_dst.parent, exist_ok = True)
shutil.copy(fnm, res_dst, follow_symlinks = True)
macos_dst = macos_path.joinpath(inm)
if not macos_dst.exists():
if macos_dst.is_dir():
os.makedirs(macos_dst, exist_ok = True)
else:
os.makedirs(macos_dst.parent, exist_ok = True)
# Create relative symlink to the resource
symlink_to = Path(*[".." for p in macos_dst.relative_to(macos_path).parts], "Resources").joinpath(
res_dst.relative_to(resources_path))
try:
macos_dst.symlink_to(symlink_to)
except FileExistsError:
pass
else:
macos_dst = macos_path.joinpath(inm)
if not macos_dst.exists():
if macos_dst.is_dir():
os.makedirs(macos_dst, exist_ok = True)
else:
os.makedirs(macos_dst.parent, exist_ok = True)
shutil.copy(fnm, macos_dst, follow_symlinks = True)
# Sign the bundle
logger.info('Signing the BUNDLE...')
try:
osxutils.sign_binary(self.name, self.codesign_identity, self.entitlements_file, deep = True)
except Exception as e:
logger.warning(f"Error while signing the bundle: {e}")
logger.warning("You will need to sign the bundle manually!")
logger.info(f"Building BUNDLE {self.tocbasename} completed successfully.")
app = UMBUNDLE(
app = BUNDLE(
coll,
name='{{ display_name }}.app',
icon={{ icon }},
@ -266,10 +86,15 @@ app = UMBUNDLE(
'CFBundlePackageType': 'APPL',
'CFBundleVersionString': {{ version }},
'CFBundleShortVersionString': {{ short_version }},
'CFBundleURLTypes': [{
'CFBundleURLName': '{{ display_name }}',
'CFBundleURLSchemes': ['cura', 'slicer'],
}],
'CFBundleDocumentTypes': [{
'CFBundleTypeRole': 'Viewer',
'CFBundleTypeExtensions': ['*'],
'CFBundleTypeName': 'Model Files',
}]
},
){% endif %}
'CFBundleTypeRole': 'Viewer',
'CFBundleTypeExtensions': ['stl', 'obj', '3mf', 'gcode', 'ufp'],
'CFBundleTypeName': 'Model Files',
}]
},
)
{% endif %}
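PyInstaller's BUNDLE step (and the removed UMBUNDLE class above) serialises the merged `info_plist` dictionary into `Contents/Info.plist` with plistlib, so the two keys added here end up as ordinary plist entries. A sketch of roughly what those entries look like once written out (surrounding keys and paths omitted; `{{ display_name }}` is assumed to resolve to "UltiMaker Cura"):

import plistlib

# Only the keys added in the spec above; a real Info.plist carries many more entries.
info_plist = {
    "CFBundleURLTypes": [{
        "CFBundleURLName": "UltiMaker Cura",        # '{{ display_name }}' in the template
        "CFBundleURLSchemes": ["cura", "slicer"],   # lets cura:// and slicer:// links open the app
    }],
    "CFBundleDocumentTypes": [{
        "CFBundleTypeRole": "Viewer",
        "CFBundleTypeExtensions": ["stl", "obj", "3mf", "gcode", "ufp"],
        "CFBundleTypeName": "Model Files",
    }],
}

with open("Info.plist", "wb") as plist_file:
    plistlib.dump(info_plist, plist_file)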


@ -1,3 +1,19 @@
version: "5.8.0-alpha.0"
requirements:
- "cura_resources/(latest)@ultimaker/testing"
- "uranium/(latest)@ultimaker/testing"
- "curaengine/(latest)@ultimaker/testing"
- "cura_binary_data/(latest)@ultimaker/testing"
- "fdm_materials/(latest)@ultimaker/testing"
- "curaengine_plugin_gradual_flow/0.1.0-beta.3"
- "dulcificum/latest@ultimaker/testing"
- "pysavitar/5.3.0"
- "pynest2d/5.3.0"
- "curaengine_grpc_definitions/0.2.0"
- "native_cad_plugin/2.0.0"
requirements_internal:
- "fdm_materials/(latest)@internal/testing"
- "cura_private_data/(latest)@internal/testing"
urls:
default:
cloud_api_root: "https://api.ultimaker.com"
@ -27,10 +43,22 @@ pyinstaller:
package: "curaengine_plugin_gradual_flow"
src: "res/bundled_packages"
dst: "share/cura/resources/bundled_packages"
native_cad_plugin:
package: "native_cad_plugin"
src: "res/plugins/NativeCADplugin"
dst: "share/cura/plugins/NativeCADplugin"
native_cad_plugin_bundled:
package: "native_cad_plugin"
src: "res/bundled_packages"
dst: "share/cura/resources/bundled_packages"
cura_resources:
package: "cura"
src: "resources"
dst: "share/cura/resources"
cura_shared_resources:
package: "cura_resources"
src: "res"
dst: "share/cura/resources"
cura_private_data:
package: "cura_private_data"
src: "res"
@ -86,6 +114,7 @@ pyinstaller:
hiddenimports:
- "pySavitar"
- "pyArcus"
- "pyDulcificum"
- "pynest2d"
- "PyQt6"
- "PyQt6.QtNetwork"
@ -103,7 +132,6 @@ pyinstaller:
- "sqlite3"
- "trimesh"
- "win32ctypes"
- "PyQt6"
- "PyQt6.QtNetwork"
- "PyQt6.sip"
- "stl"
@ -145,6 +173,10 @@ pycharm_targets:
module_name: Cura
name: pytest in TestGCodeListDecorator.py
script_name: tests/TestGCodeListDecorator.py
- jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
module_name: Cura
name: pytest in TestHitChecker.py
script_name: tests/TestHitChecker.py
- jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
module_name: Cura
name: pytest in TestIntentManager.py
@ -173,6 +205,10 @@ pycharm_targets:
module_name: Cura
name: pytest in TestPrintInformation.py
script_name: tests/TestPrintInformation.py
- jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
module_name: Cura
name: pytest in TestPrintOrderManager.py
script_name: tests/TestPrintOrderManager.py
- jinja_path: .run_templates/pycharm_cura_test.run.xml.jinja
module_name: Cura
name: pytest in TestProfileRequirements.py


@ -1,10 +1,11 @@
import os
from io import StringIO
from pathlib import Path
from jinja2 import Template
from conan import ConanFile
from conan.tools.files import copy, rmdir, save, mkdir, rm
from conan.tools.files import copy, rmdir, save, mkdir, rm, update_conandata
from conan.tools.microsoft import unix_path
from conan.tools.env import VirtualRunEnv, Environment, VirtualBuildEnv
from conan.tools.scm import Version
@ -25,7 +26,7 @@ class CuraConan(ConanFile):
settings = "os", "compiler", "build_type", "arch"
# FIXME: Remove specific branch once merged to main
python_requires = "translationextractor/[>=2.1.1]@ultimaker/stable"
python_requires = "translationextractor/[>=2.2.0]@ultimaker/stable"
options = {
"enterprise": ["True", "False", "true", "false"], # Workaround for GH Action passing boolean as lowercase string
@ -34,7 +35,8 @@ class CuraConan(ConanFile):
"cloud_api_version": "ANY",
"display_name": "ANY", # TODO: should this be an option??
"cura_debug_mode": [True, False], # FIXME: Use profiles
"internal": [True, False]
"internal": ["True", "False", "true", "false"], # Workaround for GH Action passing boolean as lowercase string
"enable_i18n": [True, False],
}
default_options = {
"enterprise": "False",
@ -43,12 +45,17 @@ class CuraConan(ConanFile):
"cloud_api_version": "1",
"display_name": "UltiMaker Cura",
"cura_debug_mode": False, # Not yet implemented
"internal": False,
"internal": "False",
"enable_i18n": False,
}
def set_version(self):
if not self.version:
self.version = "5.6.0-alpha"
self.version = self.conan_data["version"]
@property
def _i18n_options(self):
return self.conf.get("user.i18n:options", default = {"extract": True, "build": True}, check_type = dict)
@property
def _pycharm_targets(self):
@ -65,6 +72,8 @@ class CuraConan(ConanFile):
self._cura_env = Environment()
self._cura_env.define("QML2_IMPORT_PATH", str(self._site_packages.joinpath("PyQt6", "Qt6", "qml")))
self._cura_env.define("QT_PLUGIN_PATH", str(self._site_packages.joinpath("PyQt6", "Qt6", "plugins")))
if not self.in_local_cache:
self._cura_env.define("CURA_DATA_ROOT", str(self._share_dir.joinpath("cura")))
if self.settings.os == "Linux":
self._cura_env.define("QT_QPA_FONTDIR", "/usr/share/fonts")
@ -76,6 +85,10 @@ class CuraConan(ConanFile):
def _enterprise(self):
return self.options.enterprise in ["True", 'true']
@property
def _internal(self):
return self.options.internal in ["True", 'true']
@property
def _app_name(self):
if self._enterprise:
@ -137,36 +150,43 @@ class CuraConan(ConanFile):
return "'x86_64'"
return "None"
def _generate_about_versions(self, location):
with open(os.path.join(self.recipe_folder, "AboutDialogVersionsList.qml.jinja"), "r") as f:
cura_version_py = Template(f.read())
def _conan_installs(self):
self.output.info("Collecting conan installs")
conan_installs = {}
conan_installs = []
python_installs = []
# list of conan installs
for dependency in self.dependencies.host.values():
conan_installs[dependency.ref.name] = {
"version": dependency.ref.version,
"revision": dependency.ref.revision
}
return conan_installs
# list of conan installs
for _, dependency in self.dependencies.host.items():
conan_installs.append([dependency.ref.name,dependency.ref.version])
def _python_installs(self):
self.output.info("Collecting python installs")
python_installs = {}
# list of python installs
run_env = VirtualRunEnv(self)
env = run_env.environment()
env.prepend_path("PYTHONPATH", str(self._site_packages.as_posix()))
venv_vars = env.vars(self, scope = "run")
#list of python installs
outer = '"' if self.settings.os == "Windows" else "'"
inner = "'" if self.settings.os == "Windows" else '"'
python_ins_cmd = f"python -c {outer}import pkg_resources; print({inner};{inner}.join([(s.key+{inner},{inner}+ s.version) for s in pkg_resources.working_set])){outer}"
from six import StringIO
buffer = StringIO()
self.run(python_ins_cmd, run_environment= True, env = "conanrun", output=buffer)
with venv_vars.apply():
self.run(f"""python -c {outer}import pkg_resources; print({inner};{inner}.join([(s.key+{inner},{inner}+ s.version) for s in pkg_resources.working_set])){outer}""",
env = "conanrun",
output = buffer)
packages = str(buffer.getvalue()).split("-----------------\n")
package = packages[1].strip('\r\n').split(";")
for pack in package:
python_installs.append(pack.split(","))
with open(os.path.join(location, "AboutDialogVersionsList.qml"), "w") as f:
f.write(cura_version_py.render(
conan_installs = conan_installs,
python_installs = python_installs
))
packages = packages[1].strip('\r\n').split(";")
for package in packages:
name, version = package.split(",")
python_installs[name] = {"version": version}
return python_installs
def _generate_cura_version(self, location):
with open(os.path.join(self.recipe_folder, "CuraVersion.py.jinja"), "r") as f:
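The command assembled in `_python_installs()` prints the virtual environment's packages as `name,version` pairs joined by `;`, and the loop above turns that stream into a dictionary. The same round trip, stripped of Conan's output buffering, as a standalone sketch:

import pkg_resources  # provided by setuptools, as in the one-liner built above

# What the `python -c` one-liner prints: "name,version;name,version;..."
payload = ";".join(f"{dist.key},{dist.version}" for dist in pkg_resources.working_set)

# The same parsing as the loop above, without the "-----------------" banner split.
python_installs = {}
for entry in payload.strip("\r\n").split(";"):
    name, version = entry.split(",")
    python_installs[name] = {"version": version}

print(len(python_installs), "packages collected")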
@ -177,7 +197,7 @@ class CuraConan(ConanFile):
cura_version = Version(self.conf.get("user.cura:version", default = self.version, check_type = str))
pre_tag = f"-{cura_version.pre}" if cura_version.pre else ""
build_tag = f"+{cura_version.build}" if cura_version.build else ""
internal_tag = f"+internal" if self.options.internal else ""
internal_tag = f"+internal" if self._internal else ""
cura_version = f"{cura_version.major}.{cura_version.minor}.{cura_version.patch}{pre_tag}{build_tag}{internal_tag}"
with open(os.path.join(location, "CuraVersion.py"), "w") as f:
@ -192,13 +212,16 @@ class CuraConan(ConanFile):
cura_cloud_account_api_root = self.conan_data["urls"][self._urls]["cloud_account_api_root"],
cura_marketplace_root = self.conan_data["urls"][self._urls]["marketplace_root"],
cura_digital_factory_url = self.conan_data["urls"][self._urls]["digital_factory_url"],
cura_latest_url = self.conan_data["urls"][self._urls]["cura_latest_url"]))
cura_latest_url=self.conan_data["urls"][self._urls]["cura_latest_url"],
conan_installs=self._conan_installs(),
python_installs=self._python_installs(),
))
def _generate_pyinstaller_spec(self, location, entrypoint_location, icon_path, entitlements_file):
pyinstaller_metadata = self.conan_data["pyinstaller"]
datas = [(str(self._base_dir.joinpath("conan_install_info.json")), ".")]
datas = []
for data in pyinstaller_metadata["datas"].values():
if not self.options.internal and data.get("internal", False):
if not self._internal and data.get("internal", False):
continue
if "package" in data: # get the paths from conan package
@ -208,6 +231,8 @@ class CuraConan(ConanFile):
else:
src_path = os.path.join(self.source_folder, data["src"])
else:
if data["package"] not in self.deps_cpp_info.deps:
continue
src_path = os.path.join(self.deps_cpp_info[data["package"]].rootpath, data["src"])
elif "root" in data: # get the paths relative from the install folder
src_path = os.path.join(self.install_folder, data["root"], data["src"])
@ -253,7 +278,7 @@ class CuraConan(ConanFile):
with open(os.path.join(self.recipe_folder, "UltiMaker-Cura.spec.jinja"), "r") as f:
pyinstaller = Template(f.read())
version = self.conf_info.get("user.cura:version", default = self.version, check_type = str)
version = self.conf.get("user.cura:version", default = self.version, check_type = str)
cura_version = Version(version)
with open(os.path.join(location, "UltiMaker-Cura.spec"), "w") as f:
@ -277,6 +302,9 @@ class CuraConan(ConanFile):
short_version = f"'{cura_version.major}.{cura_version.minor}.{cura_version.patch}'",
))
def export(self):
update_conandata(self, {"version": self.version})
def export_sources(self):
copy(self, "*", os.path.join(self.recipe_folder, "plugins"), os.path.join(self.export_sources_folder, "plugins"))
copy(self, "*", os.path.join(self.recipe_folder, "resources"), os.path.join(self.export_sources_folder, "resources"), excludes = "*.mo")
@ -289,43 +317,52 @@ class CuraConan(ConanFile):
copy(self, "requirements-ultimaker.txt", self.recipe_folder, self.export_sources_folder)
copy(self, "cura_app.py", self.recipe_folder, self.export_sources_folder)
def config_options(self):
if self.settings.os == "Windows" and not self.conf.get("tools.microsoft.bash:path", check_type=str):
del self.options.enable_i18n
def configure(self):
self.options["pyarcus"].shared = True
self.options["pysavitar"].shared = True
self.options["pynest2d"].shared = True
self.options["dulcificum"].shared = self.settings.os != "Windows"
self.options["cpython"].shared = True
self.options["boost"].header_only = True
if self.settings.os == "Linux":
self.options["curaengine_grpc_definitions"].shared = True
self.options["openssl"].shared = True
if self.conf.get("user.curaengine:sentry_url", "", check_type=str) != "":
self.options["curaengine"].enable_sentry = True
self.options["arcus"].enable_sentry = True
self.options["clipper"].enable_sentry = True
def validate(self):
version = self.conf_info.get("user.cura:version", default = self.version, check_type = str)
version = self.conf.get("user.cura:version", default = self.version, check_type = str)
if version and Version(version) <= Version("4"):
raise ConanInvalidConfiguration("Only versions 5+ are support")
def requirements(self):
for req in self.conan_data["requirements"]:
if self._internal and "fdm_materials" in req:
continue
if not self._enterprise and "native_cad_plugin" in req:
continue
self.requires(req)
if self._internal:
for req in self.conan_data["requirements_internal"]:
self.requires(req)
self.requires("cpython/3.10.4@ultimaker/stable")
self.requires("clipper/6.4.2@ultimaker/stable")
self.requires("openssl/3.2.0")
self.requires("protobuf/3.21.12")
self.requires("boost/1.82.0")
self.requires("curaengine_grpc_definitions/(latest)@ultimaker/testing")
self.requires("spdlog/1.12.0")
self.requires("fmt/10.1.1")
self.requires("zlib/1.2.13")
self.requires("pyarcus/5.3.0")
self.requires("curaengine/(latest)@ultimaker/testing")
self.requires("pysavitar/5.3.0")
self.requires("pynest2d/5.3.0")
self.requires("curaengine_plugin_gradual_flow/(latest)@ultimaker/stable")
self.requires("uranium/(latest)@ultimaker/testing")
self.requires("cura_binary_data/(latest)@ultimaker/testing")
self.requires("cpython/3.10.4")
if self.options.internal:
self.requires("cura_private_data/(latest)@ultimaker/testing")
self.requires("fdm_materials/(latest)@internal/testing")
else:
self.requires("fdm_materials/(latest)@ultimaker/testing")
def build_requirements(self):
if self.options.devtools:
if self.settings.os != "Windows" or self.conf.get("tools.microsoft.bash:path", check_type = str):
# FIXME: once m4, autoconf, automake are Conan V2 ready use self.win_bash and add gettext as base tool_requirement
self.tool_requires("gettext/0.21@ultimaker/testing", force_host_context = True)
if self.options.get_safe("enable_i18n", False):
self.tool_requires("gettext/0.21", force_host_context = True)
def layout(self):
self.folders.source = "."
@ -346,7 +383,6 @@ class CuraConan(ConanFile):
vr.generate()
self._generate_cura_version(os.path.join(self.source_folder, "cura"))
self._generate_about_versions(os.path.join(self.source_folder, "resources","qml", "Dialogs"))
if not self.in_local_cache:
# Copy CuraEngine.exe to bindirs of Virtual Python Environment
@ -361,6 +397,12 @@ class CuraConan(ConanFile):
copy(self, "*", curaengine_plugin_gradual_flow.bindirs[0], self.source_folder, keep_path = False)
copy(self, "bundled_*.json", curaengine_plugin_gradual_flow.resdirs[1], str(self.source_path.joinpath("resources", "bundled_packages")), keep_path = False)
if self._enterprise:
rmdir(self, str(self.source_path.joinpath("plugins", "NativeCADplugin")))
curaengine_plugin_gradual_flow = self.dependencies["native_cad_plugin"].cpp_info
copy(self, "*", curaengine_plugin_gradual_flow.resdirs[0], str(self.source_path.joinpath("plugins", "NativeCADplugin")), keep_path = True)
copy(self, "bundled_*.json", curaengine_plugin_gradual_flow.resdirs[1], str(self.source_path.joinpath("resources", "bundled_packages")), keep_path = False)
# Copy resources of cura_binary_data
cura_binary_data = self.dependencies["cura_binary_data"].cpp_info
copy(self, "*", cura_binary_data.resdirs[0], str(self._share_dir.joinpath("cura")), keep_path = True)
@ -382,37 +424,40 @@ class CuraConan(ConanFile):
copy(self, "*", fdm_materials.resdirs[0], self.source_folder)
# Copy internal resources
if self.options.internal:
if self._internal:
cura_private_data = self.dependencies["cura_private_data"].cpp_info
copy(self, "*", cura_private_data.resdirs[0], str(self._share_dir.joinpath("cura")))
if self.options.devtools:
entitlements_file = "'{}'".format(os.path.join(self.source_folder, "packaging", "MacOS", "cura.entitlements"))
self._generate_pyinstaller_spec(location = self.generators_folder,
entrypoint_location = "'{}'".format(os.path.join(self.source_folder, self.conan_data["pyinstaller"]["runinfo"]["entrypoint"])).replace("\\", "\\\\"),
icon_path = "'{}'".format(os.path.join(self.source_folder, "packaging", self.conan_data["pyinstaller"]["icon"][str(self.settings.os)])).replace("\\", "\\\\"),
entitlements_file = entitlements_file if self.settings.os == "Macos" else "None")
self._generate_pyinstaller_spec(
location=self.generators_folder,
entrypoint_location="'{}'".format(
os.path.join(self.source_folder, self.conan_data["pyinstaller"]["runinfo"]["entrypoint"])).replace(
"\\", "\\\\"),
icon_path="'{}'".format(os.path.join(self.source_folder, "packaging",
self.conan_data["pyinstaller"]["icon"][
str(self.settings.os)])).replace("\\", "\\\\"),
entitlements_file=entitlements_file if self.settings.os == "Macos" else "None"
)
# Update the po and pot files
if self.settings.os != "Windows" or self.conf.get("tools.microsoft.bash:path", check_type=str):
vb = VirtualBuildEnv(self)
vb.generate()
if self.options.get_safe("enable_i18n", False) and self._i18n_options["extract"]:
vb = VirtualBuildEnv(self)
vb.generate()
# # FIXME: once m4, autoconf, automake are Conan V2 ready use self.win_bash and add gettext as base tool_requirement
# cpp_info = self.dependencies["gettext"].cpp_info
# pot = self.python_requires["translationextractor"].module.ExtractTranslations(self, cpp_info.bindirs[0])
# pot.generate()
# # FIXME: once m4, autoconf, automake are Conan V2 ready use self.win_bash and add gettext as base tool_requirement
cpp_info = self.dependencies["gettext"].cpp_info
pot = self.python_requires["translationextractor"].module.ExtractTranslations(self, cpp_info.bindirs[0])
pot.generate()
def build(self):
if self.options.devtools:
if self.settings.os != "Windows" or self.conf.get("tools.microsoft.bash:path", check_type = str):
# FIXME: once m4, autoconf, automake are Conan V2 ready use self.win_bash and add gettext as base tool_requirement
for po_file in self.source_path.joinpath("resources", "i18n").glob("**/*.po"):
mo_file = Path(self.build_folder, po_file.with_suffix('.mo').relative_to(self.source_path))
mo_file = mo_file.parent.joinpath("LC_MESSAGES", mo_file.name)
mkdir(self, str(unix_path(self, Path(mo_file).parent)))
cpp_info = self.dependencies["gettext"].cpp_info
self.run(f"{cpp_info.bindirs[0]}/msgfmt {po_file} -o {mo_file} -f", env="conanbuild", ignore_errors=True)
if self.options.get_safe("enable_i18n", False) and self._i18n_options["build"]:
for po_file in self.source_path.joinpath("resources", "i18n").glob("**/*.po"):
mo_file = Path(self.build_folder, po_file.with_suffix('.mo').relative_to(self.source_path))
mo_file = mo_file.parent.joinpath("LC_MESSAGES", mo_file.name)
mkdir(self, str(unix_path(self, Path(mo_file).parent)))
cpp_info = self.dependencies["gettext"].cpp_info
self.run(f"{cpp_info.bindirs[0]}/msgfmt {po_file} -o {mo_file} -f", env="conanbuild", ignore_errors=True)
def deploy(self):
copy(self, "*", os.path.join(self.package_folder, self.cpp.package.resdirs[2]), os.path.join(self.install_folder, "packaging"), keep_path = True)
@ -423,6 +468,12 @@ class CuraConan(ConanFile):
copy(self, "*", os.path.join(self.package_folder, self.cpp_info.resdirs[0]), str(self._share_dir.joinpath("cura", "resources")), keep_path = True)
copy(self, "*", os.path.join(self.package_folder, self.cpp_info.resdirs[1]), str(self._share_dir.joinpath("cura", "plugins")), keep_path = True)
# Copy the cura_resources resources from the package
rm(self, "conanfile.py", os.path.join(self.package_folder, self.cpp.package.resdirs[0]))
cura_resources = self.dependencies["cura_resources"].cpp_info
for res_dir in cura_resources.resdirs:
copy(self, "*", res_dir, str(self._share_dir.joinpath("cura", "resources", Path(res_dir).name)), keep_path = True)
# Copy resources of Uranium (keep folder structure)
uranium = self.dependencies["uranium"].cpp_info
copy(self, "*", uranium.resdirs[0], str(self._share_dir.joinpath("uranium", "resources")), keep_path = True)
@ -430,7 +481,7 @@ class CuraConan(ConanFile):
copy(self, "*", uranium.libdirs[0], str(self._site_packages.joinpath("UM")), keep_path = True)
# Generate the GitHub Action version info Environment
version = self.conf_info.get("user.cura:version", default = self.version, check_type = str)
version = self.conf.get("user.cura:version", default = self.version, check_type = str)
cura_version = Version(version)
env_prefix = "Env:" if self.settings.os == "Windows" else ""
activate_github_actions_version_env = Template(r"""echo "CURA_VERSION_MAJOR={{ cura_version_major }}" >> ${{ env_prefix }}GITHUB_ENV
@ -451,7 +502,6 @@ echo "CURA_APP_NAME={{ cura_app_name }}" >> ${{ env_prefix }}GITHUB_ENV
save(self, os.path.join(self._script_dir, f"activate_github_actions_version_env{ext}"), activate_github_actions_version_env)
self._generate_cura_version(os.path.join(self._site_packages, "cura"))
self._generate_about_versions(str(self._share_dir.joinpath("cura", "resources", "qml", "Dialogs")))
entitlements_file = "'{}'".format(Path(self.cpp_info.res_paths[2], "MacOS", "cura.entitlements"))
self._generate_pyinstaller_spec(location = self._base_dir,
@ -475,6 +525,12 @@ echo "CURA_APP_NAME={{ cura_app_name }}" >> ${{ env_prefix }}GITHUB_ENV
# Remove the fdm_materials from the package
rmdir(self, os.path.join(self.package_folder, self.cpp.package.resdirs[0], "materials"))
# Remove the cura_resources resources from the package
rm(self, "conanfile.py", os.path.join(self.package_folder, self.cpp.package.resdirs[0]))
cura_resources = self.dependencies["cura_resources"].cpp_info
for res_dir in cura_resources.resdirs:
rmdir(self, os.path.join(self.package_folder, self.cpp.package.resdirs[0], Path(res_dir).name))
def package_info(self):
self.user_info.pip_requirements = "requirements.txt"
self.user_info.pip_requirements_git = "requirements-ultimaker.txt"
@ -482,10 +538,14 @@ echo "CURA_APP_NAME={{ cura_app_name }}" >> ${{ env_prefix }}GITHUB_ENV
if self.in_local_cache:
self.runenv_info.append_path("PYTHONPATH", os.path.join(self.package_folder, "site-packages"))
self.env_info.PYTHONPATH.append(os.path.join(self.package_folder, "site-packages"))
self.runenv_info.append_path("PYTHONPATH", os.path.join(self.package_folder, "plugins"))
self.env_info.PYTHONPATH.append(os.path.join(self.package_folder, "plugins"))
else:
self.runenv_info.append_path("PYTHONPATH", self.source_folder)
self.env_info.PYTHONPATH.append(self.source_folder)
self.runenv_info.append_path("PYTHONPATH", os.path.join(self.source_folder, "plugins"))
self.env_info.PYTHONPATH.append(os.path.join(self.source_folder, "plugins"))
def package_id(self):
self.info.clear()
@ -498,6 +558,8 @@ echo "CURA_APP_NAME={{ cura_app_name }}" >> ${{ env_prefix }}GITHUB_ENV
del self.info.options.cloud_api_version
del self.info.options.display_name
del self.info.options.cura_debug_mode
if self.options.get_safe("enable_i18n", False):
del self.info.options.enable_i18n
# TODO: Use the hash of requirements.txt and requirements-ultimaker.txt, because changing these will actually result in a different
# Cura. This is needed because the requirements.txt files aren't managed by Conan and are therefore not resolved in the package_id. This isn't

@ -190,6 +190,20 @@ class Account(QObject):
def isLoggedIn(self) -> bool:
return self._logged_in
@pyqtSlot()
def stopSyncing(self) -> None:
Logger.debug(f"Stopping sync of cloud printers")
self._setManualSyncEnabled(True)
if self._update_timer.isActive():
self._update_timer.stop()
@pyqtSlot()
def startSyncing(self) -> None:
Logger.debug(f"Starting sync of cloud printers")
self._setManualSyncEnabled(False)
if not self._update_timer.isActive():
self._update_timer.start()
def _onLoginStateChanged(self, logged_in: bool = False, error_message: Optional[str] = None) -> None:
if error_message:
if self._error_message:

@ -1,4 +1,4 @@
# Copyright (c) 2022 UltiMaker
# Copyright (c) 2023 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.
# ---------
@ -14,7 +14,7 @@ DEFAULT_CURA_LATEST_URL = "https://software.ultimaker.com/latest.json"
# Each release has a fixed SDK version coupled with it. It doesn't make sense to make it configurable because, for
# example Cura 3.2 with SDK version 6.1 will not work. So the SDK version is hard-coded here and left out of the
# CuraVersion.py.in template.
CuraSDKVersion = "8.5.0"
CuraSDKVersion = "8.7.0"
try:
from cura.CuraVersion import CuraLatestURL
@ -69,13 +69,25 @@ try:
except ImportError:
CuraAppDisplayName = DEFAULT_CURA_DISPLAY_NAME
DEPENDENCY_INFO = {}
try:
from pathlib import Path
conan_install_info = Path(__file__).parent.parent.joinpath("conan_install_info.json")
if conan_install_info.exists():
import json
with open(conan_install_info, "r") as f:
DEPENDENCY_INFO = json.loads(f.read())
except:
pass
from cura.CuraVersion import ConanInstalls
if type(ConanInstalls) == dict:
CONAN_INSTALLS = ConanInstalls
else:
CONAN_INSTALLS = {}
except ImportError:
CONAN_INSTALLS = {}
try:
from cura.CuraVersion import PythonInstalls
if type(PythonInstalls) == dict:
PYTHON_INSTALLS = PythonInstalls
else:
PYTHON_INSTALLS = {}
except ImportError:
PYTHON_INSTALLS = {}
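Both guarded imports above follow the same shape; purely as an illustration (this helper is hypothetical and not part of ApplicationMetadata), the pattern can be factored out as:
```python
def _safe_dict_import(module_name: str, attribute: str) -> dict:
    """Return the named attribute if it can be imported and is a dict, else an empty dict."""
    try:
        module = __import__(module_name, fromlist=[attribute])
        value = getattr(module, attribute)
    except (ImportError, AttributeError):
        return {}
    return value if isinstance(value, dict) else {}

# e.g. CONAN_INSTALLS = _safe_dict_import("cura.CuraVersion", "ConanInstalls")
```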

@ -241,14 +241,8 @@ class GridArrange(Arranger):
center_grid_x = coord_grid_x + (0.5 * self._grid_width)
center_grid_y = coord_grid_y + (0.5 * self._grid_height)
bounding_box = node.getBoundingBox()
center_node_x = (bounding_box.left + bounding_box.right) * 0.5
center_node_y = (bounding_box.back + bounding_box.front) * 0.5
delta_x = center_grid_x - center_node_x
delta_y = center_grid_y - center_node_y
return TranslateOperation(node, Vector(delta_x, 0, delta_y))
return TranslateOperation(node, Vector(center_grid_x, node.getWorldPosition().y, center_grid_y),
set_position=True)
def _getGridCornerPoints(
self,

@ -18,8 +18,8 @@ class BackendPlugin(AdditionalSettingDefinitionsAppender, PluginObject):
catalog = i18nCatalog("cura")
settings_catalog = i18nCatalog("fdmprinter.def.json")
def __init__(self) -> None:
super().__init__(self.settings_catalog)
def __init__(self, catalog_i18n = settings_catalog) -> None:
super().__init__(catalog_i18n)
self.__port: int = 0
self._plugin_address: str = "127.0.0.1"
self._plugin_command: Optional[List[str]] = None

@ -120,6 +120,8 @@ class BuildVolume(SceneNode):
# Objects loaded at the moment. We are connected to the property changed events of these objects.
self._scene_objects = set() # type: Set[SceneNode]
# Number of toplevel printable meshes. If there is more than one, the build volume needs to take account of the gantry height in One at a Time printing.
self._root_printable_object_count = 0
self._scene_change_timer = QTimer()
self._scene_change_timer.setInterval(200)
@ -151,6 +153,7 @@ class BuildVolume(SceneNode):
def _onSceneChangeTimerFinished(self):
root = self._application.getController().getScene().getRoot()
new_scene_objects = set(node for node in BreadthFirstIterator(root) if node.callDecoration("isSliceable"))
if new_scene_objects != self._scene_objects:
for node in new_scene_objects - self._scene_objects: #Nodes that were added to the scene.
self._updateNodeListeners(node)
@ -166,6 +169,26 @@ class BuildVolume(SceneNode):
self.rebuild()
self._scene_objects = new_scene_objects
# This also needs to be called when objects are grouped/ungrouped,
# which is not reflected in a change in self._scene_objects
self._updateRootPrintableObjectCount()
def _updateRootPrintableObjectCount(self):
# Get the number of models in the scene root, excluding modifier meshes and counting grouped models as 1
root = self._application.getController().getScene().getRoot()
scene_objects = set(node for node in BreadthFirstIterator(root) if node.callDecoration("isSliceable") or node.callDecoration("isGroup"))
new_root_printable_object_count = len(list(node for node in scene_objects if node.getParent() == root and not (
node_stack := node.callDecoration("getStack") and (
node.callDecoration("getStack").getProperty("anti_overhang_mesh", "value") or
node.callDecoration("getStack").getProperty("support_mesh", "value") or
node.callDecoration("getStack").getProperty("cutting_mesh", "value") or
node.callDecoration("getStack").getProperty("infill_mesh", "value")
))
))
if new_root_printable_object_count != self._root_printable_object_count:
self._root_printable_object_count = new_root_printable_object_count
self._onSettingPropertyChanged("print_sequence", "value") # Create fake event, so right settings are triggered.
def _updateNodeListeners(self, node: SceneNode):
@ -489,20 +512,20 @@ class BuildVolume(SceneNode):
if not self._disallowed_areas:
return None
bounding_box = Polygon(numpy.array([[min_w, min_d], [min_w, max_d], [max_w, max_d], [max_w, min_d]], numpy.float32))
mb = MeshBuilder()
color = self._disallowed_area_color
for polygon in self._disallowed_areas:
points = polygon.getPoints()
if len(points) == 0:
intersection = polygon.intersectionConvexHulls(bounding_box)
points = numpy.flipud(intersection.getPoints())
if len(points) < 3:
continue
first = Vector(self._clamp(points[0][0], min_w, max_w), disallowed_area_height,
self._clamp(points[0][1], min_d, max_d))
previous_point = Vector(self._clamp(points[0][0], min_w, max_w), disallowed_area_height,
self._clamp(points[0][1], min_d, max_d))
for point in points:
new_point = Vector(self._clamp(point[0], min_w, max_w), disallowed_area_height,
self._clamp(point[1], min_d, max_d))
first = Vector(points[0][0], disallowed_area_height, points[0][1])
previous_point = Vector(points[1][0], disallowed_area_height, points[1][1])
for point in points[2:]:
new_point = Vector(point[0], disallowed_area_height, point[1])
mb.addFace(first, previous_point, new_point, color=color)
previous_point = new_point
@ -650,7 +673,7 @@ class BuildVolume(SceneNode):
self._width = self._global_container_stack.getProperty("machine_width", "value")
machine_height = self._global_container_stack.getProperty("machine_height", "value")
if self._global_container_stack.getProperty("print_sequence", "value") == "one_at_a_time" and len(self._scene_objects) > 1:
if self._global_container_stack.getProperty("print_sequence", "value") == "one_at_a_time" and self._root_printable_object_count > 1:
new_height = min(self._global_container_stack.getProperty("gantry_height", "value") * self._scale_vector.z, machine_height)
if self._height > new_height:
@ -692,9 +715,12 @@ class BuildVolume(SceneNode):
update_extra_z_clearance = True
for setting_key in self._changed_settings_since_last_rebuild:
if setting_key in ["print_sequence", "support_mesh", "infill_mesh", "cutting_mesh", "anti_overhang_mesh"]:
self._updateRootPrintableObjectCount()
if setting_key == "print_sequence":
machine_height = self._global_container_stack.getProperty("machine_height", "value")
if self._application.getGlobalContainerStack().getProperty("print_sequence", "value") == "one_at_a_time" and len(self._scene_objects) > 1:
if self._application.getGlobalContainerStack().getProperty("print_sequence", "value") == "one_at_a_time" and self._root_printable_object_count > 1:
new_height = min(
self._global_container_stack.getProperty("gantry_height", "value") * self._scale_vector.z,
machine_height)
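For example (illustrative numbers only): with gantry_height = 55 mm, a Z scale factor of 1.0 and machine_height = 200 mm, switching to one-at-a-time printing with more than one printable object clamps the usable build height to min(55 * 1.0, 200) = 55 mm.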

@ -1,12 +1,12 @@
# Copyright (c) 2023 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.
from typing import List, cast
from PyQt6.QtCore import QObject, QUrl, QMimeData
from PyQt6.QtCore import QObject, QUrl, pyqtSignal, pyqtProperty
from PyQt6.QtGui import QDesktopServices
from PyQt6.QtWidgets import QApplication
from UM.Application import Application
from UM.Event import CallFunctionEvent
from UM.FlameProfiler import pyqtSlot
from UM.Math.Vector import Vector
@ -32,11 +32,14 @@ from cura.Operations.SetBuildPlateNumberOperation import SetBuildPlateNumberOper
from UM.Logger import Logger
from UM.Scene.SceneNode import SceneNode
class CuraActions(QObject):
def __init__(self, parent: QObject = None) -> None:
super().__init__(parent)
self._operation_stack = Application.getInstance().getOperationStack()
self._operation_stack.changed.connect(self._onUndoStackChanged)
undoStackChanged = pyqtSignal()
@pyqtSlot()
def openDocumentation(self) -> None:
# Starting a web browser from a signal handler connected to a menu will crash on windows.
@ -45,6 +48,25 @@ class CuraActions(QObject):
event = CallFunctionEvent(self._openUrl, [QUrl("https://ultimaker.com/en/resources/manuals/software?utm_source=cura&utm_medium=software&utm_campaign=dropdown-documentation")], {})
cura.CuraApplication.CuraApplication.getInstance().functionEvent(event)
@pyqtProperty(bool, notify=undoStackChanged)
def canUndo(self):
return self._operation_stack.canUndo()
@pyqtProperty(bool, notify=undoStackChanged)
def canRedo(self):
return self._operation_stack.canRedo()
@pyqtSlot()
def undo(self):
self._operation_stack.undo()
@pyqtSlot()
def redo(self):
self._operation_stack.redo()
def _onUndoStackChanged(self):
self.undoStackChanged.emit()
@pyqtSlot()
def openBugReportPage(self) -> None:
event = CallFunctionEvent(self._openUrl, [QUrl("https://github.com/Ultimaker/Cura/issues/new/choose")], {})
@ -249,7 +271,11 @@ class CuraActions(QObject):
# deselect currently selected nodes, and select the new nodes
for node in Selection.getAllSelectedObjects():
Selection.remove(node)
numberOfFixedNodes = len(fixed_nodes)
for node in nodes:
numberOfFixedNodes += 1
node.printOrder = numberOfFixedNodes
Selection.add(node)
def _openUrl(self, url: QUrl) -> None:

@ -2,22 +2,26 @@
# Cura is released under the terms of the LGPLv3 or higher.
import enum
import os
import re
import sys
import tempfile
import time
import platform
from pathlib import Path
from typing import cast, TYPE_CHECKING, Optional, Callable, List, Any, Dict
import requests
import numpy
from PyQt6.QtCore import QObject, QTimer, QUrl, pyqtSignal, pyqtProperty, QEvent, pyqtEnum, QCoreApplication
from PyQt6.QtCore import QObject, QTimer, QUrl, QUrlQuery, pyqtSignal, pyqtProperty, QEvent, pyqtEnum, QCoreApplication, \
QByteArray
from PyQt6.QtGui import QColor, QIcon
from PyQt6.QtQml import qmlRegisterUncreatableType, qmlRegisterUncreatableMetaObject, qmlRegisterSingletonType, qmlRegisterType
from PyQt6.QtQml import qmlRegisterUncreatableMetaObject, qmlRegisterSingletonType, qmlRegisterType
from PyQt6.QtWidgets import QMessageBox
import UM.Util
import cura.Settings.cura_empty_instance_containers
from UM.Application import Application
from UM.Decorators import override
from UM.Decorators import override, deprecated
from UM.FlameProfiler import pyqtSlot
from UM.Logger import Logger
from UM.Math.AxisAlignedBox import AxisAlignedBox
@ -29,6 +33,7 @@ from UM.Message import Message
from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation
from UM.Operations.GroupedOperation import GroupedOperation
from UM.Operations.SetTransformOperation import SetTransformOperation
from UM.OutputDevice.ProjectOutputDevice import ProjectOutputDevice
from UM.Platform import Platform
from UM.PluginError import PluginNotFoundError
from UM.Preferences import Preferences
@ -100,7 +105,8 @@ from cura.Settings.SettingInheritanceManager import SettingInheritanceManager
from cura.Settings.SidebarCustomMenuItemsModel import SidebarCustomMenuItemsModel
from cura.Settings.SimpleModeSettingsManager import SimpleModeSettingsManager
from cura.TaskManagement.OnExitCallbackManager import OnExitCallbackManager
from cura.UI import CuraSplashScreen, MachineActionManager, PrintInformation
from cura.UI import CuraSplashScreen, PrintInformation
from cura.UI.MachineActionManager import MachineActionManager
from cura.UI.AddPrinterPagesModel import AddPrinterPagesModel
from cura.UI.MachineSettingsManager import MachineSettingsManager
from cura.UI.ObjectsModel import ObjectsModel
@ -121,6 +127,7 @@ from .Machines.Models.CompatibleMachineModel import CompatibleMachineModel
from .Machines.Models.MachineListModel import MachineListModel
from .Machines.Models.ActiveIntentQualitiesModel import ActiveIntentQualitiesModel
from .Machines.Models.IntentSelectionModel import IntentSelectionModel
from .PrintOrderManager import PrintOrderManager
from .SingleInstance import SingleInstance
if TYPE_CHECKING:
@ -132,7 +139,7 @@ class CuraApplication(QtApplication):
# SettingVersion represents the set of settings available in the machine/extruder definitions.
# You need to make sure that this version number needs to be increased if there is any non-backwards-compatible
# changes of the settings.
SettingVersion = 22
SettingVersion = 23
Created = False
@ -175,18 +182,20 @@ class CuraApplication(QtApplication):
# Variables set from CLI
self._files_to_open = []
self._urls_to_open = []
self._use_single_instance = False
self._single_instance = None
self._open_project_mode: Optional[str] = None
self._cura_formula_functions = None # type: Optional[CuraFormulaFunctions]
self._machine_action_manager = None # type: Optional[MachineActionManager.MachineActionManager]
self._machine_action_manager: Optional[MachineActionManager] = None
self.empty_container = None # type: EmptyInstanceContainer
self.empty_definition_changes_container = None # type: EmptyInstanceContainer
self.empty_variant_container = None # type: EmptyInstanceContainer
self.empty_intent_container = None # type: EmptyInstanceContainer
self.empty_intent_container = None # type: EmptyInstanceContainer
self.empty_material_container = None # type: EmptyInstanceContainer
self.empty_quality_container = None # type: EmptyInstanceContainer
self.empty_quality_changes_container = None # type: EmptyInstanceContainer
@ -197,6 +206,7 @@ class CuraApplication(QtApplication):
self._container_manager = None
self._object_manager = None
self._print_order_manager = None
self._extruders_model = None
self._extruders_model_with_optional = None
self._build_plate_model = None
@ -248,7 +258,7 @@ class CuraApplication(QtApplication):
self._additional_components = {} # Components to add to certain areas in the interface
self._open_file_queue = [] # A list of files to open (after the application has started)
self._open_url_queue = [] # A list of urls to open (after the application has started)
self._update_platform_activity_timer = None
self._sidebar_custom_menu_items = [] # type: list # Keeps list of custom menu items for the side bar
@ -269,6 +279,11 @@ class CuraApplication(QtApplication):
CentralFileStorage.setIsEnterprise(ApplicationMetadata.IsEnterpriseVersion)
Resources.setIsEnterprise(ApplicationMetadata.IsEnterpriseVersion)
self._conan_installs = ApplicationMetadata.CONAN_INSTALLS
self._python_installs = ApplicationMetadata.PYTHON_INSTALLS
self._supported_url_schemes: List[str] = ["cura", "slicer"]
@pyqtProperty(str, constant=True)
def ultimakerCloudApiRootUrl(self) -> str:
return UltimakerCloudConstants.CuraCloudAPIRoot
@ -321,7 +336,11 @@ class CuraApplication(QtApplication):
assert not "This crash is triggered by the trigger_early_crash command line argument."
for filename in self._cli_args.file:
self._files_to_open.append(os.path.abspath(filename))
url = QUrl(filename)
if url.scheme() in self._supported_url_schemes:
self._urls_to_open.append(url)
else:
self._files_to_open.append(os.path.abspath(filename))
def initialize(self) -> None:
self.__addExpectedResourceDirsAndSearchPaths() # Must be added before init of super
@ -338,11 +357,11 @@ class CuraApplication(QtApplication):
self.__addAllEmptyContainers()
self.__setLatestResouceVersionsForVersionUpgrade()
self._machine_action_manager = MachineActionManager.MachineActionManager(self)
self._machine_action_manager = MachineActionManager(self)
self._machine_action_manager.initialize()
def __sendCommandToSingleInstance(self):
self._single_instance = SingleInstance(self, self._files_to_open)
self._single_instance = SingleInstance(self, self._files_to_open, self._urls_to_open)
# If we use single instance, try to connect to the single instance server, send commands, and then exit.
# If we cannot find an existing single instance server, this is the only instance, so just keep going.
@ -359,10 +378,20 @@ class CuraApplication(QtApplication):
Resources.addExpectedDirNameInData(dir_name)
app_root = os.path.abspath(os.path.join(os.path.dirname(sys.executable)))
Resources.addSecureSearchPath(os.path.join(app_root, "share", "cura", "resources"))
Resources.addSecureSearchPath(os.path.join(self._app_install_dir, "share", "cura", "resources"))
if platform.system() == "Darwin":
Resources.addSecureSearchPath(os.path.join(app_root, "Resources", "share", "cura", "resources"))
Resources.addSecureSearchPath(
os.path.join(self._app_install_dir, "Resources", "share", "cura", "resources"))
else:
Resources.addSecureSearchPath(os.path.join(app_root, "share", "cura", "resources"))
Resources.addSecureSearchPath(os.path.join(self._app_install_dir, "share", "cura", "resources"))
if not hasattr(sys, "frozen"):
cura_data_root = os.environ.get('CURA_DATA_ROOT', None)
if cura_data_root:
Resources.addSearchPath(str(Path(cura_data_root).joinpath("resources")))
Resources.addSearchPath(os.path.join(os.path.abspath(os.path.dirname(__file__)), "..", "resources"))
# local Conan cache
@ -589,6 +618,7 @@ class CuraApplication(QtApplication):
preferences.addPreference("view/invert_zoom", False)
preferences.addPreference("view/filter_current_build_plate", False)
preferences.addPreference("view/navigation_style", "cura")
preferences.addPreference("cura/sidebar_collapsed", False)
preferences.addPreference("cura/favorite_materials", "")
@ -851,11 +881,8 @@ class CuraApplication(QtApplication):
self._log_hardware_info()
if len(ApplicationMetadata.DEPENDENCY_INFO) > 0:
Logger.debug("Using Conan managed dependencies: " + ", ".join(
[dep["recipe"]["id"] for dep in ApplicationMetadata.DEPENDENCY_INFO["installed"] if dep["recipe"]["version"] != "latest"]))
else:
Logger.warning("Could not find conan_install_info.json")
Logger.debug("Using conan dependencies: {}", str(self.conanInstalls))
Logger.debug("Using python dependencies: {}", str(self.pythonInstalls))
Logger.log("i", "Initializing machine error checker")
self._machine_error_checker = MachineErrorChecker(self)
@ -884,6 +911,7 @@ class CuraApplication(QtApplication):
# initialize info objects
self._print_information = PrintInformation.PrintInformation(self)
self._cura_actions = CuraActions.CuraActions(self)
self._print_order_manager = PrintOrderManager(self.getObjectsModel().getNodes)
self.processEvents()
# Initialize setting visibility presets model.
self._setting_visibility_presets_model = SettingVisibilityPresetsModel(self.getPreferences(), parent = self)
@ -941,6 +969,10 @@ class CuraApplication(QtApplication):
self.callLater(self._openFile, file_name)
for file_name in self._open_file_queue: # Open all the files that were queued up while plug-ins were loading.
self.callLater(self._openFile, file_name)
for url in self._urls_to_open:
self.callLater(self._openUrl, url)
for url in self._open_url_queue:
self.callLater(self._openUrl, url)
initializationFinished = pyqtSignal()
showAddPrintersUncancellableDialog = pyqtSignal() # Used to show the add printers dialog with a greyed background
@ -962,6 +994,7 @@ class CuraApplication(QtApplication):
t.setEnabledAxis([ToolHandle.XAxis, ToolHandle.YAxis, ToolHandle.ZAxis])
Selection.selectionChanged.connect(self.onSelectionChanged)
self._print_order_manager.printOrderChanged.connect(self._onPrintOrderChanged)
# Set default background color for scene
self.getRenderer().setBackgroundColor(QColor(245, 245, 245))
@ -1051,6 +1084,10 @@ class CuraApplication(QtApplication):
def getTextManager(self, *args) -> "TextManager":
return self._text_manager
@pyqtSlot()
def setWorkplaceDropToBuildplate(self):
return self._physics.setAppAllModelDropDown()
def getCuraFormulaFunctions(self, *args) -> "CuraFormulaFunctions":
if self._cura_formula_functions is None:
self._cura_formula_functions = CuraFormulaFunctions(self)
@ -1077,6 +1114,10 @@ class CuraApplication(QtApplication):
self._object_manager = ObjectsModel(self)
return self._object_manager
@pyqtSlot(str, result = "QVariantList")
def getSupportedActionMachineList(self, definition_id: str) -> List["MachineAction"]:
return self._machine_action_manager.getSupportedActions(self._machine_manager.getDefinitionByMachineId(definition_id))
@pyqtSlot(result = QObject)
def getExtrudersModel(self, *args) -> "ExtrudersModel":
if self._extruders_model is None:
@ -1102,6 +1143,16 @@ class CuraApplication(QtApplication):
self._build_plate_model = BuildPlateModel(self)
return self._build_plate_model
@pyqtSlot()
def exportUcp(self):
writer = self.getMeshFileHandler().getWriter("3MFWriter")
if writer is None:
Logger.warning("3mf writer is not enabled")
return
writer.exportUcp()
def getCuraSceneController(self, *args) -> CuraSceneController:
if self._cura_scene_controller is None:
self._cura_scene_controller = CuraSceneController.createCuraSceneController()
@ -1112,14 +1163,16 @@ class CuraApplication(QtApplication):
self._setting_inheritance_manager = SettingInheritanceManager.createSettingInheritanceManager()
return self._setting_inheritance_manager
def getMachineActionManager(self, *args: Any) -> MachineActionManager.MachineActionManager:
@pyqtSlot(result = QObject)
def getMachineActionManager(self, *args: Any) -> MachineActionManager:
"""Get the machine action manager
We ignore any *args given to this, as we also register the machine manager as qml singleton.
It wants to give this function an engine and script engine, but we don't care about that.
"""
return cast(MachineActionManager.MachineActionManager, self._machine_action_manager)
return self._machine_action_manager
@pyqtSlot(result = QObject)
def getMaterialManagementModel(self) -> MaterialManagementModel:
@ -1133,7 +1186,8 @@ class CuraApplication(QtApplication):
self._quality_management_model = QualityManagementModel(parent = self)
return self._quality_management_model
def getSimpleModeSettingsManager(self, *args):
@pyqtSlot(result=QObject)
def getSimpleModeSettingsManager(self)-> SimpleModeSettingsManager:
if self._simple_mode_settings_manager is None:
self._simple_mode_settings_manager = SimpleModeSettingsManager()
return self._simple_mode_settings_manager
@ -1150,9 +1204,15 @@ class CuraApplication(QtApplication):
if event.type() == QEvent.Type.FileOpen:
if self._plugins_loaded:
self._openFile(event.file())
if event.file():
self._openFile(event.file())
if event.url():
self._openUrl(event.url())
else:
self._open_file_queue.append(event.file())
if event.file():
self._open_file_queue.append(event.file())
if event.url():
self._open_url_queue.append(event.url())
if int(event.type()) == 20: # 'QEvent.Type.Quit' enum isn't there, even though it should be according to docs.
# Once we're at this point, everything should have been flushed already (past OnExitCallbackManager).
@ -1170,16 +1230,43 @@ class CuraApplication(QtApplication):
return self._print_information
def getQualityProfilesDropDownMenuModel(self, *args, **kwargs):
@pyqtSlot(result=QObject)
def getQualityProfilesDropDownMenuModel(self, *args, **kwargs)-> QualityProfilesDropDownMenuModel:
if self._quality_profile_drop_down_menu_model is None:
self._quality_profile_drop_down_menu_model = QualityProfilesDropDownMenuModel(self)
return self._quality_profile_drop_down_menu_model
def getCustomQualityProfilesDropDownMenuModel(self, *args, **kwargs):
@pyqtSlot(result=QObject)
def getCustomQualityProfilesDropDownMenuModel(self, *args, **kwargs)->CustomQualityProfilesDropDownMenuModel:
if self._custom_quality_profile_drop_down_menu_model is None:
self._custom_quality_profile_drop_down_menu_model = CustomQualityProfilesDropDownMenuModel(self)
return self._custom_quality_profile_drop_down_menu_model
@deprecated("SimpleModeSettingsManager is deprecated and will be removed in major SDK release, Use getSimpleModeSettingsManager() instead", since = "5.7.0")
def getSimpleModeSettingsManagerWrapper(self, *args, **kwargs):
return self.getSimpleModeSettingsManager()
@deprecated("MachineActionManager is deprecated and will be removed in major SDK release, Use getMachineActionManager() instead", since="5.7.0")
def getMachineActionManagerWrapper(self, *args, **kwargs):
return self.getMachineActionManager()
@deprecated("QualityManagementModel is deprecated and will be removed in major SDK release, Use getQualityManagementModel() instead", since="5.7.0")
def getQualityManagementModelWrapper(self, *args, **kwargs):
return self.getQualityManagementModel()
@deprecated("MaterialManagementModel is deprecated and will be removed in major SDK release, Use getMaterialManagementModel() instead", since = "5.7.0")
def getMaterialManagementModelWrapper(self, *args, **kwargs):
return self.getMaterialManagementModel()
@deprecated("QualityProfilesDropDownMenuModel is deprecated and will be removed in major SDK release, Use getQualityProfilesDropDownMenuModel() instead", since = "5.7.0")
def getQualityProfilesDropDownMenuModelWrapper(self, *args, **kwargs):
return self.getQualityProfilesDropDownMenuModel()
@deprecated("CustomQualityProfilesDropDownMenuModel is deprecated and will be removed in major SDK release, Use getCustomQualityProfilesDropDownMenuModel() instead", since = "5.7.0")
def getCustomQualityProfilesDropDownMenuModelWrapper(self, *args, **kwargs):
return self.getCustomQualityProfilesDropDownMenuModel()
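These wrapper methods exist so that the qmlRegisterSingletonType calls further down can keep resolving the old singleton names while emitting a deprecation warning, whereas Python code is expected to call the plain getters directly.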
def getCuraAPI(self, *args, **kwargs) -> "CuraAPI":
return self._cura_API
@ -1195,6 +1282,7 @@ class CuraApplication(QtApplication):
self.processEvents()
engine.rootContext().setContextProperty("Printer", self)
engine.rootContext().setContextProperty("CuraApplication", self)
engine.rootContext().setContextProperty("PrintOrderManager", self._print_order_manager)
engine.rootContext().setContextProperty("PrintInformation", self._print_information)
engine.rootContext().setContextProperty("CuraActions", self._cura_actions)
engine.rootContext().setContextProperty("CuraSDKVersion", ApplicationMetadata.CuraSDKVersion)
@ -1208,8 +1296,8 @@ class CuraApplication(QtApplication):
qmlRegisterSingletonType(MachineManager, "Cura", 1, 0, self.getMachineManager, "MachineManager")
qmlRegisterSingletonType(IntentManager, "Cura", 1, 6, self.getIntentManager, "IntentManager")
qmlRegisterSingletonType(SettingInheritanceManager, "Cura", 1, 0, self.getSettingInheritanceManager, "SettingInheritanceManager")
qmlRegisterSingletonType(SimpleModeSettingsManager, "Cura", 1, 0, self.getSimpleModeSettingsManager, "SimpleModeSettingsManager")
qmlRegisterSingletonType(MachineActionManager.MachineActionManager, "Cura", 1, 0, self.getMachineActionManager, "MachineActionManager")
qmlRegisterSingletonType(SimpleModeSettingsManager, "Cura", 1, 0, self.getSimpleModeSettingsManagerWrapper, "SimpleModeSettingsManager")
qmlRegisterSingletonType(MachineActionManager, "Cura", 1, 0, self.getMachineActionManagerWrapper, "MachineActionManager")
self.processEvents()
qmlRegisterType(NetworkingUtil, "Cura", 1, 5, "NetworkingUtil")
@ -1234,16 +1322,14 @@ class CuraApplication(QtApplication):
qmlRegisterType(FavoriteMaterialsModel, "Cura", 1, 0, "FavoriteMaterialsModel")
qmlRegisterType(GenericMaterialsModel, "Cura", 1, 0, "GenericMaterialsModel")
qmlRegisterType(MaterialBrandsModel, "Cura", 1, 0, "MaterialBrandsModel")
qmlRegisterSingletonType(QualityManagementModel, "Cura", 1, 0, self.getQualityManagementModel, "QualityManagementModel")
qmlRegisterSingletonType(MaterialManagementModel, "Cura", 1, 5, self.getMaterialManagementModel, "MaterialManagementModel")
qmlRegisterSingletonType(QualityManagementModel, "Cura", 1, 0, self.getQualityManagementModelWrapper,"QualityManagementModel")
qmlRegisterSingletonType(MaterialManagementModel, "Cura", 1, 5, self.getMaterialManagementModelWrapper,"MaterialManagementModel")
self.processEvents()
qmlRegisterType(DiscoveredPrintersModel, "Cura", 1, 0, "DiscoveredPrintersModel")
qmlRegisterType(DiscoveredCloudPrintersModel, "Cura", 1, 7, "DiscoveredCloudPrintersModel")
qmlRegisterSingletonType(QualityProfilesDropDownMenuModel, "Cura", 1, 0,
self.getQualityProfilesDropDownMenuModel, "QualityProfilesDropDownMenuModel")
qmlRegisterSingletonType(CustomQualityProfilesDropDownMenuModel, "Cura", 1, 0,
self.getCustomQualityProfilesDropDownMenuModel, "CustomQualityProfilesDropDownMenuModel")
qmlRegisterSingletonType(QualityProfilesDropDownMenuModel, "Cura", 1, 0, self.getQualityProfilesDropDownMenuModelWrapper, "QualityProfilesDropDownMenuModel")
qmlRegisterSingletonType(CustomQualityProfilesDropDownMenuModel, "Cura", 1, 0, self.getCustomQualityProfilesDropDownMenuModelWrapper, "CustomQualityProfilesDropDownMenuModel")
qmlRegisterType(NozzleModel, "Cura", 1, 0, "NozzleModel")
qmlRegisterType(IntentModel, "Cura", 1, 6, "IntentModel")
qmlRegisterType(IntentCategoryModel, "Cura", 1, 6, "IntentCategoryModel")
@ -1372,7 +1458,11 @@ class CuraApplication(QtApplication):
self._scene_bounding_box = scene_bounding_box
self.sceneBoundingBoxChanged.emit()
self._platform_activity = True if count > 0 else False
if count > 0:
self._platform_activity = True
else:
ProjectOutputDevice.setLastOutputName(None)
self._platform_activity = False
self.activityChanged.emit()
@pyqtSlot()
@ -1536,7 +1626,7 @@ class CuraApplication(QtApplication):
if not nodes:
return
objects_in_filename = {} # type: Dict[str, List[CuraSceneNode]]
objects_in_filename: Dict[str, List[CuraSceneNode]] = {}
for node in nodes:
mesh_data = node.getMeshData()
if mesh_data:
@ -1550,15 +1640,14 @@ class CuraApplication(QtApplication):
Logger.log("w", "Unable to reload data because we don't have a filename.")
for file_name, nodes in objects_in_filename.items():
for node in nodes:
file_path = os.path.normpath(os.path.dirname(file_name))
job = ReadMeshJob(file_name, add_to_recent_files = file_path != tempfile.gettempdir()) # Don't add temp files to the recent files list
job._node = node # type: ignore
job.finished.connect(self._reloadMeshFinished)
if has_merged_nodes:
job.finished.connect(self.updateOriginOfMergedMeshes)
job.start()
file_path = os.path.normpath(os.path.dirname(file_name))
job = ReadMeshJob(file_name,
add_to_recent_files=file_path != tempfile.gettempdir()) # Don't add temp files to the recent files list
job._nodes = nodes # type: ignore
job.finished.connect(self._reloadMeshFinished)
if has_merged_nodes:
job.finished.connect(self.updateOriginOfMergedMeshes)
job.start()
@pyqtSlot("QStringList")
def setExpandedCategories(self, categories: List[str]) -> None:
@ -1693,8 +1782,12 @@ class CuraApplication(QtApplication):
Selection.remove(node)
Selection.add(group_node)
all_nodes = self.getObjectsModel().getNodes()
PrintOrderManager.updatePrintOrdersAfterGroupOperation(all_nodes, group_node, selected_nodes)
@pyqtSlot()
def ungroupSelected(self) -> None:
all_nodes = self.getObjectsModel().getNodes()
selected_objects = Selection.getAllSelectedObjects().copy()
for node in selected_objects:
if node.callDecoration("isGroup"):
@ -1702,21 +1795,30 @@ class CuraApplication(QtApplication):
group_parent = node.getParent()
children = node.getChildren().copy()
for child in children:
# Ungroup only 1 level deep
if child.getParent() != node:
continue
# Ungroup only 1 level deep
children_to_ungroup = list(filter(lambda child: child.getParent() == node, children))
for child in children_to_ungroup:
# Set the parent of the children to the parent of the group-node
op.addOperation(SetParentOperation(child, group_parent))
# Add all individual nodes to the selection
Selection.add(child)
PrintOrderManager.updatePrintOrdersAfterUngroupOperation(all_nodes, node, children_to_ungroup)
op.push()
# Note: The group removes itself from the scene once all its children have left it,
# see GroupDecorator._onChildrenChanged
def _onPrintOrderChanged(self) -> None:
# update object list
scene = self.getController().getScene()
scene.sceneChanged.emit(scene.getRoot())
# reset if already was sliced
Application.getInstance().getBackend().needsSlicing()
Application.getInstance().getBackend().tickle()
def _createSplashScreen(self) -> Optional[CuraSplashScreen.CuraSplashScreen]:
if self._is_headless:
return None
@ -1730,9 +1832,10 @@ class CuraApplication(QtApplication):
def _reloadMeshFinished(self, job) -> None:
"""
Function called whenever a ReadMeshJob finishes in the background. It reloads a specific node object in the
Function called when a ReadMeshJob finishes reloading a file in the background; it then updates the node objects in the
scene from its source file. The function gets all the nodes that exist in the file through the job result, and
then finds the scene node that it wants to refresh by its object id. Each job refreshes only one node.
then finds the scene nodes that need to be refreshed by their name. Each job refreshes all nodes of a file.
Nodes that are not present in the updated file are kept in the scene.
:param job: The :py:class:`Uranium.UM.ReadMeshJob.ReadMeshJob` running in the background that reads all the
meshes in a file
@ -1742,25 +1845,93 @@ class CuraApplication(QtApplication):
if len(job_result) == 0:
Logger.log("e", "Reloading the mesh failed.")
return
object_found = False
mesh_data = None
renamed_nodes = {} # type: Dict[str, int]
# Find the node to be refreshed based on its id
for job_result_node in job_result:
if job_result_node.getId() == job._node.getId():
mesh_data = job_result_node.getMeshData()
object_found = True
break
if not object_found:
Logger.warning("The object with id {} no longer exists! Keeping the old version in the scene.".format(job_result_node.getId()))
return
if not mesh_data:
Logger.log("w", "Could not find a mesh in reloaded node.")
return
job._node.setMeshData(mesh_data)
mesh_data = job_result_node.getMeshData()
if not mesh_data:
Logger.log("w", "Could not find a mesh in reloaded node.")
continue
# Solves issues with object naming
result_node_name = job_result_node.getName()
if not result_node_name:
result_node_name = os.path.basename(mesh_data.getFileName())
if result_node_name in renamed_nodes: # objects may get renamed by ObjectsModel._renameNodes() when loaded
renamed_nodes[result_node_name] += 1
result_node_name = "{0}({1})".format(result_node_name, renamed_nodes[result_node_name])
else:
renamed_nodes[job_result_node.getName()] = 0
# Find the matching scene node to replace
scene_node = None
for replaced_node in job._nodes:
if replaced_node.getName() == result_node_name:
scene_node = replaced_node
break
if scene_node:
scene_node.setMeshData(mesh_data)
else:
# Current node is a new one in the file, or its name has changed
# TODO: Load this mesh into the scene. Also alter the "_reloadJobFinished" action in UM.Scene
Logger.log("w", "Could not find matching node for object '{0}' in the scene.".format(result_node_name))
def _openFile(self, filename):
self.readLocalFile(QUrl.fromLocalFile(filename))
def _openUrl(self, url: QUrl) -> None:
if url.scheme() not in self._supported_url_schemes:
# only handle cura:// and slicer:// url schemes
return
match url.host() + url.path():
case "open" | "open/":
query = QUrlQuery(url.query())
model_url = QUrl(query.queryItemValue("file", options=QUrl.ComponentFormattingOption.FullyDecoded))
def on_finish(response):
content_disposition_header_key = QByteArray("content-disposition".encode())
if not response.hasRawHeader(content_disposition_header_key):
Logger.log("w", "Could not find Content-Disposition header in response from {0}".format(
model_url.url()))
# Use the last part of the url as the filename, and assume it is an STL file
filename = model_url.path().split("/")[-1] + ".stl"
else:
# The Content-Disposition header is in the format
# ```
# attachment; filename="[FILENAME]"
# ```
# Use a regex to extract the filename
content_disposition = str(response.rawHeader(content_disposition_header_key).data(),
encoding='utf-8')
content_disposition_match = re.match(r'attachment; filename="(?P<filename>.*)"',
content_disposition)
assert content_disposition_match is not None
filename = content_disposition_match.group("filename")
tmp = tempfile.NamedTemporaryFile(suffix=filename, delete=False)
with open(tmp.name, "wb") as f:
f.write(response.readAll())
self.readLocalFile(QUrl.fromLocalFile(tmp.name), add_to_recent_files=False)
def on_error(*args, **kwargs):
Logger.log("w", "Could not download file from {0}".format(model_url.url()))
Message("Could not download file: " + str(model_url.url()),
title= "Loading Model failed",
message_type=Message.MessageType.ERROR).show()
return
self.getHttpRequestManager().get(
model_url.url(),
callback=on_finish,
error_callback=on_error,
)
case path:
Logger.log("w", "Unsupported url scheme path: {0}".format(path))
def _addProfileReader(self, profile_reader):
# TODO: Add the profile reader to the list of plug-ins that can be used when importing profiles.
pass
@ -1811,6 +1982,17 @@ class CuraApplication(QtApplication):
openProjectFile = pyqtSignal(QUrl, bool, arguments = ["project_file", "add_to_recent_files"]) # Emitted when a project file is about to open.
@pyqtSlot(QUrl, bool)
def readLocalUcpFile(self, file: QUrl, add_to_recent_files: bool = True):
file_name = QUrl(file).toLocalFile()
workspace_reader = self.getWorkspaceFileHandler()
if workspace_reader is None:
Logger.warning(f"Workspace reader not found, cannot read file {file_name}.")
return
workspace_reader.readLocalFile(file, add_to_recent_files)
@pyqtSlot(QUrl, str, bool)
@pyqtSlot(QUrl, str)
@pyqtSlot(QUrl)
@ -1824,7 +2006,7 @@ class CuraApplication(QtApplication):
Logger.log("i", "Attempting to read file %s", file.toString())
if not file.isValid():
return
self._open_project_mode = project_mode
scene = self.getController().getScene()
for node in DepthFirstIterator(scene.getRoot()):
@ -1834,16 +2016,16 @@ class CuraApplication(QtApplication):
is_project_file = self.checkIsValidProjectFile(file)
if project_mode is None:
project_mode = self.getPreferences().getValue("cura/choice_on_open_project")
if self._open_project_mode is None:
self._open_project_mode = self.getPreferences().getValue("cura/choice_on_open_project")
if is_project_file and project_mode == "open_as_project":
if is_project_file and self._open_project_mode == "open_as_project":
# open as project immediately without presenting a dialog
workspace_handler = self.getWorkspaceFileHandler()
workspace_handler.readLocalFile(file, add_to_recent_files_hint = add_to_recent_files)
return
if is_project_file and project_mode == "always_ask":
if is_project_file and self._open_project_mode == "always_ask":
# present a dialog asking to open as project or import models
self.callLater(self.openProjectFile.emit, file, add_to_recent_files)
return
@ -1943,7 +2125,8 @@ class CuraApplication(QtApplication):
node.scale(original_node.getScale())
node.setSelectable(True)
node.setName(os.path.basename(file_name))
if not node.getName():
node.setName(os.path.basename(file_name))
self.getBuildVolume().checkBoundsAndUpdate(node)
is_non_sliceable = "." + file_extension in self._non_sliceable_extensions
@ -1977,8 +2160,11 @@ class CuraApplication(QtApplication):
center_y = 0
node.translate(Vector(0, center_y, 0))
nodes_to_arrange.append(node)
# If the file is a project, and models are to be loaded from that project,
# the models inside the file should be arranged on the build plate.
elif self._open_project_mode == "open_as_model":
nodes_to_arrange.append(node)
# This node is deep copied from some other node which already has a BuildPlateDecorator, but the deepcopy
# of BuildPlateDecorator produces one that's associated with build plate -1. So, here we need to check if
@ -2012,6 +2198,12 @@ class CuraApplication(QtApplication):
def addNonSliceableExtension(self, extension):
self._non_sliceable_extensions.append(extension)
@pyqtSlot(str, result = bool)
def isProjectUcp(self, file_url) -> bool:
file_path = QUrl(file_url).toLocalFile()
workspace_reader = self.getWorkspaceFileHandler().getReaderForFile(file_path)
return workspace_reader.getIsProjectUcp()
@pyqtSlot(str, result=bool)
def checkIsValidProjectFile(self, file_url):
"""Checks if the given file URL is a valid project file. """
@ -2021,6 +2213,8 @@ class CuraApplication(QtApplication):
if workspace_reader is None:
return False # non-project files won't get a reader
try:
if workspace_reader.getPluginId() == "3MFReader":
workspace_reader.clearOpenAsUcp()
result = workspace_reader.preRead(file_path, show_dialog=False)
return result == WorkspaceReader.PreReadResult.accepted
except:
@ -2130,3 +2324,11 @@ class CuraApplication(QtApplication):
@pyqtProperty(bool, constant=True)
def isEnterprise(self) -> bool:
return ApplicationMetadata.IsEnterpriseVersion
@pyqtProperty("QVariant", constant=True)
def conanInstalls(self) -> Dict[str, Dict[str, str]]:
return self._conan_installs
@pyqtProperty("QVariant", constant=True)
def pythonInstalls(self) -> Dict[str, Dict[str, str]]:
return self._python_installs

cura/HitChecker.py (new file, 88 lines)
@ -0,0 +1,88 @@
from typing import List, Dict
from cura.Scene.CuraSceneNode import CuraSceneNode
class HitChecker:
"""Checks if nodes can be printed without causing any collisions and interference"""
def __init__(self, nodes: List[CuraSceneNode]) -> None:
self._hit_map = self._buildHitMap(nodes)
def anyTwoNodesBlockEachOther(self, nodes: List[CuraSceneNode]) -> bool:
"""Returns True if any 2 nodes block each other"""
for a in nodes:
for b in nodes:
if self._hit_map[a][b] and self._hit_map[b][a]:
return True
return False
def canPrintBefore(self, node: CuraSceneNode, other_nodes: List[CuraSceneNode]) -> bool:
"""Returns True if node doesn't block other_nodes and can be printed before them"""
no_hits = all(not self._hit_map[node][other_node] for other_node in other_nodes)
return no_hits
def canPrintAfter(self, node: CuraSceneNode, other_nodes: List[CuraSceneNode]) -> bool:
"""Returns True if node doesn't hit other nodes and can be printed after them"""
no_hits = all(not self._hit_map[other_node][node] for other_node in other_nodes)
return no_hits
def calculateScore(self, a: CuraSceneNode, b: CuraSceneNode) -> int:
"""Calculate score simply sums the number of other objects it 'blocks'
:param a: node
:param b: node
:return: sum of the number of other objects
"""
score_a = sum(self._hit_map[a].values())
score_b = sum(self._hit_map[b].values())
return score_a - score_b
def canPrintNodesInProvidedOrder(self, ordered_nodes: List[CuraSceneNode]) -> bool:
"""Returns True If nodes don't have any hits in provided order"""
for node_index, node in enumerate(ordered_nodes):
nodes_before = ordered_nodes[:node_index - 1] if node_index - 1 >= 0 else []
nodes_after = ordered_nodes[node_index + 1:] if node_index + 1 < len(ordered_nodes) else []
if not self.canPrintBefore(node, nodes_after) or not self.canPrintAfter(node, nodes_before):
return False
return True
@staticmethod
def _buildHitMap(nodes: List[CuraSceneNode]) -> Dict[CuraSceneNode, Dict[CuraSceneNode, bool]]:
"""Pre-computes all hits between all objects
:param nodes: nodes that need to be checked for collisions
:return: dictionary where hit_map[node1][node2] is False if node1 can be printed before node2
"""
hit_map = {j: {i: HitChecker._checkHit(j, i) for i in nodes} for j in nodes}
return hit_map
@staticmethod
def _checkHit(a: CuraSceneNode, b: CuraSceneNode) -> bool:
"""Checks if a can be printed before b
:param a: node
:param b: node
:return: False if a can be printed before b
"""
if a == b:
return False
a_hit_hull = a.callDecoration("getConvexHullBoundary")
b_hit_hull = b.callDecoration("getConvexHullHeadFull")
overlap = a_hit_hull.intersectsPolygon(b_hit_hull)
if overlap:
return True
# Adhesion areas must never overlap, regardless of printing order
# This would cause over-extrusion
a_hit_hull = a.callDecoration("getAdhesionArea")
b_hit_hull = b.callDecoration("getAdhesionArea")
overlap = a_hit_hull.intersectsPolygon(b_hit_hull)
if overlap:
return True
else:
return False
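A minimal hedged usage sketch for the class above; it assumes the nodes already carry the convex-hull decorations the checker queries, and the greedy ordering is illustrative rather than the actual print-order algorithm:
```python
# from cura.HitChecker import HitChecker

def pick_print_order(nodes):
    """Greedily pick a sequence in which each node can be printed before the ones left after it."""
    checker = HitChecker(nodes)
    remaining = list(nodes)
    order = []
    while remaining:
        for node in remaining:
            others = [n for n in remaining if n is not node]
            if checker.canPrintBefore(node, others):
                order.append(node)
                remaining.remove(node)
                break
        else:
            return None  # some pair of nodes blocks each other; no valid order exists
    return order if checker.canPrintNodesInProvidedOrder(order) else None
```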

@ -1,5 +1,6 @@
# Copyright (c) 2019 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
import math
import numpy
from typing import Optional, cast
@ -66,7 +67,7 @@ class LayerPolygon:
# Buffering the colors shouldn't be necessary as it is not
# re-used and can save a lot of memory usage.
self._color_map = LayerPolygon.getColorMap()
self._colors = self._color_map[self._types] # type: numpy.ndarray
self._colors: numpy.ndarray = self._color_map[self._types]
# When type is used as index returns true if type == LayerPolygon.InfillType
# or type == LayerPolygon.SkinType
@ -74,8 +75,8 @@ class LayerPolygon:
# Should be generated in better way, not hardcoded.
self._is_infill_or_skin_type_map = numpy.array([0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0], dtype=bool)
self._build_cache_line_mesh_mask = None # type: Optional[numpy.ndarray]
self._build_cache_needed_points = None # type: Optional[numpy.ndarray]
self._build_cache_line_mesh_mask: Optional[numpy.ndarray] = None
self._build_cache_needed_points: Optional[numpy.ndarray] = None
def buildCache(self) -> None:
# For the line mesh we do not draw Infill or Jumps. Therefore those lines are filtered out.
@ -186,6 +187,11 @@ class LayerPolygon:
def types(self):
return self._types
@property
def lineLengths(self):
data_array = numpy.array(self._data)
return numpy.linalg.norm(data_array[1:] - data_array[:-1], axis=1)
@property
def data(self):
return self._data
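A quick worked example of the lineLengths property above, with made-up coordinates:
```python
import numpy

data = numpy.array([[0.0, 0.0, 0.0], [3.0, 4.0, 0.0], [3.0, 4.0, 12.0]])
lengths = numpy.linalg.norm(data[1:] - data[:-1], axis=1)
print(lengths)  # [ 5. 12.]  -- one length per consecutive pair of points
```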

@ -49,7 +49,7 @@ class MachineErrorChecker(QObject):
self._keys_to_check = set() # type: Set[str]
self._num_keys_to_check_per_update = 10
self._num_keys_to_check_per_update = 1
def initialize(self) -> None:
self._error_check_timer.timeout.connect(self._rescheduleCheck)

@ -21,18 +21,25 @@ class MaterialNode(ContainerNode):
Its subcontainers are quality profiles.
"""
def __init__(self, container_id: str, variant: "VariantNode") -> None:
def __init__(self, container_id: str, variant: "VariantNode", *, container: ContainerInterface = None) -> None:
super().__init__(container_id)
self.variant = variant
self.qualities = {} # type: Dict[str, QualityNode] # Mapping container IDs to quality profiles.
self.materialChanged = Signal() # Triggered when the material is removed or its metadata is updated.
container_registry = ContainerRegistry.getInstance()
my_metadata = container_registry.findContainersMetadata(id = container_id)[0]
self.base_file = my_metadata["base_file"]
self.material_type = my_metadata["material"]
self.brand = my_metadata["brand"]
self.guid = my_metadata["GUID"]
if container is not None:
self.base_file = container.getMetaDataEntry("base_file")
self.material_type = container.getMetaDataEntry("material")
self.brand = container.getMetaDataEntry("brand")
self.guid = container.getMetaDataEntry("GUID")
else:
my_metadata = container_registry.findContainersMetadata(id = container_id)[0]
self.base_file = my_metadata["base_file"]
self.material_type = my_metadata["material"]
self.brand = my_metadata["brand"]
self.guid = my_metadata["GUID"]
self._loadAll()
container_registry.containerRemoved.connect(self._onRemoved)
container_registry.containerMetaDataChanged.connect(self._onMetadataChanged)

@ -54,10 +54,7 @@ class ActiveIntentQualitiesModel(ListModel):
self._updateDelayed()
def _update(self):
active_extruder_stack = cura.CuraApplication.CuraApplication.getInstance().getMachineManager().activeStack
if active_extruder_stack:
self._intent_category = active_extruder_stack.intent.getMetaDataEntry("intent_category", "")
self._intent_category = IntentManager.getInstance().currentIntentCategory
new_items: List[Dict[str, Any]] = []
global_stack = cura.CuraApplication.CuraApplication.getInstance().getGlobalContainerStack()
if not global_stack:

@ -51,6 +51,9 @@ class CompatibleMachineModel(ListModel):
for output_device in machine_manager.printerOutputDevices:
for printer in output_device.printers:
extruder_configs = dict()
# If the printer name already exists in the queue, skip it
if printer.name in [item["name"] for item in self.items]:
continue
# initialize & add current active material:
for extruder in printer.extruders:

@ -227,7 +227,7 @@ class ExtrudersModel(ListModel):
"material_brand": "",
"color_name": "",
"material_type": "",
"material_label": ""
"material_name": ""
}
items.append(item)
if self._items != items:

@ -61,6 +61,11 @@ class IntentCategoryModel(ListModel):
"The annealing profile requires post-processing in an oven after the print is finished. This profile retains the dimensional accuracy of the printed part after annealing and improves strength, stiffness, and thermal resistance.")
}
cls._translations["solid"] = {
"name": catalog.i18nc("@label", "Solid"),
"description": catalog.i18nc("@text",
"A highly dense and strong part but at a slower print time. Great for functional parts.")
}
return cls._translations
def __init__(self, intent_category: str) -> None:

@ -57,8 +57,9 @@ class IntentSelectionModel(ListModel):
self._onChange()
_default_intent_categories = ["default", "visual", "engineering", "quick", "annealing"]
_icons = {"default": "GearCheck", "visual": "Visual", "engineering": "Nut", "quick": "SpeedOMeter", "annealing": "Anneal"}
_default_intent_categories = ["default", "visual", "engineering", "quick", "annealing", "solid"]
_icons = {"default": "GearCheck", "visual": "Visual", "engineering": "Nut", "quick": "SpeedOMeter",
"annealing": "Anneal", "solid": "Hammer"}
def _onContainerChange(self, container: ContainerInterface) -> None:
"""Updates the list of intents if an intent profile was added or removed."""

@ -24,3 +24,8 @@ intent_translations["quick"] = {
"name": catalog.i18nc("@label", "Draft"),
"description": catalog.i18nc("@text", "The draft profile is designed to print initial prototypes and concept validation with the intent of significant print time reduction.")
}
intent_translations["solid"] = {
"name": catalog.i18nc("@label", "Solid"),
"description": catalog.i18nc("@text",
"A highly dense and strong part but at a slower print time. Great for functional parts.")
}

View File

@ -5,7 +5,7 @@
# online cloud connected printers are represented within this ListModel. Additional information such as the number of
# connected printers for each printer type is gathered.
from typing import Optional, List, cast
from typing import Optional, List, cast, Dict, Any
from PyQt6.QtCore import Qt, QTimer, QObject, pyqtSlot, pyqtProperty, pyqtSignal
@ -30,10 +30,10 @@ class MachineListModel(ListModel):
ComponentTypeRole = Qt.ItemDataRole.UserRole + 8
IsNetworkedMachineRole = Qt.ItemDataRole.UserRole + 9
def __init__(self, parent: Optional[QObject] = None, machines_filter: List[GlobalStack] = None, listenToChanges: bool = True) -> None:
def __init__(self, parent: Optional[QObject] = None, machines_filter: List[GlobalStack] = None, listenToChanges: bool = True, showCloudPrinters: bool = False) -> None:
super().__init__(parent)
self._show_cloud_printers = False
self._show_cloud_printers = showCloudPrinters
self._machines_filter = machines_filter
self._catalog = i18nCatalog("cura")
@ -159,3 +159,8 @@ class MachineListModel(ListModel):
"machineCount": machine_count,
"catergory": "connected" if is_online else "other",
})
def getItems(self) -> Dict[str, Any]:
if self.count > 0:
return self.items
return {}

View File

@ -148,7 +148,7 @@ class VariantNode(ContainerNode):
if "empty_material" in self.materials:
del self.materials["empty_material"]
self.materials[base_file] = MaterialNode(container.getId(), variant = self)
self.materials[base_file] = MaterialNode(container.getId(), variant = self, container = container)
self.materials[base_file].materialChanged.connect(self.materialsChanged)
self.materialsChanged.emit(self.materials[base_file])

View File

@ -16,6 +16,7 @@ from UM.TaskManagement.HttpRequestManager import HttpRequestManager # To downlo
catalog = i18nCatalog("cura")
TOKEN_TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%S"
REQUEST_TIMEOUT = 5 # Seconds
class AuthorizationHelpers:
@ -40,6 +41,7 @@ class AuthorizationHelpers:
"""
data = {
"client_id": self._settings.CLIENT_ID if self._settings.CLIENT_ID is not None else "",
"client_secret": self._settings.CLIENT_SECRET if self._settings.CLIENT_SECRET is not None else "",
"redirect_uri": self._settings.CALLBACK_URL if self._settings.CALLBACK_URL is not None else "",
"grant_type": "authorization_code",
"code": authorization_code,
@ -52,7 +54,8 @@ class AuthorizationHelpers:
data = urllib.parse.urlencode(data).encode("UTF-8"),
headers_dict = headers,
callback = lambda response: self.parseTokenResponse(response, callback),
error_callback = lambda response, _: self.parseTokenResponse(response, callback)
error_callback = lambda response, _: self.parseTokenResponse(response, callback),
timeout = REQUEST_TIMEOUT
)
def getAccessTokenUsingRefreshToken(self, refresh_token: str, callback: Callable[[AuthenticationResponse], None]) -> None:
@ -64,6 +67,7 @@ class AuthorizationHelpers:
Logger.log("d", "Refreshing the access token for [%s]", self._settings.OAUTH_SERVER_URL)
data = {
"client_id": self._settings.CLIENT_ID if self._settings.CLIENT_ID is not None else "",
"client_secret": self._settings.CLIENT_SECRET if self._settings.CLIENT_SECRET is not None else "",
"redirect_uri": self._settings.CALLBACK_URL if self._settings.CALLBACK_URL is not None else "",
"grant_type": "refresh_token",
"refresh_token": refresh_token,
@ -75,7 +79,9 @@ class AuthorizationHelpers:
data = urllib.parse.urlencode(data).encode("UTF-8"),
headers_dict = headers,
callback = lambda response: self.parseTokenResponse(response, callback),
error_callback = lambda response, _: self.parseTokenResponse(response, callback)
error_callback = lambda response, _: self.parseTokenResponse(response, callback),
urgent = True,
timeout = REQUEST_TIMEOUT
)
def parseTokenResponse(self, token_response: QNetworkReply, callback: Callable[[AuthenticationResponse], None]) -> None:
@ -120,7 +126,8 @@ class AuthorizationHelpers:
check_token_url,
headers_dict = headers,
callback = lambda reply: self._parseUserProfile(reply, success_callback, failed_callback),
error_callback = lambda _, _2: failed_callback() if failed_callback is not None else None
error_callback = lambda _, _2: failed_callback() if failed_callback is not None else None,
timeout = REQUEST_TIMEOUT
)
def _parseUserProfile(self, reply: QNetworkReply, success_callback: Optional[Callable[[UserProfile], None]], failed_callback: Optional[Callable[[], None]] = None) -> None:

View File

@ -1,4 +1,4 @@
# Copyright (c) 2021 Ultimaker B.V.
# Copyright (c) 2024 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.
import json
@ -6,13 +6,14 @@ from datetime import datetime, timedelta
from typing import Callable, Dict, Optional, TYPE_CHECKING, Union
from urllib.parse import urlencode, quote_plus
from PyQt6.QtCore import QUrl
from PyQt6.QtCore import QUrl, QTimer
from PyQt6.QtGui import QDesktopServices
from UM.Logger import Logger
from UM.Message import Message
from UM.Signal import Signal
from UM.i18n import i18nCatalog
from UM.TaskManagement.HttpRequestManager import HttpRequestManager # To download log-in tokens.
from cura.OAuth2.AuthorizationHelpers import AuthorizationHelpers, TOKEN_TIMESTAMP_FORMAT
from cura.OAuth2.LocalAuthorizationServer import LocalAuthorizationServer
from cura.OAuth2.Models import AuthenticationResponse, BaseModel
@ -25,26 +26,32 @@ if TYPE_CHECKING:
MYCLOUD_LOGOFF_URL = "https://account.ultimaker.com/logoff?utm_source=cura&utm_medium=software&utm_campaign=change-account-before-adding-printers"
REFRESH_TOKEN_MAX_RETRIES = 15
REFRESH_TOKEN_RETRY_INTERVAL = 1000
class AuthorizationService:
"""The authorization service is responsible for handling the login flow, storing user credentials and providing
account information.
"""
# Emit signal when authentication is completed.
onAuthStateChanged = Signal()
def __init__(self,
settings: "OAuth2Settings",
preferences: Optional["Preferences"] = None,
get_user_profile: bool = True) -> None:
# Emit signal when authentication is completed.
self.onAuthStateChanged = Signal()
# Emit signal when authentication failed.
onAuthenticationError = Signal()
# Emit signal when authentication failed.
self.onAuthenticationError = Signal()
accessTokenChanged = Signal()
self.accessTokenChanged = Signal()
def __init__(self, settings: "OAuth2Settings", preferences: Optional["Preferences"] = None) -> None:
self._settings = settings
self._auth_helpers = AuthorizationHelpers(settings)
self._auth_url = "{}/authorize".format(self._settings.OAUTH_SERVER_URL)
self._auth_data: Optional[AuthenticationResponse] = None
self._user_profile: Optional["UserProfile"] = None
self._get_user_profile: bool = get_user_profile
self._preferences = preferences
self._server = LocalAuthorizationServer(self._auth_helpers, self._onAuthStateChanged, daemon=True)
self._currently_refreshing_token = False # Whether we are currently in the process of refreshing auth. Don't make new requests while busy.
@ -53,6 +60,12 @@ class AuthorizationService:
self.onAuthStateChanged.connect(self._authChanged)
self._refresh_token_retries = 0
self._refresh_token_retry_timer = QTimer()
self._refresh_token_retry_timer.setInterval(REFRESH_TOKEN_RETRY_INTERVAL)
self._refresh_token_retry_timer.setSingleShot(True)
self._refresh_token_retry_timer.timeout.connect(self.refreshAccessToken)
def _authChanged(self, logged_in):
if logged_in and self._unable_to_get_data_message is not None:
self._unable_to_get_data_message.hide()
@ -163,16 +176,29 @@ class AuthorizationService:
return
def process_auth_data(response: AuthenticationResponse) -> None:
self._currently_refreshing_token = False
if response.success:
self._refresh_token_retries = 0
self._storeAuthData(response)
HttpRequestManager.getInstance().setDelayRequests(False)
self.onAuthStateChanged.emit(logged_in = True)
else:
Logger.warning("Failed to get a new access token from the server.")
self.onAuthStateChanged.emit(logged_in = False)
if self._refresh_token_retries >= REFRESH_TOKEN_MAX_RETRIES:
self._refresh_token_retries = 0
Logger.warning("Failed to get a new access token from the server, giving up.")
HttpRequestManager.getInstance().setDelayRequests(False)
self.onAuthStateChanged.emit(logged_in = False)
else:
# Retry a bit later, network may be offline right now and will hopefully be back soon
Logger.warning("Failed to get a new access token from the server, retrying later.")
self._refresh_token_retries += 1
self._refresh_token_retry_timer.start()
if self._currently_refreshing_token:
Logger.debug("Was already busy refreshing token. Do not start a new request.")
return
HttpRequestManager.getInstance().setDelayRequests(True)
self._currently_refreshing_token = True
self._auth_helpers.getAccessTokenUsingRefreshToken(self._auth_data.refresh_token, process_auth_data)
@ -279,7 +305,8 @@ class AuthorizationService:
message_type = Message.MessageType.ERROR)
Logger.warning("Unable to get user profile using auth data from preferences.")
self._unable_to_get_data_message.show()
self.getUserProfile(callback)
if self._get_user_profile:
self.getUserProfile(callback)
except (ValueError, TypeError):
Logger.logException("w", "Could not load auth data from preferences")
@ -294,7 +321,8 @@ class AuthorizationService:
self._auth_data = auth_data
self._currently_refreshing_token = False
if auth_data:
self.getUserProfile()
if self._get_user_profile:
self.getUserProfile()
self._preferences.setValue(self._settings.AUTH_DATA_PREFERENCE_KEY, json.dumps(auth_data.dump()))
else:
Logger.log("d", "Clearing the user profile")

View File

@ -16,6 +16,7 @@ class OAuth2Settings(BaseModel):
CALLBACK_PORT = None # type: Optional[int]
OAUTH_SERVER_URL = None # type: Optional[str]
CLIENT_ID = None # type: Optional[str]
CLIENT_SECRET = None # type: Optional[str]
CLIENT_SCOPES = None # type: Optional[str]
CALLBACK_URL = None # type: Optional[str]
AUTH_DATA_PREFERENCE_KEY = "" # type: str

View File

@ -7,6 +7,11 @@ from UM.Scene.Iterator import Iterator
from UM.Scene.SceneNode import SceneNode
from functools import cmp_to_key
from cura.HitChecker import HitChecker
from cura.PrintOrderManager import PrintOrderManager
from cura.Scene.CuraSceneNode import CuraSceneNode
class OneAtATimeIterator(Iterator.Iterator):
"""Iterator that returns a list of nodes in the order that they need to be printed
@ -16,8 +21,6 @@ class OneAtATimeIterator(Iterator.Iterator):
def __init__(self, scene_node) -> None:
super().__init__(scene_node) # Call super to make multiple inheritance work.
self._hit_map = [[]] # type: List[List[bool]] # For each node, which other nodes this hits. A grid of booleans on which nodes hit which.
self._original_node_list = [] # type: List[SceneNode] # The nodes that need to be checked for collisions.
def _fillStack(self) -> None:
"""Fills the ``_node_stack`` with a list of scene nodes that need to be printed in order. """
@ -38,104 +41,50 @@ class OneAtATimeIterator(Iterator.Iterator):
self._node_stack = node_list[:]
return
# Copy the list
self._original_node_list = node_list[:]
hit_checker = HitChecker(node_list)
# Initialise the hit map (pre-compute all hits between all objects)
self._hit_map = [[self._checkHit(i, j) for i in node_list] for j in node_list]
if PrintOrderManager.isUserDefinedPrintOrderEnabled():
self._node_stack = self._getNodesOrderedByUser(hit_checker, node_list)
else:
self._node_stack = self._getNodesOrderedAutomatically(hit_checker, node_list)
# Check if we have to files that block each other. If this is the case, there is no solution!
for a in range(0, len(node_list)):
for b in range(0, len(node_list)):
if a != b and self._hit_map[a][b] and self._hit_map[b][a]:
return
# Update print orders so that the user can first try to arrange the nodes automatically,
# and if the result is not satisfactory they can switch to manual mode and change it.
for index, node in enumerate(self._node_stack):
node.printOrder = index + 1
@staticmethod
def _getNodesOrderedByUser(hit_checker: HitChecker, node_list: List[CuraSceneNode]) -> List[CuraSceneNode]:
nodes_ordered_by_user = sorted(node_list, key=lambda n: n.printOrder)
if hit_checker.canPrintNodesInProvidedOrder(nodes_ordered_by_user):
return nodes_ordered_by_user
return [] # No solution
@staticmethod
def _getNodesOrderedAutomatically(hit_checker: HitChecker, node_list: List[CuraSceneNode]) -> List[CuraSceneNode]:
# Check if we have two files that block each other. If this is the case, there is no solution!
if hit_checker.anyTwoNodesBlockEachOther(node_list):
return [] # No solution
# Sort the original list so that items that block the most other objects are at the beginning.
# This does not decrease the worst case running time, but should improve it in most cases.
sorted(node_list, key = cmp_to_key(self._calculateScore))
node_list = sorted(node_list, key = cmp_to_key(hit_checker.calculateScore))
todo_node_list = [_ObjectOrder([], node_list)]
while len(todo_node_list) > 0:
current = todo_node_list.pop()
for node in current.todo:
# Check if the object can be placed with what we have and still allows for a solution in the future
if not self._checkHitMultiple(node, current.order) and not self._checkBlockMultiple(node, current.todo):
if hit_checker.canPrintAfter(node, current.order) and hit_checker.canPrintBefore(node, current.todo):
# We found a possible result. Create new todo & order list.
new_todo_list = current.todo[:]
new_todo_list.remove(node)
new_order = current.order[:] + [node]
if len(new_todo_list) == 0:
# We have no more nodes to check, so quit looking.
self._node_stack = new_order
return
return new_order # Solution found!
todo_node_list.append(_ObjectOrder(new_order, new_todo_list))
self._node_stack = [] #No result found!
# Check if first object can be printed before the provided list (using the hit map)
def _checkHitMultiple(self, node: SceneNode, other_nodes: List[SceneNode]) -> bool:
node_index = self._original_node_list.index(node)
for other_node in other_nodes:
other_node_index = self._original_node_list.index(other_node)
if self._hit_map[node_index][other_node_index]:
return True
return False
def _checkBlockMultiple(self, node: SceneNode, other_nodes: List[SceneNode]) -> bool:
"""Check for a node whether it hits any of the other nodes.
:param node: The node to check whether it collides with the other nodes.
:param other_nodes: The nodes to check for collisions.
:return: returns collision between nodes
"""
node_index = self._original_node_list.index(node)
for other_node in other_nodes:
other_node_index = self._original_node_list.index(other_node)
if self._hit_map[other_node_index][node_index] and node_index != other_node_index:
return True
return False
def _calculateScore(self, a: SceneNode, b: SceneNode) -> int:
"""Calculate score simply sums the number of other objects it 'blocks'
:param a: node
:param b: node
:return: sum of the number of other objects
"""
score_a = sum(self._hit_map[self._original_node_list.index(a)])
score_b = sum(self._hit_map[self._original_node_list.index(b)])
return score_a - score_b
def _checkHit(self, a: SceneNode, b: SceneNode) -> bool:
"""Checks if a can be printed before b
:param a: node
:param b: node
:return: true if a can be printed before b
"""
if a == b:
return False
a_hit_hull = a.callDecoration("getConvexHullBoundary")
b_hit_hull = b.callDecoration("getConvexHullHeadFull")
overlap = a_hit_hull.intersectsPolygon(b_hit_hull)
if overlap:
return True
# Adhesion areas must never overlap, regardless of printing order
# This would cause over-extrusion
a_hit_hull = a.callDecoration("getAdhesionArea")
b_hit_hull = b.callDecoration("getAdhesionArea")
overlap = a_hit_hull.intersectsPolygon(b_hit_hull)
if overlap:
return True
else:
return False
return [] # No result found!
class _ObjectOrder:
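For readers who want to see the shape of the search that remains here, a self-contained sketch (with a hypothetical can_print_after predicate standing in for the convex-hull checks done by HitChecker) is:

# Illustrative sketch of the one-at-a-time ordering search; the collision predicate is a stand-in.
from typing import Callable, List, Optional

def find_print_order(nodes: List[str],
                     can_print_after: Callable[[str, List[str]], bool]) -> Optional[List[str]]:
    """Return an order in which each node can be printed after the ones before it, or None."""
    todo_states = [([], list(nodes))]  # (already ordered, still to place), like _ObjectOrder
    while todo_states:
        order, todo = todo_states.pop()
        for node in todo:
            if not can_print_after(node, order):
                continue  # printing this node now would collide with what was already printed
            new_todo = [n for n in todo if n != node]
            new_order = order + [node]
            if not new_todo:
                return new_order  # solution found
            todo_states.append((new_order, new_todo))
    return None  # no conflict-free order exists

if __name__ == "__main__":
    must_follow = {"b": {"a"}}  # toy constraint: "b" can only be printed after "a"
    print(find_print_order(["a", "b", "c"],
                           lambda node, before: must_follow.get(node, set()) <= set(before)))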

View File

@ -39,6 +39,11 @@ class PlatformPhysics:
Application.getInstance().getPreferences().addPreference("physics/automatic_push_free", False)
Application.getInstance().getPreferences().addPreference("physics/automatic_drop_down", True)
self._app_all_model_drop = False
def setAppAllModelDropDown(self):
self._app_all_model_drop = True
self._onChangeTimerFinished()
def _onSceneChanged(self, source):
if not source.callDecoration("isSliceable"):
@ -80,9 +85,9 @@ class PlatformPhysics:
# Move it downwards if bottom is above platform
move_vector = Vector()
if node.getSetting(SceneNodeSettings.AutoDropDown, app_automatic_drop_down) and not (node.getParent() and node.getParent().callDecoration("isGroup") or node.getParent() != root) and node.isEnabled(): #If an object is grouped, don't move it down
if (node.getSetting(SceneNodeSettings.AutoDropDown, app_automatic_drop_down) or self._app_all_model_drop) and not (node.getParent() and node.getParent().callDecoration("isGroup") or node.getParent() != root) and node.isEnabled():
z_offset = node.callDecoration("getZOffset") if node.getDecorator(ZOffsetDecorator.ZOffsetDecorator) else 0
move_vector = move_vector.set(y = -bbox.bottom + z_offset)
move_vector = move_vector.set(y=-bbox.bottom + z_offset)
# If there is no convex hull for the node, start calculating it and continue.
if not node.getDecorator(ConvexHullDecorator) and not node.callDecoration("isNonPrintingMesh") and node.callDecoration("getLayerData") is None:
@ -168,6 +173,8 @@ class PlatformPhysics:
op = PlatformPhysicsOperation.PlatformPhysicsOperation(node, move_vector)
op.push()
# Reset the one-shot "drop all models" flag so behaviour falls back to app_automatic_drop_down
self._app_all_model_drop = False
# After moving, we have to evaluate the boundary checks for nodes
build_volume.updateNodeBoundaryCheck()

View File

@ -45,17 +45,17 @@ class PreviewPass(RenderPass):
This is useful to get a preview image of a scene taken from a different location as the active camera.
"""
def __init__(self, width: int, height: int) -> None:
def __init__(self, width: int, height: int, *, root: CuraSceneNode = None) -> None:
super().__init__("preview", width, height, 0)
self._camera = None # type: Optional[Camera]
self._camera: Optional[Camera] = None
self._renderer = Application.getInstance().getRenderer()
self._shader = None # type: Optional[ShaderProgram]
self._non_printing_shader = None # type: Optional[ShaderProgram]
self._support_mesh_shader = None # type: Optional[ShaderProgram]
self._scene = Application.getInstance().getController().getScene()
self._shader: Optional[ShaderProgram] = None
self._non_printing_shader: Optional[ShaderProgram] = None
self._support_mesh_shader: Optional[ShaderProgram] = None
self._root = Application.getInstance().getController().getScene().getRoot() if root is None else root
# Set the camera to be used by this render pass
# if it's None, the active camera is used
@ -96,7 +96,7 @@ class PreviewPass(RenderPass):
batch_support_mesh = RenderBatch(self._support_mesh_shader)
# Fill up the batch with objects that can be sliced.
for node in DepthFirstIterator(self._scene.getRoot()):
for node in DepthFirstIterator(self._root):
if hasattr(node, "_outside_buildarea") and not getattr(node, "_outside_buildarea"):
if node.callDecoration("isSliceable") and node.getMeshData() and node.isVisible():
per_mesh_stack = node.callDecoration("getStack")

174
cura/PrintOrderManager.py Normal file
View File

@ -0,0 +1,174 @@
from typing import List, Callable, Optional, Any
from PyQt6.QtCore import pyqtProperty, pyqtSignal, QObject, pyqtSlot
from UM.Application import Application
from UM.Scene.Selection import Selection
from cura.Scene.CuraSceneNode import CuraSceneNode
class PrintOrderManager(QObject):
"""Allows to order the object list to set the print sequence manually"""
def __init__(self, get_nodes: Callable[[], List[CuraSceneNode]]) -> None:
super().__init__()
self._get_nodes = get_nodes
self._configureEvents()
_settingsChanged = pyqtSignal()
_uiActionsOutdated = pyqtSignal()
printOrderChanged = pyqtSignal()
@pyqtSlot()
def swapSelectedAndPreviousNodes(self) -> None:
selected_node, previous_node, next_node = self._getSelectedAndNeighborNodes()
self._swapPrintOrders(selected_node, previous_node)
@pyqtSlot()
def swapSelectedAndNextNodes(self) -> None:
selected_node, previous_node, next_node = self._getSelectedAndNeighborNodes()
self._swapPrintOrders(selected_node, next_node)
@pyqtProperty(str, notify=_uiActionsOutdated)
def previousNodeName(self) -> str:
selected_node, previous_node, next_node = self._getSelectedAndNeighborNodes()
return self._getNodeName(previous_node)
@pyqtProperty(str, notify=_uiActionsOutdated)
def nextNodeName(self) -> str:
selected_node, previous_node, next_node = self._getSelectedAndNeighborNodes()
return self._getNodeName(next_node)
@pyqtProperty(bool, notify=_uiActionsOutdated)
def shouldEnablePrintBeforeAction(self) -> bool:
selected_node, previous_node, next_node = self._getSelectedAndNeighborNodes()
can_swap_with_previous_node = selected_node is not None and previous_node is not None
return can_swap_with_previous_node
@pyqtProperty(bool, notify=_uiActionsOutdated)
def shouldEnablePrintAfterAction(self) -> bool:
selected_node, previous_node, next_node = self._getSelectedAndNeighborNodes()
can_swap_with_next_node = selected_node is not None and next_node is not None
return can_swap_with_next_node
@pyqtProperty(bool, notify=_settingsChanged)
def shouldShowEditPrintOrderActions(self) -> bool:
return PrintOrderManager.isUserDefinedPrintOrderEnabled()
@staticmethod
def isUserDefinedPrintOrderEnabled() -> bool:
stack = Application.getInstance().getGlobalContainerStack()
is_enabled = stack and \
stack.getProperty("print_sequence", "value") == "one_at_a_time" and \
stack.getProperty("user_defined_print_order_enabled", "value")
return bool(is_enabled)
@staticmethod
def initializePrintOrders(nodes: List[CuraSceneNode]) -> None:
"""Just created (loaded from file) nodes have print order 0.
This method initializes print orders with max value to put nodes at the end of object list"""
max_print_order = max(map(lambda n: n.printOrder, nodes), default=0)
for node in nodes:
if node.printOrder == 0:
max_print_order += 1
node.printOrder = max_print_order
@staticmethod
def updatePrintOrdersAfterGroupOperation(
all_nodes: List[CuraSceneNode],
group_node: CuraSceneNode,
grouped_nodes: List[CuraSceneNode]
) -> None:
group_node.printOrder = min(map(lambda n: n.printOrder, grouped_nodes))
all_nodes.append(group_node)
for node in grouped_nodes:
all_nodes.remove(node)
# reassign print orders so there won't be gaps like 1 2 5 6 7
sorted_nodes = sorted(all_nodes, key=lambda n: n.printOrder)
for i, node in enumerate(sorted_nodes):
node.printOrder = i + 1
@staticmethod
def updatePrintOrdersAfterUngroupOperation(
all_nodes: List[CuraSceneNode],
group_node: CuraSceneNode,
ungrouped_nodes: List[CuraSceneNode]
) -> None:
all_nodes.remove(group_node)
nodes_to_update_print_order = filter(lambda n: n.printOrder > group_node.printOrder, all_nodes)
for node in nodes_to_update_print_order:
node.printOrder += len(ungrouped_nodes) - 1
for i, child in enumerate(ungrouped_nodes):
child.printOrder = group_node.printOrder + i
all_nodes.append(child)
def _swapPrintOrders(self, node1: CuraSceneNode, node2: CuraSceneNode) -> None:
if node1 and node2:
node1.printOrder, node2.printOrder = node2.printOrder, node1.printOrder # swap print orders
self.printOrderChanged.emit() # update object list first
self._uiActionsOutdated.emit() # then update UI actions
def _getSelectedAndNeighborNodes(self
) -> (Optional[CuraSceneNode], Optional[CuraSceneNode], Optional[CuraSceneNode]):
nodes = self._get_nodes()
ordered_nodes = sorted(nodes, key=lambda n: n.printOrder)
for i, node in enumerate(ordered_nodes, 1):
node.printOrder = i
selected_node = PrintOrderManager._getSingleSelectedNode()
if selected_node and selected_node in ordered_nodes:
selected_node_index = ordered_nodes.index(selected_node)
else:
selected_node_index = None
if selected_node_index is not None and selected_node_index - 1 >= 0:
previous_node = ordered_nodes[selected_node_index - 1]
else:
previous_node = None
if selected_node_index is not None and selected_node_index + 1 < len(ordered_nodes):
next_node = ordered_nodes[selected_node_index + 1]
else:
next_node = None
return selected_node, previous_node, next_node
@staticmethod
def _getNodeName(node: CuraSceneNode, max_length: int = 30) -> str:
node_name = node.getName() if node else ""
truncated_node_name = node_name[:max_length]
return truncated_node_name
@staticmethod
def _getSingleSelectedNode() -> Optional[CuraSceneNode]:
if len(Selection.getAllSelectedObjects()) == 1:
selected_node = Selection.getSelectedObject(0)
return selected_node
return None
def _configureEvents(self) -> None:
Selection.selectionChanged.connect(self._onSelectionChanged)
self._global_stack = None
Application.getInstance().globalContainerStackChanged.connect(self._onGlobalStackChanged)
self._onGlobalStackChanged()
def _onGlobalStackChanged(self) -> None:
if self._global_stack:
self._global_stack.propertyChanged.disconnect(self._onSettingsChanged)
self._global_stack.containersChanged.disconnect(self._onSettingsChanged)
self._global_stack = Application.getInstance().getGlobalContainerStack()
if self._global_stack:
self._global_stack.propertyChanged.connect(self._onSettingsChanged)
self._global_stack.containersChanged.connect(self._onSettingsChanged)
def _onSettingsChanged(self, *args: Any) -> None:
self._settingsChanged.emit()
def _onSelectionChanged(self) -> None:
self._uiActionsOutdated.emit()
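The swap-and-renumber behaviour of this manager can be exercised without Qt; FakeNode below is a hypothetical stand-in for CuraSceneNode, used only to illustrate the effect on printOrder:

# Tiny stand-in demo of swapping print orders and renumbering; FakeNode is hypothetical.
class FakeNode:
    def __init__(self, name: str, print_order: int) -> None:
        self.name = name
        self.printOrder = print_order

def swap_print_orders(node1: FakeNode, node2: FakeNode) -> None:
    node1.printOrder, node2.printOrder = node2.printOrder, node1.printOrder

def renumber(nodes: list) -> None:
    # Reassign 1..N so there are no gaps, as the group/ungroup helpers above do.
    for i, node in enumerate(sorted(nodes, key=lambda n: n.printOrder)):
        node.printOrder = i + 1

nodes = [FakeNode("cube", 1), FakeNode("sphere", 4), FakeNode("cone", 7)]
renumber(nodes)                        # -> cube:1, sphere:2, cone:3
swap_print_orders(nodes[0], nodes[1])  # print the sphere first, the cube second
print([(n.name, n.printOrder) for n in sorted(nodes, key=lambda n: n.printOrder)])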

View File

@ -40,9 +40,22 @@ class ExtruderConfigurationModel(QObject):
def setHotendID(self, hotend_id: Optional[str]) -> None:
if self._hotend_id != hotend_id:
self._hotend_id = hotend_id
self._hotend_id = ExtruderConfigurationModel.applyNameMappingHotend(hotend_id)
self.extruderConfigurationChanged.emit()
@staticmethod
def applyNameMappingHotend(hotendId) -> str:
_EXTRUDER_NAME_MAP = {
"mk14_hot":"1XA",
"mk14_hot_s":"2XA",
"mk14_c":"1C",
"mk14":"1A",
"mk14_s":"2A"
}
if hotendId in _EXTRUDER_NAME_MAP:
return _EXTRUDER_NAME_MAP[hotendId]
return hotendId
@pyqtProperty(str, fset = setHotendID, notify = extruderConfigurationChanged)
def hotendID(self) -> Optional[str]:
return self._hotend_id

View File

@ -9,6 +9,8 @@ from PyQt6.QtCore import pyqtProperty, QObject
class MaterialOutputModel(QObject):
def __init__(self, guid: Optional[str], type: str, color: str, brand: str, name: str, parent = None) -> None:
super().__init__(parent)
name, guid = MaterialOutputModel.getMaterialFromDefinition(guid, type, brand, name)
self._guid = guid
self._type = type
self._color = color
@ -19,6 +21,34 @@ class MaterialOutputModel(QObject):
def guid(self) -> str:
return self._guid if self._guid else ""
@staticmethod
def getMaterialFromDefinition(guid, type, brand, name):
_MATERIAL_MAP = { "abs" :{"name" :"ABS" ,"guid": "2780b345-577b-4a24-a2c5-12e6aad3e690"},
"abs-cf10" :{"name": "ABS-CF" ,"guid": "495a0ce5-9daf-4a16-b7b2-06856d82394d"},
"abs-wss1" :{"name" :"ABS-R" ,"guid": "88c8919c-6a09-471a-b7b6-e801263d862d"},
"asa" :{"name" :"ASA" ,"guid": "f79bc612-21eb-482e-ad6c-87d75bdde066"},
"nylon12-cf":{"name": "Nylon 12 CF" ,"guid": "3c6f2877-71cc-4760-84e6-4b89ab243e3b"},
"nylon" :{"name" :"Nylon" ,"guid": "283d439a-3490-4481-920c-c51d8cdecf9c"},
"pc" :{"name" :"PC" ,"guid": "62414577-94d1-490d-b1e4-7ef3ec40db02"},
"petg" :{"name" :"PETG" ,"guid": "69386c85-5b6c-421a-bec5-aeb1fb33f060"},
"pla" :{"name" :"PLA" ,"guid": "0ff92885-617b-4144-a03c-9989872454bc"},
"pva" :{"name" :"PVA" ,"guid": "a4255da2-cb2a-4042-be49-4a83957a2f9a"},
"wss1" :{"name" :"RapidRinse" ,"guid": "a140ef8f-4f26-4e73-abe0-cfc29d6d1024"},
"sr30" :{"name" :"SR-30" ,"guid": "77873465-83a9-4283-bc44-4e542b8eb3eb"},
"bvoh" :{"name" :"BVOH" ,"guid": "923e604c-8432-4b09-96aa-9bbbd42207f4"},
"cpe" :{"name" :"CPE" ,"guid": "da1872c1-b991-4795-80ad-bdac0f131726"},
"hips" :{"name" :"HIPS" ,"guid": "a468d86a-220c-47eb-99a5-bbb47e514eb0"},
"tpu" :{"name" :"TPU 95A" ,"guid": "19baa6a9-94ff-478b-b4a1-8157b74358d2"}
}
if guid is None and brand != "empty" and type in _MATERIAL_MAP:
name = _MATERIAL_MAP[type]["name"]
guid = _MATERIAL_MAP[type]["guid"]
return name, guid
@pyqtProperty(str, constant = True)
def type(self) -> str:
return self._type
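Reduced to a pure function (and a shortened subset of the map above), the fallback behaves as sketched below; the function name is hypothetical:

# Standalone sketch of the guid/name fallback; the map is a shortened, illustrative subset.
from typing import Optional, Tuple

_MATERIAL_MAP = {
    "abs": {"name": "ABS", "guid": "2780b345-577b-4a24-a2c5-12e6aad3e690"},
    "pla": {"name": "PLA", "guid": "0ff92885-617b-4144-a03c-9989872454bc"},
}

def material_from_definition(guid: Optional[str], material_type: str,
                             brand: str, name: str) -> Tuple[str, Optional[str]]:
    # Only substitute when the printer reported no GUID and the material slot is not empty.
    if guid is None and brand != "empty" and material_type in _MATERIAL_MAP:
        name = _MATERIAL_MAP[material_type]["name"]
        guid = _MATERIAL_MAP[material_type]["guid"]
    return name, guid

print(material_from_definition(None, "pla", "Generic", "unknown"))   # filled in from the map
print(material_from_definition("1234", "pla", "Generic", "My PLA"))  # a reported GUID wins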

View File

@ -415,7 +415,19 @@ class NetworkedPrinterOutputDevice(PrinterOutputDevice):
@pyqtProperty(str, constant = True)
def printerType(self) -> str:
return self._properties.get(b"printer_type", b"Unknown").decode("utf-8")
return NetworkedPrinterOutputDevice.applyPrinterTypeMapping(self._properties.get(b"printer_type", b"Unknown").decode("utf-8"))
@staticmethod
def applyPrinterTypeMapping(printer_type):
_PRINTER_TYPE_NAME = {
"fire_e": "ultimaker_method",
"lava_f": "ultimaker_methodx",
"magma_10": "ultimaker_methodxl",
"sketch": "ultimaker_sketch"
}
if printer_type in _PRINTER_TYPE_NAME:
return _PRINTER_TYPE_NAME[printer_type]
return printer_type
@pyqtProperty(str, constant = True)
def ipAddress(self) -> str:

View File

@ -11,6 +11,7 @@ from UM.Scene.SceneNode import SceneNode
from UM.Scene.SceneNodeDecorator import SceneNodeDecorator # To cast the deepcopy of every decorator back to SceneNodeDecorator.
import cura.CuraApplication # To get the build plate.
from UM.Scene.SceneNodeSettings import SceneNodeSettings
from cura.Settings.ExtruderStack import ExtruderStack # For typing.
from cura.Settings.SettingOverrideDecorator import SettingOverrideDecorator # For per-object settings.
@ -25,13 +26,26 @@ class CuraSceneNode(SceneNode):
if not no_setting_override:
self.addDecorator(SettingOverrideDecorator()) # Now we always have a getActiveExtruderPosition, unless explicitly disabled
self._outside_buildarea = False
self._print_order = 0
def setOutsideBuildArea(self, new_value: bool) -> None:
self._outside_buildarea = new_value
@property
def printOrder(self):
return self._print_order
@printOrder.setter
def printOrder(self, new_value):
self._print_order = new_value
def isOutsideBuildArea(self) -> bool:
return self._outside_buildarea or self.callDecoration("getBuildPlateNumber") < 0
@property
def isDropDownEnabled(self) ->bool:
return self.getSetting(SceneNodeSettings.AutoDropDown, Application.getInstance().getPreferences().getValue("physics/automatic_drop_down"))
def isVisible(self) -> bool:
return super().isVisible() and self.callDecoration("getBuildPlateNumber") == cura.CuraApplication.CuraApplication.getInstance().getMultiBuildPlateModel().activeBuildPlate
@ -157,3 +171,6 @@ class CuraSceneNode(SceneNode):
def transformChanged(self) -> None:
self._transformChanged()
def __repr__(self) -> str:
return "{print_order}. {name}".format(print_order = self._print_order, name = self.getName())

View File

@ -284,16 +284,20 @@ class CuraStackBuilder:
abstract_machines = registry.findContainerStacks(id = abstract_machine_id)
if abstract_machines:
return cast(GlobalStack, abstract_machines[0])
definitions = registry.findDefinitionContainers(id=definition_id)
name = ""
if definitions:
name = definitions[0].getName()
stack = cls.createMachine(abstract_machine_id, definition_id, show_warning_message=False)
if not stack:
return None
if not stack.getMetaDataEntry("visible", True):
return None
stack.setName(name)
stack.setMetaDataEntry("is_abstract_machine", True)

View File

@ -10,13 +10,16 @@ class VariantDatabaseHandler(DatabaseMetadataContainerController):
"""The Database handler for Variant containers"""
def __init__(self):
super().__init__(SQLQueryFactory(table = "variant",
fields = {
"id": "text",
"name": "text",
"hardware_type": "text",
"definition": "text",
"version": "text",
"setting_version": "text"
}))
super().__init__(SQLQueryFactory(
table="variant",
fields={
"id": "text",
"name": "text",
"hardware_type": "text",
"definition": "text",
"version": "text",
"setting_version": "text",
"reference_extruder_id": "text",
},
))
self._container_type = InstanceContainer

View File

@ -316,7 +316,13 @@ class ExtruderManager(QObject):
# Starts with the adhesion extruder.
adhesion_type = global_stack.getProperty("adhesion_type", "value")
if adhesion_type in {"skirt", "brim"}:
return max(0, int(global_stack.getProperty("skirt_brim_extruder_nr", "value"))) # optional skirt/brim extruder defaults to zero
skirt_brim_extruder_nr = global_stack.getProperty("skirt_brim_extruder_nr", "value")
# if the skirt_brim_extruder_nr is -1, then we use the first used extruder
if skirt_brim_extruder_nr == -1:
used_extruders = self.getUsedExtruderStacks()
return used_extruders[0].position
else:
return skirt_brim_extruder_nr
if adhesion_type == "raft":
return global_stack.getProperty("raft_base_extruder_nr", "value")
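A pure-Python sketch of the adhesion-extruder decision after this change, with plain arguments standing in for the container-stack properties (the non-skirt/brim/raft default is simplified here):

# Sketch of the adhesion-extruder choice; arguments stand in for container-stack properties.
from typing import List

def adhesion_extruder_nr(adhesion_type: str, skirt_brim_extruder_nr: int,
                         raft_base_extruder_nr: int, used_extruder_positions: List[int]) -> int:
    if adhesion_type in {"skirt", "brim"}:
        if skirt_brim_extruder_nr == -1:
            # -1 means "no explicit choice": fall back to the first extruder that is actually used.
            return used_extruder_positions[0]
        return skirt_brim_extruder_nr
    if adhesion_type == "raft":
        return raft_base_extruder_nr
    return used_extruder_positions[0]  # simplified default for the other adhesion types

print(adhesion_extruder_nr("brim", -1, 0, [1, 2]))  # -> 1 (falls back to the first used extruder)
print(adhesion_extruder_nr("brim", 0, 0, [1, 2]))   # -> 0 (an explicit choice wins)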

View File

@ -145,10 +145,24 @@ class IntentManager(QObject):
@pyqtProperty(str, notify = intentCategoryChanged)
def currentIntentCategory(self) -> str:
application = cura.CuraApplication.CuraApplication.getInstance()
active_extruder_stack = application.getMachineManager().activeStack
if active_extruder_stack is None:
return ""
return active_extruder_stack.intent.getMetaDataEntry("intent_category", "")
global_stack = application.getGlobalContainerStack()
active_intent = "default"
if global_stack is None:
return active_intent
# Loop over all active extruders and check if they have an intent that isn't default.
# The logic behind this is that support materials (for instance, PVA) don't have intents, but they should be
# combinable with all other intents. So if one extruder has "engineering" as an intent and the other has
# "default" the 'dominant' intent is "engineering"
for extruder_stack in global_stack.extruderList:
if not extruder_stack.isEnabled: # Ignore disabled stacks
continue
extruder_intent = extruder_stack.intent.getMetaDataEntry("intent_category", "")
if extruder_intent != "default":
active_intent = extruder_intent
return active_intent
@pyqtSlot(str, str)
def selectIntent(self, intent_category: str, quality_type: str) -> None:
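The rule described in the comment above reduces to a small pure function; the per-extruder data below is illustrative:

# Sketch of the "dominant intent" rule for enabled extruders; the data is illustrative.
from typing import List, Tuple

def dominant_intent(extruders: List[Tuple[bool, str]]) -> str:
    """extruders: (is_enabled, intent_category) pairs; returns the active intent category."""
    active = "default"
    for is_enabled, intent in extruders:
        if not is_enabled:
            continue          # disabled extruders are ignored
        if intent != "default":
            active = intent   # any non-default intent dominates "default"
    return active

print(dominant_intent([(True, "engineering"), (True, "default")]))  # -> engineering
print(dominant_intent([(True, "default"), (False, "visual")]))      # -> default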

View File

@ -48,6 +48,8 @@ from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
from cura.Settings.GlobalStack import GlobalStack
if TYPE_CHECKING:
from PyQt6.QtCore import QVariantList
from cura.CuraApplication import CuraApplication
from cura.Machines.MaterialNode import MaterialNode
from cura.Machines.QualityChangesGroup import QualityChangesGroup
@ -581,6 +583,10 @@ class MachineManager(QObject):
def activeMachine(self) -> Optional["GlobalStack"]:
return self._global_container_stack
@pyqtProperty("QVariantList", notify=activeVariantChanged)
def activeMachineExtruders(self) -> Optional["QVariantList"]:
return self._global_container_stack.extruderList if self._global_container_stack else None
@pyqtProperty(str, notify = activeStackChanged)
def activeStackId(self) -> str:
if self._active_container_stack:
@ -841,6 +847,24 @@ class MachineManager(QObject):
return result
@pyqtProperty(bool, notify = currentConfigurationChanged)
def variantCoreUsableForFactor4(self) -> bool:
"""The selected core is usable if it is in second extruder of Factor4
"""
result = True
if not self._global_container_stack:
return result
if self.activeMachine.definition.id != "ultimaker_factor4":
return result
for extruder_container in self._global_container_stack.extruderList:
if extruder_container.definition.id.startswith("ultimaker_factor4_extruder_right"):
if extruder_container.material == empty_material_container:
return True
if extruder_container.variant.id.startswith("ultimaker_factor4_bb"):
return False
return True
@pyqtSlot(str, result = str)
def getDefinitionByMachineId(self, machine_id: str) -> Optional[str]:
"""Get the Definition ID of a machine (specified by ID)

View File

@ -1,6 +1,6 @@
# Copyright (c) 2017 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
from typing import List, Optional, TYPE_CHECKING
from typing import List, Optional, Set, TYPE_CHECKING
from PyQt6.QtCore import QObject, QTimer, pyqtProperty, pyqtSignal
from UM.FlameProfiler import pyqtSlot
@ -168,37 +168,26 @@ class SettingInheritanceManager(QObject):
def settingsWithInheritanceWarning(self) -> List[str]:
return self._settings_with_inheritance_warning
def _settingIsOverwritingInheritance(self, key: str, stack: ContainerStack = None) -> bool:
"""Check if a setting has an inheritance function that is overwritten"""
def _userSettingIsOverwritingInheritance(self, key: str, stack: ContainerStack, all_keys: Set[str] = set()) -> bool:
"""Check if a setting known as having a User state has an inheritance function that is overwritten"""
has_setting_function = False
if not stack:
stack = self._active_container_stack
if not stack: # No active container stack yet!
return False
if self._active_container_stack is None:
return False
all_keys = self._active_container_stack.getAllKeys()
containers = [] # type: List[ContainerInterface]
has_user_state = stack.getProperty(key, "state") == InstanceState.User
"""Check if the setting has a user state. If not, it is never overwritten."""
if not has_user_state:
return False
# If a setting is not enabled, don't label it as overwritten (It's never visible anyway).
if not stack.getProperty(key, "enabled"):
return False
user_container = stack.getTop()
"""Also check if the top container is not a setting function (this happens if the inheritance is restored)."""
# Also check if the top container is not a setting function (this happens if the inheritance is restored).
if user_container and isinstance(user_container.getProperty(key, "value"), SettingFunction):
return False
if not all_keys:
all_keys = self._active_container_stack.getAllKeys()
## Mash all containers for all the stacks together.
while stack:
containers.extend(stack.getContainers())
@ -229,17 +218,35 @@ class SettingInheritanceManager(QObject):
break # There is a setting function somewhere, stop looking deeper.
return has_setting_function and has_non_function_value
def _settingIsOverwritingInheritance(self, key: str, stack: ContainerStack = None) -> bool:
"""Check if a setting has an inheritance function that is overwritten"""
if not stack:
stack = self._active_container_stack
if not stack: # No active container stack yet!
return False
if self._active_container_stack is None:
return False
has_user_state = stack.getProperty(key, "state") == InstanceState.User
if not has_user_state:
return False
return self._userSettingIsOverwritingInheritance(key, stack)
def _update(self) -> None:
self._settings_with_inheritance_warning = [] # Reset previous data.
# Make sure that the GlobalStack is not None. sometimes the globalContainerChanged signal gets here late.
if self._global_container_stack is None:
if self._global_container_stack is None or self._active_container_stack is None:
return
# Check all setting keys that we know of and see if they are overridden.
for setting_key in self._global_container_stack.getAllKeys():
override = self._settingIsOverwritingInheritance(setting_key)
if override:
# Check all user setting keys that we know of and see if they are overridden.
all_keys = self._active_container_stack.getAllKeys()
for setting_key in self._active_container_stack.getAllKeysWithUserState():
if self._userSettingIsOverwritingInheritance(setting_key, self._active_container_stack, all_keys):
self._settings_with_inheritance_warning.append(setting_key)
# Check all the categories if any of their children have their inheritance overwritten.
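A simplified sketch of the narrowed scan: only keys with a User state are checked, and the full key set is computed once. FakeStack and its warning rule are stand-ins for the real container walk:

# Simplified shape of the narrowed inheritance scan; FakeStack and its rule are stand-ins.
from typing import Dict, List, Set

class FakeStack:
    def __init__(self, settings: Dict[str, dict]) -> None:
        self._settings = settings

    def getAllKeys(self) -> Set[str]:
        return set(self._settings)

    def getAllKeysWithUserState(self) -> Set[str]:
        return {key for key, props in self._settings.items() if props.get("state") == "user"}

    def overridesInheritance(self, key: str, all_keys: Set[str]) -> bool:
        # Placeholder rule: a user value overrides inheritance when a setting function also exists.
        props = self._settings[key]
        return key in all_keys and props.get("has_function", False) and props.get("has_value", False)

def settings_with_inheritance_warning(stack: FakeStack) -> List[str]:
    all_keys = stack.getAllKeys()  # computed once and passed to every check, as in the diff
    return [key for key in stack.getAllKeysWithUserState()
            if stack.overridesInheritance(key, all_keys)]

stack = FakeStack({
    "speed_print": {"state": "user", "has_function": True, "has_value": True},
    "speed_wall": {"state": "default", "has_function": True, "has_value": True},
})
print(settings_with_inheritance_warning(stack))  # only the user-state key is reported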

View File

@ -5,16 +5,18 @@ import json
import os
from typing import List, Optional
from PyQt6.QtCore import QUrl
from PyQt6.QtNetwork import QLocalServer, QLocalSocket
from UM.Qt.QtApplication import QtApplication #For typing.
from UM.Qt.QtApplication import QtApplication # For typing.
from UM.Logger import Logger
class SingleInstance:
def __init__(self, application: QtApplication, files_to_open: Optional[List[str]]) -> None:
def __init__(self, application: QtApplication, files_to_open: Optional[List[str]], url_to_open: Optional[List[str]]) -> None:
self._application = application
self._files_to_open = files_to_open
self._url_to_open = url_to_open
self._single_instance_server = None
@ -33,7 +35,7 @@ class SingleInstance:
return False
# We only send the files that need to be opened.
if not self._files_to_open:
if not self._files_to_open and not self._url_to_open:
Logger.log("i", "No file need to be opened, do nothing.")
return True
@ -55,8 +57,12 @@ class SingleInstance:
payload = {"command": "open", "filePath": os.path.abspath(filename)}
single_instance_socket.write(bytes(json.dumps(payload) + "\n", encoding = "ascii"))
for url in self._url_to_open:
payload = {"command": "open-url", "urlPath": url.toString()}
single_instance_socket.write(bytes(json.dumps(payload) + "\n", encoding="ascii"))
payload = {"command": "close-connection"}
single_instance_socket.write(bytes(json.dumps(payload) + "\n", encoding = "ascii"))
single_instance_socket.write(bytes(json.dumps(payload) + "\n", encoding="ascii"))
single_instance_socket.flush()
single_instance_socket.waitForDisconnected()
@ -72,7 +78,7 @@ class SingleInstance:
def _onClientConnected(self) -> None:
Logger.log("i", "New connection received on our single-instance server")
connection = None #type: Optional[QLocalSocket]
connection = None # type: Optional[QLocalSocket]
if self._single_instance_server:
connection = self._single_instance_server.nextPendingConnection()
@ -81,7 +87,7 @@ class SingleInstance:
def __readCommands(self, connection: QLocalSocket) -> None:
line = connection.readLine()
while len(line) != 0: # There is also a .canReadLine()
while len(line) != 0: # There is also a .canReadLine()
try:
payload = json.loads(str(line, encoding = "ascii").strip())
command = payload["command"]
@ -94,13 +100,19 @@ class SingleInstance:
elif command == "open":
self._application.callLater(lambda f = payload["filePath"]: self._application._openFile(f))
#command: Load a url link in Cura
elif command == "open-url":
url = QUrl(payload["urlPath"])
self._application.callLater(lambda: self._application._openUrl(url))
# Command: Activate the window and bring it to the top.
elif command == "focus":
# Operating systems these days prevent windows from moving around by themselves.
# 'alert' or flashing the icon in the taskbar is the best thing we do now.
main_window = self._application.getMainWindow()
if main_window is not None:
self._application.callLater(lambda: main_window.alert(0)) # type: ignore # I don't know why MyPy complains here
self._application.callLater(lambda: main_window.alert(0)) # type: ignore # I don't know why MyPy complains here
# Command: Close the socket connection. We're done.
elif command == "close-connection":
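On the client side, a second instance sends newline-delimited JSON commands over a QLocalSocket; a minimal sketch for the new open-url command (server name and URL are placeholders) could look like this:

# Minimal client-side sketch of the newline-delimited JSON protocol; server name and URL are placeholders.
import json
from PyQt6.QtNetwork import QLocalSocket

SERVER_NAME = "ultimaker-cura"  # placeholder; the real name comes from the running application

socket = QLocalSocket()
socket.connectToServer(SERVER_NAME)
if socket.waitForConnected(3000):
    for payload in ({"command": "open-url", "urlPath": "cura://example"},
                    {"command": "close-connection"}):
        socket.write(bytes(json.dumps(payload) + "\n", encoding="ascii"))
    socket.flush()
    socket.waitForDisconnected(1000)
else:
    print("No running instance found")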

View File

@ -1,7 +1,9 @@
# Copyright (c) 2021 Ultimaker B.V.
# Copyright (c) 2023 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.
import numpy
from typing import Optional
from PyQt6 import QtCore
from PyQt6.QtCore import QCoreApplication
from PyQt6.QtGui import QImage
@ -10,30 +12,133 @@ from UM.Logger import Logger
from cura.PreviewPass import PreviewPass
from UM.Application import Application
from UM.Math.AxisAlignedBox import AxisAlignedBox
from UM.Math.Matrix import Matrix
from UM.Math.Vector import Vector
from UM.Scene.Camera import Camera
from UM.Scene.Iterator.DepthFirstIterator import DepthFirstIterator
from UM.Scene.SceneNode import SceneNode
from UM.Qt.QtRenderer import QtRenderer
class Snapshot:
DEFAULT_WIDTH_HEIGHT = 300
MAX_RENDER_DISTANCE = 10000
BOUND_BOX_FACTOR = 1.75
CAMERA_FOVY = 30
ATTEMPTS_FOR_SNAPSHOT = 10
@staticmethod
def getImageBoundaries(image: QImage):
# Look at the resulting image to get a good crop.
# Get the pixels as byte array
def getNonZeroPixels(image: QImage):
pixel_array = image.bits().asarray(image.sizeInBytes())
width, height = image.width(), image.height()
# Convert to numpy array, assume it's 32 bit (it should always be)
pixels = numpy.frombuffer(pixel_array, dtype=numpy.uint8).reshape([height, width, 4])
# Find indices of non zero pixels
nonzero_pixels = numpy.nonzero(pixels)
return numpy.nonzero(pixels)
@staticmethod
def getImageBoundaries(image: QImage):
nonzero_pixels = Snapshot.getNonZeroPixels(image)
min_y, min_x, min_a_ = numpy.amin(nonzero_pixels, axis=1) # type: ignore
max_y, max_x, max_a_ = numpy.amax(nonzero_pixels, axis=1) # type: ignore
return min_x, max_x, min_y, max_y
@staticmethod
def snapshot(width = 300, height = 300):
def isometricSnapshot(width: int = DEFAULT_WIDTH_HEIGHT, height: int = DEFAULT_WIDTH_HEIGHT, *, node: Optional[SceneNode] = None) -> Optional[QImage]:
"""
Create an isometric snapshot of the scene.
:param width: width of the aspect ratio default 300
:param height: height of the aspect ratio default 300
:param node: node of the scene default is the root of the scene
:return: None when there is no model on the build plate; otherwise the rendered image
"""
if node is None:
node = Application.getInstance().getController().getScene().getRoot()
# the direction the camera is looking at to create the isometric view
iso_view_dir = Vector(-1, -1, -1).normalized()
bounds = Snapshot.nodeBounds(node)
if bounds is None:
Logger.log("w", "There appears to be nothing to render")
return None
camera = Camera("snapshot")
# find local x and y directional vectors of the camera
tangent_space_x_direction = iso_view_dir.cross(Vector.Unit_Y).normalized()
tangent_space_y_direction = tangent_space_x_direction.cross(iso_view_dir).normalized()
# find extreme screen space coords of the scene
x_points = [p.dot(tangent_space_x_direction) for p in bounds.points]
y_points = [p.dot(tangent_space_y_direction) for p in bounds.points]
min_x = min(x_points)
max_x = max(x_points)
min_y = min(y_points)
max_y = max(y_points)
camera_width = max_x - min_x
camera_height = max_y - min_y
if camera_width == 0 or camera_height == 0:
Logger.log("w", "There appears to be nothing to render")
return None
# increase either width or height to match the aspect ratio of the image
if camera_width / camera_height > width / height:
camera_height = camera_width * height / width
else:
camera_width = camera_height * width / height
# Configure camera for isometric view
ortho_matrix = Matrix()
ortho_matrix.setOrtho(
-camera_width / 2,
camera_width / 2,
-camera_height / 2,
camera_height / 2,
-Snapshot.MAX_RENDER_DISTANCE,
Snapshot.MAX_RENDER_DISTANCE
)
camera.setPerspective(False)
camera.setProjectionMatrix(ortho_matrix)
camera.setPosition(bounds.center)
camera.lookAt(bounds.center + iso_view_dir)
# Render the scene
renderer = QtRenderer()
render_pass = PreviewPass(width, height, root=node)
renderer.setViewportSize(width, height)
renderer.setWindowSize(width, height)
render_pass.setCamera(camera)
renderer.addRenderPass(render_pass)
renderer.beginRendering()
renderer.render()
return render_pass.getOutput()
@staticmethod
def isNodeRenderable(node):
return not getattr(node, "_outside_buildarea", False) and node.callDecoration(
"isSliceable") and node.getMeshData() and node.isVisible() and not node.callDecoration(
"isNonThumbnailVisibleMesh")
@staticmethod
def nodeBounds(root_node: SceneNode) -> Optional[AxisAlignedBox]:
axis_aligned_box = None
for node in DepthFirstIterator(root_node):
if Snapshot.isNodeRenderable(node):
if axis_aligned_box is None:
axis_aligned_box = node.getBoundingBox()
else:
axis_aligned_box = axis_aligned_box + node.getBoundingBox()
return axis_aligned_box
@staticmethod
def snapshot(width = DEFAULT_WIDTH_HEIGHT, height = DEFAULT_WIDTH_HEIGHT, number_of_attempts = ATTEMPTS_FOR_SNAPSHOT):
"""Return a QImage of the scene
Uses PreviewPass, which leaves out some elements. The aspect ratio is assumed to be square.
@ -55,14 +160,7 @@ class Snapshot:
camera = Camera("snapshot", root)
# determine zoom and look at
bbox = None
for node in DepthFirstIterator(root):
if not getattr(node, "_outside_buildarea", False):
if node.callDecoration("isSliceable") and node.getMeshData() and node.isVisible() and not node.callDecoration("isNonThumbnailVisibleMesh"):
if bbox is None:
bbox = node.getBoundingBox()
else:
bbox = bbox + node.getBoundingBox()
bbox = Snapshot.nodeBounds(root)
# If there is no bounding box, it means that there is no model in the buildplate
if bbox is None:
Logger.log("w", "Unable to create snapshot as we seem to have an empty buildplate")
@ -76,13 +174,13 @@ class Snapshot:
looking_from_offset = Vector(-1, 1, 2)
if size > 0:
# determine the watch distance depending on the size
looking_from_offset = looking_from_offset * size * 1.75
looking_from_offset = looking_from_offset * size * Snapshot.BOUND_BOX_FACTOR
camera.setPosition(look_at + looking_from_offset)
camera.lookAt(look_at)
satisfied = False
size = None
fovy = 30
fovy = Snapshot.CAMERA_FOVY
while not satisfied:
if size is not None:
@ -97,9 +195,14 @@ class Snapshot:
pixel_output = preview_pass.getOutput()
try:
min_x, max_x, min_y, max_y = Snapshot.getImageBoundaries(pixel_output)
except (ValueError, AttributeError):
Logger.logException("w", "Failed to crop the snapshot!")
return None
except (ValueError, AttributeError) as e:
if number_of_attempts == 0:
Logger.warning(f"Failed to crop the snapshot even after {Snapshot.ATTEMPTS_FOR_SNAPSHOT} attempts!")
return None
else:
number_of_attempts = number_of_attempts - 1
Logger.info("Trying to get the snapshot again.")
return Snapshot.snapshot(width, height, number_of_attempts)
size = max((max_x - min_x) / render_width, (max_y - min_y) / render_height)
if size > 0.5 or satisfied:
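The camera fitting above projects the bounding-box corners onto the camera's local x/y axes and then widens one side to the requested aspect ratio; a numpy-only sketch of that fitting step (no rendering, illustrative data) is:

# numpy-only sketch of fitting an orthographic camera to a bounding box for the isometric view.
import numpy

def isometric_camera_extent(bbox_points, width_px, height_px):
    """bbox_points: (N, 3) array of bounding-box corners; returns (camera_width, camera_height)."""
    iso_view_dir = numpy.array([-1.0, -1.0, -1.0])
    iso_view_dir /= numpy.linalg.norm(iso_view_dir)

    # Local x/y axes of the camera (tangent space), as in the code above.
    x_dir = numpy.cross(iso_view_dir, [0.0, 1.0, 0.0])
    x_dir /= numpy.linalg.norm(x_dir)
    y_dir = numpy.cross(x_dir, iso_view_dir)
    y_dir /= numpy.linalg.norm(y_dir)

    # Extent of the scene as seen from the camera.
    xs = bbox_points @ x_dir
    ys = bbox_points @ y_dir
    cam_w, cam_h = xs.max() - xs.min(), ys.max() - ys.min()
    if cam_w == 0 or cam_h == 0:
        raise ValueError("There appears to be nothing to render")

    # Grow one dimension so the camera matches the image aspect ratio.
    if cam_w / cam_h > width_px / height_px:
        cam_h = cam_w * height_px / width_px
    else:
        cam_w = cam_h * width_px / height_px
    return cam_w, cam_h

corners = numpy.array([[x, y, z] for x in (0, 10) for y in (0, 20) for z in (0, 10)], dtype=float)
print(isometric_camera_extent(corners, 300, 300))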

View File

@ -14,6 +14,9 @@ from UM.Scene.SceneNode import SceneNode
from UM.Scene.Selection import Selection
from UM.i18n import i18nCatalog
from cura.PrintOrderManager import PrintOrderManager
from cura.Scene.CuraSceneNode import CuraSceneNode
catalog = i18nCatalog("cura")
@ -76,6 +79,9 @@ class ObjectsModel(ListModel):
self._build_plate_number = nr
self._update()
def getNodes(self) -> List[CuraSceneNode]:
return list(map(lambda n: n["node"], self.items))
def _updateSceneDelayed(self, source) -> None:
if not isinstance(source, Camera):
self._update_timer.start()
@ -175,6 +181,10 @@ class ObjectsModel(ListModel):
all_nodes = self._renameNodes(name_to_node_info_dict)
user_defined_print_order_enabled = PrintOrderManager.isUserDefinedPrintOrderEnabled()
if user_defined_print_order_enabled:
PrintOrderManager.initializePrintOrders(all_nodes)
for node in all_nodes:
if hasattr(node, "isOutsideBuildArea"):
is_outside_build_area = node.isOutsideBuildArea() # type: ignore
@ -223,8 +233,13 @@ class ObjectsModel(ListModel):
# for anti overhang meshes and groups the extruder nr is irrelevant
extruder_number = -1
if not user_defined_print_order_enabled:
name = node.getName()
else:
name = "{print_order}. {name}".format(print_order = node.printOrder, name = node.getName())
nodes.append({
"name": node.getName(),
"name": name,
"selected": Selection.isSelected(node),
"outside_build_area": is_outside_build_area,
"buildplate_number": node_build_plate_number,
@ -234,5 +249,5 @@ class ObjectsModel(ListModel):
"node": node
})
nodes = sorted(nodes, key=lambda n: n["name"])
nodes = sorted(nodes, key=lambda n: n["name"] if not user_defined_print_order_enabled else n["node"].printOrder)
self.setItems(nodes)

View File

@ -15,6 +15,10 @@ if "" in sys.path:
import argparse
import faulthandler
import os
# Set the environment variable QT_QUICK_FLICKABLE_WHEEL_DECELERATION to 5000, as mentioned in the Qt 6.6 update log, to overcome scroll-related issues
os.environ["QT_QUICK_FLICKABLE_WHEEL_DECELERATION"] = str(int(os.environ.get("QT_QUICK_FLICKABLE_WHEEL_DECELERATION", "5000")))
if sys.platform != "linux": # Turns out the Linux build _does_ use this, but we're not making an Enterprise release for that system anyway.
os.environ["QT_PLUGIN_PATH"] = "" # Security workaround: Don't need it, and introduces an attack vector, so set to nul.
os.environ["QML2_IMPORT_PATH"] = "" # Security workaround: Don't need it, and introduces an attack vector, so set to nul.

View File

@ -14,38 +14,9 @@ AppDir:
- amd64
allow_unauthenticated: true
sources:
- sourceline: deb http://nl.archive.ubuntu.com/ubuntu/ jammy main restricted
- sourceline: deb http://nl.archive.ubuntu.com/ubuntu/ jammy-updates main restricted
- sourceline: deb http://nl.archive.ubuntu.com/ubuntu/ jammy universe
- sourceline: deb http://nl.archive.ubuntu.com/ubuntu/ jammy-updates universe
- sourceline: deb http://nl.archive.ubuntu.com/ubuntu/ jammy multiverse
- sourceline: deb http://nl.archive.ubuntu.com/ubuntu/ jammy-updates multiverse
- sourceline: deb http://nl.archive.ubuntu.com/ubuntu/ jammy-backports main restricted
universe multiverse
- sourceline: deb http://security.ubuntu.com/ubuntu jammy-security main restricted
- sourceline: deb http://security.ubuntu.com/ubuntu jammy-security universe
- sourceline: deb http://security.ubuntu.com/ubuntu jammy-security multiverse
- sourceline: deb https://releases.jfrog.io/artifactory/jfrog-debs xenial contrib
- sourceline: deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-14 main
- sourceline: deb https://ppa.launchpadcontent.net/ubuntu-toolchain-r/test/ubuntu/
jammy main
- sourceline: deb https://ppa.launchpadcontent.net/deadsnakes/ppa/ubuntu/ jammy
main
- sourceline: deb [arch=amd64] https://packages.microsoft.com/repos/ms-teams stable
main
- sourceline: deb https://ppa.launchpadcontent.net/ppa-verse/cling/ubuntu/ jammy
main
- sourceline: deb [arch=amd64] https://dl.google.com/linux/chrome/deb/ stable
main
- sourceline: deb [signed-by=/usr/share/keyrings/nodesource.gpg] https://deb.nodesource.com/node_14.x
jammy main
- sourceline: deb [arch=amd64 signed-by=/usr/share/keyrings/transip-stack.gpg]
https://mirror.transip.net/stack/software/deb/Ubuntu_22.04/ ./
- sourceline: deb http://repository.spotify.com stable non-free
- sourceline: deb [arch=amd64,arm64,armhf] http://packages.microsoft.com/repos/code
stable main
- sourceline: deb https://packagecloud.io/slacktechnologies/slack/debian/ jessie
main
- sourceline: deb http://archive.ubuntu.com/ubuntu/ jammy main restricted universe multiverse
- sourceline: deb http://archive.ubuntu.com/ubuntu/ jammy-updates main restricted universe multiverse
- sourceline: deb http://security.ubuntu.com/ubuntu jammy-security main restricted universe multiverse
include:
- xdg-desktop-portal-kde
- libgtk-3-0
@ -67,6 +38,7 @@ AppDir:
- usr/share/doc/*/changelog.*
- usr/share/doc/*/NEWS.*
- usr/share/doc/*/TODO.*
- usr/lib/x86_64-linux-gnu/libssl.so*
runtime:
env:
APPDIR_LIBRARY_PATH: "$APPDIR:$APPDIR/runtime/compat/:$APPDIR/usr/lib/x86_64-linux-gnu:$APPDIR/lib/x86_64-linux-gnu:$APPDIR/usr/lib:$APPDIR/usr/lib/x86_64-linux-gnu/gdk-pixbuf-2.0/2.10.0/loaders"

View File

@ -87,15 +87,16 @@ def notarize_file(dist_path: str, filename: str) -> None:
""" Notarize a file. This takes 5+ minutes, there is indication that this step is successful."""
notarize_user = os.environ.get("MAC_NOTARIZE_USER")
notarize_password = os.environ.get("MAC_NOTARIZE_PASS")
altool_executable = os.environ.get("ALTOOL_EXECUTABLE", "altool")
notarize_team = os.environ.get("MACOS_CERT_USER")
notary_executable = os.environ.get("NOTARY_TOOL_EXECUTABLE", "notarytool")
notarize_arguments = [
"xcrun", altool_executable,
"--notarize-app",
"--primary-bundle-id", ULTIMAKER_CURA_DOMAIN,
"--username", notarize_user,
"xcrun", notary_executable,
"submit",
"--apple-id", notarize_user,
"--password", notarize_password,
"--file", Path(dist_path, filename)
"--team-id", notarize_team,
Path(dist_path, filename)
]
subprocess.run(notarize_arguments)

View File

@ -144,6 +144,23 @@ SectionEnd
######################################################################
Section UrlProtocol
WriteRegStr HKCR "cura" "" "URL:cura"
WriteRegStr HKCR "cura" "URL Protocol" ""
WriteRegStr HKCR "cura\DefaultIcon" "" "$INSTDIR\${MAIN_APP_EXE},1"
WriteRegStr HKCR "cura\shell" "" "open"
WriteRegStr HKCR "cura\shell\open\command" "" '"$INSTDIR\${MAIN_APP_EXE}" "%1"'
WriteRegStr HKCR "slicer" "" "URL:slicer"
WriteRegStr HKCR "slicer" "URL Protocol" ""
WriteRegStr HKCR "slicer\DefaultIcon" "" "$INSTDIR\${MAIN_APP_EXE},1"
WriteRegStr HKCR "slicer\shell" "" "open"
WriteRegStr HKCR "slicer\shell\open\command" "" '"$INSTDIR\${MAIN_APP_EXE}" "%1"'
SectionEnd
######################################################################
Section Uninstall
${INSTALL_TYPE}{% for files in mapped_out_paths.values() %}{% for file in files %}
Delete "{{ file[1] }}"{% endfor %}{% endfor %}{% for rem_dir in rmdir_paths %}
@ -187,8 +204,13 @@ RmDir "$SMPROGRAMS\{{ app_name }}"
!insertmacro APP_UNASSOCIATE "stl" "Cura.model"
!insertmacro APP_UNASSOCIATE "3mf" "Cura.project"
; Unassociate file associations for 'cura' protocol
DeleteRegKey HKCR "cura"
; Unassociate file associations for 'slicer' protocol
DeleteRegKey HKCR "slicer"
DeleteRegKey ${REG_ROOT} "${REG_APP_PATH}"
DeleteRegKey ${REG_ROOT} "${UNINSTALL_PATH}"
SectionEnd
######################################################################

View File

@ -33,6 +33,21 @@
/>
</Upgrade>
<Property Id="ASSOCIATE_URL_PROTOCOLS">
<RegistrySearch Id="CheckCuraProtocolHandler"
Type="raw"
Root="HKCR"
Key="cura"
Name="URL Protocol"
/>
<RegistrySearch Id="CheckSlicerProtocolHandler"
Type="raw"
Root="HKCR"
Key="slicer"
Name="URL Protocol"
/>
</Property>
{% if "Enterprise" in app_name %}
<Property Id="PREVIOUS_413_INSTALLED" Secure="yes" />
<Upgrade Id="53C603BB-2B17-4206-A609-29C2E0D0B0AE">
@ -144,11 +159,32 @@
</Component>
</DirectoryRef>
<!--Url Scheme-->
<Component Id="CuraRegistration" Guid="*" Directory="APPLICATIONFOLDER">
<RegistryKey Root="HKCR" Key="cura">
<RegistryValue Type="string" Value="URL:Cura Protocol"/>
<RegistryValue Type="string" Name="URL Protocol" Value=""/>
<RegistryValue Type="string" Key="DefaultIcon" Value="[APPLICATIONFOLDER]\{{ main_app }},1"/>
<RegistryValue Type="string" Key="shell\open\command" Value="&quot;[APPLICATIONFOLDER]\{{ main_app }}&quot; &quot;%1&quot;"/>
</RegistryKey>
</Component>
<Component Id="SlicerRegistration" Guid="*" Directory="APPLICATIONFOLDER">
<RegistryKey Root="HKCR" Key="slicer">
<RegistryValue Type="string" Value="URL:Slicer Protocol"/>
<RegistryValue Type="string" Name="URL Protocol" Value=""/>
<RegistryValue Type="string" Key="DefaultIcon" Value="[APPLICATIONFOLDER]\{{ main_app }},1"/>
<RegistryValue Type="string" Key="shell\open\command" Value="&quot;[APPLICATIONFOLDER]\{{ main_app }}&quot; &quot;%1&quot;"/>
</RegistryKey>
</Component>
<Feature Id="ProductFeature" Title="{{ app_name }}" Level="1" ConfigurableDirectory="APPLICATIONFOLDER">
<ComponentRef Id="CMP_UltiMaker_Cura_exe" />
<ComponentRef Id="CMP_CuraEngine_exe" />
<ComponentGroupRef Id="NewFilesGroup" />
<ComponentRef Id="CMP_Shortcuts" />
<ComponentRef Id="CuraRegistration"/>
<ComponentRef Id="SlicerRegistration"/>
</Feature>
<Feature Id="UninstallOlderVersionFeature" Title="Uninstall previous versions" Level="{{ 1 if "Enterprise" in app_name else 0 }}" Description="..."/>
</Product>

View File

@ -0,0 +1,62 @@
# Copyright (c) 2024 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
from PyQt6.QtCore import Qt, pyqtSignal
from UM import i18nCatalog
from UM.Logger import Logger
from UM.Settings.SettingDefinition import SettingDefinition
from UM.Qt.ListModel import ListModel
class SpecificSettingsModel(ListModel):
CategoryRole = Qt.ItemDataRole.UserRole + 1
LabelRole = Qt.ItemDataRole.UserRole + 2
ValueRole = Qt.ItemDataRole.UserRole + 3
def __init__(self, parent = None):
super().__init__(parent = parent)
self.addRoleName(self.CategoryRole, "category")
self.addRoleName(self.LabelRole, "label")
self.addRoleName(self.ValueRole, "value")
self._settings_catalog = i18nCatalog("fdmprinter.def.json")
self._update()
modelChanged = pyqtSignal()
def addSettingsFromStack(self, stack, category, settings):
for setting, value in settings.items():
unit = stack.getProperty(setting, "unit")
setting_type = stack.getProperty(setting, "type")
if setting_type is not None:
# This is not very good looking, but will do for now
value = str(SettingDefinition.settingValueToString(setting_type, value))
if unit:
value += " " + str(unit)
if setting_type == "enum":
options = stack.getProperty(setting, "options")
value_msgctxt = f"{str(setting)} option {str(value)}"
value_msgid = options[stack.getProperty(setting, "value")]
value = self._settings_catalog.i18nc(value_msgctxt, value_msgid)
else:
value = str(value)
label_msgctxt = f"{str(setting)} label"
label_msgid = stack.getProperty(setting, "label")
label = self._settings_catalog.i18nc(label_msgctxt, label_msgid)
self.appendItem({
"category": category,
"label": label,
"value": value
})
self.modelChanged.emit()
def _update(self):
Logger.debug(f"Updating {self.__class__.__name__}")
self.setItems([])
self.modelChanged.emit()
return
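As a usage sketch: the workspace reader later in this commit feeds this model with the per-stack overrides parsed from Cura/user-settings.json. Inside a running Cura session that looks roughly like the following; the override values are hypothetical:

    from cura.CuraApplication import CuraApplication

    model = SpecificSettingsModel()
    global_stack = CuraApplication.getInstance().getGlobalContainerStack()
    # Category label, then a mapping of setting keys to the exported values.
    model.addSettingsFromStack(global_stack, "Global", {
        "infill_sparse_density": 20,
        "adhesion_type": "brim",
    })
    for item in model.items:
        print(item["category"], item["label"], item["value"])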

View File

@ -16,6 +16,7 @@ from UM.Mesh.MeshReader import MeshReader
from UM.MimeTypeDatabase import MimeTypeDatabase, MimeType
from UM.Scene.GroupDecorator import GroupDecorator
from UM.Scene.SceneNode import SceneNode # For typing.
from UM.Scene.SceneNodeSettings import SceneNodeSettings
from cura.CuraApplication import CuraApplication
from cura.Machines.ContainerTree import ContainerTree
from cura.Scene.BuildPlateDecorator import BuildPlateDecorator
@ -41,7 +42,7 @@ class ThreeMFReader(MeshReader):
MimeTypeDatabase.addMimeType(
MimeType(
name = "application/vnd.ms-package.3dmanufacturing-3dmodel+xml",
name="application/vnd.ms-package.3dmanufacturing-3dmodel+xml",
comment="3MF",
suffixes=["3mf"]
)
@ -177,6 +178,12 @@ class ThreeMFReader(MeshReader):
else:
Logger.log("w", "Unable to find extruder in position %s", setting_value)
continue
if key == "print_order":
um_node.printOrder = int(setting_value)
continue
if key =="drop_to_buildplate":
um_node.setSetting(SceneNodeSettings.AutoDropDown, eval(setting_value))
continue
if key in known_setting_keys:
setting_container.setProperty(key, "value", setting_value)
else:
@ -233,8 +240,7 @@ class ThreeMFReader(MeshReader):
if mesh_data is not None:
extents = mesh_data.getExtents()
if extents is not None:
# We use a different coordinate space, so flip Z and Y
center_vector = Vector(extents.center.x, extents.center.z, extents.center.y)
center_vector = Vector(extents.center.x, extents.center.y, extents.center.z)
transform_matrix.setByTranslation(center_vector)
transform_matrix.multiply(um_node.getLocalTransformation())
um_node.setTransformation(transform_matrix)

View File

@ -5,10 +5,13 @@ from configparser import ConfigParser
import zipfile
import os
import json
import re
from typing import cast, Dict, List, Optional, Tuple, Any, Set
import xml.etree.ElementTree as ET
from UM.Math.AxisAlignedBox import AxisAlignedBox
from UM.Math.Vector import Vector
from UM.Util import parseBool
from UM.Workspace.WorkspaceReader import WorkspaceReader
from UM.Application import Application
@ -57,6 +60,7 @@ _ignored_machine_network_metadata: Set[str] = {
"is_abstract_machine"
}
USER_SETTINGS_PATH = "Cura/user-settings.json"
class ContainerInfo:
def __init__(self, file_name: Optional[str], serialized: Optional[str], parser: Optional[ConfigParser]) -> None:
@ -115,6 +119,7 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
self._supported_extensions = [".3mf"]
self._dialog = WorkspaceDialog()
self._3mf_mesh_reader = None
self._is_ucp = None
self._container_registry = ContainerRegistry.getInstance()
# suffixes registered with the MimeTypes don't start with a dot '.'
@ -141,10 +146,16 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
self._old_new_materials: Dict[str, str] = {}
self._machine_info = None
self._user_settings: Dict[str, Dict[str, Any]] = {}
def _clearState(self):
self._id_mapping = {}
self._old_new_materials = {}
self._machine_info = None
self._user_settings = {}
def clearOpenAsUcp(self):
self._is_ucp = None
def getNewId(self, old_id: str):
"""Get a unique name based on the old_id. This is different from directly calling the registry in that it caches results.
@ -200,6 +211,16 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
return global_stack_file_list[0], extruder_stack_file_list
def _isProjectUcp(self, file_name) -> bool:
if self._is_ucp is None:
archive = zipfile.ZipFile(file_name, "r")
cura_file_names = [name for name in archive.namelist() if name.startswith("Cura/")]
self._is_ucp = USER_SETTINGS_PATH in cura_file_names
def getIsProjectUcp(self) -> bool:
return self._is_ucp
def preRead(self, file_name, show_dialog=True, *args, **kwargs):
"""Read some info so we can make decisions
@ -208,7 +229,7 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
we don't want to show a dialog.
"""
self._clearState()
self._isProjectUcp(file_name)
self._3mf_mesh_reader = Application.getInstance().getMeshFileHandler().getReaderForFile(file_name)
if self._3mf_mesh_reader and self._3mf_mesh_reader.preRead(file_name) == WorkspaceReader.PreReadResult.accepted:
pass
@ -228,11 +249,14 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
self._resolve_strategies = {k: None for k in resolve_strategy_keys}
containers_found_dict = {k: False for k in resolve_strategy_keys}
# Check whether the file is a UCP, which changes some import options
is_ucp = USER_SETTINGS_PATH in cura_file_names
#
# Read definition containers
#
machine_definition_id = None
updatable_machines = []
updatable_machines = None if self._is_ucp else []
machine_definition_container_count = 0
extruder_definition_container_count = 0
definition_container_files = [name for name in cura_file_names if name.endswith(self._definition_container_suffix)]
@ -250,7 +274,7 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
if definition_container_type == "machine":
machine_definition_id = container_id
machine_definition_containers = self._container_registry.findDefinitionContainers(id = machine_definition_id)
if machine_definition_containers:
if machine_definition_containers and updatable_machines is not None:
updatable_machines = [machine for machine in self._container_registry.findContainerStacks(type = "machine") if machine.definition == machine_definition_containers[0]]
machine_type = definition_container["name"]
variant_type_name = definition_container.get("variants_name", variant_type_name)
@ -597,6 +621,39 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
package_metadata = self._parse_packages_metadata(archive)
missing_package_metadata = self._filter_missing_package_metadata(package_metadata)
# Load the user specifically exported settings
self._dialog.exportedSettingModel.clear()
self._dialog.setCurrentMachineName("")
if self._is_ucp:
try:
self._user_settings = json.loads(archive.open("Cura/user-settings.json").read().decode("utf-8"))
any_extruder_stack = ExtruderManager.getInstance().getExtruderStack(0)
actual_global_stack = CuraApplication.getInstance().getGlobalContainerStack()
self._dialog.setCurrentMachineName(actual_global_stack.id)
for stack_name, settings in self._user_settings.items():
if stack_name == 'global':
self._dialog.exportedSettingModel.addSettingsFromStack(actual_global_stack, i18n_catalog.i18nc("@label", "Global"), settings)
else:
extruder_match = re.fullmatch('extruder_([0-9]+)', stack_name)
if extruder_match is not None:
extruder_nr = int(extruder_match.group(1))
self._dialog.exportedSettingModel.addSettingsFromStack(any_extruder_stack,
i18n_catalog.i18nc("@label",
"Extruder {0}", extruder_nr + 1),
settings)
except KeyError as e:
# If the user-settings file cannot be read, the project is not a valid UCP, so notify the user of the failure.
Logger.log("w", "File %s is not a valid UCP.", file_name)
message = Message(
i18n_catalog.i18nc("@info:error Don't translate the XML tags <filename> or <message>!",
"Project file <filename>{0}</filename> is corrupt: <message>{1}</message>.",
file_name, str(e)),
title=i18n_catalog.i18nc("@info:title", "Can't Open Project File"),
message_type=Message.MessageType.ERROR)
message.show()
return WorkspaceReader.PreReadResult.failed
# Show the dialog, informing the user what is about to happen.
self._dialog.setMachineConflict(machine_conflict)
self._dialog.setIsPrinterGroup(is_printer_group)
@ -606,7 +663,7 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
self._dialog.setNumVisibleSettings(num_visible_settings)
self._dialog.setQualityName(quality_name)
self._dialog.setQualityType(quality_type)
self._dialog.setIntentName(intent_name)
self._dialog.setIntentName(intent_category)
self._dialog.setNumSettingsOverriddenByQualityChanges(num_settings_overridden_by_quality_changes)
self._dialog.setNumUserSettings(num_user_settings)
self._dialog.setActiveMode(active_mode)
@ -617,12 +674,15 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
self._dialog.setVariantType(variant_type_name)
self._dialog.setHasObjectsOnPlate(Application.getInstance().platformActivity)
self._dialog.setMissingPackagesMetadata(missing_package_metadata)
self._dialog.setAllowCreatemachine(not self._is_ucp)
self._dialog.setIsUcp(self._is_ucp)
self._dialog.show()
# Choosing the initially selected printer in MachineSelector
is_networked_machine = False
is_abstract_machine = False
if global_stack and isinstance(global_stack, GlobalStack):
if global_stack and isinstance(global_stack, GlobalStack) and not self._is_ucp:
# The machine included in the project file exists locally already, no need to change selected printers.
is_networked_machine = global_stack.hasNetworkedConnection()
is_abstract_machine = parseBool(existing_global_stack.getMetaDataEntry("is_abstract_machine", False))
@ -631,7 +691,7 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
elif self._dialog.updatableMachinesModel.count > 0:
# The machine included in the project file does not exist. There is another machine of the same type.
# This will always default to an abstract machine first.
machine = self._dialog.updatableMachinesModel.getItem(0)
machine = self._dialog.updatableMachinesModel.getItem(self._dialog.currentMachinePositionIndex)
machine_name = machine["name"]
is_networked_machine = machine["isNetworked"]
is_abstract_machine = machine["isAbstractMachine"]
@ -648,6 +708,7 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
self._dialog.setIsNetworkedMachine(is_networked_machine)
self._dialog.setIsAbstractMachine(is_abstract_machine)
self._dialog.setMachineName(machine_name)
self._dialog.updateCompatibleMachine()
# Block until the dialog is closed.
self._dialog.waitForClose()
@ -669,7 +730,6 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
if key not in containers_found_dict or strategy is not None:
continue
self._resolve_strategies[key] = "override" if containers_found_dict[key] else "new"
return WorkspaceReader.PreReadResult.accepted
@call_on_qt_thread
@ -690,16 +750,16 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
except EnvironmentError as e:
message = Message(i18n_catalog.i18nc("@info:error Don't translate the XML tags <filename> or <message>!",
"Project file <filename>{0}</filename> is suddenly inaccessible: <message>{1}</message>.", file_name, str(e)),
title = i18n_catalog.i18nc("@info:title", "Can't Open Project File"),
message_type = Message.MessageType.ERROR)
title = i18n_catalog.i18nc("@info:title", "Can't Open Project File"),
message_type = Message.MessageType.ERROR)
message.show()
self.setWorkspaceName("")
return [], {}
except zipfile.BadZipFile as e:
message = Message(i18n_catalog.i18nc("@info:error Don't translate the XML tags <filename> or <message>!",
"Project file <filename>{0}</filename> is corrupt: <message>{1}</message>.", file_name, str(e)),
title = i18n_catalog.i18nc("@info:title", "Can't Open Project File"),
message_type = Message.MessageType.ERROR)
title = i18n_catalog.i18nc("@info:title", "Can't Open Project File"),
message_type = Message.MessageType.ERROR)
message.show()
self.setWorkspaceName("")
return [], {}
@ -761,9 +821,9 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
# Find the machine which will be overridden
global_stacks = self._container_registry.findContainerStacks(id = self._dialog.getMachineToOverride(), type = "machine")
if not global_stacks:
message = Message(i18n_catalog.i18nc("@info:error Don't translate the XML tag <filename>!",
message = Message(i18n_catalog.i18nc("@info:error Don't translate the XML tag <filename>!",
"Project file <filename>{0}</filename> is made using profiles that are unknown to this version of UltiMaker Cura.", file_name),
message_type = Message.MessageType.ERROR)
message_type = Message.MessageType.ERROR)
message.show()
self.setWorkspaceName("")
return [], {}
@ -777,84 +837,86 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
for stack in extruder_stacks:
stack.setNextStack(global_stack, connect_signals = False)
Logger.log("d", "Workspace loading is checking definitions...")
# Get all the definition files & check if they exist. If not, add them.
definition_container_files = [name for name in cura_file_names if name.endswith(self._definition_container_suffix)]
for definition_container_file in definition_container_files:
container_id = self._stripFileToId(definition_container_file)
if not self._is_ucp:
Logger.log("d", "Workspace loading is checking definitions...")
# Get all the definition files & check if they exist. If not, add them.
definition_container_files = [name for name in cura_file_names if name.endswith(self._definition_container_suffix)]
for definition_container_file in definition_container_files:
container_id = self._stripFileToId(definition_container_file)
definitions = self._container_registry.findDefinitionContainersMetadata(id = container_id)
if not definitions:
definition_container = DefinitionContainer(container_id)
try:
definition_container.deserialize(archive.open(definition_container_file).read().decode("utf-8"),
file_name = definition_container_file)
except ContainerFormatError:
# We cannot just skip the definition file because everything else later will just break if the
# machine definition cannot be found.
Logger.logException("e", "Failed to deserialize definition file %s in project file %s",
definition_container_file, file_name)
definition_container = self._container_registry.findDefinitionContainers(id = "fdmprinter")[0] #Fall back to defaults.
self._container_registry.addContainer(definition_container)
Job.yieldThread()
QCoreApplication.processEvents() # Ensure that the GUI does not freeze.
Logger.log("d", "Workspace loading is checking materials...")
# Get all the material files and check if they exist. If not, add them.
xml_material_profile = self._getXmlProfileClass()
if self._material_container_suffix is None:
self._material_container_suffix = ContainerRegistry.getMimeTypeForContainer(xml_material_profile).suffixes[0]
if xml_material_profile:
material_container_files = [name for name in cura_file_names if name.endswith(self._material_container_suffix)]
for material_container_file in material_container_files:
to_deserialize_material = False
container_id = self._stripFileToId(material_container_file)
need_new_name = False
materials = self._container_registry.findInstanceContainers(id = container_id)
if not materials:
# No material found, deserialize this material later and add it
to_deserialize_material = True
else:
material_container = materials[0]
old_material_root_id = material_container.getMetaDataEntry("base_file")
if old_material_root_id is not None and not self._container_registry.isReadOnly(old_material_root_id): # Only create new materials if they are not read only.
to_deserialize_material = True
if self._resolve_strategies["material"] == "override":
# Remove the old materials and then deserialize the one from the project
root_material_id = material_container.getMetaDataEntry("base_file")
application.getContainerRegistry().removeContainer(root_material_id)
elif self._resolve_strategies["material"] == "new":
# Note that we *must* deserialize it with a new ID, as multiple containers will be
# auto created & added.
container_id = self.getNewId(container_id)
self._old_new_materials[old_material_root_id] = container_id
need_new_name = True
if to_deserialize_material:
material_container = xml_material_profile(container_id)
definitions = self._container_registry.findDefinitionContainersMetadata(id = container_id)
if not definitions:
definition_container = DefinitionContainer(container_id)
try:
material_container.deserialize(archive.open(material_container_file).read().decode("utf-8"),
file_name = container_id + "." + self._material_container_suffix)
definition_container.deserialize(archive.open(definition_container_file).read().decode("utf-8"),
file_name = definition_container_file)
except ContainerFormatError:
Logger.logException("e", "Failed to deserialize material file %s in project file %s",
material_container_file, file_name)
continue
if need_new_name:
new_name = ContainerRegistry.getInstance().uniqueName(material_container.getName())
material_container.setName(new_name)
material_container.setDirty(True)
self._container_registry.addContainer(material_container)
# We cannot just skip the definition file because everything else later will just break if the
# machine definition cannot be found.
Logger.logException("e", "Failed to deserialize definition file %s in project file %s",
definition_container_file, file_name)
definition_container = self._container_registry.findDefinitionContainers(id = "fdmprinter")[0] #Fall back to defaults.
self._container_registry.addContainer(definition_container)
Job.yieldThread()
QCoreApplication.processEvents() # Ensure that the GUI does not freeze.
if global_stack:
# Handle quality changes if any
self._processQualityChanges(global_stack)
Logger.log("d", "Workspace loading is checking materials...")
# Get all the material files and check if they exist. If not, add them.
xml_material_profile = self._getXmlProfileClass()
if self._material_container_suffix is None:
self._material_container_suffix = ContainerRegistry.getMimeTypeForContainer(xml_material_profile).suffixes[0]
if xml_material_profile:
material_container_files = [name for name in cura_file_names if name.endswith(self._material_container_suffix)]
for material_container_file in material_container_files:
to_deserialize_material = False
container_id = self._stripFileToId(material_container_file)
need_new_name = False
materials = self._container_registry.findInstanceContainers(id = container_id)
# Prepare the machine
self._applyChangesToMachine(global_stack, extruder_stack_dict)
if not materials:
# No material found, deserialize this material later and add it
to_deserialize_material = True
else:
material_container = materials[0]
old_material_root_id = material_container.getMetaDataEntry("base_file")
if old_material_root_id is not None and not self._container_registry.isReadOnly(old_material_root_id): # Only create new materials if they are not read only.
to_deserialize_material = True
if self._resolve_strategies["material"] == "override":
# Remove the old materials and then deserialize the one from the project
root_material_id = material_container.getMetaDataEntry("base_file")
application.getContainerRegistry().removeContainer(root_material_id)
elif self._resolve_strategies["material"] == "new":
# Note that we *must* deserialize it with a new ID, as multiple containers will be
# auto created & added.
container_id = self.getNewId(container_id)
self._old_new_materials[old_material_root_id] = container_id
need_new_name = True
if to_deserialize_material:
material_container = xml_material_profile(container_id)
try:
material_container.deserialize(archive.open(material_container_file).read().decode("utf-8"),
file_name = container_id + "." + self._material_container_suffix)
except ContainerFormatError:
Logger.logException("e", "Failed to deserialize material file %s in project file %s",
material_container_file, file_name)
continue
if need_new_name:
new_name = ContainerRegistry.getInstance().uniqueName(material_container.getName())
material_container.setName(new_name)
material_container.setDirty(True)
self._container_registry.addContainer(material_container)
Job.yieldThread()
QCoreApplication.processEvents() # Ensure that the GUI does not freeze.
if global_stack:
if not self._is_ucp:
# Handle quality changes if any
self._processQualityChanges(global_stack)
# Prepare the machine
self._applyChangesToMachine(global_stack, extruder_stack_dict)
Logger.log("d", "Workspace loading is notifying rest of the code of changes...")
# Actually change the active machine.
@ -864,16 +926,40 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
# function is running on the main thread (Qt thread), although those "changed" signals have been emitted, but
# they won't take effect until this function is done.
# To solve this, we schedule _updateActiveMachine() for later so it will have the latest data.
self._updateActiveMachine(global_stack)
if self._is_ucp:
# Now we have switched, apply the user settings
self._applyUserSettings(global_stack, extruder_stack_dict, self._user_settings)
# Load all the nodes / mesh data of the workspace
nodes = self._3mf_mesh_reader.read(file_name)
if nodes is None:
nodes = []
if self._is_ucp:
# We might be on a different printer than the one this project was made on.
# The offset to the printers' center isn't saved; instead, try to just fit everything on the buildplate.
full_extents = None
for node in nodes:
extents = node.getMeshData().getExtents() if node.getMeshData() else None
if extents is not None:
pos = node.getPosition()
node_box = AxisAlignedBox(extents.minimum + pos, extents.maximum + pos)
if full_extents is None:
full_extents = node_box
else:
full_extents = full_extents + node_box
if full_extents and full_extents.isValid():
for node in nodes:
pos = node.getPosition()
node.setPosition(Vector(pos.x - full_extents.center.x, pos.y, pos.z - full_extents.center.z))
base_file_name = os.path.basename(file_name)
self.setWorkspaceName(base_file_name)
self._is_ucp = None
return nodes, self._loadMetadata(file_name)
@staticmethod
@ -1159,7 +1245,7 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
node = machine_node.variants.get(machine_node.preferred_variant_name, next(iter(machine_node.variants.values())))
else:
variant_name = extruder_info.variant_info.parser["general"]["name"]
node = ContainerTree.getInstance().machines[global_stack.definition.getId()].variants[variant_name]
node = ContainerTree.getInstance().machines[global_stack.definition.getId()].variants.get(variant_name, next(iter(machine_node.variants.values())))
extruder_stack.variant = node.container
def _applyMaterials(self, global_stack, extruder_stack_dict):
@ -1174,24 +1260,50 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
root_material_id = extruder_info.root_material_id
root_material_id = self._old_new_materials.get(root_material_id, root_material_id)
material_node = machine_node.variants[extruder_stack.variant.getName()].materials[root_material_id]
available_materials = machine_node.variants[extruder_stack.variant.getName()].materials
if root_material_id not in available_materials:
continue
material_node = available_materials[root_material_id]
extruder_stack.material = material_node.container
def _applyChangesToMachine(self, global_stack, extruder_stack_dict):
# Clear all first
def _clearMachineSettings(self, global_stack, extruder_stack_dict):
self._clearStack(global_stack)
for extruder_stack in extruder_stack_dict.values():
self._clearStack(extruder_stack)
self._quality_changes_to_apply = None
self._quality_type_to_apply = None
self._intent_category_to_apply = None
self._user_settings_to_apply = None
def _applyUserSettings(self, global_stack, extruder_stack_dict, user_settings):
for stack_name, settings in user_settings.items():
if stack_name == 'global':
ThreeMFWorkspaceReader._applyUserSettingsOnStack(global_stack, settings)
else:
extruder_match = re.fullmatch('extruder_([0-9]+)', stack_name)
if extruder_match is not None:
extruder_nr = extruder_match.group(1)
if extruder_nr in extruder_stack_dict:
ThreeMFWorkspaceReader._applyUserSettingsOnStack(extruder_stack_dict[extruder_nr], settings)
@staticmethod
def _applyUserSettingsOnStack(stack, user_settings):
user_settings_container = stack.userChanges
for setting_to_import, setting_value in user_settings.items():
user_settings_container.setProperty(setting_to_import, 'value', setting_value)
def _applyChangesToMachine(self, global_stack, extruder_stack_dict):
# Clear all first
self._clearMachineSettings(global_stack, extruder_stack_dict)
self._applyDefinitionChanges(global_stack, extruder_stack_dict)
self._applyUserChanges(global_stack, extruder_stack_dict)
self._applyVariants(global_stack, extruder_stack_dict)
self._applyMaterials(global_stack, extruder_stack_dict)
# prepare the quality to select
self._quality_changes_to_apply = None
self._quality_type_to_apply = None
self._intent_category_to_apply = None
if self._machine_info.quality_changes_info is not None:
self._quality_changes_to_apply = self._machine_info.quality_changes_info.name
else:
@ -1229,39 +1341,40 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
machine_manager.setActiveMachine(global_stack.getId())
# Set metadata fields that are missing from the global stack
for key, value in self._machine_info.metadata_dict.items():
if key not in global_stack.getMetaData() and key not in _ignored_machine_network_metadata:
global_stack.setMetaDataEntry(key, value)
if not self._is_ucp:
for key, value in self._machine_info.metadata_dict.items():
if key not in global_stack.getMetaData() and key not in _ignored_machine_network_metadata:
global_stack.setMetaDataEntry(key, value)
if self._quality_changes_to_apply:
quality_changes_group_list = container_tree.getCurrentQualityChangesGroups()
quality_changes_group = next((qcg for qcg in quality_changes_group_list if qcg.name == self._quality_changes_to_apply), None)
if not quality_changes_group:
Logger.log("e", "Could not find quality_changes [%s]", self._quality_changes_to_apply)
return
machine_manager.setQualityChangesGroup(quality_changes_group, no_dialog = True)
else:
self._quality_type_to_apply = self._quality_type_to_apply.lower() if self._quality_type_to_apply else None
quality_group_dict = container_tree.getCurrentQualityGroups()
if self._quality_type_to_apply in quality_group_dict:
quality_group = quality_group_dict[self._quality_type_to_apply]
if self._quality_changes_to_apply is not None:
quality_changes_group_list = container_tree.getCurrentQualityChangesGroups()
quality_changes_group = next((qcg for qcg in quality_changes_group_list if qcg.name == self._quality_changes_to_apply), None)
if not quality_changes_group:
Logger.log("e", "Could not find quality_changes [%s]", self._quality_changes_to_apply)
return
machine_manager.setQualityChangesGroup(quality_changes_group, no_dialog = True)
else:
Logger.log("i", "Could not find quality type [%s], switch to default", self._quality_type_to_apply)
preferred_quality_type = global_stack.getMetaDataEntry("preferred_quality_type")
quality_group = quality_group_dict.get(preferred_quality_type)
if quality_group is None:
Logger.log("e", "Could not get preferred quality type [%s]", preferred_quality_type)
if quality_group is not None:
machine_manager.setQualityGroup(quality_group, no_dialog = True)
# Also apply intent if available
available_intent_category_list = IntentManager.getInstance().currentAvailableIntentCategories()
if self._intent_category_to_apply is not None and self._intent_category_to_apply in available_intent_category_list:
machine_manager.setIntentByCategory(self._intent_category_to_apply)
self._quality_type_to_apply = self._quality_type_to_apply.lower() if self._quality_type_to_apply else None
quality_group_dict = container_tree.getCurrentQualityGroups()
if self._quality_type_to_apply in quality_group_dict:
quality_group = quality_group_dict[self._quality_type_to_apply]
else:
# if no intent is provided, reset to the default (balanced) intent
machine_manager.resetIntents()
Logger.log("i", "Could not find quality type [%s], switch to default", self._quality_type_to_apply)
preferred_quality_type = global_stack.getMetaDataEntry("preferred_quality_type")
quality_group = quality_group_dict.get(preferred_quality_type)
if quality_group is None:
Logger.log("e", "Could not get preferred quality type [%s]", preferred_quality_type)
if quality_group is not None:
machine_manager.setQualityGroup(quality_group, no_dialog = True)
# Also apply intent if available
available_intent_category_list = IntentManager.getInstance().currentAvailableIntentCategories()
if self._intent_category_to_apply is not None and self._intent_category_to_apply in available_intent_category_list:
machine_manager.setIntentByCategory(self._intent_category_to_apply)
else:
# if no intent is provided, reset to the default (balanced) intent
machine_manager.resetIntents()
# Notify everything/one that is to notify about changes.
global_stack.containersChanged.emit(global_stack.getTop())
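To summarize the Universal Cura Project (UCP) convention this reader relies on: a 3MF project is treated as a UCP when the archive contains Cura/user-settings.json, whose top-level keys are "global" and "extruder_<n>", each mapping setting keys to values. A standalone sketch of that detection and parsing (the example dictionary at the end is hypothetical):

    import json
    import zipfile

    USER_SETTINGS_PATH = "Cura/user-settings.json"

    def read_ucp_user_settings(file_name: str):
        """Return the UCP user settings from a 3MF project, or None if it is a regular project."""
        with zipfile.ZipFile(file_name, "r") as archive:
            if USER_SETTINGS_PATH not in archive.namelist():
                return None  # a regular project file, not a UCP
            return json.loads(archive.read(USER_SETTINGS_PATH).decode("utf-8"))

    # Possible return value (illustrative only):
    # {"global": {"adhesion_type": "brim"}, "extruder_0": {"infill_sparse_density": 20}}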

View File

@ -6,6 +6,7 @@ from PyQt6.QtGui import QDesktopServices
from typing import List, Optional, Dict, cast
from cura.Machines.Models.MachineListModel import MachineListModel
from cura.Machines.Models.IntentTranslations import intent_translations
from cura.Settings.GlobalStack import GlobalStack
from UM.Application import Application
from UM.FlameProfiler import pyqtSlot
@ -21,6 +22,8 @@ import time
from cura.CuraApplication import CuraApplication
from .SpecificSettingsModel import SpecificSettingsModel
i18n_catalog = i18nCatalog("cura")
@ -60,16 +63,23 @@ class WorkspaceDialog(QObject):
self._machine_name = ""
self._machine_type = ""
self._variant_type = ""
self._current_machine_name = ""
self._material_labels = []
self._extruders = []
self._objects_on_plate = False
self._is_printer_group = False
self._updatable_machines_model = MachineListModel(self, listenToChanges=False)
self._updatable_machines_model = MachineListModel(self, listenToChanges = False, showCloudPrinters = True)
self._missing_package_metadata: List[Dict[str, str]] = []
self._plugin_registry: PluginRegistry = CuraApplication.getInstance().getPluginRegistry()
self._install_missing_package_dialog: Optional[QObject] = None
self._is_abstract_machine = False
self._is_networked_machine = False
self._is_compatible_machine = False
self._allow_create_machine = True
self._exported_settings_model = SpecificSettingsModel()
self._exported_settings_model.modelChanged.connect(self.exportedSettingModelChanged.emit)
self._current_machine_pos_index = 0
self._is_ucp = False
machineConflictChanged = pyqtSignal()
qualityChangesConflictChanged = pyqtSignal()
@ -93,6 +103,9 @@ class WorkspaceDialog(QObject):
extrudersChanged = pyqtSignal()
isPrinterGroupChanged = pyqtSignal()
missingPackagesChanged = pyqtSignal()
isCompatibleMachineChanged = pyqtSignal()
isUcpChanged = pyqtSignal()
exportedSettingModelChanged = pyqtSignal()
@pyqtProperty(bool, notify = isPrinterGroupChanged)
def isPrinterGroup(self) -> bool:
@ -165,8 +178,30 @@ class WorkspaceDialog(QObject):
self._machine_name = machine_name
self.machineNameChanged.emit()
def setCurrentMachineName(self, machine: str) -> None:
self._current_machine_name = machine
@pyqtProperty(str, notify = machineNameChanged)
def currentMachineName(self) -> str:
return self._current_machine_name
@staticmethod
def getIndexOfCurrentMachine(list_of_dicts, key, value, defaultIndex):
for i, d in enumerate(list_of_dicts):
if d.get(key) == value: # found the dictionary
return i
return defaultIndex
@pyqtProperty(int, notify = machineNameChanged)
def currentMachinePositionIndex(self):
return self._current_machine_pos_index
@pyqtProperty(QObject, notify = updatableMachinesChanged)
def updatableMachinesModel(self) -> MachineListModel:
if self._current_machine_name != "":
self._current_machine_pos_index = self.getIndexOfCurrentMachine(self._updatable_machines_model.getItems(), "id", self._current_machine_name, defaultIndex = 0)
else:
self._current_machine_pos_index = 0
return cast(MachineListModel, self._updatable_machines_model)
def setUpdatableMachines(self, updatable_machines: List[GlobalStack]) -> None:
@ -223,7 +258,14 @@ class WorkspaceDialog(QObject):
def setIntentName(self, intent_name: str) -> None:
if self._intent_name != intent_name:
self._intent_name = intent_name
try:
self._intent_name = intent_translations[intent_name]["name"]
except KeyError:
self._intent_name = intent_name.title()
self.intentNameChanged.emit()
if not self._intent_name:
self._intent_name = intent_translations["default"]["name"]
self.intentNameChanged.emit()
@pyqtProperty(str, notify=activeModeChanged)
@ -284,7 +326,49 @@ class WorkspaceDialog(QObject):
@pyqtSlot(str)
def setMachineToOverride(self, machine_name: str) -> None:
self._override_machine = machine_name
self.updateCompatibleMachine()
def updateCompatibleMachine(self):
registry = ContainerRegistry.getInstance()
containers_expected = registry.findDefinitionContainers(name=self._machine_type)
containers_selected = registry.findContainerStacks(id=self._override_machine)
if len(containers_expected) == 1 and len(containers_selected) == 1:
new_compatible_machine = (containers_expected[0] == containers_selected[0].definition)
if new_compatible_machine != self._is_compatible_machine:
self._is_compatible_machine = new_compatible_machine
self.isCompatibleMachineChanged.emit()
@pyqtProperty(bool, notify = isCompatibleMachineChanged)
def isCompatibleMachine(self) -> bool:
return self._is_compatible_machine
def setIsUcp(self, isUcp: bool) -> None:
if isUcp != self._is_ucp:
self._is_ucp = isUcp
self.isUcpChanged.emit()
@pyqtProperty(bool, notify=isUcpChanged)
def isUcp(self):
return self._is_ucp
def setAllowCreatemachine(self, allow_create_machine):
self._allow_create_machine = allow_create_machine
@pyqtProperty(bool, constant = True)
def allowCreateMachine(self):
return self._allow_create_machine
@pyqtProperty(QObject, notify=exportedSettingModelChanged)
def exportedSettingModel(self):
return self._exported_settings_model
@pyqtProperty("QVariantList", notify=exportedSettingModelChanged)
def exportedSettingModelItems(self):
return self._exported_settings_model.items
@pyqtProperty(int, notify=exportedSettingModelChanged)
def exportedSettingModelRowCount(self):
return self._exported_settings_model.rowCount()
@pyqtSlot()
def closeBackend(self) -> None:
"""Close the backend: otherwise one could end up with "Slicing..."""

View File

@ -6,13 +6,13 @@ import QtQuick.Controls 2.3
import QtQuick.Layouts 1.3
import QtQuick.Window 2.2
import UM 1.5 as UM
import UM 1.6 as UM
import Cura 1.1 as Cura
UM.Dialog
{
id: workspaceDialog
title: catalog.i18nc("@title:window", "Open Project")
title: manager.isUcp? catalog.i18nc("@title:window Don't translate 'Universal Cura Project'", "Open Universal Cura Project (UCP)"): catalog.i18nc("@title:window", "Open Project")
margin: UM.Theme.getSize("default_margin").width
minimumWidth: UM.Theme.getSize("modal_window_minimum").width
@ -24,16 +24,34 @@ UM.Dialog
{
height: childrenRect.height + 2 * UM.Theme.getSize("default_margin").height
color: UM.Theme.getColor("main_background")
UM.Label
ColumnLayout
{
id: titleLabel
text: catalog.i18nc("@action:title", "Summary - Cura Project")
font: UM.Theme.getFont("large")
id: headerColumn
anchors.top: parent.top
anchors.left: parent.left
anchors.right: parent.right
anchors.topMargin: UM.Theme.getSize("default_margin").height
anchors.leftMargin: UM.Theme.getSize("default_margin").height
anchors.leftMargin: UM.Theme.getSize("default_margin").width
anchors.rightMargin: anchors.leftMargin
RowLayout
{
UM.Label
{
id: titleLabel
text: manager.isUcp? catalog.i18nc("@action:title Don't translate 'Universal Cura Project'", "Summary - Open Universal Cura Project (UCP)"): catalog.i18nc("@action:title", "Summary - Cura Project")
font: UM.Theme.getFont("large")
}
Cura.TertiaryButton
{
id: learnMoreButton
visible: manager.isUcp
text: catalog.i18nc("@button", "Learn more")
iconSource: UM.Theme.getIcon("LinkExternal")
isIconOnRightSide: true
onClicked: Qt.openUrlExternally("https://support.ultimaker.com/s/article/000002979")
}
}
}
}
@ -96,7 +114,7 @@ UM.Dialog
WorkspaceRow
{
leftLabelText: catalog.i18nc("@action:label", manager.isPrinterGroup ? "Printer Group" : "Printer Name")
rightLabelText: manager.machineName == catalog.i18nc("@button", "Create new") ? "" : manager.machineName
rightLabelText: manager.isUcp ? manager.machineType : (manager.machineName == catalog.i18nc("@button", "Create new") ? "" : manager.machineName)
}
}
@ -120,13 +138,17 @@ UM.Dialog
minDropDownWidth: machineSelector.width
buttons: [
Component
{
id: componentNewPrinter
Cura.SecondaryButton
{
id: createNewPrinter
text: catalog.i18nc("@button", "Create new")
fixedWidthMode: true
width: parent.width - leftPadding * 1.5
visible: manager.allowCreateMachine
onClicked:
{
toggleContent()
@ -136,7 +158,9 @@ UM.Dialog
manager.setIsNetworkedMachine(false)
}
}
]
}
buttons: manager.allowCreateMachine ? [componentNewPrinter.createObject()] : []
onSelectPrinter: function(machine)
{
@ -152,39 +176,56 @@ UM.Dialog
WorkspaceSection
{
id: profileSection
title: catalog.i18nc("@action:label", "Profile settings")
iconSource: UM.Theme.getIcon("Sliders")
id: ucpProfileSection
visible: manager.isUcp
title: catalog.i18nc("@action:label", "Settings Loaded from UCP file")
iconSource: UM.Theme.getIcon("Settings")
content: Column
{
id: profileSettingsValuesTable
id: ucpProfileSettingsValuesTable
spacing: UM.Theme.getSize("default_margin").height
leftPadding: UM.Theme.getSize("medium_button_icon").width + UM.Theme.getSize("default_margin").width
WorkspaceRow
{
leftLabelText: catalog.i18nc("@action:label", "Name")
rightLabelText: manager.qualityName
id: numberOfOverrides
leftLabelText: catalog.i18nc("@action:label", "Settings Loaded from UCP file")
rightLabelText: catalog.i18ncp("@action:label", "%1 override", "%1 overrides", manager.exportedSettingModelRowCount).arg(manager.exportedSettingModelRowCount)
buttonText: tableViewSpecificSettings.shouldBeVisible ? catalog.i18nc("@action:button", "Hide settings") : catalog.i18nc("@action:button", "Show settings")
onButtonClicked: tableViewSpecificSettings.shouldBeVisible = !tableViewSpecificSettings.shouldBeVisible
}
WorkspaceRow
Cura.TableView
{
leftLabelText: catalog.i18nc("@action:label", "Intent")
rightLabelText: manager.intentName
}
id: tableViewSpecificSettings
width: parent.width - parent.leftPadding - UM.Theme.getSize("default_margin").width
height: UM.Theme.getSize("card").height
visible: shouldBeVisible && manager.isUcp
property bool shouldBeVisible: true
WorkspaceRow
{
leftLabelText: catalog.i18nc("@action:label", "Not in profile")
rightLabelText: catalog.i18ncp("@action:label", "%1 override", "%1 overrides", manager.numUserSettings).arg(manager.numUserSettings)
visible: manager.numUserSettings != 0
}
columnHeaders:
[
catalog.i18nc("@title:column", "Applies on"),
catalog.i18nc("@title:column", "Setting"),
catalog.i18nc("@title:column", "Value")
]
WorkspaceRow
{
leftLabelText: catalog.i18nc("@action:label", "Derivative from")
rightLabelText: catalog.i18ncp("@action:label", "%1, %2 override", "%1, %2 overrides", manager.numSettingsOverridenByQualityChanges).arg(manager.qualityType).arg(manager.numSettingsOverridenByQualityChanges)
visible: manager.numSettingsOverridenByQualityChanges != 0
model: UM.TableModel
{
id: tableModel
headers: ["category", "label", "value"]
rows: manager.exportedSettingModelItems
}
Connections
{
target: manager
function onExportedSettingModelChanged()
{
tableModel.clear()
tableModel.rows = manager.exportedSettingModelItems
}
}
}
}
@ -194,7 +235,7 @@ UM.Dialog
id: qualityChangesResolveComboBox
model: resolveStrategiesModel
textRole: "label"
visible: manager.qualityChangesConflict
visible: manager.qualityChangesConflict && !manager.isUcp
contentLeftPadding: UM.Theme.getSize("default_margin").width + UM.Theme.getSize("narrow_margin").width
textFont: UM.Theme.getFont("medium")
@ -220,10 +261,51 @@ UM.Dialog
}
}
WorkspaceSection
{
id: profileSection
title: manager.isUcp? catalog.i18nc("@action:label", "Suggested Profile settings"):catalog.i18nc("@action:label", "Profile settings")
iconSource: UM.Theme.getIcon("Sliders")
content: Column
{
id: profileSettingsValuesTable
spacing: UM.Theme.getSize("default_margin").height
leftPadding: UM.Theme.getSize("medium_button_icon").width + UM.Theme.getSize("default_margin").width
WorkspaceRow
{
leftLabelText: catalog.i18nc("@action:label", "Name")
rightLabelText: manager.qualityName
visible: manager.isCompatibleMachine
}
WorkspaceRow
{
leftLabelText: catalog.i18nc("@action:label", "Intent")
rightLabelText: manager.intentName
visible: manager.isCompatibleMachine
}
WorkspaceRow
{
leftLabelText: catalog.i18nc("@action:label", "Not in profile")
rightLabelText: catalog.i18ncp("@action:label", "%1 override", "%1 overrides", manager.numUserSettings).arg(manager.numUserSettings)
visible: manager.numUserSettings != 0 && !manager.isUcp
}
WorkspaceRow
{
leftLabelText: catalog.i18nc("@action:label", "Derivative from")
rightLabelText: catalog.i18ncp("@action:label", "%1, %2 override", "%1, %2 overrides", manager.numSettingsOverridenByQualityChanges).arg(manager.qualityType).arg(manager.numSettingsOverridenByQualityChanges)
visible: manager.numSettingsOverridenByQualityChanges != 0 && manager.isCompatibleMachine
}
}
}
WorkspaceSection
{
id: materialSection
title: catalog.i18nc("@action:label", "Material settings")
title: manager.isUcp? catalog.i18nc("@action:label", "Suggested Material settings"): catalog.i18nc("@action:label", "Material settings")
iconSource: UM.Theme.getIcon("Spool")
content: Column
{
@ -248,7 +330,7 @@ UM.Dialog
id: materialResolveComboBox
model: resolveStrategiesModel
textRole: "label"
visible: manager.materialConflict
visible: manager.materialConflict && !manager.isUcp
contentLeftPadding: UM.Theme.getSize("default_margin").width + UM.Theme.getSize("narrow_margin").width
textFont: UM.Theme.getFont("medium")
@ -279,6 +361,7 @@ UM.Dialog
id: visibilitySection
title: catalog.i18nc("@action:label", "Setting visibility")
iconSource: UM.Theme.getIcon("Eye")
visible : !manager.isUcp
content: Column
{
spacing: UM.Theme.getSize("default_margin").height
@ -416,12 +499,13 @@ UM.Dialog
{
if (visible)
{
// Force relead the comboboxes
// Force reload the comboboxes
// Since this dialog is only created once, the first time you open it, these comboboxes need to be reloaded
// each time it is shown after the first time so that the indexes will update correctly.
materialSection.reloadValues()
profileSection.reloadValues()
printerSection.reloadValues()
ucpProfileSection.reloadValues()
}
}
}

View File

@ -9,26 +9,38 @@ import QtQuick.Window 2.2
import UM 1.5 as UM
import Cura 1.1 as Cura
Row
RowLayout
{
id: root
property alias leftLabelText: leftLabel.text
property alias rightLabelText: rightLabel.text
property alias buttonText: button.text
signal buttonClicked
width: parent.width
height: visible ? childrenRect.height : 0
UM.Label
{
id: leftLabel
text: catalog.i18nc("@action:label", "Type")
width: Math.round(parent.width / 4)
Layout.preferredWidth: Math.round(parent.width / 4)
wrapMode: Text.WordWrap
}
UM.Label
{
id: rightLabel
text: manager.machineType
width: Math.round(parent.width / 3)
wrapMode: Text.WordWrap
}
Cura.TertiaryButton
{
id: button
visible: !text.isEmpty
Layout.maximumHeight: leftLabel.implicitHeight
Layout.fillWidth: true
onClicked: root.buttonClicked()
}
}

View File

@ -5,7 +5,7 @@ import QtQuick 2.10
import QtQuick.Controls 2.3
import UM 1.5 as UM
import UM 1.8 as UM
Item
@ -80,42 +80,22 @@ Item
sourceComponent: combobox
}
MouseArea
UM.HelpIcon
{
id: helpIconMouseArea
anchors.right: parent.right
anchors.verticalCenter: comboboxLabel.verticalCenter
width: childrenRect.width
height: childrenRect.height
hoverEnabled: true
UM.ColorImage
{
width: UM.Theme.getSize("section_icon").width
height: width
visible: comboboxTooltipText != ""
source: UM.Theme.getIcon("Help")
color: UM.Theme.getColor("text")
UM.ToolTip
{
text: comboboxTooltipText
visible: helpIconMouseArea.containsMouse
targetPoint: Qt.point(parent.x + Math.round(parent.width / 2), parent.y)
x: 0
y: parent.y + parent.height + UM.Theme.getSize("default_margin").height
width: UM.Theme.getSize("tooltip").width
}
}
color: UM.Theme.getColor("small_button_text")
icon: UM.Theme.getIcon("Information")
text: comboboxTooltipText
visible: comboboxTooltipText != ""
}
}
Loader
{
width: parent.width
height: content.height
z: -1
anchors.top: sectionTitleRow.bottom
sourceComponent: content
}

View File

@ -0,0 +1,48 @@
# Copyright (c) 2024 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
from PyQt6.QtCore import QObject, pyqtProperty, pyqtSignal
class SettingExport(QObject):
def __init__(self, id, name, value, value_name, selectable, show):
super().__init__()
self.id = id
self._name = name
self._value = value
self._value_name = value_name
self._selected = selectable
self._selectable = selectable
self._show_in_menu = show
@pyqtProperty(str, constant=True)
def name(self):
return self._name
@pyqtProperty(str, constant=True)
def value(self):
return self._value
@pyqtProperty(str, constant=True)
def valuename(self):
return str(self._value_name)
selectedChanged = pyqtSignal(bool)
def setSelected(self, selected):
if selected != self._selected:
self._selected = selected
self.selectedChanged.emit(self._selected)
@pyqtProperty(bool, fset = setSelected, notify = selectedChanged)
def selected(self):
return self._selected
@pyqtProperty(bool, constant=True)
def selectable(self):
return self._selectable
@pyqtProperty(bool, constant=True)
def isVisible(self):
return self._show_in_menu
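For reference, a small illustrative construction of one of these entries, mirroring what SettingsExportModel builds below (all values hypothetical):

    entry = SettingExport(
        id="infill_sparse_density",
        name="Infill Density",
        value="20",
        value_name="20 %",
        selectable=True,   # e.g. the key is in SettingsExportModel.EXPORTABLE_SETTINGS
        show=True,         # not one of the per-model mesh-type keys
    )
    entry.selected = False   # toggled from QML through the 'selected' property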

View File

@ -0,0 +1,39 @@
// Copyright (c) 2024 Ultimaker B.V.
// Cura is released under the terms of the LGPLv3 or higher.
import QtQuick 2.10
import QtQuick.Controls 2.3
import QtQuick.Layouts 1.3
import QtQuick.Window 2.2
import UM 1.8 as UM
import Cura 1.1 as Cura
RowLayout
{
id: settingSelection
UM.CheckBox
{
text: modelData.name
Layout.preferredWidth: UM.Theme.getSize("setting").width
checked: modelData.selected
onClicked: modelData.selected = checked
tooltip: modelData.selectable ? "" : catalog.i18nc("@tooltip Don't translate 'Universal Cura Project'", "This setting may not perform well while exporting to Universal Cura Project. Users are asked to add it at their own risk.")
}
UM.Label
{
text: modelData.valuename
}
UM.HelpIcon
{
UM.I18nCatalog { id: catalog; name: "cura" }
text: catalog.i18nc("@tooltip Don't translate 'Universal Cura Project'",
"This setting may not perform well while exporting to Universal Cura Project, Users are asked to add it at their own risk.")
visible: !modelData.selectable
}
}

View File

@ -0,0 +1,56 @@
# Copyright (c) 2024 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
from enum import IntEnum
from PyQt6.QtCore import QObject, pyqtProperty, pyqtEnum
class SettingsExportGroup(QObject):
@pyqtEnum
class Category(IntEnum):
Global = 0
Extruder = 1
Model = 2
def __init__(self, stack, name, category, settings, category_details = '', extruder_index = 0, extruder_color = ''):
super().__init__()
self.stack = stack
self._name = name
self._settings = settings
self._category = category
self._category_details = category_details
self._extruder_index = extruder_index
self._extruder_color = extruder_color
self._visible_settings = []
@pyqtProperty(str, constant=True)
def name(self):
return self._name
@pyqtProperty(list, constant=True)
def settings(self):
return self._settings
@pyqtProperty(list, constant=True)
def visibleSettings(self):
if self._visible_settings == []:
self._visible_settings = list(filter(lambda item : item.isVisible, self._settings))
return self._visible_settings
@pyqtProperty(int, constant=True)
def category(self):
return self._category
@pyqtProperty(str, constant=True)
def category_details(self):
return self._category_details
@pyqtProperty(int, constant=True)
def extruder_index(self):
return self._extruder_index
@pyqtProperty(str, constant=True)
def extruder_color(self):
return self._extruder_color

View File

@ -0,0 +1,150 @@
# Copyright (c) 2024 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
from dataclasses import asdict
from typing import Optional, cast, List, Dict, Pattern, Set
from PyQt6.QtCore import QObject, pyqtProperty
from UM import i18nCatalog
from UM.Settings.SettingDefinition import SettingDefinition
from UM.Settings.InstanceContainer import InstanceContainer
from UM.Settings.SettingFunction import SettingFunction
from cura.CuraApplication import CuraApplication
from cura.Settings.ExtruderManager import ExtruderManager
from cura.Settings.GlobalStack import GlobalStack
from .SettingsExportGroup import SettingsExportGroup
from .SettingExport import SettingExport
class SettingsExportModel(QObject):
EXPORTABLE_SETTINGS = {'infill_sparse_density',
'adhesion_type',
'support_enable',
'infill_pattern',
'support_type',
'support_structure',
'support_angle',
'support_infill_rate',
'ironing_enabled',
'fill_outline_gaps',
'coasting_enable',
'skin_monotonic',
'z_seam_position',
'infill_before_walls',
'ironing_only_highest_layer',
'xy_offset',
'adaptive_layer_height_enabled',
'brim_gap',
'support_offset',
'brim_location',
'magic_spiralize',
'slicing_tolerance',
'outer_inset_first',
'magic_fuzzy_skin_outside_only',
'conical_overhang_enabled',
'min_infill_area',
'small_hole_max_size',
'magic_mesh_surface_mode',
'carve_multiple_volumes',
'meshfix_union_all_remove_holes',
'support_tree_rest_preference',
'small_feature_max_length',
'draft_shield_enabled',
'brim_smart_ordering',
'ooze_shield_enabled',
'bottom_skin_preshrink',
'skin_edge_support_thickness',
'alternate_carve_order',
'top_skin_preshrink',
'interlocking_enable'}
PER_MODEL_EXPORTABLE_SETTINGS_KEYS = {"anti_overhang_mesh",
"infill_mesh",
"cutting_mesh",
"support_mesh"}
def __init__(self, parent=None):
super().__init__(parent)
self._settings_groups = []
application = CuraApplication.getInstance()
self._appendGlobalSettings(application)
self._appendExtruderSettings(application)
self._appendModelSettings(application)
def _appendGlobalSettings(self, application):
global_stack = application.getGlobalContainerStack()
self._settings_groups.append(SettingsExportGroup(
global_stack, "Global settings", SettingsExportGroup.Category.Global, self._exportSettings(global_stack)))
def _appendExtruderSettings(self, application):
extruders_stacks = ExtruderManager.getInstance().getUsedExtruderStacks()
for extruder_stack in extruders_stacks:
color = extruder_stack.material.getMetaDataEntry("color_code") if extruder_stack.material else ""
self._settings_groups.append(SettingsExportGroup(
extruder_stack, "Extruder settings", SettingsExportGroup.Category.Extruder,
self._exportSettings(extruder_stack), extruder_index=extruder_stack.position, extruder_color=color))
def _appendModelSettings(self, application):
scene = application.getController().getScene()
for scene_node in scene.getRoot().getChildren():
self._appendNodeSettings(scene_node, "Model settings", SettingsExportGroup.Category.Model)
def _appendNodeSettings(self, node, title_prefix, category):
stack = node.callDecoration("getStack")
if stack:
self._settings_groups.append(SettingsExportGroup(
stack, f"{title_prefix}", category, self._exportSettings(stack), node.getName()))
for child in node.getChildren():
self._appendNodeSettings(child, f"Children of {node.getName()}", SettingsExportGroup.Category.Model)
@pyqtProperty(list, constant=True)
def settingsGroups(self) -> List[SettingsExportGroup]:
return self._settings_groups
@staticmethod
def _exportSettings(settings_stack):
settings_catalog = i18nCatalog("fdmprinter.def.json")
user_settings_container = settings_stack.userChanges
user_keys = user_settings_container.getAllKeys()
exportable_settings = SettingsExportModel.EXPORTABLE_SETTINGS
settings_export = []
# Check whether any of the user keys exist in PER_MODEL_EXPORTABLE_SETTINGS_KEYS
is_exportable = any(key in SettingsExportModel.PER_MODEL_EXPORTABLE_SETTINGS_KEYS for key in user_keys)
for setting_to_export in user_keys:
show_in_menu = setting_to_export not in SettingsExportModel.PER_MODEL_EXPORTABLE_SETTINGS_KEYS
label_msgtxt = f"{str(setting_to_export)} label"
label_msgid = settings_stack.getProperty(setting_to_export, "label")
label = settings_catalog.i18nc(label_msgtxt, label_msgid)
value = settings_stack.getProperty(setting_to_export, "value")
unit = settings_stack.getProperty(setting_to_export, "unit")
setting_type = settings_stack.getProperty(setting_to_export, "type")
value_name = str(SettingDefinition.settingValueToString(setting_type, value))
if unit:
value_name += " " + str(unit)
if setting_type == "enum":
options = settings_stack.getProperty(setting_to_export, "options")
value_msgctxt = f"{str(setting_to_export)} option {str(value)}"
value_msgid = options.get(value, "")
value_name = settings_catalog.i18nc(value_msgctxt, value_msgid)
if setting_type is not None:
value = f"{str(SettingDefinition.settingValueToString(setting_type, value))} {unit}"
else:
value = str(value)
settings_export.append(SettingExport(setting_to_export,
label,
value,
value_name,
is_exportable or setting_to_export in exportable_settings,
show_in_menu))
return settings_export
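The selection rule applied in _exportSettings above, restated as a pure function for clarity (not part of the change itself): a user-changed key is pre-selected when the stack also carries one of the per-model mesh-type keys, or when the key is in the EXPORTABLE_SETTINGS allow-list.

    def is_preselected(setting_key: str, user_keys: set) -> bool:
        # Mirrors the is_exportable / exportable_settings check in _exportSettings above.
        per_model = SettingsExportModel.PER_MODEL_EXPORTABLE_SETTINGS_KEYS
        stack_is_special_mesh = any(key in per_model for key in user_keys)
        return stack_is_special_mesh or setting_key in SettingsExportModel.EXPORTABLE_SETTINGS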

View File

@ -0,0 +1,86 @@
// Copyright (c) 2024 Ultimaker B.V.
// Cura is released under the terms of the LGPLv3 or higher.
import QtQuick 2.10
import QtQuick.Controls 2.3
import QtQuick.Layouts 1.3
import QtQuick.Window 2.2
import UM 1.5 as UM
import Cura 1.1 as Cura
import ThreeMFWriter 1.0 as ThreeMFWriter
ColumnLayout
{
id: settingsGroup
spacing: UM.Theme.getSize("narrow_margin").width
RowLayout
{
id: settingsGroupTitleRow
spacing: UM.Theme.getSize("default_margin").width
Item
{
id: icon
height: UM.Theme.getSize("medium_button_icon").height
width: height
UM.ColorImage
{
id: settingsMainImage
anchors.fill: parent
source:
{
switch(modelData.category)
{
case ThreeMFWriter.SettingsExportGroup.Global:
return UM.Theme.getIcon("Sliders")
case ThreeMFWriter.SettingsExportGroup.Model:
return UM.Theme.getIcon("View3D")
default:
return ""
}
}
color: UM.Theme.getColor("text")
}
Cura.ExtruderIcon
{
id: settingsExtruderIcon
anchors.fill: parent
visible: modelData.category === ThreeMFWriter.SettingsExportGroup.Extruder
text: (modelData.extruder_index + 1).toString()
font: UM.Theme.getFont("tiny_emphasis")
materialColor: modelData.extruder_color
}
}
UM.Label
{
id: settingsTitle
text: modelData.name + (modelData.category_details ? ' (%1)'.arg(modelData.category_details) : '')
font: UM.Theme.getFont("default_bold")
}
}
ListView
{
id: settingsExportList
Layout.fillWidth: true
Layout.preferredHeight: contentHeight
spacing: 0
model: modelData.visibleSettings
visible: modelData.visibleSettings.length > 0
delegate: SettingSelection { }
}
UM.Label
{
UM.I18nCatalog { id: catalog; name: "cura" }
text: catalog.i18nc("@label", "No specific value has been set")
visible: modelData.visibleSettings.length === 0
}
}

View File

@ -1,9 +1,13 @@
# Copyright (c) 2020 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
from typing import Optional
import configparser
from io import StringIO
from threading import Lock
import zipfile
from typing import Dict, Any
from UM.Application import Application
from UM.Logger import Logger
@ -13,15 +17,23 @@ from UM.Workspace.WorkspaceWriter import WorkspaceWriter
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
from cura.Utils.Threading import call_on_qt_thread
from .ThreeMFWriter import ThreeMFWriter
from .SettingsExportModel import SettingsExportModel
from .SettingsExportGroup import SettingsExportGroup
USER_SETTINGS_PATH = "Cura/user-settings.json"
class ThreeMFWorkspaceWriter(WorkspaceWriter):
def __init__(self):
super().__init__()
self._ucp_model: Optional[SettingsExportModel] = None
@call_on_qt_thread
def write(self, stream, nodes, mode=WorkspaceWriter.OutputMode.BinaryMode):
def setExportModel(self, model: SettingsExportModel) -> None:
if self._ucp_model != model:
self._ucp_model = model
def _write(self, stream, nodes, mode=WorkspaceWriter.OutputMode.BinaryMode):
application = Application.getInstance()
machine_manager = application.getMachineManager()
@ -34,20 +46,20 @@ class ThreeMFWorkspaceWriter(WorkspaceWriter):
global_stack = machine_manager.activeMachine
if global_stack is None:
self.setInformation(catalog.i18nc("@error", "There is no workspace yet to write. Please add a printer first."))
self.setInformation(
catalog.i18nc("@error", "There is no workspace yet to write. Please add a printer first."))
Logger.error("Tried to write a 3MF workspace before there was a global stack.")
return False
# Indicate that the 3mf mesh writer should not close the archive just yet (we still need to add stuff to it).
mesh_writer.setStoreArchive(True)
if not mesh_writer.write(stream, nodes, mode):
if not mesh_writer.write(stream, nodes, mode, self._ucp_model):
self.setInformation(mesh_writer.getInformation())
return False
archive = mesh_writer.getArchive()
if archive is None: # This happens if there was no mesh data to write.
archive = zipfile.ZipFile(stream, "w", compression = zipfile.ZIP_DEFLATED)
archive = zipfile.ZipFile(stream, "w", compression=zipfile.ZIP_DEFLATED)
try:
# Add global container stack data to the archive.
@ -62,15 +74,21 @@ class ThreeMFWorkspaceWriter(WorkspaceWriter):
self._writeContainerToArchive(extruder_stack, archive)
for container in extruder_stack.getContainers():
self._writeContainerToArchive(container, archive)
# Write user settings data
if self._ucp_model is not None:
user_settings_data = self._getUserSettings(self._ucp_model)
ThreeMFWriter._storeMetadataJson(user_settings_data, archive, USER_SETTINGS_PATH)
except PermissionError:
self.setInformation(catalog.i18nc("@error:zip", "No permission to write the workspace here."))
Logger.error("No permission to write workspace to this stream.")
return False
# Write preferences to archive
original_preferences = Application.getInstance().getPreferences() #Copy only the preferences that we use to the workspace.
original_preferences = Application.getInstance().getPreferences() # Copy only the preferences that we use to the workspace.
temp_preferences = Preferences()
for preference in {"general/visible_settings", "cura/active_mode", "cura/categories_expanded", "metadata/setting_version"}:
for preference in {"general/visible_settings", "cura/active_mode", "cura/categories_expanded",
"metadata/setting_version"}:
temp_preferences.addPreference(preference, None)
temp_preferences.setValue(preference, original_preferences.getValue(preference))
preferences_string = StringIO()
@ -81,7 +99,7 @@ class ThreeMFWorkspaceWriter(WorkspaceWriter):
# Save Cura version
version_file = zipfile.ZipInfo("Cura/version.ini")
version_config_parser = configparser.ConfigParser(interpolation = None)
version_config_parser = configparser.ConfigParser(interpolation=None)
version_config_parser.add_section("versions")
version_config_parser.set("versions", "cura_version", application.getVersion())
version_config_parser.set("versions", "build_type", application.getBuildType())
@ -101,11 +119,17 @@ class ThreeMFWorkspaceWriter(WorkspaceWriter):
return False
except EnvironmentError as e:
self.setInformation(catalog.i18nc("@error:zip", str(e)))
Logger.error("EnvironmentError when writing workspace to this stream: {err}".format(err = str(e)))
Logger.error("EnvironmentError when writing workspace to this stream: {err}".format(err=str(e)))
return False
mesh_writer.setStoreArchive(False)
return True
def write(self, stream, nodes, mode=WorkspaceWriter.OutputMode.BinaryMode):
success = self._write(stream, nodes, mode=mode)
self._ucp_model = None
return success
@staticmethod
def _writePluginMetadataToArchive(archive: zipfile.ZipFile) -> None:
file_name_template = "%s/plugin_metadata.json"
@ -165,4 +189,27 @@ class ThreeMFWorkspaceWriter(WorkspaceWriter):
archive.writestr(file_in_archive, serialized_data)
except (FileNotFoundError, EnvironmentError):
Logger.error("File became inaccessible while writing to it: {archive_filename}".format(archive_filename = archive.fp.name))
return
return
@staticmethod
def _getUserSettings(model: SettingsExportModel) -> Dict[str, Dict[str, Any]]:
user_settings = {}
for group in model.settingsGroups:
category = ''
if group.category == SettingsExportGroup.Category.Global:
category = 'global'
elif group.category == SettingsExportGroup.Category.Extruder:
category = f"extruder_{group.extruder_index}"
if len(category) > 0:
settings_values = {}
stack = group.stack
for setting in group.settings:
if setting.selected:
settings_values[setting.id] = stack.getProperty(setting.id, "value")
user_settings[category] = settings_values
return user_settings

View File

@ -2,6 +2,7 @@
# Cura is released under the terms of the LGPLv3 or higher.
import json
import re
import threading
from typing import Optional, cast, List, Dict, Pattern, Set
@ -10,22 +11,24 @@ from UM.Math.Vector import Vector
from UM.Logger import Logger
from UM.Math.Matrix import Matrix
from UM.Application import Application
from UM.OutputDevice import OutputDeviceError
from UM.Message import Message
from UM.Resources import Resources
from UM.Scene.SceneNode import SceneNode
from UM.Settings.ContainerRegistry import ContainerRegistry
from UM.Settings.EmptyInstanceContainer import EmptyInstanceContainer
from cura.CuraApplication import CuraApplication
from cura.CuraPackageManager import CuraPackageManager
from cura.Settings import CuraContainerStack
from cura.Utils.Threading import call_on_qt_thread
from cura.Scene.CuraSceneNode import CuraSceneNode
from cura.Snapshot import Snapshot
from PyQt6.QtCore import QBuffer
from PyQt6.QtCore import Qt, QBuffer
from PyQt6.QtGui import QImage, QPainter
import pySavitar as Savitar
from .UCPDialog import UCPDialog
import numpy
import datetime
@ -40,6 +43,9 @@ except ImportError:
import zipfile
import UM.Application
from .SettingsExportModel import SettingsExportModel
from .SettingsExportGroup import SettingsExportGroup
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
@ -60,6 +66,7 @@ class ThreeMFWriter(MeshWriter):
self._unit_matrix_string = ThreeMFWriter._convertMatrixToString(Matrix())
self._archive: Optional[zipfile.ZipFile] = None
self._store_archive = False
self._lock = threading.Lock()
@staticmethod
def _convertMatrixToString(matrix):
@ -87,7 +94,9 @@ class ThreeMFWriter(MeshWriter):
self._store_archive = store_archive
@staticmethod
def _convertUMNodeToSavitarNode(um_node, transformation=Matrix()):
def _convertUMNodeToSavitarNode(um_node,
transformation = Matrix(),
exported_settings: Optional[Dict[str, Set[str]]] = None):
"""Convenience function that converts an Uranium SceneNode object to a SavitarSceneNode
:returns: Uranium Scene node.
@ -102,12 +111,20 @@ class ThreeMFWriter(MeshWriter):
savitar_node = Savitar.SceneNode()
savitar_node.setName(um_node.getName())
node_matrix = um_node.getLocalTransformation()
node_matrix = Matrix()
mesh_data = um_node.getMeshData()
# Compensate for the original center position if the object(s) are not centered around the zero position
if mesh_data is not None:
extents = mesh_data.getExtents()
if extents is not None:
# We use a different coordinate space while writing, so flip Z and Y
center_vector = Vector(extents.center.x, extents.center.z, extents.center.y)
node_matrix.setByTranslation(center_vector)
node_matrix.multiply(um_node.getLocalTransformation())
matrix_string = ThreeMFWriter._convertMatrixToString(node_matrix.preMultiply(transformation))
savitar_node.setTransformation(matrix_string)
mesh_data = um_node.getMeshData()
if mesh_data is not None:
savitar_node.getMeshData().setVerticesFromBytes(mesh_data.getVerticesAsByteArray())
indices_array = mesh_data.getIndicesAsByteArray()
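To make the center compensation above concrete, here is a rough numpy sketch of the same composition: a translation by the extents center, with Y and Z swapped for the 3MF coordinate space, followed by the node's local transformation. The center coordinates are made up, and UM.Math.Matrix is replaced by plain 4x4 arrays.

import numpy as np

def translation(x, y, z):
    # Build a 4x4 homogeneous translation matrix.
    m = np.identity(4)
    m[0:3, 3] = (x, y, z)
    return m

center = (10.0, 2.5, 5.0)  # hypothetical (x, y, z) of the mesh extents center
node_matrix = translation(center[0], center[2], center[1])  # flip Z and Y, as in the comment above
local_transformation = np.identity(4)  # stand-in for um_node.getLocalTransformation()
node_matrix = node_matrix @ local_transformation
print(node_matrix)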
@ -121,13 +138,26 @@ class ThreeMFWriter(MeshWriter):
if stack is not None:
changed_setting_keys = stack.getTop().getAllKeys()
# Ensure that we save the extruder used for this object in a multi-extrusion setup
if stack.getProperty("machine_extruder_count", "value") > 1:
changed_setting_keys.add("extruder_nr")
if exported_settings is None:
# Ensure that we save the extruder used for this object in a multi-extrusion setup
if stack.getProperty("machine_extruder_count", "value") > 1:
changed_setting_keys.add("extruder_nr")
# Get values for all changed settings & save them.
for key in changed_setting_keys:
savitar_node.setSetting("cura:" + key, str(stack.getProperty(key, "value")))
# Get values for all changed settings & save them.
for key in changed_setting_keys:
savitar_node.setSetting("cura:" + key, str(stack.getProperty(key, "value")))
else:
# We want to export only the specified settings
if um_node.getName() in exported_settings:
model_exported_settings = exported_settings[um_node.getName()]
# Get values for all exported settings & save them.
for key in model_exported_settings:
savitar_node.setSetting("cura:" + key, str(stack.getProperty(key, "value")))
if isinstance(um_node, CuraSceneNode):
savitar_node.setSetting("cura:print_order", str(um_node.printOrder))
savitar_node.setSetting("cura:drop_to_buildplate", str(um_node.isDropDownEnabled))
# Store the metadata.
for key, value in um_node.metadata.items():
@ -137,7 +167,8 @@ class ThreeMFWriter(MeshWriter):
# only save the nodes on the active build plate
if child_node.callDecoration("getBuildPlateNumber") != active_build_plate_nr:
continue
savitar_child_node = ThreeMFWriter._convertUMNodeToSavitarNode(child_node)
savitar_child_node = ThreeMFWriter._convertUMNodeToSavitarNode(child_node,
exported_settings = exported_settings)
if savitar_child_node is not None:
savitar_node.addChild(savitar_child_node)
@ -146,7 +177,24 @@ class ThreeMFWriter(MeshWriter):
def getArchive(self):
return self._archive
def write(self, stream, nodes, mode = MeshWriter.OutputMode.BinaryMode) -> bool:
def _addLogoToThumbnail(self, primary_image, logo_name):
# Load the icon png image
icon_image = QImage(Resources.getPath(Resources.Images, logo_name))
# Resize icon_image to be 1/4 of primary_image size
new_width = int(primary_image.width() / 4)
new_height = int(primary_image.height() / 4)
icon_image = icon_image.scaled(new_width, new_height, Qt.AspectRatioMode.KeepAspectRatio)
# Create a QPainter to draw on the image
painter = QPainter(primary_image)
# Draw the icon in the top-left corner (adjust coordinates as needed)
icon_position = (10, 10)
painter.drawImage(icon_position[0], icon_position[1], icon_image)
painter.end()
def write(self, stream, nodes, mode = MeshWriter.OutputMode.BinaryMode, export_settings_model = None) -> bool:
self._archive = None # Reset archive
archive = zipfile.ZipFile(stream, "w", compression = zipfile.ZIP_DEFLATED)
try:
@ -170,6 +218,10 @@ class ThreeMFWriter(MeshWriter):
# Attempt to add a thumbnail
snapshot = self._createSnapshot()
if snapshot:
if export_settings_model is not None:
self._addLogoToThumbnail(snapshot, "cura-share.png")
elif export_settings_model is None and self._store_archive:
self._addLogoToThumbnail(snapshot, "cura-icon.png")
thumbnail_buffer = QBuffer()
thumbnail_buffer.open(QBuffer.OpenModeFlag.ReadWrite)
snapshot.save(thumbnail_buffer, "PNG")
@ -224,14 +276,20 @@ class ThreeMFWriter(MeshWriter):
transformation_matrix.preMultiply(translation_matrix)
root_node = UM.Application.Application.getInstance().getController().getScene().getRoot()
exported_model_settings = ThreeMFWriter._extractModelExportedSettings(export_settings_model) if export_settings_model is not None else None
for node in nodes:
if node == root_node:
for root_child in node.getChildren():
savitar_node = ThreeMFWriter._convertUMNodeToSavitarNode(root_child, transformation_matrix)
savitar_node = ThreeMFWriter._convertUMNodeToSavitarNode(root_child,
transformation_matrix,
exported_model_settings)
if savitar_node:
savitar_scene.addSceneNode(savitar_node)
else:
savitar_node = self._convertUMNodeToSavitarNode(node, transformation_matrix)
savitar_node = self._convertUMNodeToSavitarNode(node,
transformation_matrix,
exported_model_settings)
if savitar_node:
savitar_scene.addSceneNode(savitar_node)
@ -367,6 +425,7 @@ class ThreeMFWriter(MeshWriter):
@call_on_qt_thread # must be called from the main thread because of OpenGL
def _createSnapshot(self):
Logger.log("d", "Creating thumbnail image...")
self._lock.acquire()
if not CuraApplication.getInstance().isVisible:
Logger.log("w", "Can't create snapshot when renderer not initialized.")
self._lock.release()
return None
@ -375,6 +434,7 @@ class ThreeMFWriter(MeshWriter):
except:
Logger.logException("w", "Failed to create snapshot image")
return None
finally: self._lock.release()
return snapshot
@ -387,3 +447,24 @@ class ThreeMFWriter(MeshWriter):
parser = Savitar.ThreeMFParser()
scene_string = parser.sceneToString(savitar_scene)
return scene_string
@staticmethod
def _extractModelExportedSettings(model: Optional[SettingsExportModel]) -> Dict[str, Set[str]]:
extra_settings = {}
if model is not None:
for group in model.settingsGroups:
if group.category == SettingsExportGroup.Category.Model:
exported_model_settings = set()
for exported_setting in group.settings:
if exported_setting.selected:
exported_model_settings.add(exported_setting.id)
extra_settings[group.category_details] = exported_model_settings
return extra_settings
def exportUcp(self):
self._config_dialog = UCPDialog()
self._config_dialog.show()
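For reference, the mapping returned by _extractModelExportedSettings keys on the group's category_details (the model name) and is consumed in _convertUMNodeToSavitarNode, where only the selected keys are written back as cura: settings. A small hypothetical illustration:

# Hypothetical example of the Dict[str, Set[str]] returned by _extractModelExportedSettings;
# the model name and setting ids are invented for illustration.
exported_model_settings = {
    "calibration_cube.stl": {"infill_sparse_density", "wall_line_count"},
}

# In _convertUMNodeToSavitarNode, a node whose name matches gets only these keys
# exported, each prefixed with "cura:".
node_name = "calibration_cube.stl"
for key in sorted(exported_model_settings.get(node_name, set())):
    print(f"cura:{key}")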

View File

@ -0,0 +1,114 @@
# Copyright (c) 2024 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
import os
from PyQt6.QtCore import pyqtSignal, QObject
import UM
from UM.FlameProfiler import pyqtSlot
from UM.OutputDevice import OutputDeviceError
from UM.Workspace.WorkspaceWriter import WorkspaceWriter
from UM.i18n import i18nCatalog
from UM.Logger import Logger
from UM.Message import Message
from cura.CuraApplication import CuraApplication
from .SettingsExportModel import SettingsExportModel
i18n_catalog = i18nCatalog("cura")
class UCPDialog(QObject):
finished = pyqtSignal(bool)
def __init__(self, parent = None) -> None:
super().__init__(parent)
plugin_path = os.path.dirname(__file__)
dialog_path = os.path.join(plugin_path, 'UCPDialog.qml')
self._model = SettingsExportModel()
self._view = CuraApplication.getInstance().createQmlComponent(
dialog_path,
{
"manager": self,
"settingsExportModel": self._model
}
)
self._view.accepted.connect(self._onAccepted)
self._view.rejected.connect(self._onRejected)
self._finished = False
self._accepted = False
def show(self) -> None:
self._finished = False
self._accepted = False
self._view.show()
def getModel(self) -> SettingsExportModel:
return self._model
@pyqtSlot()
def notifyClosed(self):
self._onFinished()
def save3mf(self):
application = CuraApplication.getInstance()
workspace_handler = application.getWorkspaceFileHandler()
# Set the model to the workspace writer
mesh_writer = workspace_handler.getWriter("3MFWriter")
mesh_writer.setExportModel(self._model)
# Open file dialog and write the file
device = application.getOutputDeviceManager().getOutputDevice("local_file")
nodes = [application.getController().getScene().getRoot()]
device.writeError.connect(lambda: self._onRejected())
device.writeSuccess.connect(lambda: self._onSuccess())
device.writeFinished.connect(lambda: self._onFinished())
file_name = f"UCP_{CuraApplication.getInstance().getPrintInformation().baseName}"
try:
device.requestWrite(
nodes,
file_name,
["application/x-ucp"],
workspace_handler,
preferred_mimetype_list="application/x-ucp"
)
except OutputDeviceError.UserCanceledError:
self._onRejected()
except Exception as e:
message = Message(
i18n_catalog.i18nc("@info:error", "Unable to write to file: {0}", file_name),
title=i18n_catalog.i18nc("@info:title", "Error"),
message_type=Message.MessageType.ERROR
)
message.show()
Logger.logException("e", "Unable to write to file %s: %s", file_name, e)
self._onRejected()
def _onAccepted(self):
self.save3mf()
def _onRejected(self):
self._onFinished()
def _onSuccess(self):
self._accepted = True
self._onFinished()
def _onFinished(self):
# Make sure we don't send the finished signal twice, whatever happens
if self._finished:
return
self._finished = True
# Reset the model on the workspace writer
mesh_writer = CuraApplication.getInstance().getWorkspaceFileHandler().getWriter("3MFWriter")
mesh_writer.setExportModel(None)
self.finished.emit(self._accepted)

View File

@ -0,0 +1,109 @@
// Copyright (c) 2024 Ultimaker B.V.
// Cura is released under the terms of the LGPLv3 or higher.
import QtQuick 2.10
import QtQuick.Controls 2.3
import QtQuick.Layouts 1.3
import QtQuick.Window 2.2
import UM 1.5 as UM
import Cura 1.1 as Cura
UM.Dialog
{
id: exportDialog
title: catalog.i18nc("@title:window Don't translate 'Universal Cura Project'", "Export Universal Cura Project")
margin: UM.Theme.getSize("default_margin").width
minimumWidth: UM.Theme.getSize("modal_window_minimum").width
minimumHeight: UM.Theme.getSize("modal_window_minimum").height
backgroundColor: UM.Theme.getColor("detail_background")
headerComponent: Rectangle
{
height: childrenRect.height + 2 * UM.Theme.getSize("default_margin").height
color: UM.Theme.getColor("main_background")
ColumnLayout
{
id: headerColumn
anchors.top: parent.top
anchors.left: parent.left
anchors.right: parent.right
anchors.topMargin: UM.Theme.getSize("default_margin").height
anchors.leftMargin: UM.Theme.getSize("default_margin").width
anchors.rightMargin: anchors.leftMargin
RowLayout
{
UM.Label
{
id: titleLabel
text: catalog.i18nc("@action:title Don't translate 'Universal Cura Project'", "Summary - Universal Cura Project")
font: UM.Theme.getFont("large")
}
Cura.TertiaryButton
{
id: learnMoreButton
text: catalog.i18nc("@button", "Learn more")
iconSource: UM.Theme.getIcon("LinkExternal")
isIconOnRightSide: true
onClicked: Qt.openUrlExternally("https://support.ultimaker.com/s/article/000002979")
}
}
UM.Label
{
id: descriptionLabel
text: catalog.i18nc("@action:description Don't translate 'Universal Cura Project'", "Universal Cura Project files can be printed on different 3D printers while retaining positional data and selected settings. When exported, all models present on the build plate will be included along with their current position, orientation, and scale. You can also select which per-extruder or per-model settings should be included to ensure proper printing.")
font: UM.Theme.getFont("default")
wrapMode: Text.Wrap
Layout.maximumWidth: headerColumn.width
}
}
}
Rectangle
{
anchors.fill: parent
color: UM.Theme.getColor("main_background")
UM.I18nCatalog { id: catalog; name: "cura" }
ListView
{
id: settingsExportList
anchors.fill: parent
anchors.margins: UM.Theme.getSize("default_margin").width
spacing: UM.Theme.getSize("thick_margin").height
model: settingsExportModel.settingsGroups
clip: true
ScrollBar.vertical: UM.ScrollBar { id: verticalScrollBar }
delegate: SettingsSelectionGroup { Layout.margins: 0 }
}
}
rightButtons:
[
Cura.TertiaryButton
{
text: catalog.i18nc("@action:button", "Cancel")
onClicked: reject()
},
Cura.PrimaryButton
{
text: catalog.i18nc("@action:button", "Save project")
onClicked: accept()
}
]
buttonSpacing: UM.Theme.getSize("wide_margin").width
onClosing:
{
manager.notifyClosed()
}
}

View File

@ -2,9 +2,12 @@
# Uranium is released under the terms of the LGPLv3 or higher.
import sys
from PyQt6.QtQml import qmlRegisterType
from UM.Logger import Logger
try:
from . import ThreeMFWriter
from .SettingsExportGroup import SettingsExportGroup
threemf_writer_was_imported = True
except ImportError:
Logger.log("w", "Could not import ThreeMFWriter; libSavitar may be missing")
@ -23,20 +26,30 @@ def getMetaData():
if threemf_writer_was_imported:
metaData["mesh_writer"] = {
"output": [{
"extension": "3mf",
"description": i18n_catalog.i18nc("@item:inlistbox", "3MF file"),
"mime_type": "application/vnd.ms-package.3dmanufacturing-3dmodel+xml",
"mode": ThreeMFWriter.ThreeMFWriter.OutputMode.BinaryMode
}]
"output": [
{
"extension": "3mf",
"description": i18n_catalog.i18nc("@item:inlistbox", "3MF file"),
"mime_type": "application/vnd.ms-package.3dmanufacturing-3dmodel+xml",
"mode": ThreeMFWriter.ThreeMFWriter.OutputMode.BinaryMode
},
]
}
metaData["workspace_writer"] = {
"output": [{
"extension": workspace_extension,
"description": i18n_catalog.i18nc("@item:inlistbox", "Cura Project 3MF file"),
"mime_type": "application/vnd.ms-package.3dmanufacturing-3dmodel+xml",
"mode": ThreeMFWorkspaceWriter.ThreeMFWorkspaceWriter.OutputMode.BinaryMode
}]
"output": [
{
"extension": workspace_extension,
"description": i18n_catalog.i18nc("@item:inlistbox", "Cura Project 3MF file"),
"mime_type": "application/vnd.ms-package.3dmanufacturing-3dmodel+xml",
"mode": ThreeMFWorkspaceWriter.ThreeMFWorkspaceWriter.OutputMode.BinaryMode
},
{
"extension": "3mf",
"description": i18n_catalog.i18nc("@item:inlistbox", "Universal Cura Project"),
"mime_type": "application/x-ucp",
"mode": ThreeMFWorkspaceWriter.ThreeMFWorkspaceWriter.OutputMode.BinaryMode
}
]
}
return metaData
@ -44,6 +57,8 @@ def getMetaData():
def register(app):
if "3MFWriter.ThreeMFWriter" in sys.modules:
qmlRegisterType(SettingsExportGroup, "ThreeMFWriter", 1, 0, "SettingsExportGroup")
return {"mesh_writer": ThreeMFWriter.ThreeMFWriter(),
"workspace_writer": ThreeMFWorkspaceWriter.ThreeMFWorkspaceWriter()}
else:

View File

@ -2,7 +2,7 @@
"name": "3MF Writer",
"author": "Ultimaker B.V.",
"version": "1.0.1",
"description": "Provides support for writing 3MF files.",
"description": "Provides support for writing 3MF and UCP files.",
"api": 8,
"i18n-catalog": "cura"
}

View File

@ -33,6 +33,10 @@ message Slice
repeated Extruder extruders = 3; // The settings sent to each extruder object
repeated SettingExtruder limit_to_extruder = 4; // From which stack the setting would inherit if not defined per object
repeated EnginePlugin engine_plugins = 5;
string sentry_id = 6; // The anonymized Sentry user id that requested the slice
string cura_version = 7; // The version of Cura that requested the slice
optional string project_name = 8; // The name of the project that requested the slice
optional string user_name = 9; // The Digital Factory account name of the user that requested the slice
}
message Extruder

View File

@ -76,6 +76,7 @@ class CuraEngineBackend(QObject, Backend):
self._default_engine_location = executable_name
search_path = [
os.path.abspath(os.path.join(os.path.dirname(sys.executable), "..", "Resources")),
os.path.abspath(os.path.dirname(sys.executable)),
os.path.abspath(os.path.join(os.path.dirname(sys.executable), "bin")),
os.path.abspath(os.path.join(os.path.dirname(sys.executable), "..")),
@ -163,6 +164,8 @@ class CuraEngineBackend(QObject, Backend):
self._is_disabled: bool = False
application.getPreferences().addPreference("general/auto_slice", False)
application.getPreferences().addPreference("info/send_engine_crash", True)
application.getPreferences().addPreference("info/anonymous_engine_crash_report", True)
self._use_timer: bool = False
@ -173,10 +176,15 @@ class CuraEngineBackend(QObject, Backend):
self._change_timer.setSingleShot(True)
self._change_timer.setInterval(500)
self.determineAutoSlicing()
application.getPreferences().preferenceChanged.connect(self._onPreferencesChanged)
self._slicing_error_message = Message(
text = catalog.i18nc("@message", "Slicing failed with an unexpected error. Please consider reporting a bug on our issue tracker."),
text = catalog.i18nc("@message", "Oops! We encountered an unexpected error during your slicing process. "
"Rest assured, we've automatically received the crash logs for analysis, "
"if you have not disabled data sharing in your preferences. To assist us "
"further, consider sharing your project details on our issue tracker."),
title = catalog.i18nc("@message:title", "Slicing failed"),
message_type = Message.MessageType.ERROR
)
@ -193,6 +201,9 @@ class CuraEngineBackend(QObject, Backend):
application.initializationFinished.connect(self.initialize)
# Ensure that the initial value for send_engine_crash is handled correctly.
application.callLater(self._onPreferencesChanged, "info/send_engine_crash")
def startPlugins(self) -> None:
"""
Ensure that all backend plugins are started
@ -1088,11 +1099,14 @@ class CuraEngineBackend(QObject, Backend):
self._change_timer.timeout.disconnect(self.slice)
def _onPreferencesChanged(self, preference: str) -> None:
if preference != "general/auto_slice":
if preference != "general/auto_slice" and preference != "info/send_engine_crash" and preference != "info/anonymous_engine_crash_report":
return
auto_slice = self.determineAutoSlicing()
if auto_slice:
self._change_timer.start()
if preference == "general/auto_slice":
auto_slice = self.determineAutoSlicing()
if auto_slice:
self._change_timer.start()
elif preference == "info/send_engine_crash":
os.environ["USE_SENTRY"] = "1" if CuraApplication.getInstance().getPreferences().getValue("info/send_engine_crash") else "0"
def tickle(self) -> None:
"""Tickle the backend so in case of auto slicing, it starts the timer."""

View File

@ -1,12 +1,14 @@
# Copyright (c) 2023 UltiMaker
# Copyright (c) 2024 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.
import uuid
import os
import numpy
from string import Formatter
from enum import IntEnum
import time
from typing import Any, cast, Dict, List, Optional, Set
from typing import Any, cast, Dict, List, Optional, Set, Tuple
import re
import pyArcus as Arcus # For typing.
from PyQt6.QtCore import QCoreApplication
@ -30,6 +32,7 @@ from cura.CuraApplication import CuraApplication
from cura.Scene.CuraSceneNode import CuraSceneNode
from cura.OneAtATimeIterator import OneAtATimeIterator
from cura.Settings.ExtruderManager import ExtruderManager
from cura.CuraVersion import CuraVersion
NON_PRINTING_MESH_SETTINGS = ["anti_overhang_mesh", "infill_mesh", "cutting_mesh"]
@ -60,31 +63,56 @@ class GcodeStartEndFormatter(Formatter):
# will be used. Alternatively, if the expression is formatted as "{[expression], [extruder_nr]}",
# then the expression will be evaluated with the extruder stack of the specified extruder_nr.
_extruder_regex = re.compile(r"^\s*(?P<expression>.*)\s*,\s*(?P<extruder_nr>\d+)\s*$")
_extruder_regex = re.compile(r"^\s*(?P<expression>.*)\s*,\s*(?P<extruder_nr_expr>.*)\s*$")
def __init__(self, default_extruder_nr: int = -1, *,
additional_per_extruder_settings: Optional[Dict[str, Dict[str, any]]] = None) -> None:
def __init__(self, all_extruder_settings: Dict[str, Any], default_extruder_nr: int = -1) -> None:
super().__init__()
self._all_extruder_settings: Dict[str, Any] = all_extruder_settings
self._default_extruder_nr: int = default_extruder_nr
self._additional_per_extruder_settings: Optional[Dict[str, Dict[str, any]]] = additional_per_extruder_settings
def get_field(self, field_name, args: List[str], kwargs: dict) -> Tuple[str, str]:
# get_field parses all the fields in the format-string and passes them individually to the get_value method.
# e.g. for a string "Hello {foo.bar}", the complete field "foo.bar" would be passed to get_field, and then
# the individual parts "foo" and "bar" would be passed to get_value. This poses a problem for us, because we
# want to parse the entire field as a single expression. To solve this, we override the get_field method and
# return the entire field as the expression.
return self.get_value(field_name, args, kwargs), field_name
def get_value(self, expression: str, args: List[str], kwargs: dict) -> str:
extruder_nr = self._default_extruder_nr
# The following variables are not settings, but only become available after slicing.
# When these variables are encountered, we return them as-is; they are replaced later,
# once the actual values are known.
post_slice_data_variables = ["filament_cost", "print_time", "filament_amount", "filament_weight", "jobname"]
if expression in post_slice_data_variables:
return f"{{{expression}}}"
extruder_nr = str(self._default_extruder_nr)
# The settings may specify a specific extruder to use. This is done by
# formatting the expression as "{expression}, {extruder_nr}". If the
# formatting the expression as "{expression}, {extruder_nr_expr}". If the
# expression is formatted like this, we extract the extruder_nr and use
# it to get the value from the correct extruder stack.
match = self._extruder_regex.match(expression)
if match:
expression = match.group("expression")
extruder_nr = int(match.group("extruder_nr"))
extruder_nr_expr = match.group("extruder_nr_expr")
if self._additional_per_extruder_settings is not None and str(
extruder_nr) in self._additional_per_extruder_settings:
additional_variables = self._additional_per_extruder_settings[str(extruder_nr)]
if extruder_nr_expr.isdigit():
extruder_nr = extruder_nr_expr
else:
# We get the value of extruder_nr_expr from the `_all_extruder_settings` dictionary
# rather than from the global container stack. The `_all_extruder_settings["-1"]` entry is a
# dict representation of the global container stack, extended with additional properties
# such as `initial_extruder_nr`. Since users may enter expressions that reference such
# properties, we can't evaluate against the global container stack directly.
extruder_nr = str(self._all_extruder_settings["-1"].get(extruder_nr_expr, "-1"))
if extruder_nr in self._all_extruder_settings:
additional_variables = self._all_extruder_settings[extruder_nr].copy()
else:
additional_variables = dict()
Logger.warning(f"Extruder {extruder_nr} does not exist, using global settings")
additional_variables = self._all_extruder_settings["-1"].copy()
# Add the arguments and keyword arguments to the additional settings. These
# are currently _not_ used, but they are added for consistency with the
@ -94,10 +122,13 @@ class GcodeStartEndFormatter(Formatter):
for key, value in kwargs.items():
additional_variables[key] = value
if extruder_nr == -1:
if extruder_nr == "-1":
container_stack = CuraApplication.getInstance().getGlobalContainerStack()
else:
container_stack = ExtruderManager.getInstance().getExtruderStack(extruder_nr)
if not container_stack:
Logger.warning(f"Extruder {extruder_nr} does not exist, using global settings")
container_stack = CuraApplication.getInstance().getGlobalContainerStack()
setting_function = SettingFunction(expression)
value = setting_function(container_stack, additional_variables=additional_variables)
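The `{expression, extruder_nr_expr}` splitting described above can be illustrated with the regex alone; the field strings below are hypothetical start-g-code placeholders:

import re

# Mirrors _extruder_regex from GcodeStartEndFormatter; the example fields are hypothetical.
extruder_regex = re.compile(r"^\s*(?P<expression>.*)\s*,\s*(?P<extruder_nr_expr>.*)\s*$")

fields = (
    "material_print_temperature_layer_0, initial_extruder_nr",
    "material_print_temperature_layer_0, 1",
    "material_bed_temperature",
)

for field in fields:
    match = extruder_regex.match(field)
    if match:
        print(f"{match.group('expression')!r} -> evaluated on extruder {match.group('extruder_nr_expr')!r}")
    else:
        print(f"{field!r} -> evaluated on the default extruder")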
@ -111,12 +142,13 @@ class StartSliceJob(Job):
def __init__(self, slice_message: Arcus.PythonMessage) -> None:
super().__init__()
self._scene = CuraApplication.getInstance().getController().getScene() #type: Scene
self._scene: Scene = CuraApplication.getInstance().getController().getScene()
self._slice_message: Arcus.PythonMessage = slice_message
self._is_cancelled = False #type: bool
self._build_plate_number = None #type: Optional[int]
self._is_cancelled: bool = False
self._build_plate_number: Optional[int] = None
self._all_extruders_settings = None #type: Optional[Dict[str, Any]] # cache for all setting values from all stacks (global & extruder) for the current machine
# cache for all setting values from all stacks (global & extruder) for the current machine
self._all_extruders_settings: Optional[Dict[str, Any]] = None
def getSliceMessage(self) -> Arcus.PythonMessage:
return self._slice_message
@ -315,6 +347,17 @@ class StartSliceJob(Job):
self._buildGlobalSettingsMessage(stack)
self._buildGlobalInheritsStackMessage(stack)
user_id = uuid.getnode()  # On all of Cura's supported platforms, this returns the MAC address, which is pseudonymous information (!= anonymous).
user_id %= 2 ** 16  # So to make it anonymous, apply a bitmask selecting only the last 16 bits. This prevents it from being traceable to a specific user, but still gives somewhat of an idea of whether it's just the same user hitting the same crash over and over again, or if it's widespread.
self._slice_message.sentry_id = f"{user_id}"
self._slice_message.cura_version = CuraVersion
# Add the project name to the message if the user allows for non-anonymous crash data collection.
account = CuraApplication.getInstance().getCuraAPI().account
if account and account.isLoggedIn and not CuraApplication.getInstance().getPreferences().getValue("info/anonymous_engine_crash_report"):
self._slice_message.project_name = CuraApplication.getInstance().getPrintInformation().baseName
self._slice_message.user_name = account.userName
# Build messages for extruder stacks
for extruder_stack in global_stack.extruderList:
self._buildExtruderMessage(extruder_stack)
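The pseudonymisation described in the comments above boils down to a one-liner; as a standalone illustration (the printed value naturally depends on the machine):

import uuid

# uuid.getnode() returns the MAC address on Cura's supported platforms; keeping only
# the last 16 bits makes the id too coarse to trace back to a specific user, while
# still allowing repeated crashes from the same id to be grouped.
user_id = uuid.getnode() % (2 ** 16)
sentry_id = f"{user_id}"
print(sentry_id)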
@ -446,10 +489,7 @@ class StartSliceJob(Job):
# Get "replacement-keys" for the extruders. In the formatter the settings stack is used to get the
# replacement values for the setting-keys. However, the values for `material_id`, `material_type`,
# etc are not in the settings stack.
additional_per_extruder_settings = self._all_extruders_settings.copy()
additional_per_extruder_settings["default_extruder_nr"] = default_extruder_nr
fmt = GcodeStartEndFormatter(default_extruder_nr=default_extruder_nr,
additional_per_extruder_settings=additional_per_extruder_settings)
fmt = GcodeStartEndFormatter(self._all_extruders_settings, default_extruder_nr=default_extruder_nr)
return str(fmt.format(value))
except:
Logger.logException("w", "Unable to do token replacement on start/end g-code")

View File

@ -1,7 +1,6 @@
# Copyright (c) 2021 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
from UM.MimeTypeDatabase import MimeType, MimeTypeDatabase
from .src import DigitalFactoryFileProvider, DigitalFactoryOutputDevicePlugin, DigitalFactoryController

View File

@ -208,12 +208,14 @@ Item
anchors.rightMargin: UM.Theme.getSize("thin_margin").height
enabled: UM.Backend.state == UM.Backend.Done
currentIndex: UM.Backend.state == UM.Backend.Done ? 0 : 1
currentIndex: UM.Backend.state == UM.Backend.Done ? (dfFilenameTextfield.text.startsWith("MM") ? 1 : 0) : 2
textRole: "text"
valueRole: "value"
model: [
{ text: catalog.i18nc("@option", "Save Cura project and print file"), key: "3mf_ufp", value: ["3mf", "ufp"] },
{ text: catalog.i18nc("@option", "Save Cura project and .ufp print file"), key: "3mf_ufp", value: ["3mf", "ufp"] },
{ text: catalog.i18nc("@option", "Save Cura project and .makerbot print file"), key: "3mf_makerbot", value: ["3mf", "makerbot"] },
{ text: catalog.i18nc("@option", "Save Cura project"), key: "3mf", value: ["3mf"] },
]
}

View File

@ -27,7 +27,7 @@ from .ExportFileJob import ExportFileJob
class DFFileExportAndUploadManager:
"""
Class responsible for exporting the scene and uploading the exported data to the Digital Factory Library. Since 3mf
and UFP files may need to be uploaded at the same time, this class keeps a single progress and success message for
and (UFP or makerbot) files may need to be uploaded at the same time, this class keeps a single progress and success message for
both files and updates those messages according to the progress of both the file job uploads.
"""
def __init__(self, file_handlers: Dict[str, FileHandler],
@ -118,7 +118,7 @@ class DFFileExportAndUploadManager:
library_project_id = self._library_project_id,
source_file_id = self._source_file_id
)
self._api.requestUploadUFP(request, on_finished = self._uploadFileData, on_error = self._onRequestUploadPrintFileFailed)
self._api.requestUploadMeshFile(request, on_finished = self._uploadFileData, on_error = self._onRequestUploadPrintFileFailed)
def _uploadFileData(self, file_upload_response: Union[DFLibraryFileUploadResponse, DFPrintJobUploadResponse]) -> None:
"""Uploads the exported file data after the file or print job upload has been registered at the Digital Factory
@ -279,22 +279,25 @@ class DFFileExportAndUploadManager:
This means that something went wrong with the initial request to create a "file" entry in the digital library.
"""
reply_string = bytes(reply.readAll()).decode()
filename_ufp = self._file_name + ".ufp"
Logger.log("d", "An error occurred while uploading the print job file '{}' to the Digital Library project '{}': {}".format(filename_ufp, self._library_project_id, reply_string))
if "ufp" in self._formats:
filename_meshfile = self._file_name + ".ufp"
elif "makerbot" in self._formats:
filename_meshfile = self._file_name + ".makerbot"
Logger.log("d", "An error occurred while uploading the print job file '{}' to the Digital Library project '{}': {}".format(filename_meshfile, self._library_project_id, reply_string))
with self._message_lock:
# Set the progress to 100% when the upload job fails, to avoid having the progress message stuck
self._file_upload_job_metadata[filename_ufp]["upload_status"] = "failed"
self._file_upload_job_metadata[filename_ufp]["upload_progress"] = 100
self._file_upload_job_metadata[filename_meshfile]["upload_status"] = "failed"
self._file_upload_job_metadata[filename_meshfile]["upload_progress"] = 100
human_readable_error = self.extractErrorTitle(reply_string)
self._file_upload_job_metadata[filename_ufp]["file_upload_failed_message"] = getBackwardsCompatibleMessage(
self._file_upload_job_metadata[filename_meshfile]["file_upload_failed_message"] = getBackwardsCompatibleMessage(
title = "File upload error",
text = "Failed to upload the file '{}' to '{}'. {}".format(filename_ufp, self._library_project_name, human_readable_error),
text = "Failed to upload the file '{}' to '{}'. {}".format(filename_meshfile, self._library_project_name, human_readable_error),
message_type_str = "ERROR",
lifetime = 30
)
self._on_upload_error()
self._onFileUploadFinished(filename_ufp)
self._onFileUploadFinished(filename_meshfile)
@staticmethod
def extractErrorTitle(reply_body: Optional[str]) -> str:
@ -407,4 +410,28 @@ class DFFileExportAndUploadManager:
job_ufp = ExportFileJob(self._file_handlers["ufp"], self._nodes, self._file_name, "ufp")
job_ufp.finished.connect(self._onPrintFileExported)
self._upload_jobs.append(job_ufp)
if "makerbot" in self._formats and "makerbot" in self._file_handlers and self._file_handlers["makerbot"]:
filename_makerbot = self._file_name + ".makerbot"
metadata[filename_makerbot] = {
"export_job_output" : None,
"upload_progress" : -1,
"upload_status" : "",
"file_upload_response": None,
"file_upload_success_message": getBackwardsCompatibleMessage(
text = "'{}' was uploaded to '{}'.".format(filename_makerbot, self._library_project_name),
title = "Upload successful",
message_type_str = "POSITIVE",
lifetime = 30,
),
"file_upload_failed_message": getBackwardsCompatibleMessage(
text = "Failed to upload the file '{}' to '{}'.".format(filename_makerbot, self._library_project_name),
title = "File upload error",
message_type_str = "ERROR",
lifetime = 30
)
}
job_makerbot = ExportFileJob(self._file_handlers["makerbot"], self._nodes, self._file_name, "makerbot")
job_makerbot.finished.connect(self._onPrintFileExported)
self._upload_jobs.append(job_makerbot)
return metadata

View File

@ -3,7 +3,6 @@
import json
from json import JSONDecodeError
import re
from time import time
from typing import List, Any, Optional, Union, Type, Tuple, Dict, cast, TypeVar, Callable
@ -313,7 +312,7 @@ class DigitalFactoryApiClient:
error_callback = on_error,
timeout = self.DEFAULT_REQUEST_TIMEOUT)
def requestUploadUFP(self, request: DFPrintJobUploadRequest,
def requestUploadMeshFile(self, request: DFPrintJobUploadRequest,
on_finished: Callable[[DFPrintJobUploadResponse], Any],
on_error: Optional[Callable[["QNetworkReply", "QNetworkReply.NetworkError"], None]] = None) -> None:
"""Requests the Digital Factory to register the upload of a file in a library project.

View File

@ -92,7 +92,8 @@ class DigitalFactoryOutputDevice(ProjectOutputDevice):
if not self._controller.file_handlers:
self._controller.file_handlers = {
"3mf": CuraApplication.getInstance().getWorkspaceFileHandler(),
"ufp": CuraApplication.getInstance().getMeshFileHandler()
"ufp": CuraApplication.getInstance().getMeshFileHandler(),
"makerbot": CuraApplication.getInstance().getMeshFileHandler()
}
self._dialog = CuraApplication.getInstance().createQmlComponent(self._dialog_path, {"manager": self._controller})

View File

@ -4,7 +4,6 @@ from typing import List, Optional
from PyQt6.QtCore import Qt, pyqtSignal
from UM.Logger import Logger
from UM.Qt.ListModel import ListModel
from .DigitalFactoryProjectResponse import DigitalFactoryProjectResponse

View File

@ -2,7 +2,6 @@
# Cura is released under the terms of the LGPLv3 or higher.
from UM.i18n import i18nCatalog
from UM.Platform import Platform
from . import GCodeGzWriter

Some files were not shown because too many files have changed in this diff