Merge branch 'main' into PP-397-adjust-z-scaling-factor-PET-CF

This commit is contained in:
Saumya Jain 2023-12-29 09:26:33 +01:00 committed by GitHub
commit 96feea0396
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
40 changed files with 621 additions and 1767 deletions

@ -1,156 +0,0 @@
name: Create and Upload Conan package
on:
workflow_call:
inputs:
project_name:
required: true
type: string
recipe_id_full:
required: true
type: string
build_id:
required: true
type: number
build_info:
required: false
default: true
type: boolean
recipe_id_latest:
required: false
type: string
runs_on:
required: true
type: string
python_version:
required: true
type: string
conan_config_branch:
required: false
type: string
conan_logging_level:
required: false
type: string
conan_clean_local_cache:
required: false
type: boolean
default: false
conan_upload_community:
required: false
default: true
type: boolean
env:
CONAN_LOGIN_USERNAME: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD: ${{ secrets.CONAN_PASS }}
CONAN_LOG_RUN_TO_OUTPUT: 1
CONAN_LOGGING_LEVEL: ${{ inputs.conan_logging_level }}
CONAN_NON_INTERACTIVE: 1
jobs:
conan-package-create:
runs-on: ${{ inputs.runs_on }}
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: ${{ inputs.python_version }}
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements for runner
run: pip install -r https://raw.githubusercontent.com/Ultimaker/Cura/main/.github/workflows/requirements-conan-package.txt
# Note the runner requirements are always installed from the main branch in the Ultimaker/Cura repo
- name: Use Conan download cache (Bash)
if: ${{ runner.os != 'Windows' }}
run: conan config set storage.download_cache="$HOME/.conan/conan_download_cache"
- name: Use Conan download cache (Powershell)
if: ${{ runner.os == 'Windows' }}
run: conan config set storage.download_cache="C:\Users\runneradmin\.conan\conan_download_cache"
- name: Cache Conan local repository packages (Bash)
uses: actions/cache@v3
if: ${{ runner.os != 'Windows' }}
with:
path: |
$HOME/.conan/data
$HOME/.conan/conan_download_cache
key: conan-${{ inputs.runs_on }}-${{ runner.arch }}-create-cache
- name: Cache Conan local repository packages (Powershell)
uses: actions/cache@v3
if: ${{ runner.os == 'Windows' }}
with:
path: |
C:\Users\runneradmin\.conan\data
C:\.conan
C:\Users\runneradmin\.conan\conan_download_cache
key: conan-${{ inputs.runs_on }}-${{ runner.arch }}-create-cache
- name: Install MacOS system requirements
if: ${{ runner.os == 'Macos' }}
run: brew install autoconf automake ninja
# NOTE: Due to what are probably github issues, we have to remove the cache and reconfigure before the rest.
# This is maybe because grub caches the disk it uses last time, which is recreated each time.
- name: Install Linux system requirements
if: ${{ runner.os == 'Linux' }}
run: |
sudo rm /var/cache/debconf/config.dat
sudo dpkg --configure -a
sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
sudo apt update
sudo apt upgrade
sudo apt install build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config flex bison -y
- name: Install GCC-13 on ubuntu
if: ${{ startsWith(inputs.runs_on, 'ubuntu') }}
run: |
sudo apt install g++-13 gcc-13 -y
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 13
sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-13 13
- name: Create the default Conan profile
run: conan profile new default --detect
- name: Get Conan configuration from branch
if: ${{ inputs.conan_config_branch != '' }}
run: conan config install https://github.com/Ultimaker/conan-config.git -a "-b ${{ inputs.conan_config_branch }}"
- name: Get Conan configuration
run: |
conan config install https://github.com/Ultimaker/conan-config.git
conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
- name: Add Cura private Artifactory remote
run: conan remote add cura-private-conan-dev https://cura.jfrog.io/artifactory/api/conan/cura-private-conan-dev True
- name: Set GH service account for remote cura-conan-dev
run: conan user -p ${{ secrets.CONAN_GH_RUNNER_PASS }} -r cura-private-conan-dev "${{ secrets.CONAN_GH_RUNNER_USER }}"
- name: Create the Packages
run: conan install ${{ inputs.recipe_id_full }} --build=missing --update -c tools.build:skip_test=True
- name: Upload the Package(s)
if: ${{ always() && inputs.conan_upload_community }}
run: conan upload ${{ inputs.recipe_id_full }} -r cura --all -c
- name: Upload the Package(s) to the private Artifactory
if: ${{ always() && ! inputs.conan_upload_community }}
run: conan upload ${{ inputs.recipe_id_full }} -r cura-private-conan-dev --all -c
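For context, a repository would consume a reusable workflow like this one through a workflow_call job. The following is only an illustrative sketch (the job name, recipe id and runner label are hypothetical, and it assumes the file is referenced at its original path in Ultimaker/Cura):
jobs:
  create-cura-package:
    uses: ultimaker/cura/.github/workflows/conan-package-create.yml@main
    with:
      project_name: cura
      recipe_id_full: cura/5.7.0-alpha@ultimaker/testing
      build_id: 1
      runs_on: ubuntu-22.04
      python_version: '3.11.x'
      conan_logging_level: info
    secrets: inherit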

@ -1,27 +1,6 @@
---
name: conan-package
# Exports the recipe, sources and binaries for Mac, Windows and Linux and uploads them to the server so that they can
# be used downstream.
#
# It should run on pushes against main or CURA-* branches, but it will only create the binaries for main and release branches
on:
workflow_dispatch:
inputs:
create_binaries_windows:
required: true
default: false
description: 'create binaries Windows'
create_binaries_linux:
required: true
default: false
description: 'create binaries Linux'
create_binaries_macos:
required: true
default: false
description: 'create binaries Macos'
push:
paths:
- 'plugins/**'
@ -32,15 +11,15 @@ on:
- 'packaging/**'
- '.github/workflows/conan-*.yml'
- '.github/workflows/notify.yml'
- '.github/workflows/requirements-conan-package.txt'
- '.github/workflows/requirements-runner.txt'
- 'requirements*.txt'
- 'conanfile.py'
- 'conandata.yml'
- 'GitVersion.yml'
- '*.jinja'
branches:
- main
- 'main'
- 'CURA-*'
- 'PP-*'
- '[0-9].[0-9]'
- '[0-9].[0-9][0-9]'
tags:
@ -51,95 +30,17 @@ on:
env:
CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
CONAN_LOGIN_USERNAME_CURA_CE: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD_CURA_CE: ${{ secrets.CONAN_PASS }}
CONAN_LOG_RUN_TO_OUTPUT: 1
CONAN_LOGGING_LEVEL: ${{ inputs.conan_logging_level }}
CONAN_NON_INTERACTIVE: 1
permissions: { }
jobs:
conan-recipe-version:
permissions:
contents: read
uses: ultimaker/cura/.github/workflows/conan-recipe-version.yml@main
uses: ultimaker/cura-workflows/.github/workflows/conan-recipe-version.yml@main
with:
project_name: cura
conan-package-export:
needs: [ conan-recipe-version ]
uses: ultimaker/cura-workflows/.github/workflows/conan-recipe-export.yml@main
with:
recipe_id_full: ${{ needs.conan-recipe-version.outputs.recipe_id_full }}
recipe_id_latest: ${{ needs.conan-recipe-version.outputs.recipe_id_latest }}
conan-package-create-linux:
needs: [ conan-recipe-version ]
runs-on: 'ubuntu-latest'
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Cache Conan data
id: cache-conan
uses: actions/cache@v3
with:
path: ~/.conan
key: ${{ runner.os }}-conan
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: '3.11.x'
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements for runner
run: pip install -r .github/workflows/requirements-conan-package.txt
# NOTE: Due to what are probably github issues, we have to remove the cache and reconfigure before the rest.
# This is maybe because grub caches the disk it uses last time, which is recreated each time.
- name: Install Linux system requirements
if: ${{ runner.os == 'Linux' }}
run: |
sudo rm /var/cache/debconf/config.dat
sudo dpkg --configure -a
sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
sudo apt update
sudo apt upgrade
sudo apt install efibootmgr build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config flex bison libxcb-cursor-dev g++-12 gcc-12 -y
- name: Install GCC-13
run: |
sudo apt install g++-13 gcc-13 -y
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 13
sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-13 13
- name: Create the default Conan profile
run: conan profile new default --detect --force
- name: Get Conan configuration
run: |
conan config install https://github.com/Ultimaker/conan-config.git
conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
- name: Create the Packages
run: conan create . ${{ needs.conan-recipe-version.outputs.recipe_id_full }} --build=missing --update -o ${{ needs.conan-recipe-version.outputs.project_name }}:devtools=True -o ${{ needs.conan-recipe-version.outputs.project_name }}:enable_i18n=True -c tools.build:skip_test=True
- name: Create the latest alias
if: always()
run: conan alias ${{ needs.conan-recipe-version.outputs.recipe_id_latest }} ${{ needs.conan-recipe-version.outputs.recipe_id_full }}
- name: Upload the Package(s)
if: always()
run: |
conan upload ${{ needs.conan-recipe-version.outputs.recipe_id_full }} -r cura --all -c
conan upload ${{ needs.conan-recipe-version.outputs.recipe_id_latest }} -r cura -c
notify-create:
if: ${{ always() && (github.event_name == 'push' && (github.ref_name == 'main' || github.ref_name == 'master' || needs.conan-recipe-version.outputs.is_release_branch == 'true')) }}
needs: [ conan-recipe-version, conan-package-create-linux ]
uses: ultimaker/cura/.github/workflows/notify.yml@main
with:
success: ${{ contains(join(needs.*.result, ','), 'success') }}
success_title: "New binaries created in ${{ github.repository }}"
success_body: "Created binaries for ${{ needs.conan-recipe-version.outputs.recipe_id_full }}"
failure_title: "Failed to create binaries in ${{ github.repository }}"
failure_body: "Failed to created binaries for ${{ needs.conan-recipe-version.outputs.recipe_id_full }}"
secrets: inherit

@ -1,110 +0,0 @@
name: Export Conan Recipe to server
on:
workflow_call:
inputs:
recipe_id_full:
required: true
type: string
recipe_id_latest:
required: false
type: string
runs_on:
required: true
type: string
python_version:
required: true
type: string
conan_config_branch:
required: false
type: string
conan_logging_level:
required: false
type: string
conan_export_binaries:
required: false
type: boolean
conan_upload_community:
required: false
default: true
type: boolean
env:
CONAN_LOGIN_USERNAME: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD: ${{ secrets.CONAN_PASS }}
CONAN_LOG_RUN_TO_OUTPUT: 1
CONAN_LOGGING_LEVEL: ${{ inputs.conan_logging_level }}
CONAN_NON_INTERACTIVE: 1
jobs:
package-export:
runs-on: ${{ inputs.runs_on }}
steps:
- name: Checkout project
uses: actions/checkout@v3
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: ${{ inputs.python_version }}
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements and Create default Conan profile
run: |
pip install -r https://raw.githubusercontent.com/Ultimaker/Cura/main/.github/workflows/requirements-conan-package.txt
conan profile new default --detect
# Note the runner requirements are always installed from the main branch in the Ultimaker/Cura repo
- name: Cache Conan local repository packages
uses: actions/cache@v3
with:
path: $HOME/.conan/data
key: ${{ runner.os }}-conan-export-cache
- name: Get Conan configuration from branch
if: ${{ inputs.conan_config_branch != '' }}
run: conan config install https://github.com/Ultimaker/conan-config.git -a "-b ${{ inputs.conan_config_branch }}"
- name: Get Conan configuration
run: |
conan config install https://github.com/Ultimaker/conan-config.git
conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
- name: Add Cura private Artifactory remote
run: conan remote add cura-private-conan-dev https://cura.jfrog.io/artifactory/api/conan/cura-private-conan-dev True
- name: Set GH service account for remote cura-conan-dev
run: conan user -p ${{ secrets.CONAN_GH_RUNNER_PASS }} -r cura-private-conan-dev "${{ secrets.CONAN_GH_RUNNER_USER }}"
- name: Export the Package (binaries)
if: ${{ inputs.conan_export_binaries }}
run: conan create . ${{ inputs.recipe_id_full }} --build=missing --update -c tools.build:skip_test=True
- name: Export the Package
if: ${{ !inputs.conan_export_binaries }}
run: conan export . ${{ inputs.recipe_id_full }}
- name: Create the latest alias
if: always()
run: conan alias ${{ inputs.recipe_id_latest }} ${{ inputs.recipe_id_full }}
- name: Upload the Package(s)
if: ${{ always() && inputs.conan_upload_community }}
run: |
conan upload ${{ inputs.recipe_id_full }} -r cura --all -c
conan upload ${{ inputs.recipe_id_latest }} -r cura -c
- name: Upload the Package(s) to the private Artifactory
if: ${{ always() && ! inputs.conan_upload_community }}
run: |
conan upload ${{ inputs.recipe_id_full }} -r cura-private-conan-dev --all -c
conan upload ${{ inputs.recipe_id_latest }} -r cura-private-conan-dev -c

@ -1,217 +0,0 @@
name: Get Conan Recipe Version
on:
workflow_call:
inputs:
project_name:
required: true
type: string
user:
required: false
default: ultimaker
type: string
additional_buildmetadata:
required: false
default: ""
type: string
outputs:
recipe_id_full:
description: "The full Conan recipe id: <name>/<version>@<user>/<channel>"
value: ${{ jobs.get-semver.outputs.recipe_id_full }}
recipe_id_latest:
description: "The full Conan recipe aliased (latest) id: <name>/(latest)@<user>/<channel>"
value: ${{ jobs.get-semver.outputs.recipe_id_latest }}
recipe_semver_full:
description: "The full semver <Major>.<Minor>.<Patch>-<PreReleaseTag>+<BuildMetaData>"
value: ${{ jobs.get-semver.outputs.semver_full }}
is_release_branch:
description: "is current branch a release branch?"
value: ${{ jobs.get-semver.outputs.release_branch }}
user:
description: "The conan user"
value: ${{ jobs.get-semver.outputs.user }}
channel:
description: "The conan channel"
value: ${{ jobs.get-semver.outputs.channel }}
project_name:
description: "The conan projectname"
value: ${{ inputs.project_name }}
jobs:
get-semver:
runs-on: ubuntu-latest
outputs:
recipe_id_full: ${{ steps.get-conan-broadcast-data.outputs.recipe_id_full }}
recipe_id_latest: ${{ steps.get-conan-broadcast-data.outputs.recipe_id_latest }}
semver_full: ${{ steps.get-conan-broadcast-data.outputs.semver_full }}
is_release_branch: ${{ steps.get-conan-broadcast-data.outputs.is_release_branch }}
user: ${{ steps.get-conan-broadcast-data.outputs.user }}
channel: ${{ steps.get-conan-broadcast-data.outputs.channel }}
steps:
- name: Checkout repo
uses: actions/checkout@v3
if: ${{ github.event.pull_request.head.repo.full_name == github.repository }}
with:
fetch-depth: 0
ref: ${{ github.head_ref }}
- name: Checkout repo PR
uses: actions/checkout@v3
if: ${{ github.event.pull_request.head.repo.full_name != github.repository }}
with:
fetch-depth: 0
ref: ${{ github.base_ref }}
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: "3.11.x"
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements and Create default Conan profile
run: |
pip install -r .github/workflows/requirements-conan-package.txt
pip install gitpython
- id: get-conan-broadcast-data
name: Get Conan broadcast data
run: |
import subprocess
import os
from conan.tools.scm import Version
from conan.errors import ConanException
from git import Repo
repo = Repo('.')
user = "${{ inputs.user }}".lower()
project_name = "${{ inputs.project_name }}"
event_name = "${{ github.event_name }}"
issue_number = "${{ github.ref }}".split('/')[2]
is_tag = "${{ github.ref_type }}" == "tag"
is_release_branch = False
ref_name = "${{ github.base_ref }}" if event_name == "pull_request" else "${{ github.ref_name }}"
buildmetadata = "" if "${{ inputs.additional_buildmetadata }}" == "" else "${{ inputs.additional_buildmetadata }}_"
# FIXME: for when we push a tag (such as an release)
channel = "testing"
if is_tag:
branch_version = Version(ref_name)
is_release_branch = True
channel = "_"
user = "_"
actual_version = f"{branch_version}"
else:
try:
branch_version = Version(repo.active_branch.name)
except ConanException:
branch_version = Version('0.0.0')
if ref_name == f"{branch_version.major}.{branch_version.minor}":
channel = 'stable'
is_release_branch = True
elif ref_name in ("main", "master"):
channel = 'testing'
else:
channel = "_".join(repo.active_branch.name.replace("-", "_").split("_")[:2]).lower()
if "pull_request" in event_name:
channel = f"pr_{issue_number}"
# %% Get the actual version
latest_branch_version = Version("0.0.0")
latest_branch_tag = None
for tag in repo.active_branch.repo.tags:
if str(tag).startswith("firmware") or str(tag).startswith("master"):
continue # Quick-fix for the versioning scheme name of the embedded team in fdm_materials(_private) repo
try:
version = Version(tag)
except ConanException:
continue
if version > latest_branch_version and version < Version("6.0.0"):
# FIXME: stupid old Cura tags 13.04 etc. keep popping up; also the fdm_material tags for firmware are messing with this
latest_branch_version = version
latest_branch_tag = repo.tag(tag)
if latest_branch_tag:
# %% Get the actual version
sha_commit = repo.commit().hexsha[:6]
latest_branch_version_prerelease = latest_branch_version.pre
if latest_branch_version.pre and not "." in str(latest_branch_version.pre):
# The prerelease did not contain a version number, default it to 1
latest_branch_version_prerelease = f"{latest_branch_version.pre}.1"
if event_name == "pull_request":
actual_version = f"{latest_branch_version.major}.{latest_branch_version.minor}.{latest_branch_version.patch}-{str(latest_branch_version_prerelease).lower()}+{buildmetadata}pr_{issue_number}_{sha_commit}"
channel_metadata = f"{channel}_{sha_commit}"
else:
if channel in ("stable", "_", ""):
channel_metadata = f"{sha_commit}"
else:
channel_metadata = f"{channel}_{sha_commit}"
if is_release_branch:
if (latest_branch_version.pre == "" or latest_branch_version.pre is None) and branch_version > latest_branch_version:
actual_version = f"{branch_version.major}.{branch_version.minor}.0-beta.1+{buildmetadata}{channel_metadata}"
elif latest_branch_version.pre == "":
# An actual full release has been created, we are working on patch
bump_up_patch = int(str(latest_branch_version.patch)) + 1
actual_version = f"{latest_branch_version.major}.{latest_branch_version.minor}.{bump_up_patch}-beta.1+{buildmetadata}{channel_metadata}"
elif latest_branch_version.pre is None:
actual_version = f"{latest_branch_version.major}.{latest_branch_version.minor}.{int(latest_branch_version.patch.value) + 1}-beta.1+{buildmetadata}{channel_metadata}"
else:
# A beta release has been created; we are working toward the next beta or full release
bump_up_release_tag = int(str(latest_branch_version.pre).split('.')[1]) + 1
actual_version = f"{latest_branch_version.major}.{latest_branch_version.minor}.{latest_branch_version.patch}-{str(latest_branch_version.pre).split('.')[0]}.{bump_up_release_tag}+{buildmetadata}{channel_metadata}"
else:
max_branches_version = Version("0.0.0")
for branch in repo.references:
try:
if "remotes/origin" in branch.abspath:
b_version = Version(branch.name.split("/")[-1])
if b_version < Version("6.0.0") and b_version > max_branches_version:
max_branches_version = b_version
except:
pass
if max_branches_version > latest_branch_version:
actual_version = f"{max_branches_version.major}.{int(str(max_branches_version.minor)) + 1}.0-alpha+{buildmetadata}{channel}_{sha_commit}"
else:
actual_version = f"{latest_branch_version.major}.{int(str(latest_branch_version.minor)) + 1}.0-alpha+{buildmetadata}{channel_metadata}"
# %% Set the environment output
output_env = os.environ["GITHUB_OUTPUT"]
content = ""
if os.path.exists(output_env):
with open(output_env, "r") as f:
content = f.read()
with open(output_env, "w") as f:
f.write(content)
f.writelines(f"name={project_name}\n")
f.writelines(f"version={actual_version}\n")
f.writelines(f"channel={channel}\n")
f.writelines(f"recipe_id_full={project_name}/{actual_version}@{user}/{channel}\n")
f.writelines(f"recipe_id_latest={project_name}/latest@{user}/{channel}\n")
f.writelines(f"semver_full={actual_version}\n")
f.writelines(f"is_release_branch={str(is_release_branch).lower()}\n")
summary_env = os.environ["GITHUB_STEP_SUMMARY"]
with open(summary_env, "w") as f:
f.writelines(f"# {project_name}\n")
f.writelines(f"name={project_name}\n")
f.writelines(f"version={actual_version}\n")
f.writelines(f"channel={channel}\n")
f.writelines(f"recipe_id_full={project_name}/{actual_version}@{user}/{channel}\n")
f.writelines(f"recipe_id_latest={project_name}/latest@{user}/{channel}\n")
f.writelines(f"semver_full={actual_version}\n")
f.writelines(f"is_release_branch={str(is_release_branch).lower()}\n")
shell: python
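As a purely hypothetical illustration (not taken from this commit), a push to a branch named CURA-1234 at commit abc123, with 5.6.0 as the newest release tag and no newer release branch, would make this step write roughly the following to GITHUB_OUTPUT:
name=cura
version=5.7.0-alpha+cura_1234_abc123
channel=cura_1234
recipe_id_full=cura/5.7.0-alpha+cura_1234_abc123@ultimaker/cura_1234
recipe_id_latest=cura/latest@ultimaker/cura_1234
semver_full=5.7.0-alpha+cura_1234_abc123
is_release_branch=false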

@ -39,56 +39,18 @@ env:
CONAN_ARGS: ${{ inputs.conan_args || '' }}
ENTERPRISE: ${{ inputs.enterprise || false }}
STAGING: ${{ inputs.staging || false }}
LATEST_RELEASE: '5.6'
LATEST_RELEASE_SCHEDULE_HOUR: 4
jobs:
default_values:
uses: ultimaker/cura-workflows/.github/workflows/cura-installer-default-value.yml@main
with:
cura_conan_version: ${{ inputs.cura_conan_version }}
latest_release: '5.6'
latest_release_schedule_hour: 4
latest_release_tag: 'nightly'
runs-on: ubuntu-latest
outputs:
cura_conan_version: ${{ steps.default.outputs.cura_conan_version }}
release_tag: ${{ steps.default.outputs.release_tag }}
steps:
- name: Output default values
id: default
shell: python
run: |
import os
import datetime
if "${{ github.event_name }}" != "schedule":
cura_conan_version = "${{ github.event.inputs.cura_conan_version }}"
else:
now = datetime.datetime.now()
cura_conan_version = "cura/latest@ultimaker/stable" if now.hour == int(os.environ['LATEST_RELEASE_SCHEDULE_HOUR']) else "cura/latest@ultimaker/testing"
release_tag = f"nightly-{os.environ['LATEST_RELEASE']}" if "/stable" in cura_conan_version else "nightly"
# Set cura_conan_version environment variable
output_env = os.environ["GITHUB_OUTPUT"]
content = ""
if os.path.exists(output_env):
with open(output_env, "r") as f:
content = f.read()
with open(output_env, "w") as f:
f.write(content)
f.writelines(f"cura_conan_version={cura_conan_version}\n")
f.writelines(f"release_tag={release_tag}\n")
summary_env = os.environ["GITHUB_STEP_SUMMARY"]
content = ""
if os.path.exists(summary_env):
with open(summary_env, "r") as f:
content = f.read()
with open(summary_env, "w") as f:
f.write(content)
f.writelines(f"# cura_conan_version = {cura_conan_version}\n")
f.writelines(f"# release_tag = {release_tag}\n")
windows-installer:
uses: ./.github/workflows/windows.yml
uses: ultimaker/cura-workflows/.github/workflows/cura-installer-windows.yml@main
needs: [ default_values ]
with:
cura_conan_version: ${{ needs.default_values.outputs.cura_conan_version }}
@ -100,7 +62,7 @@ jobs:
secrets: inherit
linux-installer:
uses: ./.github/workflows/linux.yml
uses: ultimaker/cura-workflows/.github/workflows/cura-installer-linux.yml@main
needs: [ default_values ]
with:
cura_conan_version: ${{ needs.default_values.outputs.cura_conan_version }}
@ -112,7 +74,7 @@ jobs:
secrets: inherit
macos-installer:
uses: ./.github/workflows/macos.yml
uses: ultimaker/cura-workflows/.github/workflows/cura-installer-macos.yml@main
needs: [ default_values ]
with:
cura_conan_version: ${{ needs.default_values.outputs.cura_conan_version }}
@ -124,7 +86,7 @@ jobs:
secrets: inherit
macos-arm-installer:
uses: ./.github/workflows/macos.yml
uses: ultimaker/cura-workflows/.github/workflows/cura-installer-macos.yml@main
needs: [ default_values ]
with:
cura_conan_version: ${{ needs.default_values.outputs.cura_conan_version }}
@ -142,9 +104,9 @@ jobs:
needs: [ default_values, windows-installer, linux-installer, macos-installer, macos-arm-installer ]
steps:
- name: Checkout
uses: actions/checkout@v3
# It's not necessary to download all three, but it does make sure we have at least one if an OS is skipped.
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Download the run info
uses: actions/download-artifact@v2

@ -1,5 +1,5 @@
name: Linux Installer
run-name: ${{ inputs.cura_conan_version }} for Linux-${{ inputs.architecture }} by @${{ github.actor }}
run-name: ${{ inputs.cura_conan_version }} by @${{ github.actor }}
on:
workflow_dispatch:
@ -39,232 +39,19 @@ on:
options:
- ubuntu-22.04
workflow_call:
inputs:
cura_conan_version:
description: 'Cura Conan Version'
default: 'cura/latest@ultimaker/testing'
required: true
type: string
conan_args:
description: 'Conan args: eq.: --require-override'
default: ''
required: false
type: string
enterprise:
description: 'Build Cura as an Enterprise edition'
default: false
required: true
type: boolean
staging:
description: 'Use staging API'
default: false
required: true
type: boolean
architecture:
description: 'Architecture'
required: true
default: 'X64'
type: string
operating_system:
description: 'OS'
required: true
default: 'ubuntu-22.04'
type: string
env:
CONAN_ARGS: ${{ inputs.conan_args || '' }}
ENTERPRISE: ${{ inputs.enterprise || false }}
STAGING: ${{ inputs.staging || false }}
jobs:
installer:
uses: ultimaker/cura-workflows/.github/workflows/cura-installer-linux.yml@main
with:
cura_conan_version: ${{ inputs.cura_conan_version }}
conan_args: ${{ inputs.conan_args }}
enterprise: ${{ inputs.enterprise == 'true' }}
staging: ${{ inputs.staging == 'true' }}
architecture: ${{ inputs.architecture }}
operating_system: ${{ inputs.operating_system }}
env:
CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
GPG_PRIVATE_KEY: ${{ secrets.GPG_PRIVATE_KEY }}
CURA_CONAN_VERSION: ${{ inputs.cura_conan_version }}
ENTERPRISE: ${{ inputs.enterprise }}
STAGING: ${{ inputs.staging }}
jobs:
cura-installer-create:
runs-on: ${{ inputs.operating_system }}
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: '3.10.x'
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements for runner
run: pip install -r .github/workflows/requirements-conan-package.txt
- name: Cache Conan local repository packages (Bash)
uses: actions/cache@v3
with:
path: |
$HOME/.conan/data
$HOME/.conan/conan_download_cache
key: conan-${{ runner.os }}-${{ runner.arch }}-installer-cache
- name: Install Linux system requirements
run: |
sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
sudo apt update
sudo apt upgrade
sudo apt install build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config binutils coreutils desktop-file-utils fakeroot fuse libgdk-pixbuf2.0-dev patchelf squashfs-tools strace util-linux zsync libxcb-cursor-dev -y
# Get the AppImage tool
wget --no-check-certificate --quiet https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-x86_64.AppImage -O $GITHUB_WORKSPACE/appimagetool
chmod +x $GITHUB_WORKSPACE/appimagetool
echo "APPIMAGETOOL_LOCATION=$GITHUB_WORKSPACE/appimagetool" >> $GITHUB_ENV
# Get the AppImage builder
wget --no-check-certificate --quiet -O $GITHUB_WORKSPACE/appimage-builder-x86_64.AppImage https://github.com/AppImageCrafters/appimage-builder/releases/download/v1.1.0/appimage-builder-1.1.0-x86_64.AppImage
chmod +x appimage-builder-x86_64.AppImage
echo "APPIMAGEBUILDER_LOCATION=$GITHUB_WORKSPACE/appimage-builder-x86_64.AppImage" >> $GITHUB_ENV
# Make sure these tools can be found on the path
echo "$GITHUB_WORKSPACE" >> $GITHUB_PATH
- name: Install GCC-13
run: |
sudo apt install g++-13 gcc-13 -y
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 13
sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-13 13
- name: Create the default Conan profile
run: conan profile new default --detect --force
- name: Configure GPG Key Linux (Bash)
run: echo -n "$GPG_PRIVATE_KEY" | base64 --decode | gpg --import
- name: Get Conan configuration
run: |
conan config install https://github.com/Ultimaker/conan-config.git
conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
- name: Use Conan download cache (Bash)
run: conan config set storage.download_cache="$HOME/.conan/conan_download_cache"
- name: Create the Packages (Bash)
run: conan install $CURA_CONAN_VERSION ${{ inputs.conan_args }} --build=missing --update -if cura_inst -g VirtualPythonEnv -o cura:enterprise=$ENTERPRISE -o cura:staging=$STAGING -c tools.build:skip_test=True
- name: Remove internal packages before uploading
run: |
conan remove "*@internal/*" -f || true
conan remove "cura_private_data*" -f || true
- name: Upload the Package(s)
if: always()
run: |
conan upload "*" -r cura --all -c
- name: Set Environment variables for Cura (bash)
run: |
. ./cura_inst/bin/activate_github_actions_env.sh
. ./cura_inst/bin/activate_github_actions_version_env.sh
# FIXME: This is a workaround to ensure that we use and pack a shared library for OpenSSL 1.1.1l. We currently compile
# OpenSSL statically for CPython, but our Python dependencies (such as PyQt6) require a shared library.
# Because Conan won't allow for building the same library with two different options (easily) we need to install it explicitly
# and do a manual copy to the VirtualEnv, such that Pyinstaller can find it.
- name: Install OpenSSL shared
run: conan install openssl/1.1.1l@_/_ --build=missing --update -o openssl:shared=True -g deploy
- name: Copy OpenSSL shared (Bash)
run: |
cp ./openssl/lib/*.so* ./cura_inst/bin/ || true
cp ./openssl/lib/*.dylib* ./cura_inst/bin/ || true
- name: Create the Cura dist
run: pyinstaller ./cura_inst/UltiMaker-Cura.spec
- name: Output the name file name and extension
id: filename
shell: python
run: |
import os
enterprise = "-Enterprise" if "${{ inputs.enterprise }}" == "true" else ""
installer_filename = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-linux-${{ inputs.architecture }}"
output_env = os.environ["GITHUB_OUTPUT"]
content = ""
if os.path.exists(output_env):
with open(output_env, "r") as f:
content = f.read()
with open(output_env, "w") as f:
f.write(content)
f.writelines(f"INSTALLER_FILENAME={installer_filename}\n")
- name: Summarize the used dependencies
shell: python
run: |
import os
from cura.CuraVersion import ConanInstalls, PythonInstalls
summary_env = os.environ["GITHUB_STEP_SUMMARY"]
content = ""
if os.path.exists(summary_env):
with open(summary_env, "r") as f:
content = f.read()
with open(summary_env, "w") as f:
f.write(content)
f.writelines("# ${{ steps.filename.outputs.INSTALLER_FILENAME }}\n")
f.writelines("## Conan packages:\n")
for dep_name, dep_info in ConanInstalls.items():
f.writelines(f"`{dep_name} {dep_info['version']} {dep_info['revision']}`\n")
f.writelines("## Python modules:\n")
for dep_name, dep_info in PythonInstalls.items():
f.writelines(f"`{dep_name} {dep_info['version']}`\n")
- name: Create the Linux AppImage (Bash)
run: |
python ../cura_inst/packaging/AppImage-builder/create_appimage.py ./UltiMaker-Cura $CURA_VERSION_FULL "${{ steps.filename.outputs.INSTALLER_FILENAME }}.AppImage"
chmod +x "${{ steps.filename.outputs.INSTALLER_FILENAME }}.AppImage"
working-directory: dist
- name: Upload the AppImage
uses: actions/upload-artifact@v3
with:
name: ${{ steps.filename.outputs.INSTALLER_FILENAME }}-AppImage
path: |
dist/${{ steps.filename.outputs.INSTALLER_FILENAME }}.AppImage
retention-days: 5
- name: Upload the asc
uses: actions/upload-artifact@v3
with:
name: ${{ steps.filename.outputs.INSTALLER_FILENAME }}-asc
path: |
dist/${{ steps.filename.outputs.INSTALLER_FILENAME }}.AppImage.asc
retention-days: 5
- name: Write the run info
shell: python
run: |
import os
with open("run_info.sh", "w") as f:
f.writelines(f'echo "CURA_VERSION_FULL={os.environ["CURA_VERSION_FULL"]}" >> $GITHUB_ENV\n')
- name: Upload the run info
uses: actions/upload-artifact@v3
with:
name: linux-run-info
path: |
run_info.sh
retention-days: 5
notify-export:
if: ${{ always() }}
needs: [ cura-installer-create ]
uses: ultimaker/cura/.github/workflows/notify.yml@main
with:
success: ${{ contains(join(needs.*.result, ','), 'success') }}
success_title: "Create the Cura distributions"
success_body: "Installers for ${{ inputs.cura_conan_version }}"
failure_title: "Failed to create the Cura distributions"
failure_body: "Failed to create at least 1 installer for ${{ inputs.cura_conan_version }}"
secrets: inherit

@ -1,5 +1,5 @@
name: Macos Installer
name: MacOS Installer
run-name: ${{ inputs.cura_conan_version }} for Macos-${{ inputs.architecture }} by @${{ github.actor }}
run-name: ${{ inputs.cura_conan_version }} by @${{ github.actor }}
on:
workflow_dispatch:
@ -42,241 +42,20 @@ on:
- self-hosted-ARM64
- macos-11
- macos-12
workflow_call:
inputs:
cura_conan_version:
description: 'Cura Conan Version'
default: 'cura/latest@ultimaker/testing'
required: true
type: string
conan_args:
description: 'Conan args: eq.: --require-override'
default: ''
required: false
type: string
enterprise:
description: 'Build Cura as an Enterprise edition'
default: false
required: true
type: boolean
staging:
description: 'Use staging API'
default: false
required: true
type: boolean
architecture:
description: 'Architecture'
required: true
default: 'ARM64'
type: string
operating_system:
description: 'OS'
required: true
default: 'self-hosted-ARM64'
type: string
env:
CONAN_ARGS: ${{ inputs.conan_args || '' }}
ENTERPRISE: ${{ inputs.enterprise || false }}
STAGING: ${{ inputs.staging || false }}
jobs:
installer:
uses: ultimaker/cura-workflows/.github/workflows/cura-installer-macos.yml@main
with:
cura_conan_version: ${{ inputs.cura_conan_version }}
conan_args: ${{ inputs.conan_args }}
enterprise: ${{ inputs.enterprise == 'true' }}
staging: ${{ inputs.staging == 'true' }}
architecture: ${{ inputs.architecture }}
operating_system: ${{ inputs.operating_system }}
env:
CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
CODESIGN_IDENTITY: ${{ secrets.CODESIGN_IDENTITY }}
MAC_NOTARIZE_USER: ${{ secrets.MAC_NOTARIZE_USER }}
MAC_NOTARIZE_PASS: ${{ secrets.MAC_NOTARIZE_PASS }}
MACOS_CERT_P12: ${{ secrets.MACOS_CERT_P12 }}
MACOS_CERT_INSTALLER_P12: ${{ secrets.MACOS_CERT_INSTALLER_P12 }}
MACOS_CERT_USER: ${{ secrets.MACOS_CERT_USER }}
MACOS_CERT_PASSPHRASE: ${{ secrets.MACOS_CERT_PASSPHRASE }}
CURA_CONAN_VERSION: ${{ inputs.cura_conan_version }}
ENTERPRISE: ${{ inputs.enterprise }}
STAGING: ${{ inputs.staging }}
jobs:
cura-installer-create:
runs-on: ${{ inputs.operating_system }}
outputs:
INSTALLER_FILENAME: ${{ steps.filename.outputs.INSTALLER_FILENAME }}
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: '3.11.x'
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements for runner
run: pip install -r .github/workflows/requirements-conan-package.txt
- name: Cache Conan local repository packages (Bash)
uses: actions/cache@v3
with:
path: |
$HOME/.conan/data
$HOME/.conan/conan_download_cache
key: conan-${{ runner.os }}-${{ runner.arch }}-installer-cache
- name: Install MacOS system requirements
run: brew install cmake autoconf automake ninja create-dmg
- name: Create the default Conan profile
run: conan profile new default --detect --force
- name: Remove Macos keychain (Bash)
run: security delete-keychain signing_temp.keychain || true
- name: Configure Macos keychain Developer Cert(Bash)
id: macos-keychain-developer-cert
uses: apple-actions/import-codesign-certs@v1
with:
keychain-password: ${{ secrets.MACOS_KEYCHAIN_PASSWORD }}
p12-file-base64: ${{ secrets.MACOS_CERT_P12 }}
p12-password: ${{ secrets.MACOS_CERT_PASSPHRASE }}
- name: Configure Macos keychain Installer Cert (Bash)
id: macos-keychain-installer-cert
uses: apple-actions/import-codesign-certs@v1
with:
keychain-password: ${{ secrets.MACOS_KEYCHAIN_PASSWORD }}
create-keychain: false # keychain is created in previous use of action.
p12-file-base64: ${{ secrets.MACOS_CERT_INSTALLER_P12 }}
p12-password: ${{ secrets.MACOS_CERT_PASSPHRASE }}
- name: Remove private Artifactory
run: conan remote remove cura-private-conan-dev || true
- name: Get Conan configuration
run: |
conan config install https://github.com/Ultimaker/conan-config.git
conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
- name: Use Conan download cache (Bash)
run: conan config set storage.download_cache="$HOME/.conan/conan_download_cache"
- name: Create the Packages (Bash)
run: conan install $CURA_CONAN_VERSION ${{ inputs.conan_args }} --build=missing --update -if cura_inst -g VirtualPythonEnv -o cura:enterprise=$ENTERPRISE -o cura:staging=$STAGING -c tools.build:skip_test=True
- name: Remove internal packages before uploading
run: |
conan remove "*@internal/*" -f || true
conan remove "cura_private_data*" -f || true
- name: Upload the Package(s)
run: |
conan upload "*" -r cura --all -c
- name: Set Environment variables for Cura (bash)
run: |
. ./cura_inst/bin/activate_github_actions_env.sh
. ./cura_inst/bin/activate_github_actions_version_env.sh
- name: Unlock Macos keychain (Bash)
run: security unlock -p $TEMP_KEYCHAIN_PASSWORD signing_temp.keychain
env:
TEMP_KEYCHAIN_PASSWORD: ${{ steps.macos-keychain-developer-cert.outputs.keychain-password }}
# FIXME: This is a workaround to ensure that we use and pack a shared library for OpenSSL 1.1.1l. We currently compile
# OpenSSL statically for CPython, but our Python dependencies (such as PyQt6) require a shared library.
# Because Conan won't allow for building the same library with two different options (easily) we need to install it explicitly
# and do a manual copy to the VirtualEnv, such that Pyinstaller can find it.
- name: Install OpenSSL shared
run: conan install openssl/1.1.1l@_/_ --build=missing --update -o openssl:shared=True -g deploy
- name: Copy OpenSSL shared (Bash)
run: |
cp ./openssl/lib/*.so* ./cura_inst/bin/ || true
cp ./openssl/lib/*.dylib* ./cura_inst/bin/ || true
- name: Create the Cura dist
run: pyinstaller ./cura_inst/UltiMaker-Cura.spec
- name: Output the name file name and extension
id: filename
shell: python
run: |
import os
enterprise = "-Enterprise" if "${{ inputs.enterprise }}" == "true" else ""
installer_filename = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-macos-${{ inputs.architecture }}"
output_env = os.environ["GITHUB_OUTPUT"]
content = ""
if os.path.exists(output_env):
with open(output_env, "r") as f:
content = f.read()
with open(output_env, "w") as f:
f.write(content)
f.writelines(f"INSTALLER_FILENAME={installer_filename}\n")
- name: Summarize the used dependencies
shell: python
run: |
import os
from cura.CuraVersion import ConanInstalls, PythonInstalls
summary_env = os.environ["GITHUB_STEP_SUMMARY"]
content = ""
if os.path.exists(summary_env):
with open(summary_env, "r") as f:
content = f.read()
with open(summary_env, "w") as f:
f.write(content)
f.writelines("# ${{ steps.filename.outputs.INSTALLER_FILENAME }}\n")
f.writelines("## Conan packages:\n")
for dep_name, dep_info in ConanInstalls.items():
f.writelines(f"`{dep_name} {dep_info['version']} {dep_info['revision']}`\n")
f.writelines("## Python modules:\n")
for dep_name, dep_info in PythonInstalls.items():
f.writelines(f"`{dep_name} {dep_info['version']}`\n")
- name: Create the Macos dmg (Bash)
run: python ../cura_inst/packaging/MacOS/build_macos.py --source_path ../cura_inst --dist_path . --cura_conan_version $CURA_CONAN_VERSION --filename "${{ steps.filename.outputs.INSTALLER_FILENAME }}" --build_dmg --build_pkg --app_name "$CURA_APP_NAME"
working-directory: dist
- name: Upload the dmg
uses: actions/upload-artifact@v3
with:
name: ${{ steps.filename.outputs.INSTALLER_FILENAME }}-dmg
path: |
dist/${{ steps.filename.outputs.INSTALLER_FILENAME }}.dmg
retention-days: 5
- name: Upload the pkg
uses: actions/upload-artifact@v3
with:
name: ${{ steps.filename.outputs.INSTALLER_FILENAME }}-pkg
path: |
dist/${{ steps.filename.outputs.INSTALLER_FILENAME }}.pkg
retention-days: 5
- name: Write the run info
shell: python
run: |
import os
with open("run_info.sh", "w") as f:
f.writelines(f'echo "CURA_VERSION_FULL={os.environ["CURA_VERSION_FULL"]}" >> $GITHUB_ENV\n')
- name: Upload the run info
uses: actions/upload-artifact@v3
with:
name: macos-run-info
path: |
run_info.sh
retention-days: 5
notify-export:
if: ${{ always() }}
needs: [ cura-installer-create ]
uses: ultimaker/cura/.github/workflows/notify.yml@main
with:
success: ${{ contains(join(needs.*.result, ','), 'success') }}
success_title: "Create the Cura distributions"
success_body: "Installers for ${{ inputs.cura_conan_version }}"
failure_title: "Failed to create the Cura distributions"
failure_body: "Failed to create at least 1 installer for ${{ inputs.cura_conan_version }}"
secrets: inherit

@ -1,54 +0,0 @@
name: Get Conan Recipe Version
on:
workflow_call:
inputs:
success:
required: true
type: boolean
success_title:
required: true
type: string
success_body:
required: true
type: string
failure_title:
required: true
type: string
failure_body:
required: true
type: string
jobs:
slackNotification:
name: Slack Notification
runs-on: ubuntu-latest
steps:
- name: Slack notify on-success
if: ${{ inputs.success }}
uses: rtCamp/action-slack-notify@v2
env:
SLACK_USERNAME: ${{ github.repository }}
SLACK_COLOR: green
SLACK_ICON: https://github.com/Ultimaker/Cura/blob/main/icons/cura-128.png?raw=true
SLACK_TITLE: ${{ inputs.success_title }}
SLACK_MESSAGE: ${{ inputs.success_body }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
- name: Slack notify on-failure
if: ${{ !inputs.success }}
uses: rtCamp/action-slack-notify@v2
env:
SLACK_USERNAME: ${{ github.repository }}
SLACK_COLOR: red
SLACK_ICON: https://github.com/Ultimaker/Cura/blob/main/icons/cura-128.png?raw=true
SLACK_TITLE: ${{ inputs.failure_title }}
SLACK_MESSAGE: ${{ inputs.failure_body }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}

@ -2,14 +2,9 @@ name: process-pull-request
on:
pull_request_target:
types: [opened, reopened, edited, synchronize, review_requested, ready_for_review, assigned]
types: [ opened, reopened, edited, review_requested, ready_for_review, assigned ]
jobs:
add_label:
uses: ultimaker/cura-workflows/.github/workflows/process-pull-request.yml@main
secrets: inherit
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions-ecosystem/action-add-labels@v1
if: ${{ github.event.pull_request.head.repo.full_name != github.repository }}
with:
labels: 'PR: Community Contribution :crown:'

@ -1,2 +1,2 @@
conan>=1.60.2,<2.0.0
sip
sip<=6.7.12

@ -7,76 +7,7 @@ on:
jobs:
publish-test-results:
uses: ultimaker/cura-workflows/.github/workflows/unit-test-post.yml@main
with:
event: ${{ github.event.workflow_run.event }}
conclusion: ${{ github.event.workflow_run.conclusion }}
if: ${{ github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'success' }}
runs-on: ubuntu-latest
steps:
- name: Download analysis results
uses: actions/github-script@v3.1.0
with:
script: |
let artifacts = await github.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: ${{github.event.workflow_run.id }},
});
let matchArtifact = artifacts.data.artifacts.filter((artifact) => {
return artifact.name == "test-result"
})[0];
let download = await github.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: matchArtifact.id,
archive_format: "zip",
});
let fs = require("fs");
fs.writeFileSync("${{github.workspace}}/test-result.zip", Buffer.from(download.data));
- name: Set environment variables
run: |
mkdir pr_env
unzip test-result.zip -d pr_env
echo "pr_id=$(cat pr_env/pr-id.txt)" >> $GITHUB_ENV
echo "pr_head_repo=$(cat pr_env/pr-head-repo.txt)" >> $GITHUB_ENV
echo "pr_head_ref=$(cat pr_env/pr-head-ref.txt)" >> $GITHUB_ENV
- uses: actions/checkout@v3
with:
repository: ${{ env.pr_head_repo }}
ref: ${{ env.pr_head_ref }}
persist-credentials: false
- name: Redownload analysis results
uses: actions/github-script@v3.1.0
with:
script: |
let artifacts = await github.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: ${{github.event.workflow_run.id }},
});
let matchArtifact = artifacts.data.artifacts.filter((artifact) => {
return artifact.name == "test-result"
})[0];
let download = await github.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: matchArtifact.id,
archive_format: "zip",
});
let fs = require("fs");
fs.writeFileSync("${{github.workspace}}/test-result.zip", Buffer.from(download.data));
- name: Extract analysis results
run: |
mkdir -p tests
unzip test-result.zip -d tests
- name: Publish Unit Test Results
id: test-results
uses: EnricoMi/publish-unit-test-result-action@v1
with:
files: "tests/**/*.xml"
- name: Conclusion
run: echo "Conclusion is ${{ steps.test-results.outputs.json && fromJSON( steps.test-results.outputs.json ).conclusion }}"

@ -1,4 +1,3 @@
---
name: unit-test
on:
@ -9,23 +8,18 @@ on:
- 'cura/**'
- 'icons/**'
- 'tests/**'
- 'packaging/**'
- '.github/workflows/conan-*.yml'
- '.github/workflows/unit-test.yml'
- '.github/workflows/notify.yml'
- '.github/workflows/requirements-runner.txt'
- '.github/workflows/requirements-conan-package.txt'
- 'requirements*.txt'
- 'conanfile.py'
- 'conandata.yml'
- 'GitVersion.yml'
- '*.jinja'
branches:
- main
- 'CURA-*'
- '[1-9]+.[0-9]+'
- 'PP-*'
- '[0-9]+.[0-9]+'
tags:
- '[0-9]+.[0-9]+.[0-9]+'
- '[0-9]+.[0-9]+-beta'
pull_request:
paths:
- 'plugins/**'
@ -33,134 +27,35 @@ on:
- 'cura/**'
- 'icons/**'
- 'tests/**'
- 'packaging/**'
- '.github/workflows/conan-*.yml'
- '.github/workflows/unit-test.yml'
- '.github/workflows/notify.yml'
- '.github/workflows/requirements-runner.txt'
- '.github/workflows/requirements-conan-package.txt'
- 'requirements*.txt'
- 'conanfile.py'
- 'conandata.yml'
- 'GitVersion.yml'
- '*.jinja'
branches:
- main
- '[1-9]+.[0-9]+'
- '[0-9]+.[0-9]+'
tags:
- '[0-9]+.[0-9]+.[0-9]+'
- '[0-9]+.[0-9]+-beta'
env:
CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
CONAN_LOGIN_USERNAME_CURA_CE: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD_CURA_CE: ${{ secrets.CONAN_PASS }}
CONAN_LOG_RUN_TO_OUTPUT: 1
CONAN_LOGGING_LEVEL: info
CONAN_NON_INTERACTIVE: 1
permissions:
contents: read
env:
CONAN_LOGIN_USERNAME: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD: ${{ secrets.CONAN_PASS }}
jobs:
conan-recipe-version:
uses: ultimaker/cura/.github/workflows/conan-recipe-version.yml@main
uses: ultimaker/cura-workflows/.github/workflows/conan-recipe-version.yml@main
with:
project_name: cura
testing:
uses: ultimaker/cura-workflows/.github/workflows/unit-test.yml@main
needs: [ conan-recipe-version ]
with:
recipe_id_full: ${{ needs.conan-recipe-version.outputs.recipe_id_full }}
conan_extra_args: '-g VirtualPythonEnv -o cura:devtools=True -c tools.build:skip_test=False'
unit_test_cmd: 'pytest --junitxml=junit_cura.xml'
unit_test_dir: 'tests'
conan_generator_dir: './venv/bin'
runs-on: ubuntu-22.04
needs: [ conan-recipe-version ]
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 2
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: '3.11.x'
architecture: 'x64'
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements and Create default Conan profile
run: pip install -r requirements-conan-package.txt
working-directory: .github/workflows/
- name: Use Conan download cache (Bash)
if: ${{ runner.os != 'Windows' }}
run: conan config set storage.download_cache="$HOME/.conan/conan_download_cache"
- name: Cache Conan local repository packages (Bash)
uses: actions/cache@v3
if: ${{ runner.os != 'Windows' }}
with:
path: |
$HOME/.conan/data
$HOME/.conan/conan_download_cache
key: conan-${{ runner.os }}-${{ runner.arch }}-unit-cache
# NOTE: Due to what are probably github issues, we have to remove the cache and reconfigure before the rest.
# This is maybe because grub caches the disk it uses last time, which is recreated each time.
- name: Install Linux system requirements
if: ${{ runner.os == 'Linux' }}
run: |
sudo rm /var/cache/debconf/config.dat
sudo dpkg --configure -a
sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
sudo apt update
sudo apt upgrade
sudo apt install build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config -y
- name: Install GCC-13
run: |
sudo apt install g++-13 gcc-13 -y
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 13
sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-13 13
- name: Get Conan configuration
run: |
conan config install https://github.com/Ultimaker/conan-config.git
conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
- name: Get Conan profile
run: conan profile new default --detect --force
- name: Install dependencies
run: conan install . ${{ needs.conan-recipe-version.outputs.recipe_id_full }} --build=missing --update -o cura:devtools=True -g VirtualPythonEnv -if venv
- name: Upload the Dependency package(s)
run: conan upload "*" -r cura --all -c
- name: Set Environment variables for Cura (bash)
if: ${{ runner.os != 'Windows' }}
run: |
. ./venv/bin/activate_github_actions_env.sh
- name: Run Unit Test
id: run-test
run: |
pytest --junitxml=junit_cura.xml
working-directory: tests
- name: Save PR metadata
if: always()
run: |
echo ${{ github.event.number }} > pr-id.txt
echo ${{ github.event.pull_request.head.repo.full_name }} > pr-head-repo.txt
echo ${{ github.event.pull_request.head.ref }} > pr-head-ref.txt
working-directory: tests
- name: Upload Test Results
if: always()
uses: actions/upload-artifact@v3
with:
name: test-result
path: |
tests/**/*.xml
tests/pr-id.txt
tests/pr-head-repo.txt
tests/pr-head-ref.txt

@ -1,5 +1,5 @@
name: Windows Installer
run-name: ${{ inputs.cura_conan_version }} for Windows-${{ inputs.architecture }} by @${{ github.actor }}
run-name: ${{ inputs.cura_conan_version }} by @${{ github.actor }}
on:
workflow_dispatch:
@ -38,235 +38,20 @@ on:
type: choice
options:
- windows-2022
workflow_call:
inputs:
cura_conan_version:
description: 'Cura Conan Version'
default: 'cura/latest@ultimaker/testing'
required: true
type: string
conan_args:
description: 'Conan args: eq.: --require-override'
default: ''
required: false
type: string
enterprise:
description: 'Build Cura as an Enterprise edition'
default: false
required: true
type: boolean
staging:
description: 'Use staging API'
default: false
required: true
type: boolean
architecture:
description: 'Architecture'
required: true
default: 'X64'
type: string
operating_system:
description: 'OS'
required: true
default: 'windows-2022'
type: string
env:
CONAN_ARGS: ${{ inputs.conan_args || '' }}
ENTERPRISE: ${{ inputs.enterprise || false }}
STAGING: ${{ inputs.staging || false }}
jobs:
installer:
uses: ultimaker/cura-workflows/.github/workflows/cura-installer-windows.yml@main
with:
cura_conan_version: ${{ inputs.cura_conan_version }}
conan_args: ${{ inputs.conan_args }}
enterprise: ${{ inputs.enterprise == 'true' }}
staging: ${{ inputs.staging == 'true' }}
architecture: ${{ inputs.architecture }}
operating_system: ${{ inputs.operating_system }}
env:
CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
WIN_CERT_INSTALLER_CER: ${{ secrets.WIN_CERT_INSTALLER_CER }}
WIN_CERT_INSTALLER_CER_PASS: ${{ secrets.WIN_CERT_INSTALLER_CER_PASS }}
CURA_CONAN_VERSION: ${{ inputs.cura_conan_version }}
ENTERPRISE: ${{ inputs.enterprise }}
STAGING: ${{ inputs.staging }}
jobs:
cura-installer-create:
runs-on: ${{ inputs.operating_system }}
outputs:
INSTALLER_FILENAME: ${{ steps.filename.outputs.INSTALLER_FILENAME }}
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: '3.10.x'
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements for runner
run: pip install -r .github/workflows/requirements-conan-package.txt
- name: Cache Conan local repository packages (Powershell)
uses: actions/cache@v3
with:
path: |
C:\Users\runneradmin\.conan\data
C:\.conan
C:\Users\runneradmin\.conan\conan_download_cache
key: conan-${{ runner.os }}-${{ runner.arch }}-installer-cache
- name: Create the default Conan profile
run: conan profile new default --detect --force
- name: Get Conan configuration
run: |
conan config install https://github.com/Ultimaker/conan-config.git
conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
- name: Use Conan download cache (Powershell)
run: conan config set storage.download_cache="C:\Users\runneradmin\.conan\conan_download_cache"
- name: Create the Packages (Powershell)
run: conan install $Env:CURA_CONAN_VERSION ${{ inputs.conan_args }} --build=missing --update -if cura_inst -g VirtualPythonEnv -o cura:enterprise=$Env:ENTERPRISE -o cura:staging=$Env:STAGING -c tools.build:skip_test=True
- name: Remove internal packages before uploading
run: |
conan remove "*@internal/*" -f || true
conan remove "cura_private_data*" -f || true
- name: Upload the Package(s)
if: always()
run: |
conan upload "*" -r cura --all -c
- name: Set Environment variables for Cura (Powershell)
run: |
echo "${Env:WIX}\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
.\cura_inst\Scripts\activate_github_actions_env.ps1
.\cura_inst\Scripts\activate_github_actions_version_env.ps1
# - name: Install OpenSSL shared
# run: conan install openssl/1.1.1l@_/_ --build=missing --update -o openssl:shared=True -g deploy
#
# - name: Copy OpenSSL shared (Powershell)
# run: |
# cp openssl/bin/*.dll ./cura_inst/Scripts/
# cp openssl/lib/*.lib ./cura_inst/Lib/
- name: Create the Cura dist
run: pyinstaller ./cura_inst/UltiMaker-Cura.spec
- name: Output the file name and extension
id: filename
shell: python
run: |
import os
enterprise = "-Enterprise" if "${{ inputs.enterprise }}" == "true" else ""
installer_filename = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-win64-${{ inputs.architecture }}"
output_env = os.environ["GITHUB_OUTPUT"]
content = ""
if os.path.exists(output_env):
with open(output_env, "r") as f:
content = f.read()
with open(output_env, "w") as f:
f.write(content)
f.writelines(f"INSTALLER_FILENAME={installer_filename}\n")
- name: Summarize the used dependencies
shell: python
run: |
import os
from cura.CuraVersion import ConanInstalls, PythonInstalls
summary_env = os.environ["GITHUB_STEP_SUMMARY"]
content = ""
if os.path.exists(summary_env):
with open(summary_env, "r") as f:
content = f.read()
with open(summary_env, "w") as f:
f.write(content)
f.writelines("# ${{ steps.filename.outputs.INSTALLER_FILENAME }}\n")
f.writelines("## Conan packages:\n")
for dep_name, dep_info in ConanInstalls.items():
f.writelines(f"`{dep_name} {dep_info['version']} {dep_info['revision']}`\n")
f.writelines("## Python modules:\n")
for dep_name, dep_info in PythonInstalls.items():
f.writelines(f"`{dep_name} {dep_info['version']}`\n")
- name: Create PFX certificate from BASE64_PFX_CONTENT secret
id: create-pfx
env:
PFX_CONTENT: ${{ secrets.WIN_CERT_INSTALLER_CER }}
run: |
$pfxPath = Join-Path -Path $env:RUNNER_TEMP -ChildPath "cert.pfx";
$encodedBytes = [System.Convert]::FromBase64String($env:PFX_CONTENT);
Set-Content $pfxPath -Value $encodedBytes -AsByteStream;
echo "PFX_PATH=$pfxPath" >> $env:GITHUB_OUTPUT;
- name: Create the Windows msi installer (Powershell)
run: |
python ..\cura_inst\packaging\msi\create_windows_msi.py ..\cura_inst .\UltiMaker-Cura "${{steps.filename.outputs.INSTALLER_FILENAME }}.msi" "$Env:CURA_APP_NAME"
working-directory: dist
- name: Sign the Windows msi installer (Powershell)
env:
PFX_PATH: ${{ steps.create-pfx.outputs.PFX_PATH }}
run: |
& "C:/Program Files (x86)/Windows Kits/10/bin/10.0.17763.0/x86/signtool.exe" sign /f $Env:PFX_PATH /p "$Env:WIN_CERT_INSTALLER_CER_PASS" /fd SHA256 /t http://timestamp.digicert.com "${{steps.filename.outputs.INSTALLER_FILENAME }}.msi"
working-directory: dist
- name: Create the Windows exe installer (Powershell)
run: |
python ..\cura_inst\packaging\NSIS\create_windows_installer.py ../cura_inst . "${{steps.filename.outputs.INSTALLER_FILENAME }}.exe"
working-directory: dist
- name: Sign the Windows exe installer (Powershell)
env:
PFX_PATH: ${{ steps.create-pfx.outputs.PFX_PATH }}
run: |
& "C:/Program Files (x86)/Windows Kits/10/bin/10.0.17763.0/x86/signtool.exe" sign /f $Env:PFX_PATH /p "$Env:WIN_CERT_INSTALLER_CER_PASS" /fd SHA256 /t http://timestamp.digicert.com "${{steps.filename.outputs.INSTALLER_FILENAME }}.exe"
working-directory: dist
- name: Upload the msi
uses: actions/upload-artifact@v3
with:
name: ${{steps.filename.outputs.INSTALLER_FILENAME }}-msi
path: |
dist/${{steps.filename.outputs.INSTALLER_FILENAME }}.msi
retention-days: 5
- name: Upload the exe
uses: actions/upload-artifact@v3
with:
name: ${{steps.filename.outputs.INSTALLER_FILENAME }}-exe
path: |
dist/${{steps.filename.outputs.INSTALLER_FILENAME }}.exe
retention-days: 5
# NOTE: The extension is .sh because this file is consumed outside the Windows build environment, i.e. not on the Windows build image.
- name: Write the run info
shell: python
run: |
import os
with open("run_info.sh", "w") as f:
f.writelines(f'echo "CURA_VERSION_FULL={os.environ["CURA_VERSION_FULL"]}" >> $GITHUB_ENV\n')
# NOTE: The extension is .sh because this file is consumed outside the Windows build environment, i.e. not on the Windows build image.
- name: Upload the run info
uses: actions/upload-artifact@v3
with:
name: windows-run-info
path: |
run_info.sh
retention-days: 5
notify-export:
if: ${{ always() }}
needs: [ cura-installer-create ]
uses: ultimaker/cura/.github/workflows/notify.yml@main
with:
success: ${{ contains(join(needs.*.result, ','), 'success') }}
success_title: "Create the Cura distributions"
success_body: "Installers for ${{ inputs.cura_conan_version }}"
failure_title: "Failed to create the Cura distributions"
failure_body: "Failed to create at least 1 installer for ${{ inputs.cura_conan_version }}"
secrets: inherit secrets: inherit

View File

@ -266,6 +266,10 @@ app = UMBUNDLE(
'CFBundlePackageType': 'APPL', 'CFBundlePackageType': 'APPL',
'CFBundleVersionString': {{ version }}, 'CFBundleVersionString': {{ version }},
'CFBundleShortVersionString': {{ short_version }}, 'CFBundleShortVersionString': {{ short_version }},
'CFBundleURLTypes': [{
'CFBundleURLName': '{{ display_name }}',
'CFBundleURLSchemes': ['cura', 'slicer'],
}],
'CFBundleDocumentTypes': [{ 'CFBundleDocumentTypes': [{
'CFBundleTypeRole': 'Viewer', 'CFBundleTypeRole': 'Viewer',
'CFBundleTypeExtensions': ['*'], 'CFBundleTypeExtensions': ['*'],

View File

@ -1,3 +1,18 @@
version: "5.7.0-alpha.0"
requirements:
- "uranium/(latest)@ultimaker/testing"
- "curaengine/(latest)@ultimaker/testing"
- "cura_binary_data/(latest)@ultimaker/testing"
- "fdm_materials/(latest)@ultimaker/testing"
- "curaengine_plugin_gradual_flow/(latest)@ultimaker/stable"
- "dulcificum/latest@ultimaker/testing"
- "pyarcus/5.3.0"
- "pysavitar/5.3.0"
- "pynest2d/5.3.0"
- "curaengine_grpc_definitions/(latest)@ultimaker/testing"
requirements_internal:
- "fdm_materials/(latest)@internal/testing"
- "cura_private_data/(latest)@internal/testing"
urls: urls:
default: default:
cloud_api_root: "https://api.ultimaker.com" cloud_api_root: "https://api.ultimaker.com"

View File

@ -4,7 +4,7 @@ from pathlib import Path
from jinja2 import Template from jinja2 import Template
from conan import ConanFile from conan import ConanFile
from conan.tools.files import copy, rmdir, save, mkdir, rm from conan.tools.files import copy, rmdir, save, mkdir, rm, update_conandata
from conan.tools.microsoft import unix_path from conan.tools.microsoft import unix_path
from conan.tools.env import VirtualRunEnv, Environment, VirtualBuildEnv from conan.tools.env import VirtualRunEnv, Environment, VirtualBuildEnv
from conan.tools.scm import Version from conan.tools.scm import Version
@ -34,7 +34,7 @@ class CuraConan(ConanFile):
"cloud_api_version": "ANY", "cloud_api_version": "ANY",
"display_name": "ANY", # TODO: should this be an option?? "display_name": "ANY", # TODO: should this be an option??
"cura_debug_mode": [True, False], # FIXME: Use profiles "cura_debug_mode": [True, False], # FIXME: Use profiles
"internal": [True, False], "internal": ["True", "False", "true", "false"], # Workaround for GH Action passing boolean as lowercase string
"enable_i18n": [True, False], "enable_i18n": [True, False],
} }
default_options = { default_options = {
@ -44,13 +44,13 @@ class CuraConan(ConanFile):
"cloud_api_version": "1", "cloud_api_version": "1",
"display_name": "UltiMaker Cura", "display_name": "UltiMaker Cura",
"cura_debug_mode": False, # Not yet implemented "cura_debug_mode": False, # Not yet implemented
"internal": False, "internal": "False",
"enable_i18n": False, "enable_i18n": False,
} }
def set_version(self): def set_version(self):
if not self.version: if not self.version:
self.version = "5.7.0-alpha" self.version = self.conan_data["version"]
@property @property
def _i18n_options(self): def _i18n_options(self):
@ -84,6 +84,10 @@ class CuraConan(ConanFile):
def _enterprise(self): def _enterprise(self):
return self.options.enterprise in ["True", 'true'] return self.options.enterprise in ["True", 'true']
@property
def _internal(self):
return self.options.internal in ["True", 'true']
@property @property
def _app_name(self): def _app_name(self):
if self._enterprise: if self._enterprise:
@ -182,7 +186,7 @@ class CuraConan(ConanFile):
cura_version = Version(self.conf.get("user.cura:version", default = self.version, check_type = str)) cura_version = Version(self.conf.get("user.cura:version", default = self.version, check_type = str))
pre_tag = f"-{cura_version.pre}" if cura_version.pre else "" pre_tag = f"-{cura_version.pre}" if cura_version.pre else ""
build_tag = f"+{cura_version.build}" if cura_version.build else "" build_tag = f"+{cura_version.build}" if cura_version.build else ""
internal_tag = f"+internal" if self.options.internal else "" internal_tag = f"+internal" if self._internal else ""
cura_version = f"{cura_version.major}.{cura_version.minor}.{cura_version.patch}{pre_tag}{build_tag}{internal_tag}" cura_version = f"{cura_version.major}.{cura_version.minor}.{cura_version.patch}{pre_tag}{build_tag}{internal_tag}"
with open(os.path.join(location, "CuraVersion.py"), "w") as f: with open(os.path.join(location, "CuraVersion.py"), "w") as f:
@ -206,7 +210,7 @@ class CuraConan(ConanFile):
pyinstaller_metadata = self.conan_data["pyinstaller"] pyinstaller_metadata = self.conan_data["pyinstaller"]
datas = [] datas = []
for data in pyinstaller_metadata["datas"].values(): for data in pyinstaller_metadata["datas"].values():
if not self.options.internal and data.get("internal", False): if not self._internal and data.get("internal", False):
continue continue
if "package" in data: # get the paths from conan package if "package" in data: # get the paths from conan package
@ -238,7 +242,7 @@ class CuraConan(ConanFile):
self.output.warning(f"Source path for binary {binary['binary']} does not exist") self.output.warning(f"Source path for binary {binary['binary']} does not exist")
continue continue
for bin in Path(src_path).glob(binary["binary"] + "*[.exe|.dll|.so|.dylib|.so.]*"): for bin in Path(src_path).glob(binary["binary"] + "*[.exe|.dll|.so|.dylib|.so.|.pdb]*"):
binaries.append((str(bin), binary["dst"])) binaries.append((str(bin), binary["dst"]))
for bin in Path(src_path).glob(binary["binary"]): for bin in Path(src_path).glob(binary["binary"]):
binaries.append((str(bin), binary["dst"])) binaries.append((str(bin), binary["dst"]))
@ -285,6 +289,9 @@ class CuraConan(ConanFile):
short_version = f"'{cura_version.major}.{cura_version.minor}.{cura_version.patch}'", short_version = f"'{cura_version.major}.{cura_version.minor}.{cura_version.patch}'",
)) ))
def export(self):
update_conandata(self, {"version": self.version})
def export_sources(self): def export_sources(self):
copy(self, "*", os.path.join(self.recipe_folder, "plugins"), os.path.join(self.export_sources_folder, "plugins")) copy(self, "*", os.path.join(self.recipe_folder, "plugins"), os.path.join(self.export_sources_folder, "plugins"))
copy(self, "*", os.path.join(self.recipe_folder, "resources"), os.path.join(self.export_sources_folder, "resources"), excludes = "*.mo") copy(self, "*", os.path.join(self.recipe_folder, "resources"), os.path.join(self.export_sources_folder, "resources"), excludes = "*.mo")
@ -310,6 +317,9 @@ class CuraConan(ConanFile):
self.options["boost"].header_only = True self.options["boost"].header_only = True
if self.settings.os == "Linux": if self.settings.os == "Linux":
self.options["curaengine_grpc_definitions"].shared = True self.options["curaengine_grpc_definitions"].shared = True
self.options["openssl"].shared = True
if self.conf.get("user.curaengine:sentry_url", "", check_type=str) != "":
self.options["curaengine"].enable_sentry = True
def validate(self): def validate(self):
version = self.conf.get("user.cura:version", default = self.version, check_type = str) version = self.conf.get("user.cura:version", default = self.version, check_type = str)
@ -317,24 +327,19 @@ class CuraConan(ConanFile):
raise ConanInvalidConfiguration("Only versions 5+ are supported") raise ConanInvalidConfiguration("Only versions 5+ are supported")
def requirements(self): def requirements(self):
for req in self.conan_data["requirements"]:
if self._internal and "fdm_materials" in req:
continue
self.requires(req)
if self._internal:
for req in self.conan_data["requirements_internal"]:
self.requires(req)
self.requires("cpython/3.10.4@ultimaker/stable")
self.requires("openssl/3.2.0")
self.requires("boost/1.82.0") self.requires("boost/1.82.0")
self.requires("fmt/9.0.0") self.requires("spdlog/1.12.0")
self.requires("curaengine_grpc_definitions/0.1.0") self.requires("fmt/10.1.1")
self.requires("zlib/1.2.13") self.requires("zlib/1.2.13")
self.requires("pyarcus/5.3.0")
self.requires("dulcificum/0.1.0-beta.1")
self.requires("curaengine/(latest)@ultimaker/testing")
self.requires("pysavitar/5.3.0")
self.requires("pynest2d/5.3.0")
self.requires("curaengine_plugin_gradual_flow/0.1.0")
self.requires("uranium/(latest)@ultimaker/testing")
self.requires("cura_binary_data/(latest)@ultimaker/testing")
self.requires("cpython/3.10.4")
if self.options.internal:
self.requires("cura_private_data/(latest)@internal/testing")
self.requires("fdm_materials/(latest)@internal/testing")
else:
self.requires("fdm_materials/(latest)@ultimaker/testing")
def build_requirements(self): def build_requirements(self):
if self.options.get_safe("enable_i18n", False): if self.options.get_safe("enable_i18n", False):
@ -394,7 +399,7 @@ class CuraConan(ConanFile):
copy(self, "*", fdm_materials.resdirs[0], self.source_folder) copy(self, "*", fdm_materials.resdirs[0], self.source_folder)
# Copy internal resources # Copy internal resources
if self.options.internal: if self._internal:
cura_private_data = self.dependencies["cura_private_data"].cpp_info cura_private_data = self.dependencies["cura_private_data"].cpp_info
copy(self, "*", cura_private_data.resdirs[0], str(self._share_dir.joinpath("cura"))) copy(self, "*", cura_private_data.resdirs[0], str(self._share_dir.joinpath("cura")))
@ -513,6 +518,7 @@ echo "CURA_APP_NAME={{ cura_app_name }}" >> ${{ env_prefix }}GITHUB_ENV
del self.info.options.cloud_api_version del self.info.options.cloud_api_version
del self.info.options.display_name del self.info.options.display_name
del self.info.options.cura_debug_mode del self.info.options.cura_debug_mode
self.options.rm_safe("enable_i18n")
# TODO: Use the hash of requirements.txt and requirements-ultimaker.txt, Because changing these will actually result in a different # TODO: Use the hash of requirements.txt and requirements-ultimaker.txt, Because changing these will actually result in a different
# Cura. This is needed because the requirements.txt aren't managed by Conan and therefor not resolved in the package_id. This isn't # Cura. This is needed because the requirements.txt aren't managed by Conan and therefor not resolved in the package_id. This isn't

View File

@ -241,14 +241,8 @@ class GridArrange(Arranger):
center_grid_x = coord_grid_x + (0.5 * self._grid_width) center_grid_x = coord_grid_x + (0.5 * self._grid_width)
center_grid_y = coord_grid_y + (0.5 * self._grid_height) center_grid_y = coord_grid_y + (0.5 * self._grid_height)
bounding_box = node.getBoundingBox() return TranslateOperation(node, Vector(center_grid_x, node.getWorldPosition().y, center_grid_y),
center_node_x = (bounding_box.left + bounding_box.right) * 0.5 set_position=True)
center_node_y = (bounding_box.back + bounding_box.front) * 0.5
delta_x = center_grid_x - center_node_x
delta_y = center_grid_y - center_node_y
return TranslateOperation(node, Vector(delta_x, 0, delta_y))
def _getGridCornerPoints( def _getGridCornerPoints(
self, self,

View File

@ -2,15 +2,18 @@
# Cura is released under the terms of the LGPLv3 or higher. # Cura is released under the terms of the LGPLv3 or higher.
import enum import enum
import os import os
import re
import sys import sys
import tempfile import tempfile
import time import time
import platform import platform
from pathlib import Path from pathlib import Path
from typing import cast, TYPE_CHECKING, Optional, Callable, List, Any, Dict from typing import cast, TYPE_CHECKING, Optional, Callable, List, Any, Dict
import requests
import numpy import numpy
from PyQt6.QtCore import QObject, QTimer, QUrl, pyqtSignal, pyqtProperty, QEvent, pyqtEnum, QCoreApplication from PyQt6.QtCore import QObject, QTimer, QUrl, QUrlQuery, pyqtSignal, pyqtProperty, QEvent, pyqtEnum, QCoreApplication, \
QByteArray
from PyQt6.QtGui import QColor, QIcon from PyQt6.QtGui import QColor, QIcon
from PyQt6.QtQml import qmlRegisterUncreatableType, qmlRegisterUncreatableMetaObject, qmlRegisterSingletonType, qmlRegisterType from PyQt6.QtQml import qmlRegisterUncreatableType, qmlRegisterUncreatableMetaObject, qmlRegisterSingletonType, qmlRegisterType
from PyQt6.QtWidgets import QMessageBox from PyQt6.QtWidgets import QMessageBox
@ -179,6 +182,7 @@ class CuraApplication(QtApplication):
self._use_single_instance = False self._use_single_instance = False
self._single_instance = None self._single_instance = None
self._open_project_mode: Optional[str] = None
self._cura_formula_functions = None # type: Optional[CuraFormulaFunctions] self._cura_formula_functions = None # type: Optional[CuraFormulaFunctions]
@ -249,7 +253,7 @@ class CuraApplication(QtApplication):
self._additional_components = {} # Components to add to certain areas in the interface self._additional_components = {} # Components to add to certain areas in the interface
self._open_file_queue = [] # A list of files to open (after the application has started) self._open_file_queue = [] # A list of files to open (after the application has started)
self._open_url_queue = [] # A list of urls to open (after the application has started)
self._update_platform_activity_timer = None self._update_platform_activity_timer = None
self._sidebar_custom_menu_items = [] # type: list # Keeps list of custom menu items for the side bar self._sidebar_custom_menu_items = [] # type: list # Keeps list of custom menu items for the side bar
@ -273,6 +277,8 @@ class CuraApplication(QtApplication):
self._conan_installs = ApplicationMetadata.CONAN_INSTALLS self._conan_installs = ApplicationMetadata.CONAN_INSTALLS
self._python_installs = ApplicationMetadata.PYTHON_INSTALLS self._python_installs = ApplicationMetadata.PYTHON_INSTALLS
self._supported_url_schemes: List[str] = ["cura", "slicer"]
@pyqtProperty(str, constant=True) @pyqtProperty(str, constant=True)
def ultimakerCloudApiRootUrl(self) -> str: def ultimakerCloudApiRootUrl(self) -> str:
return UltimakerCloudConstants.CuraCloudAPIRoot return UltimakerCloudConstants.CuraCloudAPIRoot
@ -325,6 +331,10 @@ class CuraApplication(QtApplication):
assert not "This crash is triggered by the trigger_early_crash command line argument." assert not "This crash is triggered by the trigger_early_crash command line argument."
for filename in self._cli_args.file: for filename in self._cli_args.file:
url = QUrl(filename)
if url.scheme() in self._supported_url_schemes:
self._open_url_queue.append(url)
else:
self._files_to_open.append(os.path.abspath(filename)) self._files_to_open.append(os.path.abspath(filename))
def initialize(self) -> None: def initialize(self) -> None:
@ -946,6 +956,8 @@ class CuraApplication(QtApplication):
self.callLater(self._openFile, file_name) self.callLater(self._openFile, file_name)
for file_name in self._open_file_queue: # Open all the files that were queued up while plug-ins were loading. for file_name in self._open_file_queue: # Open all the files that were queued up while plug-ins were loading.
self.callLater(self._openFile, file_name) self.callLater(self._openFile, file_name)
for url in self._open_url_queue:
self.callLater(self._openUrl, url)
initializationFinished = pyqtSignal() initializationFinished = pyqtSignal()
showAddPrintersUncancellableDialog = pyqtSignal() # Used to show the add printers dialog with a greyed background showAddPrintersUncancellableDialog = pyqtSignal() # Used to show the add printers dialog with a greyed background
@ -1155,9 +1167,15 @@ class CuraApplication(QtApplication):
if event.type() == QEvent.Type.FileOpen: if event.type() == QEvent.Type.FileOpen:
if self._plugins_loaded: if self._plugins_loaded:
if event.file():
self._openFile(event.file()) self._openFile(event.file())
if event.url():
self._openUrl(event.url())
else: else:
if event.file():
self._open_file_queue.append(event.file()) self._open_file_queue.append(event.file())
if event.url():
self._open_url_queue.append(event.url())
if int(event.type()) == 20: # 'QEvent.Type.Quit' enum isn't there, even though it should be according to docs. if int(event.type()) == 20: # 'QEvent.Type.Quit' enum isn't there, even though it should be according to docs.
# Once we're at this point, everything should have been flushed already (past OnExitCallbackManager). # Once we're at this point, everything should have been flushed already (past OnExitCallbackManager).
@ -1541,7 +1559,7 @@ class CuraApplication(QtApplication):
if not nodes: if not nodes:
return return
objects_in_filename = {} # type: Dict[str, List[CuraSceneNode]] objects_in_filename: Dict[str, List[CuraSceneNode]] = {}
for node in nodes: for node in nodes:
mesh_data = node.getMeshData() mesh_data = node.getMeshData()
if mesh_data: if mesh_data:
@ -1782,6 +1800,58 @@ class CuraApplication(QtApplication):
def _openFile(self, filename): def _openFile(self, filename):
self.readLocalFile(QUrl.fromLocalFile(filename)) self.readLocalFile(QUrl.fromLocalFile(filename))
def _openUrl(self, url: QUrl) -> None:
if url.scheme() not in self._supported_url_schemes:
# Only handle cura:// and slicer:// URL schemes
return
match url.host() + url.path():
case "open" | "open/":
query = QUrlQuery(url.query())
model_url = QUrl(query.queryItemValue("file", options=QUrl.ComponentFormattingOption.FullyDecoded))
def on_finish(response):
content_disposition_header_key = QByteArray("content-disposition".encode())
if not response.hasRawHeader(content_disposition_header_key):
Logger.log("w", "Could not find Content-Disposition header in response from {0}".format(
model_url.url()))
# Use the last part of the url as the filename, and assume it is an STL file
filename = model_url.path().split("/")[-1] + ".stl"
else:
# The Content-Disposition header is expected in the format
# ```
# attachment; filename="[FILENAME]"
# ```
# Use a regex to extract the filename
content_disposition = str(response.rawHeader(content_disposition_header_key).data(),
encoding='utf-8')
content_disposition_match = re.match(r'attachment; filename="(?P<filename>.*)"',
content_disposition)
assert content_disposition_match is not None
filename = content_disposition_match.group("filename")
tmp = tempfile.NamedTemporaryFile(suffix=filename, delete=False)
with open(tmp.name, "wb") as f:
f.write(response.readAll())
self.readLocalFile(QUrl.fromLocalFile(tmp.name), add_to_recent_files=False)
def on_error(*args, **kwargs):
Logger.log("w", "Could not download file from {0}".format(model_url.url()))
Message("Could not download file: " + str(model_url.url()),
title= "Loading Model failed",
message_type=Message.MessageType.ERROR).show()
return
self.getHttpRequestManager().get(
model_url.url(),
callback=on_finish,
error_callback=on_error,
)
case path:
Logger.log("w", "Unsupported url scheme path: {0}".format(path))
def _addProfileReader(self, profile_reader): def _addProfileReader(self, profile_reader):
# TODO: Add the profile reader to the list of plug-ins that can be used when importing profiles. # TODO: Add the profile reader to the list of plug-ins that can be used when importing profiles.
pass pass
@ -1845,7 +1915,7 @@ class CuraApplication(QtApplication):
Logger.log("i", "Attempting to read file %s", file.toString()) Logger.log("i", "Attempting to read file %s", file.toString())
if not file.isValid(): if not file.isValid():
return return
self._open_project_mode = project_mode
scene = self.getController().getScene() scene = self.getController().getScene()
for node in DepthFirstIterator(scene.getRoot()): for node in DepthFirstIterator(scene.getRoot()):
@ -1855,16 +1925,16 @@ class CuraApplication(QtApplication):
is_project_file = self.checkIsValidProjectFile(file) is_project_file = self.checkIsValidProjectFile(file)
if project_mode is None: if self._open_project_mode is None:
project_mode = self.getPreferences().getValue("cura/choice_on_open_project") self._open_project_mode = self.getPreferences().getValue("cura/choice_on_open_project")
if is_project_file and project_mode == "open_as_project": if is_project_file and self._open_project_mode == "open_as_project":
# open as project immediately without presenting a dialog # open as project immediately without presenting a dialog
workspace_handler = self.getWorkspaceFileHandler() workspace_handler = self.getWorkspaceFileHandler()
workspace_handler.readLocalFile(file, add_to_recent_files_hint = add_to_recent_files) workspace_handler.readLocalFile(file, add_to_recent_files_hint = add_to_recent_files)
return return
if is_project_file and project_mode == "always_ask": if is_project_file and self._open_project_mode == "always_ask":
# present a dialog asking to open as project or import models # present a dialog asking to open as project or import models
self.callLater(self.openProjectFile.emit, file, add_to_recent_files) self.callLater(self.openProjectFile.emit, file, add_to_recent_files)
return return
@ -1999,7 +2069,10 @@ class CuraApplication(QtApplication):
center_y = 0 center_y = 0
node.translate(Vector(0, center_y, 0)) node.translate(Vector(0, center_y, 0))
nodes_to_arrange.append(node)
# If the file is a project, and models are to be loaded from that project,
# the models inside the file should be arranged on the build plate.
elif self._open_project_mode == "open_as_model":
nodes_to_arrange.append(node) nodes_to_arrange.append(node)
# This node is deep copied from some other node which already has a BuildPlateDecorator, but the deepcopy # This node is deep copied from some other node which already has a BuildPlateDecorator, but the deepcopy

View File

@ -1,5 +1,6 @@
# Copyright (c) 2019 Ultimaker B.V. # Copyright (c) 2019 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher. # Cura is released under the terms of the LGPLv3 or higher.
import math
import numpy import numpy
from typing import Optional, cast from typing import Optional, cast
@ -66,7 +67,7 @@ class LayerPolygon:
# Buffering the colors shouldn't be necessary as it is not # Buffering the colors shouldn't be necessary as it is not
# re-used and can save a lot of memory usage. # re-used and can save a lot of memory usage.
self._color_map = LayerPolygon.getColorMap() self._color_map = LayerPolygon.getColorMap()
self._colors = self._color_map[self._types] # type: numpy.ndarray self._colors: numpy.ndarray = self._color_map[self._types]
# When type is used as index returns true if type == LayerPolygon.InfillType # When type is used as index returns true if type == LayerPolygon.InfillType
# or type == LayerPolygon.SkinType # or type == LayerPolygon.SkinType
@ -74,8 +75,8 @@ class LayerPolygon:
# Should be generated in better way, not hardcoded. # Should be generated in better way, not hardcoded.
self._is_infill_or_skin_type_map = numpy.array([0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0], dtype=bool) self._is_infill_or_skin_type_map = numpy.array([0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0], dtype=bool)
self._build_cache_line_mesh_mask = None # type: Optional[numpy.ndarray] self._build_cache_line_mesh_mask: Optional[numpy.ndarray] = None
self._build_cache_needed_points = None # type: Optional[numpy.ndarray] self._build_cache_needed_points: Optional[numpy.ndarray] = None
def buildCache(self) -> None: def buildCache(self) -> None:
# For the line mesh we do not draw Infill or Jumps. Therefore those lines are filtered out. # For the line mesh we do not draw Infill or Jumps. Therefore those lines are filtered out.
@ -186,6 +187,11 @@ class LayerPolygon:
def types(self): def types(self):
return self._types return self._types
@property
def lineLengths(self):
data_array = numpy.array(self._data)
return numpy.linalg.norm(data_array[1:] - data_array[:-1], axis=1)
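A minimal standalone sketch of what this property computes, using a toy point array; in the real property the data comes from the layer polygon:
import numpy

points = numpy.array([[0.0, 0.0, 0.0],
                      [3.0, 4.0, 0.0],
                      [3.0, 4.0, 2.0]])
# Length of each consecutive segment between points.
segment_lengths = numpy.linalg.norm(points[1:] - points[:-1], axis=1)
print(segment_lengths)  # [5. 2.]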
@property @property
def data(self): def data(self):
return self._data return self._data

View File

@ -43,7 +43,7 @@ class MaterialOutputModel(QObject):
} }
if guid is None and brand is not "empty" and type in _MATERIAL_MAP: if guid is None and brand != "empty" and type in _MATERIAL_MAP:
name = _MATERIAL_MAP[type]["name"] name = _MATERIAL_MAP[type]["name"]
guid = _MATERIAL_MAP[type]["guid"] guid = _MATERIAL_MAP[type]["guid"]
return name, guid return name, guid

View File

@ -49,7 +49,7 @@ class Snapshot:
""" """
if node is None: if node is None:
root = Application.getInstance().getController().getScene().getRoot() node = Application.getInstance().getController().getScene().getRoot()
# the direction the camera is looking at to create the isometric view # the direction the camera is looking at to create the isometric view
iso_view_dir = Vector(-1, -1, -1).normalized() iso_view_dir = Vector(-1, -1, -1).normalized()

View File

@ -38,6 +38,7 @@ AppDir:
- usr/share/doc/*/changelog.* - usr/share/doc/*/changelog.*
- usr/share/doc/*/NEWS.* - usr/share/doc/*/NEWS.*
- usr/share/doc/*/TODO.* - usr/share/doc/*/TODO.*
- usr/lib/x86_64-linux-gnu/libssl.so*
runtime: runtime:
env: env:
APPDIR_LIBRARY_PATH: "$APPDIR:$APPDIR/runtime/compat/:$APPDIR/usr/lib/x86_64-linux-gnu:$APPDIR/lib/x86_64-linux-gnu:$APPDIR/usr/lib:$APPDIR/usr/lib/x86_64-linux-gnu/gdk-pixbuf-2.0/2.10.0/loaders" APPDIR_LIBRARY_PATH: "$APPDIR:$APPDIR/runtime/compat/:$APPDIR/usr/lib/x86_64-linux-gnu:$APPDIR/lib/x86_64-linux-gnu:$APPDIR/usr/lib:$APPDIR/usr/lib/x86_64-linux-gnu/gdk-pixbuf-2.0/2.10.0/loaders"

View File

@ -144,6 +144,23 @@ SectionEnd
###################################################################### ######################################################################
Section UrlProtocol
WriteRegStr HKCR "cura" "" "URL:cura"
WriteRegStr HKCR "cura" "URL Protocol" ""
WriteRegStr HKCR "cura\DefaultIcon" "" "$INSTDIR\${MAIN_APP_EXE},1"
WriteRegStr HKCR "cura\shell" "" "open"
WriteRegStr HKCR "cura\shell\open\command" "" '"$INSTDIR\${MAIN_APP_EXE}" "%1"'
WriteRegStr HKCR "slicer" "" "URL:slicer"
WriteRegStr HKCR "slicer" "URL Protocol" ""
WriteRegStr HKCR "slicer\DefaultIcon" "" "$INSTDIR\${MAIN_APP_EXE},1"
WriteRegStr HKCR "slicer\shell" "" "open"
WriteRegStr HKCR "slicer\shell\open\command" "" '"$INSTDIR\${MAIN_APP_EXE}" "%1"'
SectionEnd
######################################################################
Section Uninstall Section Uninstall
${INSTALL_TYPE}{% for files in mapped_out_paths.values() %}{% for file in files %} ${INSTALL_TYPE}{% for files in mapped_out_paths.values() %}{% for file in files %}
Delete "{{ file[1] }}"{% endfor %}{% endfor %}{% for rem_dir in rmdir_paths %} Delete "{{ file[1] }}"{% endfor %}{% endfor %}{% for rem_dir in rmdir_paths %}
@ -187,8 +204,13 @@ RmDir "$SMPROGRAMS\{{ app_name }}"
!insertmacro APP_UNASSOCIATE "stl" "Cura.model" !insertmacro APP_UNASSOCIATE "stl" "Cura.model"
!insertmacro APP_UNASSOCIATE "3mf" "Cura.project" !insertmacro APP_UNASSOCIATE "3mf" "Cura.project"
; Unassociate file associations for 'cura' protocol
DeleteRegKey HKCR "cura"
; Unassociate file associations for 'slicer' protocol
DeleteRegKey HKCR "slicer"
DeleteRegKey ${REG_ROOT} "${REG_APP_PATH}" DeleteRegKey ${REG_ROOT} "${REG_APP_PATH}"
DeleteRegKey ${REG_ROOT} "${UNINSTALL_PATH}" DeleteRegKey ${REG_ROOT} "${UNINSTALL_PATH}"
SectionEnd SectionEnd
######################################################################

View File

@ -33,6 +33,21 @@
/> />
</Upgrade> </Upgrade>
<Property Id="ASSOCIATE_URL_PROTOCOLS">
<RegistrySearch Id="CheckCuraProtocolHandler"
Type="raw"
Root="HKCR"
Key="cura"
Name="URL Protocol"
/>
<RegistrySearch Id="CheckSlicerProtocolHandler"
Type="raw"
Root="HKCR"
Key="slicer"
Name="URL Protocol"
/>
</Property>
{% if "Enterprise" in app_name %} {% if "Enterprise" in app_name %}
<Property Id="PREVIOUS_413_INSTALLED" Secure="yes" /> <Property Id="PREVIOUS_413_INSTALLED" Secure="yes" />
<Upgrade Id="53C603BB-2B17-4206-A609-29C2E0D0B0AE"> <Upgrade Id="53C603BB-2B17-4206-A609-29C2E0D0B0AE">
@ -144,11 +159,32 @@
</Component> </Component>
</DirectoryRef> </DirectoryRef>
<!--Url Scheme-->
<Component Id="CuraRegistration" Guid="*" Directory="APPLICATIONFOLDER">
<RegistryKey Root="HKCR" Key="cura">
<RegistryValue Type="string" Value="URL:Cura Protocol"/>
<RegistryValue Type="string" Name="URL Protocol" Value=""/>
<RegistryValue Type="string" Key="DefaultIcon" Value="[APPLICATIONFOLDER]\{{ main_app }},1"/>
<RegistryValue Type="string" Key="shell\open\command" Value="&quot;[APPLICATIONFOLDER]\{{ main_app }}&quot; &quot;%1&quot;"/>
</RegistryKey>
</Component>
<Component Id="SlicerRegistration" Guid="*" Directory="APPLICATIONFOLDER">
<RegistryKey Root="HKCR" Key="slicer">
<RegistryValue Type="string" Value="URL:Slicer Protocol"/>
<RegistryValue Type="string" Name="URL Protocol" Value=""/>
<RegistryValue Type="string" Key="DefaultIcon" Value="[APPLICATIONFOLDER]\{{ main_app }},1"/>
<RegistryValue Type="string" Key="shell\open\command" Value="&quot;[APPLICATIONFOLDER]\{{ main_app }}&quot; &quot;%1&quot;"/>
</RegistryKey>
</Component>
<Feature Id="ProductFeature" Title="{{ app_name }}" Level="1" ConfigurableDirectory="APPLICATIONFOLDER"> <Feature Id="ProductFeature" Title="{{ app_name }}" Level="1" ConfigurableDirectory="APPLICATIONFOLDER">
<ComponentRef Id="CMP_UltiMaker_Cura_exe" /> <ComponentRef Id="CMP_UltiMaker_Cura_exe" />
<ComponentRef Id="CMP_CuraEngine_exe" /> <ComponentRef Id="CMP_CuraEngine_exe" />
<ComponentGroupRef Id="NewFilesGroup" /> <ComponentGroupRef Id="NewFilesGroup" />
<ComponentRef Id="CMP_Shortcuts" /> <ComponentRef Id="CMP_Shortcuts" />
<ComponentRef Id="CuraRegistration"/>
<ComponentRef Id="SlicerRegistration"/>
</Feature> </Feature>
<Feature Id="UninstallOlderVersionFeature" Title="Uninstall previous versions" Level="{{ 1 if "Enterprise" in app_name else 0 }}" Description="..."/> <Feature Id="UninstallOlderVersionFeature" Title="Uninstall previous versions" Level="{{ 1 if "Enterprise" in app_name else 0 }}" Description="..."/>
</Product> </Product>

View File

@ -33,6 +33,8 @@ message Slice
repeated Extruder extruders = 3; // The settings sent to each extruder object repeated Extruder extruders = 3; // The settings sent to each extruder object
repeated SettingExtruder limit_to_extruder = 4; // From which stack the setting would inherit if not defined per object repeated SettingExtruder limit_to_extruder = 4; // From which stack the setting would inherit if not defined per object
repeated EnginePlugin engine_plugins = 5; repeated EnginePlugin engine_plugins = 5;
string sentry_id = 6; // The anonymized Sentry user id that requested the slice
string cura_version = 7; // The version of Cura that requested the slice
} }
message Extruder message Extruder

View File

@ -163,6 +163,7 @@ class CuraEngineBackend(QObject, Backend):
self._is_disabled: bool = False self._is_disabled: bool = False
application.getPreferences().addPreference("general/auto_slice", False) application.getPreferences().addPreference("general/auto_slice", False)
application.getPreferences().addPreference("info/send_engine_crash", True)
self._use_timer: bool = False self._use_timer: bool = False
@ -173,6 +174,8 @@ class CuraEngineBackend(QObject, Backend):
self._change_timer.setSingleShot(True) self._change_timer.setSingleShot(True)
self._change_timer.setInterval(500) self._change_timer.setInterval(500)
self.determineAutoSlicing() self.determineAutoSlicing()
application.getPreferences().preferenceChanged.connect(self._onPreferencesChanged) application.getPreferences().preferenceChanged.connect(self._onPreferencesChanged)
self._slicing_error_message = Message( self._slicing_error_message = Message(
@ -193,6 +196,9 @@ class CuraEngineBackend(QObject, Backend):
application.initializationFinished.connect(self.initialize) application.initializationFinished.connect(self.initialize)
# Ensure that the initial value for send_engine_crash is handled correctly.
application.callLater(self._onPreferencesChanged, "info/send_engine_crash")
def startPlugins(self) -> None: def startPlugins(self) -> None:
""" """
Ensure that all backend plugins are started Ensure that all backend plugins are started
@ -1088,11 +1094,14 @@ class CuraEngineBackend(QObject, Backend):
self._change_timer.timeout.disconnect(self.slice) self._change_timer.timeout.disconnect(self.slice)
def _onPreferencesChanged(self, preference: str) -> None: def _onPreferencesChanged(self, preference: str) -> None:
if preference != "general/auto_slice": if preference != "general/auto_slice" and preference != "info/send_engine_crash":
return return
if preference == "general/auto_slice":
auto_slice = self.determineAutoSlicing() auto_slice = self.determineAutoSlicing()
if auto_slice: if auto_slice:
self._change_timer.start() self._change_timer.start()
elif preference == "info/send_engine_crash":
os.environ["use_sentry"] = "1" if CuraApplication.getInstance().getPreferences().getValue("info/send_engine_crash") else "0"
def tickle(self) -> None: def tickle(self) -> None:
"""Tickle the backend so in case of auto slicing, it starts the timer.""" """Tickle the backend so in case of auto slicing, it starts the timer."""

View File

@ -1,5 +1,7 @@
# Copyright (c) 2023 UltiMaker # Copyright (c) 2023 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher. # Cura is released under the terms of the LGPLv3 or higher.
import uuid
import os import os
import numpy import numpy
@ -30,6 +32,7 @@ from cura.CuraApplication import CuraApplication
from cura.Scene.CuraSceneNode import CuraSceneNode from cura.Scene.CuraSceneNode import CuraSceneNode
from cura.OneAtATimeIterator import OneAtATimeIterator from cura.OneAtATimeIterator import OneAtATimeIterator
from cura.Settings.ExtruderManager import ExtruderManager from cura.Settings.ExtruderManager import ExtruderManager
from cura.CuraVersion import CuraVersion
NON_PRINTING_MESH_SETTINGS = ["anti_overhang_mesh", "infill_mesh", "cutting_mesh"] NON_PRINTING_MESH_SETTINGS = ["anti_overhang_mesh", "infill_mesh", "cutting_mesh"]
@ -332,6 +335,11 @@ class StartSliceJob(Job):
self._buildGlobalSettingsMessage(stack) self._buildGlobalSettingsMessage(stack)
self._buildGlobalInheritsStackMessage(stack) self._buildGlobalInheritsStackMessage(stack)
user_id = uuid.getnode() # On all of Cura's supported platforms, this returns the MAC address, which is pseudonymous information (!= anonymous).
user_id %= 2 ** 16 # To make it anonymous, apply a bitmask selecting only the last 16 bits. This prevents it from being traceable to a specific user, but still gives an idea of whether the same user keeps hitting the same crash or whether it is widespread.
self._slice_message.sentry_id = f"{user_id}"
self._slice_message.cura_version = CuraVersion
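A minimal standalone sketch of the anonymization step above; the variable names are illustrative and not part of Cura's API:
import uuid

mac_based_id = uuid.getnode()           # derived from the MAC address: pseudonymous, not anonymous
anonymized_id = mac_based_id % 2 ** 16  # keep only the low 16 bits (0..65535)
print(f"{anonymized_id}")               # the string value that would be sent as sentry_id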
# Build messages for extruder stacks # Build messages for extruder stacks
for extruder_stack in global_stack.extruderList: for extruder_stack in global_stack.extruderList:
self._buildExtruderMessage(extruder_stack) self._buildExtruderMessage(extruder_stack)

View File

@ -35,7 +35,7 @@ class SimulationPass(RenderPass):
self._nozzle_shader = None self._nozzle_shader = None
self._disabled_shader = None self._disabled_shader = None
self._old_current_layer = 0 self._old_current_layer = 0
self._old_current_path = 0 self._old_current_path: float = 0.0
self._switching_layers = True # Tracking whether the user is moving across layers (True) or across paths (False). If false, lower layers render as shadowy. self._switching_layers = True # Tracking whether the user is moving across layers (True) or across paths (False). If false, lower layers render as shadowy.
self._gl = OpenGL.getInstance().getBindingsObject() self._gl = OpenGL.getInstance().getBindingsObject()
self._scene = Application.getInstance().getController().getScene() self._scene = Application.getInstance().getController().getScene()
@ -139,7 +139,7 @@ class SimulationPass(RenderPass):
continue continue
# Render all layers below a certain number as line mesh instead of vertices. # Render all layers below a certain number as line mesh instead of vertices.
if self._layer_view._current_layer_num > -1 and ((not self._layer_view._only_show_top_layers) or (not self._layer_view.getCompatibilityMode())): if self._layer_view.getCurrentLayer() > -1 and ((not self._layer_view._only_show_top_layers) or (not self._layer_view.getCompatibilityMode())):
start = 0 start = 0
end = 0 end = 0
element_counts = layer_data.getElementCounts() element_counts = layer_data.getElementCounts()
@ -147,7 +147,7 @@ class SimulationPass(RenderPass):
# In the current layer, we show just the indicated paths # In the current layer, we show just the indicated paths
if layer == self._layer_view._current_layer_num: if layer == self._layer_view._current_layer_num:
# We look for the position of the head, searching the point of the current path # We look for the position of the head, searching the point of the current path
index = self._layer_view._current_path_num index = int(self._layer_view.getCurrentPath())
offset = 0 offset = 0
for polygon in layer_data.getLayer(layer).polygons: for polygon in layer_data.getLayer(layer).polygons:
# The size indicates all values in the two-dimension array, and the second dimension is # The size indicates all values in the two-dimension array, and the second dimension is
@ -157,23 +157,33 @@ class SimulationPass(RenderPass):
offset = 1 # This is to avoid the first point when there is more than one polygon, since has the same value as the last point in the previous polygon offset = 1 # This is to avoid the first point when there is more than one polygon, since has the same value as the last point in the previous polygon
continue continue
# The head position is calculated and translated # The head position is calculated and translated
head_position = Vector(polygon.data[index+offset][0], polygon.data[index+offset][1], polygon.data[index+offset][2]) + node.getWorldPosition() ratio = self._layer_view.getCurrentPath() - index
pos_a = Vector(polygon.data[index + offset][0], polygon.data[index + offset][1],
polygon.data[index + offset][2])
if ratio <= 0.0001 or index + offset + 1 >= len(polygon.data):
head_position = pos_a + node.getWorldPosition()
else:
pos_b = Vector(polygon.data[index + offset + 1][0],
polygon.data[index + offset + 1][1],
polygon.data[index + offset + 1][2])
vec = pos_a * (1.0 - ratio) + pos_b * ratio
head_position = vec + node.getWorldPosition()
break break
break break
if self._layer_view._minimum_layer_num > layer: if self._layer_view.getMinimumLayer() > layer:
start += element_counts[layer] start += element_counts[layer]
end += element_counts[layer] end += element_counts[layer]
# Calculate the range of paths in the last layer # Calculate the range of paths in the last layer
current_layer_start = end current_layer_start = end
current_layer_end = end + self._layer_view._current_path_num * 2 # Because each point is used twice current_layer_end = end + int( self._layer_view.getCurrentPath()) * 2 # Because each point is used twice
# This uses glDrawRangeElements internally to only draw a certain range of lines. # This uses glDrawRangeElements internally to only draw a certain range of lines.
# All the layers but the current selected layer are rendered first # All the layers but the current selected layer are rendered first
if self._old_current_path != self._layer_view._current_path_num: if self._old_current_path != self._layer_view.getCurrentPath():
self._current_shader = self._layer_shadow_shader self._current_shader = self._layer_shadow_shader
self._switching_layers = False self._switching_layers = False
if not self._layer_view.isSimulationRunning() and self._old_current_layer != self._layer_view._current_layer_num: if not self._layer_view.isSimulationRunning() and self._old_current_layer != self._layer_view.getCurrentLayer():
self._current_shader = self._layer_shader self._current_shader = self._layer_shader
self._switching_layers = True self._switching_layers = True
@ -193,8 +203,8 @@ class SimulationPass(RenderPass):
current_layer_batch.addItem(node.getWorldTransformation(), layer_data) current_layer_batch.addItem(node.getWorldTransformation(), layer_data)
current_layer_batch.render(self._scene.getActiveCamera()) current_layer_batch.render(self._scene.getActiveCamera())
self._old_current_layer = self._layer_view._current_layer_num self._old_current_layer = self._layer_view.getCurrentLayer()
self._old_current_path = self._layer_view._current_path_num self._old_current_path = self._layer_view.getCurrentPath()
# Create a new batch that is not range-limited # Create a new batch that is not range-limited
batch = RenderBatch(self._layer_shader, type = RenderBatch.RenderType.Solid) batch = RenderBatch(self._layer_shader, type = RenderBatch.RenderType.Solid)
@ -230,4 +240,4 @@ class SimulationPass(RenderPass):
if changed_object.callDecoration("getLayerData"): # Any layer data has changed. if changed_object.callDecoration("getLayerData"): # Any layer data has changed.
self._switching_layers = True self._switching_layers = True
self._old_current_layer = 0 self._old_current_layer = 0
self._old_current_path = 0 self._old_current_path = 0.0

View File

@ -1,6 +1,5 @@
# Copyright (c) 2021 Ultimaker B.V. # Copyright (c) 2021 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher. # Cura is released under the terms of the LGPLv3 or higher.
import sys import sys
from PyQt6.QtCore import Qt from PyQt6.QtCore import Qt
@ -58,6 +57,7 @@ class SimulationView(CuraView):
LAYER_VIEW_TYPE_LINE_TYPE = 1 LAYER_VIEW_TYPE_LINE_TYPE = 1
LAYER_VIEW_TYPE_FEEDRATE = 2 LAYER_VIEW_TYPE_FEEDRATE = 2
LAYER_VIEW_TYPE_THICKNESS = 3 LAYER_VIEW_TYPE_THICKNESS = 3
SIMULATION_FACTOR = 3
_no_layers_warning_preference = "view/no_layers_warning" _no_layers_warning_preference = "view/no_layers_warning"
@ -74,19 +74,20 @@ class SimulationView(CuraView):
self._old_max_layers = 0 self._old_max_layers = 0
self._max_paths = 0 self._max_paths = 0
self._current_path_num = 0 self._current_path_num: float = 0.0
self._current_time = 0.0
self._minimum_path_num = 0 self._minimum_path_num = 0
self.currentLayerNumChanged.connect(self._onCurrentLayerNumChanged) self.currentLayerNumChanged.connect(self._onCurrentLayerNumChanged)
self._busy = False self._busy = False
self._simulation_running = False self._simulation_running = False
self._ghost_shader = None # type: Optional["ShaderProgram"] self._ghost_shader: Optional["ShaderProgram"] = None
self._layer_pass = None # type: Optional[SimulationPass] self._layer_pass: Optional[SimulationPass] = None
self._composite_pass = None # type: Optional[CompositePass] self._composite_pass: Optional[CompositePass] = None
self._old_layer_bindings = None # type: Optional[List[str]] self._old_layer_bindings: Optional[List[str]] = None
self._simulationview_composite_shader = None # type: Optional["ShaderProgram"] self._simulationview_composite_shader: Optional["ShaderProgram"] = None
self._old_composite_shader = None # type: Optional["ShaderProgram"] self._old_composite_shader: Optional["ShaderProgram"] = None
self._max_feedrate = sys.float_info.min self._max_feedrate = sys.float_info.min
self._min_feedrate = sys.float_info.max self._min_feedrate = sys.float_info.max
@ -96,14 +97,15 @@ class SimulationView(CuraView):
self._min_line_width = sys.float_info.max self._min_line_width = sys.float_info.max
self._min_flow_rate = sys.float_info.max self._min_flow_rate = sys.float_info.max
self._max_flow_rate = sys.float_info.min self._max_flow_rate = sys.float_info.min
self._cumulative_line_duration = {}
self._global_container_stack = None # type: Optional[ContainerStack] self._global_container_stack: Optional[ContainerStack] = None
self._proxy = None self._proxy = None
self._resetSettings() self._resetSettings()
self._legend_items = None self._legend_items = None
self._show_travel_moves = False self._show_travel_moves = False
self._nozzle_node = None # type: Optional[NozzleNode] self._nozzle_node: Optional[NozzleNode] = None
Application.getInstance().getPreferences().addPreference("view/top_layer_count", 5) Application.getInstance().getPreferences().addPreference("view/top_layer_count", 5)
Application.getInstance().getPreferences().addPreference("view/only_show_top_layers", False) Application.getInstance().getPreferences().addPreference("view/only_show_top_layers", False)
@ -125,8 +127,7 @@ class SimulationView(CuraView):
self._only_show_top_layers = bool(Application.getInstance().getPreferences().getValue("view/only_show_top_layers")) self._only_show_top_layers = bool(Application.getInstance().getPreferences().getValue("view/only_show_top_layers"))
self._compatibility_mode = self._evaluateCompatibilityMode() self._compatibility_mode = self._evaluateCompatibilityMode()
self._slice_first_warning_message = Message(catalog.i18nc("@info:status", self._slice_first_warning_message = Message(catalog.i18nc("@info:status", "Nothing is shown because you need to slice first."),
"Nothing is shown because you need to slice first."),
title=catalog.i18nc("@info:title", "No layers to show"), title=catalog.i18nc("@info:title", "No layers to show"),
option_text=catalog.i18nc("@info:option_text", option_text=catalog.i18nc("@info:option_text",
"Do not show this message again"), "Do not show this message again"),
@ -187,9 +188,85 @@ class SimulationView(CuraView):
def getMaxLayers(self) -> int: def getMaxLayers(self) -> int:
return self._max_layers return self._max_layers
def getCurrentPath(self) -> int: def getCurrentPath(self) -> float:
return self._current_path_num return self._current_path_num
def setTime(self, time: float) -> None:
cumulative_line_duration = self.cumulativeLineDuration()
if len(cumulative_line_duration) > 0:
self._current_time = time
left_i = 0
right_i = self._max_paths - 1
total_duration = cumulative_line_duration[-1]
# make an educated guess about where to start
i = int(right_i * max(0.0, min(1.0, self._current_time / total_duration)))
# binary search for the correct path
while left_i < right_i:
if cumulative_line_duration[i] <= self._current_time:
left_i = i + 1
else:
right_i = i
i = int((left_i + right_i) / 2)
left_value = cumulative_line_duration[i - 1] if i > 0 else 0.0
right_value = cumulative_line_duration[i]
assert (left_value <= self._current_time <= right_value)
fractional_value = (self._current_time - left_value) / (right_value - left_value)
self.setPath(i + fractional_value)
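For reference, the same time-to-fractional-path mapping can be sketched with the standard-library bisect module; this is a standalone illustration with made-up durations, not the code used above:
import bisect

def time_to_path(cumulative_durations, current_time):
    # cumulative_durations[i] is the total simulated time up to and including path i.
    i = bisect.bisect_right(cumulative_durations, current_time)
    i = min(i, len(cumulative_durations) - 1)
    left = cumulative_durations[i - 1] if i > 0 else 0.0
    right = cumulative_durations[i]
    fraction = (current_time - left) / (right - left) if right > left else 0.0
    return i + fraction

print(time_to_path([1.0, 3.0, 6.0], 2.0))  # 1.5: halfway through the second path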
def advanceTime(self, time_increase: float) -> bool:
"""
Advance the time by the given amount.
:param time_increase: The amount of time to advance (in seconds).
:return: True if the time was advanced, False if the end of the simulation was reached.
"""
total_duration = 0.0
if len(self.cumulativeLineDuration()) > 0:
total_duration = self.cumulativeLineDuration()[-1]
if self._current_time + time_increase > total_duration:
# If we have reached the end of the current layer, go to the next layer.
if self.getCurrentLayer() == self.getMaxLayers():
# If we are already at the last layer, clamp to the end of the simulation and report that it has finished.
self.setTime(total_duration)
return False
# advance to the next layer, and reset the time
self.setLayer(self.getCurrentLayer() + 1)
self.setTime(0.0)
else:
self.setTime(self._current_time + time_increase)
return True
def cumulativeLineDuration(self) -> List[float]:
# Make sure _cumulative_line_duration is initialized properly
if self.getCurrentLayer() not in self._cumulative_line_duration:
# Clear the cache; it only holds data for the current layer
self._cumulative_line_duration = {}
self._cumulative_line_duration[self.getCurrentLayer()] = []
total_duration = 0.0
polylines = self.getLayerData()
if polylines is not None:
for polyline in polylines.polygons:
for line_duration in list((polyline.lineLengths / polyline.lineFeedrates)[0]):
total_duration += line_duration / SimulationView.SIMULATION_FACTOR
self._cumulative_line_duration[self.getCurrentLayer()].append(total_duration)
return self._cumulative_line_duration[self.getCurrentLayer()]
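A minimal standalone sketch of how the cumulative durations above are accumulated, with illustrative line lengths and feedrates; in the real code these come from LayerPolygon, and SIMULATION_FACTOR is the speed-up constant defined on this view:
SIMULATION_FACTOR = 3

line_lengths = [10.0, 20.0, 5.0]      # mm, illustrative
line_feedrates = [50.0, 100.0, 25.0]  # mm/s, illustrative

cumulative = []
total = 0.0
for length, feedrate in zip(line_lengths, line_feedrates):
    # Each line's duration is length / feedrate, sped up by the simulation factor.
    total += (length / feedrate) / SIMULATION_FACTOR
    cumulative.append(total)

print(cumulative)  # approximately [0.067, 0.133, 0.2]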
def getLayerData(self) -> Optional["LayerData"]:
scene = self.getController().getScene()
for node in DepthFirstIterator(scene.getRoot()): # type: ignore
layer_data = node.callDecoration("getLayerData")
if not layer_data:
continue
return layer_data.getLayer(self.getCurrentLayer())
return None
def getMinimumPath(self) -> int: def getMinimumPath(self) -> int:
return self._minimum_path_num return self._minimum_path_num
@ -277,7 +354,7 @@ class SimulationView(CuraView):
self._startUpdateTopLayers() self._startUpdateTopLayers()
self.currentLayerNumChanged.emit() self.currentLayerNumChanged.emit()
def setPath(self, value: int) -> None: def setPath(self, value: float) -> None:
""" """
Set the upper end of the range of visible paths on the current layer. Set the upper end of the range of visible paths on the current layer.
@ -287,6 +364,9 @@ class SimulationView(CuraView):
if self._current_path_num != value: if self._current_path_num != value:
self._current_path_num = min(max(value, 0), self._max_paths) self._current_path_num = min(max(value, 0), self._max_paths)
self._minimum_path_num = min(self._minimum_path_num, self._current_path_num) self._minimum_path_num = min(self._minimum_path_num, self._current_path_num)
# Update _current_time when the path is changed by the user
if self._current_path_num < self._max_paths and round(self._current_path_num) == self._current_path_num:
self._current_time = self.cumulativeLineDuration()[int(self._current_path_num)]
self._startUpdateTopLayers() self._startUpdateTopLayers()
self.currentPathNumChanged.emit() self.currentPathNumChanged.emit()
@ -492,6 +572,7 @@ class SimulationView(CuraView):
self._max_thickness = sys.float_info.min self._max_thickness = sys.float_info.min
self._min_flow_rate = sys.float_info.max self._min_flow_rate = sys.float_info.max
self._max_flow_rate = sys.float_info.min self._max_flow_rate = sys.float_info.min
self._cumulative_line_duration = {}
# The colour scheme is only influenced by the visible lines, so filter the lines by if they should be visible. # The colour scheme is only influenced by the visible lines, so filter the lines by if they should be visible.
visible_line_types = [] visible_line_types = []

View File

@ -127,6 +127,7 @@ Item
function resumeSimulation() function resumeSimulation()
{ {
UM.SimulationView.setSimulationRunning(true) UM.SimulationView.setSimulationRunning(true)
UM.SimulationView.setCurrentPath(UM.SimulationView.currentPath)
simulationTimer.start() simulationTimer.start()
layerSlider.manuallyChanged = false layerSlider.manuallyChanged = false
pathSlider.manuallyChanged = false pathSlider.manuallyChanged = false
@ -136,54 +137,19 @@ Item
    Timer
    {
        id: simulationTimer
-        interval: 100
+        interval: 1000 / 15
        running: false
        repeat: true
        onTriggered:
        {
-            var currentPath = UM.SimulationView.currentPath
-            var numPaths = UM.SimulationView.numPaths
-            var currentLayer = UM.SimulationView.currentLayer
-            var numLayers = UM.SimulationView.numLayers
-            // When the user plays the simulation, if the path slider is at the end of this layer, we start
-            // the simulation at the beginning of the current layer.
-            if (!isSimulationPlaying)
-            {
-                if (currentPath >= numPaths)
-                {
-                    UM.SimulationView.setCurrentPath(0)
-                }
-                else
-                {
-                    UM.SimulationView.setCurrentPath(currentPath + 1)
-                }
-            }
-            // If the simulation is already playing and we reach the end of a layer, then it automatically
-            // starts at the beginning of the next layer.
-            else
-            {
-                if (currentPath >= numPaths)
-                {
-                    // At the end of the model, the simulation stops
-                    if (currentLayer >= numLayers)
-                    {
-                        playButton.pauseSimulation()
-                    }
-                    else
-                    {
-                        UM.SimulationView.setCurrentLayer(currentLayer + 1)
-                        UM.SimulationView.setCurrentPath(0)
-                    }
-                }
-                else
-                {
-                    UM.SimulationView.setCurrentPath(currentPath + 1)
-                }
-            }
+            // divide by 1000 to account for ms to s conversion
+            const advance_time = simulationTimer.interval / 1000.0;
+            if (!UM.SimulationView.advanceTime(advance_time)) {
+                playButton.pauseSimulation();
+            }
            // The status must be set here instead of in the resumeSimulation function otherwise it won't work
            // correctly, because part of the logic is in this trigger function.
-            isSimulationPlaying = true
+            isSimulationPlaying = true;
        }
    }
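The timer above now only calls UM.SimulationView.advanceTime() and pauses playback when it returns false. A toy, self-contained model of that contract (illustrative only; PathPlayhead is not a Cura class, and the rollover-to-next-layer behaviour is assumed from the QML change):

```python
from typing import List

class PathPlayhead:
    """Toy model of the advanceTime() contract used by the QML timer above:
    advance a playhead through per-layer cumulative line durations and report
    False once the last layer is exhausted, so the caller can pause."""

    def __init__(self, layers: List[List[float]]) -> None:
        self._layers = layers  # cumulative line durations (seconds) per layer
        self._layer = 0
        self._time = 0.0

    def advance(self, dt: float) -> bool:
        self._time += dt
        while self._time >= self._layers[self._layer][-1]:
            if self._layer + 1 >= len(self._layers):
                return False  # end of the print: caller should pause
            self._time -= self._layers[self._layer][-1]
            self._layer += 1
        return True

# Two layers taking 0.7 s and 0.5 s: at 15 ticks per second the playhead
# reports False on the tick that crosses the 1.2 s total.
player = PathPlayhead([[0.2, 0.6, 0.7], [0.25, 0.5]])
ticks = 0
while player.advance(1 / 15):
    ticks += 1
print(ticks)  # 17 ticks of ~66.7 ms, i.e. ~1.13 s of playback before the end
```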
View File
@ -50,10 +50,14 @@ class SimulationViewProxy(QObject):
    def numPaths(self):
        return self._simulation_view.getMaxPaths()

-    @pyqtProperty(int, notify=currentPathChanged)
+    @pyqtProperty(float, notify=currentPathChanged)
    def currentPath(self):
        return self._simulation_view.getCurrentPath()

+    @pyqtSlot(float, result=bool)
+    def advanceTime(self, duration: float) -> bool:
+        return self._simulation_view.advanceTime(duration)

    @pyqtProperty(int, notify=currentPathChanged)
    def minimumPath(self):
        return self._simulation_view.getMinimumPath()
@ -78,8 +82,8 @@ class SimulationViewProxy(QObject):
    def setMinimumLayer(self, layer_num):
        self._simulation_view.setMinimumLayer(layer_num)

-    @pyqtSlot(int)
-    def setCurrentPath(self, path_num):
+    @pyqtSlot(float)
+    def setCurrentPath(self, path_num: float):
        self._simulation_view.setPath(path_num)

    @pyqtSlot(int)
@ -215,4 +219,3 @@ class SimulationViewProxy(QObject):
        self._simulation_view.activityChanged.disconnect(self._onActivityChanged)
        self._simulation_view.globalStackChanged.disconnect(self._onGlobalStackChanged)
        self._simulation_view.preferencesChanged.disconnect(self._onPreferencesChanged)
View File
@ -331,7 +331,7 @@ class CloudOutputDevice(UltimakerNetworkedPrinterOutputDevice):
            return False
        [printer, *_] = self._printers
-        return printer.pinterType in ("ultimaker_methodx", "ultimaker_methodxl")
+        return printer.name in ("ultimaker_methodx", "ultimaker_methodxl")

    @pyqtProperty(bool, notify=_cloudClusterPrintersChanged)
    def supportsPrintJobActions(self) -> bool:
View File
@ -1,41 +1,47 @@
### Direct requirements for Uranium and libCharon ###
-PyQt6-sip==13.4.1 \
-    --hash=sha256:0df998f2b6ceeacfd10de773441572e215be0c9cae566cc7dd36e231bf714a12 \
-    --hash=sha256:224575e84805c4317bacd5d1b8e93e0ad5c48685dadbbe1e902d4ebe16f22828 \
-    --hash=sha256:36ae29cdc223cacc1257d0f5075cf81474550c6d26b728f922487a2aa935f130 \
-    --hash=sha256:3a674c591d4274d4ea8127205290e927a7dab0eb87a0038d4f4ea1d430782649 \
-    --hash=sha256:3ef9392e4ae29d393b79237d85840cdc6b8831f36eed5d56c7d9b329b380cc8d \
-    --hash=sha256:43935873d60f57719632840d517afee04ef8f30e92cfe0dadc7e6326691920fc \
-    --hash=sha256:5731f22618435654352ef07684549a17be82b75254227fc80b4b5b0b59fc6656 \
-    --hash=sha256:5bc4beb6fb1de4c9ba8beee7b1a4a813fa888c3b095206dafcd25d7e6e4ed2a7 \
-    --hash=sha256:5c36ab984402e96792eebf4b031abfaa589aa20af3190a79c54502c16964d97e \
-    --hash=sha256:a2a0461992c6657f343308b150c4d6b57e9e7a0e5c2f79538434e7fb869ea827 \
-    --hash=sha256:a81490ee84d7a41a126b116081bd97d758f41bf706aee0a8cec24d6e4c660184 \
-    --hash=sha256:e00e287ea05bbc293fc6e2198301962af9b7b622bd2daf4288f925a88ae35dc9 \
-    --hash=sha256:e670a7b2fb7e32204ce67d274017bfff3e21139d217d60cebbfcb75b019c91ee \
-    --hash=sha256:ee06f255787a0b4957f357f93b78d2a11ca3761916833e3afa83f1381d4d1a46 \
-    --hash=sha256:fbee0d554e0e98f56dbf6d94b00a28cc32425938ad7ae98fd91f8822c5b24d45 \
-    --hash=sha256:fcc6d78314783f4a193f02353f431b7ea4d357f47c3c7a7d0740e723f69c64dc
-PyQt6==6.4.2 \
-    --hash=sha256:18d1daf98d9236d55102cdadafd1056f5802f3c9288fcf7238569937b71a89f0 \
-    --hash=sha256:25bd399b4a95dce65d5f937c1aa85d3c7e14a21745ae2a4ca14c0116cd104290 \
-    --hash=sha256:740244f608fe15ee1d89695c43f31a14caeca41c4f02ac36c86dfba4a5d5813d \
-    --hash=sha256:c128bc0f17833e324593e3db83e99470d451a197dd17ff0333927b946c935bd9
-PyQt6-Qt6==6.4.2 \
-    --hash=sha256:9f07c3c100cb46cca4074965e7494d4df4f0fc016497d5303c1fe135822876e1 \
-    --hash=sha256:a29b8c858babd523e80c8db5f8fd19792641588ec04eab49af18b7a4423eb99f \
-    --hash=sha256:c0e91d0275d428496cacff717a9b719c52bfa52b21f124d638b79cc2217bc81e \
-    --hash=sha256:d19c4e72615762cd6f0b043f23fa5f0b02656091427ce6de1efccd58e10e6a53
-PyQt6-NetworkAuth==6.4.0 \
-    --hash=sha256:ab6178b3b2902ae9939a148555cfcee8c7803d6b0d5924cd1bd8f3407b8b9210 \
-    --hash=sha256:c16ec80232d88024b60d04386a23cc93067e5644a65f47f26ffb13d84dcd4a6d \
-    --hash=sha256:c302cd0d838c7229eda5e26e0b1b3d3ec4f8720f8d9379472bce5a89ff0735c2 \
-    --hash=sha256:d948fc0cf43b64afbda2acb5bf2392f785a1e7a2950d79ea850c1a3f4ae12f1a
-PyQt6-NetworkAuth-Qt6==6.4.2 \
-    --hash=sha256:179094bcb4d4d056316c22d3d067cd94d4591da23f804461bfb025ccfa29b2b4 \
-    --hash=sha256:1de6abbb5fa6585b97ae49d3f64b0dfad40bd56b1a31744d9775ff26247241c8 \
-    --hash=sha256:79ec4b0fc9450bbedbff03541b93b10d1c7e761cd2cc16ce70d2b09dcdf8c720 \
-    --hash=sha256:d96d557fe61edb9b68d189f270f0393d6579c8d308e6b0d41bc0699371d7cb4e
+PyQt6-sip==13.6.0 \
+    --hash=sha256:0dfd22cfedd87e96f9d51e0778ca2ba3dc0be83e424e9e0f98f6994d8d9c90f0 \
+    --hash=sha256:13885361ca2cb2f5085d50359ba61b3fabd41b139fb58f37332acbe631ef2357 \
+    --hash=sha256:24441032a29791e82beb7dfd76878339058def0e97fdb7c1cea517f3a0e6e96b \
+    --hash=sha256:2486e1588071943d4f6657ba09096dc9fffd2322ad2c30041e78ea3f037b5778 \
+    --hash=sha256:3075d8b325382750829e6cde6971c943352309d35768a4d4da0587459606d562 \
+    --hash=sha256:33ea771fe777eb0d1a2c3ef35bcc3f7a286eb3ff09cd5b2fdd3d87d1f392d7e8 \
+    --hash=sha256:39854dba35f8e5a4288da26ecb5f40b4c5ec1932efffb3f49d5ea435a7f37fb3 \
+    --hash=sha256:3bf03e130fbfd75c9c06e687b86ba375410c7a9e835e4e03285889e61dd4b0c4 \
+    --hash=sha256:43fb8551796030aae3d66d6e35e277494071ec6172cd182c9569ab7db268a2f5 \
+    --hash=sha256:58f68a48400e0b3d1ccb18090090299bad26e3aed7ccb7057c65887b79b8aeea \
+    --hash=sha256:5b9c6b6f9cfccb48cbb78a59603145a698fb4ffd176764d7083e5bf47631d8df \
+    --hash=sha256:747f6ca44af81777a2c696bd501bc4815a53ec6fc94d4e25830e10bc1391f8ab \
+    --hash=sha256:86a7b67c64436e32bffa9c28c9f21bf14a9faa54991520b12c3f6f435f24df7f \
+    --hash=sha256:8c282062125eea5baf830c6998587d98c50be7c3a817a057fb95fef647184012 \
+    --hash=sha256:8f9df9f7ccd8a9f0f1d36948c686f03ce1a1281543a3e636b7b7d5e086e1a436 \
+    --hash=sha256:98bf954103b087162fa63b3a78f30b0b63da22fd6450b610ec1b851dbb798228 \
+    --hash=sha256:9adf672f9114687533a74d5c2d4c03a9a929ad5ad9c3e88098a7da1a440ab916 \
+    --hash=sha256:a6ce80bc24618d8a41be8ca51ad9f10e8bc4296dd90ab2809573df30a23ae0e5 \
+    --hash=sha256:d6b5f699aaed0ac1fcd23e8fbca70d8a77965831b7c1ce474b81b1678817a49d \
+    --hash=sha256:fa759b6339ff7e25f9afe2a6b651b775f0a36bcb3f5fa85e81a90d3b033c83f4 \
+    --hash=sha256:fa7b10af7488efc5e53b41dd42c0f421bde6c2865a107af7ae259aff9d841da9
+PyQt6==6.6.0 \
+    --hash=sha256:33655db05ac2de699320f035250c21434c77144a6a2943aca3f4c579dabc3f7b \
+    --hash=sha256:3ef68830a9b32050c30f7962c56a5927802c9193b68eaf405faecb8ce9ae10a8 \
+    --hash=sha256:d41512d66044c2df9c5f515a56a922170d68a37b3406ffddc8b4adc57181b576 \
+    --hash=sha256:fc7185d65755f26d7a6842492ec5398c92544dc4eafbbcbef1b1922aca585c96
+PyQt6-Qt6==6.6.0 \
+    --hash=sha256:1b079a33088d32ff47872cdb37fd15aa42101f0be46c3340244483849b781438 \
+    --hash=sha256:8cb30d64a4d32465ea1686bc827cbe452225fb387c4873356b0fa7b9ae63534f \
+    --hash=sha256:a151f34712cd645111e89cb30b02e5fb69c9dcc3603ab3c03a561e874bd7cbcf \
+    --hash=sha256:e5483ae04bf107411c7469f1be9f9e2eb9840303e788b3ac524fe30af90d45f4
+PyQt6-NetworkAuth==6.6.0 \
+    --hash=sha256:7b90b81792fe53105287c8cbb5e4b22bc44a482268ffb7d3e33f852807f86182 \
+    --hash=sha256:c7e2335159aa795e2fe6fb069ccce6308672ab80f26c50fab57caf957371cbb5 \
+    --hash=sha256:cdfc0bfaea16a9e09f075bdafefb996aa9fdec392052ba4fb3cbac233c1958fb \
+    --hash=sha256:f60ff9a62f5129dc2a9d4c495fb47f9a03e4dfb666b50fb7d61f46e89bf7b6a2
+PyQt6-NetworkAuth-Qt6==6.6.0 \
+    --hash=sha256:481d9093e1fb1ac6843d8beabcd359cc34b74b9a2cbb3e2b68d96bd3f178d4e0 \
+    --hash=sha256:4cc48fd375730a0ba5fbed9d64abb2914f587377560a78a63aff893f9e276a45 \
+    --hash=sha256:5006deabf55304d4a3e0b3c954f93e5835546b11e789d14653a2493d12d3a063 \
+    --hash=sha256:bcd56bfc892fec961c51eba3c0bf32ba8317a762d9e254d3830569611ed569d6
certifi==2023.5.7; \
    --hash=sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716
cryptography==41.0.1 \
View File
@ -8305,6 +8305,88 @@
}
}
},
"ppr":
{
"label": "Print Process Reporting",
"type": "category",
"icon": "DocumentFilled",
"description": "Reporting events that go out of set thresholds",
"enabled": false,
"children":
{
"ppr_enable":
{
"label": "Enable Print Process Reporting",
"description": "Enable print process reporting for setting threshold values for possible fault detection.",
"type": "bool",
"enabled": false,
"default_value": false,
"value": false,
"settable_per_mesh": false,
"settable_per_extruder": false
},
"flow_warn_limit":
{
"label": "Flow Warning",
"description": "Limit on the flow warning for detection.",
"default_value": "15.0",
"enabled": "ppr_enable",
"unit": "%",
"type": "float",
"settable_per_extruder": true
},
"flow_anomaly_limit":
{
"label": "Flow Limit",
"description": "Limit on flow anomaly for detection.",
"default_value": "25.0",
"enabled": "ppr_enable",
"unit": "%",
"type": "float",
"settable_per_extruder": true
},
"print_temp_warn_limit":
{
"label": "Print temperature Warning",
"description": "Limit on Print temperature warning for detection.",
"unit": "\u00b0C",
"type": "float",
"default_value": "3.0",
"enabled": "ppr_enable",
"settable_per_extruder": true
},
"print_temp_anomaly_limit":
{
"label": "Print temperature Limit",
"description": "Limit on Print Temperature anomaly for detection.",
"unit": "\u00b0C",
"type": "float",
"default_value": "7.0",
"enabled": "ppr_enable",
"settable_per_extruder": true
},
"bv_temp_warn_limit":
{
"label": "Build Volume temperature Warning",
"description": "Limit on Build Volume Temperature warning for detection.",
"unit": "\u00b0C",
"type": "float",
"default_value": "7.5",
"enabled": "ppr_enable",
"settable_per_extruder": false
},
"bv_temp_anomaly_limit":
{
"label": "Build Volume temperature Limit",
"description": "Limit on Build Volume temperature Anomaly for detection.",
"unit": "\u00b0C",
"type": "float",
"default_value": "10.0",
"enabled": "ppr_enable",
"settable_per_extruder": false
}
}
},
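The ppr thresholds above are ordinary per-extruder/global settings, so a reporting plugin could read them from the active stack with the usual getProperty() lookups. A hypothetical helper, not part of this commit (the function name and return shape are illustrative):

```python
# Sketch only: read the PPR thresholds so live values can be compared against them.
from cura.CuraApplication import CuraApplication

def get_ppr_thresholds(extruder_index: int = 0):
    global_stack = CuraApplication.getInstance().getGlobalContainerStack()
    if global_stack is None or not global_stack.getProperty("ppr_enable", "value"):
        return None
    extruder = global_stack.extruderList[extruder_index]
    return {
        # Per-extruder limits come from the extruder stack...
        "flow_warn_limit": extruder.getProperty("flow_warn_limit", "value"),
        "flow_anomaly_limit": extruder.getProperty("flow_anomaly_limit", "value"),
        "print_temp_warn_limit": extruder.getProperty("print_temp_warn_limit", "value"),
        "print_temp_anomaly_limit": extruder.getProperty("print_temp_anomaly_limit", "value"),
        # ...while the build-volume limits are not settable per extruder.
        "bv_temp_warn_limit": global_stack.getProperty("bv_temp_warn_limit", "value"),
        "bv_temp_anomaly_limit": global_stack.getProperty("bv_temp_anomaly_limit", "value"),
    }
```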
"command_line_settings": "command_line_settings":
{ {
"label": "Command Line Settings", "label": "Command Line Settings",
View File
@ -329,6 +329,7 @@
"machine_name": { "default_value": "UltiMaker Method" }, "machine_name": { "default_value": "UltiMaker Method" },
"machine_nozzle_cool_down_speed": { "value": 0.8 }, "machine_nozzle_cool_down_speed": { "value": 0.8 },
"machine_nozzle_heat_up_speed": { "value": 3.5 }, "machine_nozzle_heat_up_speed": { "value": 3.5 },
"machine_scale_fan_speed_zero_to_one": { "value": true },
"machine_start_gcode": { "default_value": "" }, "machine_start_gcode": { "default_value": "" },
"material_bed_temperature": { "enabled": "machine_heated_bed" }, "material_bed_temperature": { "enabled": "machine_heated_bed" },
"material_bed_temperature_layer_0": { "enabled": "machine_heated_bed" }, "material_bed_temperature_layer_0": { "enabled": "machine_heated_bed" },
@ -353,11 +354,12 @@
"print_sequence": { "enabled": false }, "print_sequence": { "enabled": false },
"raft_base_line_spacing": { "value": "2*raft_base_line_width" }, "raft_base_line_spacing": { "value": "2*raft_base_line_width" },
"raft_base_line_width": { "value": 1.4 }, "raft_base_line_width": { "value": 1.4 },
"raft_base_speed": { "value": 5 }, "raft_base_speed": { "value": 10 },
"raft_base_thickness": { "value": 0.8 }, "raft_base_thickness": { "value": 0.8 },
"raft_interface_extruder_nr": { "value": "raft_surface_extruder_nr" }, "raft_interface_extruder_nr": { "value": "raft_surface_extruder_nr" },
"raft_interface_layers": { "value": 2 }, "raft_interface_layers": { "value": 2 },
"raft_interface_line_width": { "value": 1.2 }, "raft_interface_line_width": { "value": 0.7 },
"raft_interface_speed": { "value": 90 },
"raft_interface_thickness": { "value": 0.3 }, "raft_interface_thickness": { "value": 0.3 },
"raft_margin": { "value": 3 }, "raft_margin": { "value": 3 },
"raft_surface_extruder_nr": { "value": "int(anyExtruderWithMaterial('material_is_support_material')) if support_enable and extruderValue(support_extruder_nr,'material_is_support_material') else raft_base_extruder_nr" }, "raft_surface_extruder_nr": { "value": "int(anyExtruderWithMaterial('material_is_support_material')) if support_enable and extruderValue(support_extruder_nr,'material_is_support_material') else raft_base_extruder_nr" },
View File
@ -45,6 +45,16 @@ UM.Dialog
        anchors.centerIn: parent
    }

+    Image
+    {
+        id: enterpriseLogo
+        visible: CuraApplication.isEnterprise
+        source: UM.Theme.getImage("enterprise")
+        fillMode: Image.PreserveAspectFit
+        anchors.bottom: parent.bottom
+    }

    UM.Label
    {
        id: version
View File
@ -120,6 +120,10 @@ UM.PreferencesPage
            UM.Preferences.resetPreference("info/send_slice_info")
            sendDataCheckbox.checked = boolCheck(UM.Preferences.getValue("info/send_slice_info"))

+            UM.Preferences.resetPreference("info/send_engine_crash")
+            sendEngineCrashCheckbox.checked = boolCheck(UM.Preferences.getValue("info/send_engine_crash"))

            UM.Preferences.resetPreference("info/automatic_update_check")
            checkUpdatesCheckbox.checked = boolCheck(UM.Preferences.getValue("info/automatic_update_check"))
@ -855,6 +859,21 @@ UM.PreferencesPage
                font: UM.Theme.getFont("medium_bold")
                text: catalog.i18nc("@label", "Privacy")
            }

+            UM.TooltipArea
+            {
+                width: childrenRect.width
+                height: visible ? childrenRect.height : 0
+                text: catalog.i18nc("@info:tooltip", "Should slicing crashes be automatically reported to Ultimaker? Note, no models, IP addresses or other personally identifiable information is sent or stored.")
+
+                UM.CheckBox
+                {
+                    id: sendEngineCrashCheckbox
+                    text: catalog.i18nc("@option:check","Send (anonymous) engine crash reports")
+                    checked: boolCheck(UM.Preferences.getValue("info/send_engine_crash"))
+                    onCheckedChanged: UM.Preferences.setValue("info/send_engine_crash", checked)
+                }
+            }

            UM.TooltipArea
            {
                width: childrenRect.width
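The checkbox above persists to the info/send_engine_crash preference through UM.Preferences. On the Python side the same preference would be registered and read roughly like this (a sketch; where Cura actually registers the default and consumes the flag is not shown in this diff, and the default value is assumed):

```python
# Sketch only: registering and reading the preference wired to the checkbox above.
from UM.Application import Application

preferences = Application.getInstance().getPreferences()
preferences.addPreference("info/send_engine_crash", True)  # default assumed here

if preferences.getValue("info/send_engine_crash"):
    # ... attach the (anonymous) crash reporter for the slicing engine ...
    pass
```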
View File
@ -11,7 +11,7 @@ Cura.ExpandablePopup
{
    id: machineSelector

-    property Cura.MachineManager machineManager
+    property var machineManager: Cura.MachineManager

    property bool isNetworkPrinter: machineManager.activeMachineHasNetworkConnection
    property bool isConnectedCloudPrinter: machineManager.activeMachineHasCloudConnection
    property bool isCloudRegistered: machineManager.activeMachineHasCloudRegistration
@ -107,6 +107,7 @@ Cura.ExpandablePopup
            {
                return UM.Theme.getIcon("Printer", "medium")
            }
            else
            {
                return ""
Binary file not shown. (New image added, 2.4 KiB.)