Merge branch 'main' into fix_qml_py_re

Erwan MATHIEU 2025-02-11 13:19:50 +01:00 committed by GitHub
commit 01fd82e8e5
8063 changed files with 78301 additions and 15031 deletions

View File

@@ -6,9 +6,9 @@ body:
attributes:
value: |
### ✨Try our improved Cura 5.7✨
Before filling out the report below, we want you to try the latest Cura 5.7 Beta.
Before filling out the report below, we want you to try the latest Cura 5.7.
This version of Cura has become significantly more reliable and has an updated slicing engine that will automatically send a report to the Cura Team for analysis.
#### [You can find the downloads here](https://github.com/Ultimaker/Cura/releases/tag/5.7.0-beta.1) ####
#### [You can find the downloads here](https://github.com/Ultimaker/Cura/releases/latest) ####
If you still encounter a crash, you are welcome to report the issue so we can use your model as a test case; you can find instructions on how to do that below.
### Project File
@@ -35,7 +35,7 @@ body:
- type: markdown
attributes:
value: |
We work hard on fixing slicing crashes. Our most recent release is 5.6.0.
We work hard on fixing slicing crashes. Our most recent release is 5.7.1.
If you are not on the latest version of Cura, [you can download it here](https://github.com/Ultimaker/Cura/releases/latest)
- type: input
attributes:

View File

@@ -20,21 +20,12 @@ on:
- '[0-9].[0-9]*'
- '[0-9].[0-9][0-9]*'
env:
CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
jobs:
conan-recipe-version:
uses: ultimaker/cura-workflows/.github/workflows/conan-recipe-version.yml@main
conan-package:
uses: ultimaker/cura-workflows/.github/workflows/conan-package.yml@main
with:
project_name: cura_resources
conan-package-export:
needs: [ conan-recipe-version ]
uses: ultimaker/cura-workflows/.github/workflows/conan-recipe-export.yml@main
with:
recipe_id_full: ${{ needs.conan-recipe-version.outputs.recipe_id_full }}
recipe_id_latest: ${{ needs.conan-recipe-version.outputs.recipe_id_latest }}
conan_recipe_root: "./resources/"
secrets: inherit
platform_windows: false
platform_mac: false
install_system_dependencies: false
secrets: inherit

View File

@@ -32,28 +32,7 @@ on:
- '[0-9].[0-9]*'
- '[0-9].[0-9][0-9]*'
env:
CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
jobs:
conan-recipe-version:
uses: ultimaker/cura-workflows/.github/workflows/conan-recipe-version.yml@main
with:
project_name: cura
conan-package-export:
needs: [ conan-recipe-version ]
uses: ultimaker/cura-workflows/.github/workflows/conan-recipe-export.yml@main
with:
recipe_id_full: ${{ needs.conan-recipe-version.outputs.recipe_id_full }}
recipe_id_latest: ${{ needs.conan-recipe-version.outputs.recipe_id_latest }}
conan-package:
uses: ultimaker/cura-workflows/.github/workflows/conan-package.yml@main
secrets: inherit
conan-package-create:
needs: [ conan-recipe-version, conan-package-export ]
uses: ultimaker/cura-workflows/.github/workflows/conan-package-create-linux.yml@main
with:
recipe_id_full: ${{ needs.conan-recipe-version.outputs.recipe_id_full }}
conan_extra_args: "-o cura:enable_i18n=True"
secrets: inherit

View File

@@ -6,290 +6,34 @@ on:
inputs:
cura_conan_version:
description: 'Cura Conan Version'
default: 'cura/latest@ultimaker/testing'
required: true
default: ''
type: string
package_overrides:
description: 'List of specific packages to be used (space-separated)'
default: ''
type: string
conan_args:
description: 'Conan args: eq.: --require-override'
description: 'Conan args'
default: ''
required: false
type: string
enterprise:
description: 'Build Cura as an Enterprise edition'
default: false
required: true
type: boolean
staging:
description: 'Use staging API'
default: false
required: true
type: boolean
nightly:
description: 'Upload to nightly release'
default: false
required: true
type: boolean
workflow_call:
inputs:
cura_conan_version:
default: 'cura/latest@ultimaker/testing'
required: true
type: string
conan_args:
default: ''
required: false
type: string
enterprise:
default: false
required: true
type: boolean
staging:
default: false
required: true
type: boolean
nightly:
default: false
required: true
type: boolean
schedule:
# Daily at 4:15 CET (main-branch) and 5:15 CET (release-branch)
- cron: '15 3 * * *'
- cron: '15 4 * * *'
env:
CONAN_ARGS: ${{ inputs.conan_args || '' }}
ENTERPRISE: ${{ inputs.enterprise || false }}
STAGING: ${{ inputs.staging || false }}
jobs:
default_values:
uses: ultimaker/cura-workflows/.github/workflows/cura-installer-default-value.yml@main
installers:
name: Create installers
uses: ultimaker/cura-workflows/.github/workflows/cura-installers.yml@main
with:
cura_conan_version: ${{ inputs.cura_conan_version }}
latest_release: '5.6'
latest_release_schedule_hour: 4
latest_release_tag: 'nightly'
windows-installer:
uses: ultimaker/cura-workflows/.github/workflows/cura-installer-windows.yml@main
needs: [ default_values ]
with:
cura_conan_version: ${{ needs.default_values.outputs.cura_conan_version }}
conan_args: ${{ github.event.inputs.conan_args }}
enterprise: ${{ github.event.inputs.enterprise == 'true' }}
staging: ${{ github.event.inputs.staging == 'true' }}
architecture: X64
operating_system: self-hosted-Windows-X64
package_overrides: ${{ inputs.package_overrides }}
conan_args: ${{ inputs.conan_args }}
enterprise: ${{ inputs.enterprise }}
staging: ${{ inputs.staging }}
secrets: inherit
linux-installer:
uses: ultimaker/cura-workflows/.github/workflows/cura-installer-linux.yml@main
needs: [ default_values ]
with:
cura_conan_version: ${{ needs.default_values.outputs.cura_conan_version }}
conan_args: ${{ github.event.inputs.conan_args }}
enterprise: ${{ github.event.inputs.enterprise == 'true' }}
staging: ${{ github.event.inputs.staging == 'true' }}
architecture: X64
operating_system: self-hosted-Ubuntu22-X64
secrets: inherit
macos-installer:
uses: ultimaker/cura-workflows/.github/workflows/cura-installer-macos.yml@main
needs: [ default_values ]
with:
cura_conan_version: ${{ needs.default_values.outputs.cura_conan_version }}
conan_args: ${{ github.event.inputs.conan_args }}
enterprise: ${{ github.event.inputs.enterprise == 'true' }}
staging: ${{ github.event.inputs.staging == 'true' }}
architecture: X64
operating_system: self-hosted-X64
secrets: inherit
macos-arm-installer:
uses: ultimaker/cura-workflows/.github/workflows/cura-installer-macos.yml@main
needs: [ default_values ]
with:
cura_conan_version: ${{ needs.default_values.outputs.cura_conan_version }}
conan_args: ${{ github.event.inputs.conan_args }}
enterprise: ${{ github.event.inputs.enterprise == 'true' }}
staging: ${{ github.event.inputs.staging == 'true' }}
architecture: ARM64
operating_system: self-hosted-ARM64
secrets: inherit
# Run and update nightly release when the nightly input is set to true or if the schedule is triggered
update-nightly-release:
if: ${{ inputs.nightly || github.event_name == 'schedule' }}
runs-on: ubuntu-latest
needs: [ default_values, windows-installer, linux-installer, macos-installer, macos-arm-installer ]
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Download the run info
uses: actions/download-artifact@v4
with:
name: linux-run-info
- name: Set the run info as environment variables
run: |
. run_info.sh
- name: Output the name file name and extension
id: filename
shell: python
run: |
import os
import datetime
enterprise = "-Enterprise" if "${{ github.event.inputs.enterprise }}" == "true" else ""
linux = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-linux-X64"
mac_x64_dmg = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-macos-X64"
mac_x64_pkg = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-macos-X64"
mac_arm_dmg = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-macos-ARM64"
mac_arm_pkg = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-macos-ARM64"
win_msi = installer_filename = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-win64-X64"
win_exe = installer_filename = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-win64-X64"
nightly_name = "UltiMaker-Cura-" + os.getenv('CURA_VERSION_FULL').split("+")[0]
nightly_creation_time = str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
output_env = os.environ["GITHUB_OUTPUT"]
content = ""
if os.path.exists(output_env):
with open(output_env, "r") as f:
content = f.read()
with open(output_env, "w") as f:
f.write(content)
f.writelines(f"LINUX={linux}\n")
f.writelines(f"MAC_X64_DMG={mac_x64_dmg}\n")
f.writelines(f"MAC_X64_PKG={mac_x64_pkg}\n")
f.writelines(f"MAC_ARM_DMG={mac_arm_dmg}\n")
f.writelines(f"MAC_ARM_PKG={mac_arm_pkg}\n")
f.writelines(f"WIN_MSI={win_msi}\n")
f.writelines(f"WIN_EXE={win_exe}\n")
f.writelines(f"NIGHTLY_NAME={nightly_name}\n")
f.writelines(f"NIGHTLY_TIME={nightly_creation_time}\n")
- name: Download linux installer jobs artifacts
uses: actions/download-artifact@v4
with:
name: ${{ steps.filename.outputs.LINUX }}-AppImage
path: installers
- name: Download linux installer jobs asc artifacts
uses: actions/download-artifact@v4
with:
name: ${{ steps.filename.outputs.LINUX }}-asc
path: installers
- name: Rename Linux installer to nightlies
run: |
mv installers/${{ steps.filename.outputs.LINUX }}.AppImage installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-linux-X64.AppImage
mv installers/${{ steps.filename.outputs.LINUX }}.AppImage.asc installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-linux-X64.AppImage.asc
- name: Update nightly release for Linux
run: |
gh release upload ${{ needs.default_values.outputs.release_tag }} installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-linux-X64.AppImage --clobber
gh release upload ${{ needs.default_values.outputs.release_tag }} installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-linux-X64.AppImage.asc --clobber
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Download win msi installer jobs artifacts
uses: actions/download-artifact@v4
with:
name: ${{ steps.filename.outputs.WIN_MSI }}-msi
path: installers
- name: Download win exe installer jobs artifacts
uses: actions/download-artifact@v4
with:
name: ${{ steps.filename.outputs.WIN_EXE }}-exe
path: installers
- name: Rename Windows installers to nightlies
run: |
mv installers/${{ steps.filename.outputs.WIN_MSI }}.msi installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-win64-X64.msi
mv installers/${{ steps.filename.outputs.WIN_EXE }}.exe installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-win64-X64.exe
- name: Update nightly release for Windows
run: |
gh release upload ${{ needs.default_values.outputs.release_tag }} installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-win64-X64.msi --clobber
gh release upload ${{ needs.default_values.outputs.release_tag }} installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-win64-X64.exe --clobber
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Download MacOS (X64) dmg installer jobs artifacts
uses: actions/download-artifact@v4
with:
name: ${{ steps.filename.outputs.MAC_X64_DMG }}-dmg
path: installers
- name: Download MacOS (X64) pkg installer jobs artifacts
uses: actions/download-artifact@v4
with:
name: ${{ steps.filename.outputs.MAC_X64_PKG }}-pkg
path: installers
- name: Rename MacOS (X64) installers to nightlies
run: |
mv installers/${{ steps.filename.outputs.MAC_X64_DMG }}.dmg installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-X64.dmg
mv installers/${{ steps.filename.outputs.MAC_X64_PKG }}.pkg installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-X64.pkg
- name: Update nightly release for MacOS (X64)
run: |
gh release upload ${{ needs.default_values.outputs.release_tag }} installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-X64.dmg --clobber
gh release upload ${{ needs.default_values.outputs.release_tag }} installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-X64.pkg --clobber
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Download MacOS (ARM-64) dmg installer jobs artifacts
uses: actions/download-artifact@v4
with:
name: ${{ steps.filename.outputs.MAC_ARM_DMG }}-dmg
path: installers
- name: Download MacOS (ARM-64) pkg installer jobs artifacts
uses: actions/download-artifact@v4
with:
name: ${{ steps.filename.outputs.MAC_ARM_PKG }}-pkg
path: installers
- name: Rename MacOS (ARM-64) installers to nightlies
run: |
mv installers/${{ steps.filename.outputs.MAC_ARM_DMG }}.dmg installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-ARM64.dmg
mv installers/${{ steps.filename.outputs.MAC_ARM_PKG }}.pkg installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-ARM64.pkg
- name: Update nightly release for MacOS (ARM-64)
run: |
gh release upload ${{ needs.default_values.outputs.release_tag }} installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-ARM64.dmg --clobber
gh release upload ${{ needs.default_values.outputs.release_tag }} installers/${{ steps.filename.outputs.NIGHTLY_NAME }}-macos-ARM64.pkg --clobber
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: create the release notes
shell: python
run: |
import os
import datetime
from jinja2 import Template
with open(".github/workflows/release_notes.md.jinja", "r") as f:
release_notes = Template(f.read())
current_nightly_beta = "${{ needs.default_values.outputs.release_tag }}".split("nightly-")[-1]
with open("release-notes.md", "w") as f:
f.write(release_notes.render(
timestamp="${{ steps.filename.outputs.NIGHTLY_TIME }}",
branch="" if "${{ needs.default-values.outputs.release_tag == 'nightly' }}" == 'true' else current_nightly_beta,
branch_specific="" if os.getenv("GITHUB_REF") == "refs/heads/main" else f"?branch={current_nightly_beta}",
))
- name: Update nightly release description (with date)
if: always()
run: |
gh release edit ${{ needs.default_values.outputs.release_tag }} --title "${{ steps.filename.outputs.NIGHTLY_NAME }}" --notes-file release-notes.md
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@ -6,11 +6,14 @@ on:
inputs:
cura_conan_version:
description: 'Cura Conan Version'
default: 'cura/latest@ultimaker/testing'
required: true
default: ''
type: string
package_overrides:
description: 'List of specific packages to be used (space-separated)'
default: ''
type: string
conan_args:
description: 'Conan args: eq.: --require-override'
description: 'Conan args'
default: ''
required: false
type: string
@@ -24,13 +27,6 @@ on:
default: false
required: true
type: boolean
architecture:
description: 'Architecture'
required: true
default: 'X64'
type: choice
options:
- X64
operating_system:
description: 'OS'
required: true
@@ -45,9 +41,9 @@ jobs:
uses: ultimaker/cura-workflows/.github/workflows/cura-installer-linux.yml@main
with:
cura_conan_version: ${{ inputs.cura_conan_version }}
package_overrides: ${{ inputs.package_overrides }}
conan_args: ${{ inputs.conan_args }}
enterprise: ${{ inputs.enterprise }}
staging: ${{ inputs.staging }}
architecture: ${{ inputs.architecture }}
operating_system: ${{ inputs.operating_system }}
secrets: inherit

View File

@@ -6,11 +6,14 @@ on:
inputs:
cura_conan_version:
description: 'Cura Conan Version'
default: 'cura/latest@ultimaker/testing'
required: true
default: ''
type: string
package_overrides:
description: 'List of specific packages to be used (space-separated)'
default: ''
type: string
conan_args:
description: 'Conan args: eq.: --require-override'
description: 'Conan args'
default: ''
required: false
type: string
@@ -47,6 +50,7 @@ jobs:
uses: ultimaker/cura-workflows/.github/workflows/cura-installer-macos.yml@main
with:
cura_conan_version: ${{ inputs.cura_conan_version }}
package_overrides: ${{ inputs.package_overrides }}
conan_args: ${{ inputs.conan_args }}
enterprise: ${{ inputs.enterprise }}
staging: ${{ inputs.staging }}

16
.github/workflows/nightly-stable.yml vendored Normal file
View File

@@ -0,0 +1,16 @@
name: Nightly build - stable release
run-name: Nightly build - stable release
on:
schedule:
# Daily at 5:15 CET
- cron: '15 4 * * *'
jobs:
build-nightly:
uses: ./.github/workflows/nightly.yml
with:
cura_conan_version: "cura/[*]@ultimaker/stable"
release_tag: "nightly-stable"
caller_workflow: "nightly-stable.yml"
secrets: inherit

16
.github/workflows/nightly-testing.yml vendored Normal file
View File

@@ -0,0 +1,16 @@
name: Nightly build - dev release
run-name: Nightly build - dev release
on:
schedule:
# Daily at 4:15 CET
- cron: '15 3 * * *'
jobs:
build-nightly:
uses: ./.github/workflows/nightly.yml
with:
cura_conan_version: "cura/[*]@ultimaker/testing"
release_tag: "nightly-testing" # Fixed version, we reuse the same tag forever
caller_workflow: "nightly-testing.yml"
secrets: inherit

75
.github/workflows/nightly.yml vendored Normal file
View File

@@ -0,0 +1,75 @@
name: Nightly build
run-name: Nightly build
on:
workflow_call:
inputs:
cura_conan_version:
required: true
type: string
release_tag:
required: true
type: string
caller_workflow:
required: true
type: string
jobs:
create-installers:
name: Create installers
uses: ultimaker/cura-workflows/.github/workflows/cura-installers.yml@main
with:
cura_conan_version: ${{ inputs.cura_conan_version }}
secrets: inherit
update-nightly-release:
name: Upload installers
runs-on: ubuntu-latest
needs: [ create-installers ]
steps:
- name: Setup the build environment
uses: ultimaker/cura-workflows/.github/actions/setup-build-environment@main
- name: Download installers jobs artifacts
uses: actions/download-artifact@v4
with:
pattern: UltiMaker-Cura-*
path: installers
merge-multiple: true
- name: Rename the installers
id: rename-installers
working-directory: installers
run: python ../Cura-workflows/runner_scripts/rename_installers.py --tag "nightly" >> $GITHUB_OUTPUT
- name: create the release notes
shell: python
run: |
import os
import datetime
from jinja2 import Template
with open(".github/workflows/nightly_release_notes.md.jinja", "r") as f:
release_notes = Template(f.read())
short_version = "${{ steps.rename-installers.outputs.short_version }}"
caller_workflow = "${{ inputs.caller_workflow }}"
is_main = os.getenv("GITHUB_REF") == "refs/heads/main"
with open("release-notes.md", "w") as f:
f.write(release_notes.render(
timestamp=str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")),
caller_workflow=caller_workflow,
branch_specific="" if is_main else f"?branch={short_version}",
))
- name: Update nightly release description and binaries
run: |
gh release edit ${{ inputs.release_tag }} --title "${{ steps.rename-installers.outputs.cura_version }}" --notes-file release-notes.md
for file in "installers/*"; do
gh release upload ${{ inputs.release_tag }} $file --clobber
done
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
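
(For reference: the inline "create the release notes" step above renders a Jinja template into release-notes.md. A minimal local sketch of that step, assuming the jinja2 package is installed and substituting example values for the workflow expressions:)

    import datetime
    from jinja2 import Template

    # Render the nightly release notes template the same way the workflow step does.
    with open(".github/workflows/nightly_release_notes.md.jinja", "r") as f:
        release_notes = Template(f.read())

    with open("release-notes.md", "w") as f:
        f.write(release_notes.render(
            timestamp=datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
            caller_workflow="nightly-testing.yml",  # example value; provided as a workflow input in CI
            branch_specific="",                     # empty on main, "?branch=<short_version>" on release branches
        ))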

View File

@@ -4,18 +4,17 @@
| | |
|--------------:|--------------------------------------------------------------------------------------------|
| **Nightlies** | [![nightly {{ branch }}](https://github.com/Ultimaker/Cura/actions/workflows/installers.yml/badge.svg{{ branch_specific }}?event=schedule)](https://github.com/Ultimaker/Cura/actions/workflows/installers.yml) |
| **Nightlies** | [![nightly](https://github.com/Ultimaker/Cura/actions/workflows/{{ caller_workflow }}/badge.svg{{ branch_specific }}?event=schedule)](https://github.com/Ultimaker/Cura/actions/workflows/installers.yml) |
# Unit Test results
| | |
|-------------------------------:|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| **Cura {{ branch }}** | [![unit-test](https://github.com/Ultimaker/Cura/actions/workflows/unit-test.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/Cura/actions/workflows/unit-test.yml) |
| **CuraEngine {{ branch }}** | [![unit-test](https://github.com/Ultimaker/CuraEngine/actions/workflows/unit-test.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/CuraEngine/actions/workflows/unit-test.yml) |
| **Uranium {{ branch }}** | [![unit-test](https://github.com/Ultimaker/Uranium/actions/workflows/unit-test.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/Uranium/actions/workflows/unit-test.yml) |
| **CuraEngine GradualFlow 0.1** | [![unit-test](https://github.com/Ultimaker/CuraEngine_plugin_gradual_flow/actions/workflows/unit-test.yml/badge.svg?branch=0.1)](https://github.com/Ultimaker/CuraEngine_plugin_gradual_flow/actions/workflows/unit-test.yml) |
| **synsepalum-dulcificum 0.1** | [![unit-test](https://github.com/Ultimaker/synsepalum-dulcificum/actions/workflows/unit-test.yml/badge.svg?branch=0.1)](https://github.com/Ultimaker/synsepalum-dulcificum/actions/workflows/unit-test.yml) |
| **Cura** | [![unit-test](https://github.com/Ultimaker/Cura/actions/workflows/unit-test.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/Cura/actions/workflows/unit-test.yml) |
| **CuraEngine** | [![unit-test](https://github.com/Ultimaker/CuraEngine/actions/workflows/unit-test.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/CuraEngine/actions/workflows/unit-test.yml) |
| **Uranium** | [![unit-test](https://github.com/Ultimaker/Uranium/actions/workflows/unit-test.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/Uranium/actions/workflows/unit-test.yml) |
| **CuraEngine GradualFlow** | [![unit-test](https://github.com/Ultimaker/CuraEngine_plugin_gradual_flow/actions/workflows/unit-test.yml/badge.svg)](https://github.com/Ultimaker/CuraEngine_plugin_gradual_flow/actions/workflows/unit-test.yml) |
| **synsepalum-dulcificum** | [![unit-test](https://github.com/Ultimaker/synsepalum-dulcificum/actions/workflows/unit-test.yml/badge.svg)](https://github.com/Ultimaker/synsepalum-dulcificum/actions/workflows/unit-test.yml) |
| **libSavitar** | [![unit-test](https://github.com/Ultimaker/libSavitar/actions/workflows/unit-test.yml/badge.svg)](https://github.com/Ultimaker/libSavitar/actions/workflows/unit-test.yml) |
| **libnest2d** | [![unit-test](https://github.com/Ultimaker/libnest2d/actions/workflows/unit-test.yml/badge.svg)](https://github.com/Ultimaker/libnest2d/actions/workflows/unit-test.yml) |
@@ -23,17 +22,17 @@
| | |
|------------------------------------:|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| **Cura {{ branch }}** | [![conan-package](https://github.com/Ultimaker/Cura/actions/workflows/conan-package.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/Cura/actions/workflows/conan-package.yml) |
| **CuraEngine {{ branch }}** | [![conan-package](https://github.com/Ultimaker/CuraEngine/actions/workflows/conan-package.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/CuraEngine/actions/workflows/conan-package.yml) |
| **Uranium {{ branch }}** | [![conan-package](https://github.com/Ultimaker/Uranium/actions/workflows/conan-package.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/Uranium/actions/workflows/conan-package.yml) |
| **fdm_materials {{ branch }}** | [![conan-package](https://github.com/Ultimaker/fdm_materials/actions/workflows/conan-package.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/fdm_materials/actions/workflows/conan-package.yml) |
| **cura-binary-data {{ branch }}** | [![conan-package](https://github.com/Ultimaker/cura-binary-data/actions/workflows/conan-package.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/cura-binary-data/actions/workflows/conan-package.yml) |
| **CuraEngine GradualFlow 0.1** | [![conan-package](https://github.com/Ultimaker/CuraEngine_plugin_gradual_flow/actions/workflows/conan-package.yml/badge.svg?branch=0.1)](https://github.com/Ultimaker/CuraEngine_plugin_gradual_flow/actions/workflows/conan-package.yml) |
| **synsepalum-dulcificum 0.1** | [![conan-package](https://github.com/Ultimaker/synsepalum-dulcificum/actions/workflows/conan-package.yml/badge.svg?branch=0.1)](https://github.com/Ultimaker/synsepalum-dulcificum/actions/workflows/conan-package.yml) |
| **CuraEngine gRPC definitions 0.1** | [![conan-package](https://github.com/Ultimaker/CuraEngine_grpc_definitions/actions/workflows/conan-package.yml/badge.svg?branch=0.1)](https://github.com/Ultimaker/CuraEngine_grpc_definitions/actions/workflows/conan-package.yml) |
| **Cura** | [![conan-package](https://github.com/Ultimaker/Cura/actions/workflows/conan-package.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/Cura/actions/workflows/conan-package.yml) |
| **CuraEngine** | [![conan-package](https://github.com/Ultimaker/CuraEngine/actions/workflows/package.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/CuraEngine/actions/workflows/package.yml) |
| **Uranium** | [![conan-package](https://github.com/Ultimaker/Uranium/actions/workflows/conan-package.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/Uranium/actions/workflows/conan-package.yml) |
| **fdm_materials** | [![conan-package](https://github.com/Ultimaker/fdm_materials/actions/workflows/conan-package.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/fdm_materials/actions/workflows/conan-package.yml) |
| **cura-binary-data** | [![conan-package](https://github.com/Ultimaker/cura-binary-data/actions/workflows/conan-package.yml/badge.svg{{ branch_specific }})](https://github.com/Ultimaker/cura-binary-data/actions/workflows/conan-package.yml) |
| **CuraEngine GradualFlow** | [![conan-package](https://github.com/Ultimaker/CuraEngine_plugin_gradual_flow/actions/workflows/conan-package.yml/badge.svg)](https://github.com/Ultimaker/CuraEngine_plugin_gradual_flow/actions/workflows/conan-package.yml) |
| **synsepalum-dulcificum** | [![conan-package](https://github.com/Ultimaker/synsepalum-dulcificum/actions/workflows/conan-package.yml/badge.svg)](https://github.com/Ultimaker/synsepalum-dulcificum/actions/workflows/conan-package.yml) |
| **CuraEngine gRPC definitions** | [![conan-package](https://github.com/Ultimaker/CuraEngine_grpc_definitions/actions/workflows/conan-package.yml/badge.svg)](https://github.com/Ultimaker/CuraEngine_grpc_definitions/actions/workflows/conan-package.yml) |
| **libArcus** | [![conan-package](https://github.com/Ultimaker/libArcus/actions/workflows/conan-package.yml/badge.svg)](https://github.com/Ultimaker/libArcus/actions/workflows/conan-package.yml) |
| **pyArcus** | [![conan-package](https://github.com/Ultimaker/pyArcus/actions/workflows/conan-package.yml/badge.svg)](https://github.com/Ultimaker/pyArcus/actions/workflows/conan-package.yml) |
| **libSavitar** | [![conan-package](https://github.com/Ultimaker/libSavitar/actions/workflows/conan-package.yml/badge.svg)](https://github.com/Ultimaker/libSavitar/actions/workflows/conan-package.yml) |
| **pySavitar** | [![conan-package](https://github.com/Ultimaker/pySavitar/actions/workflows/conan-package.yml/badge.svg)](https://github.com/Ultimaker/pySavitar/actions/workflows/conan-package.yml) |
| **libnest2d** | [![conan-package](https://github.com/Ultimaker/libnest2d/actions/workflows/conan-package.yml/badge.svg)](https://github.com/Ultimaker/libnest2d/actions/workflows/conan-package.yml) |
| **pynest2d** | [![conan-package](https://github.com/Ultimaker/pynest2d/actions/workflows/conan-package.yml/badge.svg)](https://github.com/Ultimaker/pynest2d/actions/workflows/conan-package.yml) |
| **pynest2d** | [![conan-package](https://github.com/Ultimaker/pynest2d/actions/workflows/conan-package.yml/badge.svg)](https://github.com/Ultimaker/pynest2d/actions/workflows/conan-package.yml) |

View File

@@ -1,36 +0,0 @@
name: notify_on_print_profile_change
on:
push:
branches: [ "main" ]
paths:
- 'resources/definitions/fdmprinter.def.json'
- 'resources/definitions/ultimaker**'
- 'resources/extruders/ultimaker**'
- 'resources/intent/ultimaker**'
- 'resources/quality/ultimaker**'
- 'resources/variants/ultimaker**'
pull_request:
branches: [ "main" ]
paths:
- 'resources/definitions/fdmprinter.def.json'
- 'resources/definitions/ultimaker**'
- 'resources/extruders/ultimaker**'
- 'resources/intent/ultimaker**'
- 'resources/quality/ultimaker**'
- 'resources/variants/ultimaker**'
permissions: {}
jobs:
slackNotification:
name: Slack Notification
runs-on: ubuntu-latest
steps:
- name: Ultimaker Print Profile Changed
uses: rtCamp/action-slack-notify@v2
env:
SLACK_CHANNEL: profile-changes
SLACK_USERNAME: ${{ github.repository }}
SLACK_COLOR: '#00FF00'
SLACK_TITLE: Print profiles changed
MSG_MINIMAL: commit
SLACK_WEBHOOK: ${{ secrets.SLACK_CURA_PPM_HOOK }}

View File

@@ -1,46 +1,21 @@
name: printer-linter-format
on:
push:
paths:
- 'resources/definitions/**'
- 'resources/extruders/**'
- 'resources/intent/**'
- 'resources/quality/**'
- 'resources/variants/**'
push:
paths:
- 'resources/definitions/**'
- 'resources/extruders/**'
- 'resources/intent/**'
- 'resources/quality/**'
- 'resources/variants/**'
jobs:
printer-linter-format:
name: Printer linter auto format
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
- uses: technote-space/get-diff-action@v6
with:
PATTERNS: |
resources/+(definitions|extruders)/*.def.json
resources/+(intent|quality|variants)/**/*.inst.cfg
- name: Setup Python and pip
if: env.GIT_DIFF && !env.MATCHED_FILES # If nothing happens with python and/or pip after, the clean-up crashes.
uses: actions/setup-python@v4
with:
python-version: 3.11.x
cache: 'pip'
cache-dependency-path: .github/workflows/requirements-printer-linter.txt
- name: Install Python requirements for runner
if: env.GIT_DIFF && !env.MATCHED_FILES
run: pip install -r .github/workflows/requirements-printer-linter.txt
- name: Format file
if: env.GIT_DIFF && !env.MATCHED_FILES
run: python printer-linter/src/terminal.py --format ${{ env.GIT_DIFF_FILTERED }}
- uses: stefanzweifel/git-auto-commit-action@v4
if: env.GIT_DIFF && !env.MATCHED_FILES
with:
commit_message: "Applied printer-linter format"
printer-linter-format:
name: Printer linter auto format
uses: ultimaker/cura-workflows/.github/workflows/lint-formatter.yml@main
with:
file_patterns: |
resources/+(definitions|extruders)/*.def.json
resources/+(intent|quality|variants)/**/*.inst.cfg
command: python printer-linter/src/terminal.py --format
commit_message: "Apply printer-linter format"
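
(Note: the reusable lint-formatter workflow above presumably boils down to running the repository's formatter command over the files matched by file_patterns. A rough local equivalent, sketched in Python; the glob translation is only an approximation of those patterns:)

    import glob
    import subprocess

    # Collect the definition and profile files the printer linter formats.
    files = (
        glob.glob("resources/definitions/*.def.json")
        + glob.glob("resources/extruders/*.def.json")
        + glob.glob("resources/intent/**/*.inst.cfg", recursive=True)
        + glob.glob("resources/quality/**/*.inst.cfg", recursive=True)
        + glob.glob("resources/variants/**/*.inst.cfg", recursive=True)
    )

    # Same command the workflow passes as `command`, applied to the matched files.
    subprocess.run(["python", "printer-linter/src/terminal.py", "--format", *files], check=True)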

View File

@@ -14,30 +14,16 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 2
- name: Setup the build environment
uses: ultimaker/cura-workflows/.github/actions/setup-build-environment@main
- uses: technote-space/get-diff-action@v6
- uses: greguintow/get-diff-action@v7
with:
DIFF_FILTER: AMRCD
PATTERNS: |
resources/+(extruders|definitions)/*.def.json
resources/+(intent|quality|variants)/**/*.inst.cfg
- name: Setup Python and pip
if: env.GIT_DIFF && !env.MATCHED_FILES # If nothing happens with python and/or pip after, the clean-up crashes.
uses: actions/setup-python@v4
with:
python-version: 3.11.x
cache: "pip"
cache-dependency-path: .github/workflows/requirements-printer-linter.txt
- name: Install Python requirements for runner
if: env.GIT_DIFF && !env.MATCHED_FILES
run: pip install -r .github/workflows/requirements-printer-linter.txt
- name: Create results directory
run: mkdir printer-linter-result
@@ -55,7 +41,7 @@ jobs:
echo ${{ github.event.pull_request.head.repo.full_name }} > printer-linter-result/pr-head-repo.txt
echo ${{ github.event.pull_request.head.sha }} > printer-linter-result/pr-head-sha.txt
- uses: actions/upload-artifact@v2
- uses: actions/upload-artifact@v4
with:
name: printer-linter-result
path: printer-linter-result/

View File

@@ -15,6 +15,7 @@ jobs:
runs-on: ubuntu-latest
outputs:
package_version: ${{ steps.version_parser.outputs.major }}.${{ steps.version_parser.outputs.minor }}.0-alpha.1
branch: ${{ steps.version_parser.outputs.major }}.${{ steps.version_parser.outputs.minor }}
steps:
- name: Parse version string
id: version_parser
@@ -28,5 +29,6 @@ jobs:
needs: [parse-version]
with:
cura_version: ${{ needs.parse-version.outputs.package_version }}
branch: ${{ needs.parse-version.outputs.branch }}
create_feature_branch: true
secrets: inherit

View File

@@ -9,6 +9,11 @@ on:
required: true
type: string
publish_release_description:
description: 'Create the GitHub release (if existing, the description will be overridden based on the changelog)'
required: true
type: boolean
jobs:
parse-version:
name: Parse input version string
@@ -31,6 +36,7 @@ jobs:
needs: [parse-version]
with:
cura_version: ${{ inputs.cura_version }}
branch: ${{ needs.parse-version.outputs.branch_name }}
create_feature_branch: false
secrets: inherit
@@ -65,6 +71,8 @@ jobs:
name: Create tags
runs-on: ubuntu-latest
needs: [parse-version, find-rc-tag]
outputs:
main_commit: ${{ steps.export-main-commit.outputs.main_commit }}
strategy:
matrix:
repository: [Cura, Uranium, CuraEngine, cura-binary-data, fdm_materials]
@@ -76,13 +84,24 @@ jobs:
ref: ${{ needs.parse-version.outputs.branch_name }}
token: ${{ secrets.CURA_AUTORELEASE_PAT }}
- name: Create tag
- name: Create RC tag
run: |
git tag ${{ needs.find-rc-tag.outputs.tag_name }}
git push origin tag ${{ needs.find-rc-tag.outputs.tag_name }}
create-dependencies-packages:
name: Create conan packages for dependencies
- name: Create or update release tag
run: |
git tag -f ${{ inputs.cura_version }}
git push -f origin tag ${{ inputs.cura_version }}
- name: Export Cura tagged commit
id: export-main-commit
if: ${{ matrix.repository == 'Cura' }}
run: |
echo "main_commit=`git rev-parse HEAD`" >> "$GITHUB_OUTPUT"
create-packages:
name: Create conan packages
uses: ultimaker/cura-workflows/.github/workflows/conan-package-release.yml@main
needs: [parse-version, freeze-packages-versions]
strategy:
@@ -94,44 +113,23 @@ jobs:
conan_recipe_root: "resources"
with:
repository: ${{ matrix.repository }}
ref_name: ${{ needs.parse-version.outputs.branch_name }}
version: ${{ inputs.cura_version }}
conan_release: true
conan_user_channel: ultimaker/stable
conan_internal: false
conan_latest: true
branch: ${{ needs.parse-version.outputs.branch_name }}
conan_recipe_root: ${{ matrix.conan_recipe_root }}
secrets: inherit
create-cura-package:
name: Create conan package for Cura
uses: ultimaker/cura-workflows/.github/workflows/conan-package-release.yml@main
needs: [parse-version, create-dependencies-packages]
with:
repository: Cura
ref_name: ${{ needs.parse-version.outputs.branch_name }}
version: ${{ inputs.cura_version }}
conan_release: true
conan_user_channel: ultimaker/stable
conan_internal: false
conan_latest: true
secrets: inherit
create-installers:
name: Create installers
uses: ./.github/workflows/installers.yml
needs: [parse-version, create-cura-package]
uses: ultimaker/cura-workflows/.github/workflows/cura-installers.yml@main
needs: [parse-version, create-packages]
with:
cura_conan_version: cura/${{ inputs.cura_version }}@/
enterprise: false
staging: false
nightly: false
cura_conan_version: cura/${{ inputs.cura_version }}@ultimaker/stable
conan_args: "-c user.sentry:environment=production"
secrets: inherit
create-release-draft:
name: Create the release draft
runs-on: ubuntu-latest
needs: [create-installers, parse-version]
needs: [create-installers, parse-version, create-tags]
steps:
- name: Checkout Cura repo
uses: actions/checkout@v4
@@ -139,18 +137,14 @@ jobs:
ref: ${{ needs.parse-version.outputs.branch_name }}
- name: Extract changelog
if: ${{ inputs.publish_release_description }}
run: python ./scripts/extract_changelog.py --version ${{ needs.parse-version.outputs.version_major }}.${{ needs.parse-version.outputs.version_minor }}.${{ needs.parse-version.outputs.version_patch }} --changelog ./resources/texts/change_log.txt > formatted_changelog.txt
- name: Get commit id for release
id: get-commit-id
uses: iawia002/get-tag-or-commit-id@v1.0.1
with:
length: 40
- name: Create release
uses: notpeelz/action-gh-create-release@v5.0.1
if: ${{ inputs.publish_release_description }}
with:
target: ${{ steps.get-commit-id.outputs.id }}
target: ${{ needs.create-tags.outputs.main_commit }}
tag: ${{ inputs.cura_version }}
strategy: replace
title: UltiMaker Cura ${{ inputs.cura_version }}
@@ -159,7 +153,7 @@ jobs:
body: formatted_changelog.txt
- name: Download artifacts
uses: actions/download-artifact@v4.1.7
uses: actions/download-artifact@v4
with:
path: artifacts
merge-multiple: true

View File

@@ -1,2 +0,0 @@
conan>=1.60.2,<2.0.0
sip<=6.7.12

View File

@@ -1 +0,0 @@
pyyaml

View File

@@ -9,6 +9,5 @@ jobs:
publish-test-results:
uses: ultimaker/cura-workflows/.github/workflows/unit-test-post.yml@main
with:
event: ${{ github.event.workflow_run.event }}
conclusion: ${{ github.event.workflow_run.conclusion }}
workflow_run_json: ${{ toJSON(github.event.workflow_run) }}
secrets: inherit

View File

@@ -37,26 +37,9 @@ on:
- main
- '[0-9]+.[0-9]+'
permissions:
contents: read
env:
CONAN_LOGIN_USERNAME: ${{ secrets.CONAN_USER }}
CONAN_PASSWORD: ${{ secrets.CONAN_PASS }}
jobs:
conan-recipe-version:
uses: ultimaker/cura-workflows/.github/workflows/conan-recipe-version.yml@main
with:
project_name: cura
testing:
name: Run unit tests
uses: ultimaker/cura-workflows/.github/workflows/unit-test.yml@main
needs: [ conan-recipe-version ]
with:
recipe_id_full: ${{ needs.conan-recipe-version.outputs.recipe_id_full }}
conan_extra_args: '-g VirtualPythonEnv -o cura:devtools=True -c tools.build:skip_test=False --options "*:enable_sentry=False"'
unit_test_cmd: 'pytest --junitxml=junit_cura.xml'
unit_test_dir: 'tests'
conan_generator_dir: './venv/bin'
secrets: inherit
test_use_pytest: true

View File

@@ -1,87 +1,15 @@
name: update-translations
name: Update translations
on:
push:
paths:
- 'plugins/**'
- 'resources/**'
- 'cura/**'
- 'icons/**'
- 'tests/**'
- 'packaging/**'
- '.github/workflows/conan-*.yml'
- '.github/workflows/notify.yml'
- '.github/workflows/requirements-conan-package.txt'
- 'requirements*.txt'
- 'conanfile.py'
- 'conandata.yml'
- 'GitVersion.yml'
- '*.jinja'
branches:
- '[1-9].[0-9]'
- '[1-9].[0-9][0-9]'
tags:
- '[1-9].[0-9].[0-9]*'
- '[1-9].[0-9].[0-9]'
- '[1-9].[0-9][0-9].[0-9]*'
workflow_dispatch:
inputs:
branch:
description: 'Specific branch to update translations on'
required: false
type: string
jobs:
update-translations:
name: Update translations
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Cache Conan data
id: cache-conan
uses: actions/cache@v3
with:
path: ~/.conan
key: ${{ runner.os }}-conan
- name: Setup Python and pip
uses: actions/setup-python@v4
with:
python-version: 3.11.x
cache: pip
cache-dependency-path: .github/workflows/requirements-conan-package.txt
- name: Install Python requirements for runner
run: pip install -r .github/workflows/requirements-conan-package.txt
# NOTE: Due to what are probably github issues, we have to remove the cache and reconfigure before the rest.
# This is maybe because grub caches the disk it uses last time, which is recreated each time.
- name: Install Linux system requirements
if: ${{ runner.os == 'Linux' }}
run: |
sudo rm /var/cache/debconf/config.dat
sudo dpkg --configure -a
sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
sudo apt update
sudo apt upgrade
sudo apt install efibootmgr build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config flex bison g++-12 gcc-12 -y
- name: Install GCC-13
run: |
sudo apt install g++-13 gcc-13 -y
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 13
sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-13 13
- name: Create the default Conan profile
run: conan profile new default --detect --force
- name: Get Conan configuration
run: |
conan config install https://github.com/Ultimaker/conan-config.git
conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
- name: generate the files using Conan install
run: conan install . --build=missing --update -o cura:devtools=True
- uses: stefanzweifel/git-auto-commit-action@v4
with:
file_pattern: resources/i18n/*.po resources/i18n/*.pot
status_options: --untracked-files=no
commit_message: update translations
uses: ultimaker/cura-workflows/.github/workflows/update-translations.yml@main
with:
branch: ${{ inputs.branch }}

View File

@@ -6,11 +6,14 @@ on:
inputs:
cura_conan_version:
description: 'Cura Conan Version'
default: 'cura/latest@ultimaker/testing'
required: true
default: ''
type: string
package_overrides:
description: 'List of specific packages to be used (space-separated)'
default: ''
type: string
conan_args:
description: 'Conan args: eq.: --require-override'
description: 'Conan args'
default: ''
required: false
type: string
@@ -24,13 +27,6 @@ on:
default: false
required: true
type: boolean
architecture:
description: 'Architecture'
required: true
default: 'X64'
type: choice
options:
- X64
operating_system:
description: 'OS'
required: true
@@ -45,9 +41,9 @@ jobs:
uses: ultimaker/cura-workflows/.github/workflows/cura-installer-windows.yml@main
with:
cura_conan_version: ${{ inputs.cura_conan_version }}
package_overrides: ${{ inputs.package_overrides }}
conan_args: ${{ inputs.conan_args }}
enterprise: ${{ inputs.enterprise }}
staging: ${{ inputs.staging }}
architecture: ${{ inputs.architecture }}
operating_system: ${{ inputs.operating_system }}
secrets: inherit

View File

@@ -4,6 +4,7 @@
CuraAppName = "{{ cura_app_name }}"
CuraAppDisplayName = "{{ cura_app_display_name }}"
CuraVersion = "{{ cura_version }}"
CuraVersionFull = "{{ cura_version_full }}"
CuraBuildType = "{{ cura_build_type }}"
CuraDebugMode = {{ cura_debug_mode }}
CuraCloudAPIRoot = "{{ cura_cloud_api_root }}"
@@ -14,4 +15,5 @@ CuraDigitalFactoryURL = "{{ cura_digital_factory_url }}"
CuraLatestURL = "{{ cura_latest_url }}"
ConanInstalls = {{ conan_installs }}
PythonInstalls = {{ python_installs }}

View File

@@ -28,7 +28,7 @@ a = Analysis(
binaries=binaries,
datas=datas,
hiddenimports=hiddenimports,
hookspath=[],
hookspath=["Cura-workflows/runner_scripts/pyinstaller_hooks"],
hooksconfig={},
runtime_hooks=[],
excludes=[],
@@ -75,7 +75,7 @@ app = BUNDLE(
coll,
name='{{ display_name }}.app',
icon={{ icon }},
bundle_identifier={{ osx_bundle_identifier }} + "_" + '{{ display_name }}'.replace(" ", "_") + "_" {{ short_version }},
bundle_identifier={{ osx_bundle_identifier }} + "_" + '{{ display_name }}'.replace(" ", "_"),
version={{ version }},
info_plist={
'CFBundleDisplayName': '{{ display_name }}',

View File

@@ -1,18 +1,18 @@
version: "5.8.1"
version: "5.10.0-alpha.0"
requirements:
- "cura_resources/5.8.1"
- "uranium/5.8.1"
- "curaengine/5.8.1"
- "cura_binary_data/5.8.1"
- "fdm_materials/5.8.1"
- "curaengine_plugin_gradual_flow/0.1.0-beta.4"
- "dulcificum/0.2.1"
- "pysavitar/5.3.0"
- "pynest2d/5.3.0"
- "native_cad_plugin/2.0.0"
- "cura_resources/5.10.0-alpha.0@ultimaker/testing"
- "uranium/5.10.0-alpha.0@ultimaker/testing"
- "curaengine/5.10.0-alpha.0@ultimaker/testing"
- "cura_binary_data/5.10.0-alpha.0@ultimaker/testing"
- "fdm_materials/5.10.0-alpha.0@ultimaker/testing"
- "dulcificum/0.3.0@ultimaker/stable"
- "pysavitar/5.4.0-alpha.0@ultimaker/stable"
- "pynest2d/5.4.0-alpha.0@ultimaker/stable"
requirements_internal:
- "fdm_materials/5.8.1"
- "cura_private_data/(latest)@internal/testing"
- "fdm_materials/5.10.0-alpha.0@internal/testing"
- "cura_private_data/5.10.0-alpha.0@internal/testing"
requirements_enterprise:
- "native_cad_plugin/2.0.0"
urls:
default:
cloud_api_root: "https://api.ultimaker.com"
@@ -26,6 +26,7 @@ urls:
marketplace_root: "https://marketplace-staging.ultimaker.com"
digital_factory_url: "https://digitalfactory-staging.ultimaker.com"
cura_latest_url: "https://software.ultimaker.com/latest.json"
pyinstaller:
runinfo:
entrypoint: "cura_app.py"
@@ -34,22 +35,16 @@ pyinstaller:
package: "cura"
src: "plugins"
dst: "share/cura/plugins"
curaengine_gradual_flow_plugin:
package: "curaengine_plugin_gradual_flow"
src: "res/plugins/CuraEngineGradualFlow"
dst: "share/cura/plugins/CuraEngineGradualFlow"
curaengine_gradual_flow_plugin_bundled:
package: "curaengine_plugin_gradual_flow"
native_cad_plugin:
package: "native_cad_plugin"
src: "res/plugins/NativeCADplugin"
dst: "share/cura/plugins/NativeCADplugin"
enterprise_only: true
native_cad_plugin_bundled:
package: "native_cad_plugin"
src: "res/bundled_packages"
dst: "share/cura/resources/bundled_packages"
native_cad_plugin:
package: "native_cad_plugin"
src: "res/plugins/NativeCADplugin"
dst: "share/cura/plugins/NativeCADplugin"
native_cad_plugin_bundled:
package: "native_cad_plugin"
src: "res/bundled_packages"
dst: "share/cura/resources/bundled_packages"
enterprise_only: true
cura_resources:
package: "cura"
src: "resources"
@@ -87,29 +82,18 @@ pyinstaller:
package: "cura_binary_data"
src: "windows"
dst: "share/windows"
oses:
- "Windows"
fdm_materials:
package: "fdm_materials"
src: "res/resources/materials"
dst: "share/cura/resources/materials"
tcl:
package: "tcl"
src: "lib/tcl8.6"
dst: "tcl"
tk:
package: "tk"
src: "lib/tk8.6"
dst: "tk"
binaries:
curaengine:
package: "curaengine"
src: "bin"
dst: "."
binary: "CuraEngine"
curaengine_gradual_flow_plugin_service:
package: "curaengine_plugin_gradual_flow"
src: "bin"
dst: "."
binary: "curaengine_plugin_gradual_flow"
hiddenimports:
- "pySavitar"
- "pyArcus"
@@ -123,6 +107,12 @@ pyinstaller:
- "fcntl"
- "stl"
- "serial"
- "win32cred"
- "win32timezone"
- "pkgutil"
hiddenimports_WINDOWS_ONLY:
- "PyQt6.Qt"
- "PyQt6.Qt6"
collect_all:
- "cura"
- "UM"
@@ -134,10 +124,28 @@ pyinstaller:
- "PyQt6.QtNetwork"
- "PyQt6.sip"
- "stl"
- "keyrings.alt"
collect_all_WINDOWS_ONLY:
- "PyQt6.Qt"
- "PyQt6.Qt6"
icon:
Windows: "./icons/Cura.ico"
Macos: "./icons/cura.icns"
Linux: "./icons/cura-128.png"
blacklist:
- [ "assimp" ]
- [ "qt", "charts" ]
- [ "qt", "coap" ]
- [ "qt", "data", "vis" ]
- [ "qt", "lab", "animat" ]
- [ "qt", "mqtt" ]
- [ "qt", "net", "auth" ]
- [ "quick3d" ]
- [ "qt", "timeline" ]
- [ "qt", "virt", "key" ]
- [ "qt", "wayland", "compos" ]
- [ "qt", "5", "compat" ]
pycharm_targets:
- jinja_path: .run_templates/pycharm_cura_run.run.xml.jinja
module_name: Cura
@@ -256,3 +264,346 @@ pycharm_targets:
module_name: Cura
name: pytest in TestSettingVisibilityPresets.py
script_name: tests/Settings/TestSettingVisibilityPresets.py
pip_requirements_core:
any_os:
charon:
url: "git+https://github.com/ultimaker/libcharon@master/s-line#egg=charon"
certifi:
version: "2023.5.7"
hashes:
- sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716
- sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7
zeroconf:
version: "0.31.0"
hashes:
- sha256:5a468da018bc3f04bbce77ae247924d802df7aeb4c291bbbb5a9616d128800b0
- sha256:53a180248471c6f81bd1fffcbce03ed93d7d8eaf10905c9121ac1ea996d19844
importlib-metadata:
version: "4.10.0"
hashes:
- sha256:b7cf7d3fef75f1e4c80a96ca660efbd51473d7e8f39b5ab9210febc7809012a4
- sha256:92a8b58ce734b2a4494878e0ecf7d79ccd7a128b5fc6014c401e0b61f006f0f6
trimesh:
version: "3.9.36"
hashes:
- sha256:8ac8bea693b3ee119f11b022fc9b9481c9f1af06cb38bc859bf5d16bbbe49b23
- sha256:f01e8edab14d1999700c980c21a1546f37417216ad915a53be649d263130181e
setuptools:
version: "75.6.0"
hashes:
- sha256:ce74b49e8f7110f9bf04883b730f4765b774ef3ef28f722cce7c273d253aaf7d
- sha256:8199222558df7c86216af4f84c30e9b34a61d8ba19366cc914424cdbd28252f6
sentry-sdk:
version: "0.13.5"
hashes:
- sha256:05285942901d38c7ce2498aba50d8e87b361fc603281a5902dda98f3f8c5e145
- sha256:c6b919623e488134a728f16326c6f0bcdab7e3f59e7f4c472a90eea4d6d8fe82
pyserial:
version: "3.4"
hashes:
- sha256:e0770fadba80c31013896c7e6ef703f72e7834965954a78e71a3049488d4d7d8
- sha256:6e2d401fdee0eab996cf734e67773a0143b932772ca8b42451440cfed942c627
chardet:
version: "3.0.4"
hashes:
- sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691
- sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae
idna:
version: "2.8"
hashes:
- sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c
- sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407
attrs:
version: "21.3.0"
hashes:
- sha256:8f7335278dedd26b58c38e006338242cc0977f06d51579b2b8b87b9b33bff66c
- sha256:50f3c9b216dc9021042f71b392859a773b904ce1a029077f58f6598272432045
requests:
version: "2.32.3"
hashes:
- sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6
- sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760
twisted:
version: "21.2.0"
hashes:
- sha256:aab38085ea6cda5b378b519a0ec99986874921ee8881318626b0a3414bb2631e
- sha256:77544a8945cf69b98d2946689bbe0c75de7d145cdf11f391dd487eae8fc95a12
constantly:
version: "15.1.0"
hashes:
- sha256:dd2fa9d6b1a51a83f0d7dd76293d734046aa176e384bf6e33b7e44880eb37c5d
- sha256:586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35
hyperlink:
version: "21.0.0"
hashes:
- sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4
- sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b
incremental:
version: "22.10.0"
hashes:
- sha256:b864a1f30885ee72c5ac2835a761b8fe8aa9c28b9395cacf27286602688d3e51
- sha256:912feeb5e0f7e0188e6f42241d2f450002e11bbc0937c65865045854c24c0bd0
zope.interface:
version: "5.4.0"
hashes:
- sha256:7df1e1c05304f26faa49fa752a8c690126cf98b40b91d54e6e9cc3b7d6ffe8b7
- sha256:2c98384b254b37ce50eddd55db8d381a5c53b4c10ee66e1e7fe749824f894021
- sha256:08f9636e99a9d5410181ba0729e0408d3d8748026ea938f3b970a0249daa8192
- sha256:0ea1d73b7c9dcbc5080bb8aaffb776f1c68e807767069b9ccdd06f27a161914a
- sha256:273f158fabc5ea33cbc936da0ab3d4ba80ede5351babc4f577d768e057651531
- sha256:f7ee479e96f7ee350db1cf24afa5685a5899e2b34992fb99e1f7c1b0b758d263
- sha256:b0297b1e05fd128d26cc2460c810d42e205d16d76799526dfa8c8ccd50e74959
- sha256:af310ec8335016b5e52cae60cda4a4f2a60a788cbb949a4fbea13d441aa5a09e
- sha256:9a9845c4c6bb56e508651f005c4aeb0404e518c6f000d5a1123ab077ab769f5c
- sha256:a1e6e96217a0f72e2b8629e271e1b280c6fa3fe6e59fa8f6701bec14e3354325
- sha256:877473e675fdcc113c138813a5dd440da0769a2d81f4d86614e5d62b69497155
- sha256:0b465ae0962d49c68aa9733ba92a001b2a0933c317780435f00be7ecb959c702
- sha256:5dd9ca406499444f4c8299f803d4a14edf7890ecc595c8b1c7115c2342cadc5f
- sha256:469e2407e0fe9880ac690a3666f03eb4c3c444411a5a5fddfdabc5d184a79f05
- sha256:52de7fc6c21b419078008f697fd4103dbc763288b1406b4562554bd47514c004
- sha256:3dd4952748521205697bc2802e4afac5ed4b02909bb799ba1fe239f77fd4e117
- sha256:dd93ea5c0c7f3e25335ab7d22a507b1dc43976e1345508f845efc573d3d779d8
- sha256:3748fac0d0f6a304e674955ab1365d515993b3a0a865e16a11ec9d86fb307f63
- sha256:66c0061c91b3b9cf542131148ef7ecbecb2690d48d1612ec386de9d36766058f
- sha256:d0c1bc2fa9a7285719e5678584f6b92572a5b639d0e471bb8d4b650a1a910920
- sha256:2876246527c91e101184f63ccd1d716ec9c46519cc5f3d5375a3351c46467c46
- sha256:334701327f37c47fa628fc8b8d28c7d7730ce7daaf4bda1efb741679c2b087fc
- sha256:71aace0c42d53abe6fc7f726c5d3b60d90f3c5c055a447950ad6ea9cec2e37d9
- sha256:5bb3489b4558e49ad2c5118137cfeaf59434f9737fa9c5deefc72d22c23822e2
- sha256:1c0e316c9add0db48a5b703833881351444398b04111188069a26a61cfb4df78
- sha256:6f0c02cbb9691b7c91d5009108f975f8ffeab5dff8f26d62e21c493060eff2a1
- sha256:7d97a4306898b05404a0dcdc32d9709b7d8832c0c542b861d9a826301719794e
- sha256:867a5ad16892bf20e6c4ea2aab1971f45645ff3102ad29bd84c86027fa99997b
- sha256:5f931a1c21dfa7a9c573ec1f50a31135ccce84e32507c54e1ea404894c5eb96f
- sha256:194d0bcb1374ac3e1e023961610dc8f2c78a0f5f634d0c737691e215569e640d
- sha256:8270252effc60b9642b423189a2fe90eb6b59e87cbee54549db3f5562ff8d1b8
- sha256:15e7d1f7a6ee16572e21e3576d2012b2778cbacf75eb4b7400be37455f5ca8bf
- sha256:8892f89999ffd992208754851e5a052f6b5db70a1e3f7d54b17c5211e37a98c7
- sha256:2e5a26f16503be6c826abca904e45f1a44ff275fdb7e9d1b75c10671c26f8b94
- sha256:0f91b5b948686659a8e28b728ff5e74b1be6bf40cb04704453617e5f1e945ef3
- sha256:4de4bc9b6d35c5af65b454d3e9bc98c50eb3960d5a3762c9438df57427134b8e
- sha256:bf68f4b2b6683e52bec69273562df15af352e5ed25d1b6641e7efddc5951d1a7
- sha256:63b82bb63de7c821428d513607e84c6d97d58afd1fe2eb645030bdc185440120
- sha256:db1fa631737dab9fa0b37f3979d8d2631e348c3b4e8325d6873c2541d0ae5a48
- sha256:f44e517131a98f7a76696a7b21b164bcb85291cee106a23beccce454e1f433a4
- sha256:a9506a7e80bcf6eacfff7f804c0ad5350c8c95b9010e4356a4b36f5322f09abb
- sha256:3c02411a3b62668200910090a0dff17c0b25aaa36145082a5a6adf08fa281e54
- sha256:0cee5187b60ed26d56eb2960136288ce91bcf61e2a9405660d271d1f122a69a4
- sha256:a8156e6a7f5e2a0ff0c5b21d6bcb45145efece1909efcbbbf48c56f8da68221d
- sha256:205e40ccde0f37496904572035deea747390a8b7dc65146d30b96e2dd1359a83
- sha256:3f24df7124c323fceb53ff6168da70dbfbae1442b4f3da439cd441681f54fe25
- sha256:5208ebd5152e040640518a77827bdfcc73773a15a33d6644015b763b9c9febc1
- sha256:17776ecd3a1fdd2b2cd5373e5ef8b307162f581c693575ec62e7c5399d80794c
- sha256:d4d9d6c1a455d4babd320203b918ccc7fcbefe308615c521062bc2ba1aa4d26e
- sha256:0cba8477e300d64a11a9789ed40ee8932b59f9ee05f85276dbb4b59acee5dd09
- sha256:5dba5f530fec3f0988d83b78cc591b58c0b6eb8431a85edd1569a0539a8a5a0e
automat:
version: "20.2.0"
hashes:
- sha256:b6feb6455337df834f6c9962d6ccf771515b7d939bca142b29c20c2376bc6111
- sha256:7979803c74610e11ef0c0d68a2942b152df52da55336e0c9d58daf1831cbdf33
shapely:
version: "2.0.6"
hashes:
- sha256:29a34e068da2d321e926b5073539fd2a1d4429a2c656bd63f0bd4c8f5b236d0b
- sha256:e1c84c3f53144febf6af909d6b581bc05e8785d57e27f35ebaa5c1ab9baba13b
- sha256:2ad2fae12dca8d2b727fa12b007e46fbc522148a584f5d6546c539f3464dccde
- sha256:b3304883bd82d44be1b27a9d17f1167fda8c7f5a02a897958d86c59ec69b705e
- sha256:3ec3a0eab496b5e04633a39fa3d5eb5454628228201fb24903d38174ee34565e
- sha256:28f87cdf5308a514763a5c38de295544cb27429cfa655d50ed8431a4796090c4
- sha256:5aeb0f51a9db176da9a30cb2f4329b6fbd1e26d359012bb0ac3d3c7781667a9e
- sha256:9a7a78b0d51257a367ee115f4d41ca4d46edbd0dd280f697a8092dd3989867b2
- sha256:f32c23d2f43d54029f986479f7c1f6e09c6b3a19353a3833c2ffb226fb63a855
- sha256:b3dc9fb0eb56498912025f5eb352b5126f04801ed0e8bdbd867d21bdbfd7cbd0
- sha256:d93b7e0e71c9f095e09454bf18dad5ea716fb6ced5df3cb044564a00723f339d
- sha256:c02eb6bf4cfb9fe6568502e85bb2647921ee49171bcd2d4116c7b3109724ef9b
- sha256:cec9193519940e9d1b86a3b4f5af9eb6910197d24af02f247afbfb47bcb3fab0
- sha256:83b94a44ab04a90e88be69e7ddcc6f332da7c0a0ebb1156e1c4f568bbec983c3
- sha256:537c4b2716d22c92036d00b34aac9d3775e3691f80c7aa517c2c290351f42cd8
- sha256:98fea108334be345c283ce74bf064fa00cfdd718048a8af7343c59eb40f59726
- sha256:42fd4cd4834747e4990227e4cbafb02242c0cffe9ce7ef9971f53ac52d80d55f
- sha256:665990c84aece05efb68a21b3523a6b2057e84a1afbef426ad287f0796ef8a48
- sha256:42805ef90783ce689a4dde2b6b2f261e2c52609226a0438d882e3ced40bb3013
- sha256:6d2cb146191a47bd0cee8ff5f90b47547b82b6345c0d02dd8b25b88b68af62d7
- sha256:e3fdef0a1794a8fe70dc1f514440aa34426cc0ae98d9a1027fb299d45741c381
- sha256:2c665a0301c645615a107ff7f52adafa2153beab51daf34587170d85e8ba6805
- sha256:0334bd51828f68cd54b87d80b3e7cee93f249d82ae55a0faf3ea21c9be7b323a
- sha256:d37d070da9e0e0f0a530a621e17c0b8c3c9d04105655132a87cfff8bd77cc4c2
- sha256:fa7468e4f5b92049c0f36d63c3e309f85f2775752e076378e36c6387245c5462
- sha256:ed5867e598a9e8ac3291da6cc9baa62ca25706eea186117034e8ec0ea4355653
- sha256:81d9dfe155f371f78c8d895a7b7f323bb241fb148d848a2bf2244f79213123fe
- sha256:fbb7bf02a7542dba55129062570211cfb0defa05386409b3e306c39612e7fbcc
- sha256:837d395fac58aa01aa544495b97940995211e3e25f9aaf87bc3ba5b3a8cd1ac7
- sha256:c6d88ade96bf02f6bfd667ddd3626913098e243e419a0325ebef2bbd481d1eb6
- sha256:8b3b818c4407eaa0b4cb376fd2305e20ff6df757bf1356651589eadc14aab41b
- sha256:1bbc783529a21f2bd50c79cef90761f72d41c45622b3e57acf78d984c50a5d13
- sha256:2423f6c0903ebe5df6d32e0066b3d94029aab18425ad4b07bf98c3972a6e25a1
- sha256:2de00c3bfa80d6750832bde1d9487e302a6dd21d90cb2f210515cefdb616e5f5
- sha256:3a82d58a1134d5e975f19268710e53bddd9c473743356c90d97ce04b73e101ee
- sha256:392f66f458a0a2c706254f473290418236e52aa4c9b476a072539d63a2460595
- sha256:eba5bae271d523c938274c61658ebc34de6c4b33fdf43ef7e938b5776388c1be
- sha256:7060566bc4888b0c8ed14b5d57df8a0ead5c28f9b69fb6bed4476df31c51b0af
- sha256:b02154b3e9d076a29a8513dffcb80f047a5ea63c897c0cd3d3679f29363cf7e5
- sha256:44246d30124a4f1a638a7d5419149959532b99dfa25b54393512e6acc9c211ac
- sha256:2b542d7f1dbb89192d3512c52b679c822ba916f93479fa5d4fc2fe4fa0b3c9e8
- sha256:997f6159b1484059ec239cacaa53467fd8b5564dabe186cd84ac2944663b0bf6
cython:
version: "0.29.26"
hashes:
- sha256:c4b003b6b7aa9e74552ef8d4e6009b3e3c3e8fa585710b3a6d062e088e460c1b
- sha256:ce804a021c92fea66c8c100781a111706f21bade7a546895c5a9c57fe2df8b24
- sha256:93840f2071c1f15e613509eadee1fbcd335e8666772437fe5038e24059edd48c
- sha256:10402f0f1564ffc6ecb9c45e07f995771d05bb0b0e1d151e40574638292ee3a5
- sha256:8e07121b34221458a2151d37e137b8f5b011a9c51dd38db2499a6198590aa319
- sha256:233a87db76941626f1db08f4b25a4a5b425b13b64ed0e673c3641f7b650a48d8
- sha256:6773cce9d4b3b6168d8feb2b6f06b658ef1e11cbfec075041745666d8e2a5e45
- sha256:c813799d533194b7d85203d881d8b4f567a8c644a67f50d47f1ffbf316df412f
- sha256:362fbb9cb4627c7786231429768b54aaba5459a2a0e46c25e59f202ca6155437
- sha256:2b834ff6e4d10ba6d7a0d676dd71c1b427a181ddbbbbf79e91d1861557aab59f
- sha256:0c3093bc99facfc97e5019f6c5bc39987663792265c1334d9fc9e37c3a3dcd6f
- sha256:bbf0149680c1fca07200a3ed372b22e6bad7851d191b717a61f9a68af370e180
- sha256:a1cc55db32cd39474081d478263b96e036552cdbbab8831c90ea43fb385a9b66
- sha256:ebe32e002a9e6553de399033e259ece72aa17c77f740b265e66f122572a8a278
- sha256:6b385f68789c3e8a75b4827e8a4970ff04605ad3cb1c0b41005cc69368dad65d
- sha256:1519eb639de308f5763eb0666b4cc7bd3958268f3f6228cc610b7b4d6c94b68b
- sha256:e118525defec3f67471d8ee5ce04340d43195410a87e5d7ec8a1a9e953c0066a
- sha256:706ea55f58c2722206e51cd9a8754ed0995c4c4231d24b095875d2641d745222
- sha256:77913fe27c5e22c995bac090d01e200ff91e5f58aa944e2d2e94cbf67ea2ae34
- sha256:51923120f57a42c59f5ee6bba9e89a31a394ae8cd419c753f64d8a3aea1ee8b7
- sha256:82881565d04027728d7762edd8c085927a840873af7ee049d703e0ca226bc08d
- sha256:531303085503959338e6cdac630626280ef111aecbb22d48321673a8c3897c0a
- sha256:0205b685802eb4c039b14f67b7ac3f00c55ff04b9e3871df2249576d3e59ba42
- sha256:7df94e56872df8f396ca669466fee60256f69f678654239f706b1e643c2ac4a5
- sha256:4b7d04b393d9a4b5fec0cbc4b0f29fe083a9d862d95231a6e7608978bd661d7e
- sha256:af91dd63ac5f1f7fc70dc91ea063f727db42b5eb934d1f3832611be18e25171e
- sha256:d83dad8dc6c63706cb3178dc79010b3865b1345090727189d2cd61758a825ee8
- sha256:ca10e9fde0eaba1407ab353ff07a26daaa3e4dbe356108a149e482d441f070dd
- sha256:fec66cd0a48697fab903854566235aecf1084f62e3163d6589ae7335a1b4d448
- sha256:b3041e45aefaa4449fd671902132c0ac1f72eedaedac745c0e1a70a13bf990bb
- sha256:ed76fb98979f02b5e89079906071983a36f3634d3028b86f935cf0196f24fcaa
- sha256:4d868e1a41f5123f51a20c1b8e82f7cb6fa3370c104e98e707f7c910e8cadad1
- sha256:868f309095e557f06dc58723ae865e8c65cfedb2646a562bd8080c92d69e4e4b
- sha256:be550b566345bf53b95616334793ce42a128cf1d9dcde1e28cbf7ce52ea61d6d
- sha256:be13be1e2b9b7395588f2a23bfa692f2f3e6f7936ccf7825c83431b8c8c3452b
- sha256:41ee918480371ae5e5851ba9b1ead5a183e24aedb27bf807c7405d124e943f40
- sha256:c91b1ba0d43f7f7ccde8121c672207c7891735ddcc83496af1e0ab617ddc4aba
- sha256:5ecf5cf5b57086cc6c1cfc76d6353bbd7023e95da32e0883f1302ca50e481c33
- sha256:0ffce25bf50fa926ec6bf8d6f29650e7cb33fae445938c9880e1ce9b776353ef
- sha256:5041adfef502d67ecd5e291a7cf645a37fed7a9dac557f40d491053f35204d00
- sha256:5fd5db458c9d3d2c2abd047f3190624d9cce8a80a8e0ca0baa69cfd133a523bc
- sha256:75eaa22911d2ec37a3841f77b710b178c805cd378b5e1c4fb82dbc35620d2062
- sha256:3aed8c642e8fb27024bca46830b7f62335a44a92354acf708d6b8d050f945a3a
- sha256:b5ca05c2bfba0c2480b5fd390ecffe46b8da574d887d600388d6e3caf3f99a88
- sha256:f5e15ff892c8afad64931ee3dd723c4755c2c516606f9aae7613bebfac62b0f6
- sha256:af377d543a762867da11fcf6e558f7a4a535ff8693f30cce123fab10c00fa312
pybind11:
version: "2.6.2"
hashes:
- sha256:2d8aebe1709bc367e34e3b23d8eccbf3f387ee9d5640548c6260d33b59f02405
- sha256:d0e0aed9279656f21501243b707eb6e3b951e89e10c3271dedf3ae41c365e5ed
wheel:
version: "0.37.1"
hashes:
- sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a
- sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4
ifaddr:
version: "0.1.7"
hashes:
- sha256:d1f603952f0a71c9ab4e705754511e4e03b02565bc4cec7188ad6415ff534cd3
- sha256:1f9e8a6ca6f16db5a37d3356f07b6e52344f6f9f7e806d618537731669eb1a94
pycparser:
version: "2.20"
hashes:
- sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705
- sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0
zipp:
version: "3.5.0"
hashes:
- sha256:957cfda87797e389580cb8b9e3870841ca991e2125350677b2ca83a0e99390a3
- sha256:f5812b1e007e48cff63449a5e9f4e7ebea716b4111f9c4f9a645f91d579bf0c4
urllib3:
version: "2.2.3"
hashes:
- sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac
- sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9
jeepney:
version: "0.8.0"
hashes:
- sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755
- sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806
SecretStorage:
version: "3.3.3"
hashes:
- sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99
- sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77
keyring:
version: "25.5.0"
hashes:
- sha256:e67f8ac32b04be4714b42fe84ce7dad9c40985b9ca827c592cc303e7c26d9741
- sha256:4c753b3ec91717fe713c4edd522d625889d8973a349b0e582622f49766de58e6
jaraco.classes:
version: "3.4.0"
hashes:
- sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790
- sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd
jaraco.functools:
version: "4.1.0"
hashes:
- sha256:ad159f13428bc4acbf5541ad6dec511f91573b90fba04df61dafa2a1231cf649
- sha256:70f7e0e2ae076498e212562325e805204fc092d7b4c17e0e86c959e249701a9d
jaraco.context:
version: "6.0.1"
hashes:
- sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4
- sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3
more_itertools:
version: "10.5.0"
hashes:
- sha256:037b0d3203ce90cca8ab1defbbdac29d5f993fc20131f3664dc8d6acfa872aef
- sha256:5482bfef7849c25dc3c6dd53a6173ae4795da2a41a80faea6700d9f5846c5da6
charset-normalizer:
version: "2.1.0"
hashes:
- sha256:5189b6f22b01957427f35b6a08d9a0bc45b46d3788ef5a92e978433c7a35f8a5
- sha256:575e708016ff3a5e3681541cb9d79312c416835686d054a23accb873b254f413
Windows:
twisted-iocpsupport:
version: "1.0.2"
hashes:
- sha256:985c06a33f5c0dae92c71a036d1ea63872ee86a21dd9b01e1f287486f15524b4
- sha256:81b3abe3527b367da0220482820cb12a16c661672b7bcfcde328902890d63323
- sha256:9dbb8823b49f06d4de52721b47de4d3b3026064ef4788ce62b1a21c57c3fff6f
- sha256:b9fed67cf0f951573f06d560ac2f10f2a4bbdc6697770113a2fc396ea2cb2565
- sha256:b76b4eed9b27fd63ddb0877efdd2d15835fdcb6baa745cb85b66e5d016ac2878
- sha256:851b3735ca7e8102e661872390e3bce88f8901bece95c25a0c8bb9ecb8a23d32
- sha256:bf4133139d77fc706d8f572e6b7d82871d82ec7ef25d685c2351bdacfb701415
- sha256:306becd6e22ab6e8e4f36b6bdafd9c92e867c98a5ce517b27fdd27760ee7ae41
- sha256:3c61742cb0bc6c1ac117a7e5f422c129832f0c295af49e01d8a6066df8cfc04d
- sha256:b435857b9efcbfc12f8c326ef0383f26416272260455bbca2cd8d8eca470c546
- sha256:7d972cfa8439bdcb35a7be78b7ef86d73b34b808c74be56dfa785c8a93b851bf
- sha256:72068b206ee809c9c596b57b5287259ea41ddb4774d86725b19f35bf56aa32a9
pywin32-ctypes:
version: "0.2.3"
hashes:
- sha256:8a1513379d709975552d202d942d9837758905c8d01eb82b8bcc30918929e7b8
- sha256:d162dc04946d704503b2edc4d55f3dba5c1d539ead017afa00142c38b9885755
pip_requirements_dev:
any_os:
pytest: {}
pyyaml: {}
sip: { version: "6.5.1" }
jinja2: {}
pip_requirements_installer:
any_os:
pyinstaller: { version: "6.11.1" }
pyinstaller-hooks-contrib: {}
python_translation_source_folders:
- cura
- plugins
qml_translation_source_folders:
- resources/qml
- plugins

View File

@ -11,7 +11,7 @@ from conan.tools.env import VirtualRunEnv, Environment, VirtualBuildEnv
from conan.tools.scm import Version
from conan.errors import ConanInvalidConfiguration, ConanException
required_conan_version = ">=1.58.0 <2.0.0"
required_conan_version = ">=2.7.0"
class CuraConan(ConanFile):
@ -24,99 +24,53 @@ class CuraConan(ConanFile):
build_policy = "missing"
exports = "LICENSE*", "*.jinja"
settings = "os", "compiler", "build_type", "arch"
generators = "VirtualPythonEnv"
tool_requires = "gettext/0.22.5"
# FIXME: Remove specific branch once merged to main
python_requires = "translationextractor/[>=2.2.0]@ultimaker/stable"
options = {
"enterprise": ["True", "False", "true", "false"], # Workaround for GH Action passing boolean as lowercase string
"staging": ["True", "False", "true", "false"], # Workaround for GH Action passing boolean as lowercase string
"devtools": [True, False], # FIXME: Split this up in testing and (development / build (pyinstaller) / system installer) tools
"cloud_api_version": "ANY",
"display_name": "ANY", # TODO: should this be an option??
"enterprise": [True, False],
"staging": [True, False],
"cloud_api_version": ["ANY"],
"display_name": ["ANY"], # TODO: should this be an option??
"cura_debug_mode": [True, False], # FIXME: Use profiles
"internal": ["True", "False", "true", "false"], # Workaround for GH Action passing boolean as lowercase string
"enable_i18n": [True, False],
"internal": [True, False],
"i18n_extract": [True, False],
}
default_options = {
"enterprise": "False",
"staging": "False",
"devtools": False,
"enterprise": False,
"staging": False,
"cloud_api_version": "1",
"display_name": "UltiMaker Cura",
"cura_debug_mode": False, # Not yet implemented
"internal": "False",
"enable_i18n": False,
"internal": False,
"i18n_extract": False,
}
def set_version(self):
if not self.version:
self.version = self.conan_data["version"]
@property
def _i18n_options(self):
return self.conf.get("user.i18n:options", default = {"extract": True, "build": True}, check_type = dict)
@property
def _pycharm_targets(self):
return self.conan_data["pycharm_targets"]
# FIXME: These env vars should be defined in the runenv.
_cura_env = None
@property
def _cura_run_env(self):
if self._cura_env:
return self._cura_env
self._cura_env = Environment()
self._cura_env.define("QML2_IMPORT_PATH", str(self._site_packages.joinpath("PyQt6", "Qt6", "qml")))
self._cura_env.define("QT_PLUGIN_PATH", str(self._site_packages.joinpath("PyQt6", "Qt6", "plugins")))
if not self.in_local_cache:
self._cura_env.define("CURA_DATA_ROOT", str(self._share_dir.joinpath("cura")))
if self.settings.os == "Linux":
self._cura_env.define("QT_QPA_FONTDIR", "/usr/share/fonts")
self._cura_env.define("QT_QPA_PLATFORMTHEME", "xdgdesktopportal")
self._cura_env.define("QT_XKB_CONFIG_ROOT", "/usr/share/X11/xkb")
return self._cura_env
@property
def _enterprise(self):
return self.options.enterprise in ["True", 'true']
@property
def _internal(self):
return self.options.internal in ["True", 'true']
@property
def _app_name(self):
if self._enterprise:
if self.options.enterprise:
return str(self.options.display_name) + " Enterprise"
return str(self.options.display_name)
@property
def _urls(self):
if self.options.staging in ["True", 'true']:
if self.options.staging:
return "staging"
return "default"
@property
def requirements_txts(self):
if self.options.devtools:
return ["requirements.txt", "requirements-ultimaker.txt", "requirements-dev.txt"]
return ["requirements.txt", "requirements-ultimaker.txt"]
def _root_dir(self):
return Path(self.deploy_folder if hasattr(self, "deploy_folder") else self.source_folder)
@property
def _base_dir(self):
if self.install_folder is None:
if self.build_folder is not None:
return Path(self.build_folder)
return Path(os.getcwd(), "venv")
if self.in_local_cache:
return Path(self.install_folder)
else:
return Path(self.source_folder, "venv")
return self._root_dir.joinpath("venv")
@property
def _share_dir(self):
@ -132,7 +86,7 @@ class CuraConan(ConanFile):
def _site_packages(self):
if self.settings.os == "Windows":
return self._base_dir.joinpath("Lib", "site-packages")
py_version = Version(self.deps_cpp_info["cpython"].version)
py_version = Version(self.dependencies["cpython"].ref.version)
return self._base_dir.joinpath("lib", f"python{py_version.major}.{py_version.minor}", "site-packages")
@property
@ -157,7 +111,7 @@ class CuraConan(ConanFile):
# list of conan installs
for dependency in self.dependencies.host.values():
conan_installs[dependency.ref.name] = {
"version": dependency.ref.version,
"version": str(dependency.ref.version),
"revision": dependency.ref.revision
}
return conan_installs
@ -166,22 +120,15 @@ class CuraConan(ConanFile):
self.output.info("Collecting python installs")
python_installs = {}
# list of python installs
run_env = VirtualRunEnv(self)
env = run_env.environment()
env.prepend_path("PYTHONPATH", str(self._site_packages.as_posix()))
venv_vars = env.vars(self, scope = "run")
collect_python_installs = "collect_python_installs.py"
code = f"import importlib.metadata; print(';'.join([(package.metadata['Name']+','+ package.metadata['Version']) for package in importlib.metadata.distributions()]))"
save(self, collect_python_installs, code)
outer = '"' if self.settings.os == "Windows" else "'"
inner = "'" if self.settings.os == "Windows" else '"'
buffer = StringIO()
with venv_vars.apply():
self.run(f"""python -c {outer}import pkg_resources; print({inner};{inner}.join([(s.key+{inner},{inner}+ s.version) for s in pkg_resources.working_set])){outer}""",
env = "conanrun",
output = buffer)
self.run(f"""python {collect_python_installs}""", env = "virtual_python_env", stdout = buffer)
rm(self, collect_python_installs, ".")
packages = str(buffer.getvalue()).split("-----------------\n")
packages = packages[1].strip('\r\n').split(";")
packages = str(buffer.getvalue()).strip('\r\n').split(";")
for package in packages:
name, version = package.split(",")
python_installs[name] = {"version": version}
@ -197,15 +144,18 @@ class CuraConan(ConanFile):
cura_version = Version(self.conf.get("user.cura:version", default = self.version, check_type = str))
pre_tag = f"-{cura_version.pre}" if cura_version.pre else ""
build_tag = f"+{cura_version.build}" if cura_version.build else ""
internal_tag = f"+internal" if self._internal else ""
internal_tag = f"+internal" if self.options.internal else ""
cura_version = f"{cura_version.major}.{cura_version.minor}.{cura_version.patch}{pre_tag}{build_tag}{internal_tag}"
self.output.info(f"Write CuraVersion.py to {self.recipe_folder}")
with open(os.path.join(location, "CuraVersion.py"), "w") as f:
f.write(cura_version_py.render(
cura_app_name = self.name,
cura_app_display_name = self._app_name,
cura_version = cura_version,
cura_build_type = "Enterprise" if self._enterprise else "",
cura_version_full = self.version,
cura_build_type = "Enterprise" if self.options.enterprise else "",
cura_debug_mode = self.options.cura_debug_mode,
cura_cloud_api_root = self.conan_data["urls"][self._urls]["cloud_api_root"],
cura_cloud_api_version = self.options.cloud_api_version,
@ -217,43 +167,108 @@ class CuraConan(ConanFile):
python_installs=self._python_installs(),
))
def _generate_pyinstaller_spec(self, location, entrypoint_location, icon_path, entitlements_file):
def _delete_unwanted_binaries(self, root):
dynamic_binary_file_exts = [".so", ".dylib", ".dll", ".pyd", ".pyi"]
prohibited = [
"qt5compat",
"qtcharts",
"qtcoap",
"qtdatavis3d",
"qtlottie",
"qtmqtt",
"qtnetworkauth",
"qtquick3d",
"quick3d",
"qtquick3dphysics",
"qtquicktimeline",
"qtvirtualkeyboard",
"qtwayland"
]
forbiddens = [x.encode() for x in prohibited]
to_remove_files = []
to_remove_dirs = []
for root, dir_, files in os.walk(root):
for filename in files:
if not any([(x in filename) for x in dynamic_binary_file_exts]):
continue
pathname = os.path.join(root, filename)
still_exist = True
for forbidden in prohibited:
if forbidden.lower() in str(pathname).lower():
to_remove_files.append(pathname)
still_exist = False
break
if not still_exist:
continue
with open(pathname, "rb") as file:
bytez = file.read().lower()
for forbidden in forbiddens:
if bytez.find(forbidden) >= 0:
to_remove_files.append(pathname)
for dirname in dir_:
for forbidden in prohibited:
if forbidden.lower() in str(dirname).lower():
pathname = os.path.join(root, dirname)
to_remove_dirs.append(pathname)
break
for file in to_remove_files:
try:
os.remove(file)
print(f"deleted file: {file}")
except Exception as ex:
print(f"WARNING: Attempt to delete file {file} results in: {str(ex)}")
for dir_ in to_remove_dirs:
try:
rmdir(self, dir_)
print(f"deleted dir_: {dir_}")
except Exception as ex:
print(f"WARNING: Attempt to delete folder {dir_} results in: {str(ex)}")
def _generate_pyinstaller_spec(self, location, entrypoint_location, icon_path, entitlements_file, cura_source_folder):
pyinstaller_metadata = self.conan_data["pyinstaller"]
datas = []
for data in pyinstaller_metadata["datas"].values():
if not self._internal and data.get("internal", False):
if (not self.options.internal and data.get("internal", False)) or (not self.options.enterprise and data.get("enterprise_only", False)):
continue
if "oses" in data and self.settings.os not in data["oses"]:
continue
if "package" in data: # get the paths from conan package
if data["package"] == self.name:
if self.in_local_cache:
src_path = os.path.join(self.package_folder, data["src"])
else:
src_path = os.path.join(self.source_folder, data["src"])
src_path = str(Path(cura_source_folder, data["src"]))
else:
if data["package"] not in self.deps_cpp_info.deps:
continue
src_path = os.path.join(self.deps_cpp_info[data["package"]].rootpath, data["src"])
if data["package"] not in self.dependencies:
raise ConanException(f"Required package {data['package']} does not exist as a dependency")
package_folder = self.dependencies[data['package']].package_folder
if package_folder is None:
raise ConanException(f"Unable to find package_folder for {data['package']}, check that it has not been skipped")
src_path = os.path.join(self.dependencies[data["package"]].package_folder, data["src"])
elif "root" in data: # get the paths relative from the install folder
src_path = os.path.join(self.install_folder, data["root"], data["src"])
else:
continue
if Path(src_path).exists():
datas.append((str(src_path), data["dst"]))
raise ConanException("Misformatted conan data for pyinstaller datas, expected either package or root option")
if not Path(src_path).exists():
raise ConanException(f"Missing folder {src_path} for pyinstaller data {data}")
datas.append((str(src_path), data["dst"]))
binaries = []
for binary in pyinstaller_metadata["binaries"].values():
if "package" in binary: # get the paths from conan package
src_path = os.path.join(self.deps_cpp_info[binary["package"]].rootpath, binary["src"])
src_path = os.path.join(self.dependencies[binary["package"]].package_folder, binary["src"])
elif "root" in binary: # get the paths relative from the sourcefolder
src_path = str(self.source_path.joinpath(binary["root"], binary["src"]))
src_path = str(Path(self.source_folder, binary["root"], binary["src"]))
if self.settings.os == "Windows":
src_path = src_path.replace("\\", "\\\\")
else:
continue
raise ConanException("Misformatted conan data for pyinstaller binaries, expected either package or root option")
if not Path(src_path).exists():
self.output.warning(f"Source path for binary {binary['binary']} does not exist")
continue
raise ConanException(f"Missing folder {src_path} for pyinstaller binary {binary}")
for bin in Path(src_path).glob(binary["binary"] + "*[.exe|.dll|.so|.dylib|.so.]*"):
binaries.append((str(bin), binary["dst"]))
@ -281,16 +296,36 @@ class CuraConan(ConanFile):
version = self.conf.get("user.cura:version", default = self.version, check_type = str)
cura_version = Version(version)
# filter all binary files in binaries on the blacklist
blacklist = pyinstaller_metadata["blacklist"]
filtered_binaries = [b for b in binaries if not any([all([(part in b[0].lower()) for part in parts]) for parts in blacklist])]
# In case the installer isn't actually pyinstaller (Windows at the moment), outright remove the offending files:
specifically_delete = set(binaries) - set(filtered_binaries)
for (unwanted_path, _) in specifically_delete:
try:
os.remove(unwanted_path)
print(f"delete: {unwanted_path}")
except Exception as ex:
print(f"WARNING: Attempt to delete binary {unwanted_path} results in: {str(ex)}")
hiddenimports = pyinstaller_metadata["hiddenimports"]
collect_all = pyinstaller_metadata["collect_all"]
if self.settings.os == "Windows":
hiddenimports += pyinstaller_metadata["hiddenimports_WINDOWS_ONLY"]
collect_all += pyinstaller_metadata["collect_all_WINDOWS_ONLY"]
# Write the actual file:
with open(os.path.join(location, "UltiMaker-Cura.spec"), "w") as f:
f.write(pyinstaller.render(
name = str(self.options.display_name).replace(" ", "-"),
display_name = self._app_name,
entrypoint = entrypoint_location,
datas = datas,
binaries = binaries,
binaries = filtered_binaries,
venv_script_path = str(self._script_dir),
hiddenimports = pyinstaller_metadata["hiddenimports"],
collect_all = pyinstaller_metadata["collect_all"],
hiddenimports = hiddenimports,
collect_all = collect_all,
icon = icon_path,
entitlements_file = entitlements_file,
osx_bundle_identifier = "'nl.ultimaker.cura'" if self.settings.os == "Macos" else "None",
@ -312,95 +347,50 @@ class CuraConan(ConanFile):
copy(self, "*", os.path.join(self.recipe_folder, "cura"), os.path.join(self.export_sources_folder, "cura"), excludes="CuraVersion.py")
copy(self, "*", os.path.join(self.recipe_folder, "packaging"), os.path.join(self.export_sources_folder, "packaging"))
copy(self, "*", os.path.join(self.recipe_folder, ".run_templates"), os.path.join(self.export_sources_folder, ".run_templates"))
copy(self, "requirements.txt", self.recipe_folder, self.export_sources_folder)
copy(self, "requirements-dev.txt", self.recipe_folder, self.export_sources_folder)
copy(self, "requirements-ultimaker.txt", self.recipe_folder, self.export_sources_folder)
copy(self, "cura_app.py", self.recipe_folder, self.export_sources_folder)
def config_options(self):
if self.settings.os == "Windows" and not self.conf.get("tools.microsoft.bash:path", check_type=str):
del self.options.enable_i18n
def configure(self):
self.options["pyarcus"].shared = True
self.options["pysavitar"].shared = True
self.options["pynest2d"].shared = True
self.options["dulcificum"].shared = self.settings.os != "Windows"
self.options["cpython"].shared = True
self.options["boost"].header_only = True
if self.settings.os == "Linux":
self.options["openssl"].shared = True
if self.conf.get("user.curaengine:sentry_url", "", check_type=str) != "":
self.options["curaengine"].enable_sentry = True
self.options["arcus"].enable_sentry = True
self.options["clipper"].enable_sentry = True
def validate(self):
version = self.conf.get("user.cura:version", default = self.version, check_type = str)
if version and Version(version) <= Version("4"):
raise ConanInvalidConfiguration("Only versions 5+ are support")
if self.options.i18n_extract and self.settings.os == "Windows" and not self.conf.get("tools.microsoft.bash:path", check_type=str):
raise ConanInvalidConfiguration("Unable to extract translations on Windows without Bash installed")
def requirements(self):
for req in self.conan_data["requirements"]:
if self._internal and "fdm_materials" in req:
continue
if not self._enterprise and "native_cad_plugin" in req:
if self.options.internal and "fdm_materials" in req:
continue
self.requires(req)
if self._internal:
if self.options.internal:
for req in self.conan_data["requirements_internal"]:
self.requires(req)
self.requires("cpython/3.10.4@ultimaker/stable")
self.requires("clipper/6.4.2@ultimaker/stable")
self.requires("openssl/3.2.0")
self.requires("protobuf/3.21.12")
self.requires("boost/1.82.0")
self.requires("spdlog/1.12.0")
self.requires("fmt/10.1.1")
self.requires("zlib/1.2.13")
def build_requirements(self):
if self.options.get_safe("enable_i18n", False):
self.tool_requires("gettext/0.21", force_host_context = True)
if self.options.enterprise:
for req in self.conan_data["requirements_enterprise"]:
self.requires(req)
self.requires("cpython/3.12.2")
def layout(self):
self.folders.source = "."
self.folders.build = "venv"
self.folders.generators = os.path.join(self.folders.build, "conan")
self.folders.build = "build"
self.folders.generators = os.path.join(self.folders.build, "generators")
self.cpp.package.libdirs = [os.path.join("site-packages", "cura")]
self.cpp.package.bindirs = ["bin"]
self.cpp.package.resdirs = ["resources", "plugins", "packaging", "pip_requirements"] # pip_requirements should be the last item in the list
self.cpp.package.resdirs = ["resources", "plugins", "packaging"]
def generate(self):
copy(self, "cura_app.py", self.source_folder, str(self._script_dir))
cura_run_envvars = self._cura_run_env.vars(self, scope = "run")
ext = ".ps1" if self.settings.os == "Windows" else ".sh"
cura_run_envvars.save_script(os.path.join(self.folders.generators, f"cura_run_environment{ext}"))
vr = VirtualRunEnv(self)
vr.generate()
self._generate_cura_version(str(Path(self.source_folder, "cura")))
self._generate_cura_version(os.path.join(self.source_folder, "cura"))
# Copy CuraEngine.exe to bindirs of Virtual Python Environment
curaengine = self.dependencies["curaengine"].cpp_info
copy(self, "CuraEngine.exe", curaengine.bindirs[0], self.source_folder, keep_path = False)
copy(self, "CuraEngine", curaengine.bindirs[0], self.source_folder, keep_path = False)
if not self.in_local_cache:
# Copy CuraEngine.exe to bindirs of Virtual Python Environment
curaengine = self.dependencies["curaengine"].cpp_info
copy(self, "CuraEngine.exe", curaengine.bindirs[0], self.source_folder, keep_path = False)
copy(self, "CuraEngine", curaengine.bindirs[0], self.source_folder, keep_path = False)
# Copy the external plugins that we want to bundle with Cura
rmdir(self,str(self.source_path.joinpath("plugins", "CuraEngineGradualFlow")))
curaengine_plugin_gradual_flow = self.dependencies["curaengine_plugin_gradual_flow"].cpp_info
copy(self, "*", curaengine_plugin_gradual_flow.resdirs[0], str(self.source_path.joinpath("plugins", "CuraEngineGradualFlow")), keep_path = True)
copy(self, "*", curaengine_plugin_gradual_flow.bindirs[0], self.source_folder, keep_path = False)
copy(self, "bundled_*.json", curaengine_plugin_gradual_flow.resdirs[1], str(self.source_path.joinpath("resources", "bundled_packages")), keep_path = False)
if self._enterprise:
rmdir(self, str(self.source_path.joinpath("plugins", "NativeCADplugin")))
curaengine_plugin_gradual_flow = self.dependencies["native_cad_plugin"].cpp_info
copy(self, "*", curaengine_plugin_gradual_flow.resdirs[0], str(self.source_path.joinpath("plugins", "NativeCADplugin")), keep_path = True)
copy(self, "bundled_*.json", curaengine_plugin_gradual_flow.resdirs[1], str(self.source_path.joinpath("resources", "bundled_packages")), keep_path = False)
# Copy the external plugins that we want to bundle with Cura
if self.options.enterprise:
rmdir(self, str(Path(self.source_folder, "plugins", "NativeCADplugin")))
native_cad_plugin = self.dependencies["native_cad_plugin"].cpp_info
copy(self, "*", native_cad_plugin.resdirs[0], str(Path(self.source_folder, "plugins", "NativeCADplugin")), keep_path = True)
copy(self, "bundled_*.json", native_cad_plugin.resdirs[1], str(Path(self.source_folder, "resources", "bundled_packages")), keep_path = False)
# Copy resources of cura_binary_data
cura_binary_data = self.dependencies["cura_binary_data"].cpp_info
@ -411,55 +401,47 @@ class CuraConan(ConanFile):
for dependency in self.dependencies.host.values():
for bindir in dependency.cpp_info.bindirs:
self._delete_unwanted_binaries(bindir)
copy(self, "*.dll", bindir, str(self._site_packages), keep_path = False)
for libdir in dependency.cpp_info.libdirs:
self._delete_unwanted_binaries(libdir)
copy(self, "*.pyd", libdir, str(self._site_packages), keep_path = False)
copy(self, "*.pyi", libdir, str(self._site_packages), keep_path = False)
copy(self, "*.dylib", libdir, str(self._base_dir.joinpath("lib")), keep_path = False)
# Copy materials (flat)
rmdir(self, os.path.join(self.source_folder, "resources", "materials"))
rmdir(self, str(Path(self.source_folder, "resources", "materials")))
fdm_materials = self.dependencies["fdm_materials"].cpp_info
copy(self, "*", fdm_materials.resdirs[0], self.source_folder)
# Copy internal resources
if self._internal:
if self.options.internal:
cura_private_data = self.dependencies["cura_private_data"].cpp_info
copy(self, "*", cura_private_data.resdirs[0], str(self._share_dir.joinpath("cura")))
if self.options.devtools:
entitlements_file = "'{}'".format(os.path.join(self.source_folder, "packaging", "MacOS", "cura.entitlements"))
self._generate_pyinstaller_spec(
location=self.generators_folder,
entrypoint_location="'{}'".format(
os.path.join(self.source_folder, self.conan_data["pyinstaller"]["runinfo"]["entrypoint"])).replace(
"\\", "\\\\"),
icon_path="'{}'".format(os.path.join(self.source_folder, "packaging",
self.conan_data["pyinstaller"]["icon"][
str(self.settings.os)])).replace("\\", "\\\\"),
entitlements_file=entitlements_file if self.settings.os == "Macos" else "None"
)
if self.options.get_safe("enable_i18n", False) and self._i18n_options["extract"]:
if self.options.i18n_extract:
vb = VirtualBuildEnv(self)
vb.generate()
# # FIXME: once m4, autoconf, automake are Conan V2 ready use self.win_bash and add gettext as base tool_requirement
cpp_info = self.dependencies["gettext"].cpp_info
pot = self.python_requires["translationextractor"].module.ExtractTranslations(self, cpp_info.bindirs[0])
pot = self.python_requires["translationextractor"].module.ExtractTranslations(self)
pot.generate()
def build(self):
if self.options.get_safe("enable_i18n", False) and self._i18n_options["build"]:
for po_file in self.source_path.joinpath("resources", "i18n").glob("**/*.po"):
mo_file = Path(self.build_folder, po_file.with_suffix('.mo').relative_to(self.source_path))
mo_file = mo_file.parent.joinpath("LC_MESSAGES", mo_file.name)
mkdir(self, str(unix_path(self, Path(mo_file).parent)))
cpp_info = self.dependencies["gettext"].cpp_info
self.run(f"{cpp_info.bindirs[0]}/msgfmt {po_file} -o {mo_file} -f", env="conanbuild", ignore_errors=True)
if self.settings.os == "Windows" and not self.conf.get("tools.microsoft.bash:path", check_type=str):
self.output.warning("Skipping generation of binary translation files because Bash could not be found and is required")
return
for po_file in Path(self.source_folder, "resources", "i18n").glob("**/*.po"):
mo_file = Path(self.build_folder, po_file.with_suffix('.mo').relative_to(self.source_folder))
mo_file = mo_file.parent.joinpath("LC_MESSAGES", mo_file.name)
mkdir(self, str(unix_path(self, Path(mo_file).parent)))
self.run(f"msgfmt {po_file} -o {mo_file} -f", env="conanbuild")
def deploy(self):
copy(self, "*", os.path.join(self.package_folder, self.cpp.package.resdirs[2]), os.path.join(self.install_folder, "packaging"), keep_path = True)
''' Note: this deploy step is actually used to prepare for building a Cura distribution with pyinstaller, which is not
the original purpose of a deploy step in the Conan philosophy '''
copy(self, "*", os.path.join(self.package_folder, self.cpp.package.resdirs[2]), os.path.join(self.deploy_folder, "packaging"), keep_path = True)
# Copy resources of Cura (keep folder structure) needed by pyinstaller to determine the module structure
copy(self, "*", os.path.join(self.package_folder, self.cpp_info.bindirs[0]), str(self._base_dir), keep_path = False)
@ -479,34 +461,19 @@ class CuraConan(ConanFile):
copy(self, "*", uranium.resdirs[1], str(self._share_dir.joinpath("uranium", "plugins")), keep_path = True)
copy(self, "*", uranium.libdirs[0], str(self._site_packages.joinpath("UM")), keep_path = True)
# Generate the GitHub Action version info Environment
version = self.conf.get("user.cura:version", default = self.version, check_type = str)
cura_version = Version(version)
env_prefix = "Env:" if self.settings.os == "Windows" else ""
activate_github_actions_version_env = Template(r"""echo "CURA_VERSION_MAJOR={{ cura_version_major }}" >> ${{ env_prefix }}GITHUB_ENV
echo "CURA_VERSION_MINOR={{ cura_version_minor }}" >> ${{ env_prefix }}GITHUB_ENV
echo "CURA_VERSION_PATCH={{ cura_version_patch }}" >> ${{ env_prefix }}GITHUB_ENV
echo "CURA_VERSION_BUILD={{ cura_version_build }}" >> ${{ env_prefix }}GITHUB_ENV
echo "CURA_VERSION_FULL={{ cura_version_full }}" >> ${{ env_prefix }}GITHUB_ENV
echo "CURA_APP_NAME={{ cura_app_name }}" >> ${{ env_prefix }}GITHUB_ENV
""").render(cura_version_major = cura_version.major,
cura_version_minor = cura_version.minor,
cura_version_patch = cura_version.patch,
cura_version_build = cura_version.build if cura_version.build != "" else "0",
cura_version_full = self.version,
cura_app_name = self._app_name,
env_prefix = env_prefix)
ext = ".sh" if self.settings.os != "Windows" else ".ps1"
save(self, os.path.join(self._script_dir, f"activate_github_actions_version_env{ext}"), activate_github_actions_version_env)
self._generate_cura_version(os.path.join(self._site_packages, "cura"))
entitlements_file = "'{}'".format(Path(self.cpp_info.res_paths[2], "MacOS", "cura.entitlements"))
self._generate_pyinstaller_spec(location = self._base_dir,
self._delete_unwanted_binaries(self._site_packages)
self._delete_unwanted_binaries(self.package_folder)
self._delete_unwanted_binaries(self._base_dir)
self._delete_unwanted_binaries(self._share_dir)
entitlements_file = "'{}'".format(Path(self.deploy_folder, "packaging", "MacOS", "cura.entitlements"))
self._generate_pyinstaller_spec(location = self.deploy_folder,
entrypoint_location = "'{}'".format(os.path.join(self.package_folder, self.cpp_info.bindirs[0], self.conan_data["pyinstaller"]["runinfo"]["entrypoint"])).replace("\\", "\\\\"),
icon_path = "'{}'".format(os.path.join(self.package_folder, self.cpp_info.resdirs[2], self.conan_data["pyinstaller"]["icon"][str(self.settings.os)])).replace("\\", "\\\\"),
entitlements_file = entitlements_file if self.settings.os == "Macos" else "None")
entitlements_file = entitlements_file if self.settings.os == "Macos" else "None",
cura_source_folder = self.package_folder)
def package(self):
copy(self, "cura_app.py", src = self.source_folder, dst = os.path.join(self.package_folder, self.cpp.package.bindirs[0]))
@ -514,12 +481,8 @@ echo "CURA_APP_NAME={{ cura_app_name }}" >> ${{ env_prefix }}GITHUB_ENV
copy(self, "*", src = os.path.join(self.source_folder, "resources"), dst = os.path.join(self.package_folder, self.cpp.package.resdirs[0]))
copy(self, "*.mo", os.path.join(self.build_folder, "resources"), os.path.join(self.package_folder, "resources"))
copy(self, "*", src = os.path.join(self.source_folder, "plugins"), dst = os.path.join(self.package_folder, self.cpp.package.resdirs[1]))
copy(self, "requirement*.txt", src = self.source_folder, dst = os.path.join(self.package_folder, self.cpp.package.resdirs[-1]))
copy(self, "*", src = os.path.join(self.source_folder, "packaging"), dst = os.path.join(self.package_folder, self.cpp.package.resdirs[2]))
# Remove the CuraEngineGradualFlow plugin from the package
rmdir(self, os.path.join(self.package_folder, self.cpp.package.resdirs[1], "CuraEngineGradualFlow"))
rm(self, "bundled_*.json", os.path.join(self.package_folder, self.cpp.package.resdirs[0], "bundled_packages"), recursive = False)
copy(self, "pip_requirements_*.txt", src = self.generators_folder, dst = os.path.join(self.package_folder, self.cpp.package.resdirs[-1]))
# Remove the fdm_materials from the package
rmdir(self, os.path.join(self.package_folder, self.cpp.package.resdirs[0], "materials"))
@ -531,35 +494,8 @@ echo "CURA_APP_NAME={{ cura_app_name }}" >> ${{ env_prefix }}GITHUB_ENV
rmdir(self, os.path.join(self.package_folder, self.cpp.package.resdirs[0], Path(res_dir).name))
def package_info(self):
self.user_info.pip_requirements = "requirements.txt"
self.user_info.pip_requirements_git = "requirements-ultimaker.txt"
self.user_info.pip_requirements_build = "requirements-dev.txt"
if self.in_local_cache:
self.runenv_info.append_path("PYTHONPATH", os.path.join(self.package_folder, "site-packages"))
self.env_info.PYTHONPATH.append(os.path.join(self.package_folder, "site-packages"))
self.runenv_info.append_path("PYTHONPATH", os.path.join(self.package_folder, "plugins"))
self.env_info.PYTHONPATH.append(os.path.join(self.package_folder, "plugins"))
else:
self.runenv_info.append_path("PYTHONPATH", self.source_folder)
self.env_info.PYTHONPATH.append(self.source_folder)
self.runenv_info.append_path("PYTHONPATH", os.path.join(self.source_folder, "plugins"))
self.env_info.PYTHONPATH.append(os.path.join(self.source_folder, "plugins"))
self.runenv_info.append_path("PYTHONPATH", os.path.join(self.package_folder, "site-packages"))
self.runenv_info.append_path("PYTHONPATH", os.path.join(self.package_folder, "plugins"))
def package_id(self):
self.info.clear()
# The following options shouldn't be used to determine the hash, since these are only used to set the CuraVersion.py
# which will also be generated by the deploy method during the `conan install cura/5.1.0@_/_`
del self.info.options.enterprise
del self.info.options.staging
del self.info.options.devtools
del self.info.options.cloud_api_version
del self.info.options.display_name
del self.info.options.cura_debug_mode
if self.options.get_safe("enable_i18n", False):
del self.info.options.enable_i18n
# TODO: Use the hash of requirements.txt and requirements-ultimaker.txt, because changing these will actually result in a different
# Cura. This is needed because the requirements.txt files aren't managed by Conan and therefore not resolved in the package_id. This isn't
# ideal but an acceptable solution for now.
self.info.options.rm_safe("enable_i18n")

View File

@ -232,6 +232,7 @@ class Account(QObject):
def _onProfileChanged(self, profile: Optional[UserProfile]) -> None:
self._user_profile = profile
self._updatePermissions()
self.userProfileChanged.emit()
def _sync(self) -> None:

View File

@ -1,7 +1,13 @@
# Copyright (c) 2018 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
from typing import TYPE_CHECKING
from dataclasses import asdict
from typing import cast, Dict, TYPE_CHECKING
from UM.Settings.InstanceContainer import InstanceContainer
from UM.Settings.SettingFunction import SettingFunction
from cura.Settings.GlobalStack import GlobalStack
if TYPE_CHECKING:
from cura.CuraApplication import CuraApplication
@ -47,3 +53,57 @@ class Settings:
"""
return self.application.getSidebarCustomMenuItems()
def getSliceMetadata(self) -> Dict[str, Dict[str, Dict[str, str]]]:
"""Get all changed settings and all settings. For each extruder and the global stack"""
print_information = self.application.getPrintInformation()
machine_manager = self.application.getMachineManager()
settings = {
"material": {
"length": print_information.materialLengths,
"weight": print_information.materialWeights,
"cost": print_information.materialCosts,
},
"global": {
"changes": {},
"all_settings": {},
},
"quality": asdict(machine_manager.activeQualityDisplayNameMap()),
}
def _retrieveValue(container: InstanceContainer, setting_: str):
value_ = container.getProperty(setting_, "value")
for _ in range(0, 1024): # Limit the iterations to prevent a possibly endless loop.
if not isinstance(value_, SettingFunction):
return value_ # Success!
value_ = value_(container)
return 0 # Fallback value after breaking possibly endless loop.
global_stack = cast(GlobalStack, self.application.getGlobalContainerStack())
# Add global user or quality changes
global_flattened_changes = InstanceContainer.createMergedInstanceContainer(global_stack.userChanges, global_stack.qualityChanges)
for setting in global_flattened_changes.getAllKeys():
settings["global"]["changes"][setting] = _retrieveValue(global_flattened_changes, setting)
# Get global all settings values without user or quality changes
for setting in global_stack.getAllKeys():
settings["global"]["all_settings"][setting] = _retrieveValue(global_stack, setting)
for i, extruder in enumerate(global_stack.extruderList):
# Add extruder fields to settings dictionary
settings[f"extruder_{i}"] = {
"changes": {},
"all_settings": {},
}
# Add extruder user or quality changes
extruder_flattened_changes = InstanceContainer.createMergedInstanceContainer(extruder.userChanges, extruder.qualityChanges)
for setting in extruder_flattened_changes.getAllKeys():
settings[f"extruder_{i}"]["changes"][setting] = _retrieveValue(extruder_flattened_changes, setting)
# Get extruder all settings values without user or quality changes
for setting in extruder.getAllKeys():
settings[f"extruder_{i}"]["all_settings"][setting] = _retrieveValue(extruder, setting)
return settings
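As an illustration only (not part of the diff), a minimal sketch of how the dictionary returned by getSliceMetadata() could be consumed; it assumes a running CuraApplication and an illustrative `settings_api` variable holding this Settings instance:

# Hypothetical usage of getSliceMetadata(); `settings_api` is assumed to be this Settings object.
slice_metadata = settings_api.getSliceMetadata()
for setting, value in slice_metadata["global"]["changes"].items():
    print(f"global override: {setting} = {value}")
for stack_name, stack_data in slice_metadata.items():
    if stack_name.startswith("extruder_"):
        print(f"{stack_name}: {len(stack_data['changes'])} overrides out of {len(stack_data['all_settings'])} settings")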

View File

@ -14,7 +14,7 @@ DEFAULT_CURA_LATEST_URL = "https://software.ultimaker.com/latest.json"
# Each release has a fixed SDK version coupled with it. It doesn't make sense to make it configurable because, for
# example, Cura 3.2 with SDK version 6.1 will not work. So the SDK version is hard-coded here and left out of the
# CuraVersion.py.in template.
CuraSDKVersion = "8.8.0"
CuraSDKVersion = "8.9.0"
try:
from cura.CuraVersion import CuraLatestURL

View File

@ -252,19 +252,23 @@ class BuildVolume(SceneNode):
if not self.getMeshData() or not self.isVisible():
return True
theme = self._application.getTheme()
if not self._shader:
self._shader = OpenGL.getInstance().createShaderProgram(Resources.getPath(Resources.Shaders, "default.shader"))
self._grid_shader = OpenGL.getInstance().createShaderProgram(Resources.getPath(Resources.Shaders, "grid.shader"))
theme = self._application.getTheme()
self._grid_shader.setUniformValue("u_plateColor", Color(*theme.getColor("buildplate").getRgb()))
self._grid_shader.setUniformValue("u_gridColor0", Color(*theme.getColor("buildplate_grid").getRgb()))
self._grid_shader.setUniformValue("u_gridColor1", Color(*theme.getColor("buildplate_grid_minor").getRgb()))
plate_color = Color(*theme.getColor("buildplate").getRgb())
if self._global_container_stack.getMetaDataEntry("has_textured_buildplate", False):
plate_color.setA(0.5)
self._grid_shader.setUniformValue("u_plateColor", plate_color)
renderer.queueNode(self, mode = RenderBatch.RenderMode.Lines)
renderer.queueNode(self, mesh = self._origin_mesh, backface_cull = True)
renderer.queueNode(self, mesh = self._grid_mesh, shader = self._grid_shader, backface_cull = True)
renderer.queueNode(self, mesh = self._grid_mesh, shader = self._grid_shader, backface_cull = True, transparent = True, sort = -10)
if self._disallowed_area_mesh:
renderer.queueNode(self, mesh = self._disallowed_area_mesh, shader = self._shader, transparent = True, backface_cull = True, sort = -9)
renderer.queueNode(self, mesh = self._disallowed_area_mesh, shader = self._shader, transparent = True, backface_cull = True, sort = -5)
if self._error_mesh:
renderer.queueNode(self, mesh=self._error_mesh, shader=self._shader, transparent=True,

View File

@ -139,7 +139,7 @@ class CuraApplication(QtApplication):
# SettingVersion represents the set of settings available in the machine/extruder definitions.
# You need to make sure that this version number is increased if there are any non-backwards-compatible
# changes to the settings.
SettingVersion = 23
SettingVersion = 24
Created = False
@ -1895,23 +1895,20 @@ class CuraApplication(QtApplication):
def on_finish(response):
content_disposition_header_key = QByteArray("content-disposition".encode())
if not response.hasRawHeader(content_disposition_header_key):
Logger.log("w", "Could not find Content-Disposition header in response from {0}".format(
model_url.url()))
# Use the last part of the url as the filename, and assume it is an STL file
filename = model_url.path().split("/")[-1] + ".stl"
else:
filename = model_url.path().split("/")[-1] + ".stl"
if response.hasRawHeader(content_disposition_header_key):
# content_disposition is in the format
# ```
# content_disposition attachment; "filename=[FILENAME]"
# content_disposition attachment; filename="[FILENAME]"
# ```
# Use a regex to extract the filename
content_disposition = str(response.rawHeader(content_disposition_header_key).data(),
encoding='utf-8')
content_disposition_match = re.match(r'attachment; filename="(?P<filename>.*)"',
content_disposition_match = re.match(r'attachment; filename=(?P<filename>.*)',
content_disposition)
assert content_disposition_match is not None
filename = content_disposition_match.group("filename")
if content_disposition_match is not None:
filename = content_disposition_match.group("filename").strip("\"")
tmp = tempfile.NamedTemporaryFile(suffix=filename, delete=False)
with open(tmp.name, "wb") as f:
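For illustration only (not part of the diff), the relaxed filename pattern above accepts Content-Disposition headers with or without quotes around the filename; a standalone sketch with made-up header values:

# Standalone sketch of the relaxed Content-Disposition parsing; the sample headers are invented.
import re

for header in ('attachment; filename="benchy.stl"', 'attachment; filename=benchy.stl'):
    match = re.match(r'attachment; filename=(?P<filename>.*)', header)
    filename = match.group("filename").strip("\"") if match is not None else "fallback.stl"
    print(filename)  # prints "benchy.stl" for both headers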

View File

@ -1,8 +1,9 @@
# Copyright (c) 2019 Ultimaker B.V.
# Copyright (c) 2024 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.
from typing import Dict, List
from UM.Decorators import deprecated
from UM.Logger import Logger
from UM.Signal import Signal
from UM.Util import parseBool
@ -47,6 +48,7 @@ class MachineNode(ContainerNode):
self.preferred_variant_name = my_metadata.get("preferred_variant_name", "")
self.preferred_material = my_metadata.get("preferred_material", "")
self.preferred_quality_type = my_metadata.get("preferred_quality_type", "")
self.supports_abstract_color = parseBool(my_metadata.get("supports_abstract_color", "false"))
self._loadAll()
@ -168,13 +170,18 @@ class MachineNode(ContainerNode):
return self.global_qualities.get(self.preferred_quality_type, next(iter(self.global_qualities.values())))
def isExcludedMaterial(self, material: MaterialNode) -> bool:
def isExcludedMaterialBaseFile(self, material_base_file: str) -> bool:
"""Returns whether the material should be excluded from the list of materials."""
for exclude_material in self.exclude_materials:
if exclude_material in material["id"]:
if exclude_material in material_base_file:
return True
return False
@deprecated("Use isExcludedMaterialBaseFile instead.", since = "5.9.0")
def isExcludedMaterial(self, material: MaterialNode) -> bool:
"""Returns whether the material should be excluded from the list of materials."""
return self.isExcludedMaterialBaseFile(material.base_file)
@UM.FlameProfiler.profile
def _loadAll(self) -> None:
"""(Re)loads all variants under this printer."""

View File

@ -127,13 +127,12 @@ class QualityManagementModel(ListModel):
# have no container for the global stack, because "my_profile" just got renamed to "my_new_profile". This results
# in crashes because the rest of the system assumes that all data in a QualityChangesGroup will be correct.
#
# Renaming the container for the global stack in the end seems to be ok, because the assumption is mostly based
# on the quality changes container for the global stack.
# This is why we use the "supress_signals" flag when setting the name, which basically makes the change silent.
for metadata in quality_changes_group.metadata_per_extruder.values():
extruder_container = cast(InstanceContainer, container_registry.findContainers(id = metadata["id"])[0])
extruder_container.setName(new_name)
extruder_container.setName(new_name, supress_signals=True)
global_container = cast(InstanceContainer, container_registry.findContainers(id = quality_changes_group.metadata_for_global["id"])[0])
global_container.setName(new_name)
global_container.setName(new_name, supress_signals=True)
quality_changes_group.name = new_name

View File

@ -60,9 +60,11 @@ class VariantNode(ContainerNode):
materials = list(materials_per_base_file.values())
# Filter materials based on the exclude_materials property.
filtered_materials = [material for material in materials if not self.machine.isExcludedMaterial(material)]
filtered_materials = [material for material in materials if not self.machine.isExcludedMaterialBaseFile(material["id"])]
for material in filtered_materials:
if material.get("abstract_color", False) and not self.machine.supports_abstract_color:
continue # do not show abstract color profiles if the machine does not support them
base_file = material["base_file"]
if base_file not in self.materials:
self.materials[base_file] = MaterialNode(material["id"], variant = self)
@ -126,8 +128,10 @@ class VariantNode(ContainerNode):
return # We won't add any materials.
material_definition = container.getMetaDataEntry("definition")
if (not self.machine.supports_abstract_color) and container.getMetaDataEntry("abstract_color", False):
return
base_file = container.getMetaDataEntry("base_file")
if base_file in self.machine.exclude_materials:
if self.machine.isExcludedMaterialBaseFile(base_file):
return # Material is forbidden for this printer.
if base_file not in self.materials: # Completely new base file. Always better than not having a file as long as it matches our set-up.
if material_definition != "fdmprinter" and material_definition != self.machine.container_id:

View File

@ -16,8 +16,6 @@ if TYPE_CHECKING:
import sys
from UM.Platform import Platform
if Platform.isWindows():
if hasattr(sys, "frozen"):
import win32timezone
from keyring.backends.Windows import WinVaultKeyring
keyring.set_keyring(WinVaultKeyring())
if Platform.isOSX():

View File

@ -0,0 +1,104 @@
# Copyright (c) 2024 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.
from UM.Resources import Resources
import json
from typing import Dict, List, Optional
class FormatMaps:
# A map from the printer-type in their native file-formats to the internal name we use.
PRINTER_TYPE_NAME = {
"fire_e": "ultimaker_method",
"lava_f": "ultimaker_methodx",
"magma_10": "ultimaker_methodxl",
"sketch": "ultimaker_sketch",
"sketch_large": "ultimaker_sketch_large",
"sketch_sprint": "ultimaker_sketch_sprint"
}
# A map from the extruder-name in their native file-formats to the internal name we use.
EXTRUDER_NAME_MAP = {
"mk14_hot": "1XA",
"mk14_hot_s": "2XA",
"mk14_c": "1C",
"mk14": "1A",
"mk14_s": "2A",
"mk14_e": "LABS"
}
# A map from the material-name in their native file-formats to some info, including the internal name we use.
MATERIAL_MAP = {
"abs": {"name": "ABS", "guid": "e0f1d581-cc6b-4e36-8f3c-3f5601ecba5f"},
"abs-cf10": {"name": "ABS-CF", "guid": "495a0ce5-9daf-4a16-b7b2-06856d82394d"},
"abs-wss1": {"name": "ABS-R", "guid": "88c8919c-6a09-471a-b7b6-e801263d862d"},
"asa": {"name": "ASA", "guid": "f79bc612-21eb-482e-ad6c-87d75bdde066"},
"nylon12-cf": {"name": "Nylon 12 CF", "guid": "3c6f2877-71cc-4760-84e6-4b89ab243e3b"},
"nylon-cf": {"name": "Nylon CF", "guid": "17abb865-ca73-4ccd-aeda-38e294c9c60b"},
"pet": {"name": "PETG", "guid": "2d004bbd-d1bb-47f8-beac-b066702d5273"},
"pla": {"name": "PLA", "guid": "abb9c58e-1f56-48d1-bd8f-055fde3a5b56"},
"pva": {"name": "PVA", "guid": "add51ef2-86eb-4c39-afd5-5586564f0715"},
"wss1": {"name": "RapidRinse", "guid": "a140ef8f-4f26-4e73-abe0-cfc29d6d1024"},
"sr30": {"name": "SR-30", "guid": "77873465-83a9-4283-bc44-4e542b8eb3eb"},
"im-pla": {"name": "Tough", "guid": "de031137-a8ca-4a72-bd1b-17bb964033ad"}
# /!\ When changing this list, make sure the changes are reported accordingly on Digital Factory
}
__inverse_printer_name: Optional[Dict[str, str]] = None
__inverse_extruder_type: Optional[Dict[str, str]] = None
__inverse_material_map: Optional[Dict[str, str]] = None
__product_to_id_map: Optional[Dict[str, List[str]]] = None
@classmethod
def getInversePrinterNameMap(cls) -> Dict[str, str]:
"""Returns the inverse of the printer name map, that is, from the internal name to the name used in output."""
if cls.__inverse_printer_name is not None:
return cls.__inverse_printer_name
cls.__inverse_printer_name = {}
for key, value in cls.PRINTER_TYPE_NAME.items():
cls.__inverse_printer_name[value] = key
return cls.__inverse_printer_name
@classmethod
def getInverseExtruderTypeMap(cls) -> Dict[str, str]:
"""Returns the inverse of the extruder type map, that is, from the internal name to the name used in output."""
if cls.__inverse_extruder_type is not None:
return cls.__inverse_extruder_type
cls.__inverse_extruder_type = {}
for key, value in cls.EXTRUDER_NAME_MAP.items():
cls.__inverse_extruder_type[value] = key
return cls.__inverse_extruder_type
@classmethod
def getInverseMaterialMap(cls) -> Dict[str, str]:
"""Returns the inverse of the material map, that is, from the internal name to the name used in output.
Note that this drops the extra info saved in the non-inverse material map, use that if you need it.
"""
if cls.__inverse_material_map is not None:
return cls.__inverse_material_map
cls.__inverse_material_map = {}
for key, value in cls.MATERIAL_MAP.items():
cls.__inverse_material_map[value["name"]] = key
return cls.__inverse_material_map
@classmethod
def getProductIdMap(cls) -> Dict[str, List[str]]:
"""Gets a mapping from product names (for example, in the XML files) to their definition IDs.
This loads the mapping from a file.
"""
if cls.__product_to_id_map is not None:
return cls.__product_to_id_map
product_to_id_file = Resources.getPath(Resources.Texts, "product_to_id.json")
with open(product_to_id_file, encoding = "utf-8") as f:
contents = ""
for line in f:
contents += line if "#" not in line else "".join([line.replace("#", str(n)) for n in range(1, 12)])
cls.__product_to_id_map = json.loads(contents)
cls.__product_to_id_map = {key: [value] for key, value in cls.__product_to_id_map.items()}
# This also loads "Ultimaker S5" -> "ultimaker_s5" even though that is not strictly necessary with the default to change spaces into underscores.
# However, it is not always loaded with that default; this mapping is also used in serialize() without that default.
return cls.__product_to_id_map
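A small usage sketch (not part of the diff) of the lookup tables defined above; the printed values follow directly from the maps:

# Usage sketch for FormatMaps; getProductIdMap() is omitted because it needs an initialised Resources search path.
from cura.PrinterOutput.FormatMaps import FormatMaps

print(FormatMaps.PRINTER_TYPE_NAME["sketch"])                      # -> "ultimaker_sketch"
print(FormatMaps.getInversePrinterNameMap()["ultimaker_method"])   # -> "fire_e"
print(FormatMaps.EXTRUDER_NAME_MAP["mk14_c"])                      # -> "1C"
print(FormatMaps.getInverseMaterialMap()["PLA"])                   # -> "pla"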

View File

@ -1,9 +1,10 @@
# Copyright (c) 2018 Ultimaker B.V.
# Copyright (c) 2024 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.
from typing import Optional
from PyQt6.QtCore import pyqtProperty, QObject, pyqtSignal
from cura.PrinterOutput.FormatMaps import FormatMaps
from .MaterialOutputModel import MaterialOutputModel
@ -45,16 +46,8 @@ class ExtruderConfigurationModel(QObject):
@staticmethod
def applyNameMappingHotend(hotendId) -> str:
_EXTRUDER_NAME_MAP = {
"mk14_hot":"1XA",
"mk14_hot_s":"2XA",
"mk14_c":"1C",
"mk14":"1A",
"mk14_s":"2A",
"mk14_e": "LABS"
}
if hotendId in _EXTRUDER_NAME_MAP:
return _EXTRUDER_NAME_MAP[hotendId]
if hotendId in FormatMaps.EXTRUDER_NAME_MAP:
return FormatMaps.EXTRUDER_NAME_MAP[hotendId]
return hotendId
@pyqtProperty(str, fset = setHotendID, notify = extruderConfigurationChanged)

View File

@ -1,9 +1,10 @@
# Copyright (c) 2017 Ultimaker B.V.
# Copyright (c) 2024 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.
from typing import Optional
from PyQt6.QtCore import pyqtProperty, QObject
from cura.PrinterOutput.FormatMaps import FormatMaps
class MaterialOutputModel(QObject):
@ -23,30 +24,9 @@ class MaterialOutputModel(QObject):
@staticmethod
def getMaterialFromDefinition(guid, type, brand, name):
_MATERIAL_MAP = { "abs" :{"name" :"ABS" ,"guid": "2780b345-577b-4a24-a2c5-12e6aad3e690"},
"abs-cf10" :{"name": "ABS-CF" ,"guid": "495a0ce5-9daf-4a16-b7b2-06856d82394d"},
"abs-wss1" :{"name" :"ABS-R" ,"guid": "88c8919c-6a09-471a-b7b6-e801263d862d"},
"asa" :{"name" :"ASA" ,"guid": "f79bc612-21eb-482e-ad6c-87d75bdde066"},
"nylon12-cf":{"name": "Nylon 12 CF" ,"guid": "3c6f2877-71cc-4760-84e6-4b89ab243e3b"},
"nylon" :{"name" :"Nylon" ,"guid": "283d439a-3490-4481-920c-c51d8cdecf9c"},
"pc" :{"name" :"PC" ,"guid": "62414577-94d1-490d-b1e4-7ef3ec40db02"},
"petg" :{"name" :"PETG" ,"guid": "69386c85-5b6c-421a-bec5-aeb1fb33f060"},
"pla" :{"name" :"PLA" ,"guid": "abb9c58e-1f56-48d1-bd8f-055fde3a5b56"},
"pva" :{"name" :"PVA" ,"guid": "add51ef2-86eb-4c39-afd5-5586564f0715"},
"wss1" :{"name" :"RapidRinse" ,"guid": "a140ef8f-4f26-4e73-abe0-cfc29d6d1024"},
"sr30" :{"name" :"SR-30" ,"guid": "77873465-83a9-4283-bc44-4e542b8eb3eb"},
"bvoh" :{"name" :"BVOH" ,"guid": "923e604c-8432-4b09-96aa-9bbbd42207f4"},
"cpe" :{"name" :"CPE" ,"guid": "da1872c1-b991-4795-80ad-bdac0f131726"},
"hips" :{"name" :"HIPS" ,"guid": "a468d86a-220c-47eb-99a5-bbb47e514eb0"},
"tpu" :{"name" :"TPU 95A" ,"guid": "19baa6a9-94ff-478b-b4a1-8157b74358d2"},
"im-pla" :{"name": "Tough" ,"guid": "de031137-a8ca-4a72-bd1b-17bb964033ad"}
}
if guid is None and brand != "empty" and type in _MATERIAL_MAP:
name = _MATERIAL_MAP[type]["name"]
guid = _MATERIAL_MAP[type]["guid"]
if guid is None and brand != "empty" and type in FormatMaps.MATERIAL_MAP:
name = FormatMaps.MATERIAL_MAP[type]["name"]
guid = FormatMaps.MATERIAL_MAP[type]["guid"]
return name, guid

View File

@ -1,9 +1,12 @@
# Copyright (c) 2018 Ultimaker B.V.
# Copyright (c) 2025 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.
from PyQt6.QtCore import pyqtProperty, QObject, pyqtSignal
from typing import List
from UM.Settings.ContainerRegistry import ContainerRegistry
from UM.Settings.DefinitionContainer import DefinitionContainer
MYPY = False
if MYPY:
from cura.PrinterOutput.Models.ExtruderConfigurationModel import ExtruderConfigurationModel
@ -68,6 +71,15 @@ class PrinterConfigurationModel(QObject):
return True
return False
@pyqtProperty("QStringList", constant=True)
def validCoresForPrinterType(self) -> List[str]:
printers = ContainerRegistry.getInstance().findContainersMetadata(
ignore_case=True, type="machine", name=self._printer_type, container_type=DefinitionContainer)
id = printers[0]["id"] if len(printers) > 0 and "id" in printers[0] else ""
definitions = ContainerRegistry.getInstance().findContainersMetadata(
ignore_case=True, type="variant", definition=id+"*")
return [x["name"] for x in definitions]
def __str__(self):
message_chunks = []
message_chunks.append("Printer type: " + self._printer_type)

View File

@ -1,4 +1,4 @@
# Copyright (c) 2021 Ultimaker B.V.
# Copyright (c) 2024 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.
from UM.FileHandler.FileHandler import FileHandler #For typing.
@ -6,6 +6,7 @@ from UM.Logger import Logger
from UM.Scene.SceneNode import SceneNode #For typing.
from cura.API import Account
from cura.CuraApplication import CuraApplication
from cura.PrinterOutput.FormatMaps import FormatMaps
from cura.PrinterOutput.PrinterOutputDevice import PrinterOutputDevice, ConnectionState, ConnectionType
@ -419,14 +420,8 @@ class NetworkedPrinterOutputDevice(PrinterOutputDevice):
@staticmethod
def applyPrinterTypeMapping(printer_type):
_PRINTER_TYPE_NAME = {
"fire_e": "ultimaker_method",
"lava_f": "ultimaker_methodx",
"magma_10": "ultimaker_methodxl",
"sketch": "ultimaker_sketch"
}
if printer_type in _PRINTER_TYPE_NAME:
return _PRINTER_TYPE_NAME[printer_type]
if printer_type in FormatMaps.PRINTER_TYPE_NAME:
return FormatMaps.PRINTER_TYPE_NAME[printer_type]
return printer_type
@pyqtProperty(str, constant = True)
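Since the inlined dictionary is replaced by FormatMaps, the mapping behaviour should stay the same; a quick sanity sketch, assuming FormatMaps.PRINTER_TYPE_NAME carries the same entries as the map it replaces (hypothetical check, not part of the commit):

from cura.PrinterOutput.NetworkedPrinterOutputDevice import NetworkedPrinterOutputDevice

print(NetworkedPrinterOutputDevice.applyPrinterTypeMapping("lava_f"))        # ultimaker_methodx
print(NetworkedPrinterOutputDevice.applyPrinterTypeMapping("ultimaker_s7"))  # unknown ids pass through unchanged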

View File

@ -5,7 +5,7 @@ from typing import Any, cast, List, Optional, Dict
from PyQt6.QtCore import pyqtProperty, pyqtSignal, QObject
from UM.Application import Application
from UM.Decorators import override
from UM.Decorators import CachedMemberFunctions, override
from UM.FlameProfiler import pyqtSlot
from UM.Logger import Logger
from UM.Settings.ContainerStack import ContainerStack, InvalidContainerStackError
@ -237,6 +237,7 @@ class CuraContainerStack(ContainerStack):
:param new_value: The new value to set the property to.
"""
CachedMemberFunctions.clearInstanceCache(self)
container_index = _ContainerIndexes.UserChanges
self._containers[container_index].setProperty(key, property_name, property_value, container, set_from_cache)

View File

@ -1,22 +1,19 @@
# Copyright (c) 2018 Ultimaker B.V.
# Copyright (c) 2024 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.
from typing import Any, Dict, TYPE_CHECKING, Optional
from PyQt6.QtCore import pyqtProperty, pyqtSignal
from UM.Decorators import override
from UM.Decorators import CachedMemberFunctions, override
from UM.MimeTypeDatabase import MimeType, MimeTypeDatabase
from UM.Settings.ContainerStack import ContainerStack
from UM.Settings.ContainerRegistry import ContainerRegistry
from UM.Settings.Interfaces import ContainerInterface, PropertyEvaluationContext
from UM.Util import parseBool
import cura.CuraApplication
from . import Exceptions
from .CuraContainerStack import CuraContainerStack, _ContainerIndexes
from .ExtruderManager import ExtruderManager
if TYPE_CHECKING:
from cura.Settings.GlobalStack import GlobalStack
@ -86,6 +83,7 @@ class ExtruderStack(CuraContainerStack):
def setCompatibleMaterialDiameter(self, value: float) -> None:
old_approximate_diameter = self.getApproximateMaterialDiameter()
if self.getCompatibleMaterialDiameter() != value:
CachedMemberFunctions.clearInstanceCache(self)
self.definitionChanges.setProperty("material_diameter", "value", value)
self.compatibleMaterialDiameterChanged.emit()
@ -140,7 +138,11 @@ class ExtruderStack(CuraContainerStack):
context.popContainer()
return result
limit_to_extruder = super().getProperty(key, "limit_to_extruder", context)
if not context:
context = PropertyEvaluationContext(self)
if "extruder_position" not in context.context:
context.context["extruder_position"] = super().getProperty(key, "limit_to_extruder", context)
limit_to_extruder = context.context["extruder_position"]
if limit_to_extruder is not None:
limit_to_extruder = str(limit_to_extruder)

View File

@ -398,7 +398,7 @@ class MachineManager(QObject):
self.setVariantByName(extruder.getMetaDataEntry("position"), machine_node.preferred_variant_name)
variant_node = machine_node.variants.get(machine_node.preferred_variant_name)
material_node = variant_node.materials.get(extruder.material.getMetaDataEntry("base_file"))
material_node = variant_node.materials.get(extruder.material.getMetaDataEntry("base_file")) if variant_node else None
if material_node is None:
Logger.log("w", "An extruder has an unknown material, switching it to the preferred material")
if not self.setMaterialById(extruder.getMetaDataEntry("position"), machine_node.preferred_material):
@ -1678,7 +1678,7 @@ class MachineManager(QObject):
intent_category = self.activeIntentCategory,
intent_name = IntentCategoryModel.translation(self.activeIntentCategory, "name", self.activeIntentCategory.title()),
custom_profile = self.activeQualityOrQualityChangesName if global_stack.qualityChanges is not empty_quality_changes_container else None,
layer_height = self.activeQualityLayerHeight if self.isActiveQualitySupported else None,
layer_height = float("{:.2f}".format(self.activeQualityLayerHeight)) if self.isActiveQualitySupported else None,
is_experimental = self.isActiveQualityExperimental and self.isActiveQualitySupported
)
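The added rounding keeps the reported layer height numeric while trimming it to two decimals; the conversion in isolation (illustrative value):

layer_height = 0.16666666
print(float("{:.2f}".format(layer_height)))   # 0.17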

View File

@ -1,5 +1,6 @@
# Copyright (c) 2020 Ultimaker B.V.
# Uranium is released under the terms of the LGPLv3 or higher.
import math
from PyQt6.QtCore import Qt, QCoreApplication, QTimer
from PyQt6.QtGui import QPixmap, QColor, QFont, QPen, QPainter
@ -51,6 +52,7 @@ class CuraSplashScreen(QSplashScreen):
self._last_update_time = time.time()
# Since we don't know how much time actually passed, check how many 50 ms intervals have elapsed.
self._loading_image_rotation_angle -= 10 * (time_since_last_update * 1000 / 50)
self._loading_image_rotation_angle = math.fmod(self._loading_image_rotation_angle, 360)
self.repaint()
# Override the mousePressEvent so the splashscreen doesn't disappear when clicked
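The added math.fmod keeps the accumulated angle bounded however long the splash screen spins; the arithmetic in isolation (illustrative numbers, not Cura state):

import math

angle = -3570.0                                       # accumulated rotation after a long load
time_since_last_update = 0.125                        # seconds since the previous repaint (assumed)
angle -= 10 * (time_since_last_update * 1000 / 50)    # -25 degrees for this update
angle = math.fmod(angle, 360)                         # wraps to -355.0 instead of drifting to -3595.0
print(angle)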

View File

@ -217,8 +217,7 @@ class WelcomePagesModel(ListModel):
def _getBuiltinWelcomePagePath(page_filename: str) -> QUrl:
"""Convenience function to get QUrl path to pages that's located in "resources/qml/WelcomePages"."""
from cura.CuraApplication import CuraApplication
return QUrl.fromLocalFile(Resources.getPath(CuraApplication.ResourceTypes.QmlFiles,
os.path.join("WelcomePages", page_filename)))
return QUrl.fromLocalFile(Resources.getPath(CuraApplication.ResourceTypes.QmlFiles, "WelcomePages", page_filename))
# FIXME: HACKs for optimization that we don't update the model every time the active machine gets changed.
def _onActiveMachineChanged(self) -> None:

View File

@ -47,6 +47,7 @@ AppDir:
QT_PLUGIN_PATH: "$APPDIR/qt/plugins"
QML2_IMPORT_PATH: "$APPDIR/qt/qml"
QT_QPA_PLATFORMTHEME: xdgdesktopportal
QT_QPA_PLATFORM: xcb
GDK_PIXBUF_MODULEDIR: $APPDIR/usr/lib/x86_64-linux-gnu/gdk-pixbuf-2.0/2.10.0/loaders
GDK_PIXBUF_MODULE_FILE: $APPDIR/usr/lib/x86_64-linux-gnu/gdk-pixbuf-2.0/2.10.0/loaders.cache
path_mappings:

View File

@ -25,7 +25,8 @@ def build_dmg(source_path: str, dist_path: str, filename: str, app_name: str) ->
f"{dist_path}/{filename}",
f"{dist_path}/{app_name}"]
subprocess.run(arguments)
print(f"Run create dmg command [{" ".join([str(arg) for arg in arguments])}]")
subprocess.run(arguments, check=True)
def build_pkg(dist_path: str, app_filename: str, component_filename: str, cura_version: str, installer_filename: str) -> None:
@ -56,7 +57,8 @@ def build_pkg(dist_path: str, app_filename: str, component_filename: str, cura_v
else:
print("CODESIGN_IDENTITY missing. The installer is not being signed")
subprocess.run(pkg_build_arguments)
print(f"Run package build command [{" ".join([str(arg) for arg in pkg_build_arguments])}]")
subprocess.run(pkg_build_arguments, check=True)
# This automatically generates a distribution.xml file that is used to build the installer.
# If you want to make any changes to how the installer functions, this file should be changed to do that.
@ -67,7 +69,8 @@ def build_pkg(dist_path: str, app_filename: str, component_filename: str, cura_v
"--package", Path(dist_path, component_filename), # Package that will be inside installer
Path(dist_path, "distribution.xml"), # Output location for sythesized distributions file
]
subprocess.run(distribution_creation_arguments)
print(f"Run distribution creation command [{" ".join([str(arg) for arg in distribution_creation_arguments])}]")
subprocess.run(distribution_creation_arguments, check=True)
# This creates the distributable package (Installer)
installer_creation_arguments = [
@ -80,7 +83,8 @@ def build_pkg(dist_path: str, app_filename: str, component_filename: str, cura_v
if codesign_identity:
installer_creation_arguments.extend(["--sign", codesign_identity])
subprocess.run(installer_creation_arguments)
print(f"Run installer creation command [{" ".join([str(arg) for arg in installer_creation_arguments])}]")
subprocess.run(installer_creation_arguments, check=True)
def notarize_file(dist_path: str, filename: str) -> None:
@ -99,7 +103,8 @@ def notarize_file(dist_path: str, filename: str) -> None:
Path(dist_path, filename)
]
subprocess.run(notarize_arguments)
print(f"Run notarize command [{" ".join([str(arg) for arg in notarize_arguments])}]")
subprocess.run(notarize_arguments, check=True)
def create_pkg_installer(filename: str, dist_path: str, cura_version: str, app_name: str) -> None:
@ -149,7 +154,7 @@ if __name__ == "__main__":
parser.add_argument("--app_name", required = True, type = str, help = "Filename of the .app that will be contained within the dmg/pkg")
args = parser.parse_args()
cura_version = args.cura_conan_version.split("/")[-1]
cura_version = args.cura_conan_version.replace("+","-") # + is not allowed for bundle identifier
app_name = f"{args.app_name}.app"

View File

@ -133,6 +133,7 @@ CreateShortCut "$SMPROGRAMS\{{ app_name }}\UltiMaker Cura website.lnk" "$INSTDIR
WriteRegStr ${REG_ROOT} "${REG_APP_PATH}" "" "$INSTDIR\${MAIN_APP_EXE}"
WriteRegStr ${REG_ROOT} "${UNINSTALL_PATH}" "DisplayName" "${APP_NAME}"
WriteRegStr ${REG_ROOT} "${UNINSTALL_PATH}" "UninstallString" "$INSTDIR\uninstall.exe"
WriteRegStr ${REG_ROOT} "${UNINSTALL_PATH}" "QuietUninstallString" '"$INSTDIR\uninstall.exe" /S'
WriteRegStr ${REG_ROOT} "${UNINSTALL_PATH}" "DisplayIcon" "$INSTDIR\${MAIN_APP_EXE}"
WriteRegStr ${REG_ROOT} "${UNINSTALL_PATH}" "DisplayVersion" "${VERSION}"
WriteRegStr ${REG_ROOT} "${UNINSTALL_PATH}" "Publisher" "${COMP_NAME}"

View File

@ -5,6 +5,7 @@
import os
import argparse # Command line arguments parsing and help.
import subprocess
import semver
import shutil
from datetime import datetime
@ -14,11 +15,12 @@ from pathlib import Path
from jinja2 import Template
def generate_nsi(source_path: str, dist_path: str, filename: str):
def generate_nsi(source_path: str, dist_path: str, filename: str, version: str):
dist_loc = Path(os.getcwd(), dist_path)
source_loc = Path(os.getcwd(), source_path)
instdir = Path("$INSTDIR")
dist_paths = [p.relative_to(dist_loc.joinpath("UltiMaker-Cura")) for p in sorted(dist_loc.joinpath("UltiMaker-Cura").rglob("*")) if p.is_file()]
parsed_version = semver.Version.parse(version)
mapped_out_paths = {}
for dist_path in dist_paths:
if "__pycache__" not in dist_path.parts:
@ -42,12 +44,12 @@ def generate_nsi(source_path: str, dist_path: str, filename: str):
nsis_content = template.render(
app_name = f"UltiMaker Cura {os.getenv('CURA_VERSION_FULL')}",
app_name = f"UltiMaker Cura {version}",
main_app = "UltiMaker-Cura.exe",
version = os.getenv('CURA_VERSION_FULL'),
version_major = os.environ.get("CURA_VERSION_MAJOR"),
version_minor = os.environ.get("CURA_VERSION_MINOR"),
version_patch = os.environ.get("CURA_VERSION_PATCH"),
version = version,
version_major = str(parsed_version.major),
version_minor = str(parsed_version.minor),
version_patch = str(parsed_version.patch),
company = "UltiMaker",
web_site = "https://ultimaker.com",
year = datetime.now().year,
@ -74,9 +76,10 @@ def build(dist_path: str):
if __name__ == "__main__":
parser = argparse.ArgumentParser(description = "Create Windows exe installer of Cura.")
parser.add_argument("source_path", type=str, help="Path to Conan install Cura folder.")
parser.add_argument("dist_path", type=str, help="Path to Pyinstaller dist folder")
parser.add_argument("filename", type = str, help = "Filename of the exe (e.g. 'UltiMaker-Cura-5.1.0-beta-Windows-X64.exe')")
parser.add_argument("--source_path", type=str, help="Path to Conan install Cura folder.")
parser.add_argument("--dist_path", type=str, help="Path to Pyinstaller dist folder")
parser.add_argument("--filename", type = str, help = "Filename of the exe (e.g. 'UltiMaker-Cura-5.1.0-beta-Windows-X64.exe')")
parser.add_argument("--version", type=str, help="The full cura version, e.g. 5.9.0-beta.1+24132")
args = parser.parse_args()
generate_nsi(args.source_path, args.dist_path, args.filename)
generate_nsi(args.source_path, args.dist_path, args.filename, args.version)
build(args.dist_path)
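The new --version argument is broken apart with the semver package rather than separate CURA_VERSION_* environment variables; assuming the semver 3.x API (where Version.parse exists), the example string from the help text splits up as:

import semver

v = semver.Version.parse("5.9.0-beta.1+24132")
print(v.major, v.minor, v.patch)   # 5 9 0  -> rendered into the NSIS template
print(v.prerelease, v.build)       # beta.1 24132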

View File

@ -7,6 +7,7 @@ import os
import shutil
import subprocess
import uuid
import semver
from datetime import datetime
from pathlib import Path
@ -20,11 +21,12 @@ def work_path(filename: Path) -> Path:
return filename.parent
def generate_wxs(source_path: Path, dist_path: Path, filename: Path, app_name: str):
def generate_wxs(source_path: Path, dist_path: Path, filename: Path, app_name: str, version: str):
source_loc = Path(os.getcwd(), source_path)
dist_loc = Path(os.getcwd(), dist_path)
work_loc = work_path(filename)
work_loc.mkdir(parents=True, exist_ok=True)
parsed_version = semver.Version.parse(version)
jinja_template_path = Path(source_loc.joinpath("packaging", "msi", "UltiMaker-Cura.wxs.jinja"))
with open(jinja_template_path, "r") as f:
@ -33,10 +35,10 @@ def generate_wxs(source_path: Path, dist_path: Path, filename: Path, app_name: s
wxs_content = template.render(
app_name=f"{app_name}",
main_app="UltiMaker-Cura.exe",
version=os.getenv('CURA_VERSION_FULL'),
version_major=os.environ.get("CURA_VERSION_MAJOR"),
version_minor=os.environ.get("CURA_VERSION_MINOR"),
version_patch=os.environ.get("CURA_VERSION_PATCH"),
version=version,
version_major=str(parsed_version.major),
version_minor=str(parsed_version.minor),
version_patch=str(parsed_version.patch),
company="UltiMaker",
web_site="https://ultimaker.com",
year=datetime.now().year,
@ -111,12 +113,13 @@ def build(dist_path: Path, filename: Path):
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Create Windows msi installer of Cura.")
parser.add_argument("source_path", type=Path, help="Path to Conan install Cura folder.")
parser.add_argument("dist_path", type=Path, help="Path to Pyinstaller dist folder")
parser.add_argument("filename", type=Path,
parser.add_argument("--source_path", type=Path, help="Path to Conan install Cura folder.")
parser.add_argument("--dist_path", type=Path, help="Path to Pyinstaller dist folder")
parser.add_argument("--filename", type=Path,
help="Filename of the exe (e.g. 'UltiMaker-Cura-5.1.0-beta-Windows-X64.msi')")
parser.add_argument("name", type=str, help="App name (e.g. 'UltiMaker Cura')")
parser.add_argument("--name", type=str, help="App name (e.g. 'UltiMaker Cura')")
parser.add_argument("--version", type=str, help="The full cura version, e.g. 5.9.0-beta.1+24132")
args = parser.parse_args()
generate_wxs(args.source_path.resolve(), args.dist_path.resolve(), args.filename.resolve(), args.name)
generate_wxs(args.source_path.resolve(), args.dist_path.resolve(), args.filename.resolve(), args.name, args.version)
cleanup_artifacts(args.dist_path.resolve())
build(args.dist_path.resolve(), args.filename)

View File

@ -132,6 +132,7 @@ class ThreeMFReader(MeshReader):
vertices = numpy.resize(data, (int(data.size / 3), 3))
mesh_builder.setVertices(vertices)
mesh_builder.calculateNormals(fast=True)
mesh_builder.setMeshId(node_id)
if file_name:
# The filename is used to give the user the option to reload the file if it is changed on disk
# It is only set for the root node of the 3mf file

View File

@ -1354,7 +1354,6 @@ class ThreeMFWorkspaceReader(WorkspaceReader):
return
machine_manager.setQualityChangesGroup(quality_changes_group, no_dialog = True)
else:
self._quality_type_to_apply = self._quality_type_to_apply.lower() if self._quality_type_to_apply else None
quality_group_dict = container_tree.getCurrentQualityGroups()
if self._quality_type_to_apply in quality_group_dict:
quality_group = quality_group_dict[self._quality_type_to_apply]

View File

@ -8,9 +8,12 @@ from io import StringIO
from threading import Lock
import zipfile
from typing import Dict, Any
from pathlib import Path
from zipfile import ZipFile
from UM.Application import Application
from UM.Logger import Logger
from UM.PluginRegistry import PluginRegistry
from UM.Preferences import Preferences
from UM.Settings.ContainerRegistry import ContainerRegistry
from UM.Workspace.WorkspaceWriter import WorkspaceWriter
@ -33,7 +36,7 @@ class ThreeMFWorkspaceWriter(WorkspaceWriter):
if self._ucp_model != model:
self._ucp_model = model
def _write(self, stream, nodes, mode=WorkspaceWriter.OutputMode.BinaryMode):
def _write(self, stream, nodes, mode, include_log):
application = Application.getInstance()
machine_manager = application.getMachineManager()
@ -79,6 +82,11 @@ class ThreeMFWorkspaceWriter(WorkspaceWriter):
if self._ucp_model is not None:
user_settings_data = self._getUserSettings(self._ucp_model)
ThreeMFWriter._storeMetadataJson(user_settings_data, archive, USER_SETTINGS_PATH)
# Write log file
if include_log:
ThreeMFWorkspaceWriter._writeLogFile(archive)
except PermissionError:
self.setInformation(catalog.i18nc("@error:zip", "No permission to write the workspace here."))
Logger.error("No permission to write workspace to this stream.")
@ -125,8 +133,8 @@ class ThreeMFWorkspaceWriter(WorkspaceWriter):
return True
def write(self, stream, nodes, mode=WorkspaceWriter.OutputMode.BinaryMode):
success = self._write(stream, nodes, mode=WorkspaceWriter.OutputMode.BinaryMode)
def write(self, stream, nodes, mode=WorkspaceWriter.OutputMode.BinaryMode, **kwargs):
success = self._write(stream, nodes, WorkspaceWriter.OutputMode.BinaryMode, kwargs.get("include_log", False))
self._ucp_model = None
return success
@ -191,6 +199,17 @@ class ThreeMFWorkspaceWriter(WorkspaceWriter):
Logger.error("File became inaccessible while writing to it: {archive_filename}".format(archive_filename = archive.fp.name))
return
@staticmethod
def _writeLogFile(archive: ZipFile) -> None:
"""Helper function that writes the Cura log file to the archive.
:param archive: The archive to write to.
"""
file_logger = PluginRegistry.getInstance().getPluginObject("FileLogger")
file_logger.flush()
for file_path in file_logger.getFilesPaths():
archive.write(file_path, arcname=f"log/{Path(file_path).name}")
@staticmethod
def _getUserSettings(model: SettingsExportModel) -> Dict[str, Dict[str, Any]]:
user_settings = {}

View File

@ -114,22 +114,24 @@ class ThreeMFWriter(MeshWriter):
mesh_data = um_node.getMeshData()
node_matrix = um_node.getLocalTransformation()
node_matrix.preMultiply(transformation)
if center_mesh:
node_matrix = Matrix()
center_matrix = Matrix()
# compensate for original center position, if object(s) is/are not around its zero position
if mesh_data is not None:
extents = mesh_data.getExtents()
if extents is not None:
# We use a different coordinate space while writing, so flip Z and Y
center_vector = Vector(extents.center.x, extents.center.y, extents.center.z)
node_matrix.setByTranslation(center_vector)
node_matrix.multiply(um_node.getLocalTransformation())
else:
node_matrix = um_node.getLocalTransformation()
center_vector = Vector(-extents.center.x, -extents.center.y, -extents.center.z)
center_matrix.setByTranslation(center_vector)
node_matrix.preMultiply(center_matrix)
matrix_string = ThreeMFWriter._convertMatrixToString(node_matrix.preMultiply(transformation))
matrix_string = ThreeMFWriter._convertMatrixToString(node_matrix)
savitar_node.setTransformation(matrix_string)
if mesh_data is not None:
savitar_node.getMeshData().setVerticesFromBytes(mesh_data.getVerticesAsByteArray())
indices_array = mesh_data.getIndicesAsByteArray()
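The centering above amounts to composing a translation by minus the mesh center on the left of the node's transform; the same arithmetic in plain numpy (a sketch, assuming Uranium's preMultiply composes its argument on the left):

import numpy as np

def translation(offset):
    m = np.identity(4)
    m[0:3, 3] = offset
    return m

center = np.array([12.0, 3.0, -4.0])        # assumed extents.center of the mesh
local = translation(center)                 # assumed local transform placing the object at 'center'
centered = translation(-center) @ local     # node_matrix.preMultiply(center_matrix)
print(np.allclose(centered[0:3, 3], 0.0))   # True: the mesh is written around the origin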

View File

@ -68,6 +68,9 @@ class CuraEngineBackend(QObject, Backend):
"""
super().__init__()
self._init_done = False
self._immediate_slice_after_init = False
# Find out where the engine is located, and how it is called.
# This depends on how Cura is packaged and which OS we are running on.
executable_name = "CuraEngine"
@ -197,7 +200,8 @@ class CuraEngineBackend(QObject, Backend):
self._slicing_error_message.actionTriggered.connect(self._reportBackendError)
self._resetLastSliceTimeStats()
self._snapshot: Optional[QImage] = None
self._snapshot: Optional[QImage] = None
self._last_socket_error: Optional[Arcus.Error] = None
application.initializationFinished.connect(self.initialize)
@ -267,6 +271,10 @@ class CuraEngineBackend(QObject, Backend):
self._machine_error_checker = application.getMachineErrorChecker()
self._machine_error_checker.errorCheckFinished.connect(self._onStackErrorCheckFinished)
self._init_done = True
if self._immediate_slice_after_init:
self.slice()
def close(self) -> None:
"""Terminate the engine process.
@ -341,6 +349,11 @@ class CuraEngineBackend(QObject, Backend):
def slice(self) -> None:
"""Perform a slice of the scene."""
if not self._init_done:
self._immediate_slice_after_init = True
return
self._immediate_slice_after_init = False
self._createSnapshot()
self.startPlugins()
@ -569,7 +582,20 @@ class CuraEngineBackend(QObject, Backend):
return
# Preparation completed, send it to the backend.
self._socket.sendMessage(job.getSliceMessage())
immediate_success = self._socket.sendMessage(job.getSliceMessage())
if (not CuraApplication.getInstance().getUseExternalBackend()) and (not immediate_success):
if self._last_socket_error is not None and self._last_socket_error.getErrorCode() == Arcus.ErrorCode.MessageTooBigError:
error_txt = catalog.i18nc("@info:status", "Unable to send the model data to the engine. Please try to use a less detailed model, or reduce the number of instances.")
else:
error_txt = catalog.i18nc("@info:status", "Unable to send the model data to the engine. Please try again, or contact support.")
self._error_message = Message(error_txt,
title=catalog.i18nc("@info:title", "Unable to slice"),
message_type=Message.MessageType.WARNING)
self._error_message.show()
self.setState(BackendState.Error)
self.backendError.emit(job)
return
# Notify the user that it's now up to the backend to do its job
self.setState(BackendState.Processing)
@ -691,6 +717,7 @@ class CuraEngineBackend(QObject, Backend):
if error.getErrorCode() == Arcus.ErrorCode.Debug:
return
self._last_socket_error = error
self._terminate()
self._createSocket()
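The init-deferral added to slice() reduces to this standalone pattern (illustrative class, not Cura's actual backend):

class DeferredSlicer:
    def __init__(self):
        self._init_done = False
        self._immediate_slice_after_init = False

    def initialize(self):
        self._init_done = True
        if self._immediate_slice_after_init:
            self.slice()                               # the postponed request fires exactly once

    def slice(self):
        if not self._init_done:
            self._immediate_slice_after_init = True    # remember the request instead of dropping it
            return
        self._immediate_slice_after_init = False
        print("slicing")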

View File

@ -49,7 +49,20 @@ class StartJobResult(IntEnum):
ObjectsWithDisabledExtruder = 8
class GcodeStartEndFormatter(Formatter):
class GcodeConditionState(IntEnum):
OutsideCondition = 1
ConditionFalse = 2
ConditionTrue = 3
ConditionDone = 4
class GcodeInstruction(IntEnum):
Skip = 1
Evaluate = 2
EvaluateAndWrite = 3
class GcodeStartEndFormatter:
# Formatter class that handles token expansion in start/end gcode
# Example of a start/end gcode string:
# ```
@ -63,22 +76,50 @@ class GcodeStartEndFormatter(Formatter):
# will be used. Alternatively, if the expression is formatted as "{[expression], [extruder_nr]}",
# then the expression will be evaluated with the extruder stack of the specified extruder_nr.
_extruder_regex = re.compile(r"^\s*(?P<expression>.*)\s*,\s*(?P<extruder_nr_expr>.*)\s*$")
_instruction_regex = re.compile(r"{(?P<condition>if|else|elif|endif)?\s*(?P<expression>.*?)\s*(?:,\s*(?P<extruder_nr_expr>.*))?\s*}(?P<end_of_line>\n?)")
def __init__(self, all_extruder_settings: Dict[str, Any], default_extruder_nr: int = -1) -> None:
def __init__(self, all_extruder_settings: Dict[str, Dict[str, Any]], default_extruder_nr: int = -1) -> None:
super().__init__()
self._all_extruder_settings: Dict[str, Any] = all_extruder_settings
self._all_extruder_settings: Dict[str, Dict[str, Any]] = all_extruder_settings
self._default_extruder_nr: int = default_extruder_nr
self._cura_application = CuraApplication.getInstance()
self._extruder_manager = ExtruderManager.getInstance()
def get_field(self, field_name, args: [str], kwargs: dict) -> Tuple[str, str]:
# The get_field method parses all fields in the format-string and passes them individually to the get_value method.
# E.g. for a string "Hello {foo.bar}", the complete field "foo.bar" would be passed to get_field, and then
# the individual parts "foo" and "bar" would be passed to get_value. This poses a problem for us, because we want
# to parse the entire field as a single expression. To solve this, we override the get_field method and return
# the entire field as the expression.
return self.get_value(field_name, args, kwargs), field_name
def format(self, text: str) -> str:
remaining_text: str = text
result: str = ""
def get_value(self, expression: str, args: [str], kwargs: dict) -> str:
self._condition_state: GcodeConditionState = GcodeConditionState.OutsideCondition
while len(remaining_text) > 0:
next_code_match = self._instruction_regex.search(remaining_text)
if next_code_match is not None:
expression_start, expression_end = next_code_match.span()
if expression_start > 0:
result += self._process_statement(remaining_text[:expression_start])
result += self._process_code(next_code_match)
remaining_text = remaining_text[expression_end:]
else:
result += self._process_statement(remaining_text)
remaining_text = ""
return result
def _process_statement(self, statement: str) -> str:
if self._condition_state in [GcodeConditionState.OutsideCondition, GcodeConditionState.ConditionTrue]:
return statement
else:
return ""
def _process_code(self, code: re.Match) -> str:
condition: Optional[str] = code.group("condition")
expression: Optional[str] = code.group("expression")
extruder_nr_expr: Optional[str] = code.group("extruder_nr_expr")
end_of_line: Optional[str] = code.group("end_of_line")
# The following variables are not settings, but only become available after slicing.
# when these variables are encountered, we return them as-is. They are replaced later
@ -87,53 +128,100 @@ class GcodeStartEndFormatter(Formatter):
if expression in post_slice_data_variables:
return f"{{{expression}}}"
extruder_nr = str(self._default_extruder_nr)
extruder_nr: str = str(self._default_extruder_nr)
instruction: GcodeInstruction = GcodeInstruction.Skip
# The settings may specify a specific extruder to use. This is done by
# formatting the expression as "{expression}, {extruder_nr_expr}". If the
# expression is formatted like this, we extract the extruder_nr and use
# it to get the value from the correct extruder stack.
match = self._extruder_regex.match(expression)
if match:
expression = match.group("expression")
extruder_nr_expr = match.group("extruder_nr_expr")
if extruder_nr_expr.isdigit():
extruder_nr = extruder_nr_expr
if condition is None:
# This is a classic statement
if self._condition_state in [GcodeConditionState.OutsideCondition, GcodeConditionState.ConditionTrue]:
# Not inside a false condition block => evaluate and write
instruction = GcodeInstruction.EvaluateAndWrite
else:
# This is a condition statement, first check validity
if condition == "if":
if self._condition_state != GcodeConditionState.OutsideCondition:
raise SyntaxError("Nested conditions are not supported")
else:
# We get the value of the extruder_nr_expr from `_all_extruder_settings` dictionary
# rather than the global container stack. The `_all_extruder_settings["-1"]` is a
# dict-representation of the global container stack, with additional properties such
# as `initial_extruder_nr`. As users may enter such expressions we can't use the
# global container stack.
extruder_nr = str(self._all_extruder_settings["-1"].get(extruder_nr_expr, "-1"))
if self._condition_state == GcodeConditionState.OutsideCondition:
raise SyntaxError("Condition should start with an 'if' statement")
if extruder_nr in self._all_extruder_settings:
additional_variables = self._all_extruder_settings[extruder_nr].copy()
else:
Logger.warning(f"Extruder {extruder_nr} does not exist, using global settings")
additional_variables = self._all_extruder_settings["-1"].copy()
if condition == "if":
# First instruction, just evaluate it
instruction = GcodeInstruction.Evaluate
# Add the arguments and keyword arguments to the additional settings. These
# are currently _not_ used, but they are added for consistency with the
# base Formatter class.
for key, value in enumerate(args):
additional_variables[key] = value
for key, value in kwargs.items():
additional_variables[key] = value
else:
if self._condition_state == GcodeConditionState.ConditionTrue:
# We have reached the next condition after a valid one has been found, skip the rest
self._condition_state = GcodeConditionState.ConditionDone
if extruder_nr == "-1":
container_stack = CuraApplication.getInstance().getGlobalContainerStack()
else:
container_stack = ExtruderManager.getInstance().getExtruderStack(extruder_nr)
if not container_stack:
if condition == "elif":
if self._condition_state == GcodeConditionState.ConditionFalse:
# New instruction, and valid condition has not been reached so far => evaluate it
instruction = GcodeInstruction.Evaluate
else:
# New instruction, but valid condition has already been reached => skip it
instruction = GcodeInstruction.Skip
elif condition == "else":
instruction = GcodeInstruction.Skip # Never evaluate, expression should be empty
if self._condition_state == GcodeConditionState.ConditionFalse:
# Fallback instruction, and no valid condition has been reached so far => activate the following block
self._condition_state = GcodeConditionState.ConditionTrue
elif condition == "endif":
instruction = GcodeInstruction.Skip # Never evaluate, expression should be empty
self._condition_state = GcodeConditionState.OutsideCondition
if instruction >= GcodeInstruction.Evaluate and extruder_nr_expr is not None:
extruder_nr_function = SettingFunction(extruder_nr_expr)
container_stack = self._cura_application.getGlobalContainerStack()
# We add the variables contained in `_all_extruder_settings["-1"]`, which is a dict-representation of the
# global container stack, with additional properties such as `initial_extruder_nr`. As users may enter such
# expressions we can't use the global container stack. The variables contained in the global container stack
# will then be inserted twice, which is not optimal but works well.
extruder_nr = str(extruder_nr_function(container_stack, additional_variables=self._all_extruder_settings["-1"]))
if instruction >= GcodeInstruction.Evaluate:
if extruder_nr in self._all_extruder_settings:
additional_variables = self._all_extruder_settings[extruder_nr].copy()
else:
Logger.warning(f"Extruder {extruder_nr} does not exist, using global settings")
container_stack = CuraApplication.getInstance().getGlobalContainerStack()
additional_variables = self._all_extruder_settings["-1"].copy()
setting_function = SettingFunction(expression)
value = setting_function(container_stack, additional_variables=additional_variables)
if extruder_nr == "-1":
container_stack = self._cura_application.getGlobalContainerStack()
else:
container_stack = self._extruder_manager.getExtruderStack(extruder_nr)
if not container_stack:
Logger.warning(f"Extruder {extruder_nr} does not exist, using global settings")
container_stack = self._cura_application.getGlobalContainerStack()
return value
setting_function = SettingFunction(expression)
value = setting_function(container_stack, additional_variables=additional_variables)
if instruction == GcodeInstruction.Evaluate:
if value:
self._condition_state = GcodeConditionState.ConditionTrue
else:
self._condition_state = GcodeConditionState.ConditionFalse
return ""
else:
value_str = str(value)
if end_of_line is not None:
# If we are evaluating an expression that is not a condition, restore the end of line
value_str += end_of_line
return value_str
else:
return ""
class StartSliceJob(Job):
@ -366,7 +454,12 @@ class StartSliceJob(Job):
for extruder_stack in global_stack.extruderList:
self._buildExtruderMessage(extruder_stack)
for plugin in CuraApplication.getInstance().getBackendPlugins():
backend_plugins = CuraApplication.getInstance().getBackendPlugins()
# Sort backend plugins by name. Not a very good strategy, but at least it is repeatable. This will be improved later.
backend_plugins = sorted(backend_plugins, key=lambda backend_plugin: backend_plugin.getId())
for plugin in backend_plugins:
if not plugin.usePlugin():
continue
for slot in plugin.getSupportedSlots():
@ -465,6 +558,9 @@ class StartSliceJob(Job):
result["day"] = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"][int(time.strftime("%w"))]
result["initial_extruder_nr"] = CuraApplication.getInstance().getExtruderManager().getInitialExtruderNr()
# If adding or changing a setting here, please update the associated wiki page
# https://github.com/Ultimaker/Cura/wiki/Start-End-G%E2%80%90Code
return result
def _cacheAllExtruderSettings(self):
@ -518,6 +614,7 @@ class StartSliceJob(Job):
# Replace the setting tokens in start and end g-code.
extruder_nr = stack.getProperty("extruder_nr", "value")
settings["machine_extruder_prestart_code"] = self._expandGcodeTokens(settings["machine_extruder_prestart_code"], extruder_nr)
settings["machine_extruder_start_code"] = self._expandGcodeTokens(settings["machine_extruder_start_code"], extruder_nr)
settings["machine_extruder_end_code"] = self._expandGcodeTokens(settings["machine_extruder_end_code"], extruder_nr)

View File

@ -208,7 +208,14 @@ Item
anchors.rightMargin: UM.Theme.getSize("thin_margin").height
enabled: UM.Backend.state == UM.Backend.Done
currentIndex: UM.Backend.state == UM.Backend.Done ? (Cura.MachineManager.activeMachine.getOutputFileFormats.includes("application/x-makerbot") ? 1 : 0) : 2
// Pre-select the correct index, depending on the situation (see the model-property below):
// - Don't select any post-slice-file-format when the engine isn't done.
// - Choose either the S-series or the Makerbot-series of printers' format otherwise, depending on the active printer.
// This way, the user can just click 'save' without having to worry about whether or not the format is right.
property int isMakerbotFormat: Cura.MachineManager.activeMachine.getOutputFileFormats.includes("application/x-makerbot") || Cura.MachineManager.activeMachine.getOutputFileFormats.includes("application/x-makerbot-sketch")
property int isBackendDone: UM.Backend.state == UM.Backend.Done
currentIndex: isBackendDone ? (isMakerbotFormat ? 1 : 0) : 2
textRole: "text"
valueRole: "value"

View File

@ -196,7 +196,7 @@ class DigitalFactoryApiClient:
url = "{}/projects/{}/files".format(self.CURA_API_ROOT, library_project_id)
self._http.get(url,
scope = self._scope,
callback = self._parseCallback(on_finished, DigitalFactoryFileResponse, failed),
callback = self._parseCallback(on_finished, DigitalFactoryFileResponse, failed, default_values = {'username': ''}),
error_callback = failed,
timeout = self.DEFAULT_REQUEST_TIMEOUT)
@ -205,7 +205,8 @@ class DigitalFactoryApiClient:
Callable[[List[CloudApiClientModel]], Any]],
model: Type[CloudApiClientModel],
on_error: Optional[Callable] = None,
pagination_manager: Optional[PaginationManager] = None) -> Callable[[QNetworkReply], None]:
pagination_manager: Optional[PaginationManager] = None,
default_values: Dict[str, str] = None) -> Callable[[QNetworkReply], None]:
"""
Creates a callback function so that it includes the parsing of the response into the correct model.
@ -234,7 +235,7 @@ class DigitalFactoryApiClient:
if status_code >= 300 and on_error is not None:
on_error()
else:
self._parseModels(response, on_finished, model, pagination_manager = pagination_manager)
self._parseModels(response, on_finished, model, pagination_manager = pagination_manager, default_values = default_values)
self._anti_gc_callbacks.append(parse)
return parse
@ -262,7 +263,8 @@ class DigitalFactoryApiClient:
on_finished: Union[Callable[[CloudApiClientModel], Any],
Callable[[List[CloudApiClientModel]], Any]],
model_class: Type[CloudApiClientModel],
pagination_manager: Optional[PaginationManager] = None) -> None:
pagination_manager: Optional[PaginationManager] = None,
default_values: Dict[str, str] = None) -> None:
"""Parses the given models and calls the correct callback depending on the result.
:param response: The response from the server, after being converted to a dict.
@ -279,7 +281,10 @@ class DigitalFactoryApiClient:
if "links" in response and pagination_manager:
pagination_manager.setLinks(response["links"])
if isinstance(data, list):
results = [model_class(**c) for c in data] # type: List[CloudApiClientModel]
results = [] # type: List[CloudApiClientModel]
for model_data in data:
complete_model_data = (default_values | model_data) if default_values is not None else model_data
results.append(model_class(**complete_model_data))
on_finished_list = cast(Callable[[List[CloudApiClientModel]], Any], on_finished)
on_finished_list(results)
else:
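The fallback merge above relies on Python 3.9+ dict union: keys present in the server response win, while missing keys (here 'username') are filled from the defaults. A standalone check with illustrative values:

default_values = {"username": ""}
print(default_values | {"file_id": "0123", "username": "erwan"})  # {'username': 'erwan', 'file_id': '0123'}
print(default_values | {"file_id": "0123"})                       # {'username': '', 'file_id': '0123'}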

View File

@ -298,8 +298,14 @@ class FlavorParser:
position.e.extend([0] * (self._extruder_number - len(position.e) + 1))
return position
def processMCode(self, M: int, line: str, position: Position, path: List[List[Union[float, int]]]) -> Position:
pass
def processMCode(self, M: int, line: str, position: Position, path: List[List[Union[float, int]]]) -> None:
# Set extrusion mode
if M == 82:
# Set absolute extrusion mode
self._is_absolute_extrusion = True
elif M == 83:
# Set relative extrusion mode
self._is_absolute_extrusion = False
_type_keyword = ";TYPE:"
_layer_keyword = ";LAYER:"

View File

@ -11,14 +11,6 @@ class RepRapFlavorParser(FlavorParser.FlavorParser):
def __init__(self):
super().__init__()
def processMCode(self, M, line, position, path):
if M == 82:
# Set absolute extrusion mode
self._is_absolute_extrusion = True
elif M == 83:
# Set relative extrusion mode
self._is_absolute_extrusion = False
def _gCode90(self, position, params, path):
"""Set the absolute positioning

View File

@ -54,7 +54,7 @@ Item
{
anchors.top: parent.top
anchors.left: parent.left
width: parent.width * 2 / 3
width: parent.width / 2
spacing: base.columnSpacing
@ -139,6 +139,39 @@ Item
decimals: 0
forceUpdateOnChangeFunction: forceUpdateFunction
}
}
// =======================================
// Right-side column "Nozzle Settings"
// =======================================
Column
{
anchors.top: parent.top
anchors.right: parent.right
width: parent.width / 2
spacing: base.columnSpacing
UM.Label // Title Label
{
text: catalog.i18nc("@title:label", " ")
font: UM.Theme.getFont("medium_bold")
}
Cura.NumericTextFieldWithUnit
{
id: extruderChangeDurationFieldId
containerStackId: base.extruderStackId
settingKey: "machine_extruder_change_duration"
settingStoreIndex: propertyStoreIndex
labelText: catalog.i18nc("@label", "Extruder Change duration")
labelFont: base.labelFont
labelWidth: base.labelWidth
controlWidth: base.controlWidth
unitText: catalog.i18nc("@label", "s")
forceUpdateOnChangeFunction: forceUpdateFunction
}
Cura.NumericTextFieldWithUnit
{
@ -179,24 +212,48 @@ Item
anchors.right: parent.right
anchors.margins: UM.Theme.getSize("default_margin").width
Cura.GcodeTextArea // "Extruder Start G-code"
Column
{
anchors.top: parent.top
anchors.bottom: parent.bottom
anchors.bottomMargin: UM.Theme.getSize("default_margin").height
anchors.left: parent.left
width: base.columnWidth - UM.Theme.getSize("default_margin").width
anchors.bottom: buttonLearnMore.top
anchors.bottomMargin: UM.Theme.getSize("default_margin").height
width: parent.width / 2
labelText: catalog.i18nc("@title:label", "Extruder Start G-code")
containerStackId: base.extruderStackId
settingKey: "machine_extruder_start_code"
settingStoreIndex: propertyStoreIndex
spacing: base.columnSpacing
Cura.GcodeTextArea // "Extruder Prestart G-code"
{
anchors.top: parent.top
anchors.left: parent.left
height: (parent.height / 2) - UM.Theme.getSize("default_margin").height
width: base.columnWidth - UM.Theme.getSize("default_margin").width
labelText: catalog.i18nc("@title:label", "Extruder Prestart G-code")
containerStackId: base.extruderStackId
settingKey: "machine_extruder_prestart_code"
settingStoreIndex: propertyStoreIndex
}
Cura.GcodeTextArea // "Extruder Start G-code"
{
anchors.bottom: parent.bottom
anchors.left: parent.left
height: (parent.height / 2) - UM.Theme.getSize("default_margin").height
width: base.columnWidth - UM.Theme.getSize("default_margin").width
labelText: catalog.i18nc("@title:label", "Extruder Start G-code")
containerStackId: base.extruderStackId
settingKey: "machine_extruder_start_code"
settingStoreIndex: propertyStoreIndex
}
}
Cura.GcodeTextArea // "Extruder End G-code"
{
anchors.top: parent.top
anchors.bottom: parent.bottom
anchors.bottom: buttonLearnMore.top
anchors.bottomMargin: UM.Theme.getSize("default_margin").height
anchors.right: parent.right
width: base.columnWidth - UM.Theme.getSize("default_margin").width
@ -206,5 +263,17 @@ Item
settingKey: "machine_extruder_end_code"
settingStoreIndex: propertyStoreIndex
}
Cura.TertiaryButton
{
id: buttonLearnMore
text: catalog.i18nc("@button", "Learn more")
iconSource: UM.Theme.getIcon("LinkExternal")
isIconOnRightSide: true
onClicked: Qt.openUrlExternally("https://github.com/Ultimaker/Cura/wiki/Start-End-G%E2%80%90Code")
anchors.bottom: parent.bottom
anchors.right: parent.right
}
}
}

View File

@ -214,7 +214,7 @@ Item
settingStoreIndex: propertyStoreIndex
labelText: catalog.i18nc("@label", "Y min")
labelText: catalog.i18nc("@label", "Y min ( '-' towards back)")
labelFont: base.labelFont
labelWidth: base.labelWidth
controlWidth: base.controlWidth
@ -254,7 +254,7 @@ Item
settingKey: "machine_head_with_fans_polygon"
settingStoreIndex: propertyStoreIndex
labelText: catalog.i18nc("@label", "Y max")
labelText: catalog.i18nc("@label", "Y max ( '+' towards front)")
labelFont: base.labelFont
labelWidth: base.labelWidth
controlWidth: base.controlWidth
@ -344,6 +344,21 @@ Item
labelWidth: base.labelWidth
forceUpdateOnChangeFunction: forceUpdateFunction
}
/*
- Allows the user to toggle whether the start g-code must be the absolute first g-code in the file.
*/
Cura.SimpleCheckBox // "Make sure Start Code is before all gcodes"
{
id: applyStartGcodeFirstCheckbox
containerStackId: machineStackId
settingKey: "machine_start_gcode_first"
settingStoreIndex: propertyStoreIndex
labelText: catalog.i18nc("@label", "Start GCode must be first")
labelFont: base.labelFont
labelWidth: base.labelWidth
forceUpdateOnChangeFunction: forceUpdateFunction
}
/* The "Shared Heater" feature is temporarily disabled because its
@ -376,7 +391,7 @@ Item
anchors
{
top: upperBlock.bottom
bottom: parent.bottom
bottom: buttonLearnMore.top
left: parent.left
right: parent.right
margins: UM.Theme.getSize("default_margin").width
@ -403,5 +418,19 @@ Item
settingKey: "machine_end_gcode"
settingStoreIndex: propertyStoreIndex
}
}
Cura.TertiaryButton
{
id: buttonLearnMore
text: catalog.i18nc("@button", "Learn more")
iconSource: UM.Theme.getIcon("LinkExternal")
isIconOnRightSide: true
onClicked: Qt.openUrlExternally("https://github.com/Ultimaker/Cura/wiki/Start-End-G%E2%80%90Code")
anchors.bottom: parent.bottom
anchors.right: parent.right
anchors.margins: UM.Theme.getSize("default_margin").width
}
}

View File

@ -1,4 +1,4 @@
# Copyright (c) 2023 UltiMaker
# Copyright (c) 2024 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.
from io import StringIO, BufferedIOBase
import json
@ -18,6 +18,7 @@ from UM.Scene.Iterator.DepthFirstIterator import DepthFirstIterator
from UM.i18n import i18nCatalog
from cura.CuraApplication import CuraApplication
from cura.PrinterOutput.FormatMaps import FormatMaps
from cura.Snapshot import Snapshot
from cura.Utils.Threading import call_on_qt_thread
from cura.CuraVersion import ConanInstalls
@ -45,6 +46,13 @@ class MakerbotWriter(MeshWriter):
suffixes=["makerbot"]
)
)
MimeTypeDatabase.addMimeType(
MimeType(
name="application/x-makerbot-replicator_plus",
comment="Makerbot Toolpath Package",
suffixes=["makerbot"]
)
)
_PNG_FORMAT = [
{"prefix": "isometric_thumbnail", "width": 120, "height": 120},
@ -111,15 +119,15 @@ class MakerbotWriter(MeshWriter):
match file_format:
case "application/x-makerbot-sketch":
filename, filedata = "print.gcode", gcode_text_io.getvalue()
self._PNG_FORMATS = self._PNG_FORMAT
case "application/x-makerbot":
filename, filedata = "print.jsontoolpath", du.gcode_2_miracle_jtp(gcode_text_io.getvalue())
self._PNG_FORMATS = self._PNG_FORMAT + self._PNG_FORMAT_METHOD
case "application/x-makerbot-replicator_plus":
filename, filedata = "print.jsontoolpath", du.gcode_2_miracle_jtp(gcode_text_io.getvalue(), nb_extruders=1)
case _:
raise Exception("Unsupported Mime type")
png_files = []
for png_format in self._PNG_FORMATS:
for png_format in (self._PNG_FORMAT + self._PNG_FORMAT_METHOD):
width, height, prefix = png_format["width"], png_format["height"], png_format["prefix"]
thumbnail_buffer = self._createThumbnail(width, height)
if thumbnail_buffer is None:
@ -137,6 +145,30 @@ class MakerbotWriter(MeshWriter):
for png_file in png_files:
file, data = png_file["file"], png_file["data"]
zip_stream.writestr(file, data)
api = CuraApplication.getInstance().getCuraAPI()
metadata_json = api.interface.settings.getSliceMetadata()
# All the mapping stuff we have to do:
product_to_id_map = FormatMaps.getProductIdMap()
printer_name_map = FormatMaps.getInversePrinterNameMap()
extruder_type_map = FormatMaps.getInverseExtruderTypeMap()
material_map = FormatMaps.getInverseMaterialMap()
for key, value in metadata_json.items():
if "all_settings" in value:
if "machine_name" in value["all_settings"]:
machine_name = value["all_settings"]["machine_name"]
if machine_name in product_to_id_map:
machine_name = product_to_id_map[machine_name][0]
value["all_settings"]["machine_name"] = printer_name_map.get(machine_name, machine_name)
if "machine_nozzle_id" in value["all_settings"]:
extruder_type = value["all_settings"]["machine_nozzle_id"]
value["all_settings"]["machine_nozzle_id"] = extruder_type_map.get(extruder_type, extruder_type)
if "material_type" in value["all_settings"]:
material_type = value["all_settings"]["material_type"]
value["all_settings"]["material_type"] = material_map.get(material_type, material_type)
slice_metadata = json.dumps(metadata_json, separators=(", ", ": "), indent=4)
zip_stream.writestr("slicemetadata.json", slice_metadata)
except (IOError, OSError, BadZipFile) as ex:
Logger.log("e", f"Could not write to (.makerbot) file because: '{ex}'.")
self.setInformation(catalog.i18nc("@error", "MakerbotWriter could not save to the designated path."))
@ -226,11 +258,87 @@ class MakerbotWriter(MeshWriter):
meta["preferences"] = dict()
bounds = application.getBuildVolume().getBoundingBox()
intent = CuraApplication.getInstance().getIntentManager().currentIntentCategory
meta["preferences"]["instance0"] = {
"machineBounds": [bounds.right, bounds.back, bounds.left, bounds.front] if bounds is not None else None,
"printMode": CuraApplication.getInstance().getIntentManager().currentIntentCategory,
"machineBounds": [bounds.right, bounds.front, bounds.left, bounds.back] if bounds is not None else None,
"printMode": intent
}
if file_format == "application/x-makerbot":
accel_overrides = meta["accel_overrides"] = {}
if intent in ['highspeed', 'highspeedsolid']:
accel_overrides['do_input_shaping'] = True
accel_overrides['do_corner_rounding'] = True
bead_mode_overrides = accel_overrides["bead_mode"] = {}
accel_enabled = global_stack.getProperty('acceleration_enabled', 'value')
if accel_enabled:
global_accel_setting = global_stack.getProperty('acceleration_print', 'value')
accel_overrides["rate_mm_per_s_sq"] = {
"x": global_accel_setting,
"y": global_accel_setting
}
if global_stack.getProperty('acceleration_travel_enabled', 'value'):
travel_accel_setting = global_stack.getProperty('acceleration_travel', 'value')
bead_mode_overrides['Travel Move'] = {
"rate_mm_per_s_sq": {
"x": travel_accel_setting,
"y": travel_accel_setting
}
}
jerk_enabled = global_stack.getProperty('jerk_enabled', 'value')
if jerk_enabled:
global_jerk_setting = global_stack.getProperty('jerk_print', 'value')
accel_overrides["max_speed_change_mm_per_s"] = {
"x": global_jerk_setting,
"y": global_jerk_setting
}
if global_stack.getProperty('jerk_travel_enabled', 'value'):
travel_jerk_setting = global_stack.getProperty('jerk_travel', 'value')
if 'Travel Move' not in bead_mode_overrides:
bead_mode_overrides['Travel Move'] = {}
bead_mode_overrides['Travel Move'].update({
"max_speed_change_mm_per_s": {
"x": travel_jerk_setting,
"y": travel_jerk_setting
}
})
# Get bead mode settings per extruder
available_bead_modes = {
"infill": "FILL",
"prime_tower": "PRIME_TOWER",
"roofing": "TOP_SURFACE",
"support_infill": "SUPPORT",
"support_interface": "SUPPORT_INTERFACE",
"wall_0": "WALL_OUTER",
"wall_x": "WALL_INNER",
"skirt_brim": "SKIRT"
}
for idx, extruder in enumerate(extruders):
for bead_mode_setting, bead_mode_tag in available_bead_modes.items():
ext_specific_tag = "%s_%s" % (bead_mode_tag, idx)
if accel_enabled or jerk_enabled:
bead_mode_overrides[ext_specific_tag] = {}
if accel_enabled:
accel_val = extruder.getProperty('acceleration_%s' % bead_mode_setting, 'value')
bead_mode_overrides[ext_specific_tag]["rate_mm_per_s_sq"] = {
"x": accel_val,
"y": accel_val
}
if jerk_enabled:
jerk_val = extruder.getProperty('jerk_%s' % bead_mode_setting, 'value')
bead_mode_overrides[ext_specific_tag]["max_speed_change_mm_per_s"] = {
"x": jerk_val,
"y": jerk_val
}
meta["miracle_config"] = {"gaggles": {"instance0": {}}}
version_info = dict()
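For orientation, the override block assembled above ends up shaped roughly like this (illustrative values; the keys follow the code, per-extruder entries are suffixed with the extruder index, and the block is only emitted for the application/x-makerbot format):

accel_overrides = {
    "do_input_shaping": True,                          # only for the highspeed/highspeedsolid intents
    "do_corner_rounding": True,
    "rate_mm_per_s_sq": {"x": 3000, "y": 3000},        # from acceleration_print
    "max_speed_change_mm_per_s": {"x": 10, "y": 10},   # from jerk_print
    "bead_mode": {
        "Travel Move": {"rate_mm_per_s_sq": {"x": 5000, "y": 5000}},
        "WALL_OUTER_0": {
            "rate_mm_per_s_sq": {"x": 2500, "y": 2500},
            "max_speed_change_mm_per_s": {"x": 8, "y": 8},
        },
    },
}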

View File

@ -25,6 +25,12 @@ def getMetaData():
"description": catalog.i18nc("@item:inlistbox", "Makerbot Sketch Printfile"),
"mime_type": "application/x-makerbot-sketch",
"mode": MakerbotWriter.MakerbotWriter.OutputMode.BinaryMode,
},
{
"extension": file_extension,
"description": catalog.i18nc("@item:inlistbox", "Makerbot Replicator+ Printfile"),
"mime_type": "application/x-makerbot-replicator_plus",
"mode": MakerbotWriter.MakerbotWriter.OutputMode.BinaryMode,
}
]
},

View File

@ -1,65 +1,217 @@
# Copyright (c) 2020 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
# Created by Wayne Porter
# Re-write in April of 2024 by GregValiant (Greg Foresi)
# Changes:
# Added an 'Enable' setting
# Added support for multi-line insertions (comma delimited)
# Added insertions in a range of layers or a single insertion at a layer. Numbers are consistent with the Cura Preview (base1)
# Added frequency of Insertion (once only, every layer, every 2nd, 3rd, 5th, 10th, 25th, 50th, 100th)
# Added support for 'One at a Time' print sequence
# Rafts are allowed and accounted for but no insertions are made in raft layers
from ..Script import Script
import re
from UM.Application import Application
class InsertAtLayerChange(Script):
def __init__(self):
super().__init__()
def getSettingDataString(self):
return """{
"name": "Insert at layer change",
"name": "Insert at Layer Change",
"key": "InsertAtLayerChange",
"metadata": {},
"version": 2,
"settings":
{
"insert_location":
"enabled":
{
"label": "When to insert",
"description": "Whether to insert code before or after layer change.",
"label": "Enable this script",
"description": "You must enable the script for it to run.",
"type": "bool",
"default_value": true,
"enabled": true
},
"insert_frequency":
{
"label": "How often to insert",
"description": "Every so many layers starting with the Start Layer OR as single insertion at a specific layer. If the print sequence is 'one_at_a_time' then the insertions will be made for every model. Insertions are made at the beginning of a layer.",
"type": "enum",
"options": {"before": "Before", "after": "After"},
"default_value": "before"
"options": {
"once_only": "One insertion only",
"every_layer": "Every Layer",
"every_2nd": "Every 2nd",
"every_3rd": "Every 3rd",
"every_5th": "Every 5th",
"every_10th": "Every 10th",
"every_25th": "Every 25th",
"every_50th": "Every 50th",
"every_100th": "Every 100th"},
"default_value": "every_layer",
"enabled": "enabled"
},
"start_layer":
{
"label": "Starting Layer",
"description": "The layer before which the first insertion will take place. If the Print_Sequence is 'All at Once' then use the layer numbers from the Cura Preview. Enter '1' to start at gcode LAYER:0. In 'One at a Time' mode use the layer numbers from the first model that prints AND all models will receive the same insertions. NOTE: There is never an insertion for raft layers.",
"type": "int",
"default_value": 1,
"minimum_value": 1,
"enabled": "insert_frequency != 'once_only' and enabled"
},
"end_layer":
{
"label": "Ending Layer",
"description": "The layer before which the last insertion will take place. Enter '-1' to indicate the entire file. Use layer numbers from the Cura Preview.",
"type": "int",
"default_value": -1,
"minimum_value": -1,
"enabled": "insert_frequency != 'once_only' and enabled"
},
"single_end_layer":
{
"label": "Layer # for Single Insertion.",
"description": "The layer before which the Gcode insertion will take place. Use the layer numbers from the Cura Preview.",
"type": "str",
"default_value": "",
"enabled": "insert_frequency == 'once_only' and enabled"
},
"gcode_to_add":
{
"label": "G-code to insert",
"description": "G-code to add before or after layer change.",
"label": "G-code to insert.",
"description": "G-code to add at start of the layer. Use a comma to delimit multi-line commands. EX: G28 X Y,M220 S100,M117 HELL0. NOTE: All inserted text will be converted to upper-case as some firmwares don't understand lower-case.",
"type": "str",
"default_value": ""
},
"skip_layers":
{
"label": "Skip layers",
"description": "Number of layers to skip between insertions (0 for every layer).",
"type": "int",
"default_value": 0,
"minimum_value": 0
"default_value": "",
"enabled": "enabled"
}
}
}"""
def execute(self, data):
gcode_to_add = self.getSettingValueByKey("gcode_to_add") + "\n"
skip_layers = self.getSettingValueByKey("skip_layers")
count = 0
for layer in data:
# Check that a layer is being printed
lines = layer.split("\n")
for line in lines:
if ";LAYER:" in line:
index = data.index(layer)
if count == 0:
if self.getSettingValueByKey("insert_location") == "before":
layer = gcode_to_add + layer
else:
layer = layer + gcode_to_add
data[index] = layer
count = (count + 1) % (skip_layers + 1)
break
return data
# Exit if the script is not enabled
if not bool(self.getSettingValueByKey("enabled")):
return data
#Initialize variables
mycode = self.getSettingValueByKey("gcode_to_add").upper()
start_layer = int(self.getSettingValueByKey("start_layer"))
end_layer = int(self.getSettingValueByKey("end_layer"))
when_to_insert = self.getSettingValueByKey("insert_frequency")
end_list = [0]
print_sequence = Application.getInstance().getGlobalContainerStack().getProperty("print_sequence", "value")
# Get the topmost layer number and adjust the end_list
if end_layer == -1:
if print_sequence == "all_at_once":
for lnum in range(0, len(data) - 1):
if ";LAYER:" in data[lnum]:
the_top = int(data[lnum].split(";LAYER:")[1].split("\n")[0])
end_list[0] = the_top
# Get the topmost layer number for each model and append it to the end_list
if print_sequence == "one_at_a_time":
for lnum in range(0, 10):
if ";LAYER:0" in data[lnum]:
start_at = lnum + 1
break
for lnum in range(start_at, len(data)-1, 1):
if ";LAYER:" in data[lnum] and not ";LAYER:0" in data[lnum] and not ";LAYER:-" in data[lnum]:
end_list[len(end_list) - 1] = int(data[lnum].split(";LAYER:")[1].split("\n")[0])
continue
if ";LAYER:0" in data[lnum]:
end_list.append(0)
elif end_layer != -1:
if print_sequence == "all_at_once":
# Catch an error if the entered End_Layer > the top layer in the gcode
for e_num, layer in enumerate(data):
if ";LAYER:" in layer:
top_layer = int(data[e_num].split(";LAYER:")[1].split("\n")[0])
end_list[0] = end_layer - 1
if top_layer < end_layer - 1:
end_list[0] = top_layer
elif print_sequence == "one_at_a_time":
# Find the index of the first Layer:0
for lnum in range(0, 10):
if ";LAYER:0" in data[lnum]:
start_at = lnum + 1
break
# Get the top layer number for each model
for lnum in range(start_at, len(data)-1):
if ";LAYER:" in data[lnum] and not ";LAYER:0" in data[lnum] and not ";LAYER:-" in data[lnum]:
end_list[len(end_list) - 1] = int(data[lnum].split(";LAYER:")[1].split("\n")[0])
if ";LAYER:0" in data[lnum]:
end_list.append(0)
# Adjust the end list if an end layer was named
for index, num in enumerate(end_list):
if num > end_layer - 1:
end_list[index] = end_layer - 1
# If the g-code to insert is multi-line, replace the commas with newline characters
if mycode != "":
if "," in mycode:
mycode = re.sub(",", "\n",mycode)
gcode_to_add = mycode
#Get the insertion frequency
match when_to_insert:
case "every_layer":
freq = 1
case "every_2nd":
freq = 2
case "every_3rd":
freq = 3
case "every_5th":
freq = 5
case "every_10th":
freq = 10
case "every_25th":
freq = 25
case "every_50th":
freq = 50
case "every_100th":
freq = 100
case "once_only":
the_insert_layer = int(self.getSettingValueByKey("single_end_layer"))-1
case _:
raise ValueError(f"Unexpected insertion frequency {when_to_insert}")
#Single insertion
if when_to_insert == "once_only":
# For print sequence 'All at once'
if print_sequence == "all_at_once":
for index, layer in enumerate(data):
if ";LAYER:" + str(the_insert_layer) + "\n" in layer:
lines = layer.split("\n")
lines.insert(1,gcode_to_add)
data[index] = "\n".join(lines)
return data
# For print sequence 'One at a time'
else:
for index, layer in enumerate(data):
if ";LAYER:" + str(the_insert_layer) + "\n" in layer:
lines = layer.split("\n")
lines.insert(1,gcode_to_add)
data[index] = "\n".join(lines)
return data
# For multiple insertions
if when_to_insert != "once_only":
# Search from the line after the first Layer:0 so we know when a model ends if in One at a Time mode.
first_0 = True
next_layer = start_layer - 1
end_layer = end_list.pop(0)
for index, layer in enumerate(data):
lines = layer.split("\n")
for l_index, line in enumerate(lines):
if ";LAYER:" in line:
layer_number = int(line.split(":")[1])
if layer_number == next_layer and layer_number <= end_layer:
lines.insert(l_index + 1,gcode_to_add)
data[index] = "\n".join(lines)
next_layer += freq
# Reset the next_layer for one-at-a-time
if next_layer > int(end_layer):
next_layer = start_layer - 1
# Index to the next end_layer when a Layer:0 is encountered
try:
if not first_0 and layer_number == 0:
end_layer = end_list.pop(0)
except:
pass
# Beyond the initial Layer:0, further Layer:0 lines indicate the start of the next model.
if layer_number == 0:
first_0 = False
break
return data
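The execute() above maps the insert_frequency enum onto a numeric step (freq), then walks the per-layer chunks looking for ";LAYER:n" markers and inserts the user G-code whenever the layer number matches the next scheduled layer. A minimal stand-alone sketch of that matching loop (the helper name and test data below are illustrative, not part of the plugin):
# Hypothetical helper, for illustration only: insert a snippet after every `freq`-th ";LAYER:" marker.
def insert_every_nth(layers, snippet, start_layer=0, freq=1):
    next_layer = start_layer
    result = []
    for chunk in layers:                       # one G-code chunk per layer, as in `data`
        lines = chunk.split("\n")
        for i, line in enumerate(lines):
            if line.startswith(";LAYER:"):
                layer_number = int(line.split(":")[1])
                if layer_number == next_layer:
                    lines.insert(i + 1, snippet)
                    next_layer += freq         # schedule the next insertion
                break
        result.append("\n".join(lines))
    return result

layers = [f";LAYER:{n}\nG1 X10 Y10 E{n}" for n in range(6)]
print(insert_every_nth(layers, "M117 LAYER CHANGE", freq=2)[0])  # snippet lands after ;LAYER:0, :2 and :4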

View File

@ -1,9 +1,15 @@
# Copyright (c) 2020 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
# Modified 5/15/2023 - Greg Valiant (Greg Foresi)
# Created by Wayne Porter
# Added insertion frequency
# Adjusted for use with Relative Extrusion
# Changed Retract to a boolean and when true use the regular Cura retract settings.
# Use the regular Cura settings for Travel Speed and Speed_Z instead of asking.
# Added code to check the E location to prevent retracts if the filament was already retracted.
# Added 'Pause before image' per LemanRus
from ..Script import Script
from UM.Application import Application
from UM.Logger import Logger
class TimeLapse(Script):
def __init__(self):
@ -11,7 +17,7 @@ class TimeLapse(Script):
def getSettingDataString(self):
return """{
"name": "Time Lapse",
"name": "Time Lapse Camera",
"key": "TimeLapse",
"metadata": {},
"version": 2,
@ -19,24 +25,49 @@ class TimeLapse(Script):
{
"trigger_command":
{
"label": "Trigger camera command",
"description": "G-code command used to trigger camera.",
"label": "Camera Trigger Command",
"description": "G-code command used to trigger the camera. The setting box will take any command and parameters.",
"type": "str",
"default_value": "M240"
},
"insert_frequency":
{
"label": "How often (layers)",
"description": "Every so many layers (always starts at the first layer whether it's the model or a raft).",
"type": "enum",
"options": {
"every_layer": "Every Layer",
"every_2nd": "Every 2nd",
"every_3rd": "Every 3rd",
"every_5th": "Every 5th",
"every_10th": "Every 10th",
"every_25th": "Every 25th",
"every_50th": "Every 50th",
"every_100th": "Every 100th"},
"default_value": "every_layer"
},
"anti_shake_length":
{
"label": "Pause before image",
"description": "How long to wait (in ms) before capturing the image. This is to allow the printer to 'settle down' after movement. To disable set this to '0'.",
"type": "int",
"default_value": 0,
"minimum_value": 0,
"unit": "ms"
},
"pause_length":
{
"label": "Pause length",
"label": "Pause after image",
"description": "How long to wait (in ms) after camera was triggered.",
"type": "int",
"default_value": 700,
"default_value": 500,
"minimum_value": 0,
"unit": "ms"
},
"park_print_head":
{
"label": "Park Print Head",
"description": "Park the print head out of the way. Assumes absolute positioning.",
"description": "Park the print head out of the way.",
"type": "bool",
"default_value": true
},
@ -55,90 +86,166 @@ class TimeLapse(Script):
"description": "What Y location does the head move to for photo.",
"unit": "mm",
"type": "float",
"default_value": 190,
"enabled": "park_print_head"
},
"park_feed_rate":
{
"label": "Park Feed Rate",
"description": "How fast does the head move to the park coordinates.",
"unit": "mm/s",
"type": "float",
"default_value": 9000,
"default_value": 0,
"enabled": "park_print_head"
},
"retract":
{
"label": "Retraction Distance",
"description": "Filament retraction distance for camera trigger.",
"unit": "mm",
"type": "int",
"default_value": 0
"label": "Retract when required",
"description": "Retract if there isn't already a retraction. If unchecked then there will be no retraction even if there is none in the gcode. If retractions are not enabled in Cura there won't be a retraction. regardless of this setting.",
"type": "bool",
"default_value": true
},
"zhop":
{
"label": "Z-Hop Height When Parking",
"description": "Z-hop length before parking",
"description": "The height to lift the nozzle off the print before parking.",
"unit": "mm",
"type": "float",
"default_value": 0
"default_value": 2.0,
"minimum_value": 0.0
},
"ensure_final_image":
{
"label": "Ensure Final Image",
"description": "Depending on how the layer numbers work out with the 'How Often' frequency there might not be an image taken at the end of the last layer. This will ensure that one is taken. There is no parking as the Ending Gcode comes right up.",
"type": "bool",
"default_value": false
}
}
}"""
def execute(self, data):
feed_rate = self.getSettingValueByKey("park_feed_rate")
mycura = Application.getInstance().getGlobalContainerStack()
relative_extrusion = bool(mycura.getProperty("relative_extrusion", "value"))
extruder = mycura.extruderList
retract_speed = int(extruder[0].getProperty("retraction_speed", "value"))*60
retract_dist = round(float(extruder[0].getProperty("retraction_amount", "value")), 2)
retract_enabled = bool(extruder[0].getProperty("retraction_enable", "value"))
firmware_retract = bool(mycura.getProperty("machine_firmware_retract", "value"))
speed_z = int(extruder[0].getProperty("speed_z_hop", "value"))*60
if relative_extrusion:
rel_cmd = 83
else:
rel_cmd = 82
travel_speed = int(extruder[0].getProperty("speed_travel", "value"))*60
park_print_head = self.getSettingValueByKey("park_print_head")
x_park = self.getSettingValueByKey("head_park_x")
y_park = self.getSettingValueByKey("head_park_y")
trigger_command = self.getSettingValueByKey("trigger_command")
pause_length = self.getSettingValueByKey("pause_length")
retract = int(self.getSettingValueByKey("retract"))
retract = bool(self.getSettingValueByKey("retract"))
zhop = self.getSettingValueByKey("zhop")
gcode_to_append = ";TimeLapse Begin\n"
ensure_final_image = bool(self.getSettingValueByKey("ensure_final_image"))
when_to_insert = self.getSettingValueByKey("insert_frequency")
last_x = 0
last_y = 0
last_z = 0
last_e = 0
prev_e = 0
is_retracted = False
gcode_to_append = ""
if park_print_head:
gcode_to_append += self.putValue(G=1, F=feed_rate,
X=x_park, Y=y_park) + " ;Park print head\n"
gcode_to_append += self.putValue(M=400) + " ;Wait for moves to finish\n"
gcode_to_append += trigger_command + " ;Snap Photo\n"
gcode_to_append += self.putValue(G=4, P=pause_length) + " ;Wait for camera\n"
for idx, layer in enumerate(data):
for line in layer.split("\n"):
if self.getValue(line, "G") in {0, 1}: # Track X,Y,Z location.
last_x = self.getValue(line, "X", last_x)
last_y = self.getValue(line, "Y", last_y)
last_z = self.getValue(line, "Z", last_z)
# Check that a layer is being printed
lines = layer.split("\n")
for line in lines:
if ";LAYER:" in line:
if retract != 0: # Retract the filament so no stringing happens
layer += self.putValue(M=83) + " ;Extrude Relative\n"
layer += self.putValue(G=1, E=-retract, F=3000) + " ;Retract filament\n"
layer += self.putValue(M=82) + " ;Extrude Absolute\n"
layer += self.putValue(M=400) + " ;Wait for moves to finish\n" # Wait to fully retract before hopping
if zhop != 0:
layer += self.putValue(G=1, Z=last_z+zhop, F=3000) + " ;Z-Hop\n"
layer += gcode_to_append
if zhop != 0:
layer += self.putValue(G=0, X=last_x, Y=last_y, Z=last_z) + "; Restore position \n"
else:
layer += self.putValue(G=0, X=last_x, Y=last_y) + "; Restore position \n"
if retract != 0:
layer += self.putValue(M=400) + " ;Wait for moves to finish\n"
layer += self.putValue(M=83) + " ;Extrude Relative\n"
layer += self.putValue(G=1, E=retract, F=3000) + " ;Retract filament\n"
layer += self.putValue(M=82) + " ;Extrude Absolute\n"
data[idx] = layer
break
gcode_to_append += f"G0 F{travel_speed} X{x_park} Y{y_park} ;Park print head\n"
gcode_to_append += "M400 ;Wait for moves to finish\n"
anti_shake_length = self.getSettingValueByKey("anti_shake_length")
if anti_shake_length > 0:
gcode_to_append += f"G4 P{anti_shake_length} ;Wait for printer to settle down\n"
gcode_to_append += trigger_command + " ;Snap the Image\n"
gcode_to_append += f"G4 P{pause_length} ;Wait for camera to finish\n"
match when_to_insert:
case "every_layer":
step_freq = 1
case "every_2nd":
step_freq = 2
case "every_3rd":
step_freq = 3
case "every_5th":
step_freq = 5
case "every_10th":
step_freq = 10
case "every_25th":
step_freq = 25
case "every_50th":
step_freq = 50
case "every_100th":
step_freq = 100
case _:
step_freq = 1
# Use the step_freq to index through the layers----------------------------------------
for num in range(2,len(data)-1,step_freq):
layer = data[num]
try:
# Track X,Y,Z location.--------------------------------------------------------
for line in layer.split("\n"):
if self.getValue(line, "G") in {0, 1}:
last_x = self.getValue(line, "X", last_x)
last_y = self.getValue(line, "Y", last_y)
last_z = self.getValue(line, "Z", last_z)
#Track the E location so that if there is already a retraction we don't double dip.
if rel_cmd == 82:
if " E" in line:
last_e = line.split("E")[1]
if float(last_e) < float(prev_e):
is_retracted = True
else:
is_retracted = False
prev_e = last_e
elif rel_cmd == 83:
if " E" in line:
last_e = line.split("E")[1]
if float(last_e) < 0:
is_retracted = True
else:
is_retracted = False
prev_e = last_e
if firmware_retract and self.getValue(line, "G") in {10, 11}:
if self.getValue(line, "G") == 10:
is_retracted = True
last_e = float(prev_e) - float(retract_dist)
if self.getValue(line, "G") == 11:
is_retracted = False
last_e = float(prev_e) + float(retract_dist)
prev_e = last_e
lines = layer.split("\n")
# Insert the code----------------------------------------------------
camera_code = ""
for line in lines:
if ";LAYER:" in line:
if retract and not is_retracted and retract_enabled: # Retract unless already retracted
camera_code += ";TYPE:CUSTOM-----------------TimeLapse Begin\n"
camera_code += "M83 ;Extrude Relative\n"
if not firmware_retract:
camera_code += f"G1 F{retract_speed} E-{retract_dist} ;Retract filament\n"
else:
camera_code += "G10 ;Retract filament\n"
else:
camera_code += ";TYPE:CUSTOM-----------------TimeLapse Begin\n"
if zhop != 0:
camera_code += f"G1 F{speed_z} Z{round(last_z + zhop,2)} ;Z-Hop\n"
camera_code += gcode_to_append
camera_code += f"G0 F{travel_speed} X{last_x} Y{last_y} ;Restore XY position\n"
if zhop != 0:
camera_code += f"G0 F{speed_z} Z{last_z} ;Restore Z position\n"
if retract and not is_retracted and retract_enabled:
if not firmware_retract:
camera_code += f"G1 F{retract_speed} E{retract_dist} ;Un-Retract filament\n"
else:
camera_code += "G11 ;Un-Retract filament\n"
camera_code += f"M{rel_cmd} ;Extrude Mode\n"
camera_code += f";{'-' * 28}TimeLapse End"
# Format the camera code to be inserted
temp_lines = camera_code.split("\n")
for temp_index, temp_line in enumerate(temp_lines):
if ";" in temp_line and not temp_line.startswith(";"):
temp_lines[temp_index] = temp_line.replace(temp_line.split(";")[0], temp_line.split(";")[0] + str(" " * (29 - len(temp_line.split(";")[0]))),1)
temp_lines = "\n".join(temp_lines)
lines.insert(len(lines) - 2, temp_lines)
data[num] = "\n".join(lines)
break
except Exception as e:
Logger.log("w", "TimeLapse Error: " + repr(e))
# Take a final image if there was no camera shot at the end of the last layer.
if "TimeLapse Begin" not in data[len(data) - (3 if retract_enabled else 2)] and ensure_final_image:
data[len(data)-1] = "M400 ; Wait for all moves to finish\n" + trigger_command + " ;Snap the final Image\n" + f"G4 P{pause_length} ;Wait for camera\n" + data[len(data)-1]
return data
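The E-tracking above is what decides whether the script needs to add its own retraction: in absolute extrusion (M82) a retract shows up as an E value lower than the previous one, in relative extrusion (M83) as a negative E, and G10/G11 mark firmware retract/unretract. A simplified stand-alone version of just the E comparison (assumes plain "G0/G1 ... E<value>" lines and ignores firmware retraction):
def is_currently_retracted(lines, relative_extrusion):
    # Illustrative only -- mirrors the prev_e / last_e comparison in the script above.
    prev_e = 0.0
    retracted = False
    for line in lines:
        if line.startswith(("G0", "G1")) and " E" in line:
            e_value = float(line.split("E")[1].split()[0])
            if relative_extrusion:
                retracted = e_value < 0.0        # relative mode: a negative E is a retract move
            else:
                retracted = e_value < prev_e     # absolute mode: E going backwards is a retract
            prev_e = e_value
    return retracted

print(is_currently_retracted(["G1 X0 Y0 E10.0", "G1 F2400 E9.2"], relative_extrusion=False))  # True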

View File

@ -222,12 +222,11 @@ class SimulationView(CuraView):
self.setPath(i + fractional_value)
def advanceTime(self, time_increase: float) -> bool:
def advanceTime(self, time_increase: float) -> None:
"""
Advance the time by the given amount.
:param time_increase: The amount of time to advance (in seconds).
:return: True if the time was advanced, False if the end of the simulation was reached.
"""
total_duration = 0.0
if len(self.cumulativeLineDuration()) > 0:
@ -237,15 +236,13 @@ class SimulationView(CuraView):
# If we have reached the end of the simulation, go to the next layer.
if self.getCurrentLayer() == self.getMaxLayers():
# If we are already at the last layer, go to the first layer.
self.setTime(total_duration)
return False
# advance to the next layer, and reset the time
self.setLayer(self.getCurrentLayer() + 1)
self.setLayer(0)
else:
# advance to the next layer, and reset the time
self.setLayer(self.getCurrentLayer() + 1)
self.setTime(0.0)
else:
self.setTime(self._current_time + time_increase)
return True
def cumulativeLineDuration(self) -> List[float]:
# Make sure _cumulative_line_duration is initialized properly
@ -256,9 +253,19 @@ class SimulationView(CuraView):
polylines = self.getLayerData()
if polylines is not None:
for polyline in polylines.polygons:
for line_duration in list((polyline.lineLengths / polyline.lineFeedrates)[0]):
for line_index in range(len(polyline.lineLengths)):
line_length = polyline.lineLengths[line_index]
line_feedrate = polyline.lineFeedrates[line_index][0]
if line_feedrate > 0.0:
line_duration = line_length / line_feedrate
else:
# Something is wrong with this line (zero feedrate), set an arbitrary non-zero duration
line_duration = 0.1
total_duration += line_duration / SimulationView.SIMULATION_FACTOR
self._cumulative_line_duration.append(total_duration)
# for tool change we add an extra tool path
self._cumulative_line_duration.append(total_duration)
# set current cached layer
@ -583,7 +590,7 @@ class SimulationView(CuraView):
self._max_thickness = sys.float_info.min
self._min_flow_rate = sys.float_info.max
self._max_flow_rate = sys.float_info.min
self._cumulative_line_duration = {}
self._cumulative_line_duration = []
# The colour scheme is only influenced by the visible lines, so filter the lines by if they should be visible.
visible_line_types = []
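The cumulativeLineDuration() rewrite above trades the vectorised lineLengths / lineFeedrates division for an explicit loop, so a zero feedrate can no longer break the calculation: each line contributes length / feedrate (scaled by SIMULATION_FACTOR), degenerate lines fall back to 0.1 s, and one extra entry is appended for the tool-change path. A stand-alone sketch of that accumulation (the SIMULATION_FACTOR value here is assumed, for illustration only):
SIMULATION_FACTOR = 3  # assumed value, for illustration only

def cumulative_durations(line_lengths, line_feedrates):
    total = 0.0
    cumulative = []
    for length, feedrate in zip(line_lengths, line_feedrates):
        duration = length / feedrate if feedrate > 0.0 else 0.1  # arbitrary non-zero fallback
        total += duration / SIMULATION_FACTOR
        cumulative.append(total)
    cumulative.append(total)  # extra entry for the tool-change path, as in the view code
    return cumulative

print(cumulative_durations([30.0, 15.0, 10.0], [60.0, 0.0, 50.0]))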

View File

@ -144,9 +144,7 @@ Item
{
// divide by 1000 to account for ms to s conversion
const advance_time = simulationTimer.interval / 1000.0;
if (!UM.SimulationView.advanceTime(advance_time)) {
playButton.pauseSimulation();
}
UM.SimulationView.advanceTime(advance_time);
// The status must be set here instead of in the resumeSimulation function otherwise it won't work
// correctly, because part of the logic is in this trigger function.
isSimulationPlaying = true;

View File

@ -54,9 +54,9 @@ class SimulationViewProxy(QObject):
def currentPath(self):
return self._simulation_view.getCurrentPath()
@pyqtSlot(float, result=bool)
def advanceTime(self, duration: float) -> bool:
return self._simulation_view.advanceTime(duration)
@pyqtSlot(float)
def advanceTime(self, duration: float) -> None:
self._simulation_view.advanceTime(duration)
@pyqtProperty(int, notify=currentPathChanged)
def minimumPath(self):

View File

@ -360,8 +360,8 @@ geometry41core =
((v_prev_line_type[0] != 1) && (v_line_type[0] == 1)) ||
((v_prev_line_type[0] != 4) && (v_line_type[0] == 4))
)) {
float w = size_x;
float h = size_y;
float w = max(0.05, size_x);
float h = max(0.05, size_y);
myEmitVertex(v_vertex[0] + vec3( w, h, w), u_starts_color, normalize(vec3( 1.0, 1.0, 1.0)), viewProjectionMatrix * (gl_in[0].gl_Position + vec4( w, h, w, 0.0))); // Front-top-left
myEmitVertex(v_vertex[0] + vec3(-w, h, w), u_starts_color, normalize(vec3(-1.0, 1.0, 1.0)), viewProjectionMatrix * (gl_in[0].gl_Position + vec4(-w, h, w, 0.0))); // Front-top-right

View File

@ -1,11 +1,12 @@
# Copyright (c) 2023 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.
import datetime
import json
import os
import platform
import time
from typing import Optional, Set, TYPE_CHECKING
from typing import Any, Optional, Set, TYPE_CHECKING
from PyQt6.QtCore import pyqtSlot, QObject
from PyQt6.QtNetwork import QNetworkRequest
@ -33,7 +34,18 @@ class SliceInfo(QObject, Extension):
no model files are being sent (Just a SHA256 hash of the model).
"""
info_url = "https://stats.ultimaker.com/api/cura"
info_url = "https://statistics.ultimaker.com/api/v2/cura/slice"
_adjust_flattened_names = {
"extruders_extruder": "extruders",
"extruders_settings": "extruders",
"models_model": "models",
"models_transformation_data": "models_transformation",
"print_settings_": "",
"print_times": "print_time",
"active_machine_": "",
"slice_uuid": "slice_id",
}
def __init__(self, parent = None):
QObject.__init__(self, parent)
@ -112,6 +124,26 @@ class SliceInfo(QObject, Extension):
return list(sorted(user_modified_setting_keys))
def _flattenData(self, data: Any, result: dict, current_flat_key: Optional[str] = None, lift_list: bool = False) -> None:
if isinstance(data, dict):
for key, value in data.items():
total_flat_key = key if current_flat_key is None else f"{current_flat_key}_{key}"
self._flattenData(value, result, total_flat_key, lift_list)
elif isinstance(data, list):
for item in data:
self._flattenData(item, result, current_flat_key, True)
else:
actual_flat_key = current_flat_key.lower()
for key, value in self._adjust_flattened_names.items():
if actual_flat_key.startswith(key):
actual_flat_key = actual_flat_key.replace(key, value)
if lift_list:
if actual_flat_key not in result:
result[actual_flat_key] = []
result[actual_flat_key].append(data)
else:
result[actual_flat_key] = data
def _onWriteStarted(self, output_device):
try:
if not self._application.getPreferences().getValue("info/send_slice_info"):
@ -125,8 +157,7 @@ class SliceInfo(QObject, Extension):
global_stack = machine_manager.activeMachine
data = dict() # The data that we're going to submit.
data["time_stamp"] = time.time()
data["schema_version"] = 0
data["schema_version"] = 1000
data["cura_version"] = self._application.getVersion()
data["cura_build_type"] = ApplicationMetadata.CuraBuildType
org_id = user_profile.get("organization_id", None) if user_profile else None
@ -298,6 +329,11 @@ class SliceInfo(QObject, Extension):
"time_backend": int(round(time_backend)),
}
# Massage data into format used in the DB:
flat_data = dict()
self._flattenData(data, flat_data)
data = flat_data
# Convert data to bytes
binary_data = json.dumps(data).encode("utf-8")
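The new _flattenData() turns the nested payload into the flat key/value layout the statistics endpoint expects: dictionary levels are joined with underscores, list items are lifted into a single list per flattened key, and _adjust_flattened_names then rewrites a few prefixes. A worked example of the flattening itself (a stand-alone copy of the recursion, without the renaming step):
def flatten(data, result, flat_key=None, lift_list=False):
    if isinstance(data, dict):
        for key, value in data.items():
            child_key = key if flat_key is None else f"{flat_key}_{key}"
            flatten(value, result, child_key, lift_list)
    elif isinstance(data, list):
        for item in data:
            flatten(item, result, flat_key, True)   # list items share one flattened key
    else:
        key = flat_key.lower()
        if lift_list:
            result.setdefault(key, []).append(data)
        else:
            result[key] = data

flat = {}
flatten({"print_times": {"infill": 12, "walls": 30}, "extruders": [{"nozzle": 0.4}, {"nozzle": 0.8}]}, flat)
print(flat)  # {'print_times_infill': 12, 'print_times_walls': 30, 'extruders_nozzle': [0.4, 0.8]}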

View File

@ -24,6 +24,7 @@ from UM.Settings.InstanceContainer import InstanceContainer
from cura.CuraApplication import CuraApplication
from cura.Settings.GlobalStack import GlobalStack
from cura.Utils.Threading import call_on_qt_thread
from cura.API import CuraAPI
from UM.i18n import i18nCatalog
@ -85,7 +86,8 @@ class UFPWriter(MeshWriter):
try:
archive.addContentType(extension="json", mime_type="application/json")
setting_textio = StringIO()
json.dump(self._getSliceMetadata(), setting_textio, separators=(", ", ": "), indent=4)
api = CuraApplication.getInstance().getCuraAPI()
json.dump(api.interface.settings.getSliceMetadata(), setting_textio, separators=(", ", ": "), indent=4)
stream = archive.getStream(SLICE_METADATA_PATH)
stream.write(setting_textio.getvalue().encode("UTF-8"))
except EnvironmentError as e:
@ -210,57 +212,3 @@ class UFPWriter(MeshWriter):
return [{"name": item.getName()}
for item in DepthFirstIterator(node)
if item.getMeshData() is not None and not item.callDecoration("isNonPrintingMesh")]
def _getSliceMetadata(self) -> Dict[str, Dict[str, Dict[str, str]]]:
"""Get all changed settings and all settings. For each extruder and the global stack"""
print_information = CuraApplication.getInstance().getPrintInformation()
machine_manager = CuraApplication.getInstance().getMachineManager()
settings = {
"material": {
"length": print_information.materialLengths,
"weight": print_information.materialWeights,
"cost": print_information.materialCosts,
},
"global": {
"changes": {},
"all_settings": {},
},
"quality": asdict(machine_manager.activeQualityDisplayNameMap()),
}
def _retrieveValue(container: InstanceContainer, setting_: str):
value_ = container.getProperty(setting_, "value")
for _ in range(0, 1024): # Cap the iterations to prevent a possibly endless chain of setting functions.
if not isinstance(value_, SettingFunction):
return value_ # Success!
value_ = value_(container)
return 0 # Fallback value after breaking possibly endless loop.
global_stack = cast(GlobalStack, Application.getInstance().getGlobalContainerStack())
# Add global user or quality changes
global_flattened_changes = InstanceContainer.createMergedInstanceContainer(global_stack.userChanges, global_stack.qualityChanges)
for setting in global_flattened_changes.getAllKeys():
settings["global"]["changes"][setting] = _retrieveValue(global_flattened_changes, setting)
# Get global all settings values without user or quality changes
for setting in global_stack.getAllKeys():
settings["global"]["all_settings"][setting] = _retrieveValue(global_stack, setting)
for i, extruder in enumerate(global_stack.extruderList):
# Add extruder fields to settings dictionary
settings[f"extruder_{i}"] = {
"changes": {},
"all_settings": {},
}
# Add extruder user or quality changes
extruder_flattened_changes = InstanceContainer.createMergedInstanceContainer(extruder.userChanges, extruder.qualityChanges)
for setting in extruder_flattened_changes.getAllKeys():
settings[f"extruder_{i}"]["changes"][setting] = _retrieveValue(extruder_flattened_changes, setting)
# Get extruder all settings values without user or quality changes
for setting in extruder.getAllKeys():
settings[f"extruder_{i}"]["all_settings"][setting] = _retrieveValue(extruder, setting)
return settings
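The removed _retrieveValue() is worth noting for its resolution loop: a setting's "value" property may itself be a SettingFunction (a formula), so it keeps evaluating against the container until a plain value comes out, with an iteration cap as a cycle guard. The same pattern with an ordinary callable standing in for SettingFunction (illustrative only, not the Uranium API):
def resolve(value, container, max_depth=1024):
    for _ in range(max_depth):            # bounded, so a cycle of formulas cannot loop forever
        if not callable(value):
            return value                  # plain value -- done
        value = value(container)          # evaluate the formula against the container
    return 0                              # fallback if the chain never terminates

print(resolve(lambda stack: stack["line_width"] * 2, {"line_width": 0.4}))  # 0.8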

View File

@ -42,7 +42,7 @@ class CloudApiClient:
CLUSTER_API_ROOT = f"{ROOT_PATH}/connect/v1"
CURA_API_ROOT = f"{ROOT_PATH}/cura/v1"
DEFAULT_REQUEST_TIMEOUT = 10 # seconds
DEFAULT_REQUEST_TIMEOUT = 30 # seconds
# In order to avoid garbage collection we keep the callbacks in this list.
_anti_gc_callbacks = [] # type: List[Callable[[Any], None]]

View File

@ -331,7 +331,7 @@ class CloudOutputDevice(UltimakerNetworkedPrinterOutputDevice):
return False
[printer, *_] = self._printers
return printer.type in ("MakerBot Method X", "MakerBot Method XL", "MakerBot Sketch")
return printer.type in ("MakerBot Method", "MakerBot Method X", "MakerBot Method XL", "MakerBot Sketch", "MakerBot Sketch Large", "MakerBot Sketch Sprint")
@pyqtProperty(bool, notify=_cloudClusterPrintersChanged)
def supportsPrintJobActions(self) -> bool:

View File

@ -3,5 +3,7 @@
"ultimaker_methodx": "MakerBot Method X",
"ultimaker_methodxl": "MakerBot Method XL",
"ultimaker_factor4": "Ultimaker Factor 4",
"ultimaker_sketch": "MakerBot Sketch"
"ultimaker_sketch": "MakerBot Sketch",
"ultimaker_sketch_large": "MakerBot Sketch Large",
"ultimaker_sketch_sprint": "MakerBot Sketch Sprint"
}

View File

@ -97,6 +97,8 @@ class USBPrinterOutputDevice(PrinterOutputDevice):
CuraApplication.getInstance().getOnExitCallbackManager().addCallback(self._checkActivePrintingUponAppExit)
CuraApplication.getInstance().getPreferences().addPreference("usb_printing/enabled", False)
# This is a callback function that checks if there is any printing in progress via USB when the application tries
# to exit. If so, it will show a confirmation before exiting.
def _checkActivePrintingUponAppExit(self) -> None:
@ -144,6 +146,8 @@ class USBPrinterOutputDevice(PrinterOutputDevice):
CuraApplication.getInstance().getController().setActiveStage("MonitorStage")
CuraApplication.getInstance().getPreferences().setValue("usb_printing/enabled", True)
#Find the g-code to print.
gcode_textio = StringIO()
gcode_writer = cast(MeshWriter, PluginRegistry.getInstance().getPluginObject("GCodeWriter"))

View File

@ -0,0 +1,103 @@
# Copyright (c) 2024 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.
import configparser
from typing import Dict, List, Set, Tuple
import io
from UM.VersionUpgrade import VersionUpgrade
# Just to be sure, since in my testing there were both 0.1.0 and 0.2.0 settings about.
_PLUGIN_NAME = "_plugin__curaenginegradualflow"
_FROM_PLUGINS_SETTINGS = {
"gradual_flow_enabled",
"max_flow_acceleration",
"layer_0_max_flow_acceleration",
"gradual_flow_discretisation_step_size",
"reset_flow_duration",
} # type: Set[str]
_NEW_SETTING_VERSION = "24"
class VersionUpgrade58to59(VersionUpgrade):
def upgradePreferences(self, serialized: str, filename: str) -> Tuple[List[str], List[str]]:
"""
Upgrades preferences by renaming, in the visibility list, the former gradual-flow plugin settings
to their new built-in keys (the plugin prefix is stripped).
It also changes the preferences to have the new version number.
:param serialized: The original contents of the preferences file.
:param filename: The file name of the preferences file.
:return: A list of new file names, and a list of the new contents for
those files.
"""
parser = configparser.ConfigParser(interpolation = None)
parser.read_string(serialized)
# Update version number.
parser["metadata"]["setting_version"] = _NEW_SETTING_VERSION
# Fix renamed settings for visibility
if "visible_settings" in parser["general"]:
all_setting_keys = parser["general"]["visible_settings"].strip().split(";")
if all_setting_keys:
for idx, key in enumerate(all_setting_keys):
if key.startswith(_PLUGIN_NAME):
all_setting_keys[idx] = key.split("__")[-1]
parser["general"]["visible_settings"] = ";".join(all_setting_keys)
result = io.StringIO()
parser.write(result)
return [filename], [result.getvalue()]
def upgradeInstanceContainer(self, serialized: str, filename: str) -> Tuple[List[str], List[str]]:
"""
Upgrades instance containers to remove the settings that were removed in this version.
It also changes the instance containers to have the new version number.
This renames the former gradual-flow plugin settings in the container to their new built-in keys.
:param serialized: The original contents of the instance container.
:param filename: The original file name of the instance container.
:return: A list of new file names, and a list of the new contents for
those files.
"""
parser = configparser.ConfigParser(interpolation = None, comment_prefixes = ())
parser.read_string(serialized)
# Update version number.
parser["metadata"]["setting_version"] = _NEW_SETTING_VERSION
# Rename settings.
if "values" in parser:
for key, value in parser["values"].items():
if key.startswith(_PLUGIN_NAME):
parser["values"][key.split("__")[-1]] = parser["values"][key]
del parser["values"][key]
result = io.StringIO()
parser.write(result)
return [filename], [result.getvalue()]
def upgradeStack(self, serialized: str, filename: str) -> Tuple[List[str], List[str]]:
"""
Upgrades stacks to have the new version number.
:param serialized: The original contents of the stack.
:param filename: The original file name of the stack.
:return: A list of new file names, and a list of the new contents for
those files.
"""
parser = configparser.ConfigParser(interpolation = None)
parser.read_string(serialized)
# Update version number.
if "metadata" not in parser:
parser["metadata"] = {}
parser["metadata"]["setting_version"] = _NEW_SETTING_VERSION
result = io.StringIO()
parser.write(result)
return [filename], [result.getvalue()]
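The rename in upgradeInstanceContainer() only strips the plugin prefix: the trailing segment after the last "__" is already the final setting key, so the split works whether or not a plugin-version segment sits in between. For example (the exact prefixed form below is illustrative):
prefixed_key = "_plugin__curaenginegradualflow__0.2.0__gradual_flow_enabled"  # illustrative form
print(prefixed_key.split("__")[-1])  # gradual_flow_enabled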

View File

@ -0,0 +1,61 @@
# Copyright (c) 2024 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.
from typing import Any, Dict, TYPE_CHECKING
from . import VersionUpgrade58to59
if TYPE_CHECKING:
from UM.Application import Application
upgrade = VersionUpgrade58to59.VersionUpgrade58to59()
def getMetaData() -> Dict[str, Any]:
return {
"version_upgrade": {
# From To Upgrade function
("preferences", 7000023): ("preferences", 7000024, upgrade.upgradePreferences),
("machine_stack", 6000023): ("machine_stack", 6000024, upgrade.upgradeStack),
("extruder_train", 6000023): ("extruder_train", 6000024, upgrade.upgradeStack),
("definition_changes", 4000023): ("definition_changes", 4000024, upgrade.upgradeInstanceContainer),
("quality_changes", 4000023): ("quality_changes", 4000024, upgrade.upgradeInstanceContainer),
("quality", 4000023): ("quality", 4000024, upgrade.upgradeInstanceContainer),
("user", 4000023): ("user", 4000024, upgrade.upgradeInstanceContainer),
("intent", 4000023): ("intent", 4000024, upgrade.upgradeInstanceContainer),
},
"sources": {
"preferences": {
"get_version": upgrade.getCfgVersion,
"location": {"."}
},
"machine_stack": {
"get_version": upgrade.getCfgVersion,
"location": {"./machine_instances"}
},
"extruder_train": {
"get_version": upgrade.getCfgVersion,
"location": {"./extruders"}
},
"definition_changes": {
"get_version": upgrade.getCfgVersion,
"location": {"./definition_changes"}
},
"quality_changes": {
"get_version": upgrade.getCfgVersion,
"location": {"./quality_changes"}
},
"quality": {
"get_version": upgrade.getCfgVersion,
"location": {"./quality"}
},
"user": {
"get_version": upgrade.getCfgVersion,
"location": {"./user"}
}
}
}
def register(app: "Application") -> Dict[str, Any]:
return {"version_upgrade": upgrade}

View File

@ -0,0 +1,8 @@
{
"name": "Version Upgrade 5.8 to 5.9",
"author": "UltiMaker",
"version": "1.0.0",
"description": "Upgrades configurations from Cura 5.8 to Cura 5.9.",
"api": 8,
"i18n-catalog": "cura"
}

View File

@ -0,0 +1,68 @@
import configparser
import io
from typing import Dict, Tuple, List
from UM.VersionUpgrade import VersionUpgrade
_RENAMED_SETTINGS = {
"wall_overhang_speed_factor": "wall_overhang_speed_factors"
} # type: Dict[str, str]
_NEW_SETTING_VERSION = "25"
class VersionUpgrade59to510(VersionUpgrade):
def upgradePreferences(self, serialized: str, filename: str):
parser = configparser.ConfigParser(interpolation = None)
parser.read_string(serialized)
# Fix 'renamed'(ish) settings for visibility
if "visible_settings" in parser["general"]:
all_setting_keys = parser["general"]["visible_settings"].strip().split(";")
if all_setting_keys:
for idx, key in enumerate(all_setting_keys):
if key in _RENAMED_SETTINGS:
all_setting_keys[idx] = _RENAMED_SETTINGS[key]
parser["general"]["visible_settings"] = ";".join(all_setting_keys)
# Update version number.
parser["metadata"]["setting_version"] = _NEW_SETTING_VERSION
result = io.StringIO()
parser.write(result)
return [filename], [result.getvalue()]
def upgradeInstanceContainer(self, serialized: str, filename: str) -> Tuple[List[str], List[str]]:
parser = configparser.ConfigParser(interpolation = None, comment_prefixes = ())
parser.read_string(serialized)
# Update version number.
parser["metadata"]["setting_version"] = _NEW_SETTING_VERSION
if "values" in parser:
for old_name, new_name in _RENAMED_SETTINGS.items():
if old_name in parser["values"]:
parser["values"][new_name] = parser["values"][old_name]
del parser["values"][old_name]
if "wall_overhang_speed_factors" in parser["values"]:
old_value = float(parser["values"]["wall_overhang_speed_factors"])
new_value = [max(1, int(round(old_value)))]
parser["values"]["wall_overhang_speed_factor"] = str(new_value)
result = io.StringIO()
parser.write(result)
return [filename], [result.getvalue()]
def upgradeStack(self, serialized: str, filename: str) -> Tuple[List[str], List[str]]:
parser = configparser.ConfigParser(interpolation = None)
parser.read_string(serialized)
# Update version number.
if "metadata" not in parser:
parser["metadata"] = {}
parser["metadata"]["setting_version"] = _NEW_SETTING_VERSION
result = io.StringIO()
parser.write(result)
return [filename], [result.getvalue()]
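The value conversion above reflects wall_overhang_speed_factors becoming a list setting: the old single percentage is rounded, clamped to at least 1, and wrapped in a one-element list before being written back as a string. For example:
old_value = 47.6
new_value = [max(1, int(round(old_value)))]
print(str(new_value))                     # [48]
print(str([max(1, int(round(0.0)))]))     # [1] -- values below 1 are clamped up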

View File

@ -0,0 +1,60 @@
# Copyright (c) 2024 UltiMaker
# Cura is released under the terms of the LGPLv3 or higher.
from typing import Any, Dict, TYPE_CHECKING
from . import VersionUpgrade59to510
if TYPE_CHECKING:
from UM.Application import Application
upgrade = VersionUpgrade59to510.VersionUpgrade59to510()
def getMetaData() -> Dict[str, Any]:
return {
"version_upgrade": {
# From To Upgrade function
("preferences", 7000024): ("preferences", 7000025, upgrade.upgradePreferences),
("machine_stack", 6000024): ("machine_stack", 6000025, upgrade.upgradeStack),
("extruder_train", 6000024): ("extruder_train", 6000025, upgrade.upgradeStack),
("definition_changes", 4000024): ("definition_changes", 4000025, upgrade.upgradeInstanceContainer),
("quality_changes", 4000024): ("quality_changes", 4000025, upgrade.upgradeInstanceContainer),
("quality", 4000024): ("quality", 4000025, upgrade.upgradeInstanceContainer),
("user", 4000024): ("user", 4000025, upgrade.upgradeInstanceContainer),
("intent", 4000024): ("intent", 4000025, upgrade.upgradeInstanceContainer),
},
"sources": {
"preferences": {
"get_version": upgrade.getCfgVersion,
"location": {"."}
},
"machine_stack": {
"get_version": upgrade.getCfgVersion,
"location": {"./machine_instances"}
},
"extruder_train": {
"get_version": upgrade.getCfgVersion,
"location": {"./extruders"}
},
"definition_changes": {
"get_version": upgrade.getCfgVersion,
"location": {"./definition_changes"}
},
"quality_changes": {
"get_version": upgrade.getCfgVersion,
"location": {"./quality_changes"}
},
"quality": {
"get_version": upgrade.getCfgVersion,
"location": {"./quality"}
},
"user": {
"get_version": upgrade.getCfgVersion,
"location": {"./user"}
}
}
}
def register(app: "Application") -> Dict[str, Any]:
return {"version_upgrade": upgrade}

View File

@ -0,0 +1,8 @@
{
"name": "Version Upgrade 5.9 to 5.10",
"author": "Ultimaker B.V.",
"version": "1.0.0",
"description": "Upgrades configurations from Cura 5.9 to Cura 5.10",
"api": 8,
"i18n-catalog": "cura"
}

View File

@ -11,12 +11,14 @@ import xml.etree.ElementTree as ET
from UM.PluginRegistry import PluginRegistry
from UM.Resources import Resources
from UM.Logger import Logger
from UM.Decorators import CachedMemberFunctions
import UM.Dictionary
from UM.Settings.InstanceContainer import InstanceContainer
from UM.Settings.ContainerRegistry import ContainerRegistry
from UM.ConfigurationErrorMessage import ConfigurationErrorMessage
from cura.CuraApplication import CuraApplication
from cura.PrinterOutput.FormatMaps import FormatMaps
from cura.Machines.VariantType import VariantType
try:
@ -70,6 +72,8 @@ class XmlMaterialProfile(InstanceContainer):
Logger.log("w", "Can't change metadata {key} of material {material_id} because it's read-only.".format(key = key, material_id = self.getId()))
return
CachedMemberFunctions.clearInstanceCache(self)
# Some metadata such as diameter should also be instantiated to be a setting. Go through all values for the
# "properties" field and apply the new values to SettingInstances as well.
new_setting_values_dict = {}
@ -249,7 +253,7 @@ class XmlMaterialProfile(InstanceContainer):
machine_variant_map[definition_id][variant_name] = variant_dict
# Map machine human-readable names to IDs
product_id_map = self.getProductIdMap()
product_id_map = FormatMaps.getProductIdMap()
for definition_id, container in machine_container_map.items():
definition_id = container.getMetaDataEntry("definition")
@ -479,6 +483,7 @@ class XmlMaterialProfile(InstanceContainer):
first.append(element)
def clearData(self):
CachedMemberFunctions.clearInstanceCache(self)
self._metadata = {
"id": self.getId(),
"name": ""
@ -518,6 +523,8 @@ class XmlMaterialProfile(InstanceContainer):
def deserialize(self, serialized, file_name = None):
"""Overridden from InstanceContainer"""
CachedMemberFunctions.clearInstanceCache(self)
containers_to_add = []
# update the serialized data first
from UM.Settings.Interfaces import ContainerInterface
@ -647,7 +654,7 @@ class XmlMaterialProfile(InstanceContainer):
self._dirty = False
# Map machine human-readable names to IDs
product_id_map = self.getProductIdMap()
product_id_map = FormatMaps.getProductIdMap()
machines = data.iterfind("./um:settings/um:machine", self.__namespaces)
for machine in machines:
@ -911,9 +918,6 @@ class XmlMaterialProfile(InstanceContainer):
base_metadata["properties"] = property_values
base_metadata["definition"] = "fdmprinter"
# Certain materials are loaded but should not be visible / selectable to the user.
base_metadata["visible"] = not base_metadata.get("abstract_color", False)
compatible_entries = data.iterfind("./um:settings/um:setting[@key='hardware compatible']", cls.__namespaces)
try:
common_compatibility = cls._parseCompatibleValue(next(compatible_entries).text) # type: ignore
@ -923,7 +927,7 @@ class XmlMaterialProfile(InstanceContainer):
result_metadata.append(base_metadata)
# Map machine human-readable names to IDs
product_id_map = cls.getProductIdMap()
product_id_map = FormatMaps.getProductIdMap()
for machine in data.iterfind("./um:settings/um:machine", cls.__namespaces):
machine_compatibility = common_compatibility
@ -1127,29 +1131,6 @@ class XmlMaterialProfile(InstanceContainer):
id_list = list(id_list)
return id_list
__product_to_id_map: Optional[Dict[str, List[str]]] = None
@classmethod
def getProductIdMap(cls) -> Dict[str, List[str]]:
"""Gets a mapping from product names in the XML files to their definition IDs.
This loads the mapping from a file.
"""
if cls.__product_to_id_map is not None:
return cls.__product_to_id_map
plugin_path = cast(str, PluginRegistry.getInstance().getPluginPath("XmlMaterialProfile"))
product_to_id_file = os.path.join(plugin_path, "product_to_id.json")
with open(product_to_id_file, encoding = "utf-8") as f:
contents = ""
for line in f:
contents += line if "#" not in line else "".join([line.replace("#", str(n)) for n in range(1, 12)])
cls.__product_to_id_map = json.loads(contents)
cls.__product_to_id_map = {key: [value] for key, value in cls.__product_to_id_map.items()}
#This also loads "Ultimaker S5" -> "ultimaker_s5" even though that is not strictly necessary with the default to change spaces into underscores.
#However it is not always loaded with that default; this mapping is also used in serialize() without that default.
return cls.__product_to_id_map
@staticmethod
def _parseCompatibleValue(value: str):
"""Parse the value of the "material compatible" property."""

View File

@ -1,6 +0,0 @@
pytest
pyinstaller==6.3.0
pyinstaller-hooks-contrib
pyyaml
sip==6.5.1
jinja2

View File

@ -1 +0,0 @@
git+https://github.com/ultimaker/libcharon@master/s-line#egg=charon

View File

@ -1,408 +0,0 @@
### Direct requirements for Uranium and libCharon ###
PyQt6-sip==13.6.0 \
--hash=sha256:0dfd22cfedd87e96f9d51e0778ca2ba3dc0be83e424e9e0f98f6994d8d9c90f0 \
--hash=sha256:13885361ca2cb2f5085d50359ba61b3fabd41b139fb58f37332acbe631ef2357 \
--hash=sha256:24441032a29791e82beb7dfd76878339058def0e97fdb7c1cea517f3a0e6e96b \
--hash=sha256:2486e1588071943d4f6657ba09096dc9fffd2322ad2c30041e78ea3f037b5778 \
--hash=sha256:3075d8b325382750829e6cde6971c943352309d35768a4d4da0587459606d562 \
--hash=sha256:33ea771fe777eb0d1a2c3ef35bcc3f7a286eb3ff09cd5b2fdd3d87d1f392d7e8 \
--hash=sha256:39854dba35f8e5a4288da26ecb5f40b4c5ec1932efffb3f49d5ea435a7f37fb3 \
--hash=sha256:3bf03e130fbfd75c9c06e687b86ba375410c7a9e835e4e03285889e61dd4b0c4 \
--hash=sha256:43fb8551796030aae3d66d6e35e277494071ec6172cd182c9569ab7db268a2f5 \
--hash=sha256:58f68a48400e0b3d1ccb18090090299bad26e3aed7ccb7057c65887b79b8aeea \
--hash=sha256:5b9c6b6f9cfccb48cbb78a59603145a698fb4ffd176764d7083e5bf47631d8df \
--hash=sha256:747f6ca44af81777a2c696bd501bc4815a53ec6fc94d4e25830e10bc1391f8ab \
--hash=sha256:86a7b67c64436e32bffa9c28c9f21bf14a9faa54991520b12c3f6f435f24df7f \
--hash=sha256:8c282062125eea5baf830c6998587d98c50be7c3a817a057fb95fef647184012 \
--hash=sha256:8f9df9f7ccd8a9f0f1d36948c686f03ce1a1281543a3e636b7b7d5e086e1a436 \
--hash=sha256:98bf954103b087162fa63b3a78f30b0b63da22fd6450b610ec1b851dbb798228 \
--hash=sha256:9adf672f9114687533a74d5c2d4c03a9a929ad5ad9c3e88098a7da1a440ab916 \
--hash=sha256:a6ce80bc24618d8a41be8ca51ad9f10e8bc4296dd90ab2809573df30a23ae0e5 \
--hash=sha256:d6b5f699aaed0ac1fcd23e8fbca70d8a77965831b7c1ce474b81b1678817a49d \
--hash=sha256:fa759b6339ff7e25f9afe2a6b651b775f0a36bcb3f5fa85e81a90d3b033c83f4 \
--hash=sha256:fa7b10af7488efc5e53b41dd42c0f421bde6c2865a107af7ae259aff9d841da9
PyQt6==6.6.0 \
--hash=sha256:33655db05ac2de699320f035250c21434c77144a6a2943aca3f4c579dabc3f7b \
--hash=sha256:3ef68830a9b32050c30f7962c56a5927802c9193b68eaf405faecb8ce9ae10a8 \
--hash=sha256:d41512d66044c2df9c5f515a56a922170d68a37b3406ffddc8b4adc57181b576 \
--hash=sha256:fc7185d65755f26d7a6842492ec5398c92544dc4eafbbcbef1b1922aca585c96
PyQt6-Qt6==6.6.0 \
--hash=sha256:1b079a33088d32ff47872cdb37fd15aa42101f0be46c3340244483849b781438 \
--hash=sha256:8cb30d64a4d32465ea1686bc827cbe452225fb387c4873356b0fa7b9ae63534f \
--hash=sha256:a151f34712cd645111e89cb30b02e5fb69c9dcc3603ab3c03a561e874bd7cbcf \
--hash=sha256:e5483ae04bf107411c7469f1be9f9e2eb9840303e788b3ac524fe30af90d45f4
PyQt6-NetworkAuth==6.6.0 \
--hash=sha256:7b90b81792fe53105287c8cbb5e4b22bc44a482268ffb7d3e33f852807f86182 \
--hash=sha256:c7e2335159aa795e2fe6fb069ccce6308672ab80f26c50fab57caf957371cbb5 \
--hash=sha256:cdfc0bfaea16a9e09f075bdafefb996aa9fdec392052ba4fb3cbac233c1958fb \
--hash=sha256:f60ff9a62f5129dc2a9d4c495fb47f9a03e4dfb666b50fb7d61f46e89bf7b6a2
PyQt6-NetworkAuth-Qt6==6.6.0 \
--hash=sha256:481d9093e1fb1ac6843d8beabcd359cc34b74b9a2cbb3e2b68d96bd3f178d4e0 \
--hash=sha256:4cc48fd375730a0ba5fbed9d64abb2914f587377560a78a63aff893f9e276a45 \
--hash=sha256:5006deabf55304d4a3e0b3c954f93e5835546b11e789d14653a2493d12d3a063 \
--hash=sha256:bcd56bfc892fec961c51eba3c0bf32ba8317a762d9e254d3830569611ed569d6
certifi==2023.5.7; \
--hash=sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716
cryptography==41.0.1 \
--hash=sha256:059e348f9a3c1950937e1b5d7ba1f8e968508ab181e75fc32b879452f08356db \
--hash=sha256:1a5472d40c8f8e91ff7a3d8ac6dfa363d8e3138b961529c996f3e2df0c7a411a \
--hash=sha256:1a8e6c2de6fbbcc5e14fd27fb24414507cb3333198ea9ab1258d916f00bc3039 \
--hash=sha256:1fee5aacc7367487b4e22484d3c7e547992ed726d14864ee33c0176ae43b0d7c \
--hash=sha256:5d092fdfedaec4cbbffbf98cddc915ba145313a6fdaab83c6e67f4e6c218e6f3 \
--hash=sha256:5f0ff6e18d13a3de56f609dd1fd11470918f770c6bd5d00d632076c727d35485 \
--hash=sha256:7bfc55a5eae8b86a287747053140ba221afc65eb06207bedf6e019b8934b477c \
--hash=sha256:7fa01527046ca5facdf973eef2535a27fec4cb651e4daec4d043ef63f6ecd4ca \
--hash=sha256:8dde71c4169ec5ccc1087bb7521d54251c016f126f922ab2dfe6649170a3b8c5 \
--hash=sha256:8f4ab7021127a9b4323537300a2acfb450124b2def3756f64dc3a3d2160ee4b5 \
--hash=sha256:948224d76c4b6457349d47c0c98657557f429b4e93057cf5a2f71d603e2fc3a3 \
--hash=sha256:9a6c7a3c87d595608a39980ebaa04d5a37f94024c9f24eb7d10262b92f739ddb \
--hash=sha256:b46e37db3cc267b4dea1f56da7346c9727e1209aa98487179ee8ebed09d21e43 \
--hash=sha256:b4ceb5324b998ce2003bc17d519080b4ec8d5b7b70794cbd2836101406a9be31 \
--hash=sha256:cb33ccf15e89f7ed89b235cff9d49e2e62c6c981a6061c9c8bb47ed7951190bc \
--hash=sha256:d198820aba55660b4d74f7b5fd1f17db3aa5eb3e6893b0a41b75e84e4f9e0e4b \
--hash=sha256:d34579085401d3f49762d2f7d6634d6b6c2ae1242202e860f4d26b046e3a1006 \
--hash=sha256:eb8163f5e549a22888c18b0d53d6bb62a20510060a22fd5a995ec8a05268df8a \
--hash=sha256:f73bff05db2a3e5974a6fd248af2566134d8981fd7ab012e5dd4ddb1d9a70699
zeroconf==0.31.0 \
--hash=sha256:53a180248471c6f81bd1fffcbce03ed93d7d8eaf10905c9121ac1ea996d19844 \
--hash=sha256:5a468da018bc3f04bbce77ae247924d802df7aeb4c291bbbb5a9616d128800b0
importlib-metadata==4.10.0 \
--hash=sha256:b7cf7d3fef75f1e4c80a96ca660efbd51473d7e8f39b5ab9210febc7809012a4 \
--hash=sha256:92a8b58ce734b2a4494878e0ecf7d79ccd7a128b5fc6014c401e0b61f006f0f6
keyring==23.0.1 \
--hash=sha256:045703609dd3fccfcdb27da201684278823b72af515aedec1a8515719a038cb8 \
--hash=sha256:8f607d7d1cc502c43a932a275a56fe47db50271904513a379d39df1af277ac48
# Use Numpy wheel that is compiled with Intel optimizations (MKL). Obtained from https://www.lfd.uci.edu/~gohlke/pythonlibs/#numpy
# We cache this at software.ultimaker.com since this website tends to remove older versions rather quickly.
https://software.ultimaker.com/cura-binary-dependencies/numpy-1.21.5+mkl-cp310-cp310-win_amd64.whl; \
sys_platform=="win32" \
--hash=sha256:fbd5d5126b730a151134d21994a951fe28df06464e0c9a2cba2a4132e542a5fc
numpy==1.21.5; \
sys_platform!="win32" \
--hash=sha256:00c9fa73a6989895b8815d98300a20ac993c49ac36c8277e8ffeaa3631c0dbbb \
--hash=sha256:025b497014bc33fc23897859350f284323f32a2fff7654697f5a5fc2a19e9939 \
--hash=sha256:08de8472d9f7571f9d51b27b75e827f5296295fa78817032e84464be8bb905bc \
--hash=sha256:1964db2d4a00348b7a60ee9d013c8cb0c566644a589eaa80995126eac3b99ced \
--hash=sha256:2a9add27d7fc0fdb572abc3b2486eb3b1395da71e0254c5552b2aad2a18b5441 \
--hash=sha256:2d8adfca843bc46ac199a4645233f13abf2011a0b2f4affc5c37cd552626f27b \
--hash=sha256:301e408a052fdcda5cdcf03021ebafc3c6ea093021bf9d1aa47c54d48bdad166 \
--hash=sha256:311283acf880cfcc20369201bd75da907909afc4666966c7895cbed6f9d2c640 \
--hash=sha256:341dddcfe3b7b6427a28a27baa59af5ad51baa59bfec3264f1ab287aa3b30b13 \
--hash=sha256:3a5098df115340fb17fc93867317a947e1dcd978c3888c5ddb118366095851f8 \
--hash=sha256:3c978544be9e04ed12016dd295a74283773149b48f507d69b36f91aa90a643e5 \
--hash=sha256:3d893b0871322eaa2f8c7072cdb552d8e2b27645b7875a70833c31e9274d4611 \
--hash=sha256:4fe6a006557b87b352c04596a6e3f12a57d6e5f401d804947bd3188e6b0e0e76 \
--hash=sha256:507c05c7a37b3683eb08a3ff993bd1ee1e6c752f77c2f275260533b265ecdb6c \
--hash=sha256:58ca1d7c8aef6e996112d0ce873ac9dfa1eaf4a1196b4ff7ff73880a09923ba7 \
--hash=sha256:61bada43d494515d5b122f4532af226fdb5ee08fe5b5918b111279843dc6836a \
--hash=sha256:69a5a8d71c308d7ef33ef72371c2388a90e3495dbb7993430e674006f94797d5 \
--hash=sha256:6a5928bc6241264dce5ed509e66f33676fc97f464e7a919edc672fb5532221ee \
--hash=sha256:7b9d6b14fc9a4864b08d1ba57d732b248f0e482c7b2ff55c313137e3ed4d8449 \
--hash=sha256:a7c4b701ca418cd39e28ec3b496e6388fe06de83f5f0cb74794fa31cfa384c02 \
--hash=sha256:a7e8f6216f180f3fd4efb73de5d1eaefb5f5a1ee5b645c67333033e39440e63a \
--hash=sha256:b545ebadaa2b878c8630e5bcdb97fc4096e779f335fc0f943547c1c91540c815 \
--hash=sha256:c293d3c0321996cd8ffe84215ffe5d269fd9d1d12c6f4ffe2b597a7c30d3e593 \
--hash=sha256:c5562bcc1a9b61960fc8950ade44d00e3de28f891af0acc96307c73613d18f6e \
--hash=sha256:ca9c23848292c6fe0a19d212790e62f398fd9609aaa838859be8459bfbe558aa \
--hash=sha256:cc1b30205d138d1005adb52087ff45708febbef0e420386f58664f984ef56954 \
--hash=sha256:dbce7adeb66b895c6aaa1fad796aaefc299ced597f6fbd9ceddb0dd735245354 \
--hash=sha256:dc4b2fb01f1b4ddbe2453468ea0719f4dbb1f5caa712c8b21bb3dd1480cd30d9 \
--hash=sha256:eed2afaa97ec33b4411995be12f8bdb95c87984eaa28d76cf628970c8a2d689a \
--hash=sha256:fc7a7d7b0ed72589fd8b8486b9b42a564f10b8762be8bd4d9df94b807af4a089
pyclipper==1.3.0.post3; \
--hash=sha256:1408461fba4985d58589fa74c59e273e8aa91d8b55c2e9a6abf966eed7562d90 \
--hash=sha256:1df7e4bce6ac68abfe926d319f52b749b7c9d5e0a6bd7112a0c7f2f908abecbc \
--hash=sha256:24b6b70114941805c14a33e9378e52d24b18791f1bfc365853d5adb33425f173 \
--hash=sha256:2b0950dada5b56a002dddccf131815a8f9b55c4df86ff6a43b7ef48a91b572aa \
--hash=sha256:2d51757df15f1721946f39016191c7d685306fc69d8a5f2933a1d22119150a1d \
--hash=sha256:341556b83ce2a5d4ee36e263e04751a9949e4161f60f0011f9500b845b25ce3c \
--hash=sha256:46b3996c8dcda562c408e653ccef8efd95a7d69400f9119df2c2cb8083d36bf8 \
--hash=sha256:5434e1e69425dc7958579b1f7bedfa8a7cce79400e1b708a42be769a165a3a2c \
--hash=sha256:5b4e0e360ebfc25d01c7e0873b27f912d1c02d99b84393d526bc01836a5fb9f4 \
--hash=sha256:60f20e96e9e055e9bb2e729fe6078969ce252c6b7b1b18d8d963e5343d99f99e \
--hash=sha256:615bece709d8c304d97089a83f8ff91ca0d2646e8fe42f2637d7cdfcf99a6e4e \
--hash=sha256:639fbc55569b94487f89261b1656e3e655d06888a582218c5432c426705d1f6f \
--hash=sha256:6748239b89a5edd00b3ce36cb5c7a177978ff3361de861fe2cc559bb2760625d \
--hash=sha256:679bfd1fd4595a3f58a706256dc6cc7179ee40fbeff4d134aa3163a9c87ca545 \
--hash=sha256:6ace0de72f252e48eda28981e24142a2b02ac17eacc3d8a2baf628671dd8cc8f \
--hash=sha256:771ba332790e88eb4aa9de2172131af25525ac23fdda789691e543962849f149 \
--hash=sha256:8fabba875314ebc751b66e571b8b0d5319c76e22051304880a552d70db2252af \
--hash=sha256:a050ec9df95e9611461adb7f86da4f066848c045d966c46e7b124593e6410e2a \
--hash=sha256:ab7e2f9b655333a37002b90bea47d77ff8d1f01293798911afa7f39217f1b71d \
--hash=sha256:b0097aef9ac8a5e10434059641fea338fb682c61993bfe65670e459ec14b4151 \
--hash=sha256:b509cfd696962683553cd6b9fc7f0baf05bff47c09fd68b085a8aea493436267 \
--hash=sha256:bad590e701eaef644899ce164631f83e39669796e552f17aef5a37238646b392 \
--hash=sha256:c586ca07c1418d4f010c6bc65215c4b193211e1b95dd8a1bd312d8207c5ccf6a \
--hash=sha256:cb5ad68b82c2aa408672444e567cea138db29790997d603525878632d61fd6ec \
--hash=sha256:cd9f0496daa9b505902848d401bfc3ffe80ee3a6863451fc6c05ceb2a45b9d8f \
--hash=sha256:da4d8f253dd8e152b3364902bed5e221165d3af4e71e2ae81eb9a9a9802089a2 \
--hash=sha256:e8d77755a00566e0f0cf48da2e42e76ff93423b55880621944f950058be3fc0e \
--hash=sha256:ebc13dbfaec1b489fc6ed92a642b8a2c7072fa2d4bc12514cc2bbeacd47c5baf \
--hash=sha256:ed5ea68bc6f3428fbf9d98f1e72e2020d763d88053cc9a9d31b2eeb49500b26f \
--hash=sha256:ee52b9d29512eb7b8b9faee6db3f8694eb6c8455785a5d2d561c40eca67b226f \
--hash=sha256:f428ecdd224ec30c1a4dbdbaac44e746cbe9a05c25627b05cc7bc2dcda235a26 \
--hash=sha256:f5f3ad171f21511813085ac549bb717bbdcc0f4da27abf6b0629438e1f23ca0b
scipy==1.9.1 \
--hash=sha256:c61b4a91a702e8e04aeb0bfc40460e1f17a640977c04dda8757efb0199c75332 \
--hash=sha256:d79da472015d0120ba9b357b28a99146cd6c17b9609403164b1a8ed149b4dfc8 \
--hash=sha256:825951b88f56765aeb6e5e38ac9d7d47407cfaaeb008d40aa1b45a2d7ea2731e \
--hash=sha256:f950a04b33e17b38ff561d5a0951caf3f5b47caa841edd772ffb7959f20a6af0 \
--hash=sha256:8cc81ac25659fec73599ccc52c989670e5ccd8974cf34bacd7b54a8d809aff1a \
--hash=sha256:8d3faa40ac16c6357aaf7ea50394ea6f1e8e99d75e927a51102b1943b311b4d9 \
--hash=sha256:7a412c476a91b080e456229e413792bbb5d6202865dae963d1e6e28c2bb58691 \
--hash=sha256:eb954f5aca4d26f468bbebcdc5448348eb287f7bea536c6306f62ea062f63d9a \
--hash=sha256:3c6f5d1d4b9a5e4fe5e14f26ffc9444fc59473bbf8d45dc4a9a15283b7063a72 \
--hash=sha256:bc4e2c77d4cd015d739e75e74ebbafed59ba8497a7ed0fd400231ed7683497c4 \
--hash=sha256:0419485dbcd0ed78c0d5bf234c5dd63e86065b39b4d669e45810d42199d49521 \
--hash=sha256:34441dfbee5b002f9e15285014fd56e5e3372493c3e64ae297bae2c4b9659f5a \
--hash=sha256:b97b479f39c7e4aaf807efd0424dec74bbb379108f7d22cf09323086afcd312c \
--hash=sha256:e8fe305d9d67a81255e06203454729405706907dccbdfcc330b7b3482a6c371d \
--hash=sha256:39ab9240cd215a9349c85ab908dda6d732f7d3b4b192fa05780812495536acc4 \
--hash=sha256:71487c503e036740635f18324f62a11f283a632ace9d35933b2b0a04fd898c98 \
--hash=sha256:3bc1ab68b9a096f368ba06c3a5e1d1d50957a86665fc929c4332d21355e7e8f4 \
--hash=sha256:f7c39f7dbb57cce00c108d06d731f3b0e2a4d3a95c66d96bce697684876ce4d4 \
--hash=sha256:47d1a95bd9d37302afcfe1b84c8011377c4f81e33649c5a5785db9ab827a6ade \
--hash=sha256:96d7cf7b25c9f23c59a766385f6370dab0659741699ecc7a451f9b94604938ce \
--hash=sha256:09412eb7fb60b8f00b328037fd814d25d261066ebc43a1e339cdce4f7502877e \
--hash=sha256:90c805f30c46cf60f1e76e947574f02954d25e3bb1e97aa8a07bc53aa31cf7d1 \
--hash=sha256:26d28c468900e6d5fdb37d2812ab46db0ccd22c63baa095057871faa3a498bc9
trimesh==3.9.36 \
--hash=sha256:f01e8edab14d1999700c980c21a1546f37417216ad915a53be649d263130181e \
--hash=sha256:8ac8bea693b3ee119f11b022fc9b9481c9f1af06cb38bc859bf5d16bbbe49b23
sentry-sdk==0.13.5 \
--hash=sha256:05285942901d38c7ce2498aba50d8e87b361fc603281a5902dda98f3f8c5e145 \
--hash=sha256:c6b919623e488134a728f16326c6f0bcdab7e3f59e7f4c472a90eea4d6d8fe82
mypy==0.931 \
--hash=sha256:0038b21890867793581e4cb0d810829f5fd4441aa75796b53033af3aa30430ce \
--hash=sha256:1171f2e0859cfff2d366da2c7092b06130f232c636a3f7301e3feb8b41f6377d \
--hash=sha256:1b06268df7eb53a8feea99cbfff77a6e2b205e70bf31743e786678ef87ee8069 \
--hash=sha256:1b65714dc296a7991000b6ee59a35b3f550e0073411ac9d3202f6516621ba66c \
--hash=sha256:1bf752559797c897cdd2c65f7b60c2b6969ffe458417b8d947b8340cc9cec08d \
--hash=sha256:300717a07ad09525401a508ef5d105e6b56646f7942eb92715a1c8d610149714 \
--hash=sha256:3c5b42d0815e15518b1f0990cff7a705805961613e701db60387e6fb663fe78a \
--hash=sha256:4365c60266b95a3f216a3047f1d8e3f895da6c7402e9e1ddfab96393122cc58d \
--hash=sha256:50c7346a46dc76a4ed88f3277d4959de8a2bd0a0fa47fa87a4cde36fe247ac05 \
--hash=sha256:5b56154f8c09427bae082b32275a21f500b24d93c88d69a5e82f3978018a0266 \
--hash=sha256:74f7eccbfd436abe9c352ad9fb65872cc0f1f0a868e9d9c44db0893440f0c697 \
--hash=sha256:7b3f6f557ba4afc7f2ce6d3215d5db279bcf120b3cfd0add20a5d4f4abdae5bc \
--hash=sha256:8c11003aaeaf7cc2d0f1bc101c1cc9454ec4cc9cb825aef3cafff8a5fdf4c799 \
--hash=sha256:8ca7f8c4b1584d63c9a0f827c37ba7a47226c19a23a753d52e5b5eddb201afcd \
--hash=sha256:c89702cac5b302f0c5d33b172d2b55b5df2bede3344a2fbed99ff96bddb2cf00 \
--hash=sha256:d8f1ff62f7a879c9fe5917b3f9eb93a79b78aad47b533911b853a757223f72e7 \
--hash=sha256:d9d2b84b2007cea426e327d2483238f040c49405a6bf4074f605f0156c91a47a \
--hash=sha256:e839191b8da5b4e5d805f940537efcaa13ea5dd98418f06dc585d2891d228cf0 \
--hash=sha256:f9fe20d0872b26c4bba1c1be02c5340de1019530302cf2dcc85c7f9fc3252ae0 \
--hash=sha256:ff3bf387c14c805ab1388185dd22d6b210824e164d4bb324b195ff34e322d166
pyserial==3.4 \
--hash=sha256:6e2d401fdee0eab996cf734e67773a0143b932772ca8b42451440cfed942c627 \
--hash=sha256:e0770fadba80c31013896c7e6ef703f72e7834965954a78e71a3049488d4d7d8
### Indirect requirements ###
chardet==3.0.4 \
--hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \
--hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691
idna==2.8 \
--hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \
--hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c
attrs==21.3.0 \
--hash=sha256:8f7335278dedd26b58c38e006338242cc0977f06d51579b2b8b87b9b33bff66c \
--hash=sha256:50f3c9b216dc9021042f71b392859a773b904ce1a029077f58f6598272432045
requests==2.22.0 \
--hash=sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4 \
--hash=sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31
# twisted
Twisted==21.2.0 \
--hash=sha256:77544a8945cf69b98d2946689bbe0c75de7d145cdf11f391dd487eae8fc95a12 \
--hash=sha256:aab38085ea6cda5b378b519a0ec99986874921ee8881318626b0a3414bb2631e
constantly==15.1.0 \
--hash=sha256:586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35 \
--hash=sha256:dd2fa9d6b1a51a83f0d7dd76293d734046aa176e384bf6e33b7e44880eb37c5d
hyperlink==21.0.0 \
--hash=sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b \
--hash=sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4
incremental==22.10.0 \
--hash=sha256:b864a1f30885ee72c5ac2835a761b8fe8aa9c28b9395cacf27286602688d3e51 \
--hash=sha256:912feeb5e0f7e0188e6f42241d2f450002e11bbc0937c65865045854c24c0bd0
zope.interface==5.4.0 \
--hash=sha256:0f91b5b948686659a8e28b728ff5e74b1be6bf40cb04704453617e5f1e945ef3 \
--hash=sha256:3c02411a3b62668200910090a0dff17c0b25aaa36145082a5a6adf08fa281e54 \
--hash=sha256:5dba5f530fec3f0988d83b78cc591b58c0b6eb8431a85edd1569a0539a8a5a0e \
--hash=sha256:bf68f4b2b6683e52bec69273562df15af352e5ed25d1b6641e7efddc5951d1a7 \
--hash=sha256:db1fa631737dab9fa0b37f3979d8d2631e348c3b4e8325d6873c2541d0ae5a48 \
--hash=sha256:f44e517131a98f7a76696a7b21b164bcb85291cee106a23beccce454e1f433a4
Automat==20.2.0 \
--hash=sha256:7979803c74610e11ef0c0d68a2942b152df52da55336e0c9d58daf1831cbdf33 \
--hash=sha256:b6feb6455337df834f6c9962d6ccf771515b7d939bca142b29c20c2376bc6111
twisted-iocpsupport==1.0.2; \
sys_platform=="win32" \
--hash=sha256:306becd6e22ab6e8e4f36b6bdafd9c92e867c98a5ce517b27fdd27760ee7ae41 \
--hash=sha256:3c61742cb0bc6c1ac117a7e5f422c129832f0c295af49e01d8a6066df8cfc04d \
--hash=sha256:72068b206ee809c9c596b57b5287259ea41ddb4774d86725b19f35bf56aa32a9 \
--hash=sha256:7d972cfa8439bdcb35a7be78b7ef86d73b34b808c74be56dfa785c8a93b851bf \
--hash=sha256:81b3abe3527b367da0220482820cb12a16c661672b7bcfcde328902890d63323 \
--hash=sha256:851b3735ca7e8102e661872390e3bce88f8901bece95c25a0c8bb9ecb8a23d32 \
--hash=sha256:985c06a33f5c0dae92c71a036d1ea63872ee86a21dd9b01e1f287486f15524b4 \
--hash=sha256:9dbb8823b49f06d4de52721b47de4d3b3026064ef4788ce62b1a21c57c3fff6f \
--hash=sha256:b435857b9efcbfc12f8c326ef0383f26416272260455bbca2cd8d8eca470c546 \
--hash=sha256:b76b4eed9b27fd63ddb0877efdd2d15835fdcb6baa745cb85b66e5d016ac2878 \
--hash=sha256:b9fed67cf0f951573f06d560ac2f10f2a4bbdc6697770113a2fc396ea2cb2565 \
--hash=sha256:bf4133139d77fc706d8f572e6b7d82871d82ec7ef25d685c2351bdacfb701415
numpy-stl==2.10.1 \
--hash=sha256:f6b529b8a8112dfe456d4f7697c7aee0aca62be5a873879306afe4b26fca963c
python-utils==2.3.0 \
--hash=sha256:34aaf26b39b0b86628008f2ae0ac001b30e7986a8d303b61e1357dfcdad4f6d3 \
--hash=sha256:e25f840564554eaded56eaa395bca507b0b9e9f0ae5ecb13a8cb785305c56d25
six==1.12.0 \
--hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
--hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73
shapely==2.0.1 \
--hash=sha256:01224899ff692a62929ef1a3f5fe389043e262698a708ab7569f43a99a48ae82 \
--hash=sha256:05c51a29336e604c084fb43ae5dbbfa2c0ef9bd6fedeae0a0d02c7b57a56ba46 \
--hash=sha256:09d6c7763b1bee0d0a2b84bb32a4c25c6359ad1ac582a62d8b211e89de986154 \
--hash=sha256:193a398d81c97a62fc3634a1a33798a58fd1dcf4aead254d080b273efbb7e3ff \
--hash=sha256:1a34a23d6266ca162499e4a22b79159dc0052f4973d16f16f990baa4d29e58b6 \
--hash=sha256:2569a4b91caeef54dd5ae9091ae6f63526d8ca0b376b5bb9fd1a3195d047d7d4 \
--hash=sha256:33403b8896e1d98aaa3a52110d828b18985d740cc9f34f198922018b1e0f8afe \
--hash=sha256:3ad81f292fffbd568ae71828e6c387da7eb5384a79db9b4fde14dd9fdeffca9a \
--hash=sha256:3cb256ae0c01b17f7bc68ee2ffdd45aebf42af8992484ea55c29a6151abe4386 \
--hash=sha256:45b4833235b90bc87ee26c6537438fa77559d994d2d3be5190dd2e54d31b2820 \
--hash=sha256:4641325e065fd3e07d55677849c9ddfd0cf3ee98f96475126942e746d55b17c8 \
--hash=sha256:502e0a607f1dcc6dee0125aeee886379be5242c854500ea5fd2e7ac076b9ce6d \
--hash=sha256:66a6b1a3e72ece97fc85536a281476f9b7794de2e646ca8a4517e2e3c1446893 \
--hash=sha256:70a18fc7d6418e5aea76ac55dce33f98e75bd413c6eb39cfed6a1ba36469d7d4 \
--hash=sha256:7d3bbeefd8a6a1a1017265d2d36f8ff2d79d0162d8c141aa0d37a87063525656 \
--hash=sha256:83a8ec0ee0192b6e3feee9f6a499d1377e9c295af74d7f81ecba5a42a6b195b7 \
--hash=sha256:865bc3d7cc0ea63189d11a0b1120d1307ed7a64720a8bfa5be2fde5fc6d0d33f \
--hash=sha256:90cfa4144ff189a3c3de62e2f3669283c98fb760cfa2e82ff70df40f11cadb39 \
--hash=sha256:91575d97fd67391b85686573d758896ed2fc7476321c9d2e2b0c398b628b961c \
--hash=sha256:9a6ac34c16f4d5d3c174c76c9d7614ec8fe735f8f82b6cc97a46b54f386a86bf \
--hash=sha256:a529218e72a3dbdc83676198e610485fdfa31178f4be5b519a8ae12ea688db14 \
--hash=sha256:a70a614791ff65f5e283feed747e1cc3d9e6c6ba91556e640636bbb0a1e32a71 \
--hash=sha256:ac1dfc397475d1de485e76de0c3c91cc9d79bd39012a84bb0f5e8a199fc17bef \
--hash=sha256:b06d031bc64149e340448fea25eee01360a58936c89985cf584134171e05863f \
--hash=sha256:b4f0711cc83734c6fad94fc8d4ec30f3d52c1787b17d9dca261dc841d4731c64 \
--hash=sha256:b50c401b64883e61556a90b89948297f1714dbac29243d17ed9284a47e6dd731 \
--hash=sha256:b519cf3726ddb6c67f6a951d1bb1d29691111eaa67ea19ddca4d454fbe35949c \
--hash=sha256:bca57b683e3d94d0919e2f31e4d70fdfbb7059650ef1b431d9f4e045690edcd5 \
--hash=sha256:c43755d2c46b75a7b74ac6226d2cc9fa2a76c3263c5ae70c195c6fb4e7b08e79 \
--hash=sha256:c7eed1fb3008a8a4a56425334b7eb82651a51f9e9a9c2f72844a2fb394f38a6c \
--hash=sha256:c8b0d834b11be97d5ab2b4dceada20ae8e07bcccbc0f55d71df6729965f406ad \
--hash=sha256:ce88ec79df55430e37178a191ad8df45cae90b0f6972d46d867bf6ebbb58cc4d \
--hash=sha256:d173d24e85e51510e658fb108513d5bc11e3fd2820db6b1bd0522266ddd11f51 \
--hash=sha256:d8f55f355be7821dade839df785a49dc9f16d1af363134d07eb11e9207e0b189 \
--hash=sha256:da71de5bf552d83dcc21b78cc0020e86f8d0feea43e202110973987ffa781c21 \
--hash=sha256:e55698e0ed95a70fe9ff9a23c763acfe0bf335b02df12142f74e4543095e9a9b \
--hash=sha256:f32a748703e7bf6e92dfa3d2936b2fbfe76f8ce5f756e24f49ef72d17d26ad02 \
--hash=sha256:f470a130d6ddb05b810fc1776d918659407f8d025b7f56d2742a596b6dffa6c7
cython==0.29.26 \
--hash=sha256:af377d543a762867da11fcf6e558f7a4a535ff8693f30cce123fab10c00fa312 \
--hash=sha256:f5e15ff892c8afad64931ee3dd723c4755c2c516606f9aae7613bebfac62b0f6 \
--hash=sha256:2b834ff6e4d10ba6d7a0d676dd71c1b427a181ddbbbbf79e91d1861557aab59f \
--hash=sha256:c813799d533194b7d85203d881d8b4f567a8c644a67f50d47f1ffbf316df412f \
--hash=sha256:6773cce9d4b3b6168d8feb2b6f06b658ef1e11cbfec075041745666d8e2a5e45 \
--hash=sha256:362fbb9cb4627c7786231429768b54aaba5459a2a0e46c25e59f202ca6155437
pybind11==2.6.2 \
--hash=sha256:2d8aebe1709bc367e34e3b23d8eccbf3f387ee9d5640548c6260d33b59f02405 \
--hash=sha256:d0e0aed9279656f21501243b707eb6e3b951e89e10c3271dedf3ae41c365e5ed
wheel==0.37.1 \
--hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 \
--hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a
setuptools==62.0.0 \
--hash=sha256:7999cbd87f1b6e1f33bf47efa368b224bed5e27b5ef2c4d46580186cbcb1a86a \
--hash=sha256:a65e3802053e99fc64c6b3b29c11132943d5b8c8facbcc461157511546510967
ifaddr==0.1.7 \
--hash=sha256:1f9e8a6ca6f16db5a37d3356f07b6e52344f6f9f7e806d618537731669eb1a94 \
--hash=sha256:d1f603952f0a71c9ab4e705754511e4e03b02565bc4cec7188ad6415ff534cd3
pycparser==2.20 \
--hash=sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0 \
--hash=sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705
zipp==3.5.0 \
--hash=sha256:957cfda87797e389580cb8b9e3870841ca991e2125350677b2ca83a0e99390a3 \
--hash=sha256:f5812b1e007e48cff63449a5e9f4e7ebea716b4111f9c4f9a645f91d579bf0c4
cffi==1.15.0 \
--hash=sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3 \
--hash=sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2 \
--hash=sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636 \
--hash=sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20 \
--hash=sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728 \
--hash=sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27 \
--hash=sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66 \
--hash=sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443 \
--hash=sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0 \
--hash=sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7 \
--hash=sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39 \
--hash=sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605 \
--hash=sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a \
--hash=sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37 \
--hash=sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029 \
--hash=sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139 \
--hash=sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc \
--hash=sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df \
--hash=sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14 \
--hash=sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880 \
--hash=sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2 \
--hash=sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a \
--hash=sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e \
--hash=sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474 \
--hash=sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024 \
--hash=sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8 \
--hash=sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0 \
--hash=sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e \
--hash=sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a \
--hash=sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e \
--hash=sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032 \
--hash=sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6 \
--hash=sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e \
--hash=sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b \
--hash=sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e \
--hash=sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954 \
--hash=sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962 \
--hash=sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c \
--hash=sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4 \
--hash=sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55 \
--hash=sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962 \
--hash=sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023 \
--hash=sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c \
--hash=sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6 \
--hash=sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8 \
--hash=sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382 \
--hash=sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7 \
--hash=sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc \
--hash=sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997 \
--hash=sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796
urllib3==1.25.9 \
--hash=sha256:3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527 \
--hash=sha256:88206b0eb87e6d677d424843ac5209e3fb9d0190d0ee169599165ec25e9d9115
mypy-extensions==0.4.3 \
--hash=sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d \
--hash=sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8
tomli==2.0.1 \
--hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \
--hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f
typing-extensions==3.10.0.2 \
--hash=sha256:49f75d16ff11f1cd258e1b988ccff82a3ca5570217d7ad8c5f48205dd99a677e \
--hash=sha256:f1d25edafde516b146ecd0613dabcc61409817af4766fbbcfb8d1ad4ec441a34
jeepney==0.7.1 \
--hash=sha256:1b5a0ea5c0e7b166b2f5895b91a08c14de8915afda4407fb5022a195224958ac \
--hash=sha256:fa9e232dfa0c498bd0b8a3a73b8d8a31978304dcef0515adc859d4e096f96f4f
SecretStorage==3.3.3 \
--hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \
--hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99
keyring==23.0.1 \
--hash=sha256:045703609dd3fccfcdb27da201684278823b72af515aedec1a8515719a038cb8 \
--hash=sha256:8f607d7d1cc502c43a932a275a56fe47db50271904513a379d39df1af277ac48
networkx==2.6.2 \
--hash=sha256:2306f1950ce772c5a59a57f5486d59bb9cab98497c45fc49cbc45ac0dec119bb \
--hash=sha256:5fcb7004be69e8fbdf07dcb502efa5c77cadcaad6982164134eeb9721f826c2e
pywin32==303; \
sys_platform=="win32" \
--hash=sha256:51cb52c5ec6709f96c3f26e7795b0bf169ee0d8395b2c1d7eb2c029a5008ed51
pywin32-ctypes==0.2.0; \
sys_platform=="win32" \
--hash=sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942 \
--hash=sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98
charset-normalizer==2.1.0 \
--hash=sha256:5189b6f22b01957427f35b6a08d9a0bc45b46d3788ef5a92e978433c7a35f8a5
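Each requirement above is pinned to one or more sha256 wheel digests, so pip's hash-checking mode rejects any artifact that does not match. As a minimal illustration only (not part of this change), a downloaded wheel can be checked against such a pin by hand; the wheel file name below is hypothetical, while the digest is the pyserial==3.4 pin from the list above:

import hashlib

# Hypothetical check of a locally downloaded wheel against a pinned digest.
PINNED = "6e2d401fdee0eab996cf734e67773a0143b932772ca8b42451440cfed942c627"  # pyserial==3.4

def sha256_of(path):
    digest = hashlib.sha256()
    with open(path, "rb") as handle:
        for chunk in iter(lambda: handle.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest()

# "pyserial-3.4-py2.py3-none-any.whl" is an assumed local file name.
print(sha256_of("pyserial-3.4-py2.py3-none-any.whl") == PINNED)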

View File

@ -1429,6 +1429,23 @@
}
}
},
"BASFUltrafuse316L": {
"package_info": {
"package_id": "BASFUltrafuse316L",
"package_type": "material",
"display_name": "BASF Ultrafuse 316L",
"description": "An innovative filament to produce 316L grade stainless steel parts.",
"package_version": "1.0.1",
"sdk_version": "8.6.0",
"website": "https://forward-am.com/material-portfolio/ultrafuse-filaments-for-fused-filaments-fabrication-fff/metal-filaments/ultrafuse-316l/",
"author": {
"author_id": "BASF",
"display_name": "BASF",
"email": null,
"website": "https://forward-am.com/"
}
}
},
"DagomaChromatikPLA": {
"package_info": {
"package_id": "DagomaChromatikPLA",
@ -1582,6 +1599,23 @@
}
}
},
"JabilTPE_SEBS1300_95a": {
"package_info": {
"package_id": "JabilTPE_SEBS1300_95a",
"package_type": "material",
"display_name": "Jabil TPE SEBS 1300 95a",
"description": "Soft material great for prototyping where rubber-like or elastomeric properties and durability are required.",
"package_version": "1.0.1",
"sdk_version": "8.6.0",
"website": "https://www.jabil.com/services/additive-manufacturing/additive-materials/compare-filaments/tpe-sebs-95a.html",
"author": {
"author_id": "Jabil",
"display_name": "Jabil",
"email": null,
"website": "https://www.jabil.com/"
}
}
},
"OctofiberPLA": {
"package_info": {
"package_id": "OctofiberPLA",
@ -1616,6 +1650,23 @@
}
}
},
"PolyMaxPC": {
"package_info": {
"package_id": "PolyMaxPC",
"package_type": "material",
"display_name": "PolyMax™ PC",
"description": "PolyMax™ PC is an engineered PC filament combining excellent strength, toughness, heat resistance and printing quality. It is the ideal choice for a wide range of engineering applications.",
"package_version": "1.0.1",
"sdk_version": "8.6.0",
"website": "http://www.polymaker.com/shop/polymax/",
"author": {
"author_id": "Polymaker",
"display_name": "Polymaker L.L.C.",
"email": "inquiry@polymaker.com",
"website": "https://www.polymaker.com"
}
}
},
"PolyMaxPLA": {
"package_info": {
"package_id": "PolyMaxPLA",
@ -1933,11 +1984,201 @@
}
}
},
"ULTIMAKERABSMETHOD": {
"package_info": {
"package_id": "ULTIMAKERABSMETHOD",
"package_type": "material",
"display_name": "ABS",
"description": "Example package for material and quality profiles for Ultimaker materials.",
"package_version": "2.0.0",
"sdk_version": "8.6.0",
"website": "https://ultimaker.com/materials/method-series-abs/",
"author": {
"author_id": "UltimakerPackages",
"display_name": "UltiMaker",
"email": "materials@ultimaker.com",
"website": "https://ultimaker.com",
"description": "Professional 3D printing made accessible.",
"support_website": "https://ultimaker.com/materials/method-materials/"
}
}
},
"ULTIMAKERNYLONMETHOD": {
"package_info": {
"package_id": "ULTIMAKERNYLONMETHOD",
"package_type": "material",
"display_name": "Nylon",
"description": "Example package for material and quality profiles for Ultimaker materials.",
"package_version": "2.0.0",
"sdk_version": "8.6.0",
"website": "https://ultimaker.com/materials/method-series-nylon/",
"author": {
"author_id": "UltimakerPackages",
"display_name": "UltiMaker",
"email": "materials@ultimaker.com",
"website": "https://ultimaker.com",
"description": "Professional 3D printing made accessible.",
"support_website": "https://ultimaker.com/materials/method-materials/"
}
}
},
"ULTIMAKERNYLONCFMETHOD": {
"package_info": {
"package_id": "ULTIMAKERNYLONCFMETHOD",
"package_type": "material",
"display_name": "Nylon Carbon Fiber",
"description": "Example package for material and quality profiles for Ultimaker materials.",
"package_version": "2.0.0",
"sdk_version": "8.6.0",
"website": "https://ultimaker.com/materials/method-series-nylon-carbon-fiber/",
"author": {
"author_id": "UltimakerPackages",
"display_name": "UltiMaker",
"email": "materials@ultimaker.com",
"website": "https://ultimaker.com",
"description": "Professional 3D printing made accessible.",
"support_website": "https://ultimaker.com/materials/method-materials/"
}
}
},
"ULTIMAKERPLAMETHOD": {
"package_info": {
"package_id": "ULTIMAKERPLAMETHOD",
"package_type": "material",
"display_name": "PLA",
"description": "Example package for material and quality profiles for Ultimaker materials.",
"package_version": "2.0.0",
"sdk_version": "8.6.0",
"website": "https://ultimaker.com/materials/method-series-pla/",
"author": {
"author_id": "UltimakerPackages",
"display_name": "UltiMaker",
"email": "materials@ultimaker.com",
"website": "https://ultimaker.com",
"description": "Professional 3D printing made accessible.",
"support_website": "https://ultimaker.com/materials/method-materials/"
}
}
},
"ULTIMAKERPVAMETHOD": {
"package_info": {
"package_id": "ULTIMAKERPVAMETHOD",
"package_type": "material",
"display_name": "PVA",
"description": "Example package for material and quality profiles for Ultimaker materials.",
"package_version": "2.0.0",
"sdk_version": "8.6.0",
"website": "https://ultimaker.com/materials/method-series-pva/",
"author": {
"author_id": "UltimakerPackages",
"display_name": "UltiMaker",
"email": "materials@ultimaker.com",
"website": "https://ultimaker.com",
"description": "Professional 3D printing made accessible.",
"support_website": "https://ultimaker.com/materials/method-materials/"
}
}
},
"ULTIMAKERPETGMETHOD": {
"package_info": {
"package_id": "ULTIMAKERPETGMETHOD",
"package_type": "material",
"display_name": "PETG",
"description": "Example package for material and quality profiles for Ultimaker materials.",
"package_version": "2.0.0",
"sdk_version": "8.6.0",
"website": "https://ultimaker.com/materials/method-series-petg/",
"author": {
"author_id": "UltimakerPackages",
"display_name": "UltiMaker",
"email": "materials@ultimaker.com",
"website": "https://ultimaker.com",
"description": "Professional 3D printing made accessible.",
"support_website": "https://ultimaker.com/materials/method-materials/"
}
}
},
"ULTIMAKERTOUGHMETHOD": {
"package_info": {
"package_id": "ULTIMAKERTOUGHMETHOD",
"package_type": "material",
"display_name": "Tough PLA",
"description": "Example package for material and quality profiles for Ultimaker materials.",
"package_version": "2.0.0",
"sdk_version": "8.6.0",
"website": "https://ultimaker.com/materials/method-series-tough/",
"author": {
"author_id": "UltimakerPackages",
"display_name": "UltiMaker",
"email": "materials@ultimaker.com",
"website": "https://ultimaker.com",
"description": "Professional 3D printing made accessible.",
"support_website": "https://ultimaker.com/materials/method-materials/"
}
}
},
"BASFMETALMETHOD": {
"package_info": {
"package_id": "BASFMETALMETHOD",
"package_type": "material",
"display_name": "BASF Ultrafuse 316L",
"description": "Example package for material and quality profiles for Ultimaker materials.",
"package_version": "2.0.0",
"sdk_version": "8.6.0",
"website": "https://ultimaker.com/materials/method-materials/#metal",
"author": {
"author_id": "UltimakerPackages",
"display_name": "UltiMaker",
"email": "materials@ultimaker.com",
"website": "https://ultimaker.com",
"description": "Professional 3D printing made accessible.",
"support_website": "https://ultimaker.com/materials/method-materials/"
}
}
},
"JABILSEBSMETHOD": {
"package_info": {
"package_id": "JABILSEBSMETHOD",
"package_type": "material",
"display_name": "Jabil TPE SEBS 95A",
"description": "Example package for material and quality profiles for Ultimaker materials.",
"package_version": "2.0.0",
"sdk_version": "8.6.0",
"website": "https://ultimaker.com/materials/method-materials/",
"author": {
"author_id": "UltimakerPackages",
"display_name": "UltiMaker",
"email": "materials@ultimaker.com",
"website": "https://ultimaker.com",
"description": "Professional 3D printing made accessible.",
"support_website": "https://ultimaker.com/materials/method-materials/"
}
}
},
"POLYMAKERPCMETHOD": {
"package_info": {
"package_id": "POLYMAKERPCMETHOD",
"package_type": "material",
"display_name": "Polymaker PolyMax PC",
"description": "Example package for material and quality profiles for Ultimaker materials.",
"package_version": "2.0.0",
"sdk_version": "8.6.0",
"website": "https://ultimaker.com/materials/method-materials/",
"author": {
"author_id": "UltimakerPackages",
"display_name": "UltiMaker",
"email": "materials@ultimaker.com",
"website": "https://ultimaker.com",
"description": "Professional 3D printing made accessible.",
"support_website": "https://ultimaker.com/materials/method-materials/"
}
}
},
"ULTIMAKERBASCFMETHOD": {
"package_info": {
"package_id": "ULTIMAKERBASCFMETHOD",
"package_type": "material",
"display_name": "Ultimaker ABS-CF",
"display_name": "ABS-CF",
"description": "Example package for material and quality profiles for Ultimaker materials.",
"package_version": "2.0.0",
"sdk_version": "8.6.0",
@ -1956,7 +2197,7 @@
"package_info": {
"package_id": "ULTIMAKERABSRMETHOD",
"package_type": "material",
"display_name": "Ultimaker ABS-R",
"display_name": "ABS-R",
"description": "Example package for material and quality profiles for Ultimaker materials.",
"package_version": "2.0.0",
"sdk_version": "8.6.0",
@ -1975,7 +2216,7 @@
"package_info": {
"package_id": "ULTIMAKERASAMETHOD",
"package_type": "material",
"display_name": "Ultimaker ASA",
"display_name": "ASA",
"description": "Example package for material and quality profiles for Ultimaker materials.",
"package_version": "2.0.0",
"sdk_version": "8.6.0",
@ -1994,7 +2235,7 @@
"package_info": {
"package_id": "ULTIMAKERNYLON12CFMETHOD",
"package_type": "material",
"display_name": "Ultimaker Nylon12 Carbon Fiber",
"display_name": "Nylon12 Carbon Fiber",
"description": "Example package for material and quality profiles for Ultimaker materials.",
"package_version": "2.0.0",
"sdk_version": "8.6.0",
@ -2013,7 +2254,7 @@
"package_info": {
"package_id": "ULTIMAKERRAPIDRINSEMETHOD",
"package_type": "material",
"display_name": "Ultimaker RapidRinse",
"display_name": "RapidRinse",
"description": "Example package for material and quality profiles for Ultimaker materials.",
"package_version": "2.0.0",
"sdk_version": "8.6.0",
@ -2032,7 +2273,7 @@
"package_info": {
"package_id": "ULTIMAKERSR30METHOD",
"package_type": "material",
"display_name": "Ultimaker SR-30",
"display_name": "SR-30",
"description": "Example package for material and quality profiles for Ultimaker materials.",
"package_version": "2.0.0",
"sdk_version": "8.6.0",

View File

@ -1 +1 @@
version: "5.8.1"
version: "5.9.0"

View File

@ -5,7 +5,7 @@ from conan.tools.files import copy, update_conandata
from conan.tools.scm import Version
from conan.errors import ConanInvalidConfiguration
required_conan_version = ">=1.58.0 <2.0.0"
required_conan_version = ">=2.7.0"
class CuraResource(ConanFile):
@ -15,9 +15,8 @@ class CuraResource(ConanFile):
url = "https://github.com/Ultimaker/cura"
description = "Cura Resources"
topics = ("conan", "cura")
settings = "os", "compiler", "build_type", "arch"
no_copy_source = True
package_type = "shared-library"
@property
def _shared_resources(self):
@ -37,10 +36,6 @@ class CuraResource(ConanFile):
copy(self, pattern="*", src=os.path.join(self.recipe_folder, shared_resources),
dst=os.path.join(self.export_sources_folder, shared_resources))
def validate(self):
if Version(self.version) <= Version("4"):
raise ConanInvalidConfiguration("Only versions 5+ are supported")
def layout(self):
self.cpp.source.resdirs = self._shared_resources
self.cpp.package.resdirs = [f"res/{res}" for res in self._shared_resources]
@ -51,12 +46,9 @@ class CuraResource(ConanFile):
def package_info(self):
self.cpp_info.includedirs = []
self.runenv_info.append_path("CURA_RESOURCES", os.path.join(self.package_folder, "res"))
self.runenv_info.append_path("CURA_ENGINE_SEARCH_PATH", os.path.join(self.package_folder, "res", "definitions"))
self.runenv_info.append_path("CURA_ENGINE_SEARCH_PATH", os.path.join(self.package_folder, "res", "extruders"))
self.env_info.CURA_RESOURCES.append(os.path.join(self.package_folder, "res"))
self.env_info.CURA_ENGINE_SEARCH_PATH.append(os.path.join(self.package_folder, "res", "definitions"))
self.env_info.CURA_ENGINE_SEARCH_PATH.append(os.path.join(self.package_folder, "res", "definitions"))
self.runenv_info.define("CURA_RESOURCES", os.path.join(self.package_folder, "res"))
self.runenv_info.define("CURA_ENGINE_SEARCH_PATH", os.path.join(self.package_folder, "res", "definitions"))
self.runenv_info.define("CURA_ENGINE_SEARCH_PATH", os.path.join(self.package_folder, "res", "extruders"))
def package_id(self):
self.info.clear()
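Since the recipe now requires Conan 2, the resource locations are exposed only through runenv_info; the legacy env_info lines are gone. A minimal sketch of how a consumer could read those variables at runtime, assuming the Conan-generated run environment (for example the conanrun script from VirtualRunEnv) has been activated; the variable names come from package_info() above:

import os

# Defined by CuraResource.package_info() via runenv_info.define(); only
# present once the Conan run environment has been activated.
resources_root = os.environ.get("CURA_RESOURCES")
engine_search_path = os.environ.get("CURA_ENGINE_SEARCH_PATH")

if resources_root:
    # The recipe packages its shared resources under res/<folder>.
    print("definitions folder:", os.path.join(resources_root, "definitions"))
print("engine search path:", engine_search_path)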

View File

@ -130,7 +130,6 @@
"machine_min_cool_heat_time_window": { "default_value": 15.0 },
"machine_name": { "default_value": "Mark2_for_Ultimaker2" },
"machine_nozzle_cool_down_speed": { "default_value": 1.5 },
"machine_nozzle_head_distance": { "default_value": 5 },
"machine_nozzle_heat_up_speed": { "default_value": 3.5 },
"machine_nozzle_size": { "default_value": 0.4 },
"machine_show_variants": { "default_value": true },

View File

@ -10,6 +10,7 @@
"file_formats": "text/x-gcode",
"platform": "ankermake_m5_platform.obj",
"has_machine_quality": true,
"has_textured_buildplate": true,
"machine_extruder_trains": { "0": "ankermake_m5_extruder_0" },
"platform_texture": "ankermake_m5.png",
"preferred_material": "generic_pla",
@ -119,7 +120,12 @@
"support_z_distance": { "value": "layer_height * 2" },
"top_bottom_thickness": { "value": "layer_height * 4" },
"wall_overhang_angle": { "value": 55 },
"wall_overhang_speed_factor": { "value": 55 },
"wall_overhang_speed_factors":
{
"value": [
55
]
},
"zig_zaggify_infill": { "value": "infill_pattern == 'cross' or infill_pattern == 'cross_3d' or infill_pattern == 'lines'" }
}
}
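This hunk, and similar ones in the definition files that follow, replace the scalar wall_overhang_speed_factor override with the list-valued wall_overhang_speed_factors setting. A minimal sketch of that conversion for a definition's overrides dictionary; migrate_overhang_speed is a hypothetical helper, not code from this change set:

def migrate_overhang_speed(overrides):
    # Convert a scalar wall_overhang_speed_factor override into the
    # list-valued wall_overhang_speed_factors form used in these changes.
    old = overrides.pop("wall_overhang_speed_factor", None)
    if old is not None and "wall_overhang_speed_factors" not in overrides:
        # The old entry held a single percentage; the new one holds a list of them.
        overrides["wall_overhang_speed_factors"] = {"value": [old["value"]]}
    return overrides

# Mirrors the ankermake_m5 change above:
print(migrate_overhang_speed({"wall_overhang_speed_factor": {"value": 55}}))
# -> {'wall_overhang_speed_factors': {'value': [55]}}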

View File

@ -10,6 +10,7 @@
"file_formats": "text/x-gcode",
"platform": "ankermake_m5c_platform.obj",
"has_machine_quality": true,
"has_textured_buildplate": true,
"machine_extruder_trains": { "0": "ankermake_m5c_extruder_0" },
"platform_texture": "ankermake_m5c.png",
"preferred_material": "generic_pla",
@ -67,7 +68,6 @@
]
},
"infill_extruder_nr": { "value": -1 },
"infill_line_distance": { "value": 8 },
"infill_material_flow": { "value": 90 },
"infill_pattern": { "value": "'lines' if infill_sparse_density >= 25 else 'grid'" },
"infill_sparse_density": { "value": 10 },
@ -238,7 +238,12 @@
"wall_extruder_nr": { "value": -1 },
"wall_line_width_0": { "value": 0.44 },
"wall_overhang_angle": { "value": 45 },
"wall_overhang_speed_factor": { "value": 40 },
"wall_overhang_speed_factors":
{
"value": [
40
]
},
"wall_thickness": { "value": 0.84 },
"wall_x_extruder_nr": { "value": -1 },
"zig_zaggify_infill": { "value": true }

View File

@ -167,7 +167,12 @@
"travel_avoid_supports": { "value": "True" },
"wall_line_width": { "value": "machine_nozzle_size" },
"wall_overhang_angle": { "value": "75" },
"wall_overhang_speed_factor": { "value": "50" },
"wall_overhang_speed_factors":
{
"value": [
50
]
},
"zig_zaggify_infill": { "value": "True" }
}
}

View File

@ -65,7 +65,6 @@
"machine_heated_bed": { "default_value": true },
"machine_height": { "default_value": 340 },
"machine_name": { "default_value": "Atom 3" },
"machine_nozzle_head_distance": { "default_value": 6 },
"machine_shape": { "default_value": "elliptic" },
"machine_show_variants": { "default_value": true },
"machine_start_gcode": { "default_value": ";MACHINE START CODE\nG21 ;metric values\nG90 ;absolute positioning\nG28 ;home\nG1 Z5 F9000\n;MACHINE START CODE" },

View File

@ -107,7 +107,7 @@
"travel_retract_before_outer_wall": { "value": true },
"wall_0_wipe_dist": { "value": 0.0 },
"wall_overhang_angle": { "default_value": 75 },
"wall_overhang_speed_factor": { "default_value": 50 },
"wall_overhang_speed_factors": { "default_value": "[50]" },
"wall_thickness": { "value": "line_width * 2" },
"z_seam_corner": { "value": "'z_seam_corner_weighted'" },
"z_seam_type": { "value": "'back'" }

View File

@ -0,0 +1,18 @@
{
"version": 2,
"name": "Creality CR-M4",
"inherits": "creality_base",
"metadata":
{
"visible": true,
"quality_definition": "creality_base"
},
"overrides":
{
"gantry_height": { "value": 35 },
"machine_depth": { "default_value": 450 },
"machine_height": { "default_value": 470 },
"machine_name": { "default_value": "Creality CR-M4" },
"machine_width": { "default_value": 450 }
}
}

View File

@ -36,9 +36,9 @@
},
"machine_heated_bed": { "default_value": true },
"machine_height": { "default_value": 250 },
"machine_max_acceleration_e": { "value": 5000 },
"machine_max_acceleration_x": { "value": 5000.0 },
"machine_max_acceleration_y": { "value": 5000.0 },
"machine_max_acceleration_e": { "value": 2500 },
"machine_max_acceleration_x": { "value": 2500.0 },
"machine_max_acceleration_y": { "value": 2500.0 },
"machine_max_acceleration_z": { "value": 500.0 },
"machine_max_feedrate_e": { "value": 100 },
"machine_max_feedrate_x": { "value": 500 },

View File

@ -11,6 +11,7 @@
"platform": "dagoma_sigma_pro.obj",
"first_start_actions": [ "MachineSettingsAction" ],
"has_machine_quality": true,
"has_textured_buildplate": true,
"has_variants": true,
"machine_extruder_trains": { "0": "dagoma_sigma_pro_extruder" },
"platform_texture": "dagoma_sigma_pro.png",

View File

@ -11,6 +11,7 @@
"platform": "dagoma_sigma_pro.obj",
"first_start_actions": [ "MachineSettingsAction" ],
"has_machine_quality": true,
"has_textured_buildplate": true,
"has_variants": true,
"machine_extruder_trains":
{

View File

@ -35,7 +35,6 @@
"machine_max_feedrate_y": { "default_value": 300 },
"machine_max_feedrate_z": { "default_value": 40 },
"machine_name": { "default_value": "Diytech 220" },
"machine_nozzle_head_distance": { "default_value": 3 },
"machine_nozzle_tip_outer_diameter": { "default_value": 1 },
"machine_start_gcode": { "default_value": "G21\nG90\nM82\nM107\nG28\nG1 Z15 F200\nT0\nG92 E0\nG1 E16 F250\nG92 E0\n" },
"machine_use_extruder_offset_to_offset_coords": { "default_value": true },

Some files were not shown because too many files have changed in this diff.