Merge branch 'main' into voron-nozzles

Author: Christian Kunis
Date: 2024-06-20 13:56:39 -04:00
Commit: ad800b0bd7
23 changed files with 408 additions and 58 deletions

View File

@@ -30,6 +30,29 @@ on:
         required: true
         type: boolean
+  workflow_call:
+    inputs:
+      cura_conan_version:
+        default: 'cura/latest@ultimaker/testing'
+        required: true
+        type: string
+      conan_args:
+        default: ''
+        required: false
+        type: string
+      enterprise:
+        default: false
+        required: true
+        type: boolean
+      staging:
+        default: false
+        required: true
+        type: boolean
+      nightly:
+        default: false
+        required: true
+        type: boolean
+
   schedule:
     # Daily at 4:15 CET (main-branch) and 5:15 CET (release-branch)
     - cron: '15 3 * * *'
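A note on the schedule above: GitHub Actions cron expressions run in UTC, so '15 3 * * *' fires at 03:15 UTC, which is the 4:15 CET the comment mentions. A quick check with the standard library (Europe/Amsterdam is used here as a stand-in for CET):

    from datetime import datetime, timezone
    from zoneinfo import ZoneInfo

    # '15 3 * * *' -> 03:15 UTC daily; CET is UTC+1 outside DST
    utc_run = datetime(2024, 1, 15, 3, 15, tzinfo=timezone.utc)
    print(utc_run.astimezone(ZoneInfo("Europe/Amsterdam")))  # 2024-01-15 04:15:00+01:00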
@@ -70,7 +93,7 @@ jobs:
       enterprise: ${{ github.event.inputs.enterprise == 'true' }}
       staging: ${{ github.event.inputs.staging == 'true' }}
       architecture: X64
-      operating_system: ubuntu-22.04
+      operating_system: self-hosted-Ubuntu22-X64
     secrets: inherit
 
   macos-installer:
@@ -109,7 +132,7 @@ jobs:
           fetch-depth: 1
 
       - name: Download the run info
-        uses: actions/download-artifact@v2
+        uses: actions/download-artifact@v4
        with:
           name: linux-run-info
@@ -151,13 +174,13 @@ jobs:
               f.writelines(f"NIGHTLY_TIME={nightly_creation_time}\n")
 
       - name: Download linux installer jobs artifacts
-        uses: actions/download-artifact@v2
+        uses: actions/download-artifact@v4
        with:
           name: ${{ steps.filename.outputs.LINUX }}-AppImage
           path: installers
 
       - name: Download linux installer jobs asc artifacts
-        uses: actions/download-artifact@v2
+        uses: actions/download-artifact@v4
        with:
           name: ${{ steps.filename.outputs.LINUX }}-asc
           path: installers
@@ -175,13 +198,13 @@ jobs:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 
       - name: Download win msi installer jobs artifacts
-        uses: actions/download-artifact@v2
+        uses: actions/download-artifact@v4
        with:
           name: ${{ steps.filename.outputs.WIN_MSI }}-msi
           path: installers
 
       - name: Download win exe installer jobs artifacts
-        uses: actions/download-artifact@v2
+        uses: actions/download-artifact@v4
        with:
           name: ${{ steps.filename.outputs.WIN_EXE }}-exe
           path: installers
@@ -199,13 +222,13 @@ jobs:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 
       - name: Download MacOS (X64) dmg installer jobs artifacts
-        uses: actions/download-artifact@v2
+        uses: actions/download-artifact@v4
        with:
           name: ${{ steps.filename.outputs.MAC_X64_DMG }}-dmg
           path: installers
 
       - name: Download MacOS (X64) pkg installer jobs artifacts
-        uses: actions/download-artifact@v2
+        uses: actions/download-artifact@v4
        with:
           name: ${{ steps.filename.outputs.MAC_X64_PKG }}-pkg
           path: installers
@@ -223,13 +246,13 @@ jobs:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 
       - name: Download MacOS (ARM-64) dmg installer jobs artifacts
-        uses: actions/download-artifact@v2
+        uses: actions/download-artifact@v4
        with:
           name: ${{ steps.filename.outputs.MAC_ARM_DMG }}-dmg
           path: installers
 
       - name: Download MacOS (ARM-64) pkg installer jobs artifacts
-        uses: actions/download-artifact@v2
+        uses: actions/download-artifact@v4
        with:
           name: ${{ steps.filename.outputs.MAC_ARM_PKG }}-pkg
           path: installers

View File

@@ -34,10 +34,11 @@ on:
       operating_system:
         description: 'OS'
         required: true
-        default: 'ubuntu-22.04'
+        default: 'self-hosted-Ubuntu22-X64'
         type: choice
         options:
           - ubuntu-22.04
+          - self-hosted-Ubuntu22-X64
 
 jobs:
   linux-installer:
@@ -49,4 +50,4 @@ jobs:
       staging: ${{ inputs.staging }}
       architecture: ${{ inputs.architecture }}
       operating_system: ${{ inputs.operating_system }}
     secrets: inherit

View File

@@ -0,0 +1,32 @@
+name: Feature Freeze
+run-name: Feature freeze Cura ${{ inputs.cura_version }} by @${{ github.actor }}
+
+on:
+  workflow_dispatch:
+    inputs:
+      cura_version:
+        description: 'Cura version major and minor, e.g. 5.7'
+        required: true
+        type: string
+
+jobs:
+  parse-version:
+    name: Parse input version string
+    runs-on: ubuntu-latest
+    outputs:
+      package_version: ${{ steps.version_parser.outputs.major }}.${{ steps.version_parser.outputs.minor }}.0-alpha.1
+    steps:
+      - name: Parse version string
+        id: version_parser
+        uses: booxmedialtd/ws-action-parse-semver@v1.4.7
+        with:
+          input_string: ${{ inputs.cura_version }}.0
+
+  feature-freeze:
+    name: Process feature freeze
+    uses: Ultimaker/Cura-workflows/.github/workflows/cura-set-packages-versions.yml@main
+    needs: [parse-version]
+    with:
+      cura_version: ${{ needs.parse-version.outputs.package_version }}
+      create_feature_branch: true
+    secrets: inherit

View File

@@ -0,0 +1,179 @@
+name: Prepare Release Candidate
+run-name: Release Candidate for Cura ${{ inputs.cura_version }} by @${{ github.actor }}
+
+on:
+  workflow_dispatch:
+    inputs:
+      cura_version:
+        description: 'Cura version number, e.g. 5.7.0, 5.7.2 or 5.8.0-beta.2'
+        required: true
+        type: string
+
+jobs:
+  parse-version:
+    name: Parse input version string
+    runs-on: ubuntu-latest
+    outputs:
+      version_major: ${{ steps.version_parser.outputs.major }}
+      version_minor: ${{ steps.version_parser.outputs.minor }}
+      version_patch: ${{ steps.version_parser.outputs.patch }}
+      branch_name: ${{ steps.version_parser.outputs.major }}.${{ steps.version_parser.outputs.minor }}
+    steps:
+      - name: Parse version string
+        id: version_parser
+        uses: booxmedialtd/ws-action-parse-semver@v1.4.7
+        with:
+          input_string: ${{ inputs.cura_version }}
+
+  freeze-packages-versions:
+    name: Freeze packages versions
+    uses: Ultimaker/Cura-workflows/.github/workflows/cura-set-packages-versions.yml@main
+    needs: [parse-version]
+    with:
+      cura_version: ${{ inputs.cura_version }}
+      create_feature_branch: false
+    secrets: inherit
+
+  find-rc-tag:
+    name: Find RC tag name
+    runs-on: ubuntu-latest
+    needs: [freeze-packages-versions]
+    outputs:
+      tag_name: ${{ steps.find-available-tag-name.outputs.tag_name }}
+    steps:
+      - name: Checkout repo
+        uses: actions/checkout@v4
+        with:
+          fetch-tags: true
+          fetch-depth: 0
+
+      - name: Find available tag name
+        id: find-available-tag-name
+        run: |
+          VERSION=${{ inputs.cura_version }}
+          RC_INDEX=0
+          while
+            RC_INDEX=$((RC_INDEX+1))
+            TAG_NAME="$VERSION-RC$RC_INDEX"
+            [[ $(git tag -l "$TAG_NAME") ]]
+          do true; done
+          echo "tag_name=$TAG_NAME" >> "$GITHUB_OUTPUT"
+
+  create-tags:
+    name: Create tags
+    runs-on: ubuntu-latest
+    needs: [parse-version, find-rc-tag]
+    strategy:
+      matrix:
+        repository: [Cura, Uranium, CuraEngine, cura-binary-data, fdm_materials]
+    steps:
+      - name: Checkout repo
+        uses: actions/checkout@v4
+        with:
+          repository: Ultimaker/${{ matrix.repository }}
+          ref: ${{ needs.parse-version.outputs.branch_name }}
+          token: ${{ secrets.CURA_AUTORELEASE_PAT }}
+
+      - name: Create tag
+        run: |
+          git tag ${{ needs.find-rc-tag.outputs.tag_name }}
+          git push origin tag ${{ needs.find-rc-tag.outputs.tag_name }}
+
+  create-dependencies-packages:
+    name: Create conan packages for dependencies
+    uses: ultimaker/cura-workflows/.github/workflows/conan-package-release.yml@main
+    needs: [parse-version, freeze-packages-versions]
+    strategy:
+      matrix:
+        repository: [Cura, Uranium, CuraEngine, cura-binary-data, fdm_materials]
+        include:
+          - conan_recipe_root: "."
+          - repository: Cura
+            conan_recipe_root: "resources"
+    with:
+      repository: ${{ matrix.repository }}
+      ref_name: ${{ needs.parse-version.outputs.branch_name }}
+      version: ${{ inputs.cura_version }}
+      conan_release: true
+      conan_user_channel: ultimaker/stable
+      conan_internal: false
+      conan_latest: true
+      conan_recipe_root: ${{ matrix.conan_recipe_root }}
+    secrets: inherit
+
+  create-cura-package:
+    name: Create conan package for Cura
+    uses: ultimaker/cura-workflows/.github/workflows/conan-package-release.yml@main
+    needs: [parse-version, create-dependencies-packages]
+    with:
+      repository: Cura
+      ref_name: ${{ needs.parse-version.outputs.branch_name }}
+      version: ${{ inputs.cura_version }}
+      conan_release: true
+      conan_user_channel: ultimaker/stable
+      conan_internal: false
+      conan_latest: true
+    secrets: inherit
+
+  create-installers:
+    name: Create installers
+    uses: ./.github/workflows/installers.yml
+    needs: [parse-version, create-cura-package]
+    with:
+      cura_conan_version: cura/${{ inputs.cura_version }}@/
+      enterprise: false
+      staging: false
+      nightly: false
+    secrets: inherit
+
+  create-release-draft:
+    name: Create the release draft
+    runs-on: ubuntu-latest
+    needs: [create-installers, parse-version]
+    steps:
+      - name: Checkout Cura repo
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ needs.parse-version.outputs.branch_name }}
+
+      - name: Extract changelog
+        run: python ./scripts/extract_changelog.py --version ${{ needs.parse-version.outputs.version_major }}.${{ needs.parse-version.outputs.version_minor }}.${{ needs.parse-version.outputs.version_patch }} --changelog ./resources/texts/change_log.txt > formatted_changelog.txt
+
+      - name: Get commit id for release
+        id: get-commit-id
+        uses: iawia002/get-tag-or-commit-id@v1.0.1
+        with:
+          length: 40
+
+      - name: Create release
+        uses: notpeelz/action-gh-create-release@v5.0.1
+        with:
+          target: ${{ steps.get-commit-id.outputs.id }}
+          tag: ${{ inputs.cura_version }}
+          strategy: replace
+          title: UltiMaker Cura ${{ inputs.cura_version }}
+          draft: true
+          body-source: file
+          body: formatted_changelog.txt
+
+      - name: Download artifacts
+        uses: actions/download-artifact@v4.1.7
+        with:
+          path: artifacts
+          merge-multiple: true
+
+      - name: Upload artifacts
+        working-directory: artifacts
+        run: |
+          gh release upload ${{ inputs.cura_version }} UltiMaker-Cura-${{ inputs.cura_version }}-linux-X64.AppImage --clobber
+          gh release upload ${{ inputs.cura_version }} UltiMaker-Cura-${{ inputs.cura_version }}-linux-X64.AppImage.asc --clobber
+          gh release upload ${{ inputs.cura_version }} UltiMaker-Cura-${{ inputs.cura_version }}-macos-ARM64.dmg --clobber
+          gh release upload ${{ inputs.cura_version }} UltiMaker-Cura-${{ inputs.cura_version }}-macos-ARM64.pkg --clobber
+          gh release upload ${{ inputs.cura_version }} UltiMaker-Cura-${{ inputs.cura_version }}-macos-X64.dmg --clobber
+          gh release upload ${{ inputs.cura_version }} UltiMaker-Cura-${{ inputs.cura_version }}-macos-X64.pkg --clobber
+          gh release upload ${{ inputs.cura_version }} UltiMaker-Cura-${{ inputs.cura_version }}-win64-X64.exe --clobber
+          gh release upload ${{ inputs.cura_version }} UltiMaker-Cura-${{ inputs.cura_version }}-win64-X64.msi --clobber
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
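The find-available-tag-name step above uses an unusual shell idiom: the loop body lives in the while *condition*, so RC_INDEX keeps incrementing as long as `git tag -l "$TAG_NAME"` prints an existing tag, and the first free name wins. A minimal Python sketch of the same probe, assuming a local checkout with tags fetched (find_available_rc_tag is a hypothetical helper, not part of the workflow):

    import subprocess

    def find_available_rc_tag(version: str) -> str:
        rc_index = 0
        while True:
            rc_index += 1
            tag_name = f"{version}-RC{rc_index}"
            # `git tag -l <name>` prints the tag if it exists, nothing otherwise
            existing = subprocess.run(["git", "tag", "-l", tag_name],
                                      capture_output=True, text=True).stdout.strip()
            if not existing:
                return tag_name

    print(find_available_rc_tag("5.7.0"))  # e.g. "5.7.0-RC1" on a branch with no RC tags yet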

View File

@@ -50,4 +50,4 @@ jobs:
       staging: ${{ inputs.staging }}
       architecture: ${{ inputs.architecture }}
       operating_system: ${{ inputs.operating_system }}
     secrets: inherit

View File

@@ -2,6 +2,7 @@ checks:
   diagnostic-mesh-file-extension: true
   diagnostic-mesh-file-size: true
   diagnostic-definition-redundant-override: true
+  diagnostic-definition-experimental-setting: true
   diagnostic-resources-macos-app-directory-name: true
   diagnostic-incorrect-formula: true
   diagnostic-resource-file-deleted: true

View File

@@ -115,15 +115,15 @@ class Account(QObject):
         self._update_timer.setSingleShot(True)
         self._update_timer.timeout.connect(self.sync)
 
-        self._sync_services: Dict[str, int] = {}
         """contains entries "service_name" : SyncState"""
-        self.syncRequested.connect(self._updatePermissions)
+        self._sync_services: Dict[str, int] = {}
 
     def initialize(self) -> None:
         self._authorization_service.initialize(self._application.getPreferences())
         self._authorization_service.onAuthStateChanged.connect(self._onLoginStateChanged)
         self._authorization_service.onAuthenticationError.connect(self._onLoginStateChanged)
         self._authorization_service.accessTokenChanged.connect(self._onAccessTokenChanged)
+        self._authorization_service.accessTokenChanged.connect(self._updatePermissions)
         self._authorization_service.loadAuthDataFromPreferences()
 
     @pyqtProperty(int, notify=syncStateChanged)
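The rewiring above moves the _updatePermissions refresh from the syncRequested signal to accessTokenChanged, so permissions are re-fetched exactly when a new access token is issued rather than on every sync request. A reduced sketch of the pattern, with a minimal Signal class standing in for the Qt signals:

    class Signal:
        """Minimal stand-in for a Qt signal."""
        def __init__(self):
            self._slots = []
        def connect(self, slot):
            self._slots.append(slot)
        def emit(self, *args):
            for slot in self._slots:
                slot(*args)

    accessTokenChanged = Signal()
    accessTokenChanged.connect(lambda: print("permissions updated"))
    accessTokenChanged.emit()  # fires whenever a new access token is issued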

View File

@@ -96,7 +96,8 @@ class ThreeMFWriter(MeshWriter):
     @staticmethod
     def _convertUMNodeToSavitarNode(um_node,
                                     transformation = Matrix(),
-                                    exported_settings: Optional[Dict[str, Set[str]]] = None):
+                                    exported_settings: Optional[Dict[str, Set[str]]] = None,
+                                    center_mesh = False):
         """Convenience function that converts an Uranium SceneNode object to a SavitarSceneNode
 
         :returns: Uranium Scene node.
@@ -111,16 +112,20 @@ class ThreeMFWriter(MeshWriter):
         savitar_node = Savitar.SceneNode()
         savitar_node.setName(um_node.getName())
 
-        node_matrix = Matrix()
         mesh_data = um_node.getMeshData()
-        # compensate for original center position, if object(s) is/are not around its zero position
-        if mesh_data is not None:
-            extents = mesh_data.getExtents()
-            if extents is not None:
-                # We use a different coordinate space while writing, so flip Z and Y
-                center_vector = Vector(extents.center.x, extents.center.z, extents.center.y)
-                node_matrix.setByTranslation(center_vector)
-        node_matrix.multiply(um_node.getLocalTransformation())
+
+        if center_mesh:
+            node_matrix = Matrix()
+            # compensate for original center position, if object(s) is/are not around its zero position
+            if mesh_data is not None:
+                extents = mesh_data.getExtents()
+                if extents is not None:
+                    # We use a different coordinate space while writing, so flip Z and Y
+                    center_vector = Vector(extents.center.x, extents.center.y, extents.center.z)
+                    node_matrix.setByTranslation(center_vector)
+            node_matrix.multiply(um_node.getLocalTransformation())
+        else:
+            node_matrix = um_node.getLocalTransformation()
 
         matrix_string = ThreeMFWriter._convertMatrixToString(node_matrix.preMultiply(transformation))
@@ -147,7 +152,7 @@ class ThreeMFWriter(MeshWriter):
             for key in changed_setting_keys:
                 savitar_node.setSetting("cura:" + key, str(stack.getProperty(key, "value")))
         else:
             # We want to export only the specified settings
             if um_node.getName() in exported_settings:
                 model_exported_settings = exported_settings[um_node.getName()]
@@ -283,7 +288,8 @@ class ThreeMFWriter(MeshWriter):
             for root_child in node.getChildren():
                 savitar_node = ThreeMFWriter._convertUMNodeToSavitarNode(root_child,
                                                                          transformation_matrix,
-                                                                         exported_model_settings)
+                                                                         exported_model_settings,
+                                                                         center_mesh = True)
                 if savitar_node:
                     savitar_scene.addSceneNode(savitar_node)
         else:
@@ -442,7 +448,7 @@ class ThreeMFWriter(MeshWriter):
     def sceneNodesToString(scene_nodes: [SceneNode]) -> str:
         savitar_scene = Savitar.Scene()
         for scene_node in scene_nodes:
-            savitar_node = ThreeMFWriter._convertUMNodeToSavitarNode(scene_node)
+            savitar_node = ThreeMFWriter._convertUMNodeToSavitarNode(scene_node, center_mesh = True)
            savitar_scene.addSceneNode(savitar_node)
         parser = Savitar.ThreeMFParser()
         scene_string = parser.sceneToString(savitar_scene)
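With the new center_mesh flag, the node matrix is built as a translation to the mesh's extents center composed with the node's local transformation, and only the callers that write whole scenes opt in; other callers keep the plain local transformation. A sketch of the centering step with plain 4x4 matrices, numpy standing in for Uranium's Matrix/Vector and a made-up extents center:

    import numpy as np

    def translation(v):
        m = np.eye(4)
        m[:3, 3] = v
        return m

    center = np.array([10.0, 5.0, 2.0])    # stand-in for extents.center of the mesh
    local = translation([0.0, 0.0, 20.0])  # stand-in for um_node.getLocalTransformation()

    # center_mesh=True: node matrix = T(center) composed with the local transform,
    # so the mesh is written around its own origin and the offset is restored
    # in the scene transform.
    node_matrix = translation(center) @ local
    print(node_matrix[:3, 3])              # [10.  5. 22.]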

View File

@@ -544,7 +544,7 @@ class CuraEngineBackend(QObject, Backend):
         if job.getResult() == StartJobResult.ObjectsWithDisabledExtruder:
             self._error_message = Message(catalog.i18nc("@info:status",
-                                                        "Unable to slice because there are objects associated with disabled Extruder %s.") % job.getMessage(),
+                                                        "Unable to slice because there are objects associated with disabled Extruder %s.") % job.getAssociatedDisabledExtruders(),
                                           title = catalog.i18nc("@info:title", "Unable to slice"),
                                           message_type = Message.MessageType.WARNING)
             self._error_message.show()

View File

@@ -146,6 +146,7 @@ class StartSliceJob(Job):
         self._slice_message: Arcus.PythonMessage = slice_message
         self._is_cancelled: bool = False
         self._build_plate_number: Optional[int] = None
+        self._associated_disabled_extruders: Optional[str] = None
 
         # cache for all setting values from all stacks (global & extruder) for the current machine
         self._all_extruders_settings: Optional[Dict[str, Any]] = None
@@ -153,6 +154,9 @@ class StartSliceJob(Job):
     def getSliceMessage(self) -> Arcus.PythonMessage:
         return self._slice_message
 
+    def getAssociatedDisabledExtruders(self) -> Optional[str]:
+        return self._associated_disabled_extruders
+
     def setBuildPlate(self, build_plate_number: int) -> None:
         self._build_plate_number = build_plate_number
@@ -334,7 +338,7 @@ class StartSliceJob(Job):
             if has_model_with_disabled_extruders:
                 self.setResult(StartJobResult.ObjectsWithDisabledExtruder)
                 associated_disabled_extruders = {p + 1 for p in associated_disabled_extruders}
-                self.setMessage(", ".join(map(str, sorted(associated_disabled_extruders))))
+                self._associated_disabled_extruders = ", ".join(map(str, sorted(associated_disabled_extruders)))
                 return
 
             # There are cases when there is nothing to slice. This can happen due to one at a time slicing not being
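Instead of smuggling the extruder list through the generic Job message (setMessage), the job now keeps it in a dedicated field with an accessor, which CuraEngineBackend reads when composing the warning shown in the previous file. A reduced sketch of the pattern, with a placeholder class standing in for the Cura job:

    from typing import Optional

    class StartSliceJobSketch:
        def __init__(self) -> None:
            self._associated_disabled_extruders: Optional[str] = None

        def getAssociatedDisabledExtruders(self) -> Optional[str]:
            return self._associated_disabled_extruders

        def run(self) -> None:
            disabled_positions = {0, 2}                      # zero-based extruder positions
            one_based = {p + 1 for p in disabled_positions}  # users see 1-based numbers
            self._associated_disabled_extruders = ", ".join(map(str, sorted(one_based)))

    job = StartSliceJobSketch()
    job.run()
    print(job.getAssociatedDisabledExtruders())  # "1, 3"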

View File

@@ -14,10 +14,10 @@ def getLinter(file: Path, settings: dict) -> Optional[List[Linter]]:
     if not file.exists():
         return [Directory(file, settings)]
 
-    if ".inst" in file.suffixes and ".cfg" in file.suffixes:
+    if ".inst" in file.suffixes and file.suffixes[-1] == ".cfg":
         return [Directory(file, settings), Profile(file, settings), Formulas(file, settings)]
 
-    if ".def" in file.suffixes and ".json" in file.suffixes:
+    if ".def" in file.suffixes and file.suffixes[-1] == ".json":
         if file.stem in ("fdmprinter.def", "fdmextruder.def"):
             return [Formulas(file, settings)]
         return [Directory(file, settings), Definition(file, settings), Formulas(file, settings)]
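The stricter suffix checks make the file's final extension decide which linters run, instead of matching a suffix anywhere in the name. A quick illustration with pathlib (the filenames are made-up examples):

    from pathlib import Path

    print(Path("quality/normal.inst.cfg").suffixes)   # ['.inst', '.cfg']
    print(Path("backup.inst.cfg.json").suffixes)      # ['.inst', '.cfg', '.json']
    # Old check: ".inst" in suffixes and ".cfg" in suffixes -> True for both
    #            (the .json file would wrongly get the profile linters)
    # New check: suffixes[-1] == ".cfg" -> True only for the real .inst.cfg file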

View File

@@ -13,8 +13,11 @@ class Definition(Linter):
     def __init__(self, file: Path, settings: dict) -> None:
         super().__init__(file, settings)
         self._definitions = {}
+        self._definition_name = None
+        self._experimental_settings = []
         self._loadDefinitionFiles(file)
         self._content = self._file.read_text()
+        self._loadExperimentalSettings()
         self._loadBasePrinterSettings()
 
     @property
@@ -32,6 +35,10 @@ class Definition(Linter):
             for check in self.checkMaterialTemperature():
                 yield check
 
+        if self._settings["checks"].get("diagnostic-definition-experimental-setting", False):
+            for check in self.checkExperimentalSetting():
+                yield check
+
         # Add other which will yield Diagnostic's
         # TODO: A check to determine if the user set value is with the min and max value defined in the parent and doesn't trigger a warning
         # TODO: A check if the key exist in the first place
@@ -41,9 +48,8 @@ class Definition(Linter):
     def checkRedefineOverride(self) -> Iterator[Diagnostic]:
         """ Checks if definition file overrides its parents settings with the same value. """
-        definition_name = list(self._definitions.keys())[0]
-        definition = self._definitions[definition_name]
-        if "overrides" in definition and definition_name not in ("fdmprinter", "fdmextruder"):
+        definition = self._definitions[self._definition_name]
+        if "overrides" in definition and self._definition_name not in ("fdmprinter", "fdmextruder"):
             for key, value_dict in definition["overrides"].items():
                 is_redefined, child_key, child_value, parent, inherited_by = self._isDefinedInParent(key, value_dict, definition['inherits'])
                 if is_redefined:
@@ -71,9 +77,8 @@ class Definition(Linter):
     def checkMaterialTemperature(self) -> Iterator[Diagnostic]:
         """Checks if definition file has material temperature defined within them"""
-        definition_name = list(self._definitions.keys())[0]
-        definition = self._definitions[definition_name]
-        if "overrides" in definition and definition_name not in ("fdmprinter", "fdmextruder"):
+        definition = self._definitions[self._definition_name]
+        if "overrides" in definition and self._definition_name not in ("fdmprinter", "fdmextruder"):
             for key, value_dict in definition["overrides"].items():
 
                 if "temperature" in key and "material" in key:
@@ -97,6 +102,22 @@ class Definition(Linter):
                     replacements=replacements
                 )
 
+    def checkExperimentalSetting(self) -> Iterator[Diagnostic]:
+        """Checks if definition uses experimental settings"""
+        definition = self._definitions[self._definition_name]
+        if "overrides" in definition and self._definition_name not in ("fdmprinter", "fdmextruder"):
+            for setting in definition["overrides"]:
+                if setting in self._experimental_settings:
+                    redefined = re.compile(setting)
+                    found = redefined.search(self._content)
+                    yield Diagnostic(
+                        file=self._file,
+                        diagnostic_name="diagnostic-definition-experimental-setting",
+                        message=f"Setting {setting} is still experimental and should not be used in default profiles",
+                        level="Warning",
+                        offset=found.span(0)[0]
+                    )
+
     def _loadDefinitionFiles(self, definition_file) -> None:
         """ Loads definition file contents into self._definitions. Also load parent definition if it exists. """
         definition_name = Path(definition_file.stem).stem
@@ -104,6 +125,9 @@ class Definition(Linter):
         if not definition_file.exists() or definition_name in self._definitions:
             return
 
+        if self._definition_name is None:
+            self._definition_name = definition_name
+
         # Load definition file into dictionary
         self._definitions[definition_name] = json.loads(definition_file.read_text())
@@ -152,6 +176,12 @@ class Definition(Linter):
                 return self._isDefinedInParent(key, value_dict, parent["inherits"])
         return False, None, None, None, None
 
+    def _loadExperimentalSettings(self):
+        try:
+            self._experimental_settings = self._definitions[self.base_def]["settings"]["experimental"]["children"].keys()
+        except:
+            pass
+
     def _loadBasePrinterSettings(self):
         settings = {}
         for k, v in self._definitions[self.base_def]["settings"].items():
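checkExperimentalSetting flags overrides whose keys sit under the base definition's "experimental" category; _loadExperimentalSettings collects those keys, leaving the list empty when the base definition lacks the category. A sketch of the lookup, using a trimmed, made-up definition dictionary:

    definitions = {
        "fdmprinter": {
            "settings": {
                "experimental": {
                    "children": {
                        "slicing_tolerance": {},                    # made-up members
                        "infill_enable_travel_optimization": {},    # for illustration
                    }
                }
            }
        },
        "my_printer": {
            "inherits": "fdmprinter",
            "overrides": {"slicing_tolerance": {"value": "'exclusive'"}},
        },
    }

    experimental = definitions["fdmprinter"]["settings"]["experimental"]["children"].keys()
    flagged = [s for s in definitions["my_printer"]["overrides"] if s in experimental]
    print(flagged)  # ['slicing_tolerance'] -> would yield a diagnostic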

View File

@@ -146,12 +146,13 @@ class Formulas(Linter):
         available_sections = ["values"]
         for section in available_sections:
-            options = config.options(section)
-            for option in options:
-                values ={}
-                values["value"] = config.get(section, option)
-                overrides[option] = values
-            file_data["overrides"]= overrides# Process the value here
+            if config.has_section(section):
+                options = config.options(section)
+                for option in options:
+                    values ={}
+                    values["value"] = config.get(section, option)
+                    overrides[option] = values
+                file_data["overrides"]= overrides# Process the value here
 
         return file_data
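The has_section guard matters because config.options() raises NoSectionError on files that lack a [values] section. A minimal reproduction with a made-up config:

    from configparser import ConfigParser, NoSectionError

    config = ConfigParser()
    config.read_string("[general]\nname = Example\n")  # no [values] section

    try:
        config.options("values")                       # old, unguarded path
    except NoSectionError as err:
        print("old behaviour:", err)

    if config.has_section("values"):                   # new, guarded path
        print(config.options("values"))                # skipped silently instead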

View File

@@ -37,6 +37,6 @@ class Profile(Linter):
         config = ConfigParser()
         config.read([self._file])
         name_of_profile = config.get("general", "name")
-        redefined = re.compile(name_of_profile)
+        redefined = re.compile(re.escape(name_of_profile))
         found = redefined.search(self._content)
         return name_of_profile, found
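Without re.escape, a profile name containing regex metacharacters would be compiled as a pattern, so the literal name is never found (or compilation raises on unbalanced brackets). A short sketch with a made-up profile name:

    import re

    name_of_profile = "Fine (0.1mm)"          # parentheses are regex metacharacters
    content = "name = Fine (0.1mm)"

    print(re.compile(name_of_profile).search(content))
    # None -- "(0.1mm)" is parsed as a capture group, not literal text
    print(re.compile(re.escape(name_of_profile)).search(content).span())
    # the literal name is found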

View File

@@ -206,9 +206,9 @@ chardet==3.0.4 \
 idna==2.8 \
     --hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \
     --hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c
-attrs==21.2.0 \
-    --hash=sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1 \
-    --hash=sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb
+attrs==21.3.0 \
+    --hash=sha256:8f7335278dedd26b58c38e006338242cc0977f06d51579b2b8b87b9b33bff66c \
+    --hash=sha256:50f3c9b216dc9021042f71b392859a773b904ce1a029077f58f6598272432045
 requests==2.22.0 \
     --hash=sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4 \
     --hash=sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31
@@ -222,9 +222,9 @@ constantly==15.1.0 \
 hyperlink==21.0.0 \
     --hash=sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b \
     --hash=sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4
-incremental==21.3.0 \
-    --hash=sha256:02f5de5aff48f6b9f665d99d48bfc7ec03b6e3943210de7cfc88856d755d6f57 \
-    --hash=sha256:92014aebc6a20b78a8084cdd5645eeaa7f74b8933f70fa3ada2cfbd1e3b54321
+incremental==22.10.0 \
+    --hash=sha256:b864a1f30885ee72c5ac2835a761b8fe8aa9c28b9395cacf27286602688d3e51 \
+    --hash=sha256:912feeb5e0f7e0188e6f42241d2f450002e11bbc0937c65865045854c24c0bd0
 zope.interface==5.4.0 \
     --hash=sha256:0f91b5b948686659a8e28b728ff5e74b1be6bf40cb04704453617e5f1e945ef3 \
     --hash=sha256:3c02411a3b62668200910090a0dff17c0b25aaa36145082a5a6adf08fa281e54 \

resources/conandata.yml (new file)
View File

@@ -0,0 +1 @@
+version: "5.8.0-alpha.0"

View File

@@ -28,8 +28,6 @@ class CuraResource(ConanFile):
         self.version = self.conan_data["version"]
 
     def export(self):
-        copy(self, pattern="conandata.yml", src=os.path.join(self.recipe_folder, ".."), dst=self.export_folder,
-             keep_path=False)
         copy(self, pattern="LICENSE*", src=os.path.join(self.recipe_folder, ".."), dst=self.export_folder,
              keep_path=False)
         update_conandata(self, {"version": self.version})

View File

@@ -1342,6 +1342,15 @@
             "limit_to_extruder": "wall_0_extruder_nr",
             "settable_per_mesh": true
         },
+        "z_seam_on_vertex":
+        {
+            "label": "Z Seam On Vertex",
+            "description": "Place the z-seam on a polygon vertex. Switching this off can place the seam between vertices as well. (Keep in mind that this won't override the restrictions on placing the seam on an unsupported overhang.)",
+            "type": "bool",
+            "default_value": true,
+            "settable_per_mesh": true,
+            "enabled": "z_seam_type == 'back' or z_seam_type == 'shortest'"
+        },
         "z_seam_position":
         {
             "label": "Z Seam Position",
@@ -6208,6 +6217,7 @@
             "type": "bool",
             "default_value": false,
             "enabled": "resolveOrValue('adhesion_type') == 'raft'",
+            "resolve": "any(extruderValues('raft_remove_inside_corners'))",
             "settable_per_mesh": false,
             "settable_per_extruder": false,
             "children":
@@ -6220,6 +6230,7 @@
             "value": "raft_remove_inside_corners",
             "default_value": false,
             "enabled": "resolveOrValue('adhesion_type') == 'raft'",
+            "resolve": "any(extruderValues('raft_base_remove_inside_corners'))",
             "settable_per_mesh": false,
             "settable_per_extruder": false
         },
@@ -6231,6 +6242,7 @@
             "value": "raft_remove_inside_corners",
             "default_value": false,
             "enabled": "resolveOrValue('adhesion_type') == 'raft'",
+            "resolve": "any(extruderValues('raft_interface_remove_inside_corners'))",
             "settable_per_mesh": false,
             "settable_per_extruder": false
         },
@@ -6242,6 +6254,7 @@
             "value": "raft_remove_inside_corners",
             "default_value": false,
             "enabled": "resolveOrValue('adhesion_type') == 'raft'",
+            "resolve": "any(extruderValues('raft_surface_remove_inside_corners'))",
             "settable_per_mesh": false,
             "settable_per_extruder": false
         }
@@ -6845,7 +6858,7 @@
             "label": "Prime Tower Type",
             "description": "<html>How to generate the prime tower:<ul><li><b>Normal:</b> create a bucket in which secondary materials are primed</li><li><b>Interleaved:</b> create a prime tower as sparse as possible. This will save time and filament, but is only possible if the used materials adhere to each other</li></ul></html>",
             "type": "enum",
-            "resolve": "'interleaved' if (all(material_type_var == extruderValues('material_type')[0] for material_type_var in extruderValues('material_type')) and all(material_brand_var == extruderValues('material_brand')[0] for material_brand_var in extruderValues('material_brand'))) else 'normal'",
+            "resolve": "'interleaved' if all(mode == 'interleaved' for mode in extruderValues('prime_tower_mode')) else 'interleaved' if (all(material_type_var == extruderValues('material_type')[0] for material_type_var in extruderValues('material_type')) and all(material_brand_var == extruderValues('material_brand')[0] for material_brand_var in extruderValues('material_brand'))) else 'normal'",
             "options":
             {
                 "normal": "Normal",
@@ -8021,6 +8034,19 @@
             "default_value": 90,
             "settable_per_mesh": true
         },
+        "seam_overhang_angle":
+        {
+            "label": "Seam Overhanging Wall Angle",
+            "description": "Try to prevent seams on walls that overhang more than this angle. When the value is 90, no walls will be treated as overhanging.",
+            "unit": "\u00b0",
+            "type": "float",
+            "minimum_value": "0",
+            "minimum_value_warning": "2",
+            "maximum_value": "90",
+            "default_value": 90,
+            "value": "wall_overhang_angle",
+            "settable_per_mesh": true
+        },
         "wall_overhang_speed_factor":
         {
             "label": "Overhanging Wall Speed",

View File

@@ -80,6 +80,7 @@
             "maximum_value_warning": "120",
             "minimum_value": "0"
         },
+        "material_print_temp_wait": { "value": false },
         "material_print_temperature": { "minimum_value": "0" },
         "material_standby_temperature":
         {

View File

@@ -41,6 +41,7 @@ xy_offset_layer_0
 hole_xy_offset
 hole_xy_offset_max_diameter
 z_seam_type
+z_seam_on_vertex
 z_seam_position
 z_seam_x
 z_seam_y

View File

@@ -1,3 +1,13 @@
+[5.7.2]
+* Bugfixes
+- Fixed a bug where modifier meshes and support meshes were not loaded in the correct position; should resolve https://github.com/Ultimaker/Cura/issues/18761 (and https://github.com/5axes/SpoonAntiWarping/issues/8)
+- Primeblobs were not printed for a second extruder
+- Interleaved prime tower can now be printed with a raft
+- Improved behavior of the start temperature for multiple extruders
+- Improved preheating logic when using a raft with multiple extruders
+- Enabled Raft Remove Inside Corners and Prime Tower Mode to be shipped with a material profile
+
 [5.7.1]
 * Introducing the UltiMaker Factor 4

View File

@@ -56,7 +56,7 @@
         "secondary_button": "background_1",
         "secondary_button_hover": "background_3",
-        "secondary_button_text": [255, 255, 255, 255],
+        "secondary_button_text": "text_secondary_button",
 
         "icon": "text_default",
         "toolbar_background": "background_1",
@@ -69,9 +69,7 @@
         "main_window_header_button_text_active": "background_4",
         "main_window_header_background": "background_4",
         "main_window_header_background_gradient": "background_4",
         "main_window_header_button_background_hovered": [46, 46, 46, 255],
-
-        "secondary_button_text": "text_secondary_button",
 
         "account_sync_state_icon": [255, 255, 255, 204],

View File

@@ -0,0 +1,38 @@
+import argparse
+import re
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(description = 'Extract the changelog to be inserted to the release description')
+    parser.add_argument('--changelog', type = str, help = 'Path to the changelog file', required = True)
+    parser.add_argument('--version', type = str, help = 'Cura version to be extracted', required = True)
+    args = parser.parse_args()
+
+    # In the changelog we usually omit the patch number for minor releases (i.e. 5.7.0 => 5.7)
+    if args.version.endswith('.0'):
+        args.version = args.version[:-2]
+
+    start_token = f"[{args.version}]"
+    pattern_stop_log = "\[\d+(\.\d+){1,2}\]"
+    log_line = False
+    first_chapter = True
+
+    with open(args.changelog, "r") as changelog_file:
+        for line in changelog_file.readlines():
+            line = line.strip()
+
+            if log_line:
+                if re.match(pattern_stop_log, line):
+                    log_line = False
+                elif len(line) > 0:
+                    if line.startswith('*'):
+                        if not first_chapter:
+                            print("")
+                        first_chapter = False
+
+                        line = line[1:].strip()
+                        print(f"<H2>{line}</H2>\n")
+                    else:
+                        print(line)
+
+            elif line == start_token:
+                log_line = True
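The release workflow above calls this script with --version and --changelog and redirects stdout into formatted_changelog.txt. A quick sanity check of the extraction rules, trimmed to the core matching logic and run against an in-memory changelog instead of resources/texts/change_log.txt:

    import io, re

    changelog = io.StringIO("[5.7.2]\n* Bugfixes\n- Fixed a bug\n\n[5.7.1]\n* Other\n")
    version = "5.7.2"
    start_token = f"[{version}]"
    pattern_stop_log = r"\[\d+(\.\d+){1,2}\]"

    log_line = False
    for line in changelog:
        line = line.strip()
        if log_line:
            if re.match(pattern_stop_log, line):   # next version header ends the section
                break
            if line.startswith('*'):
                print(f"<H2>{line[1:].strip()}</H2>")
            elif line:
                print(line)
        elif line == start_token:
            log_line = True
    # prints "<H2>Bugfixes</H2>" and "- Fixed a bug", stopping at [5.7.1]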